Projects >> lucene-solr >> e7aec6b5327e5c0e67a11f8bbd821a1b69c38b44

Chunk
Conflicting content
          throws IOException {
        return delegate().docs(liveDocs, reuse, flags);
      }
<<<<<<< HEAD
      
=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }

    @Override
Solution content
          throws IOException {
        return delegate().docs(liveDocs, reuse, flags);
      }

    }

    @Override
File
BloomFilteringPostingsFormat.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
File
DirectPostingsFormat.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.Closeable;
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
File
MemoryPostingsFormat.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.TermStats;
<<<<<<< HEAD
import org.apache.lucene.codecs.TermsConsumer;
=======
import org.apache.lucene.index.DocsAndPositionsEnum;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfo.IndexOptions;
Solution content
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.TermStats;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfo.IndexOptions;
File
MemoryPostingsFormat.java
Developer's decision
None
Kind of conflict
Import
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.IOException;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.TreeMap;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.PostingsReaderBase;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.PostingsReaderBase;
File
PulsingPostingsReader.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
File
SimpleTextFieldsReader.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
    private boolean readPositions;
    private int startOffset;
    private int endOffset;
<<<<<<< HEAD
    private int posPending;
=======
    private int cost;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2

    public SimpleTextDocsAndPositionsEnum() {
      this.inStart = SimpleTextFieldsReader.this.in;
Solution content
    private boolean readPositions;
    private int startOffset;
    private int endOffset;
    private int posPending;
    private int cost;

    public SimpleTextDocsAndPositionsEnum() {
      this.inStart = SimpleTextFieldsReader.this.in;
File
SimpleTextFieldsReader.java
Developer's decision
Concatenation
Kind of conflict
Attribute
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.DocsEnum;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.index.DocsEnum;
File
SimpleTextTermVectorsReader.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.index.DocsEnum;
Solution content
 * limitations under the License.
 */

import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.index.DocsEnum;
File
Lucene40TermVectorsReader.java
Developer's decision
Version 2
Kind of conflict
Import
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import static org.apache.lucene.codecs.lucene41.Lucene41PostingsFormat.BLOCK_SIZE;
import static org.apache.lucene.codecs.lucene41.ForUtil.MAX_DATA_SIZE;
import static org.apache.lucene.codecs.lucene41.ForUtil.MAX_ENCODED_SIZE;
import static org.apache.lucene.codecs.lucene41.Lucene41PostingsWriter.IntBlockTermState;

import java.io.IOException;
import java.util.Arrays;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.PostingsReaderBase;
Solution content
 * limitations under the License.
 */


import static org.apache.lucene.codecs.lucene41.Lucene41PostingsFormat.BLOCK_SIZE;
import static org.apache.lucene.codecs.lucene41.ForUtil.MAX_DATA_SIZE;
import static org.apache.lucene.codecs.lucene41.ForUtil.MAX_ENCODED_SIZE;
import static org.apache.lucene.codecs.lucene41.Lucene41PostingsWriter.IntBlockTermState;

import java.io.IOException;
import java.util.Arrays;

import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.PostingsReaderBase;
File
Lucene41PostingsReader.java
Developer's decision
Version 2
Kind of conflict
Import
Chunk
Conflicting content
    public int advance(int target) throws IOException {
      return in.advance(target);
    }
<<<<<<< HEAD
=======

    @Override
    public long cost() {
      return in.cost();
    }
  }

  /** Base class for filtering {@link DocsAndPositionsEnum} implementations. */
  public static class FilterDocsAndPositionsEnum extends DocsAndPositionsEnum {
    /** The underlying DocsAndPositionsEnum instance. */
    protected final DocsAndPositionsEnum in;

    /**
     * Create a new FilterDocsAndPositionsEnum
     * @param in the underlying DocsAndPositionsEnum instance.
     */
    public FilterDocsAndPositionsEnum(DocsAndPositionsEnum in) {
      this.in = in;
    }

    @Override
    public AttributeSource attributes() {
      return in.attributes();
    }

    @Override
    public int docID() {
      return in.docID();
    }

    @Override
    public int freq() throws IOException {
      return in.freq();
    }
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2

    @Override
    public int nextPosition() throws IOException {
Solution content
    public int advance(int target) throws IOException {
      return in.advance(target);
    }

    @Override
    public int nextPosition() throws IOException {
File
FilterAtomicReader.java
Developer's decision
Version 1
Kind of conflict
Annotation
Attribute
Class signature
Comment
Method declaration
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
File
BooleanQuery.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
    public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
        boolean topScorer, PostingFeatures flags, Bits acceptDocs)
        throws IOException {
<<<<<<< HEAD
      if (termConjunction) {
        // specialized scorer for term conjunctions
        return createConjunctionTermScorer(context, acceptDocs, flags);
      }
=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      List required = new ArrayList();
      List prohibited = new ArrayList();
      List optional = new ArrayList();
Solution content
    public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
        boolean topScorer, PostingFeatures flags, Bits acceptDocs)
        throws IOException {

      List required = new ArrayList();
      List prohibited = new ArrayList();
      List optional = new ArrayList();
File
BooleanQuery.java
Developer's decision
Version 2
Kind of conflict
If statement
Chunk
Conflicting content
      // detect and we never do so today... (ie, we only
      // return BooleanScorer for topScorer):

<<<<<<< HEAD
      // Check if we can return a BooleanScorer
      // nocommit - we need to somehow detect if we need to iterate positions
      // for now, always return BS2
      boolean needsPositions = true;
      if (!needsPositions && !scoreDocsInOrder && flags == PostingFeatures.DOCS_AND_FREQS && topScorer && required.size() == 0) {
=======
      // Check if we can and should return a BooleanScorer
      // TODO: (LUCENE-4872) in some cases BooleanScorer may be faster for minNrShouldMatch
      // but the same is even true of pure conjunctions...
      if (!scoreDocsInOrder && topScorer && required.size() == 0 && minNrShouldMatch <= 1) {
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
        return new BooleanScorer(this, disableCoord, minNrShouldMatch, optional, prohibited, maxCoord);
      }
      
Solution content
      // detect and we never do so today... (ie, we only
      // return BooleanScorer for topScorer):


      // Check if we can return a BooleanScorer
      // nocommit - we need to somehow detect if we need to iterate positions
      // for now, always return BS2
      boolean needsPositions = true;
      if (!needsPositions && !scoreDocsInOrder && flags == PostingFeatures.DOCS_AND_FREQS && topScorer && required.size() == 0) {
        return new BooleanScorer(this, disableCoord, minNrShouldMatch, optional, prohibited, maxCoord);
      }
      
File
BooleanQuery.java
Developer's decision
Version 1
Kind of conflict
Comment
If statement
Variable
Chunk
Conflicting content
        return null;
      }
      
<<<<<<< HEAD
      // Return a BooleanScorer2
      return new BooleanScorer2(this, disableCoord, minNrShouldMatch, required, prohibited, optional, maxCoord);
    }

    private Scorer createConjunctionTermScorer(AtomicReaderContext context, Bits acceptDocs, PostingFeatures flags)
        throws IOException {

      // TODO: fix scorer API to specify "needsScores" up
      // front, so we can do match-only if caller doesn't
      // needs scores

      final DocsAndFreqs[] docsAndFreqs = new DocsAndFreqs[weights.size()];
      for (int i = 0; i < docsAndFreqs.length; i++) {
        final TermWeight weight = (TermWeight) weights.get(i);
        final Scorer scorer = weight.scorer(context, true, false, flags, acceptDocs);
        if (scorer == null) {
          return null;
        } else {
          assert scorer instanceof TermScorer;
          docsAndFreqs[i] = new DocsAndFreqs((TermScorer) scorer);
=======
      // simple conjunction
      if (optional.size() == 0 && prohibited.size() == 0) {
        float coord = disableCoord ? 1.0f : coord(required.size(), maxCoord);
        return new ConjunctionScorer(this, required.toArray(new Scorer[required.size()]), coord);
      }
      
      // simple disjunction
      if (required.size() == 0 && prohibited.size() == 0 && minNrShouldMatch <= 1 && optional.size() > 1) {
        float coord[] = new float[optional.size()+1];
        for (int i = 0; i < coord.length; i++) {
          coord[i] = disableCoord ? 1.0f : coord(i, maxCoord);
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
        }
        return new DisjunctionSumScorer(this, optional.toArray(new Scorer[optional.size()]), coord);
      }
Solution content
        return null;
      }
      
      // simple conjunction
      if (optional.size() == 0 && prohibited.size() == 0) {
        float coord = disableCoord ? 1.0f : coord(required.size(), maxCoord);
        return new ConjunctionScorer(this, required.toArray(new Scorer[required.size()]), coord);
      }
      
      // simple disjunction
      if (required.size() == 0 && prohibited.size() == 0 && minNrShouldMatch <= 1 && optional.size() > 1) {
        float coord[] = new float[optional.size()+1];
        for (int i = 0; i < coord.length; i++) {
          coord[i] = disableCoord ? 1.0f : coord(i, maxCoord);
        }
        return new DisjunctionSumScorer(this, optional.toArray(new Scorer[optional.size()]), coord);
      }
File
BooleanQuery.java
Developer's decision
Version 2
Kind of conflict
Array access
Assert statement
Attribute
Cast expression
Comment
For statement
If statement
Method invocation
Method signature
Return statement
Variable
Chunk
Conflicting content
    public int advance(int target) throws IOException {
      return scorer.advance(target);
    }
<<<<<<< HEAD
    
    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      return scorer.intervals(collectIntervals);
    }

    @Override
    public int nextPosition() throws IOException {
      return scorer.nextPosition();
    }

    @Override
    @Override
    public int startPosition() throws IOException {
      return scorer.startPosition();
    }

    @Override
    public int endPosition() throws IOException {
      return scorer.endPosition();
    }

    @Override
    public int startOffset() throws IOException {
      return scorer.startOffset();
    }

    @Override
    public int endOffset() throws IOException {
      return scorer.endOffset();
=======

    public long cost() {
      return scorer.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    public int advance(int target) throws IOException {
      return scorer.advance(target);
    }
    
    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      return scorer.intervals(collectIntervals);
    }

    @Override
    public int nextPosition() throws IOException {
      return scorer.nextPosition();
    }

    @Override
    public int startPosition() throws IOException {
      return scorer.startPosition();
    }

    @Override
    public int endPosition() throws IOException {
      return scorer.endPosition();
    }

    @Override
    public int startOffset() throws IOException {
      return scorer.startOffset();
    }

    @Override
    public int endOffset() throws IOException {
      return scorer.endOffset();
    }

    @Override
    public long cost() {
      return scorer.cost();
    }

  }
File
BooleanScorer2.java
Developer's decision
Manual
Kind of conflict
Annotation
Method declaration
Method invocation
Method signature
Return statement
Chunk
Conflicting content
  }
  
  @Override
<<<<<<< HEAD
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    return countingSumScorer.intervals(collectIntervals);
  }

  @Override
  public int nextPosition() throws IOException {
    return countingSumScorer.nextPosition();
  }

  @Override
  public int startPosition() throws IOException {
    return countingSumScorer.startPosition();
  }

  @Override
  public int endPosition() throws IOException {
    return countingSumScorer.endPosition();
  }

  @Override
  public int startOffset() throws IOException {
    return countingSumScorer.startOffset();
  }

  @Override
  public int endOffset() throws IOException {
    return countingSumScorer.endOffset();
=======
  public long cost() {
    return countingSumScorer.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  }

  @Override
Solution content
  }

  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    return countingSumScorer.intervals(collectIntervals);
  }

  @Override
  public long cost() {
    return countingSumScorer.cost();
  }

  @Override
  public int nextPosition() throws IOException {
    return countingSumScorer.nextPosition();
  }

  @Override
  public int startPosition() throws IOException {
    return countingSumScorer.startPosition();
  }

  @Override
  public int endPosition() throws IOException {
    return countingSumScorer.endPosition();
  }

  @Override
  public int startOffset() throws IOException {
    return countingSumScorer.startOffset();
  }

  @Override
  public int endOffset() throws IOException {
    return countingSumScorer.endOffset();
  }

  @Override
File
BooleanScorer2.java
Developer's decision
Manual
Kind of conflict
Annotation
Method declaration
Method invocation
Method signature
Return statement
Chunk
Conflicting content
    
    @Override
    public final int nextDoc() { throw new UnsupportedOperationException(); }
<<<<<<< HEAD

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException { throw new UnsupportedOperationException(); }
=======
    
    @Override
    public long cost() { return 1; }
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }

  // A CachingCollector which caches scores
Solution content
    
    @Override
    public final int nextDoc() { throw new UnsupportedOperationException(); }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException { throw new UnsupportedOperationException(); }
    
    @Override
    public long cost() { return 1; }

  }

  // A CachingCollector which caches scores
File
CachingCollector.java
Developer's decision
Concatenation
Kind of conflict
Annotation
Method declaration
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
import org.apache.lucene.search.intervals.ConjunctionIntervalIterator;
import org.apache.lucene.search.intervals.IntervalIterator;
import org.apache.lucene.util.ArrayUtil;

=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.search.intervals.ConjunctionIntervalIterator;
import org.apache.lucene.search.intervals.IntervalIterator;
import org.apache.lucene.util.ArrayUtil;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
File
ConjunctionScorer.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
/** Scorer for conjunctions, sets of queries, all of which are required. */
class ConjunctionScorer extends Scorer {
<<<<<<< HEAD
  
  private final Scorer[] scorersOrdered;
  private final Scorer[] scorers;
  private int lastDoc = -1;
  final PositionQueue posQueue;
=======
  protected int lastDoc = -1;
  protected final DocsAndFreqs[] docsAndFreqs;
  private final DocsAndFreqs lead;
  private final float coord;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2

  ConjunctionScorer(Weight weight, Scorer[] scorers) {
    this(weight, scorers, 1f);
Solution content
/** Scorer for conjunctions, sets of queries, all of which are required. */
class ConjunctionScorer extends Scorer {
  
  private final Scorer[] scorersOrdered;
  private final Scorer[] scorers;
  private int lastDoc = -1;
  private final float coord;
  final PositionQueue posQueue;
File
ConjunctionScorer.java
Developer's decision
Combination
Kind of conflict
Attribute
Chunk
Conflicting content
    this(weight, scorers, 1f);
  }
  
<<<<<<< HEAD
  public ConjunctionScorer(Weight weight, Scorer... scorers) throws IOException {
    super(weight);
    scorersOrdered = new Scorer[scorers.length];
    System.arraycopy(scorers, 0, scorersOrdered, 0, scorers.length);
    this.scorers = scorers;
    posQueue = new PositionQueue(scorers);
    
=======
  ConjunctionScorer(Weight weight, Scorer[] scorers, float coord) {
    super(weight);
    this.coord = coord;
    this.docsAndFreqs = new DocsAndFreqs[scorers.length];
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    for (int i = 0; i < scorers.length; i++) {
      docsAndFreqs[i] = new DocsAndFreqs(scorers[i]);
    }
Solution content
    for (int i = 0; i < scorers.length; i++) {
  }
  
  public ConjunctionScorer(Weight weight, Scorer[] scorers, float coord) throws IOException {
    super(weight);
    scorersOrdered = new Scorer[scorers.length];
    System.arraycopy(scorers, 0, scorersOrdered, 0, scorers.length);
    this.scorers = scorers;
    this.coord = coord;
    posQueue = new PositionQueue(scorers);
    
      if (scorers[i].nextDoc() == NO_MORE_DOCS) {
        // If even one of the sub-scorers does not have any documents, this
        // scorer should not attempt to do any more work.
        lastDoc = NO_MORE_DOCS;
        return;
      }
    }
File
ConjunctionScorer.java
Developer's decision
Manual
Kind of conflict
Attribute
Method invocation
Method signature
Chunk
Conflicting content
      // advance head for next iteration
      doc = lead.doc = lead.scorer.advance(doc);
    }
<<<<<<< HEAD
    posQueue.advanceTo(doc);
    return doc;
=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  }

  @Override
Solution content
    }
    posQueue.advanceTo(doc);
    return doc;
  }
  
  @Override
File
ConjunctionScorer.java
Developer's decision
Version 1
Kind of conflict
Method invocation
Return statement
Variable
Chunk
Conflicting content
      return docIdSetIterator.advance(target);
    }
    
<<<<<<< HEAD
    
=======
    @Override
    public long cost() {
      return docIdSetIterator.cost();
    }

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    private Collector wrapCollector(final Collector collector) {
      return new Collector() {
        @Override
Solution content
      return docIdSetIterator.advance(target);
    }

    @Override
    public long cost() {
      return docIdSetIterator.cost();
    }

    private Collector wrapCollector(final Collector collector) {
      return new Collector() {
        @Override
File
ConstantScoreQuery.java
Developer's decision
Version 2
Kind of conflict
Annotation
Method declaration
Chunk
Conflicting content
    /** Create the scorer used to score our associated DisjunctionMaxQuery */
    @Override
    public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
<<<<<<< HEAD
        boolean topScorer, PostingFeatures flags, Bits acceptDocs) throws IOException {
      Scorer[] scorers = new Scorer[weights.size()];
      int idx = 0;
=======
        boolean topScorer, Bits acceptDocs) throws IOException {
      List scorers = new ArrayList();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      for (Weight w : weights) {
        // we will advance() subscorers
        Scorer subScorer = w.scorer(context, true, false, flags, acceptDocs);
Solution content
    /** Create the scorer used to score our associated DisjunctionMaxQuery */
    @Override
    public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
        boolean topScorer, PostingFeatures flags, Bits acceptDocs) throws IOException {
      List scorers = new ArrayList();
      for (Weight w : weights) {
        // we will advance() subscorers
        Scorer subScorer = w.scorer(context, true, false, flags, acceptDocs);
File
DisjunctionMaxQuery.java
Developer's decision
Combination
Kind of conflict
Method invocation
Variable
Chunk
Conflicting content
        
  }

<<<<<<< HEAD
  @Override
  public int nextDoc() throws IOException {
    if (numScorers == 0) return doc = NO_MORE_DOCS;
    while (subScorers[0].docID() == doc) {
      if (subScorers[0].nextDoc() != NO_MORE_DOCS) {
        heapAdjust(0);
      } else {
        heapRemoveRoot();
        if (numScorers == 0) {
          return doc = NO_MORE_DOCS;
        }
      }
    }
    doc = subScorers[0].docID();
    posQueue.advanceTo(doc);
    return doc;
  }

  @Override
  public int docID() {
    return doc;
  }

=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  /** Determine the current document score.  Initially invalid, until {@link #nextDoc()} is called the first time.
   * @return the score of the current generated document
   */
Solution content
        
  }

  /** Determine the current document score.  Initially invalid, until {@link #nextDoc()} is called the first time.
   * @return the score of the current generated document
   */
File
DisjunctionMaxScorer.java
Developer's decision
Version 2
Kind of conflict
Annotation
Method declaration
Chunk
Conflicting content
  public int freq() throws IOException {
    return freq;
  }
<<<<<<< HEAD

  @Override
  public int advance(int target) throws IOException {
    if (numScorers == 0) return doc = NO_MORE_DOCS;
    while (subScorers[0].docID() < target) {
      if (subScorers[0].advance(target) != NO_MORE_DOCS) {
        heapAdjust(0);
      } else {
        heapRemoveRoot();
        if (numScorers == 0) {
          return doc = NO_MORE_DOCS;
        }
      }
    }
    doc = subScorers[0].docID();
    posQueue.advanceTo(doc);
    return doc;
  }
  
  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    return new DisjunctionIntervalIterator(this, collectIntervals, pullIterators(collectIntervals, subScorers));
  }
=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
}
Solution content
  public int freq() throws IOException {
    return freq;
  }
  
  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    return new DisjunctionIntervalIterator(this, collectIntervals, pullIterators(collectIntervals, subScorers));
  }
}
File
DisjunctionMaxScorer.java
Developer's decision
Combination
Kind of conflict
Annotation
Method declaration
Chunk
Conflicting content
    super(weight);
    this.subScorers = subScorers;
<<<<<<< HEAD
  protected DisjunctionScorer(Weight weight, Scorer subScorers[]) {
    this.numScorers = numScorers;
    this.posQueue = new PositionQueue(subScorers);
=======
    this.numScorers = subScorers.length;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    heapify();
  }
  
Solution content
  protected DisjunctionScorer(Weight weight, Scorer subScorers[]) {
    super(weight);
    this.subScorers = subScorers;
    this.numScorers = subScorers.length;
    this.posQueue = new PositionQueue(subScorers);
    heapify();
  }
  
File
DisjunctionScorer.java
Developer's decision
Combination
Kind of conflict
Attribute
Method invocation
Chunk
Conflicting content
  }

  @Override
<<<<<<< HEAD
  public int nextPosition() throws IOException {
    return posQueue.nextPosition();
  }

  @Override
  public int startPosition() throws IOException {
    return posQueue.startPosition();
  }

  @Override
  public int endPosition() throws IOException {
    return posQueue.endPosition();
  }

  @Override
  public int startOffset() throws IOException {
    return posQueue.startOffset();
  }

  @Override
  public int endOffset() throws IOException {
    return posQueue.endOffset();
  }
=======
  public long cost() {
    long sum = 0;
    for (int i = 0; i < numScorers; i++) {
      sum += subScorers[i].cost();
    }
    return sum;
  } 
  
  @Override
  public int docID() {
   return doc;
  }
 
  @Override
  public int nextDoc() throws IOException {
    assert doc != NO_MORE_DOCS;
    while(true) {
      if (subScorers[0].nextDoc() != NO_MORE_DOCS) {
        heapAdjust(0);
      } else {
        heapRemoveRoot();
        if (numScorers == 0) {
          return doc = NO_MORE_DOCS;
        }
      }
      if (subScorers[0].docID() != doc) {
        afterNext();
        return doc;
      }
    }
  }
  
  @Override
  public int advance(int target) throws IOException {
    assert doc != NO_MORE_DOCS;
    while(true) {
      if (subScorers[0].advance(target) != NO_MORE_DOCS) {
        heapAdjust(0);
      } else {
        heapRemoveRoot();
        if (numScorers == 0) {
          return doc = NO_MORE_DOCS;
        }
      }
      if (subScorers[0].docID() >= target) {
        afterNext();
        return doc;
      }
    }
  }
  
  /** 
   * Called after next() or advance() land on a new document.
   * 

* {@code subScorers[0]} will be positioned to the new docid, * which could be {@code NO_MORE_DOCS} (subclass must handle this). *

* implementations should assign {@code doc} appropriately, and do any * other work necessary to implement {@code score()} and {@code freq()} */ // TODO: make this less horrible protected abstract void afterNext() throws IOException; >>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2 }

Solution content
 
  @Override
  }

  @Override
  public int nextPosition() throws IOException {
    //System.out.println("Advancing " + this.toString());
    int pos = posQueue.nextPosition();
    //System.out.println(this);
    return pos;
  }

  @Override
  public int startPosition() throws IOException {
    return posQueue.startPosition();
  }

  @Override
  public int endPosition() throws IOException {
    return posQueue.endPosition();
  }

  @Override
  public int startOffset() throws IOException {
    return posQueue.startOffset();
  }

  @Override
  public int endOffset() throws IOException {
    return posQueue.endOffset();
  }

  @Override
  public String toString() {
    try {
      return String.format(Locale.ROOT, "DisjScorer[%s] %d(%d)->%d(%d)", weight.toString(),
                            posQueue.startPosition(),
                            posQueue.startOffset(), posQueue.endPosition(), posQueue.endOffset());
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  @Override
  public long cost() {
    long sum = 0;
    for (int i = 0; i < numScorers; i++) {
      sum += subScorers[i].cost();
    }
    return sum;
  } 
  
  @Override
  public int docID() {
   return doc;
  }
  public int nextDoc() throws IOException {
    assert doc != NO_MORE_DOCS;
    while(true) {
      if (subScorers[0].nextDoc() != NO_MORE_DOCS) {
        heapAdjust(0);
      } else {
        heapRemoveRoot();
        if (numScorers == 0) {
          return doc = NO_MORE_DOCS;
        }
      }
      if (subScorers[0].docID() != doc) {
        afterNext();
        return doc;
      }
    }
  }
  
  @Override
  public int advance(int target) throws IOException {
    assert doc != NO_MORE_DOCS;
    while(true) {
      if (subScorers[0].advance(target) != NO_MORE_DOCS) {
        heapAdjust(0);
      } else {
        heapRemoveRoot();
        if (numScorers == 0) {
          return doc = NO_MORE_DOCS;
        }
      }
      if (subScorers[0].docID() >= target) {
        afterNext();
        return doc;
      }
    }
  }
  
  /** 
   * Called after next() or advance() land on a new document.
   * 

* {@code subScorers[0]} will be positioned to the new docid, * which could be {@code NO_MORE_DOCS} (subclass must handle this). *

* implementations should assign {@code doc} appropriately, and do any * other work necessary to implement {@code score()} and {@code freq()} */ // TODO: make this less horrible protected abstract void afterNext() throws IOException; }

File
DisjunctionScorer.java
Developer's decision
Manual
Kind of conflict
Annotation
Comment
Method declaration
Method interface
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
import org.apache.lucene.search.intervals.ConjunctionIntervalIterator;
import org.apache.lucene.search.intervals.DisjunctionIntervalIterator;
import org.apache.lucene.search.intervals.IntervalIterator;

=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import java.io.IOException;
import java.util.List;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.search.intervals.ConjunctionIntervalIterator;
import org.apache.lucene.search.intervals.DisjunctionIntervalIterator;
import org.apache.lucene.search.intervals.IntervalIterator;

import java.io.IOException;
import java.util.List;
File
DisjunctionSumScorer.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
  /** The number of subscorers that provide the current match. */
  protected int nrMatchers = -1;

<<<<<<< HEAD
  private double score = Float.NaN;

=======
  protected double score = Float.NaN;
  private final float[] coord;
  
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  /** Construct a DisjunctionScorer.
   * @param weight The weight to be used.
   * @param subScorers Array of at least two subscorers.
Solution content
  /** The number of subscorers that provide the current match. */
  protected int nrMatchers = -1;

  protected double score = Float.NaN;
  private final float[] coord;

  /** Construct a DisjunctionScorer.
   * @param weight The weight to be used.
   * @param subScorers Array of at least two subscorers.
File
DisjunctionSumScorer.java
Developer's decision
Version 2
Kind of conflict
Attribute
Chunk
Conflicting content
   * @param subScorers Array of at least two subscorers.
   * @param coord Table of coordination factors
   */
<<<<<<< HEAD
  public DisjunctionSumScorer(Weight weight, List subScorers, int minimumNrMatchers) throws IOException {
    super(weight, subScorers.toArray(new Scorer[subScorers.size()]), subScorers.size());
    if (minimumNrMatchers <= 0) {
      throw new IllegalArgumentException("Minimum nr of matchers must be positive");
    }
=======
  DisjunctionSumScorer(Weight weight, Scorer[] subScorers, float[] coord) throws IOException {
    super(weight, subScorers);

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    if (numScorers <= 1) {
      throw new IllegalArgumentException("There must be at least 2 subScorers");
    }
Solution content
   * @param subScorers Array of at least two subscorers.
   * @param coord Table of coordination factors
   */
  DisjunctionSumScorer(Weight weight, Scorer[] subScorers, float[] coord) throws IOException {
    super(weight, subScorers);
    if (numScorers <= 1) {
      throw new IllegalArgumentException("There must be at least 2 subScorers");
    }
File
DisjunctionSumScorer.java
Developer's decision
Version 2
Kind of conflict
If statement
Method invocation
Method signature
Chunk
Conflicting content
  }
  
  @Override
<<<<<<< HEAD

  public int nextDoc() throws IOException {
    assert doc != NO_MORE_DOCS;
    while(true) {
      while (subScorers[0].docID() == doc) {
        if (subScorers[0].nextDoc() != NO_MORE_DOCS) {
          heapAdjust(0);
        } else {
          heapRemoveRoot();
          if (numScorers < minimumNrMatchers) {
            return doc = NO_MORE_DOCS;
          }
        }
      }
      afterNext();
      if (nrMatchers >= minimumNrMatchers) {
        break;
      }
    }
    posQueue.advanceTo(doc);
    return doc;
  }
  
  private void afterNext() throws IOException {
=======
  protected void afterNext() throws IOException {
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    final Scorer sub = subScorers[0];
    doc = sub.docID();
    if (doc != NO_MORE_DOCS) {
Solution content
  }

  protected void afterNext() throws IOException {
    final Scorer sub = subScorers[0];
    doc = sub.docID();
    posQueue.advanceTo(doc);
    if (doc != NO_MORE_DOCS) {
File
DisjunctionSumScorer.java
Developer's decision
Manual
Kind of conflict
Method declaration
Method signature
Chunk
Conflicting content
  public int freq() throws IOException {
    return nrMatchers;
  }
<<<<<<< HEAD

  /**
   * Advances to the first match beyond the current whose document number is
   * greater than or equal to a given target. 
* The implementation uses the advance() method on the subscorers. * * @param target * The target document number. * @return the document whose number is greater than or equal to the given * target, or -1 if none exist. */ @Override public int advance(int target) throws IOException { if (numScorers == 0) return doc = NO_MORE_DOCS; while (subScorers[0].docID() < target) { if (subScorers[0].advance(target) != NO_MORE_DOCS) { heapAdjust(0); } else { if (minimumNrMatchers > 1) { heapRemoveRoot(); if (numScorers == 0) { return doc = NO_MORE_DOCS; } } } afterNext(); posQueue.advanceTo(doc); if (nrMatchers >= minimumNrMatchers) { return doc; } else { return nextDoc(); } } @Override public IntervalIterator intervals(boolean collectIntervals) throws IOException { return new ConjunctionIntervalIterator(this, collectIntervals, minimumNrMatchers, pullIterators(collectIntervals, subScorers)); } return new DisjunctionIntervalIterator(this, collectIntervals, pullIterators(collectIntervals, subScorers)); } ======= >>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2 }
Solution content
  public int freq() throws IOException {
    return nrMatchers;
  }
  
  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    return new DisjunctionIntervalIterator(this, collectIntervals, pullIterators(collectIntervals, subScorers));
  }
}
File
DisjunctionSumScorer.java
Developer's decision
Combination
Kind of conflict
Annotation
Comment
Method declaration
Chunk
Conflicting content
  private final int[] gens = new int[CHUNK];
  
  boolean noDocs;
<<<<<<< HEAD
  
=======
  private final long cost;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  private final static class ChunkState {
    final TermDocsEnumFactory factory;
    final DocsEnum posEnum;
Solution content
  private final int[] gens = new int[CHUNK];
  
  boolean noDocs;
  private final long cost;

  private final static class ChunkState {
    final TermDocsEnumFactory factory;
    final DocsEnum posEnum;
File
ExactPhraseScorer.java
Developer's decision
Version 2
Kind of conflict
Attribute
Chunk
Conflicting content
  
  private int docID = -1;
  private int freq;
<<<<<<< HEAD
=======

  private final Similarity.SimScorer docScorer;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  
  private final Similarity.ExactSimScorer docScorer;
Solution content
  
  private int docID = -1;
  private int freq;
  
  private final Similarity.SimScorer docScorer;
File
ExactPhraseScorer.java
Developer's decision
Version 1
Kind of conflict
Attribute
Chunk
Conflicting content
  private final Similarity.ExactSimScorer docScorer;

  ExactPhraseScorer(Weight weight, PhraseQuery.PostingsAndFreq[] postings,
<<<<<<< HEAD
      Similarity.ExactSimScorer docScorer) throws IOException {
=======
                    Similarity.SimScorer docScorer) throws IOException {
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    super(weight);
    this.docScorer = docScorer;
    
Solution content
  ExactPhraseScorer(Weight weight, PhraseQuery.PostingsAndFreq[] postings,
                    Similarity.SimScorer docScorer) throws IOException {
    super(weight);
    this.docScorer = docScorer;
    
File
ExactPhraseScorer.java
Developer's decision
Version 2
Kind of conflict
Variable
Chunk
Conflicting content
    this.docScorer = docScorer;
    
    chunkStates = new ChunkState[postings.length];
<<<<<<< HEAD
    
    endMinus1 = postings.length - 1;
    
    for (int i = 0; i < postings.length; i++) {
      
=======

    endMinus1 = postings.length-1;
    
    // min(cost)
    cost = postings[0].postings.cost();

    for(int i=0;i>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      // Coarse optimization: advance(target) is fairly
      // costly, so, if the relative freq of the 2nd
      // rarest term is not that much (> 1/5th) rarer than
Solution content
    this.docScorer = docScorer;
    
    chunkStates = new ChunkState[postings.length];

    endMinus1 = postings.length-1;
    
    // min(cost)
    cost = postings[0].postings.cost();

    for(int i=0;i 1/5th) rarer than
File
ExactPhraseScorer.java
Developer's decision
Version 2
Kind of conflict
Attribute
Comment
For statement
Method invocation
Variable
Chunk
Conflicting content
    
    return freq;
  }
<<<<<<< HEAD
  
  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    TermIntervalIterator[] posIters = new TermIntervalIterator[chunkStates.length];
    DocsEnum[] enums = new DocsEnum[chunkStates.length];
    for (int i = 0; i < chunkStates.length; i++) {
      posIters[i] = new TermIntervalIterator(this, enums[i] = chunkStates[i].factory.docsAndPositionsEnum(), false, collectIntervals);
    }
    return new SloppyPhraseScorer.AdvancingIntervalIterator(this, collectIntervals, enums, new BlockIntervalIterator(this, collectIntervals, posIters));
=======

  @Override
  public long cost() {
    return cost;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  }
}
Solution content
    
    return freq;
  }

  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    TermIntervalIterator[] posIters = new TermIntervalIterator[chunkStates.length];
    DocsEnum[] enums = new DocsEnum[chunkStates.length];
    for (int i = 0; i < chunkStates.length; i++) {
      posIters[i] = new TermIntervalIterator(this, enums[i] = chunkStates[i].factory.docsAndPositionsEnum(), false, collectIntervals);
    }
    return new SloppyPhraseScorer.AdvancingIntervalIterator(this, collectIntervals, enums, new BlockIntervalIterator(this, collectIntervals, posIters));
  }

  @Override
  public long cost() {
    return cost;
  }

}
File
ExactPhraseScorer.java
Developer's decision
Manual
Kind of conflict
Annotation
Attribute
For statement
Method invocation
Method signature
Return statement
Variable
Chunk
Conflicting content
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocTermOrds;
<<<<<<< HEAD
import org.apache.lucene.index.DocsEnum;
=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexReader;
Solution content
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocTermOrds;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexReader;
File
FieldCacheImpl.java
Developer's decision
Version 2
Kind of conflict
Import
Chunk
Conflicting content
        return term;
    }

    @Override
<<<<<<< HEAD
    public int size() {
      return docToTermOrd.size();
    }

    @Override
    public BytesRef lookup(int ord, BytesRef ret) {
      return bytes.fill(ret, termOrdToBytesOffset.get(ord));
    }

    @Override
    public TermsEnum getTermsEnum() {
      return this.new DocTermsIndexEnum();
    }

    class DocTermsIndexEnum extends TermsEnum {
      int currentOrd;
      int currentBlockNumber;
      int end;  // end position in the current block
      final byte[][] blocks;
      final int[] blockEnds;

      final BytesRef term = new BytesRef();

      public DocTermsIndexEnum() {
        currentOrd = 0;
        currentBlockNumber = 0;
        blocks = bytes.getBlocks();
        blockEnds = bytes.getBlockEnds();
        currentBlockNumber = bytes.fillAndGetIndex(term, termOrdToBytesOffset.get(0));
        end = blockEnds[currentBlockNumber];
      }

      @Override
      public SeekStatus seekCeil(BytesRef text, boolean useCache /* ignored */) throws IOException {
        int low = 1;
        int high = numOrd-1;
        
        while (low <= high) {
          int mid = (low + high) >>> 1;
          seekExact(mid);
          int cmp = term.compareTo(text);

          if (cmp < 0)
            low = mid + 1;
          else if (cmp > 0)
            high = mid - 1;
          else
            return SeekStatus.FOUND; // key found
        }
        
        if (low == numOrd) {
          return SeekStatus.END;
        } else {
          seekExact(low);
          return SeekStatus.NOT_FOUND;
        }
      }

      @Override
      public void seekExact(long ord) throws IOException {
        assert(ord >= 0 && ord <= numOrd);
        // TODO: if gap is small, could iterate from current position?  Or let user decide that?
        currentBlockNumber = bytes.fillAndGetIndex(term, termOrdToBytesOffset.get((int)ord));
        end = blockEnds[currentBlockNumber];
        currentOrd = (int)ord;
      }

      @Override
      public BytesRef next() throws IOException {
        int start = term.offset + term.length;
        if (start >= end) {
          // switch byte blocks
          if (currentBlockNumber +1 >= blocks.length) {
            return null;
          }
          currentBlockNumber++;
          term.bytes = blocks[currentBlockNumber];
          end = blockEnds[currentBlockNumber];
          start = 0;
          if (end<=0) return null;  // special case of empty last array
        }

        currentOrd++;

        byte[] block = term.bytes;
        if ((block[start] & 128) == 0) {
          term.length = block[start];
          term.offset = start+1;
        } else {
          term.length = (((block[start] & 0x7f)) << 8) | (block[1+start] & 0xff);
          term.offset = start+2;
        }
      }

      @Override
      @Override
      public BytesRef term() throws IOException {
        return term;
      }

      @Override
      public long ord() throws IOException {
        return currentOrd;
      }

      @Override
      public int docFreq() {
        throw new UnsupportedOperationException();
      }

      @Override
      public long totalTermFreq() {
        return -1;
      }

      public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public DocsEnum docsAndPositions(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public Comparator getComparator() {
        return BytesRef.getUTF8SortedAsUnicodeComparator();
      }

      @Override
      public void seekExact(BytesRef term, TermState state) throws IOException {
        assert state != null && state instanceof OrdTermState;
        this.seekExact(((OrdTermState)state).ord);
      }

      @Override
      public TermState termState() throws IOException {
        OrdTermState state = new OrdTermState();
        state.ord = currentOrd;
        return state;
=======
    public void lookupOrd(int ord, BytesRef ret) {
      if (ord < 0) {
        throw new IllegalArgumentException("ord must be >=0 (got ord=" + ord + ")");
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      }
      bytes.fill(ret, termOrdToBytesOffset.get(ord));
    }
Solution content
    }

    @Override
    public void lookupOrd(int ord, BytesRef ret) {
      if (ord < 0) {
        throw new IllegalArgumentException("ord must be >=0 (got ord=" + ord + ")");
      }

      bytes.fill(ret, termOrdToBytesOffset.get(ord));
    }
File
FieldCacheImpl.java
Developer's decision
Combination
Kind of conflict
Annotation
Attribute
Class signature
Comment
If statement
Method declaration
Method invocation
Method signature
Return statement
Throw statement
Variable
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals)
        throws IOException {
      return scorer.intervals(collectIntervals);
=======
    public long cost() {
      return scorer.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
  
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals)
        throws IOException {
      return scorer.intervals(collectIntervals);
    }

    @Override
    public long cost() {
      return scorer.cost();
    }
  }
  
File
FilteredQuery.java
Developer's decision
Concatenation
Kind of conflict
Method invocation
Method signature
Return statement
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals)
        throws IOException {
      return scorer.intervals(collectIntervals);
=======
    public long cost() {
      return Math.min(primary.cost(), secondary.cost());
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
  
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals)
        throws IOException {
      return scorer.intervals(collectIntervals);
    }

    @Override
    public long cost() {
      return Math.min(primary.cost(), secondary.cost());
    }
  }
  
File
FilteredQuery.java
Developer's decision
Concatenation
Kind of conflict
Method invocation
Method signature
Return statement
Chunk
Conflicting content
    // threaded...?  the Collector could be sync'd?
    // always use single thread:
    for (AtomicReaderContext ctx : leaves) { // search each subreader
<<<<<<< HEAD
      collector.setNextReader(ctx);
      Scorer scorer = weight.scorer(ctx, !collector.acceptsDocsOutOfOrder(), true, collector.postingFeatures(), ctx.reader().getLiveDocs());
=======
      try {
        collector.setNextReader(ctx);
      } catch (CollectionTerminatedException e) {
        // there is no doc of interest in this reader context
        // continue with the following leaf
        continue;
      }
      Scorer scorer = weight.scorer(ctx, !collector.acceptsDocsOutOfOrder(), true, ctx.reader().getLiveDocs());
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      if (scorer != null) {
        try {
          scorer.score(collector);
Solution content
    // threaded...?  the Collector could be sync'd?
    // always use single thread:
    for (AtomicReaderContext ctx : leaves) { // search each subreader
      try {
        collector.setNextReader(ctx);
      } catch (CollectionTerminatedException e) {
        // there is no doc of interest in this reader context
        // continue with the following leaf
        continue;
      }
      Scorer scorer = weight.scorer(ctx, !collector.acceptsDocsOutOfOrder(), true, collector.postingFeatures(), ctx.reader().getLiveDocs());
      if (scorer != null) {
        try {
          scorer.score(collector);
File
IndexSearcher.java
Developer's decision
Combination
Kind of conflict
Method invocation
Try statement
Variable
Chunk
Conflicting content
=======
      }

      @Override
<<<<<<< HEAD
      public IntervalIterator intervals(boolean collectIntervals) throws IOException {
        return null;
      public long cost() {
        return 1;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      }
    }
Solution content
      }

      @Override
      public IntervalIterator intervals(boolean collectIntervals) throws IOException {
        return null;
      }

      @Override
      public long cost() {
        return 1;
      }
    }
File
IndexSearcher.java
Developer's decision
Concatenation
Kind of conflict
Method signature
Return statement
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException("MatchAllDocsQuery doesn't support IntervalIterators");
=======
    public long cost() {
      return maxDoc;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException("MatchAllDocsQuery doesn't support IntervalIterators");
    }

    @Override
    public long cost() {
      return maxDoc;
    }
  }
File
MatchAllDocsQuery.java
Developer's decision
Concatenation
Kind of conflict
Attribute
Method signature
Return statement
Throw statement
Chunk
Conflicting content
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
<<<<<<< HEAD
import org.apache.lucene.search.PhraseQuery.TermDocsEnumFactory;
import org.apache.lucene.search.Weight.PostingFeatures;
=======
import org.apache.lucene.search.similarities.Similarity.SimScorer;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.SloppySimScorer;
import org.apache.lucene.util.ArrayUtil;
Solution content
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.PhraseQuery.TermDocsEnumFactory;
import org.apache.lucene.search.Weight.PostingFeatures;
import org.apache.lucene.search.similarities.Similarity;

import org.apache.lucene.search.similarities.Similarity.SimScorer;
import org.apache.lucene.util.ArrayUtil;
File
MultiPhraseQuery.java
Developer's decision
Concatenation
Kind of conflict
Import
Chunk
Conflicting content
  private int _doc;
  private int _freq;
  private DocsQueue _queue;
<<<<<<< HEAD
  private PositionQueue _posList;
  private int posPending;

  public UnionDocsAndPositionsEnum(Bits liveDocs, AtomicReaderContext context, Term[] terms,
                                   Map termContexts, TermsEnum termsEnum) throws IOException {
    this(liveDocs, context, terms, termContexts, termsEnum, PostingFeatures.POSITIONS);
  }
=======
  private IntQueue _posList;
  private long cost;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2

  public UnionDocsAndPositionsEnum(Bits liveDocs, AtomicReaderContext context, Term[] terms,
                                     Map termContexts, TermsEnum termsEnum, PostingFeatures flags) throws IOException {
Solution content
  private int _doc;
  private int _freq;
  private DocsQueue _queue;
  private PositionQueue _posList;
  private int posPending;
  private long cost;

  public UnionDocsAndPositionsEnum(Bits liveDocs, AtomicReaderContext context, Term[] terms,
                                   Map termContexts, TermsEnum termsEnum) throws IOException {
    this(liveDocs, context, terms, termContexts, termsEnum, PostingFeatures.POSITIONS);
  }

  public UnionDocsAndPositionsEnum(Bits liveDocs, AtomicReaderContext context, Term[] terms,
                                     Map termContexts, TermsEnum termsEnum, PostingFeatures flags) throws IOException {
File
MultiPhraseQuery.java
Developer's decision
Combination
Kind of conflict
Attribute
Method declaration
Chunk
Conflicting content
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
<<<<<<< HEAD
import org.apache.lucene.search.Weight.PostingFeatures;
=======
import org.apache.lucene.search.similarities.Similarity.SimScorer;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.SloppySimScorer;
import org.apache.lucene.util.ArrayUtil;
Solution content
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.Weight.PostingFeatures;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.SimScorer;
import org.apache.lucene.util.ArrayUtil;
File
PhraseQuery.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
  }

  @Override
<<<<<<< HEAD
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    if (reqScorer == null)
      return IntervalIterator.NO_MORE_INTERVALS;
    return new ConjunctionIntervalIterator(this, collectIntervals, reqScorer.intervals(collectIntervals));
=======
  public long cost() {
    return reqScorer.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  }
}
Solution content
  }

  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    if (reqScorer == null)
      return IntervalIterator.NO_MORE_INTERVALS;
    return new ConjunctionIntervalIterator(this, collectIntervals, reqScorer.intervals(collectIntervals));
  }

  @Override
  public long cost() {
    return reqScorer.cost();
  }
}
File
ReqExclScorer.java
Developer's decision
Concatenation
Kind of conflict
If statement
Method invocation
Method signature
Return statement
Chunk
Conflicting content
  private float sloppyFreq; //phrase frequency in current doc as computed by phraseFreq().

<<<<<<< HEAD
  private final Similarity.SloppySimScorer docScorer;
  private final PhraseQuery.PostingsAndFreq[] postings;
=======
  private final Similarity.SimScorer docScorer;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  
  private final int slop;
  private final int numPostings;
Solution content
  private float sloppyFreq; //phrase frequency in current doc as computed by phraseFreq().

  private final Similarity.SimScorer docScorer;
  private final PhraseQuery.PostingsAndFreq[] postings;
  
  private final int slop;
  private final int numPostings;
File
SloppyPhraseScorer.java
Developer's decision
Combination
Kind of conflict
Attribute
Chunk
Conflicting content
import org.apache.lucene.index.TermContext;
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.TermsEnum;
<<<<<<< HEAD
=======
import org.apache.lucene.search.similarities.Similarity.SimScorer;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.ExactSimScorer;
import org.apache.lucene.util.Bits;
Solution content
import org.apache.lucene.index.TermContext;
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.SimScorer;
import org.apache.lucene.util.Bits;
File
TermQuery.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
    } else {
  public Weight createWeight(IndexSearcher searcher) throws IOException {
    final IndexReaderContext context = searcher.getTopReaderContext();
    final TermContext termState;
<<<<<<< HEAD
    if (perReaderTermState == null
        || perReaderTermState.topReaderContext != context) {
      // make TermQuery single-pass if we don't have a PRTS or if the context
      // differs!
      termState = TermContext.build(context, term, true); // cache term lookups!
=======
    if (perReaderTermState == null || perReaderTermState.topReaderContext != context) {
      // make TermQuery single-pass if we don't have a PRTS or if the context differs!
      termState = TermContext.build(context, term);
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      // PRTS was pre-build for this IS
      termState = this.perReaderTermState;
Solution content
  public Weight createWeight(IndexSearcher searcher) throws IOException {
    final IndexReaderContext context = searcher.getTopReaderContext();
    final TermContext termState;
    if (perReaderTermState == null
        || perReaderTermState.topReaderContext != context) {
      // make TermQuery single-pass if we don't have a PRTS or if the context
      // differs!
      termState = TermContext.build(context, term);
    } else {
      // PRTS was pre-build for this IS
      termState = this.perReaderTermState;
File
TermQuery.java
Developer's decision
Combination
Kind of conflict
Comment
If statement
Method invocation
Variable
Chunk
Conflicting content
  /** Returns a string representation of this TermScorer. */
  @Override
<<<<<<< HEAD
  public String toString() {
    return "scorer(" + weight + ")[" + super.toString() + "]";
  }
  
  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    return new TermIntervalIterator(this, docsEnum, false, collectIntervals);
  }
  // TODO: benchmark if the specialized conjunction really benefits
  // from this, or if instead its from sorting by docFreq, or both

  DocsEnum getDocsEnum() {
    return docsEnum;
  }
  
  // TODO: generalize something like this for scorers?
  // even this is just an estimation...
  
  int getDocFreq() {
    return docFreq;
  }
=======
  public String toString() { return "scorer(" + weight + ")"; }
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
}
Solution content
  /** Returns a string representation of this TermScorer. */
  @Override
  public String toString() {
    return "scorer(" + weight + ")[" + super.toString() + "]";
  }
  
  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    return new TermIntervalIterator(this, docsEnum, false, collectIntervals);
  }
  // TODO: benchmark if the specialized conjunction really benefits
  // from this, or if instead its from sorting by docFreq, or both

  DocsEnum getDocsEnum() {
    return docsEnum;
  }


}
File
TermScorer.java
Developer's decision
Combination
Kind of conflict
Annotation
Comment
Method declaration
Chunk
Conflicting content
  public float sloppyFreq() throws IOException {
    return freq;
  }
<<<<<<< HEAD

  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    return null;
=======
  
  @Override
  public long cost() {
    return spans.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  }
}
Solution content
  public float sloppyFreq() throws IOException {
    return freq;
  }

  @Override
  public IntervalIterator intervals(boolean collectIntervals) throws IOException {
    return null;
  }

  @Override
  public long cost() {
    return spans.cost();
  }

}
File
SpanScorer.java
Developer's decision
Manual
Kind of conflict
Annotation
Method invocation
Method signature
Return statement
Chunk
Conflicting content
          term = field.terms[upto];
          if (random().nextInt(3) == 1) {
            final DocsEnum docs;
<<<<<<< HEAD
            final DocsEnum docsAndFreqs;
            final DocsEnum postings;
=======
            final DocsAndPositionsEnum postings;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
            if (!field.omitTF) {
              postings = termsEnum.docsAndPositions(null, null);
              if (postings != null) {
Solution content
          term = field.terms[upto];
          if (random().nextInt(3) == 1) {
            final DocsEnum docs;
            final DocsEnum postings;
            if (!field.omitTF) {
              postings = termsEnum.docsAndPositions(null, null);
              if (postings != null) {
File
TestCodecs.java
Developer's decision
Combination
Kind of conflict
Variable
Chunk
Conflicting content
      }
      writer.addDocument(document);
    }
<<<<<<< HEAD
  }
  
  /** 
   * checks collection-level statistics on Terms 
   */
  public void assertTermsStatistics(Terms leftTerms, Terms rightTerms) throws Exception {
    assert leftTerms.getComparator() == rightTerms.getComparator();
    if (leftTerms.getDocCount() != -1 && rightTerms.getDocCount() != -1) {
      assertEquals(info, leftTerms.getDocCount(), rightTerms.getDocCount());
    }
    if (leftTerms.getSumDocFreq() != -1 && rightTerms.getSumDocFreq() != -1) {
      assertEquals(info, leftTerms.getSumDocFreq(), rightTerms.getSumDocFreq());
    }
    if (leftTerms.getSumTotalTermFreq() != -1 && rightTerms.getSumTotalTermFreq() != -1) {
      assertEquals(info, leftTerms.getSumTotalTermFreq(), rightTerms.getSumTotalTermFreq());
    }
    if (leftTerms.size() != -1 && rightTerms.size() != -1) {
      assertEquals(info, leftTerms.size(), rightTerms.size());
    }
  }


        return;
  /** 
   * checks the terms enum sequentially
   * if deep is false, it does a 'shallow' test that doesnt go down to the docsenums
   */
  public void assertTermsEnum(TermsEnum leftTermsEnum, TermsEnum rightTermsEnum, boolean deep) throws Exception {
    BytesRef term;
    Bits randomBits = new RandomBits(leftReader.maxDoc(), random().nextDouble(), random());
    DocsEnum leftPositions = null;
    DocsEnum rightPositions = null;
    DocsEnum leftDocs = null;
    DocsEnum rightDocs = null;
    
    while ((term = leftTermsEnum.next()) != null) {
      assertEquals(info, term, rightTermsEnum.next());
      assertTermStats(leftTermsEnum, rightTermsEnum);
      if (deep) {
        assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.docsAndPositions(null, leftPositions),
                                   rightPositions = rightTermsEnum.docsAndPositions(null, rightPositions));
        assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.docsAndPositions(randomBits, leftPositions),
                                   rightPositions = rightTermsEnum.docsAndPositions(randomBits, rightPositions));

        assertPositionsSkipping(leftTermsEnum.docFreq(), 
                                leftPositions = leftTermsEnum.docsAndPositions(null, leftPositions),
                                rightPositions = rightTermsEnum.docsAndPositions(null, rightPositions));
        assertPositionsSkipping(leftTermsEnum.docFreq(), 
                                leftPositions = leftTermsEnum.docsAndPositions(randomBits, leftPositions),
                                rightPositions = rightTermsEnum.docsAndPositions(randomBits, rightPositions));

        // with freqs:
        assertDocsEnum(leftDocs = leftTermsEnum.docs(null, leftDocs),
            rightDocs = rightTermsEnum.docs(null, rightDocs),
            true);
        assertDocsEnum(leftDocs = leftTermsEnum.docs(randomBits, leftDocs),
            rightDocs = rightTermsEnum.docs(randomBits, rightDocs),
            true);

        // w/o freqs:
        assertDocsEnum(leftDocs = leftTermsEnum.docs(null, leftDocs, DocsEnum.FLAG_NONE),
            rightDocs = rightTermsEnum.docs(null, rightDocs, DocsEnum.FLAG_NONE),
            false);
        assertDocsEnum(leftDocs = leftTermsEnum.docs(randomBits, leftDocs, DocsEnum.FLAG_NONE),
            rightDocs = rightTermsEnum.docs(randomBits, rightDocs, DocsEnum.FLAG_NONE),
            false);
        
        // with freqs:
        assertDocsSkipping(leftTermsEnum.docFreq(), 
            leftDocs = leftTermsEnum.docs(null, leftDocs),
            rightDocs = rightTermsEnum.docs(null, rightDocs),
            true);
        assertDocsSkipping(leftTermsEnum.docFreq(), 
            leftDocs = leftTermsEnum.docs(randomBits, leftDocs),
            rightDocs = rightTermsEnum.docs(randomBits, rightDocs),
            true);

        // w/o freqs:
        assertDocsSkipping(leftTermsEnum.docFreq(), 
            leftDocs = leftTermsEnum.docs(null, leftDocs, DocsEnum.FLAG_NONE),
            rightDocs = rightTermsEnum.docs(null, rightDocs, DocsEnum.FLAG_NONE),
            false);
        assertDocsSkipping(leftTermsEnum.docFreq(), 
            leftDocs = leftTermsEnum.docs(randomBits, leftDocs, DocsEnum.FLAG_NONE),
            rightDocs = rightTermsEnum.docs(randomBits, rightDocs, DocsEnum.FLAG_NONE),
            false);
      }
    }
    assertNull(info, rightTermsEnum.next());
  }
  
  /**
   * checks term-level statistics
   */
  public void assertTermStats(TermsEnum leftTermsEnum, TermsEnum rightTermsEnum) throws Exception {
    assertEquals(info, leftTermsEnum.docFreq(), rightTermsEnum.docFreq());
    if (leftTermsEnum.totalTermFreq() != -1 && rightTermsEnum.totalTermFreq() != -1) {
      assertEquals(info, leftTermsEnum.totalTermFreq(), rightTermsEnum.totalTermFreq());
    }
  }
  
  /**
   * checks docs + freqs + positions + payloads, sequentially
   */
  public void assertDocsAndPositionsEnum(DocsEnum leftDocs, DocsEnum rightDocs) throws Exception {
    if (leftDocs == null || rightDocs == null) {
      assertNull(leftDocs);
      assertNull(rightDocs);
      return;
    }
    assertTrue(info, leftDocs.docID() == -1 || leftDocs.docID() == DocIdSetIterator.NO_MORE_DOCS);
    assertTrue(info, rightDocs.docID() == -1 || rightDocs.docID() == DocIdSetIterator.NO_MORE_DOCS);
    int docid;
    while ((docid = leftDocs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
      assertEquals(info, docid, rightDocs.nextDoc());
      int freq = leftDocs.freq();
      assertEquals(info, freq, rightDocs.freq());
      for (int i = 0; i < freq; i++) {
        assertEquals(info, leftDocs.nextPosition(), rightDocs.nextPosition());
        assertEquals(info, leftDocs.getPayload(), rightDocs.getPayload());
        assertEquals(info, leftDocs.startOffset(), rightDocs.startOffset());
        assertEquals(info, leftDocs.endOffset(), rightDocs.endOffset());
      }
    }
    assertEquals(info, DocIdSetIterator.NO_MORE_DOCS, rightDocs.nextDoc());
  }
  
  /**
   * checks docs + freqs, sequentially
   */
  public void assertDocsEnum(DocsEnum leftDocs, DocsEnum rightDocs, boolean hasFreqs) throws Exception {
    if (leftDocs == null) {
      assertNull(rightDocs);
      return;
    }
    assertTrue(info, leftDocs.docID() == -1 || leftDocs.docID() == DocIdSetIterator.NO_MORE_DOCS);
    assertTrue(info, rightDocs.docID() == -1 || rightDocs.docID() == DocIdSetIterator.NO_MORE_DOCS);
    int docid;
    while ((docid = leftDocs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
      assertEquals(info, docid, rightDocs.nextDoc());
      if (hasFreqs) {
        assertEquals(info, leftDocs.freq(), rightDocs.freq());
      }
    }
    assertEquals(info, DocIdSetIterator.NO_MORE_DOCS, rightDocs.nextDoc());
  }
  
  /**
   * checks advancing docs
   */
  public void assertDocsSkipping(int docFreq, DocsEnum leftDocs, DocsEnum rightDocs, boolean hasFreqs) throws Exception {
    if (leftDocs == null) {
      assertNull(rightDocs);
      return;
    }
    int docid = -1;
    int averageGap = leftReader.maxDoc() / (1+docFreq);
    int skipInterval = 16;

    while (true) {
      if (random().nextBoolean()) {
        // nextDoc()
        docid = leftDocs.nextDoc();
        assertEquals(info, docid, rightDocs.nextDoc());
      } else {
        // advance()
        int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random().nextGaussian() * averageGap));
        docid = leftDocs.advance(skip);
        assertEquals(info, docid, rightDocs.advance(skip));
      }
      
      if (docid == DocIdSetIterator.NO_MORE_DOCS) {
        return;
      }
      if (hasFreqs) {
        assertEquals(info, leftDocs.freq(), rightDocs.freq());
      }
    }
  }
  
  /**
   * checks advancing docs + positions
   */
  public void assertPositionsSkipping(int docFreq, DocsEnum leftDocs, DocsEnum rightDocs) throws Exception {
    if (leftDocs == null || rightDocs == null) {
      assertNull(leftDocs);
      assertNull(rightDocs);
      return;
    }
    
    int docid = -1;
    int averageGap = leftReader.maxDoc() / (1+docFreq);
    int skipInterval = 16;

    while (true) {
      if (random().nextBoolean()) {
        // nextDoc()
        docid = leftDocs.nextDoc();
        assertEquals(info, docid, rightDocs.nextDoc());
      } else {
        // advance()
        int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random().nextGaussian() * averageGap));
        docid = leftDocs.advance(skip);
        assertEquals(info, docid, rightDocs.advance(skip));
      }
      
      if (docid == DocIdSetIterator.NO_MORE_DOCS) {
      }
      int freq = leftDocs.freq();
      assertEquals(info, freq, rightDocs.freq());
      for (int i = 0; i < freq; i++) {
        assertEquals(info, leftDocs.nextPosition(), rightDocs.nextPosition());
        assertEquals(info, leftDocs.getPayload(), rightDocs.getPayload());
      }
    }
  }
  
  /** 
   * checks that norms are the same across all fields 
   */
  public void assertNorms(IndexReader leftReader, IndexReader rightReader) throws Exception {
    Fields leftFields = MultiFields.getFields(leftReader);
    Fields rightFields = MultiFields.getFields(rightReader);
    // Fields could be null if there are no postings,
    // but then it must be null for both
    if (leftFields == null || rightFields == null) {
      assertNull(info, leftFields);
      assertNull(info, rightFields);
      return;
    }
    
    for (String field : leftFields) {
      DocValues leftNorms = MultiDocValues.getNormDocValues(leftReader, field);
      DocValues rightNorms = MultiDocValues.getNormDocValues(rightReader, field);
      if (leftNorms != null && rightNorms != null) {
        assertDocValues(leftNorms, rightNorms);
      } else {
        assertNull(leftNorms);
        assertNull(rightNorms);
      }
    }
  }
  
  /** 
   * checks that stored fields of all documents are the same 
   */
  public void assertStoredFields(IndexReader leftReader, IndexReader rightReader) throws Exception {
    assert leftReader.maxDoc() == rightReader.maxDoc();
    for (int i = 0; i < leftReader.maxDoc(); i++) {
      StoredDocument leftDoc = leftReader.document(i);
      StoredDocument rightDoc = rightReader.document(i);
      
      // TODO: I think this is bogus because we don't document what the order should be
      // from these iterators, etc. I think the codec/IndexReader should be free to order this stuff
      // in whatever way it wants (e.g. maybe it packs related fields together or something)
      // To fix this, we sort the fields in both documents by name, but
      // we still assume that all instances with same name are in order:
      Comparator comp = new Comparator() {
        @Override
        public int compare(StorableField arg0, StorableField arg1) {
          return arg0.name().compareTo(arg1.name());
        }        
      };
      Collections.sort(leftDoc.getFields(), comp);
      Collections.sort(rightDoc.getFields(), comp);

      Iterator leftIterator = leftDoc.iterator();
      Iterator rightIterator = rightDoc.iterator();
      while (leftIterator.hasNext()) {
        assertTrue(info, rightIterator.hasNext());
        assertStoredField(leftIterator.next(), rightIterator.next());
      }
      assertFalse(info, rightIterator.hasNext());
    }
  }
  
  /** 
   * checks that two stored fields are equivalent 
   */
  public void assertStoredField(StorableField leftField, StorableField rightField) {
    assertEquals(info, leftField.name(), rightField.name());
    assertEquals(info, leftField.binaryValue(), rightField.binaryValue());
    assertEquals(info, leftField.stringValue(), rightField.stringValue());
    assertEquals(info, leftField.numericValue(), rightField.numericValue());
    // TODO: should we check the FT at all?
  }
  
  /** 
   * checks that term vectors across all fields are equivalent 
   */
  public void assertTermVectors(IndexReader leftReader, IndexReader rightReader) throws Exception {
    assert leftReader.maxDoc() == rightReader.maxDoc();
    for (int i = 0; i < leftReader.maxDoc(); i++) {
      Fields leftFields = leftReader.getTermVectors(i);
      Fields rightFields = rightReader.getTermVectors(i);
      assertFields(leftFields, rightFields, rarely());
    }
  }

  private static Set getDVFields(IndexReader reader) {
    Set fields = new HashSet();
    for(FieldInfo fi : MultiFields.getMergedFieldInfos(reader)) {
      if (fi.hasDocValues()) {
        fields.add(fi.name);
      }
    }

    return fields;
  }
  
  /**
   * checks that docvalues across all fields are equivalent
   */
  public void assertDocValues(IndexReader leftReader, IndexReader rightReader) throws Exception {
    Set leftValues = getDVFields(leftReader);
    Set rightValues = getDVFields(rightReader);
    assertEquals(info, leftValues, rightValues);
    for (String field : leftValues) {
      DocValues leftDocValues = MultiDocValues.getDocValues(leftReader, field);
      DocValues rightDocValues = MultiDocValues.getDocValues(rightReader, field);
      if (leftDocValues != null && rightDocValues != null) {
        assertDocValues(leftDocValues, rightDocValues);
      } else {
        assertNull(leftDocValues);
        assertNull(rightDocValues);
      }
    }
  }
  
  public void assertDocValues(DocValues leftDocValues, DocValues rightDocValues) throws Exception {
    assertNotNull(info, leftDocValues);
    assertNotNull(info, rightDocValues);
    assertEquals(info, leftDocValues.getType(), rightDocValues.getType());
    assertEquals(info, leftDocValues.getValueSize(), rightDocValues.getValueSize());
    assertDocValuesSource(leftDocValues.getDirectSource(), rightDocValues.getDirectSource());
    assertDocValuesSource(leftDocValues.getSource(), rightDocValues.getSource());
=======
    
    lineFileDocs.close();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  }
  
  /**
Solution content
      }
      writer.addDocument(document);
    }
    
    lineFileDocs.close();
  }
  
  /**
File
TestDuelingCodecs.java
Developer's decision
Version 2
Kind of conflict
Comment
Method declaration
Method invocation
Method signature
Chunk
Conflicting content
    Terms terms = reader.getTermVector(1, "field");
    assert terms != null;
    TermsEnum termsEnum = terms.iterator(null);
<<<<<<< HEAD
    assertTrue(termsEnum.seekExact(new BytesRef("withPayload"), true));
    DocsEnum de = termsEnum.docsAndPositions(null, null);
=======
    assertTrue(termsEnum.seekExact(new BytesRef("withPayload")));
    DocsAndPositionsEnum de = termsEnum.docsAndPositions(null, null);
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    assertEquals(0, de.nextDoc());
    assertEquals(0, de.nextPosition());
    assertEquals(new BytesRef("test"), de.getPayload());
Solution content
    Terms terms = reader.getTermVector(1, "field");
    assert terms != null;
    TermsEnum termsEnum = terms.iterator(null);
    assertTrue(termsEnum.seekExact(new BytesRef("withPayload")));
    DocsEnum de = termsEnum.docsAndPositions(null, null);
    assertEquals(0, de.nextDoc());
    assertEquals(0, de.nextPosition());
    assertEquals(new BytesRef("test"), de.getPayload());
File
TestPayloadsOnVectors.java
Developer's decision
Combination
Kind of conflict
Method invocation
Variable
Chunk
Conflicting content
    Terms terms = reader.getTermVector(0, "field");
    assert terms != null;
    TermsEnum termsEnum = terms.iterator(null);
<<<<<<< HEAD
    assertTrue(termsEnum.seekExact(new BytesRef("withPayload"), true));
    DocsEnum de = termsEnum.docsAndPositions(null, null);
=======
    assertTrue(termsEnum.seekExact(new BytesRef("withPayload")));
    DocsAndPositionsEnum de = termsEnum.docsAndPositions(null, null);
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    assertEquals(0, de.nextDoc());
    assertEquals(3, de.nextPosition());
    assertEquals(new BytesRef("test"), de.getPayload());
Solution content
    Terms terms = reader.getTermVector(0, "field");
    assert terms != null;
    TermsEnum termsEnum = terms.iterator(null);
    assertTrue(termsEnum.seekExact(new BytesRef("withPayload")));
    DocsEnum de = termsEnum.docsAndPositions(null, null);
    assertEquals(0, de.nextDoc());
    assertEquals(3, de.nextPosition());
    assertEquals(new BytesRef("test"), de.getPayload());
File
TestPayloadsOnVectors.java
Developer's decision
Combination
Kind of conflict
Method invocation
Variable
Chunk
Conflicting content
      //System.out.println("\nsub=" + sub);
      final TermsEnum termsEnum = sub.fields().terms("content").iterator(null);
      DocsEnum docs = null;
<<<<<<< HEAD
      DocsEnum docsAndPositions = null;
      DocsEnum docsAndPositionsAndOffsets = null;
      final int docIDToID[] = FieldCache.DEFAULT.getInts(sub, "id", false);
=======
      DocsAndPositionsEnum docsAndPositions = null;
      DocsAndPositionsEnum docsAndPositionsAndOffsets = null;
      final FieldCache.Ints docIDToID = FieldCache.DEFAULT.getInts(sub, "id", false);
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      for(String term : terms) {
        //System.out.println("  term=" + term);
        if (termsEnum.seekExact(new BytesRef(term))) {
Solution content
      //System.out.println("\nsub=" + sub);
      final TermsEnum termsEnum = sub.fields().terms("content").iterator(null);
      DocsEnum docs = null;
      DocsEnum docsAndPositions = null;
      DocsEnum docsAndPositionsAndOffsets = null;
      final FieldCache.Ints docIDToID = FieldCache.DEFAULT.getInts(sub, "id", false);
      for(String term : terms) {
        //System.out.println("  term=" + term);
        if (termsEnum.seekExact(new BytesRef(term))) {
File
TestPostingsOffsets.java
Developer's decision
Combination
Kind of conflict
Method invocation
Variable
Chunk
Conflicting content
import java.io.IOException;

import org.apache.lucene.index.AtomicReaderContext;
<<<<<<< HEAD
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.Norm;
import org.apache.lucene.search.intervals.IntervalIterator;
=======
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
Solution content
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.intervals.IntervalIterator;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
File
JustCompileSearch.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
    public int advance(int target) {
      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
    }
<<<<<<< HEAD

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
=======
    
    @Override
    public long cost() {
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
    }
  }
Solution content
    public int advance(int target) {
      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
    }

    @Override
    public long cost() {
      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
    }
  }
File
JustCompileSearch.java
Developer's decision
Concatenation
Kind of conflict
Annotation
Method signature
Chunk
Conflicting content
      idx = target;
      return idx < scores.length ? idx : NO_MORE_DOCS;
    }
<<<<<<< HEAD

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException();
    }
=======
    
    @Override
    public long cost() {
      return scores.length;
    } 
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  }

  // The scores must have positive as well as negative values
Solution content
      idx = target;
      return idx < scores.length ? idx : NO_MORE_DOCS;
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
    public long cost() {
      return scores.length;
    } 
  }

  // The scores must have positive as well as negative values
File
TestPositiveScoresOnlyCollector.java
Developer's decision
Concatenation
Kind of conflict
Annotation
Method declaration
Chunk
Conflicting content
      doc = target;
      return doc < scores.length ? doc : NO_MORE_DOCS;
    }
<<<<<<< HEAD

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException(); 
=======
    
    @Override
    public long cost() {
      return scores.length;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
  
Solution content
      doc = target;
      return doc < scores.length ? doc : NO_MORE_DOCS;
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException(); 
    }
    @Override
    public long cost() {
      return scores.length;
    }
  }
  
File
TestScoreCachingWrappingScorer.java
Developer's decision
Concatenation
Kind of conflict
Annotation
Method signature
Return statement
Throw statement
Chunk
Conflicting content
    Weight weight = indexSearcher.createNormalizedWeight(termQuery);
    assertTrue(indexSearcher.getTopReaderContext() instanceof AtomicReaderContext);
    AtomicReaderContext context = (AtomicReaderContext) indexSearcher.getTopReaderContext();
<<<<<<< HEAD
    Scorer ts = weight.scorer(context, true, true, PostingFeatures.DOCS_AND_FREQS, context.reader().getLiveDocs());
=======
    Scorer ts = weight.scorer(context, true, false, context.reader().getLiveDocs());
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    assertTrue("next did not return a doc",
        ts.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    assertTrue("score is not correct", ts.score() == 1.6931472f);
Solution content
    Weight weight = indexSearcher.createNormalizedWeight(termQuery);
    assertTrue(indexSearcher.getTopReaderContext() instanceof AtomicReaderContext);
    AtomicReaderContext context = (AtomicReaderContext) indexSearcher.getTopReaderContext();
    Scorer ts = weight.scorer(context, true, false, PostingFeatures.DOCS_AND_FREQS, context.reader().getLiveDocs());
    assertTrue("next did not return a doc",
        ts.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    assertTrue("score is not correct", ts.score() == 1.6931472f);
File
TestTermScorer.java
Developer's decision
Manual
Kind of conflict
Method invocation
Variable
Chunk
Conflicting content
    Weight weight = indexSearcher.createNormalizedWeight(termQuery);
    assertTrue(indexSearcher.getTopReaderContext() instanceof AtomicReaderContext);
    AtomicReaderContext context = (AtomicReaderContext) indexSearcher.getTopReaderContext();
<<<<<<< HEAD
    Scorer ts = weight.scorer(context, true, true, PostingFeatures.DOCS_AND_FREQS, context.reader().getLiveDocs());
=======
    Scorer ts = weight.scorer(context, true, false, context.reader().getLiveDocs());
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    assertTrue("Didn't skip", ts.advance(3) != DocIdSetIterator.NO_MORE_DOCS);
    // The next doc should be doc 5
    assertTrue("doc should be number 5", ts.docID() == 5);
Solution content
    Weight weight = indexSearcher.createNormalizedWeight(termQuery);
    assertTrue(indexSearcher.getTopReaderContext() instanceof AtomicReaderContext);
    AtomicReaderContext context = (AtomicReaderContext) indexSearcher.getTopReaderContext();
    Scorer ts = weight.scorer(context, true, false, PostingFeatures.DOCS_AND_FREQS, context.reader().getLiveDocs());
    assertTrue("Didn't skip", ts.advance(3) != DocIdSetIterator.NO_MORE_DOCS);
    // The next doc should be doc 5
    assertTrue("doc should be number 5", ts.docID() == 5);
File
TestTermScorer.java
Developer's decision
Manual
Kind of conflict
Method invocation
Variable
Chunk
Conflicting content
    directory = null;
  }

<<<<<<< HEAD
  public void test() {
    assertTrue(searcher != null);
  }

  public void testTermVectors() throws IOException {
    Query query = new TermQuery(new Term("field", "seventy"));
    ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
    assertEquals(100, hits.length);
      
    for (int i = 0; i < hits.length; i++) {
      Fields vectors = searcher.reader.getTermVectors(hits[i].doc);
      assertNotNull(vectors);
      assertEquals("doc=" + hits[i].doc + " tv=" + vectors, 1, vectors.size());
    }
    Terms vector;
    vector = searcher.reader.getTermVectors(hits[0].doc).terms("noTV");
    assertNull(vector);
  }
  
  public void testTermVectorsFieldOrder() throws IOException {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    Document doc = new Document();
    FieldType ft = new FieldType(TextField.TYPE_STORED);
    ft.setStoreTermVectors(true);
    ft.setStoreTermVectorOffsets(true);
    ft.setStoreTermVectorPositions(true);
    doc.add(newField("c", "some content here", ft));
    doc.add(newField("a", "some content here", ft));
    doc.add(newField("b", "some content here", ft));
    doc.add(newField("x", "some content here", ft));
    writer.addDocument(doc);
    IndexReader reader = writer.getReader();
    writer.close();
    Fields v = reader.getTermVectors(0);
    assertEquals(4, v.size());
    String[] expectedFields = new String[]{"a", "b", "c", "x"};
    int[] expectedPositions = new int[]{1, 2, 0};
    Iterator fieldsEnum = v.iterator();
    for(int i=0;i test4Map = new HashMap();
    test4Map.put("chocolate", Integer.valueOf(3));
    test4Map.put("lab", Integer.valueOf(2));
    test4Map.put("eating", Integer.valueOf(1));
    test4Map.put("computer", Integer.valueOf(1));
    test4Map.put("with", Integer.valueOf(1));
    test4Map.put("a", Integer.valueOf(1));
    test4Map.put("colored", Integer.valueOf(1));
    test4Map.put("in", Integer.valueOf(1));
    test4Map.put("an", Integer.valueOf(1));
    test4Map.put("computer", Integer.valueOf(1));
    test4Map.put("old", Integer.valueOf(1));
    
    Document testDoc1 = new Document();
    setupDoc(testDoc1, test1);
    Document testDoc2 = new Document();
    setupDoc(testDoc2, test2);
    Document testDoc3 = new Document();
    setupDoc(testDoc3, test3);
    Document testDoc4 = new Document();
    setupDoc(testDoc4, test4);
    
    Directory dir = newDirectory();
    
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, 
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true))
          .setOpenMode(OpenMode.CREATE)
          .setMergePolicy(newLogMergePolicy())
          .setSimilarity(new DefaultSimilarity()));
    writer.addDocument(testDoc1);
    writer.addDocument(testDoc2);
    writer.addDocument(testDoc3);
    writer.addDocument(testDoc4);
    IndexReader reader = writer.getReader();
    writer.close();
    IndexSearcher knownSearcher = newSearcher(reader);
    knownSearcher.setSimilarity(new DefaultSimilarity());
    Fields fields = MultiFields.getFields(knownSearcher.reader);
    
    DocsEnum docs = null;
    for (String fieldName : fields) {
      Terms terms = fields.terms(fieldName);
      assertNotNull(terms); // NOTE: kinda sketchy assumptions, but ideally we would fix fieldsenum api... 
      TermsEnum termsEnum = terms.iterator(null);

      while (termsEnum.next() != null) {
        String text = termsEnum.term().utf8ToString();
        docs = _TestUtil.docs(random(), termsEnum, MultiFields.getLiveDocs(knownSearcher.reader), docs, DocsEnum.FLAG_FREQS);
        
        while (docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
          int docId = docs.docID();
          int freq = docs.freq();
          //System.out.println("Doc Id: " + docId + " freq " + freq);
          Terms vector = knownSearcher.reader.getTermVectors(docId).terms("field");
          //float tf = sim.tf(freq);
          //float idf = sim.idf(knownSearcher.docFreq(term), knownSearcher.maxDoc());
          //float qNorm = sim.queryNorm()
          //This is fine since we don't have stop words
          //float lNorm = sim.lengthNorm("field", vector.getTerms().length);
          //float coord = sim.coord()
          //System.out.println("TF: " + tf + " IDF: " + idf + " LenNorm: " + lNorm);
          assertNotNull(vector);
          TermsEnum termsEnum2 = vector.iterator(null);

          while(termsEnum2.next() != null) {
            if (text.equals(termsEnum2.term().utf8ToString())) {
              assertEquals(freq, termsEnum2.totalTermFreq());
            }
          }
        }
      }
      //System.out.println("--------");
    }
    Query query = new TermQuery(new Term("field", "chocolate"));
    ScoreDoc[] hits = knownSearcher.search(query, null, 1000).scoreDocs;
    //doc 3 should be the first hit b/c it is the shortest match
    assertTrue(hits.length == 3);
    /*System.out.println("Hit 0: " + hits.id(0) + " Score: " + hits.score(0) + " String: " + hits.doc(0).toString());
      System.out.println("Explain: " + knownSearcher.explain(query, hits.id(0)));
      System.out.println("Hit 1: " + hits.id(1) + " Score: " + hits.score(1) + " String: " + hits.doc(1).toString());
      System.out.println("Explain: " + knownSearcher.explain(query, hits.id(1)));
      System.out.println("Hit 2: " + hits.id(2) + " Score: " + hits.score(2) + " String: " +  hits.doc(2).toString());
      System.out.println("Explain: " + knownSearcher.explain(query, hits.id(2)));*/
    assertTrue(hits[0].doc == 2);
    assertTrue(hits[1].doc == 3);
    assertTrue(hits[2].doc == 0);
    Terms vector = knownSearcher.reader.getTermVectors(hits[1].doc).terms("field");
    assertNotNull(vector);
    //System.out.println("Vector: " + vector);
    assertEquals(10, vector.size());
    TermsEnum termsEnum = vector.iterator(null);
    while(termsEnum.next() != null) {
      String term = termsEnum.term().utf8ToString();
      //System.out.println("Term: " + term);
      int freq = (int) termsEnum.totalTermFreq();
      assertTrue(test4.indexOf(term) != -1);
      Integer freqInt = test4Map.get(term);
      assertTrue(freqInt != null);
      assertEquals(freqInt.intValue(), freq);
    }
    reader.close();
    dir.close();
  } 
  
  private void setupDoc(Document doc, String text)
  {
    FieldType ft = new FieldType(TextField.TYPE_STORED);
    ft.setStoreTermVectors(true);
    ft.setStoreTermVectorOffsets(true);
    ft.setStoreTermVectorPositions(true);
    FieldType ft2 = new FieldType(TextField.TYPE_STORED);
    ft2.setStoreTermVectors(true);
    doc.add(newField("field2", text, ft));
    doc.add(newField("field", text, ft2));
    //System.out.println("Document: " + doc);
  }

  // Test only a few docs having vectors
  public void testRareVectors() throws IOException {
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory, 
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true))
        .setOpenMode(OpenMode.CREATE));
    if (VERBOSE) {
      System.out.println("TEST: now add non-vectors");
    }
    for (int i = 0; i < 100; i++) {
      Document doc = new Document();
      doc.add(newTextField("field", English.intToEnglish(i), Field.Store.YES));
      writer.addDocument(doc);
    }
    if (VERBOSE) {
      System.out.println("TEST: now add vectors");
    }
    FieldType ft = new FieldType(TextField.TYPE_STORED);
    ft.setStoreTermVectors(true);
    ft.setStoreTermVectorOffsets(true);
    ft.setStoreTermVectorPositions(true);
    for(int i=0;i<10;i++) {
      Document doc = new Document();
      doc.add(newField("field", English.intToEnglish(100+i), ft));
      writer.addDocument(doc);
    }

    if (VERBOSE) {
      System.out.println("TEST: now getReader");
    }
    IndexReader reader = writer.getReader();
    writer.close();
    IndexSearcher searcher = newSearcher(reader);

    Query query = new TermQuery(new Term("field", "hundred"));
    ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
    assertEquals(10, hits.length);
    for (int i = 0; i < hits.length; i++) {

      Fields vectors = searcher.reader.getTermVectors(hits[i].doc);
      assertNotNull(vectors);
      assertEquals(1, vectors.size());
    }
    reader.close();
  }


=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  // In a single doc, for the same field, mix the term
  // vectors up
  public void testMixedVectrosVectors() throws IOException {
Solution content
    directory = null;
  }

  // In a single doc, for the same field, mix the term
  // vectors up
  public void testMixedVectrosVectors() throws IOException {
File
TestTermVectors.java
Developer's decision
Version 2
Kind of conflict
Comment
Method declaration
Chunk
Conflicting content
=======
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DirectoryReader;
<<<<<<< HEAD:lucene/facet/src/java/org/apache/lucene/facet/index/FacetsPayloadMigrationReader.java
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValues.Source;
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.FieldInfo.DocValuesType;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2:lucene/facet/src/java/org/apache/lucene/facet/util/FacetsPayloadMigrationReader.java
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
Solution content
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
File
FacetsPayloadMigrationReader.java
Developer's decision
Manual
Kind of conflict
Import
Chunk
Conflicting content
  private class PayloadMigratingBinaryDocValues extends BinaryDocValues {

<<<<<<< HEAD:lucene/facet/src/java/org/apache/lucene/facet/index/FacetsPayloadMigrationReader.java
    private final DocsEnum dpe;
    
    public PayloadMigratingDocValues(DocsEnum dpe) {
      this.dpe = dpe;
    }

    @Override
    protected Source loadDirectSource() throws IOException {
      return new PayloadMigratingSource(getType(), dpe);
    }
=======
    private Fields fields;
    private Term term;
    private DocsAndPositionsEnum dpe;
    private int curDocID = -1;
    private int lastRequestedDocID;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2:lucene/facet/src/java/org/apache/lucene/facet/util/FacetsPayloadMigrationReader.java

    private DocsAndPositionsEnum getDPE() {
      try {
Solution content
  private class PayloadMigratingBinaryDocValues extends BinaryDocValues {

    private Fields fields;
    private Term term;
    private DocsEnum dpe;
    private int curDocID = -1;
    private int lastRequestedDocID;

    private DocsEnum getDPE() {
      try {
File
FacetsPayloadMigrationReader.java
Developer's decision
Manual
Kind of conflict
Annotation
Attribute
Method declaration
Chunk
Conflicting content
      }
    }
    
<<<<<<< HEAD:lucene/facet/src/java/org/apache/lucene/facet/index/FacetsPayloadMigrationReader.java
  }
  
  private class PayloadMigratingSource extends Source {

    private final DocsEnum dpe;
    private int curDocID;
    
    protected PayloadMigratingSource(Type type, DocsEnum dpe) {
      super(type);
      this.dpe = dpe;
=======
    protected PayloadMigratingBinaryDocValues(Fields fields, Term term) {
      this.fields = fields;
      this.term = term;
      this.dpe = getDPE();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2:lucene/facet/src/java/org/apache/lucene/facet/util/FacetsPayloadMigrationReader.java
      if (dpe == null) {
        curDocID = DocIdSetIterator.NO_MORE_DOCS;
      } else {
Solution content
      }
    }
    
    protected PayloadMigratingBinaryDocValues(Fields fields, Term term) {
      this.fields = fields;
      this.term = term;
      this.dpe = getDPE();
      if (dpe == null) {
        curDocID = DocIdSetIterator.NO_MORE_DOCS;
      } else {
File
FacetsPayloadMigrationReader.java
Developer's decision
Version 2
Kind of conflict
Attribute
Class signature
Method invocation
Method signature
Chunk
Conflicting content
    if (term == null) {
      return super.getBinaryDocValues(field);
    } else {
<<<<<<< HEAD:lucene/facet/src/java/org/apache/lucene/facet/index/FacetsPayloadMigrationReader.java
      DocsEnum dpe = null;
      Fields fields = fields();
      if (fields != null) {
        Terms terms = fields.terms(term.field());
        if (terms != null) {
          TermsEnum te = terms.iterator(null); // no use for reusing
          if (te.seekExact(term.bytes(), true)) {
            // we're not expected to be called for deleted documents
            dpe = te.docsAndPositions(null, null, DocsEnum.FLAG_PAYLOADS);
          }
        }
      }
=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2:lucene/facet/src/java/org/apache/lucene/facet/util/FacetsPayloadMigrationReader.java
      // we shouldn't return null, even if the term does not exist or has no
      // payloads, since we already marked the field as having DocValues.
      return new PayloadMigratingBinaryDocValues(fields(), term);
Solution content
    if (term == null) {
      return super.getBinaryDocValues(field);
    } else {
      // we shouldn't return null, even if the term does not exist or has no
      // payloads, since we already marked the field as having DocValues.
      return new PayloadMigratingBinaryDocValues(fields(), term);
File
FacetsPayloadMigrationReader.java
Developer's decision
Version 2
Kind of conflict
If statement
Method invocation
Variable
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException();
=======
    public long cost() {
      return 1;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
    public long cost() {
      return 1;
    }
  }
File
BlockGroupingCollector.java
Developer's decision
Concatenation
Kind of conflict
Method signature
Return statement
Throw statement
Chunk
Conflicting content
import java.util.Map;
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.IOException;
import java.text.BreakIterator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.PriorityQueue;
import java.util.SortedSet;
import java.util.TreeSet;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocsEnum;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocsEnum;
File
PostingsHighlighter.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
  // algorithm: treat sentence snippets as miniature documents
  // we can intersect these with the postings lists via BreakIterator.preceding(offset),s
  // score each sentence as norm(sentenceStartOffset) * sum(weight * tf(freq))
<<<<<<< HEAD
  private Passage[] highlightDoc(String field, Term terms[], int contentLength, BreakIterator bi, int doc, 
      TermsEnum termsEnum, DocsEnum[] postings, int n) throws IOException {
=======
  private Passage[] highlightDoc(String field, BytesRef terms[], int contentLength, BreakIterator bi, int doc, 
      TermsEnum termsEnum, DocsAndPositionsEnum[] postings, int n) throws IOException {
    PassageScorer scorer = getScorer(field);
    if (scorer == null) {
      throw new NullPointerException("PassageScorer cannot be null");
    }
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    PriorityQueue<OffsetsEnum> pq = new PriorityQueue<OffsetsEnum>();
    float weights[] = new float[terms.length];
    // initialize postings
Solution content
  // algorithm: treat sentence snippets as miniature documents
  // we can intersect these with the postings lists via BreakIterator.preceding(offset),s
  // score each sentence as norm(sentenceStartOffset) * sum(weight * tf(freq))
  private Passage[] highlightDoc(String field, BytesRef terms[], int contentLength, BreakIterator bi, int doc, 
      TermsEnum termsEnum, DocsEnum[] postings, int n) throws IOException {
    PassageScorer scorer = getScorer(field);
    if (scorer == null) {
      throw new NullPointerException("PassageScorer cannot be null");
    }
    PriorityQueue<OffsetsEnum> pq = new PriorityQueue<OffsetsEnum>();
    float weights[] = new float[terms.length];
    // initialize postings
File
PostingsHighlighter.java
Developer's decision
Combination
Kind of conflict
If statement
Method invocation
Method signature
Variable
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals)
        throws IOException {
      return null;
=======
    public long cost() {
      return cost;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals)
        throws IOException {
      return null;
    }

    @Override
    public long cost() {
      return cost;
    }
  }
File
TermsIncludingScoreQuery.java
Developer's decision
Concatenation
Kind of conflict
Method signature
Return statement
Variable
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      return parentScorer.intervals(collectIntervals);
=======
    public long cost() {
      return parentScorer.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      return parentScorer.intervals(collectIntervals);
    }

    @Override
    public long cost() {
      return parentScorer.cost();
    }
  }
File
ToChildBlockJoinQuery.java
Developer's decision
Concatenation
Kind of conflict
Method invocation
Method signature
Return statement
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException();
=======
    public long cost() {
      return 1;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
    public long cost() {
      return 1;
    }
  }
File
ToParentBlockJoinCollector.java
Developer's decision
Concatenation
Kind of conflict
Method signature
Return statement
Throw statement
Chunk
Conflicting content
    @Override
    public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
<<<<<<< HEAD
      BlockJoinScorer scorer = (BlockJoinScorer) scorer(context, true, false, PostingFeatures.DOCS_AND_FREQS, context.reader().getLiveDocs());
      if (scorer != null) {
        if (scorer.advance(doc) == doc) {
          return scorer.explain(context.docBase);
        }
=======
      BlockJoinScorer scorer = (BlockJoinScorer) scorer(context, true, false, context.reader().getLiveDocs());
      if (scorer != null && scorer.advance(doc) == doc) {
        return scorer.explain(context.docBase);
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      }
      return new ComplexExplanation(false, 0.0f, "Not a match");
    }
Solution content
    @Override
    public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
      BlockJoinScorer scorer = (BlockJoinScorer) scorer(context, true, false, PostingFeatures.DOCS_AND_FREQS, context.reader().getLiveDocs());
      if (scorer != null && scorer.advance(doc) == doc) {
        return scorer.explain(context.docBase);
      }
      return new ComplexExplanation(false, 0.0f, "Not a match");
    }
File
ToParentBlockJoinQuery.java
Developer's decision
Combination
Kind of conflict
Cast expression
If statement
Method invocation
Return statement
Variable
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException();
    }

=======
    public long cost() {
      return childScorer.cost();
    }

    /**
     * Instructs this scorer to keep track of the child docIds and score ids for retrieval purposes.
     */
    public void trackPendingChildHits() {
      pendingChildDocs = new int[5];
      if (scoreMode != ScoreMode.None) {
        pendingChildScores = new float[5];
      }
    }
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
  }

  @Override
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
    public long cost() {
      return childScorer.cost();
    }

    /**
     * Instructs this scorer to keep track of the child docIds and score ids for retrieval purposes.
     */
    public void trackPendingChildHits() {
      pendingChildDocs = new int[5];
      if (scoreMode != ScoreMode.None) {
        pendingChildScores = new float[5];
      }
    }
  }

  @Override
File
ToParentBlockJoinQuery.java
Developer's decision
Concatenation
Kind of conflict
Comment
Method declaration
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
File
MemoryIndex.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
<<<<<<< HEAD
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.Norm;
=======
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.NumericDocValues;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.OrdTermState;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
Solution content
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.OrdTermState;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
File
MemoryIndex.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
<<<<<<< HEAD
import org.apache.lucene.index.memory.MemoryIndexNormDocValues.SingleValueSource;
=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
Solution content
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
File
MemoryIndex.java
Developer's decision
Version 2
Kind of conflict
Import
Chunk
Conflicting content
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray;
import org.apache.lucene.util.Counter;
<<<<<<< HEAD
import org.apache.lucene.util.IntBlockPool;
import org.apache.lucene.util.IntBlockPool.SliceReader;
import org.apache.lucene.util.IntBlockPool.SliceWriter;
=======
import org.apache.lucene.util.IntBlockPool.SliceReader;
import org.apache.lucene.util.IntBlockPool.SliceWriter;
import org.apache.lucene.util.IntBlockPool;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.RecyclingByteBlockAllocator;
import org.apache.lucene.util.RecyclingIntBlockAllocator;
Solution content
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.IntBlockPool;
import org.apache.lucene.util.IntBlockPool.SliceReader;
import org.apache.lucene.util.IntBlockPool.SliceWriter;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.RecyclingByteBlockAllocator;
import org.apache.lucene.util.RecyclingIntBlockAllocator;
File
MemoryIndex.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
<<<<<<< HEAD
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.CompositeReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValues.Source;
import org.apache.lucene.index.DocsEnum;
=======
import org.apache.lucene.index.DocsAndPositionsEnum;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
Solution content
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.CompositeReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
File
MemoryIndexTest.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      return subQueryScorer.intervals(collectIntervals);
=======
    public long cost() {
      return subQueryScorer.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      return subQueryScorer.intervals(collectIntervals);
    }

    @Override
    public long cost() {
      return subQueryScorer.cost();
    }
  }
File
CustomScoreQuery.java
Developer's decision
Concatenation
Kind of conflict
Method invocation
Method signature
Return statement
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      return scorer.intervals(collectIntervals);
=======
    public long cost() {
      return scorer.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      return scorer.intervals(collectIntervals);
    }

    @Override
    public long cost() {
      return scorer.cost();
    }
  }
File
BoostedQuery.java
Developer's decision
Concatenation
Kind of conflict
Method invocation
Method signature
Return statement
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;

>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
File
RAMOnlyPostingsFormat.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.TermStats;
<<<<<<< HEAD
import org.apache.lucene.codecs.TermsConsumer;
=======
import org.apache.lucene.index.DocsAndPositionsEnum;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
<<<<<<< HEAD
Solution content
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.TermStats;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
File
RAMOnlyPostingsFormat.java
Developer's decision
None
Kind of conflict
Import
Chunk
Conflicting content
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
<<<<<<< HEAD
import org.apache.lucene.index.FieldInfo.IndexOptions;
=======
import org.apache.lucene.index.Fields;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
Solution content
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
File
RAMOnlyPostingsFormat.java
Developer's decision
Concatenation
Kind of conflict
Import
Chunk
Conflicting content
  /** Wraps a docsenum with additional checks */
  public static class AssertingDocsEnum extends FilterDocsEnum {
    private DocsEnumState state = DocsEnumState.START;
<<<<<<< HEAD
    int positionCount = 0;
    int positionMax = 0;
=======
    private int doc;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    
    public AssertingDocsEnum(DocsEnum in) {
      this(in, true);
Solution content
  /** Wraps a docsenum with additional checks */
  public static class AssertingDocsEnum extends FilterDocsEnum {
    private DocsEnumState state = DocsEnumState.START;
    int positionCount = 0;
    int positionMax = 0;
    private int doc;

    public AssertingDocsEnum(DocsEnum in) {
      this(in, true);
File
AssertingAtomicReader.java
Developer's decision
Concatenation
Kind of conflict
Attribute
Chunk
Conflicting content
        state = DocsEnumState.ITERATING;
        positionMax = super.freq();
      }
<<<<<<< HEAD
      positionCount = 0;
      return nextDoc;
=======
      assert super.docID() == nextDoc;
      return doc = nextDoc;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }

    @Override
Solution content
        state = DocsEnumState.ITERATING;
        positionMax = super.freq();
      }
      positionCount = 0;
      assert super.docID() == nextDoc;
      return doc = nextDoc;
    }

    @Override
File
AssertingAtomicReader.java
Developer's decision
Combination
Kind of conflict
Assert statement
Attribute
Return statement
Variable
Chunk
Conflicting content
        state = DocsEnumState.ITERATING;
        positionMax = super.freq();
      }
<<<<<<< HEAD
      positionCount = 0;
      return advanced;
=======
      assert super.docID() == advanced;
      return doc = advanced;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }

    @Override
Solution content
        state = DocsEnumState.ITERATING;
        positionMax = super.freq();
      }
      positionCount = 0;
      assert super.docID() == advanced;
      return doc = advanced;
    }

    @Override
File
AssertingAtomicReader.java
Developer's decision
Combination
Kind of conflict
Assert statement
Attribute
Return statement
Variable
Chunk
Conflicting content
 * limitations under the License.
 */

<<<<<<< HEAD
=======
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Random;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.lucene.analysis.MockAnalyzer;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
Solution content
 * limitations under the License.
 */

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
File
BasePostingsFormatTestCase.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
      }

      @Override
<<<<<<< HEAD
      public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
          boolean topScorer, PostingFeatures flags, Bits acceptDocs) throws IOException {
        Scorer scorer = w.scorer(context, scoreDocsInOrder, topScorer, flags, acceptDocs);
        if (scorer != null) {
          // check that scorer obeys disi contract for docID() before next()/advance
          try {
            int docid = scorer.docID();
            assert docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS;
          } catch (UnsupportedOperationException ignored) {
            // from a top-level BS1
            assert topScorer;
          }
        }
        return scorer;
      }

      @Override
=======
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
      public float getValueForNormalization() {
        throw new IllegalStateException("Weight already normalized.");
      }
Solution content
      }

      @Override
      public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
          boolean topScorer, PostingFeatures flags, Bits acceptDocs) throws IOException {
        Scorer scorer = w.scorer(context, scoreDocsInOrder, topScorer, flags, acceptDocs);
        if (scorer != null) {
          // check that scorer obeys disi contract for docID() before next()/advance
          try {
            int docid = scorer.docID();
            assert docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS;
          } catch (UnsupportedOperationException ignored) {
            // from a top-level BS1
            assert topScorer;
          }
        }
        return scorer;
      }

      @Override
      public float getValueForNormalization() {
        throw new IllegalStateException("Weight already normalized.");
      }
File
AssertingIndexSearcher.java
Developer's decision
Version 1
Kind of conflict
Annotation
Method declaration
Chunk
Conflicting content
import org.apache.lucene.index.CheckIndex.Status.TermIndexStatus;
import org.apache.lucene.index.CheckIndex.Status.TermVectorStatus;
import org.apache.lucene.index.ConcurrentMergeScheduler;
<<<<<<< HEAD
import org.apache.lucene.index.DocValues;
=======
import org.apache.lucene.index.DocsAndPositionsEnum;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfos;
Solution content
import org.apache.lucene.index.CheckIndex.Status.TermIndexStatus;
import org.apache.lucene.index.CheckIndex.Status.TermVectorStatus;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfos;
File
_TestUtil.java
Developer's decision
None
Kind of conflict
Import
Chunk
Conflicting content
import org.apache.solr.common.SolrException;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.VectorValueSource;
<<<<<<< HEAD
import org.apache.lucene.search.*;
import org.apache.lucene.search.intervals.IntervalIterator;

import com.spatial4j.core.io.ParseUtils;
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.distance.DistanceUtils;
import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Rectangle;
=======
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.Weight;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.util.Bits;
import org.apache.solr.response.TextResponseWriter;
Solution content
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.VectorValueSource;
import org.apache.lucene.search.intervals.IntervalIterator;

import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.solr.common.SolrException;
import org.apache.solr.response.TextResponseWriter;
File
LatLonType.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
    }

    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      if (iter instanceof Scorer) {
        return ((Scorer) iter).intervals(collectIntervals);
      }
      throw new UnsupportedOperationException("Positions are only supported for Scorers");

=======
    public long cost() {
      return iter.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      if (iter instanceof Scorer) {
        return ((Scorer) iter).intervals(collectIntervals);
      }
      throw new UnsupportedOperationException("Positions are only supported for Scorers");

    }

    @Override
    public long cost() {
      return iter.cost();
    }
  }
File
JoinQParserPlugin.java
Developer's decision
Concatenation
Kind of conflict
If statement
Method invocation
Method signature
Return statement
Throw statement
Chunk
Conflicting content
    }
    @Override
<<<<<<< HEAD
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      if (docIdSetIterator instanceof Scorer) {
        return ((Scorer) docIdSetIterator).intervals(collectIntervals);
      }
      throw new UnsupportedOperationException("Positions are only supported for Scorers");
=======
    public long cost() {
      return docIdSetIterator.cost();
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
    }
  }
Solution content
    }

    @Override
    public IntervalIterator intervals(boolean collectIntervals) throws IOException {
      if (docIdSetIterator instanceof Scorer) {
        return ((Scorer) docIdSetIterator).intervals(collectIntervals);
      }
      throw new UnsupportedOperationException("Positions are only supported for Scorers");
    }

    @Override
    public long cost() {
      return docIdSetIterator.cost();
    }
  }
File
SolrConstantScoreQuery.java
Developer's decision
Concatenation
Kind of conflict
If statement
Method invocation
Method signature
Return statement
Throw statement
Chunk
Conflicting content
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.TextField;
<<<<<<< HEAD
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.Weight.PostingFeatures;
=======
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocsEnum;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.Weight;
>>>>>>> 9c47892d4ea8f70beff0f5a8357d749e843c2ca2
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
Solution content
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocsEnum;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.Weight.PostingFeatures;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
File
SolrIndexSearcher.java
Developer's decision
Combination
Kind of conflict
Import