Lucene.Net  3.0.3
Lucene.Net is a port of the Lucene search engine library, written in C# and targeted at .NET runtime users.
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Properties Pages
FreqProxFieldMergeState.cs
Go to the documentation of this file.
1 /*
2  * Licensed to the Apache Software Foundation (ASF) under one or more
3  * contributor license agreements. See the NOTICE file distributed with
4  * this work for additional information regarding copyright ownership.
5  * The ASF licenses this file to You under the Apache License, Version 2.0
6  * (the "License"); you may not use this file except in compliance with
7  * the License. You may obtain a copy of the License at
8  *
9  * http://www.apache.org/licenses/LICENSE-2.0
10  *
11  * Unless required by applicable law or agreed to in writing, software
12  * distributed under the License is distributed on an "AS IS" BASIS,
13  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14  * See the License for the specific language governing permissions and
15  * limitations under the License.
16  */
17 
18 using System;
19 using Lucene.Net.Support;
20 
21 namespace Lucene.Net.Index
22 {
23 
24  // TODO FI: some of this is "generic" to TermsHash* so we
25  // should factor it out so other consumers don't have to
26  // duplicate this code
27 
28  /// <summary>Used by DocumentsWriter to merge the postings from
29  /// multiple ThreadStates when creating a segment
30  /// </summary>
32  {
33 
        internal FreqProxTermsWriterPerField field;   // the field whose postings are being merged
        internal int numPostings;                     // number of unique terms (postings) for this field
        internal CharBlockPool charPool;              // shared char pool that stores the term text
        internal RawPostingList[] postings;           // postings for this field, sorted by SortPostings()

        private FreqProxTermsWriter.PostingList p;    // posting (term) currently being iterated
        internal char[] text;                         // char-pool buffer containing the current term's text
        internal int textOffset;                      // offset of the current term within 'text'

        private int postingUpto = - 1;                // index into 'postings'; -1 = positioned before the first term

        internal ByteSliceReader freq = new ByteSliceReader();  // reads the doc/freq byte slices (stream 0)
        internal ByteSliceReader prox = new ByteSliceReader();  // reads the positions byte slices (stream 1)

        internal int docID;      // docID of the current document for the current term
        internal int termFreq;   // frequency of the current term within the current document
52  {
53  this.field = field;
54  this.charPool = field.perThread.termsHashPerThread.charPool;
55  this.numPostings = field.termsHashPerField.numPostings;
56  this.postings = field.termsHashPerField.SortPostings();
57  }
58 
59  internal bool NextTerm()
60  {
61  postingUpto++;
62  if (postingUpto == numPostings)
63  return false;
64 
65  p = (FreqProxTermsWriter.PostingList) postings[postingUpto];
66  docID = 0;
67 
68  text = charPool.buffers[p.textStart >> DocumentsWriter.CHAR_BLOCK_SHIFT];
69  textOffset = p.textStart & DocumentsWriter.CHAR_BLOCK_MASK;
70 
71  field.termsHashPerField.InitReader(freq, p, 0);
72  if (!field.fieldInfo.omitTermFreqAndPositions)
73  field.termsHashPerField.InitReader(prox, p, 1);
74 
75  // Should always be true
76  bool result = NextDoc();
77  System.Diagnostics.Debug.Assert(result);
78 
79  return true;
80  }
81 
82  public bool NextDoc()
83  {
84  if (freq.Eof())
85  {
86  if (p.lastDocCode != - 1)
87  {
88  // Return last doc
89  docID = p.lastDocID;
90  if (!field.omitTermFreqAndPositions)
91  termFreq = p.docFreq;
92  p.lastDocCode = - 1;
93  return true;
94  }
95  // EOF
96  else
97  return false;
98  }
99 
100  int code = freq.ReadVInt();
101  if (field.omitTermFreqAndPositions)
102  docID += code;
103  else
104  {
105  docID += Number.URShift(code, 1);
106  if ((code & 1) != 0)
107  termFreq = 1;
108  else
109  termFreq = freq.ReadVInt();
110  }
111 
112  System.Diagnostics.Debug.Assert(docID != p.lastDocID);
113 
114  return true;
115  }
116  }
117 }