using System;
using System.Collections.Generic;
using System.Linq;
using Lucene.Net.Support;
using Document = Lucene.Net.Documents.Document;
using Directory = Lucene.Net.Store.Directory;
using Lock = Lucene.Net.Store.Lock;

namespace Lucene.Net.Index
    private class AnonymousClassFindSegmentsFile : SegmentInfos.FindSegmentsFile
    {
        private void InitBlock(bool readOnly, IndexDeletionPolicy deletionPolicy, int termInfosIndexDivisor)
        {
            this.readOnly = readOnly;
            this.deletionPolicy = deletionPolicy;
            this.termInfosIndexDivisor = termInfosIndexDivisor;
        }

        private bool readOnly;
        private IndexDeletionPolicy deletionPolicy;
        private int termInfosIndexDivisor;

        internal AnonymousClassFindSegmentsFile(bool readOnly, IndexDeletionPolicy deletionPolicy, int termInfosIndexDivisor, Lucene.Net.Store.Directory Param1) : base(Param1)
        {
            InitBlock(readOnly, deletionPolicy, termInfosIndexDivisor);
        }

        public override object DoBody(string segmentFileName)
        {
            var infos = new SegmentInfos();
            infos.Read(directory, segmentFileName);
            return new DirectoryReader(directory, infos, deletionPolicy, false, termInfosIndexDivisor);
        }
    }
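    // Note: DoBody is invoked by SegmentInfos.FindSegmentsFile.Run(), which retries it against
    // successive segments_N generations so that opening a reader tolerates commits happening
    // concurrently with the open.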
    private class AnonymousClassFindSegmentsFile1 : SegmentInfos.FindSegmentsFile
    {
        private void InitBlock(bool openReadOnly, DirectoryReader enclosingInstance)
        {
            this.openReadOnly = openReadOnly;
            this.enclosingInstance = enclosingInstance;
        }

        private bool openReadOnly;
        private DirectoryReader enclosingInstance;

        public DirectoryReader Enclosing_Instance
        {
            get { return enclosingInstance; }
        }

        internal AnonymousClassFindSegmentsFile1(bool openReadOnly, DirectoryReader enclosingInstance, Lucene.Net.Store.Directory Param1) : base(Param1)
        {
            InitBlock(openReadOnly, enclosingInstance);
        }

        public override object DoBody(string segmentFileName)
        {
            var infos = new SegmentInfos();
            infos.Read(directory, segmentFileName);
            return Enclosing_Instance.DoReopen(infos, false, openReadOnly);
        }
    }
    protected internal Directory internalDirectory;
    protected internal bool readOnly;

    private readonly HashSet<string> synced = new HashSet<string>();
    private Lock writeLock;
    private readonly int termInfosIndexDivisor;
    private bool rollbackHasChanges;

    private int[] starts; // first docno for each segment
    private IDictionary<string, byte[]> normsCache = new HashMap<string, byte[]>();
    private int maxDoc = 0;
    private int numDocs = -1;
    private bool hasDeletions = false;

    private long maxIndexVersion;
        return (IndexReader) new AnonymousClassFindSegmentsFile(readOnly, deletionPolicy, termInfosIndexDivisor, directory).Run(commit);
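        // Run(commit) resolves the requested IndexCommit (or, when commit is null, the most recent
        // segments_N in the directory) and passes its file name to DoBody above.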
        internalDirectory = directory;
        this.readOnly = readOnly;
        this.segmentInfos = sis;
        this.deletionPolicy = deletionPolicy;
        this.termInfosIndexDivisor = termInfosIndexDivisor;

        synced.UnionWith(sis.Files(directory, true));

        // Open a SegmentReader for each segment, iterating over the SegmentInfos in reverse order.
        for (int i = sis.Count - 1; i >= 0; i--)

            bool success = false;

            // On failure, close the readers that were already opened,
            // swallowing any exception so cleanup continues.
            for (i++; i < sis.Count; i++)

            catch (System.Exception)
        // Near-real-time reader: segment readers are pulled from the writer's reader pool.
        this.internalDirectory = writer.Directory;
        this.readOnly = true;
        segmentInfos = infos;
        this.termInfosIndexDivisor = termInfosIndexDivisor;

        synced.UnionWith(infos.Files(internalDirectory, true));

        int numSegments = infos.Count;
        var readers = new SegmentReader[numSegments];
        int upto = 0;

        for (int i = 0; i < numSegments; i++)

            bool success = false;

            readers[upto++] = writer.readerPool.GetReadOnlyClone(info, true, termInfosIndexDivisor);

            // On failure, close the readers obtained so far, ignoring exceptions from Close().
            for (upto--; upto >= 0; upto--)

                readers[upto].Close();

            catch (System.Exception)

        this.writer = writer;

        if (upto < readers.Length)

            var newReaders = new SegmentReader[upto];
            Array.Copy(readers, 0, newReaders, 0, upto);
            readers = newReaders;
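        // Not every segment necessarily contributed a reader, so the array is trimmed down to the
        // 'upto' entries that were actually filled before being handed on.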
                            IEnumerable<KeyValuePair<string, byte[]>> oldNormsCache, bool readOnly, bool doClone, int termInfosIndexDivisor)

        this.internalDirectory = directory;
        this.readOnly = readOnly;
        this.segmentInfos = infos;
        this.termInfosIndexDivisor = termInfosIndexDivisor;

        synced.UnionWith(infos.Files(directory, true));

        IDictionary<string, int> segmentReaders = new HashMap<string, int>();
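        // Map each old segment's name to its index in oldReaders so that segments still present
        // in the new SegmentInfos can reuse (share) their existing SegmentReader.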
        if (oldReaders != null)

            for (int i = 0; i < oldReaders.Length; i++)

        var newReaders = new SegmentReader[infos.Count];
        var readerShared = new bool[infos.Count];

        for (int i = infos.Count - 1; i >= 0; i--)

            // Find a matching old reader for this segment, if any.
            if (!segmentReaders.ContainsKey(infos.Info(i).name))

                newReaders[i] = null;

            newReaders[i] = oldReaders[segmentReaders[infos.Info(i).name]];

            bool success = false;

            // A reader must be (re)opened if there was no old reader, or the segment has switched
            // compound-file format since it was last opened.
            if (newReaders[i] == null || infos.Info(i).GetUseCompoundFile() != newReaders[i].SegmentInfo.GetUseCompoundFile())

                System.Diagnostics.Debug.Assert(!doClone);

            newReader = newReaders[i].ReopenSegment(infos.Info(i), doClone, readOnly);

            if (newReader == newReaders[i])

                // ReopenSegment returned the same instance: the segment is unchanged and shared.
                readerShared[i] = true;

                readerShared[i] = false;
                newReaders[i] = newReader;

            // On failure, roll back: close the readers this reopen created, DecRef the shared ones.
            for (i++; i < infos.Count; i++)

                if (newReaders[i] != null)

                    if (!readerShared[i])

                        newReaders[i].Close();

                    newReaders[i].DecRef();

            catch (System.IO.IOException)

        Initialize(newReaders);
        if (oldNormsCache != null)

            foreach (var entry in oldNormsCache)

                String field = entry.Key;
                if (!HasNorms(field))

                byte[] oldBytes = entry.Value;

                var bytes = new byte[MaxDoc];

                for (int i = 0; i < subReaders.Length; i++)

                    int oldReaderIndex = segmentReaders[subReaders[i].SegmentName];

                    if (segmentReaders.ContainsKey(subReaders[i].SegmentName) &&
                        (oldReaders[oldReaderIndex] == subReaders[i]
                         || oldReaders[oldReaderIndex].norms[field] == subReaders[i].norms[field]))

                        Array.Copy(oldBytes, oldStarts[oldReaderIndex], bytes, starts[i], starts[i + 1] - starts[i]);

                    subReaders[i].Norms(field, bytes, starts[i]);

                normsCache[field] = bytes;
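        // Re-using the old norms cache this way means norms are only re-read for segments whose
        // readers actually changed; unchanged (shared) sub-readers just have their bytes copied over.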
        this.subReaders = subReaders;
        starts = new int[subReaders.Length + 1];

        for (int i = 0; i < subReaders.Length; i++)

            maxDoc += subReaders[i].MaxDoc;

            if (subReaders[i].HasDeletions)

        starts[subReaders.Length] = maxDoc;
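        // starts[i] is the base (first) doc id of sub-reader i and starts[subReaders.Length] == maxDoc,
        // so starts[i + 1] - starts[i] is sub-reader i's MaxDoc and a global doc id n belongs to the
        // sub-reader i with starts[i] <= n < starts[i + 1].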
    public override Object Clone()

        return Clone(readOnly);

        throw new SystemException(ex.Message, ex);

        if (this != newReader)

            newReader.deletionPolicy = deletionPolicy;

        newReader.writer = writer;

        if (!openReadOnly && writeLock != null)

            // Transfer the write lock and pending-change state to the clone.
            System.Diagnostics.Debug.Assert(writer == null);
            newReader.writeLock = writeLock;
            newReader.hasChanges = hasChanges;
            newReader.hasDeletions = hasDeletions;
        return DoReopen(readOnly, null);

        return DoReopen(openReadOnly, null);

        return DoReopen(true, commit);

        System.Diagnostics.Debug.Assert(readOnly);

        throw new System.ArgumentException("a reader obtained from IndexWriter.getReader() can only be reopened with openReadOnly=true (got false)");

        throw new System.ArgumentException("a reader obtained from IndexWriter.getReader() cannot currently accept a commit");

    internal virtual IndexReader DoReopen(bool openReadOnly, IndexCommit commit)

        System.Diagnostics.Debug.Assert(commit == null || openReadOnly);

        return DoReopenFromWriter(openReadOnly, commit);

        return DoReopenNoWriter(openReadOnly, commit);
    private IndexReader DoReopenNoWriter(bool openReadOnly, IndexCommit commit)

        System.Diagnostics.Debug.Assert(readOnly == false);

        System.Diagnostics.Debug.Assert(writeLock != null);

        System.Diagnostics.Debug.Assert(IsCurrent());

        return Clone(openReadOnly);

        else if (IsCurrent())

            if (openReadOnly != readOnly)

                return Clone(openReadOnly);

        if (internalDirectory != commit.Directory)
            throw new System.IO.IOException("the specified commit does not match the specified Directory");

        if (segmentInfos != null && commit.SegmentsFileName.Equals(segmentInfos.GetCurrentSegmentFileName()))

            if (readOnly != openReadOnly)

                return Clone(openReadOnly);

        return (IndexReader) new AnonymousFindSegmentsFile(internalDirectory, openReadOnly, this).Run(commit);
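        // Reopen fast paths: when the reader is still current (or only the openReadOnly flag differs),
        // a clone is returned instead of re-reading the index; only otherwise does the
        // FindSegmentsFile run above load a new SegmentInfos and build a fresh DirectoryReader.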
    class AnonymousFindSegmentsFile : SegmentInfos.FindSegmentsFile
    {
        readonly DirectoryReader enclosingInstance;
        readonly bool openReadOnly;
        readonly Directory dir;

        public AnonymousFindSegmentsFile(Directory directory, bool openReadOnly, DirectoryReader dirReader) : base(directory)
        {
            this.dir = directory;
            this.openReadOnly = openReadOnly;
            enclosingInstance = dirReader;
        }

        public override object DoBody(string segmentFileName)
        {
            var infos = new SegmentInfos();
            infos.Read(dir, segmentFileName);
            return enclosingInstance.DoReopen(infos, false, openReadOnly);
        }
    }
    private DirectoryReader DoReopen(SegmentInfos infos, bool doClone, bool openReadOnly)

        DirectoryReader reader;

        if (openReadOnly)
            reader = new ReadOnlyDirectoryReader(internalDirectory, infos, subReaders, starts, normsCache, doClone, termInfosIndexDivisor);
        else
            reader = new DirectoryReader(internalDirectory, infos, subReaders, starts, normsCache, false, doClone, termInfosIndexDivisor);
        return segmentInfos.Version;

        int i = ReaderIndex(n);

        int i = ReaderIndex(n);

    public override void GetTermFreqVector(int docNumber, System.String field, TermVectorMapper mapper)

        int i = ReaderIndex(docNumber);

        int i = ReaderIndex(docNumber);

    public override bool IsOptimized()

        return segmentInfos.Count == 1 && !HasDeletions;

    public override int NumDocs()

        int n = subReaders.Sum(t => t.NumDocs());
    public override int MaxDoc

        int i = ReaderIndex(n);
        return subReaders[i].Document(n - starts[i], fieldSelector);

    public override bool IsDeleted(int n)

        int i = ReaderIndex(n);
        return subReaders[i].IsDeleted(n - starts[i]);

    public override bool HasDeletions

    protected internal override void DoDelete(int n)

        int i = ReaderIndex(n);

    protected internal override void DoUndeleteAll()

        foreach (SegmentReader t in subReaders)

        hasDeletions = false;
    private int ReaderIndex(int n)

        return ReaderIndex(n, this.starts, this.subReaders.Length);

    internal static int ReaderIndex(int n, int[] starts, int numSubReaders)

        int hi = numSubReaders - 1;

            int midValue = starts[mid];

            else if (n > midValue)

                while (mid + 1 < numSubReaders && starts[mid + 1] == midValue)
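    // ReaderIndex performs a binary search over starts[] for the sub-reader containing global doc n;
    // the trailing while loop skips past empty sub-readers that share the same start offset so the
    // last sub-reader with that start is returned.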
    public override bool HasNorms(System.String field)

        return subReaders.Any(t => t.HasNorms(field));

    public override byte[] Norms(System.String field)

        byte[] bytes = normsCache[field];

        if (!HasNorms(field))

        bytes = new byte[MaxDoc];
        for (int i = 0; i < subReaders.Length; i++)
            subReaders[i].Norms(field, bytes, starts[i]);
        normsCache[field] = bytes;
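        // Norms for the whole index are materialized lazily: each sub-reader writes its slice into
        // a single MaxDoc-sized array at offset starts[i], and the result is cached per field.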
    public override void Norms(System.String field, byte[] result, int offset)

        byte[] bytes = normsCache[field];
        if (bytes == null && !HasNorms(field))

            for (int index = offset; index < result.Length; index++)
                result.SetValue(val, index);

        else if (bytes != null)

            Array.Copy(bytes, 0, result, offset, MaxDoc);

        for (int i = 0; i < subReaders.Length; i++)

            subReaders[i].Norms(field, result, offset + starts[i]);

    protected internal override void DoSetNorm(int n, System.String field, byte value_Renamed)

        normsCache.Remove(field);

        int i = ReaderIndex(n);
        subReaders[i].SetNorm(n - starts[i], field, value_Renamed);

    public override int DocFreq(Term t)

        for (int i = 0; i < subReaders.Length; i++)
            total += subReaders[i].DocFreq(t);
    protected internal override void AcquireWriteLock()

        if (segmentInfos != null)

            throw new StaleReaderException("IndexReader out of date and no longer valid for delete, undelete, or setNorm operations");

        if (this.writeLock == null)

            Lock writeLock = internalDirectory.MakeLock(IndexWriter.WRITE_LOCK_NAME);
            if (!writeLock.Obtain(IndexWriter.WRITE_LOCK_TIMEOUT))

            this.writeLock = writeLock;

            if (SegmentInfos.ReadCurrentVersion(internalDirectory) > maxIndexVersion)

                this.writeLock.Release();
                this.writeLock = null;
                throw new StaleReaderException("IndexReader out of date and no longer valid for delete, undelete, or setNorm operations");
    protected internal override void DoCommit(IDictionary<string, string> commitUserData)

        segmentInfos.UserData = commitUserData;

        var deleter = new IndexFileDeleter(internalDirectory, deletionPolicy ?? new KeepOnlyLastCommitDeletionPolicy(), segmentInfos, null, null, synced);

        segmentInfos.UpdateGeneration(deleter.LastSegmentInfos);

        bool success = false;

        foreach (SegmentReader t in subReaders)

        foreach (string fileName in segmentInfos.Files(internalDirectory, false))

            if (!synced.Contains(fileName))

                System.Diagnostics.Debug.Assert(internalDirectory.FileExists(fileName));
                internalDirectory.Sync(fileName);
                synced.Add(fileName);

        segmentInfos.Commit(internalDirectory);

        deleter.Checkpoint(segmentInfos, true);

        maxIndexVersion = segmentInfos.Version;
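        // Commit ordering: every referenced file not yet known to be sync'd is fsync'd first, then
        // the new segments_N is written (segmentInfos.Commit), and finally the deleter checkpoint
        // removes files that are no longer referenced under the deletion policy.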
        if (writeLock != null)

    internal virtual void StartCommit()

        rollbackHasChanges = hasChanges;
        foreach (SegmentReader t in subReaders)

    internal virtual void RollbackCommit()

        hasChanges = rollbackHasChanges;
        foreach (SegmentReader t in subReaders)

    public override IDictionary<string, string> CommitUserData

        return segmentInfos.UserData;

    public override bool IsCurrent()

        if (writer == null || writer.IsClosed())

        return writer.NrtIsCurrent(segmentInfosStart);
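    // For a reader obtained from an IndexWriter (near-real-time), the writer decides currency via
    // NrtIsCurrent; a reader opened directly on a Directory is presumably checked against the
    // directory's current segments version (that branch is not shown in this excerpt).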
    protected internal override void DoClose()

        System.IO.IOException ioe = null;

        catch (System.IO.IOException e)

        Search.FieldCache_Fields.DEFAULT.Purge(this);

    public override ICollection<string> GetFieldNames(IndexReader.FieldOption fieldNames)

        return GetFieldNames(fieldNames, this.subReaders);

    internal static ICollection<string> GetFieldNames(IndexReader.FieldOption fieldNames, IndexReader[] subReaders)

        ISet<string> fieldSet = Support.Compatibility.SetFactory.CreateHashSet<string>();

        return internalDirectory;
    public override int TermInfosIndexDivisor
    {
        get { return termInfosIndexDivisor; }
    }

        get { return new ReaderCommit(segmentInfos, internalDirectory); }
    public static new ICollection<IndexCommit> ListCommits(Directory dir)

        String[] files = dir.ListAll();

        ICollection<IndexCommit> commits = new List<IndexCommit>();

        long currentGen = latest.Generation;

        commits.Add(new ReaderCommit(latest, dir));

        foreach (string fileName in files)

            sis.Read(dir, fileName);

            catch (System.IO.FileNotFoundException)

            commits.Add(new ReaderCommit(sis, dir));
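        // Each segments_N file still present in the directory yields one ReaderCommit; a
        // FileNotFoundException is tolerated because a commit can be deleted while the listing runs.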
        private readonly String segmentsFileName;
        private readonly ICollection<string> files;
        private readonly long generation;
        private readonly long version;
        private readonly bool isOptimized;
        private readonly IDictionary<string, string> userData;

            files = infos.Files(dir, true);

        public override string ToString()

            return "DirectoryReader.ReaderCommit(" + segmentsFileName + ")";

        public override bool IsOptimized
        {
            get { return isOptimized; }
        }

        public override string SegmentsFileName
        {
            get { return segmentsFileName; }
        }

        public override ICollection<string> FileNames
        {
            get { return files; }
        }

            get { return version; }

        public override long Generation
        {
            get { return generation; }
        }

        public override bool IsDeleted
        {
            get { return false; }
        }

        public override IDictionary<string, string> UserData
        {
            get { return userData; }
        }

        public override void Delete()

            throw new System.NotSupportedException("This IndexCommit does not support deletions");
        private readonly SegmentMergeQueue queue;

        private int docFreq;
        internal SegmentMergeInfo[] matchingSegments;

            this.topReader = topReader;
            queue = new SegmentMergeQueue(readers.Length);
            matchingSegments = new SegmentMergeInfo[readers.Length + 1];

            for (int i = 0; i < readers.Length; i++)

                TermEnum termEnum = t != null ? reader.Terms(t) : reader.Terms();

                var smi = new SegmentMergeInfo(starts[i], termEnum, reader) { ord = i };
                if (t == null ? smi.Next() : termEnum.Term != null)

            if (t != null && queue.Size() > 0)
        public override bool Next()

            foreach (SegmentMergeInfo smi in matchingSegments)

            int numMatchingSegments = 0;
            matchingSegments[0] = null;

            SegmentMergeInfo top = queue.Top();

            while (top != null && term.CompareTo(top.term) == 0)

                matchingSegments[numMatchingSegments++] = top;

                docFreq += top.termEnum.DocFreq();

            matchingSegments[numMatchingSegments] = null;
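            // Next() collects every SegmentMergeInfo positioned on the smallest current term into
            // matchingSegments (null-terminated) and sums their DocFreq; MultiTermDocs later visits
            // only those segments for the current term.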
            get { return term; }

        public override int DocFreq()

        protected override void Dispose(bool disposing)
        protected internal int[] starts;
        protected internal Term term;

        protected internal int base_Renamed = 0;
        protected internal int pointer = 0;

        private readonly TermDocs[] readerTermDocs;
        protected internal TermDocs current;

        internal int matchingSegmentPos;
        internal SegmentMergeInfo smi;

            this.topReader = topReader;

            readerTermDocs = new TermDocs[r.Length];

        public virtual int Doc
        {
            get { return base_Renamed + current.Doc; }
        }

        public virtual int Freq
        {
            get { return current.Freq; }
        }

        public virtual void Seek(Term term)

            this.base_Renamed = 0;

            this.current = null;

            this.matchingSegmentPos = 0;

        public virtual void Seek(TermEnum termEnum)

            Seek(termEnum.Term);

            if (multiTermEnum != null)

                tenum = multiTermEnum;
                if (topReader != tenum.topReader)
        public virtual bool Next()

            if (current != null && current.Next())

            else if (pointer < readers.Length)

                smi = tenum.matchingSegments[matchingSegmentPos++];

                pointer = readers.Length;

            base_Renamed = starts[pointer];

        public virtual int Read(int[] docs, int[] freqs)

            while (current == null)

                if (pointer < readers.Length)

                    smi = tenum.matchingSegments[matchingSegmentPos++];

                    pointer = readers.Length;

                base_Renamed = starts[pointer];

            int end = current.Read(docs, freqs);

            int b = base_Renamed;
            for (int i = 0; i < end; i++)
        public virtual bool SkipTo(int target)

            if (current != null && current.SkipTo(target - base_Renamed))

            else if (pointer < readers.Length)

                SegmentMergeInfo smi = tenum.matchingSegments[matchingSegmentPos++];

                pointer = readers.Length;

            base_Renamed = starts[pointer];

            TermDocs result = readerTermDocs[i] ?? (readerTermDocs[i] = TermDocs(readers[i]));

            System.Diagnostics.Debug.Assert(smi.ord == i);
            System.Diagnostics.Debug.Assert(smi.termEnum.Term.Equals(term));
            result.Seek(smi.termEnum);

            return term == null ? reader.TermDocs(null) : reader.TermDocs();
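        // Per-segment TermDocs instances are created lazily and cached in readerTermDocs. When the
        // current term came from a MultiTermEnum, Seek(smi.termEnum) reuses that enum's position
        // instead of looking the term up again; a null term asks the sub-reader for its raw
        // TermDocs enumeration.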
        public virtual void Close()

        public virtual void Dispose()

        protected virtual void Dispose(bool disposing)

            foreach (TermDocs t in readerTermDocs)

            return reader.TermPositions();

        public virtual int NextPosition()

        public virtual int PayloadLength

        public virtual byte[] GetPayload(byte[] data, int offset)

        public virtual bool IsPayloadAvailable
        {
            get { return ((TermPositions) current).IsPayloadAvailable; }
        }