C# Index.SegmentInfo Class Code Examples


This article collects typical usage examples of the C# class Lucene.Net.Index.SegmentInfo. If you are unsure what the SegmentInfo class is for, or how it is used in practice, the curated examples below should help.



The SegmentInfo class belongs to the Lucene.Net.Index namespace. Twenty code examples of the class are shown below, ordered roughly by popularity.
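
Before the numbered examples, here is a minimal orientation sketch that is not drawn from any of the listed projects. It assumes the legacy-style three-argument constructor SegmentInfo(name, docCount, dir) used in Examples 5 and 15, and the lower-case name/docCount members read in Examples 6, 9, and 13; newer Lucene.Net 4.x code, as in Examples 2, 3, and 7, exposes Name and DocCount properties instead. The RAMDirectory, the segment name "_0", the document count of 100, and the wrapper class are illustrative placeholders.

using Lucene.Net.Index;
using Lucene.Net.Store;

public static class SegmentInfoOrientation
{
    public static void Main()
    {
        // In-memory directory standing in for a real index directory (placeholder).
        Directory dir = new RAMDirectory();

        // Legacy-style constructor, as used in Examples 5 and 15:
        // segment name, number of documents in the segment, and the Directory
        // that holds the segment's files.
        SegmentInfo si = new SegmentInfo("_0", 100, dir);

        // The readers, writers, and mergers shown below consume exactly this
        // information: which segment to open and how many documents it holds.
        // (Older ports expose name/docCount fields; 4.x exposes Name/DocCount.)
        System.Console.WriteLine("segment " + si.name + " holds " + si.docCount + " docs");
    }
}

From there the SegmentInfo is handed to components such as SegmentReadState (Example 1), SegmentMerger (Example 4), or SegmentReader.Get (Examples 10 and 11).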

Example 1: SegmentReadState

 /// <summary>
 /// Create a {@code SegmentReadState}. </summary>
 public SegmentReadState(Directory dir, SegmentInfo info, FieldInfos fieldInfos, IOContext context, int termsIndexDivisor, string segmentSuffix)
 {
     this.Directory = dir;
     this.SegmentInfo = info;
     this.FieldInfos = fieldInfos;
     this.Context = context;
     this.TermsIndexDivisor = termsIndexDivisor;
     this.SegmentSuffix = segmentSuffix;
 }
Developer: Cefa68000, Project: lucenenet, Lines: 11, Source: SegmentReadState.cs


Example 2: CompressingTermVectorsReader

        /// <summary>
        /// Sole constructor. </summary>
        public CompressingTermVectorsReader(Directory d, SegmentInfo si, string segmentSuffix, FieldInfos fn, IOContext context, string formatName, CompressionMode compressionMode)
        {
            this.compressionMode = compressionMode;
            string segment = si.Name;
            bool success = false;
            fieldInfos = fn;
            numDocs = si.DocCount;
            ChecksumIndexInput indexStream = null;
            try
            {
                // Load the index into memory
                string indexStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, CompressingTermVectorsWriter.VECTORS_INDEX_EXTENSION);
                indexStream = d.OpenChecksumInput(indexStreamFN, context);
                string codecNameIdx = formatName + CompressingTermVectorsWriter.CODEC_SFX_IDX;
                version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingTermVectorsWriter.VERSION_START, CompressingTermVectorsWriter.VERSION_CURRENT);
                Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.FilePointer);
                indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);

                if (version >= CompressingTermVectorsWriter.VERSION_CHECKSUM)
                {
                    indexStream.ReadVLong(); // the end of the data file
                    CodecUtil.CheckFooter(indexStream);
                }
                else
                {
                    CodecUtil.CheckEOF(indexStream);
                }
                indexStream.Dispose();
                indexStream = null;

                // Open the data file and read metadata
                string vectorsStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, CompressingTermVectorsWriter.VECTORS_EXTENSION);
                vectorsStream = d.OpenInput(vectorsStreamFN, context);
                string codecNameDat = formatName + CompressingTermVectorsWriter.CODEC_SFX_DAT;
                int version2 = CodecUtil.CheckHeader(vectorsStream, codecNameDat, CompressingTermVectorsWriter.VERSION_START, CompressingTermVectorsWriter.VERSION_CURRENT);
                if (version != version2)
                {
                    throw new Exception("Version mismatch between stored fields index and data: " + version + " != " + version2);
                }
                Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == vectorsStream.FilePointer);

                packedIntsVersion = vectorsStream.ReadVInt();
                chunkSize = vectorsStream.ReadVInt();
                decompressor = compressionMode.NewDecompressor();
                this.reader = new BlockPackedReaderIterator(vectorsStream, packedIntsVersion, CompressingTermVectorsWriter.BLOCK_SIZE, 0);

                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.CloseWhileHandlingException(this, indexStream);
                }
            }
        }
Developer: joyanta, Project: lucene.net, Lines: 58, Source: CompressingTermVectorsReader.cs


Example 3: CompressingStoredFieldsWriter

        private int NumBufferedDocs; // docBase + numBufferedDocs == current doc ID

        /// <summary>
        /// Sole constructor. </summary>
        public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize)
        {
            Debug.Assert(directory != null);
            this.Directory = directory;
            this.Segment = si.Name;
            this.SegmentSuffix = segmentSuffix;
            this.CompressionMode = compressionMode;
            this.Compressor = compressionMode.NewCompressor();
            this.ChunkSize = chunkSize;
            this.DocBase = 0;
            this.BufferedDocs = new GrowableByteArrayDataOutput(chunkSize);
            this.NumStoredFields = new int[16];
            this.EndOffsets = new int[16];
            this.NumBufferedDocs = 0;

            bool success = false;
            IndexOutput indexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(Segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION), context);
            try
            {
                FieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(Segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_EXTENSION), context);

                string codecNameIdx = formatName + CODEC_SFX_IDX;
                string codecNameDat = formatName + CODEC_SFX_DAT;
                CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT);
                CodecUtil.WriteHeader(FieldsStream, codecNameDat, VERSION_CURRENT);
                Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == FieldsStream.FilePointer);
                Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.FilePointer);

                IndexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
                indexStream = null;

                FieldsStream.WriteVInt(chunkSize);
                FieldsStream.WriteVInt(PackedInts.VERSION_CURRENT);

                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.CloseWhileHandlingException(indexStream);
                    Abort();
                }
            }
        }
Developer: joyanta, Project: lucene.net, Lines: 49, Source: CompressingStoredFieldsWriter.cs


Example 4: SegmentMerger

 // note, just like in codec apis Directory 'dir' is NOT the same as segmentInfo.dir!!
 public SegmentMerger(IList<AtomicReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, Directory dir, int termIndexInterval, MergeState.CheckAbort checkAbort, FieldInfos.FieldNumbers fieldNumbers, IOContext context, bool validate)
 {
     // validate incoming readers
     if (validate)
     {
         foreach (AtomicReader reader in readers)
         {
             reader.CheckIntegrity();
         }
     }
     MergeState = new MergeState(readers, segmentInfo, infoStream, checkAbort);
     Directory = dir;
     this.TermIndexInterval = termIndexInterval;
     this.Codec = segmentInfo.Codec;
     this.Context = context;
     this.FieldInfosBuilder = new FieldInfos.Builder(fieldNumbers);
     MergeState.SegmentInfo.DocCount = SetDocMaps();
 }
Developer: paulirwin, Project: lucene.net, Lines: 19, Source: SegmentMerger.cs


Example 5: Read

		public void  Read(Directory directory)
		{
			
			IndexInput input = directory.OpenInput(IndexFileNames.SEGMENTS);
			try
			{
				int format = input.ReadInt();
				if (format < 0)
				{
					// file contains explicit format info
					// check that it is a format we can understand
					if (format < FORMAT)
						throw new System.IO.IOException("Unknown format version: " + format);
					version = input.ReadLong(); // read version
					counter = input.ReadInt(); // read counter
				}
				else
				{
					// file is in old format without explicit format info
					counter = format;
				}
				
				for (int i = input.ReadInt(); i > 0; i--)
				{
					// read segmentInfos
					SegmentInfo si = new SegmentInfo(input.ReadString(), input.ReadInt(), directory);
					Add(si);
				}
				
				if (format >= 0)
				{
					// in old format the version number may be at the end of the file
					if (input.GetFilePointer() >= input.Length())
						version = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
					// old file format without version number
					else
						version = input.ReadLong(); // read version
				}
			}
			finally
			{
				input.Close();
			}
		}
Developer: ArsenShnurkov, Project: beagle-1, Lines: 44, Source: SegmentInfos.cs


Example 6: Reset

		/// <summary> Copy everything from src SegmentInfo into our instance.</summary>
		internal void  Reset(SegmentInfo src)
		{
			name = src.name;
			docCount = src.docCount;
			dir = src.dir;
			preLockless = src.preLockless;
			delGen = src.delGen;
			if (src.normGen == null)
			{
				normGen = null;
			}
			else
			{
				normGen = new long[src.normGen.Length];
				Array.Copy(src.normGen, 0, normGen, 0, src.normGen.Length);
			}
			isCompoundFile = src.isCompoundFile;
			hasSingleNormFile = src.hasSingleNormFile;
		}
Developer: zweib730, Project: beagrep, Lines: 20, Source: SegmentInfo.cs


Example 7: Lucene41PostingsReader

        // public static boolean DEBUG = false;
        /// <summary>
        /// Sole constructor. </summary>
        public Lucene41PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo, IOContext ioContext, string segmentSuffix)
        {
            bool success = false;
            IndexInput docIn = null;
            IndexInput posIn = null;
            IndexInput payIn = null;
            try
            {
                docIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene41PostingsFormat.DOC_EXTENSION), ioContext);
                Version = CodecUtil.CheckHeader(docIn, Lucene41PostingsWriter.DOC_CODEC, Lucene41PostingsWriter.VERSION_START, Lucene41PostingsWriter.VERSION_CURRENT);
                forUtil = new ForUtil(docIn);

                if (fieldInfos.HasProx())
                {
                    posIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene41PostingsFormat.POS_EXTENSION), ioContext);
                    CodecUtil.CheckHeader(posIn, Lucene41PostingsWriter.POS_CODEC, Version, Version);

                    if (fieldInfos.HasPayloads() || fieldInfos.HasOffsets())
                    {
                        payIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene41PostingsFormat.PAY_EXTENSION), ioContext);
                        CodecUtil.CheckHeader(payIn, Lucene41PostingsWriter.PAY_CODEC, Version, Version);
                    }
                }

                this.DocIn = docIn;
                this.PosIn = posIn;
                this.PayIn = payIn;
                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.CloseWhileHandlingException(docIn, posIn, payIn);
                }
            }
        }
Developer: Cefa68000, Project: lucenenet, Lines: 40, Source: Lucene41PostingsReader.cs


Example 8: SegmentCommitInfo

        /// <summary>
        /// Sole constructor.
        /// </summary>
        /// <param name="info">
        ///          <seealso cref="SegmentInfo"/> that we wrap </param>
        /// <param name="delCount">
        ///          number of deleted documents in this segment </param>
        /// <param name="delGen">
        ///          deletion generation number (used to name deletion files) </param>
        /// <param name="fieldInfosGen">
        ///          FieldInfos generation number (used to name field-infos files)
        ///  </param>
        public SegmentCommitInfo(SegmentInfo info, int delCount, long delGen, long fieldInfosGen)
        {
            this.Info = info;
            this.DelCount_Renamed = delCount;
            this.DelGen_Renamed = delGen;
            if (delGen == -1)
            {
                NextWriteDelGen = 1;
            }
            else
            {
                NextWriteDelGen = delGen + 1;
            }

            this.FieldInfosGen_Renamed = fieldInfosGen;
            if (fieldInfosGen == -1)
            {
                NextWriteFieldInfosGen = 1;
            }
            else
            {
                NextWriteFieldInfosGen = fieldInfosGen + 1;
            }
        }
Developer: Cefa68000, Project: lucenenet, Lines: 36, Source: SegmentCommitInfo.cs


Example 9: Initialize

		private void  Initialize(SegmentInfo si)
		{
			segment = si.name;
			this.si = si;
			
			bool success = false;
			
			try
			{
				// Use compound file directory for some files, if it exists
				Directory cfsDir = Directory();
				if (si.GetUseCompoundFile())
				{
					cfsReader = new CompoundFileReader(Directory(), segment + ".cfs");
					cfsDir = cfsReader;
				}
				
				// No compound file exists - use the multi-file format
				fieldInfos = new FieldInfos(cfsDir, segment + ".fnm");
				fieldsReader = new FieldsReader(cfsDir, segment, fieldInfos);
				
				// Verify two sources of "maxDoc" agree:
				if (fieldsReader.Size() != si.docCount)
				{
					throw new System.SystemException("doc counts differ for segment " + si.name + ": fieldsReader shows " + fieldsReader.Size() + " but segmentInfo shows " + si.docCount);
				}
				
				tis = new TermInfosReader(cfsDir, segment, fieldInfos);
				
				// NOTE: the bitvector is stored using the regular directory, not cfs
				if (HasDeletions(si))
				{
					deletedDocs = new BitVector(Directory(), si.GetDelFileName());
					
					// Verify # deletes does not exceed maxDoc for this segment:
					if (deletedDocs.Count() > MaxDoc())
					{
						throw new System.SystemException("number of deletes (" + deletedDocs.Count() + ") exceeds max doc (" + MaxDoc() + ") for segment " + si.name);
					}
				}
				
				// make sure that all index files have been read or are kept open
				// so that if an index update removes them we'll still have them
				freqStream = cfsDir.OpenInput(segment + ".frq");
				proxStream = cfsDir.OpenInput(segment + ".prx");
				OpenNorms(cfsDir);
				
				if (fieldInfos.HasVectors())
				{
					// open term vector files only as needed
					termVectorsReaderOrig = new TermVectorsReader(cfsDir, segment, fieldInfos);
				}
				success = true;
			}
			finally
			{
				
				// With lock-less commits, it's entirely possible (and
				// fine) to hit a FileNotFound exception above.  In
				// this case, we want to explicitly close any subset
				// of things that were opened so that we don't have to
				// wait for a GC to do so.
				if (!success)
				{
					DoClose();
				}
			}
		}
Developer: zweib730, Project: beagrep, Lines: 68, Source: SegmentReader.cs


Example 10: Get

		public static SegmentReader Get(Directory dir, SegmentInfo si, SegmentInfos sis, bool closeDir, bool ownDir)
		{
			SegmentReader instance;
			try
			{
				instance = (SegmentReader) System.Activator.CreateInstance(IMPL);
			}
			catch (System.Exception e)
			{
				throw new System.SystemException("cannot load SegmentReader class: " + e, e);
			}
			instance.Init(dir, sis, closeDir, ownDir);
			instance.Initialize(si);
			return instance;
		}
Developer: zweib730, Project: beagrep, Lines: 15, Source: SegmentReader.cs


Example 11: Get

		public static SegmentReader Get(SegmentInfo si)
		{
			return Get(false, si.dir, si, BufferedIndexInput.BUFFER_SIZE, true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
		}
Developer: nzdunic, Project: ravendb, Lines: 4, Source: SegmentReader.cs


Example 12: HasSeparateNorms

		internal static bool HasSeparateNorms(SegmentInfo si)
		{
			return si.HasSeparateNorms();
		}
Developer: zweib730, Project: beagrep, Lines: 4, Source: SegmentReader.cs


Example 13: TestStoredFields

 /// <summary> Test stored fields for a segment.</summary>
 private Status.StoredFieldStatus TestStoredFields(SegmentInfo info, SegmentReader reader, System.Globalization.NumberFormatInfo format)
 {
     var status = new Status.StoredFieldStatus();
     
     try
     {
         if (infoStream != null)
         {
             infoStream.Write("    test: stored fields.......");
         }
         
         // Scan stored fields for all documents
         for (int j = 0; j < info.docCount; ++j)
         {
             if (!reader.IsDeleted(j))
             {
                 status.docCount++;
                 Document doc = reader.Document(j);
                 status.totFields += doc.GetFields().Count;
             }
         }
         
         // Validate docCount
         if (status.docCount != reader.NumDocs())
         {
             throw new System.SystemException("docCount=" + reader.NumDocs() + " but saw " + status.docCount + " undeleted docs");
         }
         
         Msg(string.Format(format, "OK [{0:d} total field count; avg {1:f} fields per doc]", new object[] { status.totFields, (((float) status.totFields) / status.docCount) }));
     }
     catch (System.Exception e)
     {
         Msg("ERROR [" + System.Convert.ToString(e.Message) + "]");
         status.error = e;
         if (infoStream != null)
         {
             infoStream.WriteLine(e.StackTrace);
         }
     }
     
     return status;
 }
Developer: Nangal, Project: lucene.net, Lines: 43, Source: CheckIndex.cs


Example 14: ReWrite

			internal void  ReWrite(SegmentInfo si)
			{
				// NOTE: norms are re-written in regular directory, not cfs
				
				System.String oldFileName = si.GetNormFileName(this.number);
				if (oldFileName != null && !oldFileName.EndsWith("." + IndexFileNames.NORMS_EXTENSION))
				{
					// Mark this file for deletion.  Note that we don't
					// actually try to delete it until the new segments files is
					// successfully written:
					Enclosing_Instance.deleter.AddPendingFile(oldFileName);
				}
				
				si.AdvanceNormGen(this.number);
				IndexOutput out_Renamed = Enclosing_Instance.Directory().CreateOutput(si.GetNormFileName(this.number));
				try
				{
					out_Renamed.WriteBytes(bytes, Enclosing_Instance.MaxDoc());
				}
				finally
				{
					out_Renamed.Close();
				}
				this.dirty = false;
			}
Developer: zweib730, Project: beagrep, Lines: 25, Source: SegmentReader.cs


Example 15: Flush

		/// <summary>Flush all pending docs to a new segment </summary>
		internal int Flush(bool closeDocStore)
		{
			lock (this)
			{
				
				System.Diagnostics.Debug.Assert(AllThreadsIdle());
				
				System.Diagnostics.Debug.Assert(numDocsInRAM > 0);
				
				System.Diagnostics.Debug.Assert(nextDocID == numDocsInRAM);
				System.Diagnostics.Debug.Assert(waitQueue.numWaiting == 0);
				System.Diagnostics.Debug.Assert(waitQueue.waitingBytes == 0);
				
				InitFlushState(false);
				
				docStoreOffset = numDocsInStore;
				
				if (infoStream != null)
					Message("flush postings as segment " + flushState.segmentName + " numDocs=" + numDocsInRAM);
				
				bool success = false;
				
				try
				{
					
					if (closeDocStore)
					{
						System.Diagnostics.Debug.Assert(flushState.docStoreSegmentName != null);
						System.Diagnostics.Debug.Assert(flushState.docStoreSegmentName.Equals(flushState.segmentName));
						CloseDocStore();
						flushState.numDocsInStore = 0;
					}
					
					System.Collections.Hashtable threads = new System.Collections.Hashtable();
					for (int i = 0; i < threadStates.Length; i++)
						threads[threadStates[i].consumer] = threadStates[i].consumer;
					consumer.Flush(threads, flushState);
					
					if (infoStream != null)
					{
                        SegmentInfo si = new SegmentInfo(flushState.segmentName, flushState.numDocs, directory);
                        long newSegmentSize = si.SizeInBytes();
                        System.String message = System.String.Format(nf, "  oldRAMSize={0:d} newFlushedSize={1:d} docs/MB={2:f} new/old={3:%}",
                            new System.Object[] { numBytesUsed, newSegmentSize, (numDocsInRAM / (newSegmentSize / 1024.0 / 1024.0)), (100.0 * newSegmentSize / numBytesUsed) });
						Message(message);
					}
					
					flushedDocCount += flushState.numDocs;
					
					DoAfterFlush();
					
					success = true;
				}
				finally
				{
					if (!success)
					{
						Abort();
					}
				}
				
				System.Diagnostics.Debug.Assert(waitQueue.waitingBytes == 0);
				
				return flushState.numDocs;
			}
		}
Developer: Inzaghi2012, Project: teamlab.v7.5, Lines: 67, Source: DocumentsWriter.cs


Example 16: ReopenSegment

		internal virtual SegmentReader ReopenSegment(SegmentInfo si, bool doClone, bool openReadOnly)
		{
			lock (this)
			{
				bool deletionsUpToDate = (this.si.HasDeletions() == si.HasDeletions()) && (!si.HasDeletions() || this.si.GetDelFileName().Equals(si.GetDelFileName()));
				bool normsUpToDate = true;
				
				bool[] fieldNormsChanged = new bool[core.fieldInfos.Size()];
				int fieldCount = core.fieldInfos.Size();
				for (int i = 0; i < fieldCount; i++)
				{
					if (!this.si.GetNormFileName(i).Equals(si.GetNormFileName(i)))
					{
						normsUpToDate = false;
						fieldNormsChanged[i] = true;
					}
				}
				
				// if we're cloning we need to run through the reopenSegment logic
				// also if both old and new readers aren't readonly, we clone to avoid sharing modifications
				if (normsUpToDate && deletionsUpToDate && !doClone && openReadOnly && readOnly)
				{
					return this;
				}
				
				// When cloning, the incoming SegmentInfos should not
				// have any changes in it:
				System.Diagnostics.Debug.Assert(!doClone ||(normsUpToDate && deletionsUpToDate));
				
				// clone reader
				SegmentReader clone;
				try
				{
					if (openReadOnly)
						clone = (SegmentReader) System.Activator.CreateInstance(READONLY_IMPL);
					else
						clone = (SegmentReader) System.Activator.CreateInstance(IMPL);
				}
				catch (System.Exception e)
				{
					throw new System.SystemException("cannot load SegmentReader class: " + e, e);
				}
				
				bool success = false;
				try
				{
					core.IncRef();
					clone.core = core;
					clone.readOnly = openReadOnly;
					clone.si = si;
					clone.readBufferSize = readBufferSize;
					
					if (!openReadOnly && hasChanges)
					{
						// My pending changes transfer to the new reader
						clone.pendingDeleteCount = pendingDeleteCount;
						clone.deletedDocsDirty = deletedDocsDirty;
						clone.normsDirty = normsDirty;
						clone.hasChanges = hasChanges;
						hasChanges = false;
					}
					
					if (doClone)
					{
						if (deletedDocs != null)
						{
							deletedDocsRef.IncRef();
							clone.deletedDocs = deletedDocs;
							clone.deletedDocsRef = deletedDocsRef;
						}
					}
					else
					{
						if (!deletionsUpToDate)
						{
							// load deleted docs
							System.Diagnostics.Debug.Assert(clone.deletedDocs == null);
							clone.LoadDeletedDocs();
						}
						else if (deletedDocs != null)
						{
							deletedDocsRef.IncRef();
							clone.deletedDocs = deletedDocs;
							clone.deletedDocsRef = deletedDocsRef;
						}
					}
					
					clone.SetDisableFakeNorms(GetDisableFakeNorms());
					clone.norms = new System.Collections.Hashtable();
					
					// Clone norms
					for (int i = 0; i < fieldNormsChanged.Length; i++)
					{
						
						// Clone unchanged norms to the cloned reader
						if (doClone || !fieldNormsChanged[i])
						{
							System.String curField = core.fieldInfos.FieldInfo(i).name;
							Norm norm = (Norm) this.norms[curField];
							if (norm != null)
//......... remainder of this method omitted .........
Developer: nzdunic, Project: ravendb, Lines: 101, Source: SegmentReader.cs


Example 17: HasDeletions

		internal static bool HasDeletions(SegmentInfo si)
		{
			return si.HasDeletions();
		}
Developer: zweib730, Project: beagrep, Lines: 4, Source: SegmentReader.cs


Example 18: TestTermVectors

 /// <summary> Test term vectors for a segment.</summary>
 private Status.TermVectorStatus TestTermVectors(SegmentInfo info, SegmentReader reader, System.Globalization.NumberFormatInfo format)
 {
     var status = new Status.TermVectorStatus();
     
     try
     {
         if (infoStream != null)
         {
             infoStream.Write("    test: term vectors........");
         }
         
         for (int j = 0; j < info.docCount; ++j)
         {
             if (!reader.IsDeleted(j))
             {
                 status.docCount++;
                 ITermFreqVector[] tfv = reader.GetTermFreqVectors(j);
                 if (tfv != null)
                 {
                     status.totVectors += tfv.Length;
                 }
             }
         }
         
         Msg(System.String.Format(format, "OK [{0:d} total vector count; avg {1:f} term/freq vector fields per doc]", new object[] { status.totVectors, (((float) status.totVectors) / status.docCount) }));
     }
     catch (System.Exception e)
     {
         Msg("ERROR [" + System.Convert.ToString(e.Message) + "]");
         status.error = e;
         if (infoStream != null)
         {
             infoStream.WriteLine(e.StackTrace);
         }
     }
     
     return status;
 }
Developer: Nangal, Project: lucene.net, Lines: 39, Source: CheckIndex.cs


Example 19: UsesCompoundFile

		internal static bool UsesCompoundFile(SegmentInfo si)
		{
			return si.GetUseCompoundFile();
		}
Developer: zweib730, Project: beagrep, Lines: 4, Source: SegmentReader.cs


Example 20: Size

		protected internal override long Size(SegmentInfo info)
		{
			return SizeDocs(info);
		}
Developer: modulexcite, Project: Xamarin-Lucene.Net, Lines: 4, Source: LogDocMergePolicy.cs



Note: The Lucene.Net.Index.SegmentInfo class examples in this article were compiled by 纯净天空 from source code and documentation hosted on platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers; copyright remains with the original authors. Please consult each project's license before distributing or using the code, and do not reproduce this article without permission.

