• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    迪恩网络公众号

C# Documents.Field类代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了C#中Lucene.Net.Documents.Field的典型用法代码示例。如果您正苦于以下问题:C# Lucene.Net.Documents.Field类的具体用法?C# Lucene.Net.Documents.Field怎么用?C# Lucene.Net.Documents.Field使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。



Lucene.Net.Documents.Field类属于Lucene.Net.Documents命名空间,在下文中一共展示了Lucene.Net.Documents.Field类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C#代码示例。

示例1: Index

        /// <summary>
        /// Builds a four-document index in <c>dir</c>, one analyzed,
        /// unstored "field" per document, using a whitespace analyzer so
        /// each single-letter token is indexed as-is.
        /// </summary>
        void Index()
        {
            Lucene.Net.Index.IndexWriter wr = new Lucene.Net.Index.IndexWriter(dir, new Lucene.Net.Analysis.WhitespaceAnalyzer(), Lucene.Net.Index.IndexWriter.MaxFieldLength.UNLIMITED);

            // One document per content string; factored into a loop to
            // remove the repeated new-Document/new-Field/AddDocument
            // boilerplate of the original.
            string[] contents = { "a b c d", "a b a d", "a b e f", "x y z" };
            foreach (string content in contents)
            {
                Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
                doc.Add(new Lucene.Net.Documents.Field("field", content, Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED));
                wr.AddDocument(doc);
            }

            wr.Close();
        }
开发者ID:hanabi1224,项目名称:lucene.net,代码行数:29,代码来源:TestSerialization.cs


示例2: Doc

		/// <summary>
		/// Bundles every supplied field into a single new document.
		/// </summary>
		protected internal static Document Doc(Field[] fields)
		{
			Document doc = new Document();
			foreach (Field field in fields)
			{
				doc.Add(field);
			}
			return doc;
		}
开发者ID:VirtueMe,项目名称:ravendb,代码行数:9,代码来源:TestFieldMaskingSpanQuery.cs


示例3: TestFlushExceptions

		/// <summary>
		/// Injects an IOException into every explicit flush and verifies the
		/// writer survives: each of the 10 iterations adds 20 documents plus
		/// one extra that the failed flush is expected to drop, so the final
		/// index must contain exactly 200 documents.
		/// </summary>
		public virtual void  TestFlushExceptions()
		{
			
			MockRAMDirectory directory = new MockRAMDirectory();
			FailOnlyOnFlush failure = new FailOnlyOnFlush();
			directory.FailOn(failure);
			
			IndexWriter writer = new IndexWriter(directory, true, ANALYZER, true);
			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
			writer.SetMergeScheduler(cms);
			writer.SetMaxBufferedDocs(2);
			Document doc = new Document();
			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
			doc.Add(idField);
			for (int i = 0; i < 10; i++)
			{
				for (int j = 0; j < 20; j++)
				{
					idField.SetValue(System.Convert.ToString(i * 20 + j));
					writer.AddDocument(doc);
				}
				
				// Extra document; the induced flush failure below should lose
				// it (10 * 20 = 200 documents asserted at the end, not 210).
				writer.AddDocument(doc);
				
				failure.SetDoFail();
				try
				{
					writer.Flush();
					Assert.Fail("failed to hit IOException");
				}
				catch (System.IO.IOException)
				{
					// Expected injected failure; the exception itself is not
					// inspected, so no variable is declared (was an unused
					// `ioe`).  Re-arm for the next iteration.
					failure.ClearDoFail();
				}
			}
			
			writer.Close();
			IndexReader reader = IndexReader.Open(directory);
			Assert.AreEqual(200, reader.NumDocs());
			reader.Close();
			directory.Close();
		}
开发者ID:kstenson,项目名称:NHibernate.Search,代码行数:42,代码来源:TestConcurrentMergeScheduler.cs


示例4: AssignFieldValues

        /// <summary>
        /// Materializes one SitecoreItem from a search hit: the item is keyed
        /// by the URI field's string value, then every stored field of the
        /// hit's document is copied into its Fields map before it is added
        /// to <paramref name="items"/>.
        /// </summary>
        private static void AssignFieldValues(SearchResult result, Field uriField, List<SitecoreItem> items)
        {
            var sitecoreItem = new SitecoreItem(new ItemUri(uriField.StringValue()));

            foreach (Field storedField in result.Document.GetFields())
            {
                sitecoreItem.Fields[storedField.Name()] = storedField.StringValue();
            }

            items.Add(sitecoreItem);
        }
开发者ID:csteeg,项目名称:Sitecore-Item-Buckets,代码行数:10,代码来源:SearchHelper.cs


示例5: MakeDocument

		/// <summary>
		/// Wraps the given text in a one-field document: field "f" is
		/// analyzed, not stored, and has norms disabled.
		/// </summary>
		private static Document MakeDocument(System.String docText)
		{
			Field contentField = new Field("f", docText, Field.Store.NO, Field.Index.ANALYZED);
			contentField.SetOmitNorms(true);

			Document result = new Document();
			result.Add(contentField);
			return result;
		}
开发者ID:Rationalle,项目名称:ravendb,代码行数:8,代码来源:TestSloppyPhraseQuery.cs


示例6: SetUp

		/// <summary>
		/// Test fixture setup: indexes 1000 documents through the
		/// payload-producing analyzer, then opens <c>searcher</c> over the
		/// optimized index with the shared <c>similarity</c> applied to
		/// both writer and searcher.
		/// </summary>
		public override void  SetUp()
		{
			base.SetUp();

			RAMDirectory dir = new RAMDirectory();
			PayloadAnalyzer payloadAnalyzer = new PayloadAnalyzer(this);
			IndexWriter indexWriter = new IndexWriter(dir, payloadAnalyzer, true, IndexWriter.MaxFieldLength.LIMITED);
			indexWriter.SetSimilarity(similarity);
			//writer.infoStream = System.out;
			for (int docNum = 0; docNum < 1000; docNum++)
			{
				System.String english = English.IntToEnglish(docNum);

				Document doc = new Document();
				// Field that deliberately receives no payloads.
				doc.Add(new Field(PayloadHelper.NO_PAYLOAD_FIELD, english, Field.Store.YES, Field.Index.ANALYZED));
				doc.Add(new Field("field", english, Field.Store.YES, Field.Index.ANALYZED));
				// Same text twice so terms occur at multiple positions.
				doc.Add(new Field("multiField", english + "  " + english, Field.Store.YES, Field.Index.ANALYZED));
				indexWriter.AddDocument(doc);
			}
			indexWriter.Optimize();
			indexWriter.Close();

			searcher = new IndexSearcher(dir);
			searcher.SetSimilarity(similarity);
		}
开发者ID:VirtueMe,项目名称:ravendb,代码行数:24,代码来源:TestBoostingTermQuery.cs


示例7: AddNoProxDoc

		/// <summary>
		/// Adds one document holding two "aaa" fields whose term
		/// frequencies are omitted: "content3" stored and analyzed,
		/// "content4" stored but not indexed.
		/// </summary>
		private void  AddNoProxDoc(IndexWriter writer)
		{
			Document doc = new Document();

			Field content3 = new Field("content3", "aaa", Field.Store.YES, Field.Index.ANALYZED);
			content3.SetOmitTf(true);
			doc.Add(content3);

			Field content4 = new Field("content4", "aaa", Field.Store.YES, Field.Index.NO);
			content4.SetOmitTf(true);
			doc.Add(content4);

			writer.AddDocument(doc);
		}
开发者ID:Rationalle,项目名称:ravendb,代码行数:11,代码来源:TestBackwardsCompatibility.cs


示例8: TestMixedRAM

 /// <summary>
 /// Flips OmitTermFreqAndPositions on field "f2" after some documents are
 /// already buffered, then verifies that after a forced merge the omit
 /// bit ends up clear for "f1" and set for "f2".
 /// </summary>
 public virtual void  TestMixedRAM()
 {
     Directory ram = new MockRAMDirectory();
     Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
     IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.SetMaxBufferedDocs(10);
     writer.MergeFactor = 2;
     Document d = new Document();
     
     // this field will have Tf
     Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
     d.Add(f1);
     
     // this field will NOT have Tf
     Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
     d.Add(f2);
     
     for (int i = 0; i < 5; i++)
         writer.AddDocument(d);
     
     // From here on "f2" omits term freqs; the first 5 docs indexed it
     // with freqs, so the merge must resolve the mixed settings.
     f2.OmitTermFreqAndPositions = true;
     
     for (int i = 0; i < 20; i++)
         writer.AddDocument(d);
     
     // force merge
     writer.Optimize();
     
     // flush
     writer.Close();
     
     _TestUtil.CheckIndex(ram);
     
     SegmentReader reader = SegmentReader.GetOnlySegmentReader(ram);
     FieldInfos fi = reader.FieldInfos();
     Assert.IsTrue(!fi.FieldInfo("f1").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should not be set.");
     Assert.IsTrue(fi.FieldInfo("f2").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should be set.");
     
     reader.Close();
     ram.Close();
 }
开发者ID:Nangal,项目名称:lucene.net,代码行数:41,代码来源:TestOmitTf.cs


示例9: AddDoc

		/// <summary>
		/// Indexes <paramref name="text"/> as a stored, tokenized field
		/// named FIELD_NAME in a fresh single-field document.
		/// </summary>
		private void  AddDoc(IndexWriter writer, System.String text)
		{
			Document document = new Document();
			document.Add(new Field(FIELD_NAME, text, Field.Store.YES, Field.Index.TOKENIZED));
			writer.AddDocument(document);
		}
开发者ID:kstenson,项目名称:NHibernate.Search,代码行数:7,代码来源:HighlighterTest.cs


示例10: TestTermVectorCorruption3

		/// <summary>
		/// Regression test: a document mixing a stored-only field with a
		/// term-vector field must survive two writer sessions plus an
		/// Optimize() — reading every document and its term vectors
		/// afterwards must not throw.
		/// </summary>
		public virtual void  TestTermVectorCorruption3()
		{
			Directory dir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir, false, new StandardAnalyzer());
			writer.SetMaxBufferedDocs(2);
			writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
			writer.SetMergeScheduler(new SerialMergeScheduler());
			writer.SetMergePolicy(new LogDocMergePolicy());
			
			// One document carrying both a stored-only field and a
			// term-vector field.  (The original created a throwaway
			// Document and immediately overwrote it; that redundant
			// instantiation has been removed.)
			Document document = new Document();
			Field storedField = new Field("stored", "stored", Field.Store.YES, Field.Index.NO);
			document.Add(storedField);
			Field termVectorField = new Field("termVector", "termVector", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
			document.Add(termVectorField);
			for (int i = 0; i < 10; i++)
				writer.AddDocument(document);
			writer.Close();
			
			// Second writer session appends six more copies, then merges.
			writer = new IndexWriter(dir, false, new StandardAnalyzer());
			writer.SetMaxBufferedDocs(2);
			writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
			writer.SetMergeScheduler(new SerialMergeScheduler());
			writer.SetMergePolicy(new LogDocMergePolicy());
			for (int i = 0; i < 6; i++)
				writer.AddDocument(document);
			
			writer.Optimize();
			writer.Close();
			
			// Reading docs and their term vectors must succeed post-merge.
			IndexReader reader = IndexReader.Open(dir);
			for (int i = 0; i < 10; i++)
			{
				reader.GetTermFreqVectors(i);
				reader.Document(i);
			}
			reader.Close();
			dir.Close();
		}
开发者ID:vikasraz,项目名称:indexsearchutils,代码行数:40,代码来源:TestIndexWriter.cs


示例11: TestNoWaitClose

		/// <summary>
		/// Closes the writer with waitForMerges=false while many merge
		/// threads are in flight, then verifies no documents were lost:
		/// each iteration adds 202 documents and deletes 20, so every
		/// reopen must observe (1 + iter) * 182 documents.
		/// </summary>
		public virtual void  TestNoWaitClose()
		{
			RAMDirectory directory = new MockRAMDirectory();
			
			Document doc = new Document();
			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
			doc.Add(idField);
			
			// Pass 0 runs with autoCommit enabled, pass 1 without.
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = pass == 0;
				IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
				
				for (int iter = 0; iter < 10; iter++)
				{
					ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
					writer.SetMergeScheduler(cms);
					writer.SetMaxBufferedDocs(2);
					writer.SetMergeFactor(100);
					
					for (int j = 0; j < 201; j++)
					{
						idField.SetValue(System.Convert.ToString(iter * 201 + j));
						writer.AddDocument(doc);
					}
					
					// Delete every 5th id (20 total) from this iteration's batch.
					int delID = iter * 201;
					for (int j = 0; j < 20; j++)
					{
						writer.DeleteDocuments(new Term("id", System.Convert.ToString(delID)));
						delID += 5;
					}
					
					// Force a bunch of merge threads to kick off so we
					// stress out aborting them on close:
					writer.SetMergeFactor(3);
					writer.AddDocument(doc);
					writer.Flush();
					
					// Close WITHOUT waiting for the in-flight merges.
					writer.Close(false);
					
					IndexReader reader = IndexReader.Open(directory);
					Assert.AreEqual((1 + iter) * 182, reader.NumDocs());
					reader.Close();
					
					// Reopen
					writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
				}
				writer.Close();
			}
			
			directory.Close();
		}
开发者ID:kstenson,项目名称:NHibernate.Search,代码行数:53,代码来源:TestConcurrentMergeScheduler.cs


示例12: TestEnablingNorms

		/// <summary>
		/// Verifies that norms become enabled for a field as soon as a single
		/// document indexes it without SetOmitNorms(true) — once when that
		/// document arrives before the first flush (doc 8 of 10) and once
		/// after a flush has already happened (doc 26 of 27, buffer of 10).
		/// </summary>
		public virtual void  TestEnablingNorms()
		{
			RAMDirectory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			writer.SetMaxBufferedDocs(10);
			// Enable norms for only 1 doc, pre flush
			for (int j = 0; j < 10; j++)
			{
				Document doc = new Document();
				Field f = new Field("field", "aaa", Field.Store.YES, Field.Index.TOKENIZED);
				if (j != 8)
				{
					f.SetOmitNorms(true);
				}
				doc.Add(f);
				writer.AddDocument(doc);
			}
			writer.Close();
			
			Term searchTerm = new Term("field", "aaa");
			
			// All 10 documents must still match the term query.
			IndexSearcher searcher = new IndexSearcher(dir);
			Hits hits = searcher.Search(new TermQuery(searchTerm));
			Assert.AreEqual(10, hits.Length());
			searcher.Close();
			
			// Recreate the index; this time the norms-enabled document
			// arrives after a flush has already occurred.
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			writer.SetMaxBufferedDocs(10);
			// Enable norms for only 1 doc, post flush
			for (int j = 0; j < 27; j++)
			{
				Document doc = new Document();
				Field f = new Field("field", "aaa", Field.Store.YES, Field.Index.TOKENIZED);
				if (j != 26)
				{
					f.SetOmitNorms(true);
				}
				doc.Add(f);
				writer.AddDocument(doc);
			}
			writer.Close();
			searcher = new IndexSearcher(dir);
			hits = searcher.Search(new TermQuery(searchTerm));
			Assert.AreEqual(27, hits.Length());
			searcher.Close();
			
			// Final open/close is a smoke-check that the index is readable.
			IndexReader reader = IndexReader.Open(dir);
			reader.Close();
			
			dir.Close();
		}
开发者ID:vikasraz,项目名称:indexsearchutils,代码行数:51,代码来源:TestIndexWriter.cs


示例13: TestTermVectorCorruption2

		/// <summary>
		/// Indexes two documents without term vectors followed by one with
		/// them, under both autoCommit settings, then asserts that
		/// GetTermFreqVectors returns null for docs 0 and 1 and non-null
		/// for doc 2.
		/// </summary>
		public virtual void  TestTermVectorCorruption2()
		{
			Directory dir = new MockRAMDirectory();
			for (int iter = 0; iter < 4; iter++)
			{
				// Iterations 0-1 run with autoCommit=false, 2-3 with true.
				bool autoCommit = 1 == iter / 2;
				IndexWriter writer = new IndexWriter(dir, autoCommit, new StandardAnalyzer());
				writer.SetMaxBufferedDocs(2);
				writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
				writer.SetMergeScheduler(new SerialMergeScheduler());
				writer.SetMergePolicy(new LogDocMergePolicy());
				
				Document document = new Document();
				
				// Two documents carrying only a stored field (no vectors).
				Field storedField = new Field("stored", "stored", Field.Store.YES, Field.Index.NO);
				document.Add(storedField);
				writer.AddDocument(document);
				writer.AddDocument(document);
				
				// One document that adds a term-vector field as well.
				document = new Document();
				document.Add(storedField);
				Field termVectorField = new Field("termVector", "termVector", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
				document.Add(termVectorField);
				writer.AddDocument(document);
				writer.Optimize();
				writer.Close();
				
				IndexReader reader = IndexReader.Open(dir);
				Assert.IsTrue(reader.GetTermFreqVectors(0) == null);
				Assert.IsTrue(reader.GetTermFreqVectors(1) == null);
				Assert.IsTrue(reader.GetTermFreqVectors(2) != null);
				reader.Close();
			}
			dir.Close();
		}
开发者ID:vikasraz,项目名称:indexsearchutils,代码行数:35,代码来源:TestIndexWriter.cs


示例14: TestDeletesNumDocs

        /// <summary>
        /// Adds two documents, then deletes them one at a time, checking
        /// NumDocs through a fresh near-real-time reader after each step
        /// (2, then 1, then 0).
        /// </summary>
        public void TestDeletesNumDocs()
        {
            Directory directory = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(),
                                                       IndexWriter.MaxFieldLength.LIMITED);

            // Single reusable document: fixed body plus a mutable id field.
            Document doc = new Document();
            doc.Add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
            Field idField = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
            doc.Add(idField);

            idField.SetValue("0");
            writer.AddDocument(doc);
            idField.SetValue("1");
            writer.AddDocument(doc);

            IndexReader reader = writer.GetReader();
            Assert.AreEqual(2, reader.NumDocs());
            reader.Close();

            writer.DeleteDocuments(new Term("id", "0"));
            reader = writer.GetReader();
            Assert.AreEqual(1, reader.NumDocs());
            reader.Close();

            writer.DeleteDocuments(new Term("id", "1"));
            reader = writer.GetReader();
            Assert.AreEqual(0, reader.NumDocs());
            reader.Close();

            writer.Close();
            directory.Close();
        }
开发者ID:Nangal,项目名称:lucene.net,代码行数:30,代码来源:TestIndexWriterReader.cs


示例15: TestBasic

 /// <summary>
 /// Indexes 30 documents with growing "term"-repeating content into two
 /// parallel fields — "noTf" (term freqs/positions omitted) and "tf" —
 /// then runs term queries and a boolean conjunction through counting
 /// hit collectors; the final MUST+MUST query has to hit exactly 15 docs.
 /// </summary>
 public virtual void  TestBasic()
 {
     Directory dir = new MockRAMDirectory();
     Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
     IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.MergeFactor = 2;
     writer.SetMaxBufferedDocs(2);
     writer.SetSimilarity(new SimpleSimilarity());
     
     
     // sb accumulates one more "term " per document, so doc i contains
     // the term i+1 times; the "notf"/" tf" markers alternate by parity.
     System.Text.StringBuilder sb = new System.Text.StringBuilder(265);
     System.String term = "term";
     for (int i = 0; i < 30; i++)
     {
         Document d = new Document();
         sb.Append(term).Append(" ");
         System.String content = sb.ToString();
         Field noTf = new Field("noTf", content + (i % 2 == 0?"":" notf"), Field.Store.NO, Field.Index.ANALYZED);
         noTf.OmitTermFreqAndPositions = true;
         d.Add(noTf);
         
         Field tf = new Field("tf", content + (i % 2 == 0?" tf":""), Field.Store.NO, Field.Index.ANALYZED);
         d.Add(tf);
         
         writer.AddDocument(d);
         //System.out.println(d);
     }
     
     writer.Optimize();
     // flush
     writer.Close();
     _TestUtil.CheckIndex(dir);
     
     /*
     * Verify the index
     */
     Searcher searcher = new IndexSearcher(dir, true);
     searcher.Similarity = new SimpleSimilarity();
     
     // Queries against the common term and the parity-marker terms.
     Term a = new Term("noTf", term);
     Term b = new Term("tf", term);
     Term c = new Term("noTf", "notf");
     Term d2 = new Term("tf", "tf");
     TermQuery q1 = new TermQuery(a);
     TermQuery q2 = new TermQuery(b);
     TermQuery q3 = new TermQuery(c);
     TermQuery q4 = new TermQuery(d2);
     
     
     searcher.Search(q1, new AnonymousClassCountingHitCollector(this));
     //System.out.println(CountingHitCollector.getCount());
     
     
     searcher.Search(q2, new AnonymousClassCountingHitCollector1(this));
     //System.out.println(CountingHitCollector.getCount());
     
     
     
     
     
     searcher.Search(q3, new AnonymousClassCountingHitCollector2(this));
     //System.out.println(CountingHitCollector.getCount());
     
     
     searcher.Search(q4, new AnonymousClassCountingHitCollector3(this));
     //System.out.println(CountingHitCollector.getCount());
     
     
     
     // Conjunction across the omit-Tf and normal fields.
     BooleanQuery bq = new BooleanQuery();
     bq.Add(q1, Occur.MUST);
     bq.Add(q4, Occur.MUST);
     
     searcher.Search(bq, new AnonymousClassCountingHitCollector4(this));
     Assert.IsTrue(15 == CountingHitCollector.GetCount());
     
     searcher.Close();
     dir.Close();
 }
开发者ID:Nangal,项目名称:lucene.net,代码行数:79,代码来源:TestOmitTf.cs


示例16: TestNoPrxFile

 /// <summary>
 /// With term freqs/positions omitted on the only indexed field, no
 /// proximity (.prx) data should be written — checked after the first
 /// commit and again after a forced merge.
 /// </summary>
 public virtual void  TestNoPrxFile()
 {
     Directory ram = new MockRAMDirectory();
     Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
     IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.SetMaxBufferedDocs(3);
     writer.MergeFactor = 2;
     writer.UseCompoundFile = false;

     Field omitTfField = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
     omitTfField.OmitTermFreqAndPositions = true;

     Document doc = new Document();
     doc.Add(omitTfField);

     for (int i = 0; i < 30; i++)
     {
         writer.AddDocument(doc);
     }

     writer.Commit();
     AssertNoPrx(ram);

     // Force merge, then flush everything out by closing the writer.
     writer.Optimize();
     writer.Close();

     AssertNoPrx(ram);
     _TestUtil.CheckIndex(ram);
     ram.Close();
 }
开发者ID:Nangal,项目名称:lucene.net,代码行数:30,代码来源:TestOmitTf.cs


示例17: Create

		/// <summary>
		/// Builds the randomized sort-test index: two document templates
		/// (doc2 lacks the string fields so readers see nulls) sharing one
		/// Field instance per column, refilled with boundary-biased random
		/// values for each of NUM_DOCS documents.
		/// NOTE(review): the tail of this method is truncated in this
		/// capture; the visible portion ends mid-loop.
		/// </summary>
		private void  Create()
		{
			
			// NOTE: put seed in here to make failures
			// deterministic, but do not commit with a seed (to
			// better test):
			dir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			writer.SetMaxBufferedDocs(17);
			
			Document doc = new Document();
			Document doc2 = new Document();
			
			// Field instances are shared between doc and doc2 and mutated
			// via SetValue inside the loop below.
			Field id = new Field("id", "", Field.Store.YES, Field.Index.NO);
			doc.Add(id);
			doc2.Add(id);
			
			Field contents = new Field("contents", "", Field.Store.NO, Field.Index.ANALYZED);
			doc.Add(contents);
			doc2.Add(contents);
			
			Field byteField = new Field("byte", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(byteField);
			doc2.Add(byteField);
			
			Field shortField = new Field("short", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(shortField);
			doc2.Add(shortField);
			
			Field intField = new Field("int", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(intField);
			doc2.Add(intField);
			
			Field longField = new Field("long", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(longField);
			doc2.Add(longField);
			
			Field floatField = new Field("float", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(floatField);
			doc2.Add(floatField);
			
			Field doubleField = new Field("double", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(doubleField);
			doc2.Add(doubleField);
			
			// we use two diff string fields so our FieldCache usage
			// is less suspicious to cache inspection
			Field stringField = new Field("string", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(stringField);
			Field stringFieldIdx = new Field("stringIdx", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(stringFieldIdx);
			// doc2 doesn't have stringField or stringFieldIdx, so we get nulls
			
			for (int i = 0; i < NUM_DOCS; i++)
			{
				id.SetValue("" + i);
				// contents: rarer suffixes (z/y/x) at coarser strides.
				if (i % 1000 == 0)
				{
					contents.SetValue("a b c z");
				}
				else if (i % 100 == 0)
				{
					contents.SetValue("a b c y");
				}
				else if (i % 10 == 0)
				{
					contents.SetValue("a b c x");
				}
				else
				{
					contents.SetValue("a b c");
				}
				byteField.SetValue("" + NextInt((sbyte) System.SByte.MinValue, (sbyte) System.SByte.MaxValue));
				// Numeric fields are biased toward their min/max boundaries
				// with probability 1/10 each.
				if (NextInt(10) == 3)
				{
					shortField.SetValue("" + System.Int16.MinValue);
				}
				else if (NextInt(10) == 7)
				{
					shortField.SetValue("" + System.Int16.MaxValue);
				}
				else
				{
					shortField.SetValue("" + NextInt(System.Int16.MinValue, System.Int16.MaxValue));
				}
				
				if (NextInt(10) == 3)
				{
					intField.SetValue("" + System.Int32.MinValue);
				}
				else if (NextInt(10) == 7)
				{
					intField.SetValue("" + System.Int32.MaxValue);
				}
				else
				{
					intField.SetValue("" + this.r.Next());
				}
				
				if (NextInt(10) == 3)
//......... part of the code omitted here .........
开发者ID:Rationalle,项目名称:ravendb,代码行数:101,代码来源:TestStressSort.cs


示例18: AddDoc

		/// <summary>
		/// Indexes the text as a stored, analyzed "key" field carrying the
		/// supplied boost.
		/// </summary>
		private void  AddDoc(System.String text, IndexWriter iw, float boost)
		{
			Field keyField = new Field("key", text, Field.Store.YES, Field.Index.ANALYZED);
			keyField.SetBoost(boost);

			Document document = new Document();
			document.Add(keyField);
			iw.AddDocument(document);
		}
开发者ID:kstenson,项目名称:NHibernate.Search,代码行数:8,代码来源:TestMatchAllDocsQuery.cs


示例19: TestDeleteMerging

		/// <summary>
		/// Interleaves additions and deletions while the concurrent merge
		/// scheduler runs, then verifies no deletes were lost: 10 batches
		/// of 100 adds with 10 deletes each leave exactly 450 documents
		/// (the delete loop re-targets some already-deleted ids).
		/// </summary>
		public virtual void  TestDeleteMerging()
		{
			
			RAMDirectory directory = new MockRAMDirectory();
			
			IndexWriter writer = new IndexWriter(directory, true, ANALYZER, true);
			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
			writer.SetMergeScheduler(cms);
			
			LogDocMergePolicy mp = new LogDocMergePolicy(writer);
			writer.SetMergePolicy(mp);
			
			// Force degenerate merging so we can get a mix of
			// merging of segments with and without deletes at the
			// start:
			mp.SetMinMergeDocs(1000);
			
			Document doc = new Document();
			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
			doc.Add(idField);
			for (int i = 0; i < 10; i++)
			{
				for (int j = 0; j < 100; j++)
				{
					idField.SetValue(System.Convert.ToString(i * 100 + j));
					writer.AddDocument(doc);
				}
				
				// Delete every 10th id starting at i, spanning the whole
				// index so far (not only this batch).
				int delID = i;
				while (delID < 100 * (1 + i))
				{
					writer.DeleteDocuments(new Term("id", "" + delID));
					delID += 10;
				}
				
				writer.Flush();
			}
			
			writer.Close();
			IndexReader reader = IndexReader.Open(directory);
			// Verify that we did not lose any deletes...
			Assert.AreEqual(450, reader.NumDocs());
			reader.Close();
			directory.Close();
		}
开发者ID:kstenson,项目名称:NHibernate.Search,代码行数:45,代码来源:TestConcurrentMergeScheduler.cs


示例20: TestSubclassConcurrentMergeScheduler

		/// <summary>
		/// Installs a custom merge scheduler and an injected merge failure,
		/// then verifies the subclass hooks actually ran (thread creation,
		/// merge call, exception handler) and that the unhandled merge
		/// exception was recorded by ConcurrentMergeScheduler.
		/// </summary>
		public virtual void  TestSubclassConcurrentMergeScheduler()
		{
			MockRAMDirectory dir = new MockRAMDirectory();
			dir.FailOn(new FailOnlyOnMerge());
			
			Document doc = new Document();
			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
			doc.Add(idField);
			
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			MyMergeScheduler ms = new MyMergeScheduler(this);
			writer.SetMergeScheduler(ms);
			writer.SetMaxBufferedDocs(2);
			writer.SetRAMBufferSizeMB(Lucene.Net.Index.IndexWriter.DISABLE_AUTO_FLUSH);
			// Small buffer + 20 docs forces merges through MyMergeScheduler.
			for (int i = 0; i < 20; i++)
				writer.AddDocument(doc);
			
			// Wait for any in-flight merge threads before closing.
			ms.Sync();
			writer.Close();
			
			// Flags set by the MyMergeScheduler / FailOnlyOnMerge hooks.
			Assert.IsTrue(mergeThreadCreated);
			Assert.IsTrue(mergeCalled);
			Assert.IsTrue(excCalled);
			dir.Close();
			Assert.IsTrue(ConcurrentMergeScheduler.AnyUnhandledExceptions());
		}
开发者ID:kstenson,项目名称:NHibernate.Search,代码行数:26,代码来源:TestMergeSchedulerExternal.cs



注:本文中的Lucene.Net.Documents.Field类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
C# Index.IndexReader类代码示例发布时间:2022-05-24
下一篇:
C# Documents.Document类代码示例发布时间:2022-05-24
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap