This article collects and organizes typical usage examples of the Lucene.Net.Util.AttributeSource class in C#. If you have been wondering what AttributeSource is for, how to use it, or what real-world code that uses it looks like, the curated class examples below should help.
The AttributeSource class belongs to the Lucene.Net.Util namespace. Twenty code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# examples.
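Before diving into the examples, here is a minimal sketch of the core AttributeSource workflow — registering attributes, capturing their state, and restoring it later. It assumes the Lucene.Net 4.8-style attribute interfaces (ICharTermAttribute, ITypeAttribute) from the Lucene.Net.Analysis.TokenAttributes namespace; older 3.x ports use ITermAttribute and the Lucene.Net.Analysis.Tokenattributes namespace instead.

using System;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Util;

var src = new AttributeSource();

// AddAttribute<T>() registers the attribute, or returns the existing instance if it is already present.
ICharTermAttribute termAtt = src.AddAttribute<ICharTermAttribute>();
ITypeAttribute typeAtt = src.AddAttribute<ITypeAttribute>();

termAtt.Append("TestTerm");
typeAtt.Type = "TestType";

// Snapshot all attribute values...
AttributeSource.State state = src.CaptureState();

// ...mutate them...
termAtt.SetEmpty().Append("AnotherTerm");
typeAtt.Type = "AnotherType";

// ...and roll everything back to the snapshot.
src.RestoreState(state);
Console.WriteLine(termAtt.ToString()); // TestTerm
Console.WriteLine(typeAtt.Type);       // TestType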
Example 1: Accept
public override bool Accept(AttributeSource source)
{
    if (typeAtt == null)
    {
        typeAtt = source.AddAttribute<ITypeAttribute>();
    }
    return typeToMatch.Equals(typeAtt.Type);
}
Developer: Cefa68000, Project: lucenenet, Code lines: 9, Source: TokenTypeSinkFilter.cs
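This Accept override comes from TokenTypeSinkFilter, a SinkFilter used together with TeeSinkTokenFilter: tokens whose type equals typeToMatch are routed into a sink stream. A hedged usage sketch follows; the TeeSinkTokenFilter.NewSinkTokenStream call, the Lucene.Net.Analysis.Sinks namespace, and the "<NUM>" token type emitted by StandardTokenizer are assumed from the Lucene.Net 4.8 port and may differ in older versions.

using System.IO;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.Sinks;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Util;

// Tokenize some text and tee only the numeric tokens into a separate sink stream.
Tokenizer source = new StandardTokenizer(LuceneVersion.LUCENE_48, new StringReader("release 4 of lucene net"));
var tee = new TeeSinkTokenFilter(source);
TokenStream numbersOnly = tee.NewSinkTokenStream(new TokenTypeSinkFilter("<NUM>"));
// Consuming `tee` (Reset/IncrementToken) passes every token through the filter,
// and only the accepted ones become visible on `numbersOnly`.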
Example 2: LowerCaseKeywordTokenizer
protected LowerCaseKeywordTokenizer(AttributeSource source, System.IO.TextReader input)
    : base(source, input)
{
    offsetAtt = AddAttribute<IOffsetAttribute>();
    termAtt = AddAttribute<ITermAttribute>();
    isAsciiCasingSameAsInvariant = CultureInfo.InvariantCulture.CompareInfo.Compare("abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOPQRSTUVWXYZ", CompareOptions.IgnoreCase) == 0;
    invariantTextInfo = CultureInfo.InvariantCulture.TextInfo;
}
Developer: jrusbatch, Project: ravendb, Code lines: 9, Source: LowerCaseKeywordTokenizer.cs
Example 3: GetTermsEnum
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
{
    if (_terms.Size() == 0)
    {
        return TermsEnum.EMPTY;
    }
    return new SeekingTermSetTermsEnum(terms.Iterator(null), _terms, _ords);
}
Developer: Cefa68000, Project: lucenenet, Code lines: 9, Source: TermsQuery.cs
Example 4: KeywordTokenizer
public KeywordTokenizer(AttributeSource.AttributeFactory factory, Reader input, int bufferSize)
    : base(factory, input)
{
    if (bufferSize <= 0)
    {
        throw new System.ArgumentException("bufferSize must be > 0");
    }
    termAtt.ResizeBuffer(bufferSize);
}
Developer: Cefa68000, Project: lucenenet, Code lines: 9, Source: KeywordTokenizer.cs
Example 5: Accept
public override bool Accept(AttributeSource source)
{
    if (typeAtt == null)
    {
        typeAtt = source.AddAttribute<ITypeAttribute>();
    }
    // check to see if this is a Category
    return (typeToMatch.Equals(typeAtt.Type));
}
Developer: Cefa68000, Project: lucenenet, Code lines: 10, Source: TokenTypeSinkFilter.cs
Example 6: AttributeSource
/// <summary> An AttributeSource that uses the same attributes as the supplied one.</summary>
public AttributeSource(AttributeSource input)
{
    if (input == null)
    {
        throw new System.ArgumentException("input AttributeSource must not be null");
    }
    this.attributes = input.attributes;
    this.attributeImpls = input.attributeImpls;
    this.factory = input.factory;
}
Developer: andylaudotnet, Project: StockFoo, Code lines: 11, Source: AttributeSource.cs
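The copy constructor above is what lets a chain of token streams share attribute instances: the new source reuses the supplied source's attribute maps, so a write through one is visible through the other. A minimal sketch, assuming the 4.x-style generic attribute API:

using System;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Util;

var original = new AttributeSource();
ICharTermAttribute termAtt = original.AddAttribute<ICharTermAttribute>();

// `shared` points at the same attribute instances instead of creating its own.
var shared = new AttributeSource(original);
ICharTermAttribute sharedTermAtt = shared.GetAttribute<ICharTermAttribute>();

termAtt.Append("visible-through-both");
Console.WriteLine(ReferenceEquals(termAtt, sharedTermAtt)); // True
Console.WriteLine(sharedTermAtt.ToString());                // visible-through-both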
Example 7: TestCaptureState
public virtual void TestCaptureState()
{
    // init a first instance
    AttributeSource src = new AttributeSource();
    TermAttribute termAtt = (TermAttribute)src.AddAttribute(typeof(TermAttribute));
    TypeAttribute typeAtt = (TypeAttribute)src.AddAttribute(typeof(TypeAttribute));
    termAtt.SetTermBuffer("TestTerm");
    typeAtt.SetType("TestType");
    int hashCode = src.GetHashCode();
    AttributeSource.State state = src.CaptureState();
    // modify the attributes
    termAtt.SetTermBuffer("AnotherTestTerm");
    typeAtt.SetType("AnotherTestType");
    Assert.IsTrue(hashCode != src.GetHashCode(), "Hash code should be different");
    src.RestoreState(state);
    Assert.AreEqual("TestTerm", termAtt.Term());
    Assert.AreEqual("TestType", typeAtt.Type());
    Assert.AreEqual(hashCode, src.GetHashCode(), "Hash code should be equal after restore");
    // restore into an exact configured copy
    AttributeSource copy = new AttributeSource();
    copy.AddAttribute(typeof(TermAttribute));
    copy.AddAttribute(typeof(TypeAttribute));
    copy.RestoreState(state);
    Assert.AreEqual(src.GetHashCode(), copy.GetHashCode(), "Both AttributeSources should have same hashCode after restore");
    Assert.AreEqual(src, copy, "Both AttributeSources should be equal after restore");
    // init a second instance (with attributes in different order and one additional attribute)
    AttributeSource src2 = new AttributeSource();
    typeAtt = (TypeAttribute)src2.AddAttribute(typeof(TypeAttribute));
    Lucene.Net.Analysis.Tokenattributes.FlagsAttribute flagsAtt = (Lucene.Net.Analysis.Tokenattributes.FlagsAttribute)src2.AddAttribute(typeof(Lucene.Net.Analysis.Tokenattributes.FlagsAttribute));
    termAtt = (TermAttribute)src2.AddAttribute(typeof(TermAttribute));
    flagsAtt.SetFlags(12345);
    src2.RestoreState(state);
    Assert.AreEqual("TestTerm", termAtt.Term());
    Assert.AreEqual("TestType", typeAtt.Type());
    Assert.AreEqual(12345, flagsAtt.GetFlags(), "FlagsAttribute should not be touched");
    // init a third instance missing one Attribute
    AttributeSource src3 = new AttributeSource();
    termAtt = (TermAttribute)src3.AddAttribute(typeof(TermAttribute));
    try
    {
        src3.RestoreState(state);
        Assert.Fail("The third instance is missing the TypeAttribute, so restoreState() should throw IllegalArgumentException");
    }
    catch (System.ArgumentException iae)
    {
        // pass
    }
}
Developer: Rationalle, Project: ravendb, Code lines: 55, Source: TestAttributeSource.cs
Example 8: TestCaptureState
public virtual void TestCaptureState()
{
    // init a first instance
    AttributeSource src = new AttributeSource();
    ICharTermAttribute termAtt = src.AddAttribute<ICharTermAttribute>();
    ITypeAttribute typeAtt = src.AddAttribute<ITypeAttribute>();
    termAtt.Append("TestTerm");
    typeAtt.Type = "TestType";
    int hashCode = src.GetHashCode();
    AttributeSource.State state = src.CaptureState();
    // modify the attributes
    termAtt.SetEmpty().Append("AnotherTestTerm");
    typeAtt.Type = "AnotherTestType";
    Assert.IsTrue(hashCode != src.GetHashCode(), "Hash code should be different");
    src.RestoreState(state);
    Assert.AreEqual(termAtt.ToString(), "TestTerm");
    Assert.AreEqual(typeAtt.Type, "TestType");
    Assert.AreEqual(hashCode, src.GetHashCode(), "Hash code should be equal after restore");
    // restore into an exact configured copy
    AttributeSource copy = new AttributeSource();
    copy.AddAttribute<ICharTermAttribute>();
    copy.AddAttribute<ITypeAttribute>();
    copy.RestoreState(state);
    Assert.AreEqual(src.GetHashCode(), copy.GetHashCode(), "Both AttributeSources should have same hashCode after restore");
    Assert.AreEqual(src, copy, "Both AttributeSources should be equal after restore");
    // init a second instance (with attributes in different order and one additional attribute)
    AttributeSource src2 = new AttributeSource();
    typeAtt = src2.AddAttribute<ITypeAttribute>();
    IFlagsAttribute flagsAtt = src2.AddAttribute<IFlagsAttribute>();
    termAtt = src2.AddAttribute<ICharTermAttribute>();
    flagsAtt.Flags = 12345;
    src2.RestoreState(state);
    Assert.AreEqual(termAtt.ToString(), "TestTerm");
    Assert.AreEqual(typeAtt.Type, "TestType");
    Assert.AreEqual(12345, flagsAtt.Flags, "FlagsAttribute should not be touched");
    // init a third instance missing one Attribute
    AttributeSource src3 = new AttributeSource();
    termAtt = src3.AddAttribute<ICharTermAttribute>();
    try
    {
        src3.RestoreState(state);
        Assert.Fail("The third instance is missing the TypeAttribute, so restoreState() should throw IllegalArgumentException");
    }
    catch (System.ArgumentException iae)
    {
        // pass
    }
}
Developer: Cefa68000, Project: lucenenet, Code lines: 55, Source: TestAttributeSource.cs
Example 9: Create
/// <summary>
/// Creates the <seealso cref="TokenStream"/> of n-grams from the given <seealso cref="Reader"/> and <seealso cref="AttributeSource.AttributeFactory"/>. </summary>
public override Tokenizer Create(AttributeSource.AttributeFactory factory, Reader input)
{
    if (luceneMatchVersion.OnOrAfter(Version.LUCENE_44))
    {
        return new NGramTokenizer(luceneMatchVersion, factory, input, minGramSize, maxGramSize);
    }
    else
    {
        return new Lucene43NGramTokenizer(factory, input, minGramSize, maxGramSize);
    }
}
Developer: Cefa68000, Project: lucenenet, Code lines: 13, Source: NGramTokenizerFactory.cs
Example 10: Create
/// <summary>
/// Creates the <seealso cref="TokenStream"/> of n-grams from the given <seealso cref="TextReader"/> and <seealso cref="AttributeSource.AttributeFactory"/>. </summary>
public override Tokenizer Create(AttributeSource.AttributeFactory factory, TextReader input)
{
#pragma warning disable 612, 618
    if (luceneMatchVersion.OnOrAfter(LuceneVersion.LUCENE_44))
#pragma warning restore 612, 618
    {
        return new NGramTokenizer(luceneMatchVersion, factory, input, minGramSize, maxGramSize);
    }
    else
    {
#pragma warning disable 612, 618
        return new Lucene43NGramTokenizer(factory, input, minGramSize, maxGramSize);
#pragma warning restore 612, 618
    }
}
Developer: ChristopherHaws, Project: lucenenet, Code lines: 17, Source: NGramTokenizerFactory.cs
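Examples 9-10 show the factory picking between the 4.4+ NGramTokenizer and the legacy Lucene43NGramTokenizer. To see what the resulting tokenizer emits, here is a hedged sketch that constructs NGramTokenizer directly; the constructor signature, the Lucene.Net.Analysis.NGram namespace, and the gram ordering shown in the comment are assumed from the Lucene.Net 4.8 port.

using System;
using System.IO;
using Lucene.Net.Analysis.NGram;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Util;

// n-grams of length 2..3 over the input "lucene".
var tokenizer = new NGramTokenizer(LuceneVersion.LUCENE_48, new StringReader("lucene"), 2, 3);
ICharTermAttribute termAtt = tokenizer.AddAttribute<ICharTermAttribute>();

tokenizer.Reset();
while (tokenizer.IncrementToken())
{
    // Expected grams (4.4+ ordering): lu, luc, uc, uce, ce, cen, en, ene, ne
    Console.WriteLine(termAtt.ToString());
}
tokenizer.End();
tokenizer.Dispose();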
Example 11: Accept
public override bool Accept(AttributeSource source)
{
    // Accept only tokens whose 0-based position falls in [lower, upper);
    // the counter is advanced after every call, accepted or not.
    try
    {
        if (count >= lower && count < upper)
        {
            return true;
        }
        return false;
    }
    finally
    {
        count++;
    }
}
Developer: Cefa68000, Project: lucenenet, Code lines: 15, Source: TokenRangeSinkFilter.cs
Example 12: Create
public override Tokenizer Create(AttributeSource.AttributeFactory factory, TextReader input)
{
    if (luceneMatchVersion.OnOrAfter(LuceneVersion.LUCENE_44))
    {
        if (!EdgeNGramTokenFilter.Side.FRONT.Label.Equals(side))
        {
            throw new System.ArgumentException(typeof(EdgeNGramTokenizer).Name + " does not support backward n-grams as of Lucene 4.4");
        }
        return new EdgeNGramTokenizer(luceneMatchVersion, input, minGramSize, maxGramSize);
    }
    else
    {
        return new Lucene43EdgeNGramTokenizer(luceneMatchVersion, input, side, minGramSize, maxGramSize);
    }
}
Developer: Cefa68000, Project: lucenenet, Code lines: 15, Source: EdgeNGramTokenizerFactory.cs
Example 13: Create
public override Tokenizer Create(AttributeSource.AttributeFactory factory, TextReader input)
{
#pragma warning disable 612, 618
    if (luceneMatchVersion.OnOrAfter(LuceneVersion.LUCENE_44))
#pragma warning restore 612, 618
    {
        EdgeNGramTokenFilter.Side sideEnum;
        if (!Enum.TryParse(this.side, true, out sideEnum))
        {
            throw new System.ArgumentException(typeof(EdgeNGramTokenizer).Name + " does not support backward n-grams as of Lucene 4.4");
        }
        return new EdgeNGramTokenizer(luceneMatchVersion, input, minGramSize, maxGramSize);
    }
    else
    {
#pragma warning disable 612, 618
        return new Lucene43EdgeNGramTokenizer(luceneMatchVersion, input, side, minGramSize, maxGramSize);
#pragma warning restore 612, 618
    }
}
Developer: ChristopherHaws, Project: lucenenet, Code lines: 20, Source: EdgeNGramTokenizerFactory.cs
Example 14: Accept
public override bool Accept(AttributeSource source)
{
    if (termAtt == null)
    {
        termAtt = source.AddAttribute<ITermAttribute>();
    }
    try
    {
        // We don't care about the resulting value, only whether the term parses as a date.
        DateTime.Parse(termAtt.Term, dateFormat);
        return true;
    }
    catch (FormatException)
    {
    }
    return false;
}
Developer: synhershko, Project: lucene.net, Code lines: 21, Source: DateRecognizerSinkFilter.cs
Example 15: LowerCaseKeywordTokenizer
protected LowerCaseKeywordTokenizer(AttributeSource source, System.IO.TextReader input)
    : base(source, input)
{
    offsetAtt = AddAttribute<IOffsetAttribute>();
    termAtt = AddAttribute<ITermAttribute>();
}
Developer: j2jensen, Project: ravendb, Code lines: 6, Source: LowerCaseKeywordTokenizer.cs
Example 16: CJKTokenizer
public CJKTokenizer(AttributeSource source, TextReader _in)
    : base(source, _in)
{
    Init();
}
Developer: Cefa68000, Project: lucenenet, Code lines: 5, Source: CJKTokenizer.cs
Example 17: EdgeNGramTokenizer
/*
 * Creates EdgeNGramTokenizer that can generate n-grams in the sizes of the given range
 *
 * <param name="source"><see cref="AttributeSource"/> to use</param>
 * <param name="input"><see cref="TextReader"/> holding the input to be tokenized</param>
 * <param name="side">the <see cref="Side"/> from which to chop off an n-gram</param>
 * <param name="minGram">the smallest n-gram to generate</param>
 * <param name="maxGram">the largest n-gram to generate</param>
 */
public EdgeNGramTokenizer(AttributeSource source, TextReader input, Side side, int minGram, int maxGram)
    : base(source, input)
{
    init(side, minGram, maxGram);
}
Developer: raol, Project: lucene.net, Code lines: 15, Source: EdgeNGramTokenizer.cs
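The constructor above is the older API that takes an explicit Side; as Examples 12-13 note, backward n-grams were dropped in Lucene 4.4, so the newer constructor only produces front-edge grams. A hedged sketch using that version-based constructor (signature taken from Example 12; namespace and gram output assumed from the Lucene.Net 4.8 port):

using System;
using System.IO;
using Lucene.Net.Analysis.NGram;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Util;

// Front-edge n-grams of length 1..3 over "lucene": expected output l, lu, luc.
var tokenizer = new EdgeNGramTokenizer(LuceneVersion.LUCENE_48, new StringReader("lucene"), 1, 3);
ICharTermAttribute termAtt = tokenizer.AddAttribute<ICharTermAttribute>();

tokenizer.Reset();
while (tokenizer.IncrementToken())
{
    Console.WriteLine(termAtt.ToString());
}
tokenizer.End();
tokenizer.Dispose();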
Example 18: LetterTokenizer
/// <summary>
/// Construct a new LetterTokenizer using a given
/// <seealso cref="AttributeSource.AttributeFactory"/>.
/// </summary>
/// <param name="matchVersion">
///          Lucene version to match; see <a href="#version">above</a> </param>
/// <param name="factory">
///          the attribute factory to use for this <seealso cref="Tokenizer"/> </param>
/// <param name="in">
///          the input to split up into tokens </param>
public LetterTokenizer(LuceneVersion matchVersion, AttributeSource.AttributeFactory factory, TextReader @in)
    : base(matchVersion, factory, @in)
{
}
Developer: WakeflyCBass, Project: lucenenet, Code lines: 14, Source: LetterTokenizer.cs
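A hedged usage sketch of the factory-based constructor documented above; the Lucene.Net.Analysis.Core namespace and the DEFAULT_ATTRIBUTE_FACTORY constant are assumed from the Lucene.Net 4.8 port.

using System;
using System.Collections.Generic;
using System.IO;
using Lucene.Net.Analysis.Core;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Util;

// LetterTokenizer splits on anything that is not a letter: digits and punctuation separate tokens.
var tokenizer = new LetterTokenizer(
    LuceneVersion.LUCENE_48,
    AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY,
    new StringReader("3 quick-brown foxes"));
ICharTermAttribute termAtt = tokenizer.AddAttribute<ICharTermAttribute>();

var tokens = new List<string>();
tokenizer.Reset();
while (tokenizer.IncrementToken())
{
    tokens.Add(termAtt.ToString());
}
tokenizer.End();
tokenizer.Dispose();

Console.WriteLine(string.Join(", ", tokens)); // quick, brown, foxes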
Example 19: Accept
public override bool Accept(AttributeSource source)
{
    if (termAtt == null)
    {
        termAtt = source.AddAttribute<ICharTermAttribute>();
    }
    DateTime date; // We don't care about the date, just that we can parse it as a date
    if (formats == null)
    {
        return DateTime.TryParse(termAtt.ToString(), culture, style, out date);
    }
    else
    {
        return DateTime.TryParseExact(termAtt.ToString(), formats, culture, style, out date);
    }
}
Developer: ChristopherHaws, Project: lucenenet, Code lines: 17, Source: DateRecognizerSinkFilter.cs
Example 20: FuzzyTermsEnum
/// <summary>
/// Constructor for enumeration of all terms from specified <code>reader</code> which share a prefix of
/// length <code>prefixLength</code> with <code>term</code> and which have a fuzzy similarity >
/// <code>minSimilarity</code>.
/// <p>
/// After calling the constructor the enumeration is already pointing to the first
/// valid term if such a term exists.
/// </summary>
/// <param name="terms"> Delivers terms. </param>
/// <param name="atts"> <seealso cref="AttributeSource"/> created by the rewrite method of <seealso cref="MultiTermQuery"/>
///          that contains information about competitive boosts during rewrite. It is also used
///          to cache DFAs between segment transitions. </param>
/// <param name="term"> Pattern term. </param>
/// <param name="minSimilarity"> Minimum required similarity for terms from the reader. Pass an integer value
///          representing edit distance. Passing a fraction is deprecated. </param>
/// <param name="prefixLength"> Length of required common prefix. Default value is 0. </param>
/// <exception cref="IOException"> if there is a low-level IO error </exception>
public FuzzyTermsEnum(Terms terms, AttributeSource atts, Term term, float minSimilarity, int prefixLength, bool transpositions)
{
    if (!InstanceFieldsInitialized)
    {
        InitializeInstanceFields();
        InstanceFieldsInitialized = true;
    }
    if (minSimilarity >= 1.0f && minSimilarity != (int)minSimilarity)
    {
        throw new System.ArgumentException("fractional edit distances are not allowed");
    }
    if (minSimilarity < 0.0f)
    {
        throw new System.ArgumentException("minimumSimilarity cannot be less than 0");
    }
    if (prefixLength < 0)
    {
        throw new System.ArgumentException("prefixLength cannot be less than 0");
    }
    this.Terms = terms;
    this.Term_Renamed = term;
    // convert the string into a utf32 int[] representation for fast comparisons
    string utf16 = term.Text();
    //LUCENE TO-DO
    //this.TermText = new int[utf16.codePointCount(0, utf16.Length)];
    this.TermText = new int[utf16.Length];
    for (int cp, i = 0, j = 0; i < utf16.Length; i += Character.CharCount(cp))
    {
        TermText[j++] = cp = Character.CodePointAt(utf16, i);
    }
    this.TermLength = TermText.Length;
    this.DfaAtt = atts.AddAttribute<ILevenshteinAutomataAttribute>();
    // The prefix could be longer than the word.
    // It's kind of silly though. It means we must match the entire word.
    this.RealPrefixLength = prefixLength > TermLength ? TermLength : prefixLength;
    // if minSimilarity >= 1, we treat it as number of edits
    if (minSimilarity >= 1f)
    {
        this.MinSimilarity_Renamed = 0; // just driven by number of edits
        MaxEdits = (int)minSimilarity;
        Raw = true;
    }
    else
    {
        this.MinSimilarity_Renamed = minSimilarity;
        // calculate the maximum k edits for this similarity
        MaxEdits = InitialMaxDistance(this.MinSimilarity_Renamed, TermLength);
        Raw = false;
    }
    if (transpositions && MaxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE)
    {
        throw new System.NotSupportedException("with transpositions enabled, distances > " + LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE + " are not supported ");
    }
    this.Transpositions = transpositions;
    this.Scale_factor = 1.0f / (1.0f - this.MinSimilarity_Renamed);
    this.MaxBoostAtt = atts.AddAttribute<IMaxNonCompetitiveBoostAttribute>();
    Bottom = MaxBoostAtt.MaxNonCompetitiveBoost;
    BottomTerm = MaxBoostAtt.CompetitiveTerm;
    BottomChanged(null, true);
}
Developer: paulirwin, Project: lucene.net, Code lines: 80, Source: FuzzyTermsEnum.cs
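A hedged sketch of driving this constructor directly: the index path, the "body" field, and the MultiFields.GetTerms / DirectoryReader.Open calls are assumptions based on the Lucene.Net 4.8 port, and the term-enumeration API changed across 4.8 betas, so the Java-style Next() loop shown here may need adjusting. Passing minSimilarity as 2f means it is treated as a maximum edit distance of 2.

using System;
using System.IO;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Store;
using Lucene.Net.Util;

// "index" is a hypothetical path to an existing index that has a "body" field.
using var dir = FSDirectory.Open(new DirectoryInfo("index"));
using var reader = DirectoryReader.Open(dir);

Terms terms = MultiFields.GetTerms(reader, "body");
if (terms != null)
{
    var atts = new AttributeSource(); // carries competitive-boost info during MultiTermQuery rewrites
    var fuzzyEnum = new FuzzyTermsEnum(terms, atts, new Term("body", "lucene"),
                                       minSimilarity: 2f,  // >= 1, so interpreted as 2 edits
                                       prefixLength: 0,
                                       transpositions: true);
    BytesRef match;
    while ((match = fuzzyEnum.Next()) != null)
    {
        Console.WriteLine(match.Utf8ToString());
    }
}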
Note: The Lucene.Net.Util.AttributeSource class examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by various developers; copyright remains with the original authors, and distribution and use are subject to each project's license. Do not reproduce without permission.