This article collects typical usage examples of the C# class Mono.CSharp.Tokenizer. If you are wondering what the Tokenizer class is for, how to use it, or what real-world code that uses it looks like, the selected examples below should help.
The Tokenizer class belongs to the Mono.CSharp namespace. Eight code examples are shown below, ordered by popularity by default.
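Most of the examples share the same basic pattern: open the source file, wrap the stream in a SeekableStreamReader, construct a Tokenizer, and pull tokens in a loop until Token.EOF. The sketch below condenses that pattern. It is illustrative only: CountTokens is a made-up helper name, and the Tokenizer constructor takes different arguments in different Mono versions (a CompilationUnit and CompilerContext in older releases, a CompilationSourceFile and ParserSession in newer ones), as the examples that follow show.

// Minimal sketch of the common tokenize-and-count loop, assuming a newer Mono tree
// where Tokenizer takes (reader, file, session); see Examples 2 and 4 below.
// Assumes: using System; using System.IO; using Mono.CSharp;
static void CountTokens (Stream input, CompilationSourceFile file, ParserSession session)
{
    using (input) {
        var reader = new SeekableStreamReader (input, System.Text.Encoding.UTF8);
        var lexer = new Tokenizer (reader, file, session);

        int token, tokens = 0, errors = 0;
        while ((token = lexer.token ()) != Token.EOF) {
            tokens++;
            if (token == Token.ERROR)       // lexical error reported by the tokenizer
                errors++;
        }
        Console.WriteLine ("Tokenized: " + tokens + " tokens, " + errors + " errors");
    }
}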
Example 1: tokenize_file
void tokenize_file (CompilationUnit file, CompilerContext ctx)
{
    Stream input;

    try {
        input = File.OpenRead (file.Name);
    } catch {
        Report.Error (2001, "Source file `" + file.Name + "' could not be found");
        return;
    }

    using (input){
        SeekableStreamReader reader = new SeekableStreamReader (input, RootContext.Encoding);
        Tokenizer lexer = new Tokenizer (reader, file, ctx);
        int token, tokens = 0, errors = 0;

        while ((token = lexer.token ()) != Token.EOF){
            tokens++;
            if (token == Token.ERROR)
                errors++;
        }
        Console.WriteLine ("Tokenized: " + tokens + " found " + errors + " errors");
    }

    return;
}
Developer: nekresh; Project: mono; Lines: 26; Source: driver.cs
Example 2: tokenize_file
void tokenize_file (SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
    Stream input;

    try {
        input = File.OpenRead (sourceFile.Name);
    } catch {
        Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
        return;
    }

    using (input) {
        SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding);
        var file = new CompilationSourceFile (module, sourceFile);

        if (sourceFile.FileType == SourceFileType.CSharp) {
            Tokenizer lexer = new Tokenizer (reader, file, session);
            int token, tokens = 0, errors = 0;
            while ((token = lexer.token ()) != Token.EOF){
                tokens++;
                if (token == Token.ERROR)
                    errors++;
            }
        } else {
            Mono.PlayScript.Tokenizer lexer = new Mono.PlayScript.Tokenizer (reader, file, session);
            lexer.ParsingPlayScript = sourceFile.PsExtended;
            int token, tokens = 0, errors = 0;
            while ((token = lexer.token ()) != Mono.PlayScript.Token.EOF){
                tokens++;
                if (token == Mono.PlayScript.Token.ERROR)
                    errors++;
            }
        }
    }

    return;
}
Developer: robterrell; Project: playscript-mono; Lines: 39; Source: driver.cs
Example 3: ToplevelOrStatement
//
// Disambiguates the input string to determine if we
// want to process a statement or if we want to
// process a compilation unit.
//
// This is done using a top-down predictive parser,
// since the yacc/jay parser cannot disambiguate this
// without more than one lookahead token. There are very
// few ambiguities.
//
InputKind ToplevelOrStatement (SeekableStreamReader seekable)
{
    Tokenizer tokenizer = new Tokenizer (seekable, source_file);
    int t = tokenizer.token ();
    switch (t){
    case Token.EOF:
        return InputKind.EOF;

    // These are toplevels
    case Token.EXTERN:
    case Token.OPEN_BRACKET:
    case Token.ABSTRACT:
    case Token.CLASS:
    case Token.ENUM:
    case Token.INTERFACE:
    case Token.INTERNAL:
    case Token.NAMESPACE:
    case Token.PRIVATE:
    case Token.PROTECTED:
    case Token.PUBLIC:
    case Token.SEALED:
    case Token.STATIC:
    case Token.STRUCT:
        return InputKind.CompilationUnit;

    // Definitely expression
    case Token.FIXED:
    case Token.BOOL:
    case Token.BYTE:
    case Token.CHAR:
    case Token.DECIMAL:
    case Token.DOUBLE:
    case Token.FLOAT:
    case Token.INT:
    case Token.LONG:
    case Token.NEW:
    case Token.OBJECT:
    case Token.SBYTE:
    case Token.SHORT:
    case Token.STRING:
    case Token.UINT:
    case Token.ULONG:
        return InputKind.StatementOrExpression;

    // These need disambiguation help
    case Token.USING:
        t = tokenizer.token ();
        if (t == Token.EOF)
            return InputKind.EOF;
        if (t == Token.IDENTIFIER)
            return InputKind.CompilationUnit;
        return InputKind.StatementOrExpression;

    // Distinguish between:
    //    delegate opt_anonymous_method_signature block
    //    delegate type
    case Token.DELEGATE:
        t = tokenizer.token ();
        if (t == Token.EOF)
            return InputKind.EOF;
        if (t == Token.OPEN_PARENS || t == Token.OPEN_BRACE)
            return InputKind.StatementOrExpression;
        return InputKind.CompilationUnit;

    // Distinguish between:
    //    unsafe block
    //    unsafe as modifier of a type declaration
    case Token.UNSAFE:
        t = tokenizer.token ();
        if (t == Token.EOF)
            return InputKind.EOF;
        if (t == Token.OPEN_PARENS)
            return InputKind.StatementOrExpression;
        return InputKind.CompilationUnit;

    // These are errors: we list explicitly what we had
    // from the grammar, ERROR and then everything else
    case Token.READONLY:
    case Token.OVERRIDE:
    case Token.ERROR:
        return InputKind.Error;

    // This catches everything else allowed by
    // expressions. We could add one-by-one use cases
    // if needed.
    default:
//......... remaining code omitted .........
Developer: KAW0; Project: Alter-Native; Lines: 101; Source: eval.cs
Example 4: CSharpParser
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, Report report, ParserSession session)
{
    this.file = file;
    current_container = current_namespace = file;

    this.module = file.Module;
    this.compiler = file.Compiler;
    this.settings = compiler.Settings;
    this.report = report;

    lang_version = settings.Version;
    yacc_verbose_flag = settings.VerboseParserFlag;
    doc_support = settings.DocumentationFile != null;
    lexer = new Tokenizer (reader, file, session);
    oob_stack = new Stack<object> ();
    lbag = session.LocationsBag;
    use_global_stacks = session.UseJayGlobalArrays;
    parameters_bucket = session.ParametersStack;
}
Developer: segaman; Project: NRefactory; Lines: 19; Source: cs-parser.cs
Example 5: CSharpParser
public CSharpParser (SeekableStreamReader reader, CompilationUnit file, CompilerContext ctx)
{
    if (RootContext.EvalMode)
        undo = new Undo ();

    this.file = file;
    this.compiler = ctx;
    current_namespace = new NamespaceEntry (ctx, null, file, null);
    current_class = current_namespace.SlaveDeclSpace;
    current_container = current_class.PartialContainer; // == RootContext.ToplevelTypes
    oob_stack.Clear ();
    lexer = new Tokenizer (reader, file, ctx);
    use_global_stacks = true;
}
Developer: Ein; Project: monodevelop; Lines: 15; Source: cs-parser.cs
Example 6: Position
public Position (Tokenizer t)
{
    position = t.reader.Position;
    line = t.line;
    ref_line = t.ref_line;
    col = t.col;
    hidden = t.hidden;
    putback_char = t.putback_char;
    previous_col = t.previous_col;
    if (t.ifstack != null && t.ifstack.Count != 0)
        ifstack = (Stack)t.ifstack.Clone ();
    parsing_generic_less_than = t.parsing_generic_less_than;
    current_token = t.current_token;
}
Developer: calumjiao; Project: Mono-Class-Libraries; Lines: 14; Source: cs-tokenizer.cs
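The Position constructor above snapshots the tokenizer's complete state: the underlying stream offset, line/column bookkeeping, the pending putback character, the preprocessor #if stack and the current token. In the Mono sources such snapshots let the parser read ahead speculatively and then rewind the lexer if the guess was wrong (the tokenizer exposes push/pop-style helpers built on Position; their exact names are not shown here). A generic sketch of that save-and-restore idea, using purely hypothetical names, looks like this:

// Hypothetical illustration of the snapshot/restore pattern that Position enables.
// MiniLexer, SaveState and RestoreState are made-up names, not Mono API.
class MiniLexer
{
    readonly string text;
    int pos, line = 1, col;

    public MiniLexer (string text) { this.text = text; }

    public (int pos, int line, int col) SaveState () => (pos, line, col);

    public void RestoreState ((int pos, int line, int col) s)
    {
        pos = s.pos; line = s.line; col = s.col;
    }

    public char Next ()
    {
        if (pos >= text.Length)
            return '\0';                      // end of input
        char c = text[pos++];
        if (c == '\n') { line++; col = 0; } else col++;
        return c;
    }
}

A caller can take a snapshot with SaveState, read a few characters speculatively, and call RestoreState if the lookahead did not match — the same kind of backtracking the real Position class makes possible for the C# tokenizer.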
Example 7: CSharpParser
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file)
{
    this.file = file;
    current_namespace = file.NamespaceContainer;

    this.module = current_namespace.Module;
    this.compiler = module.Compiler;
    this.settings = compiler.Settings;
    lang_version = settings.Version;
    doc_support = settings.Documentation != null;
    current_class = current_namespace.SlaveDeclSpace;
    current_container = current_class.PartialContainer; // == RootContext.ToplevelTypes
    oob_stack.Clear ();
    lexer = new Tokenizer (reader, file, compiler);
    use_global_stacks = true;
}
Developer: okrmartin; Project: monodevelop; Lines: 17; Source: cs-parser.cs
Example 8: CSharpParser
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, Report report)
{
    this.file = file;
    current_container = current_namespace = file;

    this.module = file.Module;
    this.compiler = file.Compiler;
    this.settings = compiler.Settings;
    this.report = report;

    lang_version = settings.Version;
    yacc_verbose_flag = settings.VerboseParserFlag;
    doc_support = settings.DocumentationFile != null;
    oob_stack.Clear ();
    lexer = new Tokenizer (reader, file);
#if FULL_AST
    lbag = new LocationsBag ();
#else
    lbag = null;
#endif
    use_global_stacks = true;
}
Developer: xamarin-release-manager; Project: monodevelop; Lines: 24; Source: cs-parser.cs
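Example 8 gates the LocationsBag allocation behind a FULL_AST conditional-compilation symbol: when the parser is built with FULL_AST defined (as tooling such as NRefactory appears to do in order to keep precise token locations), lbag records location information; otherwise it stays null and that bookkeeping is skipped entirely. The snippet below illustrates the general #if / #else mechanism with a hypothetical symbol and class, not the actual parser code:

// Hypothetical illustration of compile-time feature gating, as used with FULL_AST above.
#define RECORD_LOCATIONS   // normally passed to the compiler via -define:RECORD_LOCATIONS

using System;
using System.Collections.Generic;

class LocationDemo
{
#if RECORD_LOCATIONS
    static readonly List<int> locations = new List<int> ();
#else
    static readonly List<int> locations = null;
#endif

    static void Main ()
    {
        locations?.Add (42);   // becomes a no-op when the symbol is not defined
        Console.WriteLine (locations == null
            ? "location tracking disabled"
            : "recorded " + locations.Count + " location(s)");
    }
}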
Note: The Mono.CSharp.Tokenizer class examples in this article were compiled by 纯净天空 from source code and documentation hosted on GitHub, MSDocs, and similar platforms. The code snippets are taken from open source projects contributed by their respective authors, who retain copyright; consult each project's license before redistributing or reusing the code. Do not republish this article without permission.