| /* |
| * [The "BSD license"] |
| * Copyright (c) 2007-2008 Johannes Luber |
| * Copyright (c) 2005-2007 Kunle Odutola |
| * Copyright (c) 2011 Sam Harwell |
| * Copyright (c) 2011 Terence Parr |
| * All rights reserved. |
| * |
| * Redistribution and use in source and binary forms, with or without |
| * modification, are permitted provided that the following conditions |
| * are met: |
| * 1. Redistributions of source code must retain the above copyright |
| * notice, this list of conditions and the following disclaimer. |
| * 2. Redistributions in binary form must reproduce the above copyright |
| * notice, this list of conditions and the following disclaimer in the |
| * documentation and/or other materials provided with the distribution. |
| * 3. The name of the author may not be used to endorse or promote products |
| * derived from this software without specific prior written permission. |
| * |
| * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
| * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
| * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
| * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
| * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
| * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
| * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| */ |
| |
| // Maps a grammar rule's visibility modifier (or the "fragment" keyword) to the |
| // C# accessibility keyword used on the generated method; unannotated rules |
| // default to private. |
| csharpVisibilityMap ::= [ |
| "private":"private", |
| "protected":"protected", |
| "public":"public", |
| "fragment":"private", |
| default:"private" |
| ] |
| |
| /** The overall file structure of a recognizer; stores methods for rules |
| * and cyclic DFAs plus support code. |
| * Emits the auto-generated banner, the ANTLR_TRACE define (when tracing), |
| * warning-suppression pragmas, the user's header action, the using block, |
| * and an optional namespace wrapper around the recognizer body. |
| */ |
| outputFile( LEXER,PARSER,TREE_PARSER, actionScope, actions, |
| docComment, recognizer, |
| name, tokens, tokenNames, rules, cyclicDFAs, |
| bitsets, buildTemplate, buildAST, rewriteMode, profile, |
| backtracking, synpreds, memoize, numRules, |
| fileName, ANTLRVersion, generatedTimestamp, trace, |
| scopes, superClass, literals) ::= |
| << |
| //------------------------------------------------------------------------------ |
| // \<auto-generated> |
| // This code was generated by a tool. |
| // ANTLR Version: <ANTLRVersion> |
| // |
| // Changes to this file may cause incorrect behavior and will be lost if |
| // the code is regenerated. |
| // \</auto-generated> |
| //------------------------------------------------------------------------------ |
| |
| // $ANTLR <ANTLRVersion> <fileName> <generatedTimestamp> |
| |
| <if(trace)> |
| #define ANTLR_TRACE |
| <endif> |
| <@debugPreprocessor()> |
| // The variable 'variable' is assigned but its value is never used. |
| #pragma warning disable 168, 219 |
| // Unreachable code detected. |
| #pragma warning disable 162 |
| // Missing XML comment for publicly visible type or member 'Type_or_Member' |
| #pragma warning disable 1591 |
| |
| <actions.(actionScope).header> |
| |
| <@imports> |
| using System.Collections.Generic; |
| using Antlr.Runtime; |
| using Antlr.Runtime.Misc; |
| <if(TREE_PARSER)> |
| using Antlr.Runtime.Tree; |
| using RewriteRuleITokenStream = Antlr.Runtime.Tree.RewriteRuleTokenStream; |
| <endif> |
| using ConditionalAttribute = System.Diagnostics.ConditionalAttribute; |
| <@end> |
| <if(actions.(actionScope).namespace)> |
| namespace <actions.(actionScope).namespace> |
| { |
| <endif> |
| <docComment> |
| <recognizer> |
| <if(actions.(actionScope).namespace)> |
| |
| } // namespace <actions.(actionScope).namespace> |
| <endif> |
| >> |
| |
| /** Character-stream type used by the generated lexer; user-overridable via |
| * the inputStreamType action, defaulting to ICharStream. |
| */ |
| lexerInputStreamType() ::= << |
| <actions.(actionScope).inputStreamType; null="ICharStream"> |
| >> |
| |
| /** How to generate the C# lexer class: token-type constants, delegate |
| * grammar wiring, the standard constructors, the optional filter-mode |
| * NextToken override, ANTLR_TRACE hook methods, rule methods, and the |
| * cyclic DFA tables. |
| */ |
| lexer(grammar, name, tokens, scopes, rules, numRules, filterMode, labelType="CommonToken", |
| superClass={<if(actions.(actionScope).superClass)><actions.(actionScope).superClass><else>Antlr.Runtime.Lexer<endif>}) ::= << |
| [System.CodeDom.Compiler.GeneratedCode("ANTLR", "<ANTLRVersion>")] |
| [System.CLSCompliant(false)] |
| <parserModifier(grammar=grammar, actions=actions)> partial class <grammar.recognizerName> : <@superClassName><superClass><@end> |
| { |
| <tokens:{it|public const int <it.name; format="id">=<it.type>;}; separator="\n"> |
| <scopes:{it|<if(it.isDynamicGlobalScope)><globalAttributeScope(scope=it)><endif>}> |
| <actions.lexer.members> |
| |
| // delegates |
| <grammar.delegates: |
| {g|private <g.recognizerName> <g:delegateName()>;}; separator="\n"> |
| // delegators |
| <grammar.delegators: |
| {g|private <g.recognizerName> <g:delegateName()>;}; separator="\n"> |
| <last(grammar.delegators):{g|private <g.recognizerName> gParent;}> |
| |
| <actions.(actionScope).ctorModifier; null="public"> <grammar.recognizerName>()<! needed by subclasses !> |
| { |
| OnCreated(); |
| } |
| |
| <actions.(actionScope).ctorModifier; null="public"> <grammar.recognizerName>(<lexerInputStreamType()> input<grammar.delegators:{g|, <g.recognizerName> <g:delegateName()>}> ) |
| : this(input, new RecognizerSharedState()<grammar.delegators:{g|, <g:delegateName()>}>) |
| { |
| } |
| |
| <actions.(actionScope).ctorModifier; null="public"> <grammar.recognizerName>(<lexerInputStreamType()> input, RecognizerSharedState state<grammar.delegators:{g|, <g.recognizerName> <g:delegateName()>}>) |
| : base(input, state) |
| { |
| <if(memoize)> |
| <if(grammar.grammarIsRoot)> |
| state.ruleMemo = new System.Collections.Generic.Dictionary\<int, int>[<numRules>+1];<\n><! index from 1..n !> |
| <endif> |
| <endif> |
| <grammar.directDelegates: |
| {g|<g:delegateName()> = new <g.recognizerName>(input, this.state<trunc(g.delegators):{p|, <p:delegateName()>}>, this);}; separator="\n"> |
| <grammar.delegators: |
| {g|this.<g:delegateName()> = <g:delegateName()>;}; separator="\n"> |
| <last(grammar.delegators):{g|gParent = <g:delegateName()>;}> |
| |
| OnCreated(); |
| } |
| public override string GrammarFileName { get { return "<fileName>"; } } |
| |
| private static readonly bool[] decisionCanBacktrack = new bool[0]; |
| |
| <if(grammar.hasDelegates)> |
| public override <lexerInputStreamType()> CharStream |
| { |
| get |
| { |
| return base.CharStream; |
| } |
| set |
| { |
| base.CharStream = value; |
| <grammar.directDelegates: |
| {g|<g:delegateName()> = new <g.recognizerName>(input, state<trunc(g.delegators):{p|, <p:delegateName()>}>, this);}; separator="\n"> |
| <grammar.delegators: |
| {g|this.<g:delegateName()> = <g:delegateName()>;}; separator="\n"> |
| <last(grammar.delegators):{g|gParent = <g:delegateName()>;}> |
| } |
| } |
| |
| <endif> |
| <if(filterMode)> |
| <filteringNextToken()> |
| <endif> |
| |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void OnCreated() {} |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void EnterRule(string ruleName, int ruleIndex) {} |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void LeaveRule(string ruleName, int ruleIndex) {} |
| |
| <rules; separator="\n"> |
| |
| <insertLexerSynpreds(synpreds)> |
| |
| #region DFA |
| <cyclicDFAs:{dfa | DFA<dfa.decisionNumber> dfa<dfa.decisionNumber>;}; separator="\n"> |
| |
| protected override void InitDFAs() |
| { |
| base.InitDFAs(); |
| <cyclicDFAs:{dfa | dfa<dfa.decisionNumber> = new DFA<dfa.decisionNumber>(this<if(dfa.specialStateSTs)>, SpecialStateTransition<dfa.decisionNumber><endif>);}; separator="\n"> |
| } |
| |
| <cyclicDFAs:cyclicDFA()> <! dump tables for all DFA !> |
| #endregion |
| |
| } |
| >> |
| |
| /** An override of Lexer.nextToken() that backtracks over mTokens() looking |
| * for matches. No error can be generated upon error; just rewind, consume |
| * a token and then try again. backtracking needs to be set as well. |
| * Make rule memoization happen only at levels above 1 as we start mTokens |
| * at backtracking==1. |
| * Also overrides Memoize/AlreadyParsedRule so filter-level scanning |
| * (backtracking==1) is never memoized. |
| */ |
| filteringNextToken() ::= << |
| public override IToken NextToken() |
| { |
| while (true) |
| { |
| if (input.LA(1) == CharStreamConstants.EndOfFile) |
| { |
| IToken eof = new CommonToken((ICharStream)input, CharStreamConstants.EndOfFile, TokenChannels.Default, input.Index, input.Index); |
| eof.Line = Line; |
| eof.CharPositionInLine = CharPositionInLine; |
| return eof; |
| } |
| state.token = null; |
| state.channel = TokenChannels.Default; |
| state.tokenStartCharIndex = input.Index; |
| state.tokenStartCharPositionInLine = input.CharPositionInLine; |
| state.tokenStartLine = input.Line; |
| state.text = null; |
| try |
| { |
| int m = input.Mark(); |
| state.backtracking=1;<! means we won't throw slow exception !> |
| state.failed=false; |
| mTokens(); |
| state.backtracking=0; |
| <! mTokens backtracks with synpred at backtracking==2 |
| and we set the synpredgate to allow actions at level 1. !> |
| if (state.failed) |
| { |
| input.Rewind(m); |
| input.Consume();<! advance one char and try again !> |
| } |
| else |
| { |
| Emit(); |
| return state.token; |
| } |
| } |
| catch (RecognitionException re) |
| { |
| // shouldn't happen in backtracking mode, but... |
| ReportError(re); |
| Recover(re); |
| } |
| } |
| } |
| |
| public override void Memoize(IIntStream input, int ruleIndex, int ruleStartIndex) |
| { |
| if (state.backtracking > 1) |
| base.Memoize(input, ruleIndex, ruleStartIndex); |
| } |
| |
| public override bool AlreadyParsedRule(IIntStream input, int ruleIndex) |
| { |
| if (state.backtracking > 1) |
| return base.AlreadyParsedRule(input, ruleIndex); |
| |
| return false; |
| } |
| >> |
| |
| /** Guard expression gating embedded actions: run only when not backtracking. */ |
| actionGate() ::= "state.backtracking == 0" |
| |
| /** In filter mode mTokens runs at backtracking==1, so actions are gated there. */ |
| filteringActionGate() ::= "state.backtracking == 1" |
| |
| /** How to generate a parser; shared by parser() and treeParser(). |
| * Emits token constants, delegate/delegator wiring, trace hooks, rule |
| * methods, forwarders for delegated rules, synpred support, cyclic DFA |
| * tables, and the follow-set bitset class. |
| */ |
| genericParser(grammar, name, scopes, tokens, tokenNames, rules, numRules, |
| bitsets, inputStreamType, superClass, |
| labelType, members, rewriteElementType, |
| filterMode, ASTLabelType="object") ::= << |
| [System.CodeDom.Compiler.GeneratedCode("ANTLR", "<ANTLRVersion>")] |
| [System.CLSCompliant(false)] |
| <parserModifier(grammar=grammar, actions=actions)> partial class <grammar.recognizerName> : <@superClassName><superClass><@end> |
| { |
| <if(grammar.grammarIsRoot)> |
| internal static readonly string[] tokenNames = new string[] { |
| "\<invalid>", "\<EOR>", "\<DOWN>", "\<UP>", <tokenNames; separator=", "> |
| }; |
| <endif> |
| <tokens:{it|public const int <it.name; format="id">=<it.type>;}; separator="\n"> |
| |
| <if(grammar.delegates)> |
| // delegates |
| <grammar.delegates: |
| {g|private <g.recognizerName> <g:delegateName()>;}; separator="\n"> |
| <endif> |
| <if(grammar.delegators)> |
| // delegators |
| <grammar.delegators: |
| {g|private <g.recognizerName> <g:delegateName()>;}; separator="\n"> |
| <last(grammar.delegators):{g|private <g.recognizerName> gParent;}> |
| <endif> |
| |
| <scopes:{it|<if(it.isDynamicGlobalScope)><globalAttributeScope(scope=it)><endif>}> |
| <@members()> |
| |
| public override string[] TokenNames { get { return <grammar.composite.rootGrammar.recognizerName>.tokenNames; } } |
| public override string GrammarFileName { get { return "<fileName>"; } } |
| |
| <members> |
| |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void OnCreated() {} |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void EnterRule(string ruleName, int ruleIndex) {} |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void LeaveRule(string ruleName, int ruleIndex) {} |
| |
| #region Rules |
| <rules; separator="\n"> |
| #endregion Rules |
| |
| <if(grammar.delegatedRules)> |
| <! generate rule/method definitions for imported rules so they |
| appear to be defined in this recognizer. !> |
| #region Delegated rules |
| <grammar.delegatedRules:{ruleDescriptor| |
| <ruleModifier(grammar=grammar,ruleDescriptor=ruleDescriptor)> <returnType(ruleDescriptor)> <ruleDescriptor.name; format="id">(<ruleDescriptor.parameterScope:parameterScope()>) <!throws RecognitionException !>{ <if(ruleDescriptor.hasReturnValue)>return <endif><ruleDescriptor.grammar:delegateName()>.<ruleDescriptor.name; format="id">(<ruleDescriptor.parameterScope.attributes:{a|<a.name; format="id">}; separator=", ">); \}}; separator="\n"> |
| #endregion Delegated rules |
| <endif> |
| |
| <insertSynpreds(synpreds)> |
| |
| <if(cyclicDFAs)> |
| #region DFA |
| <cyclicDFAs:{dfa | private DFA<dfa.decisionNumber> dfa<dfa.decisionNumber>;}; separator="\n"> |
| |
| protected override void InitDFAs() |
| { |
| base.InitDFAs(); |
| <cyclicDFAs:{dfa | dfa<dfa.decisionNumber> = new DFA<dfa.decisionNumber>( this<if(dfa.specialStateSTs)>, SpecialStateTransition<dfa.decisionNumber><endif> );}; separator="\n"> |
| } |
| |
| <cyclicDFAs:cyclicDFA()><! dump tables for all DFA !> |
| #endregion DFA |
| <endif> |
| |
| <if(bitsets)> |
| #region Follow sets |
| private static class Follow |
| { |
| <bitsets:{it|<bitset(name={_<it.name>_in_<it.inName><it.tokenIndex>}, words64=it.bits)>}; separator="\n"> |
| } |
| #endregion Follow sets |
| <endif> |
| } |
| >> |
| |
| /** Members injected into genericParser: the decisionCanBacktrack table |
| * (populated only under ANTLR_DEBUG) and the standard two constructors |
| * (input-only, and input plus shared recognizer state). |
| */ |
| @genericParser.members() ::= << |
| #if ANTLR_DEBUG |
| private static readonly bool[] decisionCanBacktrack = |
| new bool[] |
| { |
| false, // invalid decision |
| <grammar.decisions:{d | <d.dfa.hasSynPred>}; wrap="\n", separator=", "> |
| }; |
| #else |
| private static readonly bool[] decisionCanBacktrack = new bool[0]; |
| #endif |
| <! WARNING. bug in ST: this is cut-n-paste into Dbg.stg !> |
| <actions.(actionScope).ctorModifier; null="public"> <grammar.recognizerName>(<inputStreamType> input<grammar.delegators:{g|, <g.recognizerName> <g:delegateName()>}>) |
| : this(input, new RecognizerSharedState()<grammar.delegators:{g|, <g:delegateName()>}>) |
| { |
| } |
| <actions.(actionScope).ctorModifier; null="public"> <grammar.recognizerName>(<inputStreamType> input, RecognizerSharedState state<grammar.delegators:{g|, <g.recognizerName> <g:delegateName()>}>) |
| : base(input, state) |
| { |
| <parserCtorBody()> |
| <if(grammar.directDelegates)> |
| <grammar.directDelegates: |
| {g|<g:delegateName()> = new <g.recognizerName>(input, state<trunc(g.delegators):{p|, <p:delegateName()>}>, this);}; separator="\n"> |
| <endif> |
| <if(grammar.indirectDelegates)> |
| <grammar.indirectDelegates:{g | <g:delegateName()> = <g.delegator:delegateName()>.<g:delegateName()>;}; separator="\n"> |
| <endif> |
| <if(grammar.delegators)> |
| <last(grammar.delegators):{g|gParent = <g:delegateName()>;}> |
| <endif> |
| OnCreated(); |
| } |
| >> |
| |
| // imported grammars are 'public' (can't be internal because their return scope classes must be accessible) |
| parserModifier(grammar, actions) ::= << |
| <if(grammar.grammarIsRoot)><actions.(actionScope).modifier; null="public"><else>public<endif> |
| >> |
| |
| /** Shared constructor body: allocate the memoization table (root grammar |
| * only) and store references to any delegator grammars. |
| */ |
| parserCtorBody() ::= << |
| <if(memoize)> |
| <if(grammar.grammarIsRoot)> |
| this.state.ruleMemo = new System.Collections.Generic.Dictionary\<int, int>[<length(grammar.allImportedRules)>+1];<\n><! index from 1..n !> |
| <endif> |
| <endif> |
| <grammar.delegators: |
| {g|this.<g:delegateName()> = <g:delegateName()>;}; separator="\n"> |
| >> |
| |
| /** A token-stream parser: genericParser specialized for ITokenStream input. */ |
| parser(grammar, name, scopes, tokens, tokenNames, rules, numRules, bitsets, |
| ASTLabelType="object", superClass={<if(actions.(actionScope).superClass)><actions.(actionScope).superClass><else>Antlr.Runtime.Parser<endif>}, labelType="IToken", |
| members={<actions.parser.members>}) ::= << |
| <genericParser(inputStreamType="ITokenStream", rewriteElementType="IToken", ...)> |
| >> |
| |
| /** How to generate a tree parser; same as parser except the input |
| * stream is a different type (ITreeNodeStream), and the superclass is |
| * chosen from TreeRewriter/TreeFilter/TreeParser per filter/AST mode. |
| */ |
| treeParser(grammar, name, scopes, tokens, tokenNames, globalAction, rules, |
| numRules, bitsets, filterMode, labelType={<ASTLabelType>}, ASTLabelType="object", |
| superClass={<if(actions.(actionScope).superClass)><actions.(actionScope).superClass><else>Antlr.Runtime.Tree.<if(filterMode)><if(buildAST)>TreeRewriter<else>TreeFilter<endif><else>TreeParser<endif><endif>}, |
| members={<actions.treeparser.members>}) ::= << |
| <genericParser(inputStreamType="ITreeNodeStream", rewriteElementType="Node", ...)> |
| >> |
| |
| /** A simpler version of a rule template that is specific to the imaginary |
| * rules created for syntactic predicates. As they never have return values |
| * nor parameters etc..., just give simplest possible method. Don't do |
| * any of the normal memoization stuff in here either; it's a waste. |
| * As predicates cannot be inlined into the invoking rule, they need to |
| * be in a rule by themselves. |
| * Per-rule Enter/Leave hooks are emitted only under ANTLR_TRACE. |
| */ |
| synpredRule(ruleName, ruleDescriptor, block, description, nakedBlock) ::= |
| << |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void EnterRule_<ruleName>_fragment() {} |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void LeaveRule_<ruleName>_fragment() {} |
| |
| // $ANTLR start <ruleName> |
| public <!final !>void <ruleName>_fragment(<ruleDescriptor.parameterScope:parameterScope()>) |
| { |
| <ruleLabelDefs()> |
| EnterRule_<ruleName>_fragment(); |
| EnterRule("<ruleName>_fragment", <ruleDescriptor.index>); |
| TraceIn("<ruleName>_fragment", <ruleDescriptor.index>); |
| try |
| { |
| <block> |
| } |
| finally |
| { |
| TraceOut("<ruleName>_fragment", <ruleDescriptor.index>); |
| LeaveRule("<ruleName>_fragment", <ruleDescriptor.index>); |
| LeaveRule_<ruleName>_fragment(); |
| } |
| } |
| // $ANTLR end <ruleName> |
| >> |
| |
| /** Lexers share the same synpred evaluation support as parsers. */ |
| insertLexerSynpreds(synpreds) ::= << |
| <insertSynpreds(synpreds)> |
| >> |
| |
| /** Emit EvaluatePredicate, which runs a synpred fragment with the |
| * backtracking level bumped, rewinds the input afterward, and reports |
| * whether the fragment matched (i.e. state.failed stayed false). |
| */ |
| insertSynpreds(synpreds) ::= << |
| <if(synpreds)> |
| #region Synpreds |
| private bool EvaluatePredicate(System.Action fragment) |
| { |
| bool success = false; |
| state.backtracking++; |
| <@start()> |
| try { DebugBeginBacktrack(state.backtracking); |
| int start = input.Mark(); |
| try |
| { |
| fragment(); |
| } |
| catch ( RecognitionException re ) |
| { |
| System.Console.Error.WriteLine("impossible: "+re); |
| } |
| success = !state.failed; |
| input.Rewind(start); |
| } finally { DebugEndBacktrack(state.backtracking, success); } |
| <@stop()> |
| state.backtracking--; |
| state.failed=false; |
| return success; |
| } |
| #endregion Synpreds |
| <endif> |
| >> |
| |
| /** Short-circuit a rule body when backtracking and the rule already |
| * failed/succeeded at this input index (memoization). |
| */ |
| ruleMemoization(name) ::= << |
| <if(memoize)> |
| if (state.backtracking > 0 && AlreadyParsedRule(input, <ruleDescriptor.index>)) { <returnFromRule()> } |
| <endif> |
| >> |
| |
| /** How to test for failure and return from rule */ |
| checkRuleBacktrackFailure() ::= << |
| <if(backtracking)>if (state.failed) <returnFromRule()><endif> |
| >> |
| |
| /** This rule has failed, exit indicating failure during backtrack */ |
| ruleBacktrackFailure() ::= << |
| <if(backtracking)>if (state.backtracking>0) {state.failed=true; <returnFromRule()>}<endif> |
| >> |
| |
| /** How to generate code for a rule. This includes any return type |
| * data aggregates required for multiple return values. |
| * Wraps the rule body with trace hooks, dynamic-scope push/pop, debug |
| * enter/exit calls, the grammar's exception handlers (or the default |
| * RecognitionException catch), and finally-based cleanup/memoization. |
| */ |
| rule(ruleName,ruleDescriptor,block,emptyRule,description,exceptions,finally,memoize) ::= << |
| <ruleAttributeScope(scope=ruleDescriptor.ruleScope)> |
| <returnScope(ruleDescriptor.returnScope)> |
| |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void EnterRule_<ruleName>() {} |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void LeaveRule_<ruleName>() {} |
| |
| // $ANTLR start "<ruleName>" |
| // <fileName>:<description> |
| [GrammarRule("<ruleName>")] |
| <ruleModifier(grammar=grammar,ruleDescriptor=ruleDescriptor)> <returnType(ruleDescriptor)> <ruleName; format="id">(<ruleDescriptor.parameterScope:parameterScope()>) |
| { |
| EnterRule_<ruleName>(); |
| EnterRule("<ruleName>", <ruleDescriptor.index>); |
| TraceIn("<ruleName>", <ruleDescriptor.index>); |
| <ruleScopeSetUp()> |
| <ruleDeclarations()> |
| <ruleLabelDefs()> |
| <ruleDescriptor.actions.init> |
| try { DebugEnterRule(GrammarFileName, "<ruleName>"); |
| DebugLocation(<ruleDescriptor.tree.line>, <ruleDescriptor.EORNode.charPositionInLine>); |
| <@preamble()> |
| try |
| { |
| <ruleMemoization(name=ruleName)> |
| <block> |
| <ruleCleanUp()> |
| <(ruleDescriptor.actions.after):execAction()> |
| } |
| <if(exceptions)> |
| <exceptions:{e|<catch(decl=e.decl,action=e.action)><\n>}> |
| <else> |
| <if(!emptyRule)> |
| <if(actions.(actionScope).rulecatch)> |
| <actions.(actionScope).rulecatch> |
| <else> |
| catch (RecognitionException re) |
| { |
| ReportError(re); |
| Recover(input,re); |
| <@setErrorReturnValue()> |
| } |
| <endif> |
| <endif> |
| <endif> |
| finally |
| { |
| TraceOut("<ruleName>", <ruleDescriptor.index>); |
| LeaveRule("<ruleName>", <ruleDescriptor.index>); |
| LeaveRule_<ruleName>(); |
| <memoize()> |
| <ruleScopeCleanUp()> |
| <finally> |
| } |
| DebugLocation(<ruleDescriptor.EORNode.line>, <ruleDescriptor.EORNode.charPositionInLine>); |
| } finally { DebugExitRule(GrammarFileName, "<ruleName>"); } |
| <@postamble()> |
| <returnFromRule()><\n> |
| } |
| // $ANTLR end "<ruleName>" |
| >> |
| |
| // imported grammars need to have internal rules; root-grammar rules map |
| // their grammar modifier through csharpVisibilityMap (default private) |
| ruleModifier(grammar,ruleDescriptor) ::= << |
| <if(grammar.grammarIsRoot)><csharpVisibilityMap.(ruleDescriptor.modifier); null="private"><else>internal<endif> |
| >> |
| |
| // imported grammars need to have public return scopes |
| returnScopeModifier(grammar,ruleDescriptor) ::= << |
| <if(grammar.grammarIsRoot)><csharpVisibilityMap.(ruleDescriptor.modifier); null="private"><else>public<endif> |
| >> |
| |
| /** Generate a catch clause for an exception handler declared in the |
| * grammar. The declaration and action are passed in explicitly (see the |
| * rule template's call: catch(decl=e.decl,action=e.action)), so use the |
| * formal parameters directly instead of relying on dynamic-scope lookup |
| * of the caller's iteration variable 'e'. |
| */ |
| catch(decl,action) ::= << |
| catch (<decl>) |
| { |
| <action> |
| } |
| >> |
| |
| /** Declare the rule's return value(s) — a retval aggregate when there are |
| * multiple return attributes, otherwise one local per attribute — plus |
| * the rule start index when memoizing. |
| */ |
| ruleDeclarations() ::= << |
| <if(ruleDescriptor.hasMultipleReturnValues)> |
| <returnType(ruleDescriptor)> retval = new <returnType(ruleDescriptor)>(); |
| retval.Start = (<labelType>)input.LT(1); |
| <else> |
| <ruleDescriptor.returnScope.attributes:{ a | |
| <a.type> <a.name; format="id"> = <if(a.initValue)><a.initValue><else><initValue(a.type)><endif>; |
| }> |
| <endif> |
| <if(memoize)> |
| int <ruleDescriptor.name>_StartIndex = input.Index; |
| <endif> |
| >> |
| |
| /** Push a fresh instance of each dynamic scope this rule uses/declares. */ |
| ruleScopeSetUp() ::= << |
| <ruleDescriptor.useScopes:{it|<it>_stack.Push(new <it>_scope());<it>_scopeInit(<it>_stack.Peek());}; separator="\n"> |
| <ruleDescriptor.ruleScope:{it|<it.name>_stack.Push(new <it.name>_scope());<it.name>_scopeInit(<it.name>_stack.Peek());}; separator="\n"> |
| >> |
| |
| /** Pop this rule's dynamic scopes (invoked from the rule's finally block). */ |
| ruleScopeCleanUp() ::= << |
| <ruleDescriptor.useScopes:{it|<it>_scopeAfter(<it>_stack.Peek());<it>_stack.Pop();}; separator="\n"> |
| <ruleDescriptor.ruleScope:{it|<it.name>_scopeAfter(<it.name>_stack.Peek());<it.name>_stack.Pop();}; separator="\n"> |
| >> |
| |
| /** Declare locals for every token/tree/rule label (and their list forms) |
| * used in a parser/tree-parser rule. |
| */ |
| ruleLabelDefs() ::= << |
| <[ruleDescriptor.tokenLabels,ruleDescriptor.tokenListLabels,ruleDescriptor.wildcardTreeLabels,ruleDescriptor.wildcardTreeListLabels] |
| :{it|<labelType> <it.label.text> = default(<labelType>);}; separator="\n" |
| > |
| <ruleDescriptor.tokenListLabels |
| :{it|List\<<labelType>\> list_<it.label.text> = null;}; separator="\n" |
| > |
| <[ruleDescriptor.ruleListLabels,ruleDescriptor.wildcardTreeListLabels] |
| :{it|List\<<ASTLabelType>\> list_<it.label.text> = null;}; separator="\n" |
| > |
| <ruleDescriptor.ruleLabels:ruleLabelDef(); separator="\n"> |
| <ruleDescriptor.ruleListLabels:ruleLabelDef(); separator="\n"> |
| >> |
| |
| /** Declare locals for lexer-rule labels; char labels are ints. */ |
| lexerRuleLabelDefs() ::= << |
| <[ruleDescriptor.tokenLabels, |
| ruleDescriptor.tokenListLabels, |
| ruleDescriptor.ruleLabels] |
| :{it|<labelType> <it.label.text> = default(<labelType>);}; separator="\n" |
| > |
| <[ruleDescriptor.charListLabels, |
| ruleDescriptor.charLabels] |
| :{it|int <it.label.text> = 0;}; separator="\n" |
| > |
| <[ruleDescriptor.tokenListLabels, |
| ruleDescriptor.ruleListLabels] |
| :{it|List\<<labelType>\> list_<it.label.text> = null;}; separator="\n" |
| > |
| <ruleDescriptor.charListLabels:{it|List\<int\> list_<it.label.text> = null;}; separator="\n" |
| > |
| >> |
| |
| /** Emit the return statement for a rule: bare 'return;' for synpreds, |
| * 'return <value>;' for a single return attribute, 'return retval;' for |
| * multiple return attributes. |
| */ |
| returnFromRule() ::= <% |
| return |
| <if(!ruleDescriptor.isSynPred)> |
| <if(ruleDescriptor.hasReturnValue)> |
| <if(ruleDescriptor.hasSingleReturnValue)> |
| <! This comment is a hack to make sure the following |
| single space appears in the output. !> <ruleDescriptor.singleValueReturnName> |
| <else> |
| <!!> retval |
| <endif> |
| <endif> |
| <endif> |
| ; |
| %> |
| |
| /** Record retval.Stop on normal rule completion (token parsers only). */ |
| ruleCleanUp() ::= << |
| <if(ruleDescriptor.hasMultipleReturnValues)> |
| <if(!TREE_PARSER)> |
| retval.Stop = (<labelType>)input.LT(-1); |
| <endif> |
| <endif> |
| >> |
| |
| /** Memoize the rule result when backtracking (invoked from finally). */ |
| memoize() ::= << |
| <if(memoize)> |
| <if(backtracking)> |
| if (state.backtracking > 0) { Memoize(input, <ruleDescriptor.index>, <ruleDescriptor.name>_StartIndex); } |
| <endif> |
| <endif> |
| >> |
| |
| /** How to generate a rule in the lexer; naked blocks are used for |
| * fragment rules. Non-naked rules also set state.type/state.channel |
| * from the matched alternative before the finally cleanup runs. |
| */ |
| lexerRule(ruleName,nakedBlock,ruleDescriptor,block,memoize) ::= << |
| |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void EnterRule_<ruleName>() {} |
| [Conditional("ANTLR_TRACE")] |
| protected virtual void LeaveRule_<ruleName>() {} |
| |
| // $ANTLR start "<ruleName>" |
| [GrammarRule("<ruleName>")] |
| <ruleModifier(grammar=grammar,ruleDescriptor=ruleDescriptor)> void m<ruleName>(<ruleDescriptor.parameterScope:parameterScope()>) |
| { |
| EnterRule_<ruleName>(); |
| EnterRule("<ruleName>", <ruleDescriptor.index>); |
| TraceIn("<ruleName>", <ruleDescriptor.index>); |
| <ruleScopeSetUp()> |
| <ruleDeclarations()> |
| try |
| { |
| <if(nakedBlock)> |
| <ruleMemoization(name=ruleName)> |
| <lexerRuleLabelDefs()> |
| <ruleDescriptor.actions.init> |
| <block> |
| <else> |
| int _type = <ruleName>; |
| int _channel = DefaultTokenChannel; |
| <ruleMemoization(name=ruleName)> |
| <lexerRuleLabelDefs()> |
| <ruleDescriptor.actions.init> |
| <block> |
| <ruleCleanUp()> |
| state.type = _type; |
| state.channel = _channel; |
| <(ruleDescriptor.actions.after):execAction()> |
| <endif> |
| } |
| finally |
| { |
| TraceOut("<ruleName>", <ruleDescriptor.index>); |
| LeaveRule("<ruleName>", <ruleDescriptor.index>); |
| LeaveRule_<ruleName>(); |
| <ruleScopeCleanUp()> |
| <memoize()> |
| } |
| } |
| // $ANTLR end "<ruleName>" |
| >> |
| |
| /** How to generate code for the implicitly-defined lexer grammar rule |
| * that chooses between lexer rules (emitted as the mTokens override). |
| */ |
| tokensRule(ruleName,nakedBlock,args,block,ruleDescriptor) ::= << |
| |
| public override void mTokens() |
| { |
| <block><\n> |
| } |
| >> |
| |
| // S U B R U L E S |
| |
| /** A (...) subrule with multiple alternatives: run the decision to pick |
| * an alt, then switch into the matching altSwitchCase body. |
| */ |
| block(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= << |
| // <fileName>:<description> |
| int alt<decisionNumber>=<maxAlt>; |
| <decls> |
| <@predecision()> |
| try { DebugEnterSubRule(<decisionNumber>); |
| try { DebugEnterDecision(<decisionNumber>, decisionCanBacktrack[<decisionNumber>]); |
| <decision> |
| } finally { DebugExitDecision(<decisionNumber>); } |
| <@postdecision()> |
| <@prebranch()> |
| switch (alt<decisionNumber>) |
| { |
| <alts:{a|<altSwitchCase(i,a)>}> |
| } |
| } finally { DebugExitSubRule(<decisionNumber>); } |
| <@postbranch()> |
| >> |
| |
| /** A rule block with multiple alternatives (same as block, but a rule's |
| * outermost decision has no enclosing subrule debug events). |
| */ |
| ruleBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= << |
| // <fileName>:<description> |
| int alt<decisionNumber>=<maxAlt>; |
| <decls> |
| <@predecision()> |
| try { DebugEnterDecision(<decisionNumber>, decisionCanBacktrack[<decisionNumber>]); |
| <decision> |
| } finally { DebugExitDecision(<decisionNumber>); } |
| <@postdecision()> |
| switch (alt<decisionNumber>) |
| { |
| <alts:{a|<altSwitchCase(i,a)>}> |
| } |
| >> |
| |
| /** A rule block with a single alternative: no decision needed. */ |
| ruleBlockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= << |
| // <fileName>:<description> |
| <decls> |
| <@prealt()> |
| DebugEnterAlt(1); |
| <alts> |
| <@postalt()> |
| >> |
| |
| /** A special case of a (...) subrule with a single alternative */ |
| blockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= << |
| // <fileName>:<description> |
| <decls> |
| <@prealt()> |
| DebugEnterAlt(1); |
| <alts> |
| <@postalt()> |
| >> |
| |
| /** A (..)+ block with 1 or more alternatives: loop until the decision |
| * yields no alt; throw EarlyExitException if zero iterations matched. |
| * Exits via 'goto loop<n>' since C# has no labeled break. |
| */ |
| positiveClosureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= << |
| // <fileName>:<description> |
| int cnt<decisionNumber>=0; |
| <decls> |
| <@preloop()> |
| try { DebugEnterSubRule(<decisionNumber>); |
| while (true) |
| { |
| int alt<decisionNumber>=<maxAlt>; |
| <@predecision()> |
| try { DebugEnterDecision(<decisionNumber>, decisionCanBacktrack[<decisionNumber>]); |
| <decision> |
| } finally { DebugExitDecision(<decisionNumber>); } |
| <@postdecision()> |
| switch (alt<decisionNumber>) |
| { |
| <alts:{a|<altSwitchCase(i,a)>}> |
| default: |
| if (cnt<decisionNumber> >= 1) |
| goto loop<decisionNumber>; |
| |
| <ruleBacktrackFailure()> |
| EarlyExitException eee<decisionNumber> = new EarlyExitException( <decisionNumber>, input ); |
| DebugRecognitionException(eee<decisionNumber>); |
| <@earlyExitException()> |
| throw eee<decisionNumber>; |
| } |
| cnt<decisionNumber>++; |
| } |
| loop<decisionNumber>: |
| ; |
| |
| } finally { DebugExitSubRule(<decisionNumber>); } |
| <@postloop()> |
| >> |
| |
| positiveClosureBlockSingleAlt ::= positiveClosureBlock |
| |
| /** A (..)* closure block (the block itself has 1 or more alternatives): |
| * loop until the decision yields no alt, then exit via 'goto loop<n>'. |
| */ |
| closureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= << |
| // <fileName>:<description> |
| <decls> |
| <@preloop()> |
| try { DebugEnterSubRule(<decisionNumber>); |
| while (true) |
| { |
| int alt<decisionNumber>=<maxAlt>; |
| <@predecision()> |
| try { DebugEnterDecision(<decisionNumber>, decisionCanBacktrack[<decisionNumber>]); |
| <decision> |
| } finally { DebugExitDecision(<decisionNumber>); } |
| <@postdecision()> |
| switch ( alt<decisionNumber> ) |
| { |
| <alts:{a|<altSwitchCase(i,a)>}> |
| default: |
| goto loop<decisionNumber>; |
| } |
| } |
| |
| loop<decisionNumber>: |
| ; |
| |
| } finally { DebugExitSubRule(<decisionNumber>); } |
| <@postloop()> |
| >> |
| |
| closureBlockSingleAlt ::= closureBlock |
| |
| /** Optional blocks (x)? are translated to (x|) before code generation |
| * so we can just use the normal block template |
| */ |
| optionalBlock ::= block |
| |
| optionalBlockSingleAlt ::= block |
| |
| /** A case in a switch that jumps to an alternative given the alternative |
| * number. A DFA predicts the alternative and then a simple switch |
| * does the jump to the code that actually matches that alternative. |
| */ |
| altSwitchCase(altNum,alt) ::= << |
| case <altNum>: |
| <@prealt()> |
| DebugEnterAlt(<altNum>); |
| <alt> |
| break;<\n> |
| >> |
| |
| /** An alternative is just a list of elements; at outermost level */ |
| alt(elements,altNum,description,autoAST,outerAlt,treeLevel,rew) ::= << |
| // <fileName>:<description> |
| { |
| <@declarations()> |
| <elements:element()> |
| <rew> |
| <@cleanup()> |
| } |
| >> |
| |
| /** What to emit when there is no rewrite. For auto build |
| * mode, does nothing. |
| */ |
| noRewrite(rewriteBlockLevel, treeLevel) ::= "" |
| |
| // E L E M E N T S |
| |
| /** Dump the elements one per line, each preceded by a DebugLocation call. */ |
| element(it) ::= <% |
| <@prematch()> |
| DebugLocation(<it.line>, <it.pos>);<\n> |
| <it.el><\n> |
| %> |
| |
| /** match a token optionally with a label in front */ |
| tokenRef(token,label,elementIndex,terminalOptions) ::= << |
| <if(label)><label>=(<labelType>)<endif>Match(input,<token>,Follow._<token>_in_<ruleName><elementIndex>); <checkRuleBacktrackFailure()> |
| >> |
| |
| /** ids+=ID */ |
| tokenRefAndListLabel(token,label,elementIndex,terminalOptions) ::= << |
| <tokenRef(...)> |
| <listLabelElem(elem=label,elemType=labelType,...)> |
| >> |
| |
| /** Append a matched element to its list label, creating the list lazily. */ |
| listLabelElem(label,elem,elemType) ::= << |
| if (list_<label>==null) list_<label>=new List\<<elemType; null={<labelType>}>\>(); |
| list_<label>.Add(<elem>);<\n> |
| >> |
| |
| /** match a character */ |
| charRef(char,label) ::= << |
| <if(label)> |
| <label> = input.LA(1);<\n> |
| <endif> |
| Match(<char>); <checkRuleBacktrackFailure()> |
| >> |
| |
| /** match a character range */ |
| charRangeRef(a,b,label) ::= << |
| <if(label)> |
| <label> = input.LA(1);<\n> |
| <endif> |
| MatchRange(<a>,<b>); <checkRuleBacktrackFailure()> |
| >> |
| |
/** For now, sets are interval tests and must be tested inline */
matchSet(s,label,terminalOptions,elementIndex,postmatchCode="") ::= <<
<if(label)>
<matchSetLabel()>
<endif>
if (<s>)
{
    input.Consume();
    <postmatchCode>
    <if(!LEXER)>state.errorRecovery=false;<endif><if(backtracking)>state.failed=false;<endif>
}
else
{
    <ruleBacktrackFailure()>
    MismatchedSetException mse = new MismatchedSetException(null,input);
    DebugRecognitionException(mse);
    <@mismatchedSetException()>
    <if(LEXER)>
    Recover(mse);
    throw mse;
    <else>
    throw mse;
    <! use following code to make it recover inline; remove throw mse;
    recoverFromMismatchedSet(input,mse,Follow._set_in_<ruleName><elementIndex>);
    !>
    <endif>
}<\n>
>>

/** Like matchSet but emits no membership test — used where the set is
 *  already known to match, so we just consume and reset error state. */
matchSetUnchecked(s,label,elementIndex,postmatchCode=false) ::= <%
<if(label)>
<matchSetLabel()><\n>
<endif>
input.Consume();<\n>
<if(postmatchCode)>
<postmatchCode><\n>
<endif>
<if(!LEXER)>state.errorRecovery=false;<endif><if(backtracking)>state.failed=false;<endif>
%>

/** Capture the about-to-be-matched symbol into the label: an int char
 *  code in the lexer, a token object in the parser/tree parser. */
matchSetLabel() ::= <%
<if(LEXER)>
<label>= input.LA(1);
<else>
<label>=(<labelType>)input.LT(1);
<endif>
%>

/** A set match at rule-block level is just a normal set match. */
matchRuleBlockSet ::= matchSet

/** x+=set — set match feeding a list label. */
matchSetAndListLabel(s,label,elementIndex,postmatchCode) ::= <<
<matchSet(...)>
<listLabelElem(elem=label,elemType=labelType,...)>
>>
| |
/** Match a string literal; when labeled, manufacture a token spanning
 *  the matched characters and fix up its line/column from the recorded
 *  start position. */
lexerStringRef(string,label,elementIndex) ::= <%
<if(label)>
int <label>Start = CharIndex;<\n>
Match(<string>); <checkRuleBacktrackFailure()><\n>
int <label>StartLine<elementIndex> = Line;<\n>
int <label>StartCharPos<elementIndex> = CharPositionInLine;<\n>
<label> = new <labelType>(input, TokenTypes.Invalid, TokenChannels.Default, <label>Start, CharIndex-1);<\n>
<label>.Line = <label>StartLine<elementIndex>;<\n>
<label>.CharPositionInLine = <label>StartCharPos<elementIndex>;
<else>
Match(<string>); <checkRuleBacktrackFailure()><\n>
<endif>
%>

/** Match . wildcard in parser/tree parser */
wildcard(token,label,elementIndex,terminalOptions) ::= <<
<if(label)>
<label>=(<labelType>)input.LT(1);<\n>
<endif>
MatchAny(input); <checkRuleBacktrackFailure()>
>>

/** x+=. in parser/tree parser */
wildcardAndListLabel(token,label,elementIndex,terminalOptions) ::= <<
<wildcard(...)>
<listLabelElem(elem=label,elemType=labelType,...)>
>>

/** Match . wildcard in lexer */
wildcardChar(label, elementIndex) ::= <<
<if(label)>
<label> = input.LA(1);<\n>
<endif>
MatchAny(); <checkRuleBacktrackFailure()>
>>

/** x+=. in lexer */
wildcardCharListLabel(label, elementIndex) ::= <<
<wildcardChar(...)>
<listLabelElem(elem=label,elemType=labelType,...)>
>>
| |
/** Match a rule reference by invoking it possibly with arguments
 * and a return value or values. The 'rule' argument was the
 * target rule name, but now is type Rule, whose toString is
 * same: the rule name. Now though you can access full rule
 * descriptor stuff.
 * Push/PopFollow maintain the follow-set stack used for error recovery.
 */
ruleRef(rule,label,elementIndex,args,scope) ::= <<
PushFollow(Follow._<rule.name>_in_<ruleName><elementIndex>);
<if(label)><label>=<endif><if(scope)><scope:delegateName()>.<endif><rule.name; format="id">(<args; separator=", ">);
PopFollow();
<checkRuleBacktrackFailure()>
>>

/** ids+=r */
ruleRefAndListLabel(rule,label,elementIndex,args,scope) ::= <<
<ruleRef(...)>
<listLabelElem(elem=label,elemType={<ASTLabelType>},...)>
>>
| |
/** A lexer rule reference.
 *
 * The 'rule' argument was the target rule name, but now
 * is type Rule, whose toString is same: the rule name.
 * Now though you can access full rule descriptor stuff.
 * Lexer rule methods are generated with an 'm' prefix (mRULE);
 * when labeled, a token is manufactured from the matched span.
 */
lexerRuleRef(rule,label,args,elementIndex,scope) ::= <%
<if(label)>
int <label>Start<elementIndex> = CharIndex;<\n>
int <label>StartLine<elementIndex> = Line;<\n>
int <label>StartCharPos<elementIndex> = CharPositionInLine;<\n>
<if(scope)><scope:delegateName()>.<endif>m<rule.name>(<args; separator=", ">); <checkRuleBacktrackFailure()><\n>
<label> = new <labelType>(input, TokenTypes.Invalid, TokenChannels.Default, <label>Start<elementIndex>, CharIndex-1);<\n>
<label>.Line = <label>StartLine<elementIndex>;<\n>
<label>.CharPositionInLine = <label>StartCharPos<elementIndex>;
<else>
<if(scope)><scope:delegateName()>.<endif>m<rule.name>(<args; separator=", ">); <checkRuleBacktrackFailure()>
<endif>
%>

/** i+=INT in lexer */
lexerRuleRefAndListLabel(rule,label,args,elementIndex,scope) ::= <<
<lexerRuleRef(...)>
<listLabelElem(elem=label,elemType=labelType,...)>
>>

/** EOF in the lexer; when labeled, manufacture an EOF token spanning the match */
lexerMatchEOF(label,elementIndex) ::= <%
<if(label)>
int <label>Start<elementIndex> = CharIndex;<\n>
int <label>StartLine<elementIndex> = Line;<\n>
int <label>StartCharPos<elementIndex> = CharPositionInLine;<\n>
Match(EOF); <checkRuleBacktrackFailure()><\n>
<labelType> <label> = new <labelType>(input, EOF, TokenChannels.Default, <label>Start<elementIndex>, CharIndex-1);<\n>
<label>.Line = <label>StartLine<elementIndex>;<\n>
<label>.CharPositionInLine = <label>StartCharPos<elementIndex>;
<else>
Match(EOF); <checkRuleBacktrackFailure()>
<endif>
%>
| |
// used for left-recursive rules: _p carries the precedence of the current
// operator so alt predicates can reject lower-precedence continuations
recRuleDefArg() ::= "int <recRuleArg()>"
recRuleArg() ::= "_p"
recRuleAltPredicate(ruleName,opPrec) ::= "<recRuleArg()> \<= <opPrec>"
recRuleSetResultAction() ::= "root_0=$<ruleName>_primary.tree;"
recRuleSetReturnAction(src,name) ::= "$<name>=$<src>.<name>;"
| |
/** match ^(root children) in tree parser; nullableChildList means the
 *  child list may be absent, so DOWN/children/UP are matched only when
 *  a DOWN token is actually next. */
tree(root, actionsAfterRoot, children, nullableChildList,
     enclosingTreeLevel, treeLevel) ::= <<
<root:element()>
<actionsAfterRoot:element()>
<if(nullableChildList)>
if (input.LA(1) == TokenTypes.Down)
{
    Match(input, TokenTypes.Down, null); <checkRuleBacktrackFailure()>
    <children:element()>
    Match(input, TokenTypes.Up, null); <checkRuleBacktrackFailure()>
}
<else>
Match(input, TokenTypes.Down, null); <checkRuleBacktrackFailure()>
<children:element()>
Match(input, TokenTypes.Up, null); <checkRuleBacktrackFailure()>
<endif>
>>

/** Every predicate is used as a validating predicate (even when it is
 * also hoisted into a prediction expression).
 */
validateSemanticPredicate(pred,description) ::= <<
if (!(<evalPredicate(...)>))
{
    <ruleBacktrackFailure()>
    throw new FailedPredicateException(input, "<ruleName>", "<description>");
}
>>
| |
// F i x e d D F A (if-then-else)

/** A fixed-lookahead DFA state rendered as an if/else-if chain over the
 *  edges; the final else either takes the EOT-predicted alt or raises
 *  NoViableAltException. */
dfaState(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= <<
int LA<decisionNumber>_<stateNumber> = input.LA(<k>);<\n>
<edges; separator="\nelse ">
else
{
<if(eotPredictsAlt)>
    alt<decisionNumber> = <eotPredictsAlt>;
<else>
    <ruleBacktrackFailure()>
    NoViableAltException nvae = new NoViableAltException("<description>", <decisionNumber>, <stateNumber>, input);
    DebugRecognitionException(nvae);
    <@noViableAltException()>
    throw nvae;
<endif>
}
>>

/** Same as a normal DFA state except that we don't examine lookahead
 * for the bypass alternative. It delays error detection but this
 * is faster, smaller, and more what people expect. For (X)? people
 * expect "if ( LA(1)==X ) match(X);" and that's it.
 */
dfaOptionalBlockState(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= <<
int LA<decisionNumber>_<stateNumber> = input.LA(<k>);<\n>
<edges; separator="\nelse ">
>>

/** A DFA state that is actually the loopback decision of a closure
 * loop. If end-of-token (EOT) predicts any of the targets then it
 * should act like a default clause (i.e., no error can be generated).
 * This is used only in the lexer so that for ('a')* on the end of a rule
 * anything other than 'a' predicts exiting.
 */
dfaLoopbackState(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= <<
int LA<decisionNumber>_<stateNumber> = input.LA(<k>);<\n>
<edges; separator="\nelse "><\n>
<if(eotPredictsAlt)>
<if(!edges)>
alt<decisionNumber> = <eotPredictsAlt>;<! if no edges, don't gen ELSE !>
<else>
else
{
    alt<decisionNumber> = <eotPredictsAlt>;
}<\n>
<endif>
<endif>
>>

/** An accept state indicates a unique alternative has been predicted */
dfaAcceptState(alt) ::= "alt<decisionNumber> = <alt>;"

/** A simple edge with an expression. If the expression is satisfied,
 * enter to the target state. To handle gated productions, we may
 * have to evaluate some predicates for this edge.
 */
dfaEdge(labelExpr, targetState, predicates) ::= <<
if ((<labelExpr>)<if(predicates)> && (<predicates>)<endif>)
{
    <targetState>
}
>>
| |
// F i x e d D F A (switch case)

/** A DFA state where a SWITCH may be generated. The code generator
 * decides if this is possible: CodeGenerator.canGenerateSwitch().
 */
dfaStateSwitch(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= <<
switch (input.LA(<k>))
{
<edges; separator="\n">
default:
<if(eotPredictsAlt)>
    alt<decisionNumber>=<eotPredictsAlt>;
    break;<\n>
<else>
    {
        <ruleBacktrackFailure()>
        NoViableAltException nvae = new NoViableAltException("<description>", <decisionNumber>, <stateNumber>, input);
        DebugRecognitionException(nvae);
        <@noViableAltException()>
        throw nvae;
    }
<endif>
}<\n>
>>

/** Switch form of dfaOptionalBlockState: no default clause, so an
 *  unmatched lookahead simply bypasses the optional block. */
dfaOptionalBlockStateSwitch(k,edges,eotPredictsAlt,description,stateNumber,semPredState) ::= <<
switch (input.LA(<k>))
{
<edges; separator="\n">
}<\n>
>>

/** Switch form of dfaLoopbackState: EOT-predicted alt becomes the
 *  default clause (loop exit), never an error. */
dfaLoopbackStateSwitch(k, edges,eotPredictsAlt,description,stateNumber,semPredState) ::= <<
switch (input.LA(<k>))
{
<edges; separator="\n">
<if(eotPredictsAlt)>
default:
    alt<decisionNumber>=<eotPredictsAlt>;
    break;<\n>
<endif>
}<\n>
>>

/** One or more case labels sharing a single target-state body. */
dfaEdgeSwitch(labels, targetState) ::= <<
<labels:{it|case <it>:}; separator="\n">
    {
    <targetState>
    }
    break;
>>
| |
// C y c l i c D F A

/** The code to initiate execution of a cyclic DFA; this is used
 * in the rule to predict an alt just like the fixed DFA case.
 * The <name> attribute is inherited via the parser, lexer, ...
 * Note the bare rethrow (throw;) preserves the original stack trace.
 */
dfaDecision(decisionNumber,description) ::= <<
try
{
    alt<decisionNumber> = dfa<decisionNumber>.Predict(input);
}
catch (NoViableAltException nvae)
{
    DebugRecognitionException(nvae);
    throw;
}
>>
| |
/* Dump DFA tables as run-length-encoded strings of octal values.
 * Can't use hex as the compiler translates them before compilation.
 * These strings are split into multiple, concatenated strings and put
 * back together at compile time, since compilers cannot handle very
 * large static array initializers. See the analysis and runtime DFA
 * classes for the encoding/decoding methods.
 *
 * NOTE: each *_S constant must end with exactly <"...">"; — a stray
 * anonymous-template closer (}>) appended to the literal would be
 * emitted into the generated string and corrupt the unpacked table.
 */
cyclicDFA(dfa) ::= <<
private class DFA<dfa.decisionNumber> : DFA
{
    private const string DFA<dfa.decisionNumber>_eotS =
        "<dfa.javaCompressedEOT; wrap="\"+\n\t\t\"">";
    private const string DFA<dfa.decisionNumber>_eofS =
        "<dfa.javaCompressedEOF; wrap="\"+\n\t\t\"">";
    private const string DFA<dfa.decisionNumber>_minS =
        "<dfa.javaCompressedMin; wrap="\"+\n\t\t\"">";
    private const string DFA<dfa.decisionNumber>_maxS =
        "<dfa.javaCompressedMax; wrap="\"+\n\t\t\"">";
    private const string DFA<dfa.decisionNumber>_acceptS =
        "<dfa.javaCompressedAccept; wrap="\"+\n\t\t\"">";
    private const string DFA<dfa.decisionNumber>_specialS =
        "<dfa.javaCompressedSpecial; wrap="\"+\n\t\t\"">";
    private static readonly string[] DFA<dfa.decisionNumber>_transitionS =
        {
            <dfa.javaCompressedTransition:{s|"<s; wrap="\"+\n\"">"}; separator=",\n">
        };

    private static readonly short[] DFA<dfa.decisionNumber>_eot = DFA.UnpackEncodedString(DFA<dfa.decisionNumber>_eotS);
    private static readonly short[] DFA<dfa.decisionNumber>_eof = DFA.UnpackEncodedString(DFA<dfa.decisionNumber>_eofS);
    private static readonly char[] DFA<dfa.decisionNumber>_min = DFA.UnpackEncodedStringToUnsignedChars(DFA<dfa.decisionNumber>_minS);
    private static readonly char[] DFA<dfa.decisionNumber>_max = DFA.UnpackEncodedStringToUnsignedChars(DFA<dfa.decisionNumber>_maxS);
    private static readonly short[] DFA<dfa.decisionNumber>_accept = DFA.UnpackEncodedString(DFA<dfa.decisionNumber>_acceptS);
    private static readonly short[] DFA<dfa.decisionNumber>_special = DFA.UnpackEncodedString(DFA<dfa.decisionNumber>_specialS);
    private static readonly short[][] DFA<dfa.decisionNumber>_transition;

    static DFA<dfa.decisionNumber>()
    {
        int numStates = DFA<dfa.decisionNumber>_transitionS.Length;
        DFA<dfa.decisionNumber>_transition = new short[numStates][];
        for ( int i=0; i \< numStates; i++ )
        {
            DFA<dfa.decisionNumber>_transition[i] = DFA.UnpackEncodedString(DFA<dfa.decisionNumber>_transitionS[i]);
        }
    }

    public DFA<dfa.decisionNumber>( BaseRecognizer recognizer<if(dfa.specialStateSTs)>, SpecialStateTransitionHandler specialStateTransition<endif> )
<if(dfa.specialStateSTs)>
        : base(specialStateTransition)
<endif>
    {
        this.recognizer = recognizer;
        this.decisionNumber = <dfa.decisionNumber>;
        this.eot = DFA<dfa.decisionNumber>_eot;
        this.eof = DFA<dfa.decisionNumber>_eof;
        this.min = DFA<dfa.decisionNumber>_min;
        this.max = DFA<dfa.decisionNumber>_max;
        this.accept = DFA<dfa.decisionNumber>_accept;
        this.special = DFA<dfa.decisionNumber>_special;
        this.transition = DFA<dfa.decisionNumber>_transition;
    }

    public override string Description { get { return "<dfa.description>"; } }

    public override void Error(NoViableAltException nvae)
    {
        DebugRecognitionException(nvae);
    }
}<\n>
<if(dfa.specialStateSTs)>
private int SpecialStateTransition<dfa.decisionNumber>(DFA dfa, int s, IIntStream _input)<! throws NoViableAltException!>
{
    <if(LEXER)>
    IIntStream input = _input;
    <endif>
    <if(PARSER)>
    ITokenStream input = (ITokenStream)_input;
    <endif>
    <if(TREE_PARSER)>
    ITreeNodeStream input = (ITreeNodeStream)_input;
    <endif>
    int _s = s;
    switch (s)
    {
        <dfa.specialStateSTs:{state |
        case <i0>:<! compressed special state numbers 0..n-1 !>
            <state>}; separator="\n">
    }
<if(backtracking)>
    if (state.backtracking > 0) {state.failed=true; return -1;}
<endif>
    NoViableAltException nvae = new NoViableAltException(dfa.Description, <dfa.decisionNumber>, _s, input);
    dfa.Error(nvae);
    throw nvae;
}
<endif>
>>
| |
/** A state in a cyclic DFA; it's a special state and part of a big switch on
 * state.
 */
cyclicDFAState(decisionNumber,stateNumber,edges,needErrorClause,semPredState) ::= <<
int LA<decisionNumber>_<stateNumber> = input.LA(1);<\n>
<if(semPredState)>
<! get next lookahead symbol to test edges, then rewind !>
<\n>int index<decisionNumber>_<stateNumber> = input.Index;
input.Rewind();
<endif>
s = -1;
<edges; separator="\nelse ">
<if(semPredState)>
<! return input cursor to state before we rewound !>
<\n>input.Seek(index<decisionNumber>_<stateNumber>);
<endif>
if (s >= 0) return s;
break;
>>

/** Just like a fixed DFA edge, test the lookahead and indicate what
 * state to jump to next if successful.
 */
cyclicDFAEdge(labelExpr, targetStateNumber, edgeNumber, predicates) ::= <<
if ((<labelExpr>)<if(predicates)> && (<predicates>)<endif>) {s = <targetStateNumber>;}<\n>
>>

/** An edge pointing at end-of-token; essentially matches any char;
 * always jump to the target.
 */
eotDFAEdge(targetStateNumber,edgeNumber, predicates) ::= <<
s = <targetStateNumber>;<\n>
>>
| |
| |
// D F A E X P R E S S I O N S
// Templates producing the boolean C# expressions used inside DFA edges.

andPredicates(left,right) ::= "(<left>&&<right>)"

orPredicates(operands) ::= "(<first(operands)><rest(operands):{o | ||<o>}>)"

notPredicate(pred) ::= "!(<evalPredicate(...)>)"

evalPredicate(pred,description) ::= "(<pred>)"

/** Syntactic predicates call a generated <pred>_fragment method. */
evalSynPredicate(pred,description) ::= "EvaluatePredicate(<pred>_fragment)"

lookaheadTest(atom,k,atomAsInt) ::= "LA<decisionNumber>_<stateNumber>==<atom>"

/** Sometimes a lookahead test cannot assume that LA(k) is in a temp variable
 * somewhere. Must ask for the lookahead directly.
 */
isolatedLookaheadTest(atom,k,atomAsInt) ::= "input.LA(<k>)==<atom>"

lookaheadRangeTest(lower,upper,k,rangeNumber,lowerAsInt,upperAsInt) ::= <%
(LA<decisionNumber>_<stateNumber><ge()><lower> && LA<decisionNumber>_<stateNumber><le()><upper>)
%>

isolatedLookaheadRangeTest(lower,upper,k,rangeNumber,lowerAsInt,upperAsInt) ::= "(input.LA(<k>)<ge()><lower> && input.LA(<k>)<le()><upper>)"

// \<= must be escaped because \< starts a template expression
le() ::= "\<="
ge() ::= ">="

setTest(ranges) ::= <<
<ranges; separator="||">
>>
| |
// A T T R I B U T E S

/** Generate a dynamic-scope class plus its init/after hooks and the
 *  stack instance used to push/pop scope frames at rule entry/exit.
 *  The init/after hooks are virtual no-ops unless the grammar supplies
 *  scopeinit/scopeafter actions. */
attributeScope(scope) ::= <<
<if(scope.attributes)>
protected sealed partial class <scope.name>_scope
{
    <scope.attributes:{it|public <it.decl>;}; separator="\n">
}
<if(scope.actions.scopeinit)>
protected void <scope.name>_scopeInit( <scope.name>_scope scope )
{
    <scope.actions.scopeinit>
}
<else>
protected virtual void <scope.name>_scopeInit( <scope.name>_scope scope ) {}
<endif>
<if(scope.actions.scopeafter)>
protected void <scope.name>_scopeAfter( <scope.name>_scope scope )
{
    <scope.actions.scopeafter>
}
<else>
protected virtual void <scope.name>_scopeAfter( <scope.name>_scope scope ) {}
<endif>
protected readonly ListStack\<<scope.name>_scope\> <scope.name>_stack = new ListStack\<<scope.name>_scope\>();
<endif>
>>

/** A grammar-level (global) dynamic scope is rendered identically. */
globalAttributeScope(scope) ::= <<
<attributeScope(...)>
>>

/** A rule-level dynamic scope is rendered identically. */
ruleAttributeScope(scope) ::= <<
<attributeScope(...)>
>>
| |
/** Name of the generated return struct for a rule: ruleName_return. */
returnStructName(it) ::= "<it.name>_return"

/** The declared C# return type of a rule method: the rule's return
 *  struct, the generic rule-return base, a single value's type, or void. */
returnType(ruleDescriptor) ::= <%
<if(ruleDescriptor.returnScope.attributes && ruleDescriptor.hasMultipleReturnValues)>
<ruleDescriptor.grammar.recognizerName>.<ruleDescriptor:returnStructName()>
<elseif(ruleDescriptor.hasMultipleReturnValues)>
<ruleReturnBaseType()>
<elseif(ruleDescriptor.hasSingleReturnValue)>
<ruleDescriptor.singleValueReturnType>
<else>
void
<endif>
%>

/** Generate the C# type associated with a single or multiple return
 * values.
 */
ruleLabelType(referencedRule) ::= <%
<if(referencedRule.returnScope.attributes&&referencedRule.hasMultipleReturnValues)>
<referencedRule.grammar.recognizerName>.<referencedRule:returnStructName()>
<elseif(referencedRule.hasMultipleReturnValues)>
<ruleReturnBaseType()>
<elseif(referencedRule.hasSingleReturnValue)>
<referencedRule.singleValueReturnType>
<else>
void
<endif>
%>

/** Field name for a delegate grammar: its label if given, else gName. */
delegateName(it) ::= <<
<if(it.label)><it.label><else>g<it.name><endif>
>>

/** Using a type to init value map, try to init a type; if not in table
 * must be an object, default value is "null".
 */
initValue(typeName) ::= <<
default(<typeName>)
>>
| |
/** Define a rule label including default value */
ruleLabelDef(label) ::= <%
<ruleLabelType(referencedRule=label.referencedRule)> <label.label.text> = <initValue(typeName=ruleLabelType(referencedRule=label.referencedRule))>;
%>

/** Define a return struct for a rule if the code needs to access its
 * start/stop tokens, tree stuff, attributes, ... Leave a hole for
 * subgroups to stick in members.
 */
returnScope(scope) ::= <<
<if(scope.attributes && ruleDescriptor.hasMultipleReturnValues)>
<returnScopeModifier(grammar=grammar,ruleDescriptor=ruleDescriptor)> sealed partial class <ruleDescriptor:returnStructName()> : <ruleReturnBaseType()><@ruleReturnInterfaces()>
{
    <scope.attributes:{it|public <it.decl>;}; separator="\n">
    <@ruleReturnMembers()>
}
<endif>
>>

/** Base class for rule return structs: Tree- or Parser-RuleReturnScope. */
ruleReturnBaseType() ::= <%
<if(TREE_PARSER)>Tree<else>Parser<endif>RuleReturnScope\<<labelType>>
%>

/** Hook for subgroups to add members to generated return structs. */
@returnScope.ruleReturnMembers() ::= <<
>>

/** Render a rule's declared parameters as a C# parameter list. */
parameterScope(scope) ::= <<
<scope.attributes:{it|<it.decl>}; separator=", ">
>>

parameterAttributeRef(attr) ::= <<
<attr.name; format="id">
>>

parameterSetAttributeRef(attr,expr) ::= <<
<attr.name; format="id"> =<expr>;
>>

/** Read $scope::attr — negIndex counts from the top of the scope stack,
 *  index is absolute, otherwise the top frame is used. */
scopeAttributeRef(scope,attr,index,negIndex) ::= <%
<if(negIndex)>
<scope>_stack[<scope>_stack.Count - <negIndex> - 1].<attr.name; format="id">
<else>
<if(index)>
<scope>_stack[<index>].<attr.name; format="id">
<else>
<scope>_stack.Peek().<attr.name; format="id">
<endif>
<endif>
%>

/** Write $scope::attr = expr — same indexing rules as scopeAttributeRef. */
scopeSetAttributeRef(scope,attr,expr,index,negIndex) ::= <%
<if(negIndex)>
<scope>_stack[<scope>_stack.Count - <negIndex> - 1].<attr.name; format="id"> = <expr>;
<else>
<if(index)>
<scope>_stack[<index>].<attr.name; format="id"> = <expr>;
<else>
<scope>_stack.Peek().<attr.name; format="id"> = <expr>;
<endif>
<endif>
%>
| |
/** $x is either global scope or x is rule with dynamic scope; refers
 * to stack itself not top of stack. This is useful for predicates
 * like {$function.Count>0 && $function::name.Equals("foo")}?
 */
isolatedDynamicScopeRef(scope) ::= "<scope>_stack"

/** reference an attribute of rule; might only have single return value */
ruleLabelRef(referencedRule,scope,attr) ::= <%
<if(referencedRule.hasMultipleReturnValues)>
(<scope>!=null?<scope>.<attr.name; format="id">:<initValue(attr.type)>)
<else>
<scope>
<endif>
%>

/** Read $attr inside the defining rule: via retval for multi-value rules,
 *  as a plain local otherwise. */
returnAttributeRef(ruleDescriptor,attr) ::= <%
<if(ruleDescriptor.hasMultipleReturnValues)>
retval.<attr.name; format="id">
<else>
<attr.name; format="id">
<endif>
%>

/** Write $attr = expr inside the defining rule. */
returnSetAttributeRef(ruleDescriptor,attr,expr) ::= <%
<if(ruleDescriptor.hasMultipleReturnValues)>
retval.<attr.name; format="id"> =<expr>;
<else>
<attr.name; format="id"> =<expr>;
<endif>
%>

/** How to translate $tokenLabel */
tokenLabelRef(label) ::= "<label>"

/** ids+=ID {$ids} or e+=expr {$e} */
listLabelRef(label) ::= "list_<label>"
| |
| |
// not sure the next are the right approach
// $label.text/.type/... on token labels — all null-guarded with a
// type-appropriate fallback (null for strings, 0/-1 for ints).

tokenLabelPropertyRef_text(scope,attr) ::= "(<scope>!=null?<scope>.Text:null)"
tokenLabelPropertyRef_type(scope,attr) ::= "(<scope>!=null?<scope>.Type:0)"
tokenLabelPropertyRef_line(scope,attr) ::= "(<scope>!=null?<scope>.Line:0)"
tokenLabelPropertyRef_pos(scope,attr) ::= "(<scope>!=null?<scope>.CharPositionInLine:0)"
tokenLabelPropertyRef_channel(scope,attr) ::= "(<scope>!=null?<scope>.Channel:0)"
tokenLabelPropertyRef_index(scope,attr) ::= "(<scope>!=null?<scope>.TokenIndex:0)"
tokenLabelPropertyRef_tree(scope,attr) ::= "<scope>_tree"
tokenLabelPropertyRef_int(scope,attr) ::= "(<scope>!=null?int.Parse(<scope>.Text):0)"

// $label.start/.stop/... on rule labels
ruleLabelPropertyRef_start(scope,attr) ::= "(<scope>!=null?((<labelType>)<scope>.Start):default(<labelType>))"
ruleLabelPropertyRef_stop(scope,attr) ::= "(<scope>!=null?((<labelType>)<scope>.Stop):default(<labelType>))"
ruleLabelPropertyRef_tree(scope,attr) ::= "(<scope>!=null?((<ASTLabelType>)<scope>.Tree):default(<ASTLabelType>))"
ruleLabelPropertyRef_text(scope,attr) ::= <%
<if(TREE_PARSER)>
(<scope>!=null?(input.TokenStream.ToString(
    input.TreeAdaptor.GetTokenStartIndex(<scope>.Start),
    input.TreeAdaptor.GetTokenStopIndex(<scope>.Start))):null)
<else>
(<scope>!=null?input.ToString(<scope>.Start,<scope>.Stop):null)
<endif>
%>

ruleLabelPropertyRef_st(scope,attr) ::= "(<scope>!=null?<scope>.Template:null)"

/** Isolated $RULE ref ok in lexer as it's a Token */
lexerRuleLabel(label) ::= "<label>"

lexerRuleLabelPropertyRef_type(scope,attr) ::=
    "(<scope>!=null?<scope>.Type:0)"

lexerRuleLabelPropertyRef_line(scope,attr) ::=
    "(<scope>!=null?<scope>.Line:0)"

lexerRuleLabelPropertyRef_pos(scope,attr) ::=
    "(<scope>!=null?<scope>.CharPositionInLine:-1)"

lexerRuleLabelPropertyRef_channel(scope,attr) ::=
    "(<scope>!=null?<scope>.Channel:0)"

lexerRuleLabelPropertyRef_index(scope,attr) ::=
    "(<scope>!=null?<scope>.TokenIndex:0)"

lexerRuleLabelPropertyRef_text(scope,attr) ::=
    "(<scope>!=null?<scope>.Text:null)"

lexerRuleLabelPropertyRef_int(scope,attr) ::=
    "(<scope>!=null?int.Parse(<scope>.Text):0)"
| |
// Somebody may ref $template or $tree or $stop within a rule:
rulePropertyRef_start(scope,attr) ::= "retval.Start"
rulePropertyRef_stop(scope,attr) ::= "retval.Stop"
rulePropertyRef_tree(scope,attr) ::= "retval.Tree"
/** $text of the current rule: token-stream slice from the rule's start
 *  to the last consumed token (or start/stop node span in a tree parser). */
rulePropertyRef_text(scope,attr) ::= <%
<if(TREE_PARSER)>
input.TokenStream.ToString(
    input.TreeAdaptor.GetTokenStartIndex(retval.Start),
    input.TreeAdaptor.GetTokenStopIndex(retval.Start))
<else>
input.ToString(retval.Start,input.LT(-1))
<endif>
%>
rulePropertyRef_st(scope,attr) ::= "retval.Template"

// $text/$type/... inside a lexer rule map onto lexer state fields
lexerRulePropertyRef_text(scope,attr) ::= "Text"
lexerRulePropertyRef_type(scope,attr) ::= "_type"
lexerRulePropertyRef_line(scope,attr) ::= "state.tokenStartLine"
lexerRulePropertyRef_pos(scope,attr) ::= "state.tokenStartCharPositionInLine"
lexerRulePropertyRef_index(scope,attr) ::= "-1" // undefined token index in lexer
lexerRulePropertyRef_channel(scope,attr) ::= "_channel"
lexerRulePropertyRef_start(scope,attr) ::= "state.tokenStartCharIndex"
lexerRulePropertyRef_stop(scope,attr) ::= "(CharIndex-1)"
lexerRulePropertyRef_int(scope,attr) ::= "int.Parse(<scope>.Text)"

// setting $st and $tree is allowed in local rule. everything else
// is flagged as error
ruleSetPropertyRef_tree(scope,attr,expr) ::= "retval.Tree = <expr>;"
ruleSetPropertyRef_st(scope,attr,expr) ::= "retval.Template =<expr>;"
| |
/** How to execute an action (only when not backtracking); when the
 *  grammar backtracks, the action is gated behind the synpred guard. */
execAction(action) ::= <%
<if(backtracking)>
if (<actions.(actionScope).synpredgate>)<\n>
{<\n>
    <@indentedAction()><\n>
}
<else>
<action>
<endif>
%>

/** Hook so subgroups can re-indent or wrap the gated action body. */
@execAction.indentedAction() ::= <<
<action>
>>

/** How to always execute an action even when backtracking */
execForcedAction(action) ::= "<action>"

// M I S C (properties, etc...)

/** A follow-set bitset constant for error recovery (64-bit words). */
bitset(name, words64) ::= <<
public static readonly BitSet <name> = new BitSet(new ulong[]{<words64:{it|<it>UL};separator=",">});
>>

codeFileExtension() ::= ".cs"

true_value() ::= "true"
false_value() ::= "false"