Commit 59bf694e authored by Kateřina Sloupová

it ANTLRs and parses (DFA)

parent 7868636c
Pipeline #54589 failed in 21 seconds
DFA_grammar.g4
grammar DFA_grammar;
/* Parser Rules */
start: init production* final;
init: INIT EQUALS statename;
production: LEFT_PARENTHESIS statename COMMA statename RIGHT_PARENTHESIS EQUALS statename;
final: FINAL EQUALS LEFT_BRACKET (statename (COMMA statename)* | ) RIGHT_BRACKET;
statename: STATE;
/* Lexer Rules */
/* Tokens */
INIT : 'init';
EQUALS : '=';
LEFT_PARENTHESIS : '(';
RIGHT_PARENTHESIS : ')';
LEFT_BRACKET : '{';
RIGHT_BRACKET : '}';
COMMA : ',';
FINAL : 'final';
STATE : [a-zA-Z0-9]+;
/* Characters to be ignored */
WS : [ \r\t\n]+ -> skip ;
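For illustration only (not part of the commit), a DFA description in the format this grammar is meant to accept could look like the following, with made-up state names q0, q1 and input symbols 0, 1:

init = q0
(q0, 0) = q0
(q0, 1) = q1
(q1, 0) = q1
(q1, 1) = q0
final = {q1}

Since the final rule allows an empty alternative between the braces, final = {} is accepted as well.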
DFA_grammar.interp
token literal names:
null
'init'
'='
'('
')'
'{'
'}'
','
'final'
null
null
token symbolic names:
null
INIT
EQUALS
LEFT_PARENTHESIS
RIGHT_PARENTHESIS
LEFT_BRACKET
RIGHT_BRACKET
COMMA
FINAL
STATE
WS
rule names:
start
init
production
final
statename
atn:
[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 12, 52, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 3, 2, 3, 2, 7, 2, 15, 10, 2, 12, 2, 14, 2, 18, 11, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 7, 5, 40, 10, 5, 12, 5, 14, 5, 43, 11, 5, 3, 5, 5, 5, 46, 10, 5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 2, 2, 7, 2, 4, 6, 8, 10, 2, 2, 2, 49, 2, 12, 3, 2, 2, 2, 4, 21, 3, 2, 2, 2, 6, 25, 3, 2, 2, 2, 8, 33, 3, 2, 2, 2, 10, 49, 3, 2, 2, 2, 12, 16, 5, 4, 3, 2, 13, 15, 5, 6, 4, 2, 14, 13, 3, 2, 2, 2, 15, 18, 3, 2, 2, 2, 16, 14, 3, 2, 2, 2, 16, 17, 3, 2, 2, 2, 17, 19, 3, 2, 2, 2, 18, 16, 3, 2, 2, 2, 19, 20, 5, 8, 5, 2, 20, 3, 3, 2, 2, 2, 21, 22, 7, 3, 2, 2, 22, 23, 7, 4, 2, 2, 23, 24, 5, 10, 6, 2, 24, 5, 3, 2, 2, 2, 25, 26, 7, 5, 2, 2, 26, 27, 5, 10, 6, 2, 27, 28, 7, 9, 2, 2, 28, 29, 5, 10, 6, 2, 29, 30, 7, 6, 2, 2, 30, 31, 7, 4, 2, 2, 31, 32, 5, 10, 6, 2, 32, 7, 3, 2, 2, 2, 33, 34, 7, 10, 2, 2, 34, 35, 7, 4, 2, 2, 35, 45, 7, 7, 2, 2, 36, 41, 5, 10, 6, 2, 37, 38, 7, 9, 2, 2, 38, 40, 5, 10, 6, 2, 39, 37, 3, 2, 2, 2, 40, 43, 3, 2, 2, 2, 41, 39, 3, 2, 2, 2, 41, 42, 3, 2, 2, 2, 42, 46, 3, 2, 2, 2, 43, 41, 3, 2, 2, 2, 44, 46, 3, 2, 2, 2, 45, 36, 3, 2, 2, 2, 45, 44, 3, 2, 2, 2, 46, 47, 3, 2, 2, 2, 47, 48, 7, 8, 2, 2, 48, 9, 3, 2, 2, 2, 49, 50, 7, 11, 2, 2, 50, 11, 3, 2, 2, 2, 5, 16, 41, 45]
DFA_grammar.tokens
INIT=1
EQUALS=2
LEFT_PARENTHESIS=3
RIGHT_PARENTHESIS=4
LEFT_BRACKET=5
RIGHT_BRACKET=6
COMMA=7
FINAL=8
STATE=9
WS=10
'init'=1
'='=2
'('=3
')'=4
'{'=5
'}'=6
','=7
'final'=8
DFA_grammarLexer.interp
token literal names:
null
'init'
'='
'('
')'
'{'
'}'
','
'final'
null
null
token symbolic names:
null
INIT
EQUALS
LEFT_PARENTHESIS
RIGHT_PARENTHESIS
LEFT_BRACKET
RIGHT_BRACKET
COMMA
FINAL
STATE
WS
rule names:
INIT
EQUALS
LEFT_PARENTHESIS
RIGHT_PARENTHESIS
LEFT_BRACKET
RIGHT_BRACKET
COMMA
FINAL
STATE
WS
channel names:
DEFAULT_TOKEN_CHANNEL
HIDDEN
mode names:
DEFAULT_MODE
atn:
[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 12, 58, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 10, 6, 10, 48, 10, 10, 13, 10, 14, 10, 49, 3, 11, 6, 11, 53, 10, 11, 13, 11, 14, 11, 54, 3, 11, 3, 11, 2, 2, 12, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 3, 2, 4, 5, 2, 50, 59, 67, 92, 99, 124, 5, 2, 11, 12, 15, 15, 34, 34, 2, 59, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 3, 23, 3, 2, 2, 2, 5, 28, 3, 2, 2, 2, 7, 30, 3, 2, 2, 2, 9, 32, 3, 2, 2, 2, 11, 34, 3, 2, 2, 2, 13, 36, 3, 2, 2, 2, 15, 38, 3, 2, 2, 2, 17, 40, 3, 2, 2, 2, 19, 47, 3, 2, 2, 2, 21, 52, 3, 2, 2, 2, 23, 24, 7, 107, 2, 2, 24, 25, 7, 112, 2, 2, 25, 26, 7, 107, 2, 2, 26, 27, 7, 118, 2, 2, 27, 4, 3, 2, 2, 2, 28, 29, 7, 63, 2, 2, 29, 6, 3, 2, 2, 2, 30, 31, 7, 42, 2, 2, 31, 8, 3, 2, 2, 2, 32, 33, 7, 43, 2, 2, 33, 10, 3, 2, 2, 2, 34, 35, 7, 125, 2, 2, 35, 12, 3, 2, 2, 2, 36, 37, 7, 127, 2, 2, 37, 14, 3, 2, 2, 2, 38, 39, 7, 46, 2, 2, 39, 16, 3, 2, 2, 2, 40, 41, 7, 104, 2, 2, 41, 42, 7, 107, 2, 2, 42, 43, 7, 112, 2, 2, 43, 44, 7, 99, 2, 2, 44, 45, 7, 110, 2, 2, 45, 18, 3, 2, 2, 2, 46, 48, 9, 2, 2, 2, 47, 46, 3, 2, 2, 2, 48, 49, 3, 2, 2, 2, 49, 47, 3, 2, 2, 2, 49, 50, 3, 2, 2, 2, 50, 20, 3, 2, 2, 2, 51, 53, 9, 3, 2, 2, 52, 51, 3, 2, 2, 2, 53, 54, 3, 2, 2, 2, 54, 52, 3, 2, 2, 2, 54, 55, 3, 2, 2, 2, 55, 56, 3, 2, 2, 2, 56, 57, 8, 11, 2, 2, 57, 22, 3, 2, 2, 2, 5, 2, 49, 54, 3, 8, 2, 2]
DFA_grammarLexer.py
# Generated from DFA_grammar.g4 by ANTLR 4.8
from antlr4 import * # type: ignore
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\f")
buf.write(":\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\3\2\3\2\3\2\3\2\3\2")
buf.write("\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3")
buf.write("\t\3\t\3\t\3\t\3\t\3\n\6\n\60\n\n\r\n\16\n\61\3\13\6\13")
buf.write("\65\n\13\r\13\16\13\66\3\13\3\13\2\2\f\3\3\5\4\7\5\t\6")
buf.write("\13\7\r\b\17\t\21\n\23\13\25\f\3\2\4\5\2\62;C\\c|\5\2")
buf.write("\13\f\17\17\"\"\2;\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2")
buf.write("\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21")
buf.write("\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\3\27\3\2\2\2\5\34\3")
buf.write("\2\2\2\7\36\3\2\2\2\t \3\2\2\2\13\"\3\2\2\2\r$\3\2\2\2")
buf.write("\17&\3\2\2\2\21(\3\2\2\2\23/\3\2\2\2\25\64\3\2\2\2\27")
buf.write("\30\7k\2\2\30\31\7p\2\2\31\32\7k\2\2\32\33\7v\2\2\33\4")
buf.write("\3\2\2\2\34\35\7?\2\2\35\6\3\2\2\2\36\37\7*\2\2\37\b\3")
buf.write("\2\2\2 !\7+\2\2!\n\3\2\2\2\"#\7}\2\2#\f\3\2\2\2$%\7\177")
buf.write("\2\2%\16\3\2\2\2&\'\7.\2\2\'\20\3\2\2\2()\7h\2\2)*\7k")
buf.write("\2\2*+\7p\2\2+,\7c\2\2,-\7n\2\2-\22\3\2\2\2.\60\t\2\2")
buf.write("\2/.\3\2\2\2\60\61\3\2\2\2\61/\3\2\2\2\61\62\3\2\2\2\62")
buf.write("\24\3\2\2\2\63\65\t\3\2\2\64\63\3\2\2\2\65\66\3\2\2\2")
buf.write("\66\64\3\2\2\2\66\67\3\2\2\2\678\3\2\2\289\b\13\2\29\26")
buf.write("\3\2\2\2\5\2\61\66\3\b\2\2")
return buf.getvalue()
class DFA_grammarLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
INIT = 1
EQUALS = 2
LEFT_PARENTHESIS = 3
RIGHT_PARENTHESIS = 4
LEFT_BRACKET = 5
RIGHT_BRACKET = 6
COMMA = 7
FINAL = 8
STATE = 9
WS = 10
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'init'", "'='", "'('", "')'", "'{'", "'}'", "','", "'final'" ]
symbolicNames = [ "<INVALID>",
"INIT", "EQUALS", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_BRACKET",
"RIGHT_BRACKET", "COMMA", "FINAL", "STATE", "WS" ]
ruleNames = [ "INIT", "EQUALS", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS",
"LEFT_BRACKET", "RIGHT_BRACKET", "COMMA", "FINAL", "STATE",
"WS" ]
grammarFileName = "DFA_grammar.g4"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.8")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
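A quick way to sanity-check the generated lexer is to tokenize a small input. The sketch below is illustrative rather than part of the commit: it assumes the antlr4 Python runtime is installed and that DFA_grammarLexer is importable from the working directory; dump_tokens is a made-up helper.

from antlr4 import CommonTokenStream, InputStream, Token
from DFA_grammarLexer import DFA_grammarLexer

def dump_tokens(text):
    # Print the symbolic name and text of every token the lexer emits.
    lexer = DFA_grammarLexer(InputStream(text))
    stream = CommonTokenStream(lexer)
    stream.fill()  # tokenize the whole input up front
    for tok in stream.tokens:
        if tok.type == Token.EOF:
            continue  # skip the synthetic end-of-file token
        print(DFA_grammarLexer.symbolicNames[tok.type], repr(tok.text))

dump_tokens("init = q0")
# expected output: INIT 'init', EQUALS '=', STATE 'q0' (whitespace is skipped by WS)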
DFA_grammarLexer.tokens
INIT=1
EQUALS=2
LEFT_PARENTHESIS=3
RIGHT_PARENTHESIS=4
LEFT_BRACKET=5
RIGHT_BRACKET=6
COMMA=7
FINAL=8
STATE=9
WS=10
'init'=1
'='=2
'('=3
')'=4
'{'=5
'}'=6
','=7
'final'=8
DFA_grammarListener.py
# Generated from DFA_grammar.g4 by ANTLR 4.8
from antlr4 import * # type: ignore
if __name__ is not None and "." in __name__:
from .DFA_grammarParser import DFA_grammarParser
else:
from DFA_grammarParser import DFA_grammarParser
# This class defines a complete listener for a parse tree produced by DFA_grammarParser.
class DFA_grammarListener(ParseTreeListener):
# Enter a parse tree produced by DFA_grammarParser#start.
def enterStart(self, ctx:DFA_grammarParser.StartContext):
pass
# Exit a parse tree produced by DFA_grammarParser#start.
def exitStart(self, ctx:DFA_grammarParser.StartContext):
pass
# Enter a parse tree produced by DFA_grammarParser#init.
def enterInit(self, ctx:DFA_grammarParser.InitContext):
pass
# Exit a parse tree produced by DFA_grammarParser#init.
def exitInit(self, ctx:DFA_grammarParser.InitContext):
pass
# Enter a parse tree produced by DFA_grammarParser#production.
def enterProduction(self, ctx:DFA_grammarParser.ProductionContext):
pass
# Exit a parse tree produced by DFA_grammarParser#production.
def exitProduction(self, ctx:DFA_grammarParser.ProductionContext):
pass
# Enter a parse tree produced by DFA_grammarParser#final.
def enterFinal(self, ctx:DFA_grammarParser.FinalContext):
pass
# Exit a parse tree produced by DFA_grammarParser#final.
def exitFinal(self, ctx:DFA_grammarParser.FinalContext):
pass
# Enter a parse tree produced by DFA_grammarParser#statename.
def enterStatename(self, ctx:DFA_grammarParser.StatenameContext):
pass
# Exit a parse tree produced by DFA_grammarParser#statename.
def exitStatename(self, ctx:DFA_grammarParser.StatenameContext):
pass
del DFA_grammarParser
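Putting the generated pieces together, a parse can be driven roughly as follows. This is a hypothetical usage sketch, not repository code: the TransitionPrinter listener and dfa_text sample are made up, and the generated modules are assumed to be on the import path.

from antlr4 import CommonTokenStream, InputStream, ParseTreeWalker
from DFA_grammarLexer import DFA_grammarLexer
from DFA_grammarParser import DFA_grammarParser
from DFA_grammarListener import DFA_grammarListener

class TransitionPrinter(DFA_grammarListener):
    # Report the DFA components as the walker leaves each rule.
    def exitInit(self, ctx):
        print("initial state:", ctx.statename().getText())
    def exitProduction(self, ctx):
        src, symbol, dst = (s.getText() for s in ctx.statename())
        print("delta(%s, %s) = %s" % (src, symbol, dst))
    def exitFinal(self, ctx):
        print("final states:", [s.getText() for s in ctx.statename()])

dfa_text = """
init = q0
(q0, 0) = q0
(q0, 1) = q1
(q1, 0) = q1
(q1, 1) = q0
final = {q1}
"""

lexer = DFA_grammarLexer(InputStream(dfa_text))
parser = DFA_grammarParser(CommonTokenStream(lexer))
tree = parser.start()  # 'start' is the grammar's root rule
ParseTreeWalker.DEFAULT.walk(TransitionPrinter(), tree)

Error handling is omitted; on malformed input the default error listener reports to stderr and the parser attempts to recover.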
DFA_grammarParser.py
# Generated from DFA_grammar.g4 by ANTLR 4.8
# encoding: utf-8
from antlr4 import * # type: ignore
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\f")
buf.write("\64\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\3\2\3\2\7")
buf.write("\2\17\n\2\f\2\16\2\22\13\2\3\2\3\2\3\3\3\3\3\3\3\3\3\4")
buf.write("\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\7")
buf.write("\5(\n\5\f\5\16\5+\13\5\3\5\5\5.\n\5\3\5\3\5\3\6\3\6\3")
buf.write("\6\2\2\7\2\4\6\b\n\2\2\2\61\2\f\3\2\2\2\4\25\3\2\2\2\6")
buf.write("\31\3\2\2\2\b!\3\2\2\2\n\61\3\2\2\2\f\20\5\4\3\2\r\17")
buf.write("\5\6\4\2\16\r\3\2\2\2\17\22\3\2\2\2\20\16\3\2\2\2\20\21")
buf.write("\3\2\2\2\21\23\3\2\2\2\22\20\3\2\2\2\23\24\5\b\5\2\24")
buf.write("\3\3\2\2\2\25\26\7\3\2\2\26\27\7\4\2\2\27\30\5\n\6\2\30")
buf.write("\5\3\2\2\2\31\32\7\5\2\2\32\33\5\n\6\2\33\34\7\t\2\2\34")
buf.write("\35\5\n\6\2\35\36\7\6\2\2\36\37\7\4\2\2\37 \5\n\6\2 \7")
buf.write("\3\2\2\2!\"\7\n\2\2\"#\7\4\2\2#-\7\7\2\2$)\5\n\6\2%&\7")
buf.write("\t\2\2&(\5\n\6\2\'%\3\2\2\2(+\3\2\2\2)\'\3\2\2\2)*\3\2")
buf.write("\2\2*.\3\2\2\2+)\3\2\2\2,.\3\2\2\2-$\3\2\2\2-,\3\2\2\2")
buf.write("./\3\2\2\2/\60\7\b\2\2\60\t\3\2\2\2\61\62\7\13\2\2\62")
buf.write("\13\3\2\2\2\5\20)-")
return buf.getvalue()
class DFA_grammarParser ( Parser ):
grammarFileName = "DFA_grammar.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'init'", "'='", "'('", "')'", "'{'",
"'}'", "','", "'final'" ]
symbolicNames = [ "<INVALID>", "INIT", "EQUALS", "LEFT_PARENTHESIS",
"RIGHT_PARENTHESIS", "LEFT_BRACKET", "RIGHT_BRACKET",
"COMMA", "FINAL", "STATE", "WS" ]
RULE_start = 0
RULE_init = 1
RULE_production = 2
RULE_final = 3
RULE_statename = 4
ruleNames = [ "start", "init", "production", "final", "statename" ]
EOF = Token.EOF
INIT=1
EQUALS=2
LEFT_PARENTHESIS=3
RIGHT_PARENTHESIS=4
LEFT_BRACKET=5
RIGHT_BRACKET=6
COMMA=7
FINAL=8
STATE=9
WS=10
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.8")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class StartContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def init(self):
return self.getTypedRuleContext(DFA_grammarParser.InitContext,0)
def final(self):
return self.getTypedRuleContext(DFA_grammarParser.FinalContext,0)
def production(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(DFA_grammarParser.ProductionContext)
else:
return self.getTypedRuleContext(DFA_grammarParser.ProductionContext,i)
def getRuleIndex(self):
return DFA_grammarParser.RULE_start
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStart" ):
listener.enterStart(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStart" ):
listener.exitStart(self)
def start(self):
localctx = DFA_grammarParser.StartContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_start)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 10
self.init()
self.state = 14
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==DFA_grammarParser.LEFT_PARENTHESIS:
self.state = 11
self.production()
self.state = 16
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 17
self.final()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class InitContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def INIT(self):
return self.getToken(DFA_grammarParser.INIT, 0)
def EQUALS(self):
return self.getToken(DFA_grammarParser.EQUALS, 0)
def statename(self):
return self.getTypedRuleContext(DFA_grammarParser.StatenameContext,0)
def getRuleIndex(self):
return DFA_grammarParser.RULE_init
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInit" ):
listener.enterInit(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInit" ):
listener.exitInit(self)
def init(self):
localctx = DFA_grammarParser.InitContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_init)
try:
self.enterOuterAlt(localctx, 1)
self.state = 19
self.match(DFA_grammarParser.INIT)
self.state = 20
self.match(DFA_grammarParser.EQUALS)
self.state = 21
self.statename()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ProductionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def LEFT_PARENTHESIS(self):
return self.getToken(DFA_grammarParser.LEFT_PARENTHESIS, 0)
def statename(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(DFA_grammarParser.StatenameContext)
else:
return self.getTypedRuleContext(DFA_grammarParser.StatenameContext,i)
def COMMA(self):
return self.getToken(DFA_grammarParser.COMMA, 0)
def RIGHT_PARENTHESIS(self):
return self.getToken(DFA_grammarParser.RIGHT_PARENTHESIS, 0)
def EQUALS(self):
return self.getToken(DFA_grammarParser.EQUALS, 0)
def getRuleIndex(self):
return DFA_grammarParser.RULE_production
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterProduction" ):
listener.enterProduction(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitProduction" ):
listener.exitProduction(self)
def production(self):
localctx = DFA_grammarParser.ProductionContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_production)
try:
self.enterOuterAlt(localctx, 1)
self.state = 23
self.match(DFA_grammarParser.LEFT_PARENTHESIS)
self.state = 24
self.statename()
self.state = 25
self.match(DFA_grammarParser.COMMA)
self.state = 26
self.statename()
self.state = 27
self.match(DFA_grammarParser.RIGHT_PARENTHESIS)
self.state = 28
self.match(DFA_grammarParser.EQUALS)
self.state = 29
self.statename()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class FinalContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def FINAL(self):
return self.getToken(DFA_grammarParser.FINAL, 0)
def EQUALS(self):
return self.getToken(DFA_grammarParser.EQUALS, 0)
def LEFT_BRACKET(self):
return self.getToken(DFA_grammarParser.LEFT_BRACKET, 0)
def RIGHT_BRACKET(self):
return self.getToken(DFA_grammarParser.RIGHT_BRACKET, 0)
def statename(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(DFA_grammarParser.StatenameContext)
else:
return self.getTypedRuleContext(DFA_grammarParser.StatenameContext,i)
def COMMA(self, i:int=None):
if i is None:
return self.getTokens(DFA_grammarParser.COMMA)
else:
return self.getToken(DFA_grammarParser.COMMA, i)
def getRuleIndex(self):
return DFA_grammarParser.RULE_final
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFinal" ):
listener.enterFinal(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFinal" ):
listener.exitFinal(self)
def final(self):
localctx = DFA_grammarParser.FinalContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_final)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 31
self.match(DFA_grammarParser.FINAL)
self.state = 32
self.match(DFA_grammarParser.EQUALS)
self.state = 33
self.match(DFA_grammarParser.LEFT_BRACKET)
self.state = 43
self._errHandler.sync(self)
token = self.