Commit 9dcd6d8e authored by Kateřina Sloupová

add support for comment parsing to the grammars and parsers

parent 3e7393c3
......@@ -2,7 +2,7 @@ grammar CFG;
/* Parser Rules */
start: (onerule NEWLINE+)* onerule (NEWLINE+ | );
start: (onerule NEWLINE+)* onerule (NEWLINE+ | ) comment;
onerule: nonterminal ARROW (rewrite DELIMITER)* rewrite;
......@@ -14,6 +14,10 @@ nonterminal: (CAPS | (LEFT_ANGLE symbol+ RIGHT_ANGLE (APOSTROPHE*)) | (symbo
symbol: (TERMINAL | CAPS | UNDERSCORE);
comment: (HASH anyvalue* | );
anyvalue: LEFT_ANGLE | RIGHT_ANGLE | APOSTROPHE | UNDERSCORE | TERMINAL | CAPS | ARROW | EPSILON | DELIMITER | NEWLINE | ANYCHAR;
/* Lexer Rules */
/* Tokens */
......@@ -28,6 +32,9 @@ ARROW : ('→' | '->');
EPSILON : ('ε' | '\\''e');
DELIMITER : '|';
NEWLINE : ('\n' | ';' | ',');
HASH : '#';
/* Characters to be ignored */
WS : [ \r\t]+ -> skip ;
ANYCHAR : .;
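For illustration, a minimal sketch of exercising the new comment rule from Python (assuming the generated modules are importable as lib.parsing.CFGLexer / lib.parsing.CFGParser and that antlr4-python3-runtime is installed; the sample input also assumes the rewrite rule, which is not shown in this hunk, accepts it):

from antlr4 import InputStream, CommonTokenStream
# assumed import path; the generated header below points at lib/parsing/CFG.g4
from lib.parsing.CFGLexer import CFGLexer
from lib.parsing.CFGParser import CFGParser

# a single rule followed by a trailing '#' comment, which start now accepts
text = "S -> a S b | \\e\n# everything after the hash lexes as anyvalue tokens"

lexer = CFGLexer(InputStream(text))
parser = CFGParser(CommonTokenStream(lexer))
tree = parser.start()  # start: (onerule NEWLINE+)* onerule (NEWLINE+ | ) comment
print(tree.toStringTree(recog=parser))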
# Generated from CFG.g4 by ANTLR 4.8
# Generated from lib/parsing/CFG.g4 by ANTLR 4.8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
......@@ -8,28 +8,30 @@ import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\r")
buf.write(":\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\3\2\3\2\3\3")
buf.write("\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\b\5\b)")
buf.write("\n\b\3\t\3\t\3\t\5\t.\n\t\3\n\3\n\3\13\3\13\3\f\6\f\65")
buf.write("\n\f\r\f\16\f\66\3\f\3\f\2\2\r\3\3\5\4\7\5\t\6\13\7\r")
buf.write("\b\17\t\21\n\23\13\25\f\27\r\3\2\6\4\2\62;c|\3\2C\\\5")
buf.write("\2\f\f..==\5\2\13\13\17\17\"\"\2<\2\3\3\2\2\2\2\5\3\2")
buf.write("\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2")
buf.write("\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2")
buf.write("\27\3\2\2\2\3\31\3\2\2\2\5\33\3\2\2\2\7\35\3\2\2\2\t\37")
buf.write("\3\2\2\2\13!\3\2\2\2\r#\3\2\2\2\17(\3\2\2\2\21-\3\2\2")
buf.write("\2\23/\3\2\2\2\25\61\3\2\2\2\27\64\3\2\2\2\31\32\7>\2")
buf.write("\2\32\4\3\2\2\2\33\34\7@\2\2\34\6\3\2\2\2\35\36\7)\2\2")
buf.write("\36\b\3\2\2\2\37 \7a\2\2 \n\3\2\2\2!\"\t\2\2\2\"\f\3\2")
buf.write("\2\2#$\t\3\2\2$\16\3\2\2\2%)\7\u2194\2\2&\'\7/\2\2\')")
buf.write("\7@\2\2(%\3\2\2\2(&\3\2\2\2)\20\3\2\2\2*.\7\u03b7\2\2")
buf.write("+,\7^\2\2,.\7g\2\2-*\3\2\2\2-+\3\2\2\2.\22\3\2\2\2/\60")
buf.write("\7~\2\2\60\24\3\2\2\2\61\62\t\4\2\2\62\26\3\2\2\2\63\65")
buf.write("\t\5\2\2\64\63\3\2\2\2\65\66\3\2\2\2\66\64\3\2\2\2\66")
buf.write("\67\3\2\2\2\678\3\2\2\289\b\f\2\29\30\3\2\2\2\6\2(-\66")
buf.write("\3\b\2\2")
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\17")
buf.write("B\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7")
buf.write("\3\b\3\b\3\b\5\b-\n\b\3\t\3\t\3\t\5\t\62\n\t\3\n\3\n\3")
buf.write("\13\3\13\3\f\3\f\3\r\6\r;\n\r\r\r\16\r<\3\r\3\r\3\16\3")
buf.write("\16\2\2\17\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25")
buf.write("\f\27\r\31\16\33\17\3\2\6\4\2\62;c|\3\2C\\\5\2\f\f..=")
buf.write("=\5\2\13\13\17\17\"\"\2D\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3")
buf.write("\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2")
buf.write("\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2")
buf.write("\2\31\3\2\2\2\2\33\3\2\2\2\3\35\3\2\2\2\5\37\3\2\2\2\7")
buf.write("!\3\2\2\2\t#\3\2\2\2\13%\3\2\2\2\r\'\3\2\2\2\17,\3\2\2")
buf.write("\2\21\61\3\2\2\2\23\63\3\2\2\2\25\65\3\2\2\2\27\67\3\2")
buf.write("\2\2\31:\3\2\2\2\33@\3\2\2\2\35\36\7>\2\2\36\4\3\2\2\2")
buf.write("\37 \7@\2\2 \6\3\2\2\2!\"\7)\2\2\"\b\3\2\2\2#$\7a\2\2")
buf.write("$\n\3\2\2\2%&\t\2\2\2&\f\3\2\2\2\'(\t\3\2\2(\16\3\2\2")
buf.write("\2)-\7\u2194\2\2*+\7/\2\2+-\7@\2\2,)\3\2\2\2,*\3\2\2\2")
buf.write("-\20\3\2\2\2.\62\7\u03b7\2\2/\60\7^\2\2\60\62\7g\2\2\61")
buf.write(".\3\2\2\2\61/\3\2\2\2\62\22\3\2\2\2\63\64\7~\2\2\64\24")
buf.write("\3\2\2\2\65\66\t\4\2\2\66\26\3\2\2\2\678\7%\2\28\30\3")
buf.write("\2\2\29;\t\5\2\2:9\3\2\2\2;<\3\2\2\2<:\3\2\2\2<=\3\2\2")
buf.write("\2=>\3\2\2\2>?\b\r\2\2?\32\3\2\2\2@A\13\2\2\2A\34\3\2")
buf.write("\2\2\6\2,\61<\3\b\2\2")
return buf.getvalue()
......@@ -49,22 +51,25 @@ class CFGLexer(Lexer):
EPSILON = 8
DELIMITER = 9
NEWLINE = 10
WS = 11
HASH = 11
WS = 12
ANYCHAR = 13
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'<'", "'>'", "'''", "'_'", "'|'" ]
"'<'", "'>'", "'''", "'_'", "'|'", "'#'" ]
symbolicNames = [ "<INVALID>",
"LEFT_ANGLE", "RIGHT_ANGLE", "APOSTROPHE", "UNDERSCORE", "TERMINAL",
"CAPS", "ARROW", "EPSILON", "DELIMITER", "NEWLINE", "WS" ]
"CAPS", "ARROW", "EPSILON", "DELIMITER", "NEWLINE", "HASH",
"WS", "ANYCHAR" ]
ruleNames = [ "LEFT_ANGLE", "RIGHT_ANGLE", "APOSTROPHE", "UNDERSCORE",
"TERMINAL", "CAPS", "ARROW", "EPSILON", "DELIMITER", "NEWLINE",
"WS" ]
"HASH", "WS", "ANYCHAR" ]
grammarFileName = "CFG.g4"
......
# Generated from CFG.g4 by ANTLR 4.8
# Generated from lib/parsing/CFG.g4 by ANTLR 4.8
from antlr4 import *
if __name__ is not None and "." in __name__:
from .CFGParser import CFGParser
......@@ -62,5 +62,23 @@ class CFGListener(ParseTreeListener):
pass
# Enter a parse tree produced by CFGParser#comment.
def enterComment(self, ctx:CFGParser.CommentContext):
pass
# Exit a parse tree produced by CFGParser#comment.
def exitComment(self, ctx:CFGParser.CommentContext):
pass
# Enter a parse tree produced by CFGParser#anyvalue.
def enterAnyvalue(self, ctx:CFGParser.AnyvalueContext):
pass
# Exit a parse tree produced by CFGParser#anyvalue.
def exitAnyvalue(self, ctx:CFGParser.AnyvalueContext):
pass
del CFGParser
\ No newline at end of file
......@@ -2,7 +2,7 @@ grammar DFA;
/* Parser Rules */
start: init production* final;
start: init production* final comment;
init: (INIT EQUALS statename | );
......@@ -12,6 +12,10 @@ final: FINAL EQUALS LEFT_BRACKET (statename (COMMA statename)* | ) RIGHT_B
statename: STATE;
comment: (HASH anyvalue* | );
anyvalue: INIT | EQUALS | LEFT_PARENTHESIS | RIGHT_PARENTHESIS | LEFT_BRACKET | RIGHT_BRACKET | COMMA | FINAL | STATE | ANYCHAR;
/* Lexer Rules */
/* Tokens */
......@@ -24,7 +28,10 @@ LEFT_BRACKET : '{';
RIGHT_BRACKET : '}';
COMMA : ',';
FINAL : 'final';
STATE : ([a-zA-Z0-9] | '_' | '\'')+;
STATE : ([a-zA-Z0-9] | '_' | '\'' | '<' | '>')+;
HASH : '#';
/* Characters to be ignored */
WS : [ \r\t\n]+ -> skip ;
ANYCHAR : .;
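Likewise for the automaton syntax, a sketch of a DFA description that uses the widened STATE token (which now admits '<' and '>') and ends with a trailing comment; the module path is the same assumption as in the CFG example:

from antlr4 import InputStream, CommonTokenStream
# assumed import path, mirroring the CFG sketch above
from lib.parsing.DFALexer import DFALexer
from lib.parsing.DFAParser import DFAParser

# state names may now contain '<' and '>', and a final '#' comment is tolerated
text = (
    "init=<q0>\n"
    "(<q0>,a)=<q1>\n"
    "final={<q1>}\n"
    "# this trailing note is parsed by the new comment rule"
)

lexer = DFALexer(InputStream(text))
parser = DFAParser(CommonTokenStream(lexer))
tree = parser.start()  # start: init production* final comment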
# Generated from DFA.g4 by ANTLR 4.8
# Generated from lib/parsing/DFA.g4 by ANTLR 4.8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
......@@ -8,27 +8,29 @@ import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\f")
buf.write(":\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\3\2\3\2\3\2\3\2\3\2")
buf.write("\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3")
buf.write("\t\3\t\3\t\3\t\3\t\3\n\6\n\60\n\n\r\n\16\n\61\3\13\6\13")
buf.write("\65\n\13\r\13\16\13\66\3\13\3\13\2\2\f\3\3\5\4\7\5\t\6")
buf.write("\13\7\r\b\17\t\21\n\23\13\25\f\3\2\4\7\2))\62;C\\aac|")
buf.write("\5\2\13\f\17\17\"\"\2;\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2")
buf.write("\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2")
buf.write("\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\3\27\3\2\2\2\5")
buf.write("\34\3\2\2\2\7\36\3\2\2\2\t \3\2\2\2\13\"\3\2\2\2\r$\3")
buf.write("\2\2\2\17&\3\2\2\2\21(\3\2\2\2\23/\3\2\2\2\25\64\3\2\2")
buf.write("\2\27\30\7k\2\2\30\31\7p\2\2\31\32\7k\2\2\32\33\7v\2\2")
buf.write("\33\4\3\2\2\2\34\35\7?\2\2\35\6\3\2\2\2\36\37\7*\2\2\37")
buf.write("\b\3\2\2\2 !\7+\2\2!\n\3\2\2\2\"#\7}\2\2#\f\3\2\2\2$%")
buf.write("\7\177\2\2%\16\3\2\2\2&\'\7.\2\2\'\20\3\2\2\2()\7h\2\2")
buf.write(")*\7k\2\2*+\7p\2\2+,\7c\2\2,-\7n\2\2-\22\3\2\2\2.\60\t")
buf.write("\2\2\2/.\3\2\2\2\60\61\3\2\2\2\61/\3\2\2\2\61\62\3\2\2")
buf.write("\2\62\24\3\2\2\2\63\65\t\3\2\2\64\63\3\2\2\2\65\66\3\2")
buf.write("\2\2\66\64\3\2\2\2\66\67\3\2\2\2\678\3\2\2\289\b\13\2")
buf.write("\29\26\3\2\2\2\6\2/\61\66\3\b\2\2")
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\16")
buf.write("B\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\3\2")
buf.write("\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3")
buf.write("\7\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\n\6\n\64\n\n\r\n")
buf.write("\16\n\65\3\13\3\13\3\f\6\f;\n\f\r\f\16\f<\3\f\3\f\3\r")
buf.write("\3\r\2\2\16\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25")
buf.write("\f\27\r\31\16\3\2\4\t\2))\62;>>@@C\\aac|\5\2\13\f\17\17")
buf.write("\"\"\2C\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2")
buf.write("\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2")
buf.write("\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\3")
buf.write("\33\3\2\2\2\5 \3\2\2\2\7\"\3\2\2\2\t$\3\2\2\2\13&\3\2")
buf.write("\2\2\r(\3\2\2\2\17*\3\2\2\2\21,\3\2\2\2\23\63\3\2\2\2")
buf.write("\25\67\3\2\2\2\27:\3\2\2\2\31@\3\2\2\2\33\34\7k\2\2\34")
buf.write("\35\7p\2\2\35\36\7k\2\2\36\37\7v\2\2\37\4\3\2\2\2 !\7")
buf.write("?\2\2!\6\3\2\2\2\"#\7*\2\2#\b\3\2\2\2$%\7+\2\2%\n\3\2")
buf.write("\2\2&\'\7}\2\2\'\f\3\2\2\2()\7\177\2\2)\16\3\2\2\2*+\7")
buf.write(".\2\2+\20\3\2\2\2,-\7h\2\2-.\7k\2\2./\7p\2\2/\60\7c\2")
buf.write("\2\60\61\7n\2\2\61\22\3\2\2\2\62\64\t\2\2\2\63\62\3\2")
buf.write("\2\2\64\65\3\2\2\2\65\63\3\2\2\2\65\66\3\2\2\2\66\24\3")
buf.write("\2\2\2\678\7%\2\28\26\3\2\2\29;\t\3\2\2:9\3\2\2\2;<\3")
buf.write("\2\2\2<:\3\2\2\2<=\3\2\2\2=>\3\2\2\2>?\b\f\2\2?\30\3\2")
buf.write("\2\2@A\13\2\2\2A\32\3\2\2\2\6\2\63\65<\3\b\2\2")
return buf.getvalue()
......@@ -47,22 +49,25 @@ class DFALexer(Lexer):
COMMA = 7
FINAL = 8
STATE = 9
WS = 10
HASH = 10
WS = 11
ANYCHAR = 12
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'init'", "'='", "'('", "')'", "'{'", "'}'", "','", "'final'" ]
"'init'", "'='", "'('", "')'", "'{'", "'}'", "','", "'final'",
"'#'" ]
symbolicNames = [ "<INVALID>",
"INIT", "EQUALS", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_BRACKET",
"RIGHT_BRACKET", "COMMA", "FINAL", "STATE", "WS" ]
"RIGHT_BRACKET", "COMMA", "FINAL", "STATE", "HASH", "WS", "ANYCHAR" ]
ruleNames = [ "INIT", "EQUALS", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS",
"LEFT_BRACKET", "RIGHT_BRACKET", "COMMA", "FINAL", "STATE",
"WS" ]
"HASH", "WS", "ANYCHAR" ]
grammarFileName = "DFA.g4"
......
# Generated from DFA.g4 by ANTLR 4.8
# Generated from lib/parsing/DFA.g4 by ANTLR 4.8
from antlr4 import *
if __name__ is not None and "." in __name__:
from .DFAParser import DFAParser
......@@ -53,5 +53,23 @@ class DFAListener(ParseTreeListener):
pass
# Enter a parse tree produced by DFAParser#comment.
def enterComment(self, ctx:DFAParser.CommentContext):
pass
# Exit a parse tree produced by DFAParser#comment.
def exitComment(self, ctx:DFAParser.CommentContext):
pass
# Enter a parse tree produced by DFAParser#anyvalue.
def enterAnyvalue(self, ctx:DFAParser.AnyvalueContext):
pass
# Exit a parse tree produced by DFAParser#anyvalue.
def exitAnyvalue(self, ctx:DFAParser.AnyvalueContext):
pass
del DFAParser
\ No newline at end of file
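As a usage sketch on top of the regenerated listener, a subclass can pick the trailing comment back up through the new enterComment hook (import paths assumed as in the earlier sketches; getText() returns the matched tokens with skipped whitespace removed):

from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker
# assumed import paths, as in the earlier sketches
from lib.parsing.DFALexer import DFALexer
from lib.parsing.DFAParser import DFAParser
from lib.parsing.DFAListener import DFAListener

class CommentCollector(DFAListener):
    """Collects the optional trailing comment, if one was written."""
    def __init__(self):
        self.comment = None

    def enterComment(self, ctx: DFAParser.CommentContext):
        if ctx.HASH() is not None:  # the empty alternative produces no HASH token
            self.comment = ctx.getText()

tree = DFAParser(CommonTokenStream(DFALexer(InputStream("final={}\n# reviewed")))).start()
collector = CommentCollector()
ParseTreeWalker().walk(collector, tree)
print(collector.comment)  # '#reviewed' (whitespace sits on the skip channel)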
# Generated from DFA.g4 by ANTLR 4.8
# Generated from lib/parsing/DFA.g4 by ANTLR 4.8
# encoding: utf-8
from antlr4 import *
from io import StringIO
......@@ -11,24 +11,29 @@ else:
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\f")
buf.write("\66\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\3\2\3\2\7")
buf.write("\2\17\n\2\f\2\16\2\22\13\2\3\2\3\2\3\3\3\3\3\3\3\3\5\3")
buf.write("\32\n\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3")
buf.write("\5\3\5\3\5\7\5*\n\5\f\5\16\5-\13\5\3\5\5\5\60\n\5\3\5")
buf.write("\3\5\3\6\3\6\3\6\2\2\7\2\4\6\b\n\2\2\2\64\2\f\3\2\2\2")
buf.write("\4\31\3\2\2\2\6\33\3\2\2\2\b#\3\2\2\2\n\63\3\2\2\2\f\20")
buf.write("\5\4\3\2\r\17\5\6\4\2\16\r\3\2\2\2\17\22\3\2\2\2\20\16")
buf.write("\3\2\2\2\20\21\3\2\2\2\21\23\3\2\2\2\22\20\3\2\2\2\23")
buf.write("\24\5\b\5\2\24\3\3\2\2\2\25\26\7\3\2\2\26\27\7\4\2\2\27")
buf.write("\32\5\n\6\2\30\32\3\2\2\2\31\25\3\2\2\2\31\30\3\2\2\2")
buf.write("\32\5\3\2\2\2\33\34\7\5\2\2\34\35\5\n\6\2\35\36\7\t\2")
buf.write("\2\36\37\5\n\6\2\37 \7\6\2\2 !\7\4\2\2!\"\5\n\6\2\"\7")
buf.write("\3\2\2\2#$\7\n\2\2$%\7\4\2\2%/\7\7\2\2&+\5\n\6\2\'(\7")
buf.write("\t\2\2(*\5\n\6\2)\'\3\2\2\2*-\3\2\2\2+)\3\2\2\2+,\3\2")
buf.write("\2\2,\60\3\2\2\2-+\3\2\2\2.\60\3\2\2\2/&\3\2\2\2/.\3\2")
buf.write("\2\2\60\61\3\2\2\2\61\62\7\b\2\2\62\t\3\2\2\2\63\64\7")
buf.write("\13\2\2\64\13\3\2\2\2\6\20\31+/")
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\16")
buf.write("G\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b")
buf.write("\t\b\3\2\3\2\7\2\23\n\2\f\2\16\2\26\13\2\3\2\3\2\3\2\3")
buf.write("\3\3\3\3\3\3\3\5\3\37\n\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4")
buf.write("\3\4\3\5\3\5\3\5\3\5\3\5\3\5\7\5/\n\5\f\5\16\5\62\13\5")
buf.write("\3\5\5\5\65\n\5\3\5\3\5\3\6\3\6\3\7\3\7\7\7=\n\7\f\7\16")
buf.write("\7@\13\7\3\7\5\7C\n\7\3\b\3\b\3\b\2\2\t\2\4\6\b\n\f\16")
buf.write("\2\3\4\2\3\13\16\16\2E\2\20\3\2\2\2\4\36\3\2\2\2\6 \3")
buf.write("\2\2\2\b(\3\2\2\2\n8\3\2\2\2\fB\3\2\2\2\16D\3\2\2\2\20")
buf.write("\24\5\4\3\2\21\23\5\6\4\2\22\21\3\2\2\2\23\26\3\2\2\2")
buf.write("\24\22\3\2\2\2\24\25\3\2\2\2\25\27\3\2\2\2\26\24\3\2\2")
buf.write("\2\27\30\5\b\5\2\30\31\5\f\7\2\31\3\3\2\2\2\32\33\7\3")
buf.write("\2\2\33\34\7\4\2\2\34\37\5\n\6\2\35\37\3\2\2\2\36\32\3")
buf.write("\2\2\2\36\35\3\2\2\2\37\5\3\2\2\2 !\7\5\2\2!\"\5\n\6\2")
buf.write("\"#\7\t\2\2#$\5\n\6\2$%\7\6\2\2%&\7\4\2\2&\'\5\n\6\2\'")
buf.write("\7\3\2\2\2()\7\n\2\2)*\7\4\2\2*\64\7\7\2\2+\60\5\n\6\2")
buf.write(",-\7\t\2\2-/\5\n\6\2.,\3\2\2\2/\62\3\2\2\2\60.\3\2\2\2")
buf.write("\60\61\3\2\2\2\61\65\3\2\2\2\62\60\3\2\2\2\63\65\3\2\2")
buf.write("\2\64+\3\2\2\2\64\63\3\2\2\2\65\66\3\2\2\2\66\67\7\b\2")
buf.write("\2\67\t\3\2\2\289\7\13\2\29\13\3\2\2\2:>\7\f\2\2;=\5\16")
buf.write("\b\2<;\3\2\2\2=@\3\2\2\2><\3\2\2\2>?\3\2\2\2?C\3\2\2\2")
buf.write("@>\3\2\2\2AC\3\2\2\2B:\3\2\2\2BA\3\2\2\2C\r\3\2\2\2DE")
buf.write("\t\2\2\2E\17\3\2\2\2\b\24\36\60\64>B")
return buf.getvalue()
......@@ -43,19 +48,22 @@ class DFAParser ( Parser ):
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'init'", "'='", "'('", "')'", "'{'",
"'}'", "','", "'final'" ]
"'}'", "','", "'final'", "<INVALID>", "'#'" ]
symbolicNames = [ "<INVALID>", "INIT", "EQUALS", "LEFT_PARENTHESIS",
"RIGHT_PARENTHESIS", "LEFT_BRACKET", "RIGHT_BRACKET",
"COMMA", "FINAL", "STATE", "WS" ]
"COMMA", "FINAL", "STATE", "HASH", "WS", "ANYCHAR" ]
RULE_start = 0
RULE_init = 1
RULE_production = 2
RULE_final = 3
RULE_statename = 4
RULE_comment = 5
RULE_anyvalue = 6
ruleNames = [ "start", "init", "production", "final", "statename" ]
ruleNames = [ "start", "init", "production", "final", "statename",
"comment", "anyvalue" ]
EOF = Token.EOF
INIT=1
......@@ -67,7 +75,9 @@ class DFAParser ( Parser ):
COMMA=7
FINAL=8
STATE=9
WS=10
HASH=10
WS=11
ANYCHAR=12
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
......@@ -92,6 +102,10 @@ class DFAParser ( Parser ):
return self.getTypedRuleContext(DFAParser.FinalContext,0)
def comment(self):
return self.getTypedRuleContext(DFAParser.CommentContext,0)
def production(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(DFAParser.ProductionContext)
......@@ -120,20 +134,22 @@ class DFAParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 10
self.init()
self.state = 14
self.init()
self.state = 18
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==DFAParser.LEFT_PARENTHESIS:
self.state = 11
self.state = 15
self.production()
self.state = 16
self.state = 20
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 17
self.state = 21
self.final()
self.state = 22
self.comment()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
......@@ -179,15 +195,15 @@ class DFAParser ( Parser ):
self.enterRule(localctx, 2, self.RULE_init)
try:
self.enterOuterAlt(localctx, 1)
self.state = 23
self.state = 28
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [DFAParser.INIT]:
self.state = 19
self.state = 24
self.match(DFAParser.INIT)
self.state = 20
self.state = 25
self.match(DFAParser.EQUALS)
self.state = 21
self.state = 26
self.statename()
pass
elif token in [DFAParser.LEFT_PARENTHESIS, DFAParser.FINAL]:
......@@ -249,19 +265,19 @@ class DFAParser ( Parser ):
self.enterRule(localctx, 4, self.RULE_production)
try:
self.enterOuterAlt(localctx, 1)
self.state = 25
self.state = 30
self.match(DFAParser.LEFT_PARENTHESIS)
self.state = 26
self.state = 31
self.statename()
self.state = 27
self.state = 32
self.match(DFAParser.COMMA)
self.state = 28
self.state = 33
self.statename()
self.state = 29
self.state = 34
self.match(DFAParser.RIGHT_PARENTHESIS)
self.state = 30
self.state = 35
self.match(DFAParser.EQUALS)
self.state = 31
self.state = 36
self.statename()
except RecognitionException as re:
localctx.exception = re
......@@ -324,27 +340,27 @@ class DFAParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 33
self.state = 38
self.match(DFAParser.FINAL)
self.state = 34
self.state = 39
self.match(DFAParser.EQUALS)
self.state = 35
self.state = 40
self.match(DFAParser.LEFT_BRACKET)
self.state = 45
self.state = 50
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [DFAParser.STATE]:
self.state = 36
self.statename()
self.state = 41
self.statename()
self.state = 46
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==DFAParser.COMMA:
self.state = 37
self.state = 42
self.match(DFAParser.COMMA)
self.state = 38
self.statename()
self.state = 43
self.statename()
self.state = 48
self._errHandler.sync(self)
_la = self._input.LA(1)
......@@ -354,7 +370,7 @@ class DFAParser ( Parser ):
else:
raise NoViableAltException(self)
self.state = 47
self.state = 52
self.match(DFAParser.RIGHT_BRACKET)
except RecognitionException as re:
localctx.exception = re
......@@ -394,7 +410,7 @@ class DFAParser ( Parser ):
self.enterRule(localctx, 8, self.RULE_statename)
try:
self.enterOuterAlt(localctx, 1)
self.state = 49
self.state = 54
self.match(DFAParser.STATE)
except RecognitionException as re:
localctx.exception = re
......@@ -405,6 +421,147 @@ class DFAParser ( Parser ):
return localctx
class CommentContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def HASH(self):
return self.getToken(DFAParser.HASH, 0)
def anyvalue(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(DFAParser.AnyvalueContext)
else:
return self.getTypedRuleContext(DFAParser.AnyvalueContext,i)
def getRuleIndex(self):
return DFAParser.RULE_comment
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterComment" ):
listener.enterComment(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitComment" ):
listener.exitComment(self)
def comment(self):
localctx = DFAParser.CommentContext(self, self._ctx, self.state)
self.enterRule(localctx, 10, self.RULE_comment)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 64
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [DFAParser.HASH]:
self.state = 56
self.match(DFAParser.HASH)
self.state = 60
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DFAParser.INIT) | (1 << DFAParser.EQUALS) | (1 << DFAParser.LEFT_PARENTHESIS) | (1 << DFAParser.RIGHT_PARENTHESIS) | (1 << DFAParser.LEFT_BRACKET) | (1 << DFAParser.RIGHT_BRACKET) | (1 << DFAParser.COMMA) | (1 << DFAParser.FINAL) | (1 << DFAParser.STATE) | (1 << DFAParser.ANYCHAR))) != 0):
self.state = 57
self.anyvalue()
self.state = 62
self._errHandler.sync(self)
_la = self._input.LA(1)
pass
elif token in [DFAParser.EOF]:
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class AnyvalueContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):