Commit 2fb0d5b0 authored by Kateřina Sloupová's avatar Kateřina Sloupová
Browse files

Add support for arbitrary state/symbol values enclosed in quotation marks, and tweak the set of values allowed inside comments

parent aa6c3784
......@@ -346,7 +346,7 @@ def main():
grammar_test("S'->a|aA''|\e;A''->a|bS'|<ab_0>''")
grammar_test("S->a;\nS->aA;\nS->\e;\nA->a;\nA->bS;\n")
words = dfa_eq("init=1 (1, a)=2 (2,a)=2 (1,b)=3 final={2,3} $comment#",
words = dfa_eq("init=1 (1, a)=2 (2,a)=2 (1,b)=3 final={2,3} #c",
"init=A (A, a)=B (A,b)=C (C,b)=C final={B,C}")
print(words.right_counterexample, words.left_counterexample, words.inf)
print()
......
......@@ -115,7 +115,8 @@ class WebChecker:
task=self.task, student_string=self.student_string)
except ParsingError as ex:
raise ParsingError(ex.args)
return ex.args
#raise ParsingError(ex.args)
def convert(self, student_type):
......@@ -143,7 +144,8 @@ class WebChecker:
return parser.reggrammar_to_str(nfa.nfa_to_reggrammar().eliminate_useless())
except ParsingError as ex:
raise ParsingError(ex.args)
return ex.args
# raise ParsingError(ex.args)
except Exception as ex:
print("Error inside of web checker:", ex.args)
......@@ -151,7 +153,7 @@ class WebChecker:
def relation(self, eq: bool) -> str:
student = self.student.dfa
teacher = self.teacher.dfa
# language 0 on picture: complement of both
self.languages[0] = self.language((reg.DFA.union(teacher, student)).complement())
......
......@@ -8,7 +8,9 @@ onerule: nonterminal ARROW (rewrite DELIMITER)* rewrite;
rewrite: (term_or_nonterm+ | EPSILON);
term_or_nonterm: (TERMINAL | nonterminal);
term_or_nonterm: (terminal | nonterminal);
terminal: (TERMINAL | QUOTE anyvalue+ QUOTE);
nonterminal: (CAPS | (LEFT_ANGLE symbol+ RIGHT_ANGLE (APOSTROPHE*)) | (symbol APOSTROPHE+));
......@@ -16,7 +18,7 @@ symbol: (TERMINAL | CAPS | UNDERSCORE);
comment: (HASH anyvalue* | );
anyvalue: LEFT_ANGLE | RIGHT_ANGLE | APOSTROPHE | UNDERSCORE | TERMINAL | CAPS | ARROW | EPSILON | DELIMITER | NEWLINE| ANYCHAR;
anyvalue: LEFT_ANGLE | RIGHT_ANGLE | APOSTROPHE | UNDERSCORE | TERMINAL | CAPS | ARROW | EPSILON | DELIMITER | NEWLINE| ANYCHAR | HASH;
/* Lexer Rules */
......@@ -33,6 +35,7 @@ EPSILON : ('ε' | '\\''e');
DELIMITER : '|';
NEWLINE : ('\n' | ';' | ',');
HASH : '#';
QUOTE : '"';
/* Characters to be ignored */
WS : [ \r\t]+ -> skip ;
......
......@@ -8,30 +8,31 @@ import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\17")
buf.write("B\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\20")
buf.write("F\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7")
buf.write("\3\b\3\b\3\b\5\b-\n\b\3\t\3\t\3\t\5\t\62\n\t\3\n\3\n\3")
buf.write("\13\3\13\3\f\3\f\3\r\6\r;\n\r\r\r\16\r<\3\r\3\r\3\16\3")
buf.write("\16\2\2\17\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25")
buf.write("\f\27\r\31\16\33\17\3\2\6\4\2\62;c|\3\2C\\\5\2\f\f..=")
buf.write("=\5\2\13\13\17\17\"\"\2D\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3")
buf.write("\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2")
buf.write("\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2")
buf.write("\2\31\3\2\2\2\2\33\3\2\2\2\3\35\3\2\2\2\5\37\3\2\2\2\7")
buf.write("!\3\2\2\2\t#\3\2\2\2\13%\3\2\2\2\r\'\3\2\2\2\17,\3\2\2")
buf.write("\2\21\61\3\2\2\2\23\63\3\2\2\2\25\65\3\2\2\2\27\67\3\2")
buf.write("\2\2\31:\3\2\2\2\33@\3\2\2\2\35\36\7>\2\2\36\4\3\2\2\2")
buf.write("\37 \7@\2\2 \6\3\2\2\2!\"\7)\2\2\"\b\3\2\2\2#$\7a\2\2")
buf.write("$\n\3\2\2\2%&\t\2\2\2&\f\3\2\2\2\'(\t\3\2\2(\16\3\2\2")
buf.write("\2)-\7\u2194\2\2*+\7/\2\2+-\7@\2\2,)\3\2\2\2,*\3\2\2\2")
buf.write("-\20\3\2\2\2.\62\7\u03b7\2\2/\60\7^\2\2\60\62\7g\2\2\61")
buf.write(".\3\2\2\2\61/\3\2\2\2\62\22\3\2\2\2\63\64\7~\2\2\64\24")
buf.write("\3\2\2\2\65\66\t\4\2\2\66\26\3\2\2\2\678\7%\2\28\30\3")
buf.write("\2\2\29;\t\5\2\2:9\3\2\2\2;<\3\2\2\2<:\3\2\2\2<=\3\2\2")
buf.write("\2=>\3\2\2\2>?\b\r\2\2?\32\3\2\2\2@A\13\2\2\2A\34\3\2")
buf.write("\2\2\6\2,\61<\3\b\2\2")
buf.write("\t\16\4\17\t\17\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3")
buf.write("\6\3\7\3\7\3\b\3\b\3\b\5\b/\n\b\3\t\3\t\3\t\5\t\64\n\t")
buf.write("\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\6\16?\n\16\r\16")
buf.write("\16\16@\3\16\3\16\3\17\3\17\2\2\20\3\3\5\4\7\5\t\6\13")
buf.write("\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\3\2")
buf.write("\6\4\2\62;c|\3\2C\\\5\2\f\f..==\5\2\13\13\17\17\"\"\2")
buf.write("H\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13")
buf.write("\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3")
buf.write("\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2")
buf.write("\2\2\2\35\3\2\2\2\3\37\3\2\2\2\5!\3\2\2\2\7#\3\2\2\2\t")
buf.write("%\3\2\2\2\13\'\3\2\2\2\r)\3\2\2\2\17.\3\2\2\2\21\63\3")
buf.write("\2\2\2\23\65\3\2\2\2\25\67\3\2\2\2\279\3\2\2\2\31;\3\2")
buf.write("\2\2\33>\3\2\2\2\35D\3\2\2\2\37 \7>\2\2 \4\3\2\2\2!\"")
buf.write("\7@\2\2\"\6\3\2\2\2#$\7)\2\2$\b\3\2\2\2%&\7a\2\2&\n\3")
buf.write("\2\2\2\'(\t\2\2\2(\f\3\2\2\2)*\t\3\2\2*\16\3\2\2\2+/\7")
buf.write("\u2194\2\2,-\7/\2\2-/\7@\2\2.+\3\2\2\2.,\3\2\2\2/\20\3")
buf.write("\2\2\2\60\64\7\u03b7\2\2\61\62\7^\2\2\62\64\7g\2\2\63")
buf.write("\60\3\2\2\2\63\61\3\2\2\2\64\22\3\2\2\2\65\66\7~\2\2\66")
buf.write("\24\3\2\2\2\678\t\4\2\28\26\3\2\2\29:\7%\2\2:\30\3\2\2")
buf.write("\2;<\7$\2\2<\32\3\2\2\2=?\t\5\2\2>=\3\2\2\2?@\3\2\2\2")
buf.write("@>\3\2\2\2@A\3\2\2\2AB\3\2\2\2BC\b\16\2\2C\34\3\2\2\2")
buf.write("DE\13\2\2\2E\36\3\2\2\2\6\2.\63@\3\b\2\2")
return buf.getvalue()
......@@ -52,24 +53,25 @@ class CFGLexer(Lexer):
DELIMITER = 9
NEWLINE = 10
HASH = 11
WS = 12
ANYCHAR = 13
QUOTE = 12
WS = 13
ANYCHAR = 14
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'<'", "'>'", "'''", "'_'", "'|'", "'#'" ]
"'<'", "'>'", "'''", "'_'", "'|'", "'#'", "'\"'" ]
symbolicNames = [ "<INVALID>",
"LEFT_ANGLE", "RIGHT_ANGLE", "APOSTROPHE", "UNDERSCORE", "TERMINAL",
"CAPS", "ARROW", "EPSILON", "DELIMITER", "NEWLINE", "HASH",
"WS", "ANYCHAR" ]
"QUOTE", "WS", "ANYCHAR" ]
ruleNames = [ "LEFT_ANGLE", "RIGHT_ANGLE", "APOSTROPHE", "UNDERSCORE",
"TERMINAL", "CAPS", "ARROW", "EPSILON", "DELIMITER", "NEWLINE",
"HASH", "WS", "ANYCHAR" ]
"HASH", "QUOTE", "WS", "ANYCHAR" ]
grammarFileName = "CFG.g4"
......
......@@ -44,6 +44,15 @@ class CFGListener(ParseTreeListener):
pass
# Enter a parse tree produced by CFGParser#terminal.
def enterTerminal(self, ctx:CFGParser.TerminalContext):
    # Auto-generated listener hook: invoked when the walker enters a
    # `terminal` parse-tree node. No-op by default; override in a
    # subclass to react to terminal rules.
    pass
# Exit a parse tree produced by CFGParser#terminal.
def exitTerminal(self, ctx:CFGParser.TerminalContext):
    # Auto-generated listener hook: invoked when the walker leaves a
    # `terminal` parse-tree node. No-op by default; override in a
    # subclass to react to terminal rules.
    pass
# Enter a parse tree produced by CFGParser#nonterminal.
def enterNonterminal(self, ctx:CFGParser.NonterminalContext):
pass
......
This diff is collapsed.
......@@ -10,11 +10,11 @@ production: LEFT_PARENTHESIS statename COMMA statename RIGHT_PARENTHESIS EQUALS
final: FINAL EQUALS LEFT_BRACKET (statename (COMMA statename)* | ) RIGHT_BRACKET;
statename: STATE;
statename: (STATE | QUOTE anyvalue+ QUOTE);
comment: (HASH anyvalue* | );
anyvalue: INIT | EQUALS | LEFT_PARENTHESIS | RIGHT_PARENTHESIS | LEFT_BRACKET | RIGHT_BRACKET | COMMA | FINAL | STATE | ANYCHAR;
anyvalue: INIT | EQUALS | LEFT_PARENTHESIS | RIGHT_PARENTHESIS | LEFT_BRACKET | RIGHT_BRACKET | COMMA | FINAL | STATE | ANYCHAR | HASH;
/* Lexer Rules */
......@@ -28,8 +28,9 @@ LEFT_BRACKET : '{';
RIGHT_BRACKET : '}';
COMMA : ',';
FINAL : 'final';
STATE : ([a-zA-Z0-9] | '_' | '\'' | '<' | '>')+;
STATE : ([a-zA-Z0-9] | '_' | '\'' | '<' | '>' )+;
HASH : '#';
QUOTE : '"';
/* Characters to be ignored */
WS : [ \r\t\n]+ -> skip ;
......
......@@ -8,29 +8,30 @@ import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\16")
buf.write("B\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\3\2")
buf.write("\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3")
buf.write("\7\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\n\6\n\64\n\n\r\n")
buf.write("\16\n\65\3\13\3\13\3\f\6\f;\n\f\r\f\16\f<\3\f\3\f\3\r")
buf.write("\3\r\2\2\16\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25")
buf.write("\f\27\r\31\16\3\2\4\t\2))\62;>>@@C\\aac|\5\2\13\f\17\17")
buf.write("\"\"\2C\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2")
buf.write("\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2")
buf.write("\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\3")
buf.write("\33\3\2\2\2\5 \3\2\2\2\7\"\3\2\2\2\t$\3\2\2\2\13&\3\2")
buf.write("\2\2\r(\3\2\2\2\17*\3\2\2\2\21,\3\2\2\2\23\63\3\2\2\2")
buf.write("\25\67\3\2\2\2\27:\3\2\2\2\31@\3\2\2\2\33\34\7k\2\2\34")
buf.write("\35\7p\2\2\35\36\7k\2\2\36\37\7v\2\2\37\4\3\2\2\2 !\7")
buf.write("?\2\2!\6\3\2\2\2\"#\7*\2\2#\b\3\2\2\2$%\7+\2\2%\n\3\2")
buf.write("\2\2&\'\7}\2\2\'\f\3\2\2\2()\7\177\2\2)\16\3\2\2\2*+\7")
buf.write(".\2\2+\20\3\2\2\2,-\7h\2\2-.\7k\2\2./\7p\2\2/\60\7c\2")
buf.write("\2\60\61\7n\2\2\61\22\3\2\2\2\62\64\t\2\2\2\63\62\3\2")
buf.write("\2\2\64\65\3\2\2\2\65\63\3\2\2\2\65\66\3\2\2\2\66\24\3")
buf.write("\2\2\2\678\7%\2\28\26\3\2\2\29;\t\3\2\2:9\3\2\2\2;<\3")
buf.write("\2\2\2<:\3\2\2\2<=\3\2\2\2=>\3\2\2\2>?\b\f\2\2?\30\3\2")
buf.write("\2\2@A\13\2\2\2A\32\3\2\2\2\6\2\63\65<\3\b\2\2")
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\17")
buf.write("F\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6")
buf.write("\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\n\6\n\66")
buf.write("\n\n\r\n\16\n\67\3\13\3\13\3\f\3\f\3\r\6\r?\n\r\r\r\16")
buf.write("\r@\3\r\3\r\3\16\3\16\2\2\17\3\3\5\4\7\5\t\6\13\7\r\b")
buf.write("\17\t\21\n\23\13\25\f\27\r\31\16\33\17\3\2\4\t\2))\62")
buf.write(";>>@@C\\aac|\5\2\13\f\17\17\"\"\2G\2\3\3\2\2\2\2\5\3\2")
buf.write("\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2")
buf.write("\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2")
buf.write("\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\3\35\3\2\2\2\5\"")
buf.write("\3\2\2\2\7$\3\2\2\2\t&\3\2\2\2\13(\3\2\2\2\r*\3\2\2\2")
buf.write("\17,\3\2\2\2\21.\3\2\2\2\23\65\3\2\2\2\259\3\2\2\2\27")
buf.write(";\3\2\2\2\31>\3\2\2\2\33D\3\2\2\2\35\36\7k\2\2\36\37\7")
buf.write("p\2\2\37 \7k\2\2 !\7v\2\2!\4\3\2\2\2\"#\7?\2\2#\6\3\2")
buf.write("\2\2$%\7*\2\2%\b\3\2\2\2&\'\7+\2\2\'\n\3\2\2\2()\7}\2")
buf.write("\2)\f\3\2\2\2*+\7\177\2\2+\16\3\2\2\2,-\7.\2\2-\20\3\2")
buf.write("\2\2./\7h\2\2/\60\7k\2\2\60\61\7p\2\2\61\62\7c\2\2\62")
buf.write("\63\7n\2\2\63\22\3\2\2\2\64\66\t\2\2\2\65\64\3\2\2\2\66")
buf.write("\67\3\2\2\2\67\65\3\2\2\2\678\3\2\2\28\24\3\2\2\29:\7")
buf.write("%\2\2:\26\3\2\2\2;<\7$\2\2<\30\3\2\2\2=?\t\3\2\2>=\3\2")
buf.write("\2\2?@\3\2\2\2@>\3\2\2\2@A\3\2\2\2AB\3\2\2\2BC\b\r\2\2")
buf.write("C\32\3\2\2\2DE\13\2\2\2E\34\3\2\2\2\6\2\65\67@\3\b\2\2")
return buf.getvalue()
......@@ -50,8 +51,9 @@ class DFALexer(Lexer):
FINAL = 8
STATE = 9
HASH = 10
WS = 11
ANYCHAR = 12
QUOTE = 11
WS = 12
ANYCHAR = 13
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
......@@ -59,15 +61,16 @@ class DFALexer(Lexer):
literalNames = [ "<INVALID>",
"'init'", "'='", "'('", "')'", "'{'", "'}'", "','", "'final'",
"'#'" ]
"'#'", "'\"'" ]
symbolicNames = [ "<INVALID>",
"INIT", "EQUALS", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_BRACKET",
"RIGHT_BRACKET", "COMMA", "FINAL", "STATE", "HASH", "WS", "ANYCHAR" ]
"RIGHT_BRACKET", "COMMA", "FINAL", "STATE", "HASH", "QUOTE",
"WS", "ANYCHAR" ]
ruleNames = [ "INIT", "EQUALS", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS",
"LEFT_BRACKET", "RIGHT_BRACKET", "COMMA", "FINAL", "STATE",
"HASH", "WS", "ANYCHAR" ]
"HASH", "QUOTE", "WS", "ANYCHAR" ]
grammarFileName = "DFA.g4"
......
......@@ -11,29 +11,32 @@ else:
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\16")
buf.write("G\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b")
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\17")
buf.write("P\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b")
buf.write("\t\b\3\2\3\2\7\2\23\n\2\f\2\16\2\26\13\2\3\2\3\2\3\2\3")
buf.write("\3\3\3\3\3\3\3\5\3\37\n\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4")
buf.write("\3\4\3\5\3\5\3\5\3\5\3\5\3\5\7\5/\n\5\f\5\16\5\62\13\5")
buf.write("\3\5\5\5\65\n\5\3\5\3\5\3\6\3\6\3\7\3\7\7\7=\n\7\f\7\16")
buf.write("\7@\13\7\3\7\5\7C\n\7\3\b\3\b\3\b\2\2\t\2\4\6\b\n\f\16")
buf.write("\2\3\4\2\3\13\16\16\2E\2\20\3\2\2\2\4\36\3\2\2\2\6 \3")
buf.write("\2\2\2\b(\3\2\2\2\n8\3\2\2\2\fB\3\2\2\2\16D\3\2\2\2\20")
buf.write("\24\5\4\3\2\21\23\5\6\4\2\22\21\3\2\2\2\23\26\3\2\2\2")
buf.write("\24\22\3\2\2\2\24\25\3\2\2\2\25\27\3\2\2\2\26\24\3\2\2")
buf.write("\2\27\30\5\b\5\2\30\31\5\f\7\2\31\3\3\2\2\2\32\33\7\3")
buf.write("\2\2\33\34\7\4\2\2\34\37\5\n\6\2\35\37\3\2\2\2\36\32\3")
buf.write("\2\2\2\36\35\3\2\2\2\37\5\3\2\2\2 !\7\5\2\2!\"\5\n\6\2")
buf.write("\"#\7\t\2\2#$\5\n\6\2$%\7\6\2\2%&\7\4\2\2&\'\5\n\6\2\'")
buf.write("\7\3\2\2\2()\7\n\2\2)*\7\4\2\2*\64\7\7\2\2+\60\5\n\6\2")
buf.write(",-\7\t\2\2-/\5\n\6\2.,\3\2\2\2/\62\3\2\2\2\60.\3\2\2\2")
buf.write("\60\61\3\2\2\2\61\65\3\2\2\2\62\60\3\2\2\2\63\65\3\2\2")
buf.write("\2\64+\3\2\2\2\64\63\3\2\2\2\65\66\3\2\2\2\66\67\7\b\2")
buf.write("\2\67\t\3\2\2\289\7\13\2\29\13\3\2\2\2:>\7\f\2\2;=\5\16")
buf.write("\b\2<;\3\2\2\2=@\3\2\2\2><\3\2\2\2>?\3\2\2\2?C\3\2\2\2")
buf.write("@>\3\2\2\2AC\3\2\2\2B:\3\2\2\2BA\3\2\2\2C\r\3\2\2\2DE")
buf.write("\t\2\2\2E\17\3\2\2\2\b\24\36\60\64>B")
buf.write("\3\5\5\5\65\n\5\3\5\3\5\3\6\3\6\3\6\6\6<\n\6\r\6\16\6")
buf.write("=\3\6\3\6\5\6B\n\6\3\7\3\7\7\7F\n\7\f\7\16\7I\13\7\3\7")
buf.write("\5\7L\n\7\3\b\3\b\3\b\2\2\t\2\4\6\b\n\f\16\2\3\4\2\3\f")
buf.write("\17\17\2P\2\20\3\2\2\2\4\36\3\2\2\2\6 \3\2\2\2\b(\3\2")
buf.write("\2\2\nA\3\2\2\2\fK\3\2\2\2\16M\3\2\2\2\20\24\5\4\3\2\21")
buf.write("\23\5\6\4\2\22\21\3\2\2\2\23\26\3\2\2\2\24\22\3\2\2\2")
buf.write("\24\25\3\2\2\2\25\27\3\2\2\2\26\24\3\2\2\2\27\30\5\b\5")
buf.write("\2\30\31\5\f\7\2\31\3\3\2\2\2\32\33\7\3\2\2\33\34\7\4")
buf.write("\2\2\34\37\5\n\6\2\35\37\3\2\2\2\36\32\3\2\2\2\36\35\3")
buf.write("\2\2\2\37\5\3\2\2\2 !\7\5\2\2!\"\5\n\6\2\"#\7\t\2\2#$")
buf.write("\5\n\6\2$%\7\6\2\2%&\7\4\2\2&\'\5\n\6\2\'\7\3\2\2\2()")
buf.write("\7\n\2\2)*\7\4\2\2*\64\7\7\2\2+\60\5\n\6\2,-\7\t\2\2-")
buf.write("/\5\n\6\2.,\3\2\2\2/\62\3\2\2\2\60.\3\2\2\2\60\61\3\2")
buf.write("\2\2\61\65\3\2\2\2\62\60\3\2\2\2\63\65\3\2\2\2\64+\3\2")
buf.write("\2\2\64\63\3\2\2\2\65\66\3\2\2\2\66\67\7\b\2\2\67\t\3")
buf.write("\2\2\28B\7\13\2\29;\7\r\2\2:<\5\16\b\2;:\3\2\2\2<=\3\2")
buf.write("\2\2=;\3\2\2\2=>\3\2\2\2>?\3\2\2\2?@\7\r\2\2@B\3\2\2\2")
buf.write("A8\3\2\2\2A9\3\2\2\2B\13\3\2\2\2CG\7\f\2\2DF\5\16\b\2")
buf.write("ED\3\2\2\2FI\3\2\2\2GE\3\2\2\2GH\3\2\2\2HL\3\2\2\2IG\3")
buf.write("\2\2\2JL\3\2\2\2KC\3\2\2\2KJ\3\2\2\2L\r\3\2\2\2MN\t\2")
buf.write("\2\2N\17\3\2\2\2\n\24\36\60\64=AGK")
return buf.getvalue()
......@@ -48,11 +51,12 @@ class DFAParser ( Parser ):
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'init'", "'='", "'('", "')'", "'{'",
"'}'", "','", "'final'", "<INVALID>", "'#'" ]
"'}'", "','", "'final'", "<INVALID>", "'#'", "'\"'" ]
symbolicNames = [ "<INVALID>", "INIT", "EQUALS", "LEFT_PARENTHESIS",
"RIGHT_PARENTHESIS", "LEFT_BRACKET", "RIGHT_BRACKET",
"COMMA", "FINAL", "STATE", "HASH", "WS", "ANYCHAR" ]
"COMMA", "FINAL", "STATE", "HASH", "QUOTE", "WS",
"ANYCHAR" ]
RULE_start = 0
RULE_init = 1
......@@ -76,8 +80,9 @@ class DFAParser ( Parser ):
FINAL=8
STATE=9
HASH=10
WS=11
ANYCHAR=12
QUOTE=11
WS=12
ANYCHAR=13
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
......@@ -349,7 +354,7 @@ class DFAParser ( Parser ):
self.state = 50
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [DFAParser.STATE]:
if token in [DFAParser.STATE, DFAParser.QUOTE]:
self.state = 41
self.statename()
self.state = 46
......@@ -390,6 +395,19 @@ class DFAParser ( Parser ):
def STATE(self):
    # Accessor for the STATE token of this context (token index 0),
    # or None if this alternative did not match a STATE token.
    return self.getToken(DFAParser.STATE, 0)
def QUOTE(self, i:int=None):
    # With i=None, return the list of all QUOTE tokens in this context;
    # with an index i, return the token at that position. Generated
    # accessor for the quoted-statename alternative (QUOTE anyvalue+ QUOTE).
    if i is None:
        return self.getTokens(DFAParser.QUOTE)
    else:
        return self.getToken(DFAParser.QUOTE, i)
def anyvalue(self, i:int=None):
    # With i=None, return the list of all `anyvalue` subrule contexts;
    # with an index i, return the context at that position. Generated
    # accessor for the anyvalue+ repetition inside a quoted statename.
    if i is None:
        return self.getTypedRuleContexts(DFAParser.AnyvalueContext)
    else:
        return self.getTypedRuleContext(DFAParser.AnyvalueContext,i)
def getRuleIndex(self):
    # Identify this context as an instance of the `statename` parser rule.
    return DFAParser.RULE_statename
......@@ -408,10 +426,37 @@ class DFAParser ( Parser ):
localctx = DFAParser.StatenameContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_statename)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 54
self.match(DFAParser.STATE)
self.state = 63
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [DFAParser.STATE]:
self.state = 54
self.match(DFAParser.STATE)
pass
elif token in [DFAParser.QUOTE]:
self.state = 55
self.match(DFAParser.QUOTE)
self.state = 57
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 56
self.anyvalue()
self.state = 59
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DFAParser.INIT) | (1 << DFAParser.EQUALS) | (1 << DFAParser.LEFT_PARENTHESIS) | (1 << DFAParser.RIGHT_PARENTHESIS) | (1 << DFAParser.LEFT_BRACKET) | (1 << DFAParser.RIGHT_BRACKET) | (1 << DFAParser.COMMA) | (1 << DFAParser.FINAL) | (1 << DFAParser.STATE) | (1 << DFAParser.HASH) | (1 << DFAParser.ANYCHAR))) != 0)):
break
self.state = 61
self.match(DFAParser.QUOTE)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
......@@ -458,19 +503,19 @@ class DFAParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 64
self.state = 73
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [DFAParser.HASH]:
self.state = 56
self.state = 65
self.match(DFAParser.HASH)
self.state = 60
self.state = 69
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DFAParser.INIT) | (1 << DFAParser.EQUALS) | (1 << DFAParser.LEFT_PARENTHESIS) | (1 << DFAParser.RIGHT_PARENTHESIS) | (1 << DFAParser.LEFT_BRACKET) | (1 << DFAParser.RIGHT_BRACKET) | (1 << DFAParser.COMMA) | (1 << DFAParser.FINAL) | (1 << DFAParser.STATE) | (1 << DFAParser.ANYCHAR))) != 0):
self.state = 57
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DFAParser.INIT) | (1 << DFAParser.EQUALS) | (1 << DFAParser.LEFT_PARENTHESIS) | (1 << DFAParser.RIGHT_PARENTHESIS) | (1 << DFAParser.LEFT_BRACKET) | (1 << DFAParser.RIGHT_BRACKET) | (1 << DFAParser.COMMA) | (1 << DFAParser.FINAL) | (1 << DFAParser.STATE) | (1 << DFAParser.HASH) | (1 << DFAParser.ANYCHAR))) != 0):
self.state = 66
self.anyvalue()
self.state = 62
self.state = 71
self._errHandler.sync(self)
_la = self._input.LA(1)
......@@ -525,6 +570,9 @@ class DFAParser ( Parser ):
def ANYCHAR(self):
    # Accessor for the ANYCHAR token of this context (token index 0),
    # or None if this alternative did not match an ANYCHAR token.
    return self.getToken(DFAParser.ANYCHAR, 0)
def HASH(self):
    # Accessor for the HASH token of this context (token index 0), or
    # None if absent. Added because `anyvalue` now also accepts '#'.
    return self.getToken(DFAParser.HASH, 0)
def getRuleIndex(self):
    # Identify this context as an instance of the `anyvalue` parser rule.
    return DFAParser.RULE_anyvalue
......@@ -546,9 +594,9 @@ class DFAParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 66
self.state = 75
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DFAParser.INIT) | (1 << DFAParser.EQUALS) | (1 << DFAParser.LEFT_PARENTHESIS) | (1 << DFAParser.RIGHT_PARENTHESIS) | (1 << DFAParser.LEFT_BRACKET) | (1 << DFAParser.RIGHT_BRACKET) | (1 << DFAParser.COMMA) | (1 << DFAParser.FINAL) | (1 << DFAParser.STATE) | (1 << DFAParser.ANYCHAR))) != 0)):
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << DFAParser.INIT) | (1 << DFAParser.EQUALS) | (1 << DFAParser.LEFT_PARENTHESIS) | (1 << DFAParser.RIGHT_PARENTHESIS) | (1 << DFAParser.LEFT_BRACKET) | (1 << DFAParser.RIGHT_BRACKET) | (1 << DFAParser.COMMA) | (1 << DFAParser.FINAL) | (1 << DFAParser.STATE) | (1 << DFAParser.HASH) | (1 << DFAParser.ANYCHAR))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
......
......@@ -12,11 +12,11 @@ stateset: LEFT_BRACKET (statename (COMMA statename)* | ) RIGHT_BRACKET;
final: FINAL EQUALS stateset;
statename: STATE;
statename: (STATE | QUOTE anyvalue+ QUOTE);
comment: (HASH anyvalue* | );
anyvalue: INIT | EQUALS | LEFT_PARENTHESIS | RIGHT_PARENTHESIS | LEFT_BRACKET | RIGHT_BRACKET | COMMA | FINAL | STATE | ANYCHAR;
anyvalue: INIT | EQUALS | LEFT_PARENTHESIS | RIGHT_PARENTHESIS | LEFT_BRACKET | RIGHT_BRACKET | COMMA | FINAL | STATE | ANYCHAR | HASH;
/* Lexer Rules */
......@@ -33,6 +33,7 @@ FINAL : ('final');
EPSILON : ('ε' | '\\''e');
STATE : ([a-zA-Z0-9] | '_' | '\'')+;
HASH : '#';
QUOTE : '"';
/* Characters to be ignored */
WS : [ \r\t\n]+ -> skip ;
......
......@@ -8,31 +8,33 @@ import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\17")
buf.write("I\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\20")
buf.write("M\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6")
buf.write("\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3")
buf.write("\n\5\n8\n\n\3\13\6\13;\n\13\r\13\16\13<\3\f\3\f\3\r\6")
buf.write("\rB\n\r\r\r\16\rC\3\r\3\r\3\16\3\16\2\2\17\3\3\5\4\7\5")
buf.write("\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\3")
buf.write("\2\4\7\2))\62;C\\aac|\5\2\13\f\17\17\"\"\2K\2\3\3\2\2")
buf.write("\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2")
buf.write("\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25")
buf.write("\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\3\35\3")
buf.write("\2\2\2\5\"\3\2\2\2\7$\3\2\2\2\t&\3\2\2\2\13(\3\2\2\2\r")
buf.write("*\3\2\2\2\17,\3\2\2\2\21.\3\2\2\2\23\67\3\2\2\2\25:\3")
buf.write("\2\2\2\27>\3\2\2\2\31A\3\2\2\2\33G\3\2\2\2\35\36\7k\2")
buf.write("\2\36\37\7p\2\2\37 \7k\2\2 !\7v\2\2!\4\3\2\2\2\"#\7?\2")
buf.write("\2#\6\3\2\2\2$%\7*\2\2%\b\3\2\2\2&\'\7+\2\2\'\n\3\2\2")
buf.write("\2()\7}\2\2)\f\3\2\2\2*+\7\177\2\2+\16\3\2\2\2,-\7.\2")
buf.write("\2-\20\3\2\2\2./\7h\2\2/\60\7k\2\2\60\61\7p\2\2\61\62")
buf.write("\7c\2\2\62\63\7n\2\2\63\22\3\2\2\2\648\7\u03b7\2\2\65")
buf.write("\66\7^\2\2\668\7g\2\2\67\64\3\2\2\2\67\65\3\2\2\28\24")
buf.write("\3\2\2\29;\t\2\2\2:9\3\2\2\2;<\3\2\2\2<:\3\2\2\2<=\3\2")
buf.write("\2\2=\26\3\2\2\2>?\7%\2\2?\30\3\2\2\2@B\t\3\2\2A@\3\2")
buf.write("\2\2BC\3\2\2\2CA\3\2\2\2CD\3\2\2\2DE\3\2\2\2EF\b\r\2\2")
buf.write("F\32\3\2\2\2GH\13\2\2\2H\34\3\2\2\2\7\2\67:<C\3\b\2\2")
buf.write("\t\16\4\17\t\17\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3")
buf.write("\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t")
buf.write("\3\n\3\n\3\n\5\n:\n\n\3\13\6\13=\n\13\r\13\16\13>\3\f")
buf.write("\3\f\3\r\3\r\3\16\6\16F\n\16\r\16\16\16G\3\16\3\16\3\17")
buf.write("\3\17\2\2\20\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13")
buf.write("\25\f\27\r\31\16\33\17\35\20\3\2\4\7\2))\62;C\\aac|\5")
buf.write("\2\13\f\17\17\"\"\2O\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2")
buf.write("\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2")
buf.write("\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31")
buf.write("\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\3\37\3\2\2\2\5$\3\2")
buf.write("\2\2\7&\3\2\2\2\t(\3\2\2\2\13*\3\2\2\2\r,\3\2\2\2\17.")
buf.write("\3\2\2\2\21\60\3\2\2\2\239\3\2\2\2\25<\3\2\2\2\27@\3\2")
buf.write("\2\2\31B\3\2\2\2\33E\3\2\2\2\35K\3\2\2\2\37 \7k\2\2 !")
buf.write("\7p\2\2!\"\7k\2\2\"#\7v\2\2#\4\3\2\2\2$%\7?\2\2%\6\3\2")
buf.write("\2\2&\'\7*\2\2\'\b\3\2\2\2()\7+\2\2)\n\3\2\2\2*+\7}\2")
buf.write("\2+\f\3\2\2\2,-\7\177\2\2-\16\3\2\2\2./\7.\2\2/\20\3\2")
buf.write("\2\2\60\61\7h\2\2\61\62\7k\2\2\62\63\7p\2\2\63\64\7c\2")
buf.write("\2\64\65\7n\2\2\65\22\3\2\2\2\66:\7\u03b7\2\2\678\7^\2")
buf.write("\28:\7g\2\29\66\3\2\2\29\67\3\2\2\2:\24\3\2\2\2;=\t\2")
buf.write("\2\2<;\3\2\2\2=>\3\2\2\2><\3\2\2\2>?\3\2\2\2?\26\3\2\2")
buf.write("\2@A\7%\2\2A\30\3\2\2\2BC\7$\2\2C\32\3\2\2\2DF\t\3\2\2")
buf.write("ED\3\2\2\2FG\3\2\2\2GE\3\2\2\2GH\3\2\2\2HI\3\2\2\2IJ\b")
buf.write("\16\2\2J\34\3\2\2\2KL\13\2\2\2L\36\3\2\2\2\7\29<>G\3\b")
buf.write("\2\2")
return buf.getvalue()
......@@ -53,24 +55,25 @@ class NFALexer(Lexer):
EPSILON = 9
STATE = 10
HASH = 11
WS = 12
ANYCHAR = 13
QUOTE = 12
WS = 13
ANYCHAR = 14
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'init'", "'='", "'('", "')'", "'{'", "'}'", "','", "'#'" ]
"'init'", "'='", "'('", "')'", "'{'", "'}'", "','", "'#'", "'\"'" ]
symbolicNames = [ "<INVALID>",
"INIT", "EQUALS", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_BRACKET",
"RIGHT_BRACKET", "COMMA", "FINAL", "EPSILON", "STATE", "HASH",
"WS", "ANYCHAR" ]
"QUOTE", "WS", "ANYCHAR" ]