Commit dbef1081 authored by Kateřina Sloupová

add remaining changed files

parent ebd6f525
import lib.reg as reg
import lib.cfl as cfl
from lib.parsing.parser import Parser, ParsingError
from lib.checker import get_task, dfa_transform, nfa_transform, check_task, check_empty, check_alphabets, exit_correct, exit_incorrect
import sys
import signal
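# fja_checker: compares a student's formal-language solution (DFA/NFA or CFG)
# against the teacher's reference and reports counterexample words.
# The user-facing messages printed below are intentionally in Czech.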
def print_extra_word_ce(student_word):
print("Příklad slova, které je ve studentově řešení a není v zadaném jazyce: ", student_word)
def print_missing_word_ce(teacher_word):
print("Příklad slova, které chybí ve studentově řešení a je v zadaném jazyce: ", teacher_word)
def dfa_task(teacher_type, teacher_string, task, student_string):
try:
student_solution = dfa_transform(student_string, task)
teacher_solution = dfa_transform(teacher_string, teacher_type)
    except ParsingError:
        # If either solution fails to parse, report the submission as incorrect.
        exit_incorrect()
check_empty(student_solution=student_solution,
teacher_solution=teacher_solution)
alphabets = check_alphabets(student_alpha=student_solution.characters,
teacher_alpha=teacher_solution.characters, task=task)
if alphabets != "":
print(alphabets)
exit_incorrect()
result = reg.DFA.is_equivalent(student_solution, teacher_solution)
task_solved = check_task(student_solution, task)
    if task_solved == "" and result:
        exit_correct()
    if task_solved != "":
        print(task_solved)
if not result:
student_word, teacher_word = result.left_counterexample, result.right_counterexample
if student_word is None:
print("Studentovo řešení je podmnožinou zadaného jazyka.")
else:
print_extra_word_ce(student_word)
if teacher_word is None:
print("Studentovo řešení je nadmnožinou zadaného jazyka.")
else:
print_missing_word_ce(teacher_word)
if result.inf is not None:
print("Rozdíl porovnávaných jazyků je nekonečný.")
else:
print("Rozdíl porovnávaných jazyků je konečný.")
exit_incorrect()
def exit_cfl_ok_but_invalid_constraint(msg: str) -> None:
print("Gramatika generuje zadaný jazyk, ale nesplňuje podmínky "
f"zadání: {msg}")
exit(1)
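# Checks a CFG task: parses both grammars, tests language equivalence and then
# verifies any extra constraints encoded after "CFG+" in the task string
# (e.g. a "CFG+CNF" task additionally requires Chomsky normal form).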
def cfg_task(teacher_type: str, teacher_string: str, task: str,
student_string: str) -> None:
parser = Parser()
assert task[:3] == "CFG", f"Invalid task prefix {task[:3]}"
    constraints = []
    if len(task) > 3:
        assert task[3] == '+', f"Invalid task suffix {task[3:]}"
        constraints = [x for x in task[4:].split(",") if x]
try:
teacher_solution = parser.str_to_cfg(teacher_string)
except ParsingError as message:
print(f"Error parsing teacher's solution: {message}")
exit(1)
try:
student_solution = parser.str_to_cfg(student_string)
except ParsingError as message:
print(f"Error parsing student's solution: {message}")
exit(1)
check_empty(student_solution=student_solution,
teacher_solution=teacher_solution)
alphabets = check_alphabets(student_alpha=student_solution.terminals,
teacher_alpha=teacher_solution.terminals, task="GRA")
if alphabets != "":
print(alphabets)
exit_incorrect()
equals = cfl.CFG.is_equivalent_test(student_solution, teacher_solution)
if equals:
failed = []
for constraint in constraints:
if constraint == "¬ε":
if not student_solution.is_epsilon_normal_form():
failed.append("obsahuje nepovolené ε-kroky")
elif constraint == "¬s":
if student_solution.has_simple_rules():
failed.append("obsahuje nepovolená jednoduchá pravidla")
elif constraint == "CNF":
if not student_solution.is_cnf():
failed.append("není v CNF")
else:
assert False, f"Unknown constraint `{constraint}'"
if len(failed) == 0:
exit_correct()
exit_cfl_ok_but_invalid_constraint(", ".join(failed))
if equals.left_counterexample is not None:
print_extra_word_ce(equals.left_counterexample)
    if equals.right_counterexample is not None:
print_missing_word_ce(equals.right_counterexample)
exit_incorrect()
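# Entry point: reads the student's submission from argv[1], extracts the
# teacher's solution from the "-o<task>:<solution>" argument and dispatches
# by task type; signal.alarm(50) below acts as a hard time limit.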
def main():
signal.alarm(50)
# Get string with student's solution.
with open(sys.argv[1]) as student_file:
student_string = student_file.read()
# Get string with teacher's solution and type of the task.
for argument in sys.argv:
if argument[:2] == "-o":
teacher_string = argument[2:]
try:
task_prefix, teacher_string = teacher_string.split(":", 1)
teacher_type, task = get_task(task_prefix)
if teacher_type == "DFA" or teacher_type == "NFA":
dfa_task(teacher_type=teacher_type, teacher_string=teacher_string,
task=task, student_string=student_string)
elif teacher_type == "CFG":
cfg_task(teacher_type=teacher_type, teacher_string=teacher_string,
task=task, student_string=student_string)
else:
print(f"Invalid question type {task_prefix}")
exit(1)
# TODO K: get your exceptions together!
except Exception as ex:
print("Error inside of fja_checker:", ex.args)
exit(1)
if __name__ == "__main__":
main()
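# Invocation sketch (inferred from main(); the file name and task string are
# illustrative):
#   python fja_checker.py student_answer.txt "-oCFG:S -> aSb | ε"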
grammar CFG;
/* Parser Rules */
start: (onerule NEWLINE+)* onerule (NEWLINE+ | );
onerule: nonterminal ARROW (rewrite DELIMITER)* rewrite;
rewrite: (term_or_nonterm+ | EPSILON);
term_or_nonterm: (TERMINAL | nonterminal);
nonterminal: (CAPS | (LEFT_ANGLE symbol+ RIGHT_ANGLE (APOSTROPHE*)) | (symbol APOSTROPHE+));
symbol: (TERMINAL | CAPS | UNDERSCORE);
/* Lexer Rules */
/* Tokens */
LEFT_ANGLE : '<';
RIGHT_ANGLE : '>';
APOSTROPHE : '\'';
UNDERSCORE : '_';
TERMINAL : [a-z0-9];
CAPS : [A-Z];
ARROW : ('→' | '->');
EPSILON : ('ε' | '\\' 'e');
DELIMITER : '|';
NEWLINE : ('\n' | ';' | ',');
/* Characters to be ignored */
WS : [ \r\t]+ -> skip ;
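/* Illustrative inputs accepted by this grammar:
 *   S -> aSb | ε
 *   <expr> → a <expr> b | \e
 */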
# Generated from CFG.g4 by ANTLR 4.8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\r")
buf.write(":\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\3\2\3\2\3\3")
buf.write("\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\b\5\b)")
buf.write("\n\b\3\t\3\t\3\t\5\t.\n\t\3\n\3\n\3\13\3\13\3\f\6\f\65")
buf.write("\n\f\r\f\16\f\66\3\f\3\f\2\2\r\3\3\5\4\7\5\t\6\13\7\r")
buf.write("\b\17\t\21\n\23\13\25\f\27\r\3\2\6\4\2\62;c|\3\2C\\\5")
buf.write("\2\f\f..==\5\2\13\13\17\17\"\"\2<\2\3\3\2\2\2\2\5\3\2")
buf.write("\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2")
buf.write("\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2")
buf.write("\27\3\2\2\2\3\31\3\2\2\2\5\33\3\2\2\2\7\35\3\2\2\2\t\37")
buf.write("\3\2\2\2\13!\3\2\2\2\r#\3\2\2\2\17(\3\2\2\2\21-\3\2\2")
buf.write("\2\23/\3\2\2\2\25\61\3\2\2\2\27\64\3\2\2\2\31\32\7>\2")
buf.write("\2\32\4\3\2\2\2\33\34\7@\2\2\34\6\3\2\2\2\35\36\7)\2\2")
buf.write("\36\b\3\2\2\2\37 \7a\2\2 \n\3\2\2\2!\"\t\2\2\2\"\f\3\2")
buf.write("\2\2#$\t\3\2\2$\16\3\2\2\2%)\7\u2194\2\2&\'\7/\2\2\')")
buf.write("\7@\2\2(%\3\2\2\2(&\3\2\2\2)\20\3\2\2\2*.\7\u03b7\2\2")
buf.write("+,\7^\2\2,.\7g\2\2-*\3\2\2\2-+\3\2\2\2.\22\3\2\2\2/\60")
buf.write("\7~\2\2\60\24\3\2\2\2\61\62\t\4\2\2\62\26\3\2\2\2\63\65")
buf.write("\t\5\2\2\64\63\3\2\2\2\65\66\3\2\2\2\66\64\3\2\2\2\66")
buf.write("\67\3\2\2\2\678\3\2\2\289\b\f\2\29\30\3\2\2\2\6\2(-\66")
buf.write("\3\b\2\2")
return buf.getvalue()
class CFGLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
LEFT_ANGLE = 1
RIGHT_ANGLE = 2
APOSTROPHE = 3
UNDERSCORE = 4
TERMINAL = 5
CAPS = 6
ARROW = 7
EPSILON = 8
DELIMITER = 9
NEWLINE = 10
WS = 11
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'<'", "'>'", "'''", "'_'", "'|'" ]
symbolicNames = [ "<INVALID>",
"LEFT_ANGLE", "RIGHT_ANGLE", "APOSTROPHE", "UNDERSCORE", "TERMINAL",
"CAPS", "ARROW", "EPSILON", "DELIMITER", "NEWLINE", "WS" ]
ruleNames = [ "LEFT_ANGLE", "RIGHT_ANGLE", "APOSTROPHE", "UNDERSCORE",
"TERMINAL", "CAPS", "ARROW", "EPSILON", "DELIMITER", "NEWLINE",
"WS" ]
grammarFileName = "CFG.g4"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.8")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
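# Usage sketch (not part of the generated file; uses the standard antlr4
# runtime API):
#   from antlr4 import InputStream, CommonTokenStream
#   tokens = CommonTokenStream(CFGLexer(InputStream("S -> aSb | ε")))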
# Generated from CFG.g4 by ANTLR 4.8
from antlr4 import *
if __name__ is not None and "." in __name__:
from .CFGParser import CFGParser
else:
from CFGParser import CFGParser
# This class defines a complete listener for a parse tree produced by CFGParser.
class CFGListener(ParseTreeListener):
# Enter a parse tree produced by CFGParser#start.
def enterStart(self, ctx:CFGParser.StartContext):
pass
# Exit a parse tree produced by CFGParser#start.
def exitStart(self, ctx:CFGParser.StartContext):
pass
# Enter a parse tree produced by CFGParser#onerule.
def enterOnerule(self, ctx:CFGParser.OneruleContext):
pass
# Exit a parse tree produced by CFGParser#onerule.
def exitOnerule(self, ctx:CFGParser.OneruleContext):
pass
# Enter a parse tree produced by CFGParser#rewrite.
def enterRewrite(self, ctx:CFGParser.RewriteContext):
pass
# Exit a parse tree produced by CFGParser#rewrite.
def exitRewrite(self, ctx:CFGParser.RewriteContext):
pass
# Enter a parse tree produced by CFGParser#term_or_nonterm.
def enterTerm_or_nonterm(self, ctx:CFGParser.Term_or_nontermContext):
pass
# Exit a parse tree produced by CFGParser#term_or_nonterm.
def exitTerm_or_nonterm(self, ctx:CFGParser.Term_or_nontermContext):
pass
# Enter a parse tree produced by CFGParser#nonterminal.
def enterNonterminal(self, ctx:CFGParser.NonterminalContext):
pass
# Exit a parse tree produced by CFGParser#nonterminal.
def exitNonterminal(self, ctx:CFGParser.NonterminalContext):
pass
# Enter a parse tree produced by CFGParser#symbol.
def enterSymbol(self, ctx:CFGParser.SymbolContext):
pass
# Exit a parse tree produced by CFGParser#symbol.
def exitSymbol(self, ctx:CFGParser.SymbolContext):
pass
del CFGParser
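# Usage sketch (assumption: trees are walked with the standard antlr4
# ParseTreeWalker; MyListener is a hypothetical CFGListener subclass):
#   ParseTreeWalker().walk(MyListener(), parser.start())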
# Generated from CFG.g4 by ANTLR 4.8
# encoding: utf-8
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\r")
buf.write("S\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\3\2")
buf.write("\3\2\6\2\21\n\2\r\2\16\2\22\7\2\25\n\2\f\2\16\2\30\13")
buf.write("\2\3\2\3\2\6\2\34\n\2\r\2\16\2\35\3\2\5\2!\n\2\3\3\3\3")
buf.write("\3\3\3\3\3\3\7\3(\n\3\f\3\16\3+\13\3\3\3\3\3\3\4\6\4\60")
buf.write("\n\4\r\4\16\4\61\3\4\5\4\65\n\4\3\5\3\5\5\59\n\5\3\6\3")
buf.write("\6\3\6\6\6>\n\6\r\6\16\6?\3\6\3\6\7\6D\n\6\f\6\16\6G\13")
buf.write("\6\3\6\3\6\6\6K\n\6\r\6\16\6L\5\6O\n\6\3\7\3\7\3\7\2\2")
buf.write("\b\2\4\6\b\n\f\2\3\3\2\6\b\2Y\2\26\3\2\2\2\4\"\3\2\2\2")
buf.write("\6\64\3\2\2\2\b8\3\2\2\2\nN\3\2\2\2\fP\3\2\2\2\16\20\5")
buf.write("\4\3\2\17\21\7\f\2\2\20\17\3\2\2\2\21\22\3\2\2\2\22\20")
buf.write("\3\2\2\2\22\23\3\2\2\2\23\25\3\2\2\2\24\16\3\2\2\2\25")
buf.write("\30\3\2\2\2\26\24\3\2\2\2\26\27\3\2\2\2\27\31\3\2\2\2")
buf.write("\30\26\3\2\2\2\31 \5\4\3\2\32\34\7\f\2\2\33\32\3\2\2\2")
buf.write("\34\35\3\2\2\2\35\33\3\2\2\2\35\36\3\2\2\2\36!\3\2\2\2")
buf.write("\37!\3\2\2\2 \33\3\2\2\2 \37\3\2\2\2!\3\3\2\2\2\"#\5\n")
buf.write("\6\2#)\7\t\2\2$%\5\6\4\2%&\7\13\2\2&(\3\2\2\2\'$\3\2\2")
buf.write("\2(+\3\2\2\2)\'\3\2\2\2)*\3\2\2\2*,\3\2\2\2+)\3\2\2\2")
buf.write(",-\5\6\4\2-\5\3\2\2\2.\60\5\b\5\2/.\3\2\2\2\60\61\3\2")
buf.write("\2\2\61/\3\2\2\2\61\62\3\2\2\2\62\65\3\2\2\2\63\65\7\n")
buf.write("\2\2\64/\3\2\2\2\64\63\3\2\2\2\65\7\3\2\2\2\669\7\7\2")
buf.write("\2\679\5\n\6\28\66\3\2\2\28\67\3\2\2\29\t\3\2\2\2:O\7")
buf.write("\b\2\2;=\7\3\2\2<>\5\f\7\2=<\3\2\2\2>?\3\2\2\2?=\3\2\2")
buf.write("\2?@\3\2\2\2@A\3\2\2\2AE\7\4\2\2BD\7\5\2\2CB\3\2\2\2D")
buf.write("G\3\2\2\2EC\3\2\2\2EF\3\2\2\2FO\3\2\2\2GE\3\2\2\2HJ\5")
buf.write("\f\7\2IK\7\5\2\2JI\3\2\2\2KL\3\2\2\2LJ\3\2\2\2LM\3\2\2")
buf.write("\2MO\3\2\2\2N:\3\2\2\2N;\3\2\2\2NH\3\2\2\2O\13\3\2\2\2")
buf.write("PQ\t\2\2\2Q\r\3\2\2\2\16\22\26\35 )\61\648?ELN")
return buf.getvalue()
class CFGParser ( Parser ):
grammarFileName = "CFG.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'<'", "'>'", "'''", "'_'", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "'|'" ]
symbolicNames = [ "<INVALID>", "LEFT_ANGLE", "RIGHT_ANGLE", "APOSTROPHE",
"UNDERSCORE", "TERMINAL", "CAPS", "ARROW", "EPSILON",
"DELIMITER", "NEWLINE", "WS" ]
RULE_start = 0
RULE_onerule = 1
RULE_rewrite = 2
RULE_term_or_nonterm = 3
RULE_nonterminal = 4
RULE_symbol = 5
ruleNames = [ "start", "onerule", "rewrite", "term_or_nonterm", "nonterminal",
"symbol" ]
EOF = Token.EOF
LEFT_ANGLE=1
RIGHT_ANGLE=2
APOSTROPHE=3
UNDERSCORE=4
TERMINAL=5
CAPS=6
ARROW=7
EPSILON=8
DELIMITER=9
NEWLINE=10
WS=11
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.8")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class StartContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def onerule(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(CFGParser.OneruleContext)
else:
return self.getTypedRuleContext(CFGParser.OneruleContext,i)
def NEWLINE(self, i:int=None):
if i is None:
return self.getTokens(CFGParser.NEWLINE)
else:
return self.getToken(CFGParser.NEWLINE, i)
def getRuleIndex(self):
return CFGParser.RULE_start
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStart" ):
listener.enterStart(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStart" ):
listener.exitStart(self)
def start(self):
localctx = CFGParser.StartContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_start)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 20
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,1,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
self.state = 12
self.onerule()
self.state = 14
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 13
self.match(CFGParser.NEWLINE)
self.state = 16
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==CFGParser.NEWLINE):
break
self.state = 22
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,1,self._ctx)
self.state = 23
self.onerule()
self.state = 30
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [CFGParser.NEWLINE]:
self.state = 25
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 24
self.match(CFGParser.NEWLINE)
self.state = 27
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==CFGParser.NEWLINE):
break
pass
elif token in [CFGParser.EOF]:
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class OneruleContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def nonterminal(self):
return self.getTypedRuleContext(CFGParser.NonterminalContext,0)
def ARROW(self):
return self.getToken(CFGParser.ARROW, 0)
def rewrite(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(CFGParser.RewriteContext)
else:
return self.getTypedRuleContext(CFGParser.RewriteContext,i)
def DELIMITER(self, i:int=None):
if i is None:
return self.getTokens(CFGParser.DELIMITER)
else:
return self.getToken(CFGParser.DELIMITER, i)
def getRuleIndex(self):
return CFGParser.RULE_onerule
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterOnerule" ):
listener.enterOnerule(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitOnerule" ):
listener.exitOnerule(self)
def onerule(self):
localctx = CFGParser.OneruleContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_onerule)
try:
self.enterOuterAlt(localctx, 1)
self.state = 32
self.nonterminal()
self.state = 33
self.match(CFGParser.ARROW)
self.state = 39
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,4,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
self.state = 34
self.rewrite()
self.state = 35
self.match(CFGParser.DELIMITER)
self.state = 41
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,4,self._ctx)
self.state = 42
self.rewrite()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx