Do IR generation

This commit is contained in:
Emi Simpson 2023-03-04 23:13:25 -05:00
parent 532a5a14d0
commit 15206f7776
Signed by: Emi
GPG key ID: A12F2C2FFDC3D847

View file

@ -79,36 +79,56 @@ ASTTerm: TypeAlias = 'ASTNegated | ASTProp'
class ASTNegated:
    """AST node for a negated term (prints as ¬term)."""
    term: ASTTerm

    def __str__(self) -> str:
        return '¬' + str(self.term)

    def make_ir(self, props: Sequence[str], var: Sequence[str]) -> 'IRNeg':
        """Lower this negation to IR by lowering the inner term and wrapping it."""
        inner_ir = self.term.make_ir(props, var)
        return IRNeg(inner_ir)
@dataclass(frozen=True)
class ASTProp:
    """An identifier applied to zero or more argument terms.

    Whether the identifier is a proposition (predicate/constant/function)
    or a variable is not known until `make_ir` consults the declared names.
    """
    ident: Lexeme[Tok]
    arguments: Sequence[ASTTerm]

    def __str__(self) -> str:
        if len(self.arguments):
            return f'{self.ident.matched_string}({",".join(map(str, self.arguments))})'
        else:
            # Nullary applications print as the bare identifier, without parens.
            return self.ident.matched_string

    def make_ir(self, props: Sequence[str], vars: Sequence[str]) -> 'IRVar | IRProp':
        """Lower this term to IR, classifying the identifier via the declared names.

        Raises:
            Exception: if a variable is applied to arguments, or if the
                identifier was never declared at all.
        """
        if self.ident.matched_string in props:
            return IRProp(self.ident, [t.make_ir(props, vars) for t in self.arguments])
        elif self.ident.matched_string in vars:
            if len(self.arguments):
                raise Exception('Bad arg count!') #TODO
            else:
                return IRVar(self.ident)
        else:
            raise Exception('Unidentified!') #TODO
@dataclass(frozen=True)
class AST:
    """The parsed input: the four declaration sections plus the clause list."""
    predicate_idents: Sequence[Lexeme[Tok]]  # identifiers declared as predicates
    variable_idents: Sequence[Lexeme[Tok]]   # identifiers declared as variables
    const_idents: Sequence[Lexeme[Tok]]      # identifiers declared as constants
    func_idents: Sequence[Lexeme[Tok]]       # identifiers declared as functions
    clauses: Sequence[Sequence[ASTTerm]]     # each inner sequence is a disjunction of terms
@dataclass(frozen=True)
class IRProp:
    """An IR proposition: a named head applied to a list of argument terms.

    Constants and functions are represented the same way; a constant is
    simply a proposition with no arguments.
    """
    lexeme: Lexeme[Tok]
    arguments: 'Sequence[IRTerm]'

    def __str__(self) -> str:
        # Always prints the parentheses, even for zero arguments (e.g. `c()`).
        return f'{self.lexeme.matched_string}({",".join(str(arg) for arg in self.arguments)})'
@dataclass(frozen=True)
class IRVar:
    """An IR variable, printed with a `*` prefix to distinguish it from props."""
    lexeme: Lexeme[Tok]

    def __str__(self) -> str:
        return '*' + self.lexeme.matched_string
@dataclass(frozen=True)
class IRNeg:
    """An IR negation wrapping an inner term (prints as ¬inner)."""
    inner: 'IRTerm'

    def __str__(self) -> str:
        return '¬' + str(self.inner)
# Any node of the intermediate representation: a variable, a proposition, or a negation.
IRTerm: TypeAlias = IRVar | IRProp | IRNeg
def make_ir(
    predicate_idents: Sequence[Lexeme[Tok]],
    variable_idents: Sequence[Lexeme[Tok]],
    const_idents: Sequence[Lexeme[Tok]],
    func_idents: Sequence[Lexeme[Tok]],
    clauses: Sequence[Sequence[ASTTerm]],
) -> Sequence[Sequence[IRTerm]]:
    """Lower every clause of the parsed AST into IR terms.

    Constants, functions, and predicates are all treated uniformly as
    proposition names; only the declared variables lower to `IRVar`.
    """
    prop_names = [
        lexeme.matched_string
        for group in (const_idents, func_idents, predicate_idents)
        for lexeme in group
    ]
    var_names = [lexeme.matched_string for lexeme in variable_idents]
    lowered_clauses = []
    for clause in clauses:
        lowered_clauses.append([term.make_ir(prop_names, var_names) for term in clause])
    return lowered_clauses
def cons(stack: Sequence[Any]) -> Sequence[Any]:
match stack:
@ -143,7 +163,7 @@ def drop(stack: Sequence[Any]) -> Sequence[Any]:
GRAMMAR: Sequence[Tuple[Variable, Sequence[Variable | Tok | Action]]] = [
(Variable.Start,
[ Tok.PredicateSection, drop, Variable.Idents, call_func(p(p,p,p,p,AST)), Tok.Newline, drop
[ Tok.PredicateSection, drop, Variable.Idents, call_func(p(p,p,p,p,make_ir)), Tok.Newline, drop
, Tok.VariablesSection, drop, Variable.Idents, f_apply, Tok.Newline, drop
, Tok.ConstantsSection, drop, Variable.Idents, f_apply, Tok.Newline, drop
, Tok.FunctionsSection, drop, Variable.Idents, f_apply, Tok.Newline, drop
@ -237,7 +257,7 @@ if __name__ == '__main__':
match maybe_ast:
case Ok([ast]):
print(ast)
print('\n'.join(' or '.join(str(t) for t in c) for c in ast))
case Ok(huh):
print('Unexpected end result: ', huh)
case Err((Lexeme(token, text, line, col_start, col_end), expected)):