diff --git a/grammar.py b/grammar.py
index 6da8e2b..afe99bb 100644
--- a/grammar.py
+++ b/grammar.py
@@ -12,12 +12,13 @@ from dataclasses import dataclass
 from enum import auto, IntEnum
 from re import compile, Pattern
 
-from ir import IRNeg, IRProp, IRTerm, IRVar
-from lex import Lexeme
-from parse import Action
+from build_oracle import oracle_table
+from ir import IRNeg, IRProp, IRTerm, IRVar, KnowledgeBase
+from lex import Lexeme, tokenize
+from parse import Action, parser
 from tokens import *
 
-from typing import Any, Callable, Collection, Mapping, Sequence, Tuple, TypeAlias
+from typing import Any, Callable, cast, Collection, Mapping, Sequence, Tuple, TypeAlias
 
 
 class Variable(IntEnum):
@@ -137,15 +138,19 @@ class ASTProp:
         if bound_type == IdentKind.Variable:
             return Ok(IRVar(self.ident.matched_string))
         else:
-            arg_ir = sequence([t.make_ir(idents, False) for t in self.arguments])
-            return map_res(p(IRProp, self.ident.matched_string), arg_ir)
+            return (sequence([t.make_ir(idents, False) for t in self.arguments])
+                    <= cast(Callable[[Iterable[IRTerm]], Tuple[IRTerm, ...]], tuple))\
+                    <= p(IRProp, self.ident.matched_string)
 
 @cur2
 def make_ir(
         idents: IdentBindings,
         clauses: Sequence[Sequence[ASTTerm]],
-) -> Result[Sequence[Sequence[IRTerm]], GenIrError]:
-    return sequence([sequence([term.make_ir(idents, True) for term in clause]) for clause in clauses])
+) -> Result[KnowledgeBase, GenIrError]:
+    return map_res(
+        lambda kb_: FSet(FSet(clause) for clause in kb_),
+        sequence([sequence([term.make_ir(idents, True) for term in clause]) for clause in clauses])
+    )
 
 def cons(stack: Sequence[Any]) -> Sequence[Any]:
     match stack:
@@ -257,7 +262,7 @@ CSTerms := Comma
         := ε
 """
 
-def lex_and_parse(input: str) -> Result[Result[Sequence[Sequence[IRTerm]], GenIrError], Tuple[Lexeme[Tok], Collection[Tok]]]:
+def lex_and_parse(input: str) -> Result[Result[KnowledgeBase, GenIrError], Tuple[Lexeme[Tok], Collection[Tok]]]:
     lexemes = unwrap_r(tokenize(LEX_TABLE, [Tok.Whitespace], Tok.Eof, input))
 
     oracle_table_ = oracle_table(p_instance(Tok), p_instance(Variable), GRAMMAR) #type:ignore
@@ -277,9 +282,6 @@ if __name__ == '__main__':
     # from emis_funky_funktions import cur2, flip
     # from build_oracle import print_oracle_table_enum, oracle_table
     # print(print_oracle_table_enum(oracle_table(flip(cur2(isinstance))(Tok), GRAMMAR))) #type: ignore
-    from build_oracle import oracle_table
-    from parse import parser
-    from lex import tokenize
 
     import sys
     if len(sys.argv) == 2:
diff --git a/ir.py b/ir.py
index 6790e88..2842b80 100644
--- a/ir.py
+++ b/ir.py
@@ -178,6 +178,16 @@ Due to this generalization, `Clause_` does not necessarily benefit from hashabil
 deduplication. It exists mostly to make inputing things easier, e.g. in doctests.
""" +KnowledgeBase: TypeAlias = FrozenSet[Clause] +KnowledgeBase_: TypeAlias = Collection[Clause_] +""" +A more general version of `KnowledgeBase` + +`KnowledgeBase_` : `KnowledgeBase` :: `Clause_` : `Clause` + +A superclass of `KnowledgeBase` +""" + sub_all: Callable[[Substitutions, IRTerm], IRTerm] = p(reduce, lambda t, s: t.subst(s)) #type:ignore """ Perform a series of substitutions on a term diff --git a/resolution.py b/resolution.py index 2a5c34e..b9af348 100644 --- a/resolution.py +++ b/resolution.py @@ -4,17 +4,7 @@ from itertools import combinations, product from operator import eq from typing import Collection, FrozenSet, TypeAlias -from ir import Clause, Clause_, IRNeg, IRProp, IRTerm, IRVar, Substitutions, sub_all, unify - -KnowledgeBase: TypeAlias = FrozenSet[Clause] -KnowledgeBase_: TypeAlias = Collection[Clause_] -""" -A more general version of `KnowledgeBase` - -`KnowledgeBase_` : `KnowledgeBase` :: `Clause_` : `Clause` - -A superclass of `KnowledgeBase` -""" +from ir import Clause, Clause_, IRNeg, IRProp, IRTerm, IRVar, KnowledgeBase, KnowledgeBase_, Substitutions, sub_all, unify def terms_cancel(t1: IRTerm, t2: IRTerm) -> Option[Substitutions]: """