JSON-Lang/pattern.py

from emis_funky_funktions import *
from typing import Collection, Mapping, Sequence, Tuple, TypeAlias
from comb_parse import Parser
from ir import Pattern, NamePattern, IgnorePattern, IntPattern, SPattern
from lex import Lexeme, tokenize
from enum import auto, IntEnum
import re


class PatTok(IntEnum):
    """
    All possible tokens used in the grammar
    """
    Whitespace = auto()
    Number = auto()
    Succ = auto()
    Underscore = auto()
    Name = auto()
    Eof = auto()

    def __repr__(self):
        return self._name_


# Note: the general Name pattern (r"\w+") also matches "S" and runs of digits;
# it is listed last so that the more specific tokens win (assuming tokenize
# tries the entries in order, hence Sequence rather than Collection).
PATTERN_LEX_TABLE: Sequence[Tuple[re.Pattern[str], PatTok]] = [
    (re.compile(r"\s+"), PatTok.Whitespace),
    (re.compile(r"\d+"), PatTok.Number),
    (re.compile(r"S"), PatTok.Succ),
    (re.compile(r"_"), PatTok.Underscore),
    (re.compile(r"\w+"), PatTok.Name),
]
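
# For example, the input "S S 4" should lex to Succ, Succ, Number (plus a
# trailing Eof), with the Whitespace lexemes dropped (they are passed to
# tokenize as ignorable in lex_and_parse_pattern below).
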
# P := int
# P := name
# P := underscore
# P := S <P>
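#
# For example (constructor shapes assumed from the ir imports):
#   "0"      parses to IntPattern(0)
#   "_"      parses to IgnorePattern()
#   "S S n"  parses to SPattern(SPattern(NamePattern("n")))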
# A number lexeme becomes an IntPattern holding its integer value.
parse_int: Parser[Pattern, PatTok] = Parser.token(PatTok.Number).map(Lexeme.get_match).map(int).map(IntPattern)
# A name lexeme becomes a NamePattern holding the matched text.
parse_name: Parser[Pattern, PatTok] = Parser.token(PatTok.Name).map(Lexeme.get_match).map(p(NamePattern))
# An underscore becomes an IgnorePattern, discarding the lexeme itself.
parse_ignore: Parser[Pattern, PatTok] = Parser.token(PatTok.Underscore).map(lambda _: IgnorePattern())
# An "S" is followed by a recursively parsed pattern, wrapped in an SPattern.
# Parser.lazy defers the reference to parse_P, which is defined just below.
parse_succ: Parser[Pattern, PatTok] = Parser.token(PatTok.Succ).map(k(SPattern)).fapply(Parser.lazy(lambda: parse_P))  # type: ignore
parse_P: Parser[Pattern, PatTok] = parse_int.or_(parse_name, parse_ignore, parse_succ)

# A complete pattern is a single P followed by end of input.
parse_pattern = parse_P.seq_ignore_tok(PatTok.Eof)


def lex_and_parse_pattern(input: str) -> Result[Pattern, str | Mapping[Lexeme[PatTok], Collection[PatTok]]]:
    """
    Lex and parse a pattern string, returning the parsed Pattern on success or
    an error value describing why lexing or parsing failed.
    """
    match tokenize(PATTERN_LEX_TABLE, [PatTok.Whitespace], PatTok.Eof, input):
        case Ok(lexemes):
            match parse_pattern.parse_(lexemes):
                case Ok(pattern):  # Imagine having a good type system
                    return Ok(pattern)
                case Err(e):
                    return Err(e)
        case Err(remainder):
            return Err(remainder)
    raise Exception('Unreachable')
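

# Minimal usage sketch (assumes comb_parse, ir, and lex are importable; the
# reprs printed here come from the ir pattern types and emis_funky_funktions'
# Result, so the exact output shape may differ):
if __name__ == '__main__':
    for src in ('0', '_', 'S S 3', 'S x'):
        print(repr(src), '=>', lex_and_parse_pattern(src))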