Fix failing doctests in lex.py
parent 57130c9233
commit 0605b759f0
 lex.py | 14 +++++++-------
@@ -25,7 +25,7 @@ def try_lex1(regex: Pattern[str], tok: A, input: str, line_no: int, col_no: int)
     matched, and the rest of the input. Otherwise, returns `None`
 
     >>> try_lex1(compile(r'\d+'), "NUMBER", "123abc", 1, 1)
-    Some((['NUMBER': '123']@(1, 1-4), 'abc'))
+    Some((['NUMBER': '123']@(1, 1-3), 'abc'))
 
     >>> try_lex1(compile(r'\d+'), "NUMBER", "abc123", 1, 1) is None
     True
@@ -55,13 +55,13 @@ def tokenize(
     If the lexer is unable to match the input string with any of the tokens, then an `Err`
     is returned containing the section of the input that failed to match.
 
-    >>> tokenize(LEX_TABLE, [Tok.Whitespace], 'Clauses: \\n!man(x5) person') #doctest: +NORMALIZE_WHITESPACE
-    Ok([[ClausesSection: 'Clauses:']@(1, 1-9), [Newline: '\\n']@(1, 10-11),
-    [Negate: '!']@(2, 1-2), [Identifier: 'man']@(2, 2-5), [OpenP: '(']@(2, 5-6),
-    [Identifier: 'x5']@(2, 6-8), [CloseP: ')']@(2, 8-9),
-    [Identifier: 'person']@(2, 10-16)])
+    >>> tokenize(LEX_TABLE, [Tok.Whitespace], Tok.Eof, 'Clauses: \\n!man(x5) person') #doctest: +NORMALIZE_WHITESPACE
+    Ok([[ClausesSection: 'Clauses:']@(1, 1-8), [Newline: '\\n']@(1, 10-10),
+    [Negate: '!']@(2, 1-1), [Identifier: 'man']@(2, 2-4), [OpenP: '(']@(2, 5-5),
+    [Identifier: 'x5']@(2, 6-7), [CloseP: ')']@(2, 8-8),
+    [Identifier: 'person']@(2, 10-15), [Eof: '']@(2, 16-16)])
 
-    >>> tokenize(LEX_TABLE, [Tok.Whitespace], 'Clauses: \\n🍆 !man(x5)')
+    >>> tokenize(LEX_TABLE, [Tok.Whitespace], Tok.Eof,'Clauses: \\n🍆 !man(x5)')
     Err('🍆 !man(x5)')
     """
     def inner(input: str, line_no: int, col_no: int, prefix: List[Lexeme[A]]) -> Result[List[Lexeme[A]], str]:
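To re-check the fix locally, a minimal sketch, assuming lex.py is importable from the working directory:

    import doctest
    import lex

    # Runs every doctest in the module, including those of try_lex1 and tokenize.
    failures, attempted = doctest.testmod(lex)
    print(f"{failures} failures out of {attempted} doctests")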