Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/lark/exceptions.py: 79%
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1from .utils import logger, NO_VALUE
2from typing import Mapping, Iterable, Callable, Union, TypeVar, Tuple, Any, List, Set, Optional, Collection, TYPE_CHECKING
4if TYPE_CHECKING:
5 from .lexer import Token
6 from .parsers.lalr_interactive_parser import InteractiveParser
7 from .tree import Tree
9###{standalone
class LarkError(Exception):
    """Root of the exception hierarchy; every error raised here derives from it."""
class ConfigurationError(LarkError, ValueError):
    """Raised for invalid configuration values (see ``assert_config``); also a ``ValueError``."""
def assert_config(value, options: Collection, msg='Got %r, expected one of %s'):
    """Raise ``ConfigurationError`` unless *value* is one of *options*.

    ``msg`` is a %-format template receiving ``(value, options)``.
    """
    if value in options:
        return
    raise ConfigurationError(msg % (value, options))
class GrammarError(LarkError):
    """A ``LarkError`` subtype for grammar-related failures."""
class ParseError(LarkError):
    """A ``LarkError`` subtype for errors raised while parsing."""
class LexError(LarkError):
    """A ``LarkError`` subtype for errors raised while lexing."""
35T = TypeVar('T')
class UnexpectedInput(LarkError):
    """UnexpectedInput Error.

    Used as a base class for the following exceptions:

    - ``UnexpectedCharacters``: The lexer encountered an unexpected string
    - ``UnexpectedToken``: The parser received an unexpected token
    - ``UnexpectedEOF``: The parser expected a token, but the input ended

    After catching one of these exceptions, you may call the following helper methods to create a nicer error message.
    """
    line: int
    column: int
    # Offset of the error in the input; subclasses set it (or leave None/-1 when unknown)
    pos_in_stream = None
    state: Any
    _terminals_by_name = None
    interactive_parser: 'InteractiveParser'

    def get_context(self, text: str, span: int=40) -> str:
        """Returns a pretty string pinpointing the error in the text,
        with span amount of context characters around it.

        Note:
            The parser doesn't hold a copy of the text it has to parse,
            so you have to provide it again
        """
        assert self.pos_in_stream is not None, self
        pos = self.pos_in_stream
        start = max(pos - span, 0)
        end = pos + span
        if not isinstance(text, bytes):
            # Keep only the error's own line: text after the last newline before
            # the error, and before the first newline after it.
            before = text[start:pos].rsplit('\n', 1)[-1]
            after = text[pos:end].split('\n', 1)[0]
            # expandtabs() so the caret lines up with the rendered text above it
            return before + after + '\n' + ' ' * len(before.expandtabs()) + '^\n'
        else:
            # Same logic for bytes input; decode at the end so the result is always str
            before = text[start:pos].rsplit(b'\n', 1)[-1]
            after = text[pos:end].split(b'\n', 1)[0]
            return (before + after + b'\n' + b' ' * len(before.expandtabs()) + b'^\n').decode("ascii", "backslashreplace")

    def match_examples(self, parse_fn: 'Callable[[str], Tree]',
                       examples: Union[Mapping[T, Iterable[str]], Iterable[Tuple[T, Iterable[str]]]],
                       token_type_match_fallback: bool=False,
                       use_accepts: bool=True
                       ) -> Optional[T]:
        """Allows you to detect what's wrong in the input text by matching
        against example errors.

        Given a parser instance and a dictionary mapping some label with
        some malformed syntax examples, it'll return the label for the
        example that bests matches the current error. The function will
        iterate the dictionary until it finds a matching error, and
        return the corresponding value.

        For an example usage, see `examples/error_reporting_lalr.py`

        Parameters:
            parse_fn: parse function (usually ``lark_instance.parse``)
            examples: dictionary of ``{'example_string': value}``.
            use_accepts: Recommended to keep this as ``use_accepts=True``.
        """
        assert self.state is not None, "Not supported for this exception"

        if isinstance(examples, Mapping):
            examples = examples.items()

        # candidate = (label, matched_by_token_type): best non-exact match so far.
        # An exact token match returns immediately; a token-type match beats a
        # bare same-state match, and only the first of each kind is kept.
        candidate = (None, False)
        for i, (label, example) in enumerate(examples):
            assert not isinstance(example, str), "Expecting a list"

            for j, malformed in enumerate(example):
                try:
                    parse_fn(malformed)
                except UnexpectedInput as ut:
                    if ut.state == self.state:
                        if (
                            use_accepts
                            and isinstance(self, UnexpectedToken)
                            and isinstance(ut, UnexpectedToken)
                            and ut.accepts != self.accepts
                        ):
                            # Same parser state but different acceptable terminals:
                            # not actually the same error, so skip this example.
                            logger.debug("Different accepts with same state[%d]: %s != %s at example [%s][%s]" %
                                         (self.state, self.accepts, ut.accepts, i, j))
                            continue
                        if (
                            isinstance(self, (UnexpectedToken, UnexpectedEOF))
                            and isinstance(ut, (UnexpectedToken, UnexpectedEOF))
                        ):
                            if ut.token == self.token:  # Try exact match first
                                logger.debug("Exact Match at example [%s][%s]" % (i, j))
                                return label

                            if token_type_match_fallback:
                                # Fallback to token types match
                                if (ut.token.type == self.token.type) and not candidate[-1]:
                                    logger.debug("Token Type Fallback at example [%s][%s]" % (i, j))
                                    candidate = label, True

                        if candidate[0] is None:
                            logger.debug("Same State match at example [%s][%s]" % (i, j))
                            candidate = label, False

        return candidate[0]

    def _format_expected(self, expected) -> str:
        # Replace raw terminal names with their user-friendly representation
        # when a terminal table is available.
        if self._terminals_by_name:
            d = self._terminals_by_name
            expected = [d[t_name].user_repr() if t_name in d else t_name for t_name in expected]
        return "Expected one of: \n\t* %s\n" % '\n\t* '.join(expected)
class UnexpectedEOF(ParseError, UnexpectedInput):
    """An exception that is raised by the parser, when the input ends while it still expects a token.
    """
    expected: 'List[Token]'

    def __init__(self, expected, state=None, terminals_by_name=None):
        super(UnexpectedEOF, self).__init__()

        self.expected = expected
        self.state = state
        self._terminals_by_name = terminals_by_name
        # Synthesize a sentinel EOF token so code that inspects `.token` keeps working
        from .lexer import Token
        self.token = Token("<EOF>", "")
        # Positions are unknown at end-of-input
        self.pos_in_stream = -1
        self.line = -1
        self.column = -1

    def __str__(self):
        return "Unexpected end-of-input. " + self._format_expected(self.expected)
class UnexpectedCharacters(LexError, UnexpectedInput):
    """An exception that is raised by the lexer, when it cannot match the next
    string of characters to any of its terminals.
    """

    allowed: Set[str]
    considered_tokens: Set[Any]

    def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None,
                 terminals_by_name=None, considered_rules=None):
        super(UnexpectedCharacters, self).__init__()

        # NOTE: considered_tokens and allowed could in principle be derived from state
        self.line = line
        self.column = column
        self.pos_in_stream = lex_pos
        self.state = state
        self._terminals_by_name = terminals_by_name

        self.allowed = allowed
        self.considered_tokens = considered_tokens
        self.considered_rules = considered_rules
        self.token_history = token_history

        # The offending character; a single byte is decoded so `char` is always str
        self.char = (seq[lex_pos:lex_pos + 1].decode("ascii", "backslashreplace")
                     if isinstance(seq, bytes) else seq[lex_pos])
        self._context = self.get_context(seq)

    def __str__(self):
        parts = ["No terminal matches '%s' in the current parser context, at line %d col %d" % (self.char, self.line, self.column)]
        parts.append('\n\n' + self._context)
        if self.allowed:
            parts.append(self._format_expected(self.allowed))
        if self.token_history:
            parts.append('\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in self.token_history))
        return ''.join(parts)
class UnexpectedToken(ParseError, UnexpectedInput):
    """An exception that is raised by the parser, when the token it received
    doesn't match any valid step forward.

    Parameters:
        token: The mismatched token
        expected: The set of expected tokens
        considered_rules: Which rules were considered, to deduce the expected tokens
        state: A value representing the parser state. Do not rely on its value or type.
        interactive_parser: An instance of ``InteractiveParser``, that is initialized to the point of failure,
                            and can be used for debugging and error handling.

    Note: These parameters are available as attributes of the instance.
    """

    expected: Set[str]
    considered_rules: Set[str]

    def __init__(self, token, expected, considered_rules=None, state=None, interactive_parser=None, terminals_by_name=None, token_history=None):
        super(UnexpectedToken, self).__init__()

        # Position info is read off the token itself when it carries any
        self.line = getattr(token, 'line', '?')
        self.column = getattr(token, 'column', '?')
        self.pos_in_stream = getattr(token, 'start_pos', None)
        self.state = state

        self.token = token
        self.expected = expected  # XXX deprecate? `accepts` is better
        self._accepts = NO_VALUE  # filled in lazily by the `accepts` property
        self.considered_rules = considered_rules
        self.interactive_parser = interactive_parser
        self._terminals_by_name = terminals_by_name
        self.token_history = token_history

    @property
    def accepts(self) -> Set[str]:
        # Computed on first access, since querying the interactive parser may be costly
        if self._accepts is not NO_VALUE:
            return self._accepts
        parser = self.interactive_parser
        self._accepts = parser and parser.accepts()
        return self._accepts

    def __str__(self):
        expected_part = self._format_expected(self.accepts or self.expected)
        message = "Unexpected token %r at line %s, column %s.\n%s" % (self.token, self.line, self.column, expected_part)
        if self.token_history:
            message += "Previous tokens: %r\n" % self.token_history
        return message
class VisitError(LarkError):
    """VisitError is raised when visitors are interrupted by an exception

    It provides the following attributes for inspection:

    Parameters:
        rule: the name of the visit rule that failed
        obj: the tree-node or token that was being processed
        orig_exc: the exception that cause it to fail

    Note: These parameters are available as attributes
    """

    obj: 'Union[Tree, Token]'
    orig_exc: Exception

    def __init__(self, rule, obj, orig_exc):
        super(VisitError, self).__init__(
            'Error trying to process rule "%s":\n\n%s' % (rule, orig_exc))
        self.rule = rule
        self.obj = obj
        self.orig_exc = orig_exc
class MissingVariableError(LarkError):
    """A ``LarkError`` subtype raised when a referenced variable is missing."""
292###}