# This module provides a LALR interactive parser, which is used for debugging and error handling

from typing import Iterator, List
from copy import copy
import warnings

from lark.exceptions import UnexpectedToken
from lark.lexer import Token, LexerThread
from .lalr_parser_state import ParserState

###{standalone

class InteractiveParser:
    """InteractiveParser gives you advanced control over parsing and error handling when parsing with LALR.

    For a simpler interface, see the ``on_error`` argument to ``Lark.parse()``.
    """
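    # A minimal usage sketch, in comments only. It assumes a Lark instance built with
    # parser='lalr'; ``Lark.parse_interactive()`` is the usual way to obtain an
    # InteractiveParser, and the grammar and text names below are placeholders.
    #
    #     parser = Lark(grammar, parser='lalr')
    #     ip = parser.parse_interactive(text)
    #     for token in ip.iter_parse():        # step through the parse token by token
    #         print(token, ip.pretty())        # inspect the parser state between steps
    #     tree = ip.feed_eof()                 # feed '$END' to obtain the final tree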
    def __init__(self, parser, parser_state: ParserState, lexer_thread: LexerThread):
        self.parser = parser
        self.parser_state = parser_state
        self.lexer_thread = lexer_thread
        self.result = None

    @property
    def lexer_state(self) -> LexerThread:
        warnings.warn("lexer_state will be removed in subsequent releases. Use lexer_thread instead.", DeprecationWarning)
        return self.lexer_thread

    def feed_token(self, token: Token):
        """Feed the parser with a token, and advance it to the next state, as if it received it from the lexer.

        Note that ``token`` has to be an instance of ``Token``.
        """
        return self.parser_state.feed_token(token, token.type == '$END')
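    # For example (a hedged sketch; the terminal name NAME is hypothetical and must
    # exist in the grammar for the parser to accept it):
    #
    #     ip.feed_token(Token('NAME', 'foo'))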

    def iter_parse(self) -> Iterator[Token]:
        """Step through the different stages of the parse, by reading tokens from the lexer
        and feeding them to the parser, one per iteration.

        Returns an iterator of the tokens it encounters.

        When the parse is over, the resulting tree can be found in ``InteractiveParser.result``.
        """
        for token in self.lexer_thread.lex(self.parser_state):
            yield token
            self.result = self.feed_token(token)

    def exhaust_lexer(self) -> List[Token]:
        """Try to feed the rest of the lexer state into the interactive parser.

        Note that this modifies the instance in place and does not feed an '$END' Token.
        """
        return list(self.iter_parse())
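    # A common pattern, sketched in comments (``parser`` and ``text`` are assumed,
    # with ``parse_interactive()`` as the assumed entry point):
    #
    #     ip = parser.parse_interactive(text)
    #     tokens = ip.exhaust_lexer()   # consume all remaining input
    #     tree = ip.feed_eof()          # then explicitly feed '$END'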


    def feed_eof(self, last_token=None):
        """Feed a '$END' Token. Borrows from 'last_token' if given."""
        eof = Token.new_borrow_pos('$END', '', last_token) if last_token is not None else self.lexer_thread._Token('$END', '', 0, 1, 1)
        return self.feed_token(eof)


    def __copy__(self):
        """Create a new interactive parser with a separate state.

        Calls to feed_token() won't affect the old instance, and vice-versa.
        """
        return self.copy()

    def copy(self, deepcopy_values=True):
        # Copy the parser state (and, by default, deep-copy its values) along with the
        # lexer thread, so the new instance can be advanced independently.
        return type(self)(
            self.parser,
            self.parser_state.copy(deepcopy_values=deepcopy_values),
            copy(self.lexer_thread),
        )
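    # A hedged sketch of speculative parsing with ``copy()``: try a token on a
    # throwaway copy without disturbing the original (``ip`` and ``tok`` are assumed):
    #
    #     trial = copy(ip)            # or ip.copy(deepcopy_values=False) for speed
    #     try:
    #         trial.feed_token(tok)
    #     except UnexpectedToken:
    #         pass                    # ``ip`` is untouched either way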

    def __eq__(self, other):
        if not isinstance(other, InteractiveParser):
            return False

        return self.parser_state == other.parser_state and self.lexer_thread == other.lexer_thread

    def as_immutable(self):
        """Convert to an ``ImmutableInteractiveParser``."""
        p = copy(self)
        return ImmutableInteractiveParser(p.parser, p.parser_state, p.lexer_thread)

    def pretty(self):
        """Return the output of ``choices()`` formatted in a way that's easier to read."""
        out = ["Parser choices:"]
        for k, v in self.choices().items():
            out.append('\t- %s -> %r' % (k, v))
        out.append('stack size: %s' % len(self.parser_state.state_stack))
        return '\n'.join(out)

    def choices(self):
        """Returns a dictionary of token types, matched to their action in the parser.

        Only returns token types that are accepted by the current state.

        Updated by ``feed_token()``.
        """
        return self.parser_state.parse_conf.parse_table.states[self.parser_state.position]

    def accepts(self):
        """Returns the set of possible tokens that will advance the parser into a new valid state."""
        accepts = set()
        conf_no_callbacks = copy(self.parser_state.parse_conf)
        # We don't want to call callbacks here since those might have arbitrary side effects
        # and are unnecessarily slow.
        conf_no_callbacks.callbacks = {}
        for t in self.choices():
            if t.isupper(): # is terminal?
                new_cursor = self.copy(deepcopy_values=False)
                new_cursor.parser_state.parse_conf = conf_no_callbacks
                try:
                    new_cursor.feed_token(self.lexer_thread._Token(t, ''))
                except UnexpectedToken:
                    pass
                else:
                    accepts.add(t)
        return accepts
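    # A hedged error-handling sketch: inside an ``on_error`` callback, ``accepts()``
    # can report which terminals would let the parse continue. The ``interactive_parser``
    # attribute on the exception is assumed here (as set by the LALR parser when
    # parsing with ``on_error``):
    #
    #     def on_error(e):
    #         print("Expected one of:", e.interactive_parser.accepts())
    #         return True   # skip the bad token and keep parsing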

    def resume_parse(self):
        """Resume automated parsing from the current state."""
        return self.parser.parse_from_state(self.parser_state, last_token=self.lexer_thread.state.last_token)
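    # A hedged recovery sketch: after stepping or repairing manually, hand control back
    # to the automated parser (``ip`` is an assumed InteractiveParser, and RPAR is a
    # hypothetical terminal used as the repair token):
    #
    #     ip.feed_token(Token('RPAR', ')'))   # inject a repair token
    #     tree = ip.resume_parse()            # let the parser finish on its own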


class ImmutableInteractiveParser(InteractiveParser):
    """Same as ``InteractiveParser``, but operations create a new instance instead
    of changing it in-place.
    """

    result = None

    def __hash__(self):
        return hash((self.parser_state, self.lexer_thread))

    def feed_token(self, token):
        # Unlike the mutable version, return an advanced copy and leave ``self`` unchanged.
        c = copy(self)
        c.result = InteractiveParser.feed_token(c, token)
        return c

    def exhaust_lexer(self):
        """Try to feed the rest of the lexer state into the parser.

        Note that this returns a new ImmutableInteractiveParser and does not feed an '$END' Token."""
        cursor = self.as_mutable()
        cursor.exhaust_lexer()
        return cursor.as_immutable()

    def as_mutable(self):
        """Convert to an ``InteractiveParser``."""
        p = copy(self)
        return InteractiveParser(p.parser, p.parser_state, p.lexer_thread)
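    # A hedged sketch of the immutable flavour: feeding a token returns a new instance,
    # which makes these parsers hashable and safe to store (``ip`` and ``tok`` are
    # assumed values):
    #
    #     imm = ip.as_immutable()
    #     imm2 = imm.feed_token(tok)   # ``imm`` itself is unchanged
    #     seen = {imm, imm2}           # usable as set members or dict keys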

###}