from typing import Any, Callable, Dict, Optional, Collection, Union, TYPE_CHECKING

from .exceptions import ConfigurationError, GrammarError, assert_config
from .utils import get_regexp_width, Serialize
from .lexer import LexerThread, BasicLexer, ContextualLexer, Lexer
from .parsers import earley, xearley, cyk
from .parsers.lalr_parser import LALR_Parser
from .tree import Tree
from .common import LexerConf, ParserConf, _ParserArgType, _LexerArgType

if TYPE_CHECKING:
    from .parsers.lalr_analysis import ParseTableBase


###{standalone
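# Code between the '###{standalone' and '###}' markers is inlined into the
# output of Lark's standalone-parser generator tool.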

def _wrap_lexer(lexer_class):
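    """Adapt a user-supplied lexer class to the current Lexer interface.

    Classes that declare ``__future_interface__ = True`` already accept the
    (lexer_state, parser_state) calling convention and are returned unchanged;
    older classes are wrapped so their text-based ``lex()`` keeps working.
    """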
    future_interface = getattr(lexer_class, '__future_interface__', False)
    if future_interface:
        return lexer_class
    else:
        class CustomLexerWrapper(Lexer):
            def __init__(self, lexer_conf):
                self.lexer = lexer_class(lexer_conf)
            def lex(self, lexer_state, parser_state):
                return self.lexer.lex(lexer_state.text)
        return CustomLexerWrapper


def _deserialize_parsing_frontend(data, memo, lexer_conf, callbacks, options):
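    """Reconstruct a ParsingFrontend from serialized (e.g. cached) data.

    Only LALR parsers are serialized, so deserialization always goes through
    LALR_Parser (or a plugin override of it).
    """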
    parser_conf = ParserConf.deserialize(data['parser_conf'], memo)
    cls = (options and options._plugins.get('LALR_Parser')) or LALR_Parser
    parser = cls.deserialize(data['parser'], memo, callbacks, options.debug)
    parser_conf.callbacks = callbacks
    return ParsingFrontend(lexer_conf, parser_conf, options, parser=parser)


# Registry of parser factories, keyed by parser type. Only entries registered
# here (inside the standalone section) are available in standalone mode.
_parser_creators: 'Dict[str, Callable[[LexerConf, Any, Any], Any]]' = {}


class ParsingFrontend(Serialize):
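    """Ties a lexer and a parser together into a single parsing pipeline.

    Depending on the configuration, the lexer may be a basic or contextual
    lexer, a user-provided class, or skipped entirely (the dynamic Earley
    lexers scan the raw text themselves).
    """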
    __serialize_fields__ = 'lexer_conf', 'parser_conf', 'parser'

    lexer_conf: LexerConf
    parser_conf: ParserConf
    options: Any

    def __init__(self, lexer_conf: LexerConf, parser_conf: ParserConf, options, parser=None):
        self.parser_conf = parser_conf
        self.lexer_conf = lexer_conf
        self.options = options

        # Set up the parser
        if parser:  # From cache
            self.parser = parser
        else:
            create_parser = _parser_creators.get(parser_conf.parser_type)
            assert create_parser is not None, "{} is not supported in standalone mode".format(parser_conf.parser_type)
            self.parser = create_parser(lexer_conf, parser_conf, options)

        # Set up the lexer
        lexer_type = lexer_conf.lexer_type
        self.skip_lexer = False
        if lexer_type in ('dynamic', 'dynamic_complete'):
            assert lexer_conf.postlex is None
            self.skip_lexer = True
            return

        if isinstance(lexer_type, type):
            assert issubclass(lexer_type, Lexer)
            self.lexer = _wrap_lexer(lexer_type)(lexer_conf)
        elif isinstance(lexer_type, str):
            create_lexer = {
                'basic': create_basic_lexer,
                'contextual': create_contextual_lexer,
            }[lexer_type]
            self.lexer = create_lexer(lexer_conf, self.parser, lexer_conf.postlex, options)
        else:
            raise TypeError(f"Bad value for lexer_type: {lexer_type}")

        if lexer_conf.postlex:
            self.lexer = PostLexConnector(self.lexer, lexer_conf.postlex)

    def _verify_start(self, start=None):
        if start is None:
            start_decls = self.parser_conf.start
            if len(start_decls) > 1:
                raise ConfigurationError("Lark initialized with more than 1 possible start rule. Must specify which start rule to parse", start_decls)
            start, = start_decls
        elif start not in self.parser_conf.start:
            raise ConfigurationError("Unknown start rule %s. Must be one of %r" % (start, self.parser_conf.start))
        return start

    def _make_lexer_thread(self, text: str) -> Union[str, LexerThread]:
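        """Wrap the input in a LexerThread, or pass the raw text through
        when a dynamic lexer will do its own scanning."""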
        cls = (self.options and self.options._plugins.get('LexerThread')) or LexerThread
        return text if self.skip_lexer else cls.from_text(self.lexer, text)

    def parse(self, text: str, start=None, on_error=None):
        chosen_start = self._verify_start(start)
        kw = {} if on_error is None else {'on_error': on_error}
        stream = self._make_lexer_thread(text)
        return self.parser.parse(stream, chosen_start, **kw)

    def parse_interactive(self, text: Optional[str]=None, start=None):
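        """Start an interactive (step-by-step) LALR parse.

        A minimal usage sketch through the public API (``Lark`` is the usual
        entry point that constructs this frontend):

            parser = Lark(grammar, parser='lalr')
            ip = parser.parse_interactive('some input')
            ip.exhaust_lexer()          # feed the remaining tokens
            tree = ip.resume_parse()    # finish and get the resulting tree
        """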
        # TODO BREAK - Change text from Optional[str] to str, with a default of ''.
        # This would change the behavior of exhaust_lexer(), which currently raises a
        # TypeError when text is None, and would instead just return [].
        chosen_start = self._verify_start(start)
        if self.parser_conf.parser_type != 'lalr':
            raise ConfigurationError("parse_interactive() currently only works with parser='lalr'")
        stream = self._make_lexer_thread(text)  # type: ignore[arg-type]
        return self.parser.parse_interactive(stream, chosen_start)


def _validate_frontend_args(parser, lexer) -> None:
    assert_config(parser, ('lalr', 'earley', 'cyk'))
    if not isinstance(lexer, type):     # not a custom lexer?
        expected = {
            'lalr': ('basic', 'contextual'),
            'earley': ('basic', 'dynamic', 'dynamic_complete'),
            'cyk': ('basic', ),
        }[parser]
        assert_config(lexer, expected, 'Parser %r does not support lexer %%r, expected one of %%s' % parser)


def _get_lexer_callbacks(transformer, terminals):
    result = {}
    for terminal in terminals:
        callback = getattr(transformer, terminal.name, None)
        if callback is not None:
            result[terminal.name] = callback
    return result

class PostLexConnector:
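    """Feed the lexer's token stream through a post-lexing processor
    (e.g. an indenter) before it reaches the parser."""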
    def __init__(self, lexer, postlexer):
        self.lexer = lexer
        self.postlexer = postlexer

    def lex(self, lexer_state, parser_state):
        i = self.lexer.lex(lexer_state, parser_state)
        return self.postlexer.process(i)


def create_basic_lexer(lexer_conf, parser, postlex, options) -> BasicLexer:
    cls = (options and options._plugins.get('BasicLexer')) or BasicLexer
    return cls(lexer_conf)

def create_contextual_lexer(lexer_conf: LexerConf, parser, postlex, options) -> ContextualLexer:
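    """Build a contextual lexer for a LALR parser.

    For every parser state, only the terminals that state can actually accept
    (per the parse table) are considered, which resolves many tokenizing
    ambiguities; ``always_accept`` terminals from the postlexer are allowed
    in every state.
    """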
    cls = (options and options._plugins.get('ContextualLexer')) or ContextualLexer
    parse_table: ParseTableBase[int] = parser._parse_table
    states: Dict[int, Collection[str]] = {idx: list(t.keys()) for idx, t in parse_table.states.items()}
    always_accept: Collection[str] = postlex.always_accept if postlex else ()
    return cls(lexer_conf, states, always_accept=always_accept)

def create_lalr_parser(lexer_conf: LexerConf, parser_conf: ParserConf, options=None) -> LALR_Parser:
    debug = options.debug if options else False
    strict = options.strict if options else False
    cls = (options and options._plugins.get('LALR_Parser')) or LALR_Parser
    return cls(parser_conf, debug=debug, strict=strict)

_parser_creators['lalr'] = create_lalr_parser

###}

class EarleyRegexpMatcher:
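    """Matcher for the dynamic Earley lexers: compiles each terminal's regexp
    and matches terminals directly against the input text at a given index."""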
    def __init__(self, lexer_conf):
        self.regexps = {}
        for t in lexer_conf.terminals:
            regexp = t.pattern.to_regexp()
            try:
                width = get_regexp_width(regexp)[0]
            except ValueError:
                raise GrammarError("Bad regexp in token %s: %s" % (t.name, regexp))
            else:
                if width == 0:
                    raise GrammarError("Dynamic Earley doesn't allow zero-width regexps", t)
            if lexer_conf.use_bytes:
                regexp = regexp.encode('utf-8')

            self.regexps[t.name] = lexer_conf.re_module.compile(regexp, lexer_conf.g_regex_flags)

    def match(self, term, text, index=0):
        return self.regexps[term.name].match(text, index)


def create_earley_parser__dynamic(lexer_conf: LexerConf, parser_conf: ParserConf, **kw):
    if lexer_conf.callbacks:
        raise GrammarError("Earley's dynamic lexer doesn't support lexer_callbacks.")

    earley_matcher = EarleyRegexpMatcher(lexer_conf)
    return xearley.Parser(lexer_conf, parser_conf, earley_matcher.match, **kw)

def _match_earley_basic(term, token):
    return term.name == token.type

def create_earley_parser__basic(lexer_conf: LexerConf, parser_conf: ParserConf, **kw):
    return earley.Parser(lexer_conf, parser_conf, _match_earley_basic, **kw)

def create_earley_parser(lexer_conf: LexerConf, parser_conf: ParserConf, options) -> earley.Parser:
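    """Build an Earley parser, choosing the matcher by lexer type:
    'dynamic' and 'dynamic_complete' scan the text directly ('dynamic_complete'
    tries all match lengths, not just the longest), while any other lexer type
    matches pre-lexed tokens by terminal name.
    """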
    resolve_ambiguity = options.ambiguity == 'resolve'
    debug = options.debug if options else False
    tree_class = (options.tree_class or Tree) if options.ambiguity != 'forest' else None

    extra = {}
    if lexer_conf.lexer_type == 'dynamic':
        f = create_earley_parser__dynamic
    elif lexer_conf.lexer_type == 'dynamic_complete':
        extra['complete_lex'] = True
        f = create_earley_parser__dynamic
    else:
        f = create_earley_parser__basic

    return f(lexer_conf, parser_conf, resolve_ambiguity=resolve_ambiguity,
             debug=debug, tree_class=tree_class, ordered_sets=options.ordered_sets, **extra)


class CYK_FrontEnd:
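    """Frontend for the CYK parser: lexes the whole input up front, parses it,
    and then applies the rule callbacks over the resulting tree."""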
    def __init__(self, lexer_conf, parser_conf, options=None):
        self.parser = cyk.Parser(parser_conf.rules)
        self.callbacks = parser_conf.callbacks

    def parse(self, lexer_thread, start):
        tokens = list(lexer_thread.lex(None))
        tree = self.parser.parse(tokens, start)
        return self._transform(tree)

    def _transform(self, tree):
        subtrees = list(tree.iter_subtrees())
        for subtree in subtrees:
            subtree.children = [self._apply_callback(c) if isinstance(c, Tree) else c for c in subtree.children]

        return self._apply_callback(tree)

    def _apply_callback(self, tree):
        return self.callbacks[tree.rule](tree.children)


_parser_creators['earley'] = create_earley_parser
_parser_creators['cyk'] = CYK_FrontEnd


def _construct_parsing_frontend(
        parser_type: _ParserArgType,
        lexer_type: _LexerArgType,
        lexer_conf,
        parser_conf,
        options
):
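    """Record the chosen parser/lexer types on their configs and build the
    frontend from them. This is the main entry point used by ``Lark``."""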
    assert isinstance(lexer_conf, LexerConf)
    assert isinstance(parser_conf, ParserConf)
    parser_conf.parser_type = parser_type
    lexer_conf.lexer_type = lexer_type
    return ParsingFrontend(lexer_conf, parser_conf, options)