/rust/registry/src/index.crates.io-6f17d22bba15001f/lalrpop-util-0.20.2/src/state_machine.rs
Line | Count | Source |
1 | | use alloc::{string::String, vec, vec::Vec}; |
2 | | use core::fmt::Debug; |
3 | | |
4 | | #[cfg(feature = "std")] |
5 | | const DEBUG_ENABLED: bool = false; |
6 | | |
7 | | macro_rules! debug { |
8 | | ($($args:expr),* $(,)*) => { |
9 | | #[cfg(feature = "std")] |
10 | | if DEBUG_ENABLED { |
11 | | eprintln!($($args),*); |
12 | | } |
13 | | } |
14 | | } |
15 | | |
16 | | pub trait ParserDefinition: Sized { |
17 | | /// Represents a location in the input text. If you are using the |
18 | | /// default tokenizer, this will be a `usize`. |
19 | | type Location: Clone + Debug; |
20 | | |
21 | | /// Represents a "user error" -- this can get produced by |
22 | | /// `reduce()` if the grammar includes `=>?` actions. |
23 | | type Error; |
24 | | |
25 | | /// The type emitted by the user's tokenizer (excluding the |
26 | | /// location information). |
27 | | type Token: Clone + Debug; |
28 | | |
29 | | /// We assign a unique index to each token in the grammar, which |
30 | | /// we call its *index*. When we pull in a new `Token` from the |
31 | | /// input, we then match against it to determine its index. Note |
32 | | /// that the actual `Token` is retained too, as it may carry |
33 | | /// additional information (e.g., an `ID` terminal often has a |
34 | | /// string value associated with it; this is not important to the |
35 | | /// parser, but the semantic analyzer will want it). |
36 | | type TokenIndex: Copy + Clone + Debug; |
37 | | |
38 | | /// The type representing things on the LALRPOP stack. Represents |
39 | | /// the union of terminals and nonterminals. |
40 | | type Symbol; |
41 | | |
42 | | /// Type produced by reducing the start symbol. |
43 | | type Success; |
44 | | |
45 | | /// Identifies a state. Typically an i8, i16, or i32 (depending on |
46 | | /// how many states you have). |
47 | | type StateIndex: Copy + Clone + Debug; |
48 | | |
49 | | /// Identifies an action. |
50 | | type Action: ParserAction<Self>; |
51 | | |
52 | | /// Identifies a reduction. |
53 | | type ReduceIndex: Copy + Clone + Debug; |
54 | | |
55 | | /// Identifies a nonterminal. |
56 | | type NonterminalIndex: Copy + Clone + Debug; |
57 | | |
58 | | /// Returns a location representing the "start of the input". |
59 | | fn start_location(&self) -> Self::Location; |
60 | | |
61 | | /// Returns the initial state. |
62 | | fn start_state(&self) -> Self::StateIndex; |
63 | | |
64 | | /// Converts the user's tokens into an internal index; this index |
65 | | /// is then used to index into actions and the like. When using an |
66 | | /// internal tokenizer, these indices are directly produced. When |
67 | | /// using an **external** tokenizer, however, this function matches |
68 | | /// against the patterns given by the user: it is therefore |
69 | | /// fallible, as these patterns may not be exhaustive. If a token |
70 | | /// value is found that doesn't match any of the patterns the user |
71 | | /// supplied, then this function returns `None`, which is |
72 | | /// translated into a parse error by LALRPOP ("unrecognized |
73 | | /// token"). |
74 | | fn token_to_index(&self, token: &Self::Token) -> Option<Self::TokenIndex>; |
75 | | |
76 | | /// Given the top-most state and the pending terminal, returns an |
77 | | /// action. This can be either SHIFT(state), REDUCE(action), or |
78 | | /// ERROR. |
79 | | fn action(&self, state: Self::StateIndex, token_index: Self::TokenIndex) -> Self::Action; |
80 | | |
81 | | /// Returns the action to take if an error occurs in the given |
82 | | /// state. This function is the same as the ordinary `action`, |
83 | | /// except that it applies not to the user's terminals but to the |
84 | | /// "special terminal" `!`. |
85 | | fn error_action(&self, state: Self::StateIndex) -> Self::Action; |
86 | | |
87 | | /// Action to take if EOF occurs in the given state. This function |
88 | | /// is the same as the ordinary `action`, except that it applies |
89 | | /// not to the user's terminals but to the "special terminal" `$`. |
90 | | fn eof_action(&self, state: Self::StateIndex) -> Self::Action; |
91 | | |
92 | | /// If we reduce to a nonterminal in the given state, what state |
93 | | /// do we go to? This is infallible due to the nature of LR(1) |
94 | | /// grammars. |
95 | | fn goto(&self, state: Self::StateIndex, nt: Self::NonterminalIndex) -> Self::StateIndex; |
96 | | |
97 | | /// "Upcast" a terminal into a symbol so we can push it onto the |
98 | | /// parser stack. |
99 | | fn token_to_symbol(&self, token_index: Self::TokenIndex, token: Self::Token) -> Self::Symbol; |
100 | | |
101 | | /// Returns the expected tokens in a given state. This is used for |
102 | | /// error reporting. |
103 | | fn expected_tokens(&self, state: Self::StateIndex) -> Vec<String>; |
104 | | |
105 | | /// Returns the expected tokens given the current stack of states. This is |
106 | | /// used in the same way as `expected_tokens` but allows more precise |
107 | | /// reporting of accepted tokens in some cases. |
108 | 0 | fn expected_tokens_from_states(&self, states: &[Self::StateIndex]) -> Vec<String> { |
109 | 0 | // Default to using the preexisting `expected_tokens` method |
110 | 0 | self.expected_tokens(*states.last().unwrap()) |
111 | 0 | } |
112 | | |
113 | | /// True if this grammar supports error recovery. |
114 | | fn uses_error_recovery(&self) -> bool; |
115 | | |
116 | | /// Given error information, creates an error recovery symbol that |
117 | | /// we push onto the stack (and supply to user actions). |
118 | | fn error_recovery_symbol(&self, recovery: ErrorRecovery<Self>) -> Self::Symbol; |
119 | | |
120 | | /// Execute a reduction in the given state: that is, execute user |
121 | | /// code. The start location indicates the "starting point" of the |
122 | | /// current lookahead that is triggering the reduction (it is |
123 | | /// `None` for EOF). |
124 | | /// |
125 | | /// The `states` and `symbols` vectors represent the internal |
126 | | /// state machine vectors; they are given to `reduce` so that it |
127 | | /// can pop off states that no longer apply (and consume their |
128 | | /// symbols). At the end, it should also push the new state and |
129 | | /// symbol produced. |
130 | | /// |
131 | | /// Returns a `Some` if we reduced the start state and hence |
132 | | /// parsing is complete, or if we encountered an irrecoverable |
133 | | /// error. |
134 | | /// |
135 | | /// FIXME. It would be nice to not have so much logic live in |
136 | | /// reduce. It should just be given an iterator of popped symbols |
137 | | /// and return the newly produced symbol (or error). We can use |
138 | | /// `simulate_reduce` and our own information to drive the rest, |
139 | | /// right? This would also allow us -- I think -- to extend error |
140 | | /// recovery to cover user-produced errors. |
141 | | fn reduce( |
142 | | &mut self, |
143 | | reduce_index: Self::ReduceIndex, |
144 | | start_location: Option<&Self::Location>, |
145 | | states: &mut Vec<Self::StateIndex>, |
146 | | symbols: &mut Vec<SymbolTriple<Self>>, |
147 | | ) -> Option<ParseResult<Self>>; |
148 | | |
149 | | /// Returns information about how many states will be popped |
150 | | /// during a reduction, and what nonterminal would be produced as |
151 | | /// a result. |
152 | | fn simulate_reduce(&self, action: Self::ReduceIndex) -> SimulatedReduce<Self>; |
153 | | } |
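
The `token_to_index` method above is the one fallible step in feeding the machine: an externally produced token must be matched against the user's terminal patterns before it can index into the tables. A freestanding sketch of that classification (toy, hypothetical types; not LALRPOP's generated code):

    // Toy analogue of `token_to_index` (all names hypothetical): classify an
    // external token into a dense terminal index by pattern matching. The match
    // can fail, which the driver reports as an "unrecognized token" error.
    #[allow(dead_code)]
    #[derive(Clone, Debug)]
    enum ExternalTok { Ident(String), Plus, Unsupported }

    fn token_to_index(tok: &ExternalTok) -> Option<usize> {
        match tok {
            ExternalTok::Ident(_) => Some(0), // e.g. the grammar's `ID` terminal
            ExternalTok::Plus => Some(1),     // e.g. the grammar's `"+"` terminal
            ExternalTok::Unsupported => None, // no pattern matched -> parse error
        }
    }

    fn main() {
        assert_eq!(token_to_index(&ExternalTok::Ident("x".into())), Some(0));
        assert_eq!(token_to_index(&ExternalTok::Unsupported), None);
    }
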
154 | | |
155 | | pub trait ParserAction<D: ParserDefinition>: Copy + Clone + Debug { |
156 | | fn as_shift(self) -> Option<D::StateIndex>; |
157 | | fn as_reduce(self) -> Option<D::ReduceIndex>; |
158 | | fn is_shift(self) -> bool; |
159 | | fn is_reduce(self) -> bool; |
160 | | fn is_error(self) -> bool; |
161 | | } |
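
Generated parsers keep `Action` small; one plausible encoding (hypothetical, though consistent with the `state action - 1` comment in `parse()` below) packs the whole action into a signed integer:

    // Sketch of a compact action encoding in the spirit of `ParserAction`
    // (illustrative only; the exact layout LALRPOP emits may differ):
    //   n > 0  => shift to state n - 1
    //   n < 0  => reduce with production -n - 1
    //   n == 0 => error
    #[derive(Copy, Clone, Debug)]
    struct Act(i32);

    impl Act {
        fn as_shift(self) -> Option<i32> {
            if self.0 > 0 { Some(self.0 - 1) } else { None }
        }
        fn as_reduce(self) -> Option<i32> {
            if self.0 < 0 { Some(-self.0 - 1) } else { None }
        }
        fn is_error(self) -> bool { self.0 == 0 }
    }

    fn main() {
        assert_eq!(Act(5).as_shift(), Some(4));   // shift to state 4
        assert_eq!(Act(-3).as_reduce(), Some(2)); // reduce with production 2
        assert!(Act(0).is_error());               // error entry
    }
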
162 | | |
163 | | pub enum SimulatedReduce<D: ParserDefinition> { |
164 | | Reduce { |
165 | | states_to_pop: usize, |
166 | | nonterminal_produced: D::NonterminalIndex, |
167 | | }, |
168 | | |
169 | | // This reduce is the "start" fn, so the parse is done. |
170 | | Accept, |
171 | | } |
172 | | |
173 | | // These aliases are an elaborate hack to get around |
174 | | // the warnings when you define a type alias like `type Foo<D: Trait>` |
175 | | #[doc(hidden)] |
176 | | pub type Location<D> = <D as ParserDefinition>::Location; |
177 | | #[doc(hidden)] |
178 | | pub type Token<D> = <D as ParserDefinition>::Token; |
179 | | #[doc(hidden)] |
180 | | pub type Error<D> = <D as ParserDefinition>::Error; |
181 | | #[doc(hidden)] |
182 | | pub type Success<D> = <D as ParserDefinition>::Success; |
183 | | #[doc(hidden)] |
184 | | pub type Symbol<D> = <D as ParserDefinition>::Symbol; |
185 | | |
186 | | pub type ParseError<D> = crate::ParseError<Location<D>, Token<D>, Error<D>>; |
187 | | pub type ParseResult<D> = Result<Success<D>, ParseError<D>>; |
188 | | pub type TokenTriple<D> = (Location<D>, Token<D>, Location<D>); |
189 | | pub type SymbolTriple<D> = (Location<D>, Symbol<D>, Location<D>); |
190 | | pub type ErrorRecovery<D> = crate::ErrorRecovery<Location<D>, Token<D>, Error<D>>; |
191 | | |
192 | | pub struct Parser<D, I> |
193 | | where |
194 | | D: ParserDefinition, |
195 | | I: Iterator<Item = Result<TokenTriple<D>, ParseError<D>>>, |
196 | | { |
197 | | definition: D, |
198 | | tokens: I, |
199 | | states: Vec<D::StateIndex>, |
200 | | symbols: Vec<SymbolTriple<D>>, |
201 | | last_location: D::Location, |
202 | | } |
203 | | |
204 | | enum NextToken<D: ParserDefinition> { |
205 | | FoundToken(TokenTriple<D>, D::TokenIndex), |
206 | | Eof, |
207 | | Done(ParseResult<D>), |
208 | | } |
209 | | |
210 | | impl<D, I> Parser<D, I> |
211 | | where |
212 | | D: ParserDefinition, |
213 | | I: Iterator<Item = Result<TokenTriple<D>, ParseError<D>>>, |
214 | | { |
215 | 1.02M | pub fn drive(definition: D, tokens: I) -> ParseResult<D> { |
216 | 1.02M | let last_location = definition.start_location(); |
217 | 1.02M | let start_state = definition.start_state(); |
218 | 1.02M | Parser { |
219 | 1.02M | definition, |
220 | 1.02M | tokens, |
221 | 1.02M | states: vec![start_state], |
222 | 1.02M | symbols: vec![], |
223 | 1.02M | last_location, |
224 | 1.02M | } |
225 | 1.02M | .parse() |
226 | 1.02M | }
227 | | |
228 | 6.00M | fn top_state(&self) -> D::StateIndex { |
229 | 6.00M | *self.states.last().unwrap() |
230 | 6.00M | }
231 | | |
232 | 1.02M | fn parse(&mut self) -> ParseResult<D> { |
233 | | // Outer loop: each time we continue around this loop, we |
234 | | // shift a new token from the input. We break from the loop |
235 | | // when the end of the input is reached (we return early if an |
236 | | // error occurs). |
237 | | 'shift: loop { |
238 | 2.37M | let (mut lookahead, mut token_index) = match self.next_token() { |
239 | 1.34M | NextToken::FoundToken(l, i) => (l, i), |
240 | 1.02M | NextToken::Eof => return self.parse_eof(), |
241 | 0 | NextToken::Done(e) => return e, |
242 | | }; |
243 | | |
244 | 1.34M | debug!("+ SHIFT: {:?}", lookahead); |
245 | | |
246 | 1.34M | debug!("\\ token_index: {:?}", token_index); |
247 | | |
248 | | 'inner: loop { |
249 | 1.50M | let top_state = self.top_state(); |
250 | 1.50M | let action = self.definition.action(top_state, token_index); |
251 | 1.50M | debug!("\\ action: {:?}", action); |
252 | | |
253 | 1.50M | if let Some(target_state) = action.as_shift() { |
254 | 1.34M | debug!("\\ shift to: {:?}", target_state); |
255 | | |
256 | | // Shift and transition to state `action - 1` |
257 | 1.34M | let symbol = self.definition.token_to_symbol(token_index, lookahead.1); |
258 | 1.34M | self.states.push(target_state); |
259 | 1.34M | self.symbols.push((lookahead.0, symbol, lookahead.2)); |
260 | 1.34M | continue 'shift; |
261 | 158k | } else if let Some(reduce_index) = action.as_reduce() { |
262 | 158k | debug!("\\ reduce to: {:?}", reduce_index); |
263 | | |
264 | 158k | if let Some(r) = self.reduce(reduce_index, Some(&lookahead.0)) { |
265 | 0 | return match r { |
266 | | // we reached eof, but still have lookahead |
267 | 0 | Ok(_) => Err(crate::ParseError::ExtraToken { token: lookahead }), |
268 | 0 | Err(e) => Err(e), |
269 | | }; |
270 | 158k | } |
271 | | } else { |
272 | 0 | debug!("\\ error -- initiating error recovery!"); |
273 | | |
274 | 0 | match self.error_recovery(Some(lookahead), Some(token_index)) { |
275 | 0 | NextToken::FoundToken(l, i) => { |
276 | 0 | lookahead = l; |
277 | 0 | token_index = i; |
278 | 0 | continue 'inner; |
279 | | } |
280 | 0 | NextToken::Eof => return self.parse_eof(), |
281 | 0 | NextToken::Done(e) => return e, |
282 | | } |
283 | | } |
284 | | } |
285 | | } |
286 | 1.02M | }
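
The loop above is the generic LR driver: look at the lookahead, consult `action`, then either shift the token or reduce and retry with the same lookahead. A self-contained miniature of the same technique for the toy grammar `E -> E "+" n | n` (hand-written SLR tables, purely illustrative, not generated code):

    // Miniature shift/reduce driver over hypothetical, hand-written tables.
    #[derive(Copy, Clone, Debug)]
    enum Tok { Num(i64), Plus }

    #[derive(Copy, Clone)]
    enum Action { Shift(usize), Reduce(usize), Accept, Error }

    // ACTION[state][terminal]; terminals: 0 = n, 1 = '+', 2 = end of input.
    const ACTION: [[Action; 3]; 5] = [
        [Action::Shift(2), Action::Error,     Action::Error],     // 0: expect n
        [Action::Error,    Action::Shift(3),  Action::Accept],    // 1: have E
        [Action::Error,    Action::Reduce(1), Action::Reduce(1)], // 2: E -> n .
        [Action::Shift(4), Action::Error,     Action::Error],     // 3: E "+" . n
        [Action::Error,    Action::Reduce(0), Action::Reduce(0)], // 4: E -> E "+" n .
    ];

    fn term_index(tok: Option<&Tok>) -> usize {
        match tok { Some(Tok::Num(_)) => 0, Some(Tok::Plus) => 1, None => 2 }
    }

    fn parse(tokens: &[Tok]) -> Result<i64, String> {
        let mut states = vec![0usize];
        let mut values: Vec<i64> = vec![]; // symbol stack (here: just the value)
        let mut input = tokens.iter().peekable();
        loop {
            let look = input.peek().copied();
            match ACTION[*states.last().unwrap()][term_index(look)] {
                Action::Shift(s) => {
                    states.push(s);
                    match input.next() {
                        Some(Tok::Num(n)) => values.push(*n),
                        _ => values.push(0), // placeholder for "+"
                    }
                }
                Action::Reduce(0) => { // E -> E "+" n
                    states.truncate(states.len() - 3);
                    let n = values.pop().unwrap();
                    values.pop(); // discard the "+" placeholder
                    let e = values.pop().unwrap();
                    values.push(e + n);
                    states.push(1); // goto(exposed, E); always state 1 here
                }
                Action::Reduce(_) => { // E -> n: value is already on the stack
                    states.truncate(states.len() - 1);
                    states.push(1); // goto(exposed, E)
                }
                Action::Accept => return Ok(values.pop().unwrap()),
                Action::Error => return Err(format!("unexpected {:?}", look)),
            }
        }
    }

    fn main() {
        let toks = [Tok::Num(1), Tok::Plus, Tok::Num(2), Tok::Plus, Tok::Num(3)];
        assert_eq!(parse(&toks), Ok(6));
    }

Note how a reduce leaves the lookahead in place and loops again, exactly as the `'inner` loop does above.
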
287 | | |
288 | | /// Invoked when we have no more tokens to consume. |
289 | 1.02M | fn parse_eof(&mut self) -> ParseResult<D> { |
290 | | loop { |
291 | 4.50M | let top_state = self.top_state(); |
292 | 4.50M | let action = self.definition.eof_action(top_state); |
293 | 4.50M | if let Some(reduce_index) = action.as_reduce() { |
294 | 1.02M | if let Some(result) = |
295 | 4.50M | self.definition |
296 | 4.50M | .reduce(reduce_index, None, &mut self.states, &mut self.symbols) |
297 | | { |
298 | 1.02M | return result; |
299 | 3.47M | } |
300 | | } else { |
301 | 0 | match self.error_recovery(None, None) { |
302 | 0 | NextToken::FoundToken(..) => panic!("cannot find token at EOF"), |
303 | 0 | NextToken::Done(e) => return e, |
304 | 0 | NextToken::Eof => continue, |
305 | | } |
306 | | } |
307 | | } |
308 | 1.02M | }
309 | | |
310 | 0 | fn error_recovery( |
311 | 0 | &mut self, |
312 | 0 | mut opt_lookahead: Option<TokenTriple<D>>, |
313 | 0 | mut opt_token_index: Option<D::TokenIndex>, |
314 | 0 | ) -> NextToken<D> { |
315 | 0 | debug!( |
316 | 0 | "\\+ error_recovery(opt_lookahead={:?}, opt_token_index={:?})", |
317 | | opt_lookahead, opt_token_index, |
318 | | ); |
319 | | |
320 | 0 | if !self.definition.uses_error_recovery() { |
321 | 0 | debug!("\\ error -- no error recovery!"); |
322 | | |
323 | 0 | return NextToken::Done(Err( |
324 | 0 | self.unrecognized_token_error(opt_lookahead, &self.states) |
325 | 0 | )); |
326 | 0 | } |
327 | 0 |
328 | 0 | let error = self.unrecognized_token_error(opt_lookahead.clone(), &self.states); |
329 | 0 |
330 | 0 | let mut dropped_tokens = vec![]; |
331 | | |
332 | | // We are going to insert ERROR into the lookahead. So, first, |
333 | | // perform all reductions from current state triggered by having |
334 | | // ERROR in the lookahead. |
335 | | loop { |
336 | 0 | let state = self.top_state(); |
337 | 0 | let action = self.definition.error_action(state); |
338 | 0 | if let Some(reduce_index) = action.as_reduce() { |
339 | 0 | debug!("\\\\ reducing: {:?}", reduce_index); |
340 | | |
341 | 0 | if let Some(result) = |
342 | 0 | self.reduce(reduce_index, opt_lookahead.as_ref().map(|l| &l.0))
343 | | { |
344 | 0 | debug!("\\\\ reduced to a result"); |
345 | | |
346 | 0 | return NextToken::Done(result); |
347 | 0 | } |
348 | | } else { |
349 | 0 | break; |
350 | 0 | } |
351 | 0 | } |
352 | 0 |
353 | 0 | // Now try to find the recovery state. |
354 | 0 | let states_len = self.states.len(); |
355 | 0 | let top = 'find_state: loop { |
356 | | // Go backwards through the states... |
357 | 0 | debug!( |
358 | 0 | "\\\\+ error_recovery: find_state loop, {:?} states = {:?}", |
359 | 0 | self.states.len(), |
360 | | self.states, |
361 | | ); |
362 | | |
363 | 0 | for top in (0..states_len).rev() { |
364 | 0 | let state = self.states[top]; |
365 | 0 | debug!("\\\\\\ top = {:?}, state = {:?}", top, state); |
366 | | |
367 | | // ...fetch action for error token... |
368 | 0 | let action = self.definition.error_action(state); |
369 | 0 | debug!("\\\\\\ action = {:?}", action); |
370 | 0 | if let Some(error_state) = action.as_shift() { |
371 | | // If action is a shift that takes us into `error_state`, |
372 | | // and `error_state` can accept this lookahead, we are done. |
373 | 0 | if self.accepts(error_state, &self.states[..=top], opt_token_index) { |
374 | 0 | debug!("\\\\\\ accepted!"); |
375 | 0 | break 'find_state top; |
376 | 0 | } |
377 | | } else { |
378 | | // ...else, if action is error or reduce, go to next state. |
379 | 0 | continue; |
380 | | } |
381 | | } |
382 | | |
383 | | // Otherwise, if we couldn't find a state that would -- |
384 | | // after shifting the error token -- accept the lookahead, |
385 | | // then drop the lookahead and advance to the next token in |
386 | | // the input. |
387 | 0 | match opt_lookahead.take() { |
388 | | // If the lookahead is EOF, we can't drop any more |
389 | | // tokens, abort error recovery and just report the |
390 | | // original error (it might be nice if we would |
391 | | // propagate back the dropped tokens, though). |
392 | | None => { |
393 | 0 | debug!("\\\\\\ no more lookahead, report error"); |
394 | 0 | return NextToken::Done(Err(error)); |
395 | | } |
396 | | |
397 | | // Else, drop the current token and shift to the |
398 | | // next. If there is a next token, we will `continue` |
399 | | // to the start of the `'find_state` loop. |
400 | 0 | Some(lookahead) => { |
401 | 0 | debug!("\\\\\\ dropping lookahead token"); |
402 | | |
403 | 0 | dropped_tokens.push(lookahead); |
404 | 0 | match self.next_token() { |
405 | 0 | NextToken::FoundToken(next_lookahead, next_token_index) => { |
406 | 0 | opt_lookahead = Some(next_lookahead); |
407 | 0 | opt_token_index = Some(next_token_index); |
408 | 0 | } |
409 | | NextToken::Eof => { |
410 | 0 | debug!("\\\\\\ reached EOF"); |
411 | 0 | opt_lookahead = None; |
412 | 0 | opt_token_index = None; |
413 | | } |
414 | 0 | NextToken::Done(e) => { |
415 | 0 | debug!("\\\\\\ no more tokens"); |
416 | 0 | return NextToken::Done(e); |
417 | | } |
418 | | } |
419 | | } |
420 | | } |
421 | | }; |
422 | | |
423 | | // If we get here, we are ready to push the error recovery state. |
424 | | |
425 | | // We have to compute the span for the error recovery |
426 | | // token. We do this first, before we pop any symbols off the |
427 | | // stack. There are several possibilities, in order of |
428 | | // preference. |
429 | | // |
430 | | // For the **start** of the span, we prefer to use the start of any |
431 | | // popped states. This represents parts of the input we had consumed but |
432 | | // had to roll back and ignore. |
433 | | // |
434 | | // Example: |
435 | | // |
436 | | // a + (b + /) |
437 | | // ^ start point is here, since this `+` will be popped off |
438 | | // |
439 | | // If there are no popped states, but there *are* dropped tokens, we can use |
440 | | // the start of those. |
441 | | // |
442 | | // Example: |
443 | | // |
444 | | // a + (b + c e) |
445 | | // ^ start point would be here |
446 | | // |
447 | | // Finally, if there are no popped states *nor* dropped tokens, we can use |
448 | | // the end of the top-most state. |
449 | | |
450 | 0 | let start = if let Some(popped_sym) = self.symbols.get(top) { |
451 | 0 | popped_sym.0.clone() |
452 | 0 | } else if let Some(dropped_token) = dropped_tokens.first() { |
453 | 0 | dropped_token.0.clone() |
454 | 0 | } else if top > 0 { |
455 | 0 | self.symbols[top - 1].2.clone() |
456 | | } else { |
457 | 0 | self.definition.start_location() |
458 | | }; |
459 | | |
460 | | // For the **end** of the span, here are the possibilities: |
461 | | // |
462 | | // We prefer to use the end of the last dropped token. |
463 | | // |
464 | | // Examples: |
465 | | // |
466 | | // a + (b + /) |
467 | | // --- |
468 | | // a + (b c) |
469 | | // - |
470 | | // |
471 | | // But, if there are no dropped tokens, we will use the end of the popped states, |
472 | | // if any: |
473 | | // |
474 | | // a + / |
475 | | // - |
476 | | // |
477 | | // If there are neither dropped tokens *nor* popped states, |
478 | | // then the user is simulating insertion of an operator. In |
479 | | // this case, we prefer the start of the lookahead, but |
480 | | // fall back to the span's start if we are at EOF. |
481 | | // |
482 | | // Examples: |
483 | | // |
484 | | // a + (b c) |
485 | | // - |
486 | | |
487 | 0 | let end = if let Some(dropped_token) = dropped_tokens.last() { |
488 | 0 | dropped_token.2.clone() |
489 | 0 | } else if states_len - 1 > top { |
490 | 0 | self.symbols.last().unwrap().2.clone() |
491 | 0 | } else if let Some(lookahead) = opt_lookahead.as_ref() { |
492 | 0 | lookahead.0.clone() |
493 | | } else { |
494 | 0 | start.clone() |
495 | | }; |
496 | | |
497 | 0 | self.states.truncate(top + 1); |
498 | 0 | self.symbols.truncate(top); |
499 | 0 |
500 | 0 | let recover_state = self.states[top]; |
501 | 0 | let error_action = self.definition.error_action(recover_state); |
502 | 0 | let error_state = error_action.as_shift().unwrap(); |
503 | 0 | self.states.push(error_state); |
504 | 0 | let recovery = self.definition.error_recovery_symbol(crate::ErrorRecovery { |
505 | 0 | error, |
506 | 0 | dropped_tokens, |
507 | 0 | }); |
508 | 0 | self.symbols.push((start, recovery, end)); |
509 | 0 |
510 | 0 | match (opt_lookahead, opt_token_index) { |
511 | 0 | (Some(l), Some(i)) => NextToken::FoundToken(l, i), |
512 | 0 | (None, None) => NextToken::Eof, |
513 | 0 | (l, i) => panic!("lookahead and token_index mismatched: {:?}, {:?}", l, i), |
514 | | } |
515 | 0 | }
516 | | |
517 | | /// The `accepts` function has the job of figuring out whether the |
518 | | /// given error state would "accept" the given lookahead. We |
519 | | /// basically trace through the LR automaton looking for one of |
520 | | /// two outcomes: |
521 | | /// |
522 | | /// - the lookahead is eventually shifted |
523 | | /// - we reduce to the end state successfully (in the case of EOF). |
524 | | /// |
525 | | /// If we used the pure LR(1) algorithm, we wouldn't need this |
526 | | /// function, because we would be guaranteed to error immediately |
527 | | /// (and not after some number of reductions). But with an LALR |
528 | | /// (or Lane Table) generated automaton, it is possible to reduce |
529 | | /// some number of times before encountering an error. Failing to |
530 | | /// take this into account can lead error recovery into an |
531 | | /// infinite loop (see the `error_recovery_lalr_loop` test) or |
532 | | /// produce crappy results (see `error_recovery_lock_in`). |
533 | 0 | fn accepts( |
534 | 0 | &self, |
535 | 0 | error_state: D::StateIndex, |
536 | 0 | states: &[D::StateIndex], |
537 | 0 | opt_token_index: Option<D::TokenIndex>, |
538 | 0 | ) -> bool { |
539 | 0 | debug!( |
540 | 0 | "\\\\\\+ accepts(error_state={:?}, states={:?}, opt_token_index={:?})", |
541 | | error_state, states, opt_token_index, |
542 | | ); |
543 | | |
544 | 0 | let mut states = states.to_vec(); |
545 | 0 | states.push(error_state); |
546 | | loop { |
547 | 0 | let mut states_len = states.len(); |
548 | 0 | let top = states[states_len - 1]; |
549 | 0 | let action = match opt_token_index { |
550 | 0 | None => self.definition.eof_action(top), |
551 | 0 | Some(i) => self.definition.action(top, i), |
552 | | }; |
553 | | |
554 | | // If we encounter an error action, we do **not** accept. |
555 | 0 | if action.is_error() { |
556 | 0 | debug!("\\\\\\\\ accepts: error"); |
557 | 0 | return false; |
558 | 0 | } |
559 | | |
560 | | // If we encounter a reduce action, we need to simulate its |
561 | | // effect on the state stack. |
562 | 0 | if let Some(reduce_action) = action.as_reduce() { |
563 | 0 | match self.definition.simulate_reduce(reduce_action) { |
564 | | SimulatedReduce::Reduce { |
565 | 0 | states_to_pop, |
566 | 0 | nonterminal_produced, |
567 | 0 | } => { |
568 | 0 | states_len -= states_to_pop; |
569 | 0 | states.truncate(states_len); |
570 | 0 | let top = states[states_len - 1]; |
571 | 0 | let next_state = self.definition.goto(top, nonterminal_produced); |
572 | 0 | states.push(next_state); |
573 | 0 | } |
574 | | |
575 | | SimulatedReduce::Accept => { |
576 | 0 | debug!("\\\\\\\\ accepts: reduce accepts!"); |
577 | 0 | return true; |
578 | | } |
579 | | } |
580 | | } else { |
581 | | // If we encounter a shift action, we DO accept. |
582 | 0 | debug!("\\\\\\\\ accepts: shift accepts!"); |
583 | 0 | assert!(action.is_shift()); |
584 | 0 | return true; |
585 | | } |
586 | | } |
587 | 0 | }
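
In other words, with LALR-merged states the first table entry for a bad lookahead can legitimately be a reduce, so `accepts` must keep simulating pops and gotos until it reaches a shift (accept) or an error entry (reject). A freestanding sketch of that check, where toy closures stand in for the generated action/goto tables (everything here is hypothetical):

    // Sketch of the accept/reject simulation over hypothetical tables.
    #[allow(dead_code)]
    #[derive(Copy, Clone)]
    enum Sim { Shift, Reduce { pop: usize, nonterminal: usize }, Error, Accept }

    fn would_accept(
        mut states: Vec<usize>,
        action: impl Fn(usize, usize) -> Sim, // (state, terminal) -> action
        goto: impl Fn(usize, usize) -> usize, // (state, nonterminal) -> state
        terminal: usize,
    ) -> bool {
        loop {
            let top = *states.last().unwrap();
            match action(top, terminal) {
                Sim::Shift | Sim::Accept => return true, // lookahead gets consumed
                Sim::Error => return false,              // lookahead is rejected
                Sim::Reduce { pop, nonterminal } => {
                    let new_len = states.len() - pop;
                    states.truncate(new_len);
                    let exposed = *states.last().unwrap();
                    states.push(goto(exposed, nonterminal));
                }
            }
        }
    }

    fn main() {
        // State 1 only offers a reduce on terminal 0, and the goto lands in
        // state 2, which then rejects it: a naive "is this an error entry?"
        // probe at state 1 would say no, but the simulation answers correctly.
        let action = |state: usize, _t: usize| match state {
            1 => Sim::Reduce { pop: 1, nonterminal: 0 },
            2 => Sim::Error,
            _ => Sim::Shift,
        };
        let goto = |_state: usize, _nt: usize| 2;
        assert!(!would_accept(vec![0, 1], action, goto, 0));
        assert!(would_accept(vec![0], action, goto, 0));
    }
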
588 | | |
589 | 158k | fn reduce( |
590 | 158k | &mut self, |
591 | 158k | action: D::ReduceIndex, |
592 | 158k | lookahead_start: Option<&D::Location>, |
593 | 158k | ) -> Option<ParseResult<D>> { |
594 | 158k | self.definition |
595 | 158k | .reduce(action, lookahead_start, &mut self.states, &mut self.symbols) |
596 | 158k | }
597 | | |
598 | 0 | fn unrecognized_token_error( |
599 | 0 | &self, |
600 | 0 | token: Option<TokenTriple<D>>, |
601 | 0 | states: &[D::StateIndex], |
602 | 0 | ) -> ParseError<D> { |
603 | 0 | match token { |
604 | 0 | Some(token) => crate::ParseError::UnrecognizedToken { |
605 | 0 | token, |
606 | 0 | expected: self.definition.expected_tokens_from_states(states), |
607 | 0 | }, |
608 | 0 | None => crate::ParseError::UnrecognizedEof { |
609 | 0 | location: self.last_location.clone(), |
610 | 0 | expected: self.definition.expected_tokens_from_states(states), |
611 | 0 | }, |
612 | | } |
613 | 0 | }
614 | | |
615 | | /// Consume the next token from the input and classify it into a |
616 | | /// token index. Classification can fail with an error. If there |
617 | | /// are no more tokens, signal EOF. |
618 | 2.37M | fn next_token(&mut self) -> NextToken<D> { |
619 | 2.37M | let token = match self.tokens.next() { |
620 | 1.34M | Some(Ok(v)) => v, |
621 | 0 | Some(Err(e)) => return NextToken::Done(Err(e)), |
622 | 1.02M | None => return NextToken::Eof, |
623 | | }; |
624 | | |
625 | 1.34M | self.last_location = token.2.clone(); |
626 | | |
627 | 1.34M | let token_index = match self.definition.token_to_index(&token.1) { |
628 | 1.34M | Some(i) => i, |
629 | | None => { |
630 | 0 | return NextToken::Done(Err( |
631 | 0 | self.unrecognized_token_error(Some(token), &self.states) |
632 | 0 | )) |
633 | | } |
634 | | }; |
635 | | |
636 | 1.34M | NextToken::FoundToken(token, token_index) |
637 | 2.37M | } |
Instantiations of `next_token` (lines 618-637): __parse__Expr 628k calls (471k tokens, 157k EOF), __parse__Name 1.74M calls (871k tokens, 871k EOF), __parse__Ident 48 calls (24 tokens, 24 EOF), __parse__Policies 292 calls (290 tokens, 2 EOF); __parse__Ref, __parse__Policy, __parse__Primary, and the generic Parser<_, _> instantiation unexecuted.
638 | | } |
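The `next_token` listing above classifies each incoming token into one of three outcomes: a found token plus its terminal index, a hard error (lexer failure or unrecognized token), or end of input. The following is a minimal standalone sketch of that flow; `NextToken` and `classify` here are illustrative stand-ins and do not mirror lalrpop-util's internal signatures.

use std::fmt::Debug;

// Stand-in for the three outcomes the parser's `next_token` can signal.
#[derive(Debug)]
enum NextToken<T> {
    FoundToken(T, usize), // token plus its classified terminal index
    Eof,                  // input exhausted
    Error(String),        // lexer error or unrecognized token
}

// Mirror of the classification flow: pull the next token, then map it to an index.
fn classify<T: Debug>(
    next: Option<Result<T, String>>,
    token_to_index: impl Fn(&T) -> Option<usize>,
) -> NextToken<T> {
    match next {
        None => NextToken::Eof,
        Some(Err(e)) => NextToken::Error(e),
        Some(Ok(token)) => match token_to_index(&token) {
            Some(ix) => NextToken::FoundToken(token, ix),
            None => NextToken::Error(format!("unrecognized token {token:?}")),
        },
    }
}

fn main() {
    // Only "id" and "+" are recognized terminals in this toy setup.
    let to_index = |t: &&str| ["id", "+"].iter().position(|k| k == t);
    let input: Vec<Result<&str, String>> = vec![Ok("id"), Ok("?")];
    let mut it = input.into_iter();
    println!("{:?}", classify(it.next(), to_index)); // FoundToken("id", 0)
    println!("{:?}", classify(it.next(), to_index)); // Error("unrecognized token \"?\"")
    println!("{:?}", classify(it.next(), to_index)); // Eof
}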
639 | | |
640 | | /// In LALRPOP generated rules, we actually use `i32`, `i16`, or `i8` |
641 | | /// to represent all of the various indices (we use the smallest one |
642 | | /// that will fit). So implement `ParserAction` for each of those. |
643 | | macro_rules! integral_indices { |
644 | | ($t:ty) => { |
645 | | impl<D: ParserDefinition<StateIndex = $t, ReduceIndex = $t>> ParserAction<D> for $t { |
646 | 1.50M | fn as_shift(self) -> Option<D::StateIndex> { |
647 | 1.50M | if self > 0 { |
648 | 1.34M | Some(self - 1) |
649 | | } else { |
650 | 158k | None |
651 | | } |
652 | 1.50M | } |
Instantiations of `<i16 as ParserAction>::as_shift` (lines 646-652): __parse__Expr 628k calls (471k shift, 157k non-shift), __parse__Name 871k (all shift), __parse__Ident 24 (all shift), __parse__Policies 1.20k (290 shift, 914 non-shift); __parse__Ref, __parse__Policy, and __parse__Primary unexecuted, as are the generic i32, i16, and i8 instantiations.
653 | | |
654 | 4.66M | fn as_reduce(self) -> Option<D::ReduceIndex> { |
655 | 4.66M | if self < 0 { |
656 | 4.66M | Some(-(self + 1)) |
657 | | } else { |
658 | 0 | None |
659 | | } |
660 | 4.66M | } |
Instantiations of `<i16 as ParserAction>::as_reduce` (lines 654-660): __parse__Expr 2.04M calls, __parse__Name 2.61M, __parse__Ident 72, __parse__Policies 922, each taking the reduce branch every time; __parse__Ref, __parse__Policy, and __parse__Primary unexecuted, as are the generic i32, i16, and i8 instantiations.
661 | | |
662 | 0 | fn is_shift(self) -> bool { |
663 | 0 | self > 0 |
664 | 0 | } |
Instantiations of `is_shift`: all unexecuted.
665 | | |
666 | 0 | fn is_reduce(self) -> bool { |
667 | 0 | self < 0 |
668 | 0 | } |
Instantiations of `is_reduce` (generic i32, i16, i8): unexecuted.
669 | | |
670 | 0 | fn is_error(self) -> bool { |
671 | 0 | self == 0 |
672 | 0 | } |
Instantiations of `is_error`: all unexecuted.
673 | | } |
674 | | }; |
675 | | } |
676 | | |
677 | | integral_indices!(i32); |
678 | | integral_indices!(i16); |
679 | | integral_indices!(i8); |
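As the comment above the `integral_indices!` macro notes, LALRPOP packs parse-table actions into a single signed integer: a positive value n means "shift to state n - 1", a negative value means "reduce using reduction -(n + 1)", and 0 means error, exactly as `as_shift`, `as_reduce`, and `is_error` decode above. The sketch below replays that encoding with a stand-in `Action` enum; it illustrates the scheme only and is not lalrpop-util API.

// Stand-in decoder for the signed-integer action encoding shown above:
//   raw > 0   => shift to state raw - 1
//   raw < 0   => reduce with reduction index -(raw + 1)
//   raw == 0  => error
#[derive(Debug, PartialEq)]
enum Action {
    Shift(i16),
    Reduce(i16),
    Error,
}

fn decode(raw: i16) -> Action {
    if raw > 0 {
        Action::Shift(raw - 1)
    } else if raw < 0 {
        Action::Reduce(-(raw + 1))
    } else {
        Action::Error
    }
}

fn main() {
    assert_eq!(decode(5), Action::Shift(4));   // shift to state 4
    assert_eq!(decode(-3), Action::Reduce(2)); // reduce with reduction index 2
    assert_eq!(decode(0), Action::Error);      // table entry 0 is an error
    println!("action encoding decodes as expected");
}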