/src/wgpu/naga/src/front/spv/next_block.rs
Line | Count | Source |
1 | | //! Implementation of [`Frontend::next_block()`]. |
2 | | //! |
3 | | //! This method is split out into its own module purely because it is so long. |
4 | | |
5 | | use alloc::{format, vec, vec::Vec}; |
6 | | |
7 | | use crate::front::spv::{ |
8 | | convert::{map_binary_operator, map_relational_fun}, |
9 | | image, resolve_constant, BlockContext, Body, BodyFragment, Constant, Error, Frontend, |
10 | | LookupExpression, LookupHelper as _, LookupLoadOverride, MergeBlockInformation, PhiExpression, |
11 | | SignAnchor, |
12 | | }; |
13 | | use crate::Handle; |
14 | | |
15 | | impl<I: Iterator<Item = u32>> Frontend<I> { |
16 | | /// Add the next SPIR-V block's contents to `block_ctx`. |
17 | | /// |
18 | | /// Except for the function's entry block, `block_id` should be the label of |
19 | | /// a block we've seen mentioned before, with an entry in |
20 | | /// `block_ctx.body_for_label` to tell us which `Body` it contributes to. |
21 | 13 | pub(in crate::front::spv) fn next_block( |
22 | 13 | &mut self, |
23 | 13 | block_id: spirv::Word, |
24 | 13 | ctx: &mut BlockContext, |
25 | 13 | ) -> Result<(), Error> { |
26 | | // Extend `body` with the correct form for a branch to `target`. |
27 | 0 | fn merger(body: &mut Body, target: &MergeBlockInformation) { |
28 | 0 | body.data.push(match *target { |
29 | 0 | MergeBlockInformation::LoopContinue => BodyFragment::Continue, |
30 | | MergeBlockInformation::LoopMerge | MergeBlockInformation::SwitchMerge => { |
31 | 0 | BodyFragment::Break |
32 | | } |
33 | | |
34 | | // Finishing a selection merge means just falling off the end of |
35 | | // the `accept` or `reject` block of the `If` statement. |
36 | 0 | MergeBlockInformation::SelectionMerge => return, |
37 | | }) |
38 | 0 | } |
39 | | |
40 | 13 | let mut emitter = crate::proc::Emitter::default(); |
41 | 13 | emitter.start(ctx.expressions); |
42 | | |
43 | | // Find the `Body` to which this block contributes. |
44 | | // |
45 | | // If this is some SPIR-V structured control flow construct's merge |
46 | | // block, then `body_idx` will refer to the same `Body` as the header, |
47 | | // so that we simply pick up accumulating the `Body` where the header |
48 | | // left off. Each of the statements in a block dominates the next, so |
49 | | // we're sure to encounter their SPIR-V blocks in order, ensuring that |
50 | | // the `Body` will be assembled in the proper order. |
51 | | // |
52 | | // Note that, unlike every other kind of SPIR-V block, we don't know the |
53 | | // function's first block's label in advance. Thus, we assume that if |
54 | | // this block has no entry in `ctx.body_for_label`, it must be the |
55 | | // function's first block. This always has body index zero. |
56 | 13 | let mut body_idx = *ctx.body_for_label.entry(block_id).or_default(); |
57 | | |
58 | | // The Naga IR block this call builds. This will end up as |
59 | | // `ctx.blocks[&block_id]`, and `ctx.bodies[body_idx]` will refer to it |
60 | | // via a `BodyFragment::BlockId`. |
61 | 13 | let mut block = crate::Block::new(); |
62 | | |
63 | | // Stores the merge block as defined by a `OpSelectionMerge` otherwise is `None` |
64 | | // |
65 | | // This is used in `OpSwitch` to promote the `MergeBlockInformation` from |
66 | | // `SelectionMerge` to `SwitchMerge` to allow `Break`s this isn't desirable for |
67 | | // `LoopMerge`s because otherwise `Continue`s wouldn't be allowed |
68 | 13 | let mut selection_merge_block = None; |
69 | | |
70 | | macro_rules! get_expr_handle { |
71 | | ($id:expr, $lexp:expr) => { |
72 | | self.get_expr_handle($id, $lexp, ctx, &mut emitter, &mut block, body_idx) |
73 | | }; |
74 | | } |
75 | | macro_rules! parse_expr_op { |
76 | | ($op:expr, BINARY) => { |
77 | | self.parse_expr_binary_op(ctx, &mut emitter, &mut block, block_id, body_idx, $op) |
78 | | }; |
79 | | |
80 | | ($op:expr, SHIFT) => { |
81 | | self.parse_expr_shift_op(ctx, &mut emitter, &mut block, block_id, body_idx, $op) |
82 | | }; |
83 | | ($op:expr, UNARY) => { |
84 | | self.parse_expr_unary_op(ctx, &mut emitter, &mut block, block_id, body_idx, $op) |
85 | | }; |
86 | | ($axis:expr, $ctrl:expr, DERIVATIVE) => { |
87 | | self.parse_expr_derivative( |
88 | | ctx, |
89 | | &mut emitter, |
90 | | &mut block, |
91 | | block_id, |
92 | | body_idx, |
93 | | ($axis, $ctrl), |
94 | | ) |
95 | | }; |
96 | | } |
97 | | |
98 | 0 | let terminator = loop { |
99 | | use spirv::Op; |
100 | 228k | let start = self.data_offset; |
101 | 228k | let inst = self.next_inst()?; |
102 | 228k | let span = crate::Span::from(start..(start + 4 * (inst.wc as usize))); |
103 | 228k | log::debug!("\t\t{:?} [{}]", inst.op, inst.wc); |
104 | | |
105 | 228k | match inst.op { |
106 | | Op::Line => { |
107 | 0 | inst.expect(4)?; |
108 | 0 | let _file_id = self.next()?; |
109 | 0 | let _row_id = self.next()?; |
110 | 0 | let _col_id = self.next()?; |
111 | | } |
112 | 0 | Op::NoLine => inst.expect(1)?, |
113 | | Op::Undef => { |
114 | 0 | inst.expect(3)?; |
115 | 0 | let type_id = self.next()?; |
116 | 0 | let id = self.next()?; |
117 | 0 | let type_lookup = self.lookup_type.lookup(type_id)?; |
118 | 0 | let ty = type_lookup.handle; |
119 | | |
120 | 0 | self.lookup_expression.insert( |
121 | 0 | id, |
122 | 0 | LookupExpression { |
123 | 0 | handle: ctx |
124 | 0 | .expressions |
125 | 0 | .append(crate::Expression::ZeroValue(ty), span), |
126 | 0 | type_id, |
127 | 0 | block_id, |
128 | 0 | }, |
129 | | ); |
130 | | } |
131 | | Op::Variable => { |
132 | 0 | inst.expect_at_least(4)?; |
133 | 0 | block.extend(emitter.finish(ctx.expressions)); |
134 | | |
135 | 0 | let result_type_id = self.next()?; |
136 | 0 | let result_id = self.next()?; |
137 | 0 | let _storage_class = self.next()?; |
138 | 0 | let init = if inst.wc > 4 { |
139 | 0 | inst.expect(5)?; |
140 | 0 | let init_id = self.next()?; |
141 | 0 | let lconst = self.lookup_constant.lookup(init_id)?; |
142 | 0 | Some(ctx.expressions.append(lconst.inner.to_expr(), span)) |
143 | | } else { |
144 | 0 | None |
145 | | }; |
146 | | |
147 | 0 | let name = self |
148 | 0 | .future_decor |
149 | 0 | .remove(&result_id) |
150 | 0 | .and_then(|decor| decor.name); |
151 | 0 | if let Some(ref name) = name { |
152 | 0 | log::debug!("\t\t\tid={result_id} name={name}"); |
153 | 0 | } |
154 | 0 | let lookup_ty = self.lookup_type.lookup(result_type_id)?; |
155 | 0 | let var_handle = ctx.local_arena.append( |
156 | | crate::LocalVariable { |
157 | 0 | name, |
158 | 0 | ty: match ctx.module.types[lookup_ty.handle].inner { |
159 | 0 | crate::TypeInner::Pointer { base, .. } => base, |
160 | 0 | _ => lookup_ty.handle, |
161 | | }, |
162 | 0 | init, |
163 | | }, |
164 | 0 | span, |
165 | | ); |
166 | | |
167 | 0 | self.lookup_expression.insert( |
168 | 0 | result_id, |
169 | 0 | LookupExpression { |
170 | 0 | handle: ctx |
171 | 0 | .expressions |
172 | 0 | .append(crate::Expression::LocalVariable(var_handle), span), |
173 | 0 | type_id: result_type_id, |
174 | 0 | block_id, |
175 | 0 | }, |
176 | | ); |
177 | 0 | emitter.start(ctx.expressions); |
178 | | } |
179 | | Op::Phi => { |
180 | 0 | inst.expect_at_least(3)?; |
181 | 0 | block.extend(emitter.finish(ctx.expressions)); |
182 | | |
183 | 0 | let result_type_id = self.next()?; |
184 | 0 | let result_id = self.next()?; |
185 | | |
186 | 0 | let name = format!("phi_{result_id}"); |
187 | 0 | let local = ctx.local_arena.append( |
188 | | crate::LocalVariable { |
189 | 0 | name: Some(name), |
190 | 0 | ty: self.lookup_type.lookup(result_type_id)?.handle, |
191 | 0 | init: None, |
192 | | }, |
193 | 0 | self.span_from(start), |
194 | | ); |
195 | 0 | let pointer = ctx |
196 | 0 | .expressions |
197 | 0 | .append(crate::Expression::LocalVariable(local), span); |
198 | | |
199 | 0 | let in_count = (inst.wc - 3) / 2; |
200 | 0 | let mut phi = PhiExpression { |
201 | 0 | local, |
202 | 0 | expressions: Vec::with_capacity(in_count as usize), |
203 | 0 | }; |
204 | 0 | for _ in 0..in_count { |
205 | 0 | let expr = self.next()?; |
206 | 0 | let block = self.next()?; |
207 | 0 | phi.expressions.push((expr, block)); |
208 | | } |
209 | | |
210 | 0 | ctx.phis.push(phi); |
211 | 0 | emitter.start(ctx.expressions); |
212 | | |
213 | | // Associate the lookup with an actual value, which is emitted |
214 | | // into the current block. |
215 | 0 | self.lookup_expression.insert( |
216 | 0 | result_id, |
217 | 0 | LookupExpression { |
218 | 0 | handle: ctx |
219 | 0 | .expressions |
220 | 0 | .append(crate::Expression::Load { pointer }, span), |
221 | 0 | type_id: result_type_id, |
222 | 0 | block_id, |
223 | 0 | }, |
224 | | ); |
225 | | } |
226 | | Op::AccessChain | Op::InBoundsAccessChain => { |
227 | | struct AccessExpression { |
228 | | base_handle: Handle<crate::Expression>, |
229 | | type_id: spirv::Word, |
230 | | load_override: Option<LookupLoadOverride>, |
231 | | } |
232 | | |
233 | 0 | inst.expect_at_least(4)?; |
234 | | |
235 | 0 | let result_type_id = self.next()?; |
236 | 0 | let result_id = self.next()?; |
237 | 0 | let base_id = self.next()?; |
238 | 0 | log::trace!("\t\t\tlooking up expr {base_id:?}"); |
239 | | |
240 | 0 | let mut acex = { |
241 | 0 | let lexp = self.lookup_expression.lookup(base_id)?; |
242 | 0 | let lty = self.lookup_type.lookup(lexp.type_id)?; |
243 | | |
244 | | // HACK `OpAccessChain` and `OpInBoundsAccessChain` |
245 | | // require for the result type to be a pointer, but if |
246 | | // we're given a pointer to an image / sampler, it will |
247 | | // be *already* dereferenced, since we do that early |
248 | | // during `parse_type_pointer()`. |
249 | | // |
250 | | // This can happen only through `BindingArray`, since |
251 | | // that's the only case where one can obtain a pointer |
252 | | // to an image / sampler, and so let's match on that: |
253 | 0 | let dereference = match ctx.module.types[lty.handle].inner { |
254 | 0 | crate::TypeInner::BindingArray { .. } => false, |
255 | 0 | _ => true, |
256 | | }; |
257 | | |
258 | 0 | let type_id = if dereference { |
259 | 0 | lty.base_id.ok_or(Error::InvalidAccessType(lexp.type_id))? |
260 | | } else { |
261 | 0 | lexp.type_id |
262 | | }; |
263 | | |
264 | 0 | AccessExpression { |
265 | 0 | base_handle: get_expr_handle!(base_id, lexp), |
266 | 0 | type_id, |
267 | 0 | load_override: self.lookup_load_override.get(&base_id).cloned(), |
268 | 0 | } |
269 | | }; |
270 | | |
271 | 0 | for _ in 4..inst.wc { |
272 | 0 | let access_id = self.next()?; |
273 | 0 | log::trace!("\t\t\tlooking up index expr {access_id:?}"); |
274 | 0 | let index_expr = self.lookup_expression.lookup(access_id)?.clone(); |
275 | 0 | let index_expr_handle = get_expr_handle!(access_id, &index_expr); |
276 | 0 | let index_expr_data = &ctx.expressions[index_expr.handle]; |
277 | 0 | let index_maybe = match *index_expr_data { |
278 | 0 | crate::Expression::Constant(const_handle) => Some( |
279 | 0 | ctx.gctx() |
280 | 0 | .eval_expr_to_u32(ctx.module.constants[const_handle].init) |
281 | 0 | .map_err(|_| { |
282 | 0 | Error::InvalidAccess(crate::Expression::Constant( |
283 | 0 | const_handle, |
284 | 0 | )) |
285 | 0 | })?, Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#1}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#1} |
286 | | ), |
287 | 0 | _ => None, |
288 | | }; |
289 | | |
290 | 0 | log::trace!("\t\t\tlooking up type {:?}", acex.type_id); |
291 | 0 | let type_lookup = self.lookup_type.lookup(acex.type_id)?; |
292 | 0 | let ty = &ctx.module.types[type_lookup.handle]; |
293 | 0 | acex = match ty.inner { |
294 | | // can only index a struct with a constant |
295 | 0 | crate::TypeInner::Struct { ref members, .. } => { |
296 | 0 | let index = index_maybe |
297 | 0 | .ok_or_else(|| Error::InvalidAccess(index_expr_data.clone()))?; Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#2}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#2} |
298 | | |
299 | 0 | let lookup_member = self |
300 | 0 | .lookup_member |
301 | 0 | .get(&(type_lookup.handle, index)) |
302 | 0 | .ok_or(Error::InvalidAccessType(acex.type_id))?; |
303 | 0 | let base_handle = ctx.expressions.append( |
304 | 0 | crate::Expression::AccessIndex { |
305 | 0 | base: acex.base_handle, |
306 | 0 | index, |
307 | 0 | }, |
308 | 0 | span, |
309 | | ); |
310 | | |
311 | 0 | if let Some(crate::Binding::BuiltIn(built_in)) = |
312 | 0 | members[index as usize].binding |
313 | 0 | { |
314 | 0 | self.gl_per_vertex_builtin_access.insert(built_in); |
315 | 0 | } |
316 | | |
317 | | AccessExpression { |
318 | 0 | base_handle, |
319 | 0 | type_id: lookup_member.type_id, |
320 | 0 | load_override: if lookup_member.row_major { |
321 | 0 | debug_assert!(acex.load_override.is_none()); |
322 | 0 | let sub_type_lookup = |
323 | 0 | self.lookup_type.lookup(lookup_member.type_id)?; |
324 | 0 | Some(match ctx.module.types[sub_type_lookup.handle].inner { |
325 | | // load it transposed, to match column major expectations |
326 | | crate::TypeInner::Matrix { .. } => { |
327 | 0 | let loaded = ctx.expressions.append( |
328 | 0 | crate::Expression::Load { |
329 | 0 | pointer: base_handle, |
330 | 0 | }, |
331 | 0 | span, |
332 | | ); |
333 | 0 | let transposed = ctx.expressions.append( |
334 | 0 | crate::Expression::Math { |
335 | 0 | fun: crate::MathFunction::Transpose, |
336 | 0 | arg: loaded, |
337 | 0 | arg1: None, |
338 | 0 | arg2: None, |
339 | 0 | arg3: None, |
340 | 0 | }, |
341 | 0 | span, |
342 | | ); |
343 | 0 | LookupLoadOverride::Loaded(transposed) |
344 | | } |
345 | 0 | _ => LookupLoadOverride::Pending, |
346 | | }) |
347 | | } else { |
348 | 0 | None |
349 | | }, |
350 | | } |
351 | | } |
352 | | crate::TypeInner::Matrix { .. } => { |
353 | 0 | let load_override = match acex.load_override { |
354 | | // We are indexing inside a row-major matrix |
355 | 0 | Some(LookupLoadOverride::Loaded(load_expr)) => { |
356 | 0 | let index = index_maybe.ok_or_else(|| { |
357 | 0 | Error::InvalidAccess(index_expr_data.clone()) |
358 | 0 | })?; Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#3}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#3} |
359 | 0 | let sub_handle = ctx.expressions.append( |
360 | 0 | crate::Expression::AccessIndex { |
361 | 0 | base: load_expr, |
362 | 0 | index, |
363 | 0 | }, |
364 | 0 | span, |
365 | | ); |
366 | 0 | Some(LookupLoadOverride::Loaded(sub_handle)) |
367 | | } |
368 | 0 | _ => None, |
369 | | }; |
370 | 0 | let sub_expr = match index_maybe { |
371 | 0 | Some(index) => crate::Expression::AccessIndex { |
372 | 0 | base: acex.base_handle, |
373 | 0 | index, |
374 | 0 | }, |
375 | 0 | None => crate::Expression::Access { |
376 | 0 | base: acex.base_handle, |
377 | 0 | index: index_expr_handle, |
378 | 0 | }, |
379 | | }; |
380 | | AccessExpression { |
381 | 0 | base_handle: ctx.expressions.append(sub_expr, span), |
382 | 0 | type_id: type_lookup |
383 | 0 | .base_id |
384 | 0 | .ok_or(Error::InvalidAccessType(acex.type_id))?, |
385 | 0 | load_override, |
386 | | } |
387 | | } |
388 | | // This must be a vector or an array. |
389 | | _ => { |
390 | 0 | let base_handle = ctx.expressions.append( |
391 | 0 | crate::Expression::Access { |
392 | 0 | base: acex.base_handle, |
393 | 0 | index: index_expr_handle, |
394 | 0 | }, |
395 | 0 | span, |
396 | | ); |
397 | 0 | let load_override = match acex.load_override { |
398 | | // If there is a load override in place, then we always end up |
399 | | // with a side-loaded value here. |
400 | 0 | Some(lookup_load_override) => { |
401 | 0 | let sub_expr = match lookup_load_override { |
402 | | // We must be indexing into the array of row-major matrices. |
403 | | // Let's load the result of indexing and transpose it. |
404 | | LookupLoadOverride::Pending => { |
405 | 0 | let loaded = ctx.expressions.append( |
406 | 0 | crate::Expression::Load { |
407 | 0 | pointer: base_handle, |
408 | 0 | }, |
409 | 0 | span, |
410 | | ); |
411 | 0 | ctx.expressions.append( |
412 | 0 | crate::Expression::Math { |
413 | 0 | fun: crate::MathFunction::Transpose, |
414 | 0 | arg: loaded, |
415 | 0 | arg1: None, |
416 | 0 | arg2: None, |
417 | 0 | arg3: None, |
418 | 0 | }, |
419 | 0 | span, |
420 | | ) |
421 | | } |
422 | | // We are indexing inside a row-major matrix. |
423 | 0 | LookupLoadOverride::Loaded(load_expr) => { |
424 | 0 | ctx.expressions.append( |
425 | 0 | crate::Expression::Access { |
426 | 0 | base: load_expr, |
427 | 0 | index: index_expr_handle, |
428 | 0 | }, |
429 | 0 | span, |
430 | | ) |
431 | | } |
432 | | }; |
433 | 0 | Some(LookupLoadOverride::Loaded(sub_expr)) |
434 | | } |
435 | 0 | None => None, |
436 | | }; |
437 | | AccessExpression { |
438 | 0 | base_handle, |
439 | 0 | type_id: type_lookup |
440 | 0 | .base_id |
441 | 0 | .ok_or(Error::InvalidAccessType(acex.type_id))?, |
442 | 0 | load_override, |
443 | | } |
444 | | } |
445 | | }; |
446 | | } |
447 | | |
448 | 0 | if let Some(load_expr) = acex.load_override { |
449 | 0 | self.lookup_load_override.insert(result_id, load_expr); |
450 | 0 | } |
451 | 0 | let lookup_expression = LookupExpression { |
452 | 0 | handle: acex.base_handle, |
453 | 0 | type_id: result_type_id, |
454 | 0 | block_id, |
455 | 0 | }; |
456 | 0 | self.lookup_expression.insert(result_id, lookup_expression); |
457 | | } |
458 | | Op::VectorExtractDynamic => { |
459 | 0 | inst.expect(5)?; |
460 | | |
461 | 0 | let result_type_id = self.next()?; |
462 | 0 | let id = self.next()?; |
463 | 0 | let composite_id = self.next()?; |
464 | 0 | let index_id = self.next()?; |
465 | | |
466 | 0 | let root_lexp = self.lookup_expression.lookup(composite_id)?; |
467 | 0 | let root_handle = get_expr_handle!(composite_id, root_lexp); |
468 | 0 | let root_type_lookup = self.lookup_type.lookup(root_lexp.type_id)?; |
469 | 0 | let index_lexp = self.lookup_expression.lookup(index_id)?; |
470 | 0 | let index_handle = get_expr_handle!(index_id, index_lexp); |
471 | 0 | let index_type = self.lookup_type.lookup(index_lexp.type_id)?.handle; |
472 | | |
473 | 0 | let num_components = match ctx.module.types[root_type_lookup.handle].inner { |
474 | 0 | crate::TypeInner::Vector { size, .. } => size as u32, |
475 | 0 | _ => return Err(Error::InvalidVectorType(root_type_lookup.handle)), |
476 | | }; |
477 | | |
478 | 0 | let mut make_index = |ctx: &mut BlockContext, index: u32| { |
479 | 0 | make_index_literal( |
480 | 0 | ctx, |
481 | 0 | index, |
482 | 0 | &mut block, |
483 | 0 | &mut emitter, |
484 | 0 | index_type, |
485 | 0 | index_lexp.type_id, |
486 | 0 | span, |
487 | | ) |
488 | 0 | }; Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#4}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#4} |
489 | | |
490 | 0 | let index_expr = make_index(ctx, 0)?; |
491 | 0 | let mut handle = ctx.expressions.append( |
492 | 0 | crate::Expression::Access { |
493 | 0 | base: root_handle, |
494 | 0 | index: index_expr, |
495 | 0 | }, |
496 | 0 | span, |
497 | | ); |
498 | 0 | for index in 1..num_components { |
499 | 0 | let index_expr = make_index(ctx, index)?; |
500 | 0 | let access_expr = ctx.expressions.append( |
501 | 0 | crate::Expression::Access { |
502 | 0 | base: root_handle, |
503 | 0 | index: index_expr, |
504 | 0 | }, |
505 | 0 | span, |
506 | | ); |
507 | 0 | let cond = ctx.expressions.append( |
508 | 0 | crate::Expression::Binary { |
509 | 0 | op: crate::BinaryOperator::Equal, |
510 | 0 | left: index_expr, |
511 | 0 | right: index_handle, |
512 | 0 | }, |
513 | 0 | span, |
514 | | ); |
515 | 0 | handle = ctx.expressions.append( |
516 | 0 | crate::Expression::Select { |
517 | 0 | condition: cond, |
518 | 0 | accept: access_expr, |
519 | 0 | reject: handle, |
520 | 0 | }, |
521 | 0 | span, |
522 | 0 | ); |
523 | | } |
524 | | |
525 | 0 | self.lookup_expression.insert( |
526 | 0 | id, |
527 | 0 | LookupExpression { |
528 | 0 | handle, |
529 | 0 | type_id: result_type_id, |
530 | 0 | block_id, |
531 | 0 | }, |
532 | | ); |
533 | | } |
534 | | Op::VectorInsertDynamic => { |
535 | 0 | inst.expect(6)?; |
536 | | |
537 | 0 | let result_type_id = self.next()?; |
538 | 0 | let id = self.next()?; |
539 | 0 | let composite_id = self.next()?; |
540 | 0 | let object_id = self.next()?; |
541 | 0 | let index_id = self.next()?; |
542 | | |
543 | 0 | let object_lexp = self.lookup_expression.lookup(object_id)?; |
544 | 0 | let object_handle = get_expr_handle!(object_id, object_lexp); |
545 | 0 | let root_lexp = self.lookup_expression.lookup(composite_id)?; |
546 | 0 | let root_handle = get_expr_handle!(composite_id, root_lexp); |
547 | 0 | let root_type_lookup = self.lookup_type.lookup(root_lexp.type_id)?; |
548 | 0 | let index_lexp = self.lookup_expression.lookup(index_id)?; |
549 | 0 | let index_handle = get_expr_handle!(index_id, index_lexp); |
550 | 0 | let index_type = self.lookup_type.lookup(index_lexp.type_id)?.handle; |
551 | | |
552 | 0 | let num_components = match ctx.module.types[root_type_lookup.handle].inner { |
553 | 0 | crate::TypeInner::Vector { size, .. } => size as u32, |
554 | 0 | _ => return Err(Error::InvalidVectorType(root_type_lookup.handle)), |
555 | | }; |
556 | | |
557 | 0 | let mut components = Vec::with_capacity(num_components as usize); |
558 | 0 | for index in 0..num_components { |
559 | 0 | let index_expr = make_index_literal( |
560 | 0 | ctx, |
561 | 0 | index, |
562 | 0 | &mut block, |
563 | 0 | &mut emitter, |
564 | 0 | index_type, |
565 | 0 | index_lexp.type_id, |
566 | 0 | span, |
567 | 0 | )?; |
568 | 0 | let access_expr = ctx.expressions.append( |
569 | 0 | crate::Expression::Access { |
570 | 0 | base: root_handle, |
571 | 0 | index: index_expr, |
572 | 0 | }, |
573 | 0 | span, |
574 | | ); |
575 | 0 | let cond = ctx.expressions.append( |
576 | 0 | crate::Expression::Binary { |
577 | 0 | op: crate::BinaryOperator::Equal, |
578 | 0 | left: index_expr, |
579 | 0 | right: index_handle, |
580 | 0 | }, |
581 | 0 | span, |
582 | | ); |
583 | 0 | let handle = ctx.expressions.append( |
584 | 0 | crate::Expression::Select { |
585 | 0 | condition: cond, |
586 | 0 | accept: object_handle, |
587 | 0 | reject: access_expr, |
588 | 0 | }, |
589 | 0 | span, |
590 | | ); |
591 | 0 | components.push(handle); |
592 | | } |
593 | 0 | let handle = ctx.expressions.append( |
594 | 0 | crate::Expression::Compose { |
595 | 0 | ty: root_type_lookup.handle, |
596 | 0 | components, |
597 | 0 | }, |
598 | 0 | span, |
599 | | ); |
600 | | |
601 | 0 | self.lookup_expression.insert( |
602 | 0 | id, |
603 | 0 | LookupExpression { |
604 | 0 | handle, |
605 | 0 | type_id: result_type_id, |
606 | 0 | block_id, |
607 | 0 | }, |
608 | | ); |
609 | | } |
610 | | Op::CompositeExtract => { |
611 | 1 | inst.expect_at_least(4)?; |
612 | | |
613 | 1 | let result_type_id = self.next()?; |
614 | 1 | let result_id = self.next()?; |
615 | 1 | let base_id = self.next()?; |
616 | 1 | log::trace!("\t\t\tlooking up expr {base_id:?}"); |
617 | 1 | let mut lexp = self.lookup_expression.lookup(base_id)?.clone(); |
618 | 1 | lexp.handle = get_expr_handle!(base_id, &lexp); |
619 | 1 | for _ in 4..inst.wc { |
620 | 29.7k | let index = self.next()?; |
621 | 29.7k | log::trace!("\t\t\tlooking up type {:?}", lexp.type_id); |
622 | 29.7k | let type_lookup = self.lookup_type.lookup(lexp.type_id)?; |
623 | 29.7k | let type_id = match ctx.module.types[type_lookup.handle].inner { |
624 | | crate::TypeInner::Struct { .. } => { |
625 | 0 | self.lookup_member |
626 | 0 | .get(&(type_lookup.handle, index)) |
627 | 0 | .ok_or(Error::InvalidAccessType(lexp.type_id))? |
628 | | .type_id |
629 | | } |
630 | | crate::TypeInner::Array { .. } |
631 | | | crate::TypeInner::Vector { .. } |
632 | 29.7k | | crate::TypeInner::Matrix { .. } => type_lookup |
633 | 29.7k | .base_id |
634 | 29.7k | .ok_or(Error::InvalidAccessType(lexp.type_id))?, |
635 | 0 | ref other => { |
636 | 0 | log::warn!("composite type {other:?}"); |
637 | 0 | return Err(Error::UnsupportedType(type_lookup.handle)); |
638 | | } |
639 | | }; |
640 | 29.7k | lexp = LookupExpression { |
641 | 29.7k | handle: ctx.expressions.append( |
642 | 29.7k | crate::Expression::AccessIndex { |
643 | 29.7k | base: lexp.handle, |
644 | 29.7k | index, |
645 | 29.7k | }, |
646 | 29.7k | span, |
647 | 29.7k | ), |
648 | 29.7k | type_id, |
649 | 29.7k | block_id, |
650 | 29.7k | }; |
651 | | } |
652 | | |
653 | 0 | self.lookup_expression.insert( |
654 | 0 | result_id, |
655 | 0 | LookupExpression { |
656 | 0 | handle: lexp.handle, |
657 | 0 | type_id: result_type_id, |
658 | 0 | block_id, |
659 | 0 | }, |
660 | | ); |
661 | | } |
662 | | Op::CompositeInsert => { |
663 | 0 | inst.expect_at_least(5)?; |
664 | | |
665 | 0 | let result_type_id = self.next()?; |
666 | 0 | let id = self.next()?; |
667 | 0 | let object_id = self.next()?; |
668 | 0 | let composite_id = self.next()?; |
669 | 0 | let mut selections = Vec::with_capacity(inst.wc as usize - 5); |
670 | 0 | for _ in 5..inst.wc { |
671 | 0 | selections.push(self.next()?); |
672 | | } |
673 | | |
674 | 0 | let object_lexp = self.lookup_expression.lookup(object_id)?.clone(); |
675 | 0 | let object_handle = get_expr_handle!(object_id, &object_lexp); |
676 | 0 | let root_lexp = self.lookup_expression.lookup(composite_id)?.clone(); |
677 | 0 | let root_handle = get_expr_handle!(composite_id, &root_lexp); |
678 | 0 | let handle = self.insert_composite( |
679 | 0 | root_handle, |
680 | 0 | result_type_id, |
681 | 0 | object_handle, |
682 | 0 | &selections, |
683 | 0 | &ctx.module.types, |
684 | 0 | ctx.expressions, |
685 | 0 | span, |
686 | 0 | )?; |
687 | | |
688 | 0 | self.lookup_expression.insert( |
689 | 0 | id, |
690 | 0 | LookupExpression { |
691 | 0 | handle, |
692 | 0 | type_id: result_type_id, |
693 | 0 | block_id, |
694 | 0 | }, |
695 | | ); |
696 | | } |
697 | | Op::CompositeConstruct => { |
698 | 0 | inst.expect_at_least(3)?; |
699 | | |
700 | 0 | let result_type_id = self.next()?; |
701 | 0 | let id = self.next()?; |
702 | 0 | let mut components = Vec::with_capacity(inst.wc as usize - 2); |
703 | 0 | for _ in 3..inst.wc { |
704 | 0 | let comp_id = self.next()?; |
705 | 0 | log::trace!("\t\t\tlooking up expr {comp_id:?}"); |
706 | 0 | let lexp = self.lookup_expression.lookup(comp_id)?; |
707 | 0 | let handle = get_expr_handle!(comp_id, lexp); |
708 | 0 | components.push(handle); |
709 | | } |
710 | 0 | let ty = self.lookup_type.lookup(result_type_id)?.handle; |
711 | 0 | let first = components[0]; |
712 | 0 | let expr = match ctx.module.types[ty].inner { |
713 | | // this is an optimization to detect the splat |
714 | 0 | crate::TypeInner::Vector { size, .. } |
715 | 0 | if components.len() == size as usize |
716 | 0 | && components[1..].iter().all(|&c| c == first) => Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#5}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#5} |
717 | | { |
718 | 0 | crate::Expression::Splat { size, value: first } |
719 | | } |
720 | 0 | _ => crate::Expression::Compose { ty, components }, |
721 | | }; |
722 | 0 | self.lookup_expression.insert( |
723 | 0 | id, |
724 | 0 | LookupExpression { |
725 | 0 | handle: ctx.expressions.append(expr, span), |
726 | 0 | type_id: result_type_id, |
727 | 0 | block_id, |
728 | 0 | }, |
729 | | ); |
730 | | } |
731 | | Op::Load => { |
732 | 0 | inst.expect_at_least(4)?; |
733 | | |
734 | 0 | let result_type_id = self.next()?; |
735 | 0 | let result_id = self.next()?; |
736 | 0 | let pointer_id = self.next()?; |
737 | 0 | if inst.wc != 4 { |
738 | 0 | inst.expect(5)?; |
739 | 0 | let _memory_access = self.next()?; |
740 | 0 | } |
741 | | |
742 | 0 | let base_lexp = self.lookup_expression.lookup(pointer_id)?; |
743 | 0 | let base_handle = get_expr_handle!(pointer_id, base_lexp); |
744 | 0 | let type_lookup = self.lookup_type.lookup(base_lexp.type_id)?; |
745 | 0 | let handle = match ctx.module.types[type_lookup.handle].inner { |
746 | | crate::TypeInner::Image { .. } | crate::TypeInner::Sampler { .. } => { |
747 | 0 | base_handle |
748 | | } |
749 | 0 | _ => match self.lookup_load_override.get(&pointer_id) { |
750 | 0 | Some(&LookupLoadOverride::Loaded(handle)) => handle, |
751 | | //Note: we aren't handling `LookupLoadOverride::Pending` properly here |
752 | 0 | _ => ctx.expressions.append( |
753 | 0 | crate::Expression::Load { |
754 | 0 | pointer: base_handle, |
755 | 0 | }, |
756 | 0 | span, |
757 | | ), |
758 | | }, |
759 | | }; |
760 | | |
761 | 0 | self.lookup_expression.insert( |
762 | 0 | result_id, |
763 | 0 | LookupExpression { |
764 | 0 | handle, |
765 | 0 | type_id: result_type_id, |
766 | 0 | block_id, |
767 | 0 | }, |
768 | | ); |
769 | | } |
770 | | Op::Store => { |
771 | 0 | inst.expect_at_least(3)?; |
772 | | |
773 | 0 | let pointer_id = self.next()?; |
774 | 0 | let value_id = self.next()?; |
775 | 0 | if inst.wc != 3 { |
776 | 0 | inst.expect(4)?; |
777 | 0 | let _memory_access = self.next()?; |
778 | 0 | } |
779 | 0 | let base_expr = self.lookup_expression.lookup(pointer_id)?; |
780 | 0 | let base_handle = get_expr_handle!(pointer_id, base_expr); |
781 | 0 | let value_expr = self.lookup_expression.lookup(value_id)?; |
782 | 0 | let value_handle = get_expr_handle!(value_id, value_expr); |
783 | | |
784 | 0 | block.extend(emitter.finish(ctx.expressions)); |
785 | 0 | block.push( |
786 | 0 | crate::Statement::Store { |
787 | 0 | pointer: base_handle, |
788 | 0 | value: value_handle, |
789 | 0 | }, |
790 | 0 | span, |
791 | | ); |
792 | 0 | emitter.start(ctx.expressions); |
793 | | } |
794 | | // Arithmetic Instructions +, -, *, /, % |
795 | | Op::SNegate | Op::FNegate => { |
796 | 0 | inst.expect(4)?; |
797 | 0 | self.parse_expr_unary_op_sign_adjusted( |
798 | 0 | ctx, |
799 | 0 | &mut emitter, |
800 | 0 | &mut block, |
801 | 0 | block_id, |
802 | 0 | body_idx, |
803 | 0 | crate::UnaryOperator::Negate, |
804 | 0 | )?; |
805 | | } |
806 | | Op::IAdd |
807 | | | Op::ISub |
808 | | | Op::IMul |
809 | | | Op::BitwiseOr |
810 | | | Op::BitwiseXor |
811 | | | Op::BitwiseAnd |
812 | | | Op::SDiv |
813 | | | Op::SRem => { |
814 | 0 | inst.expect(5)?; |
815 | 0 | let operator = map_binary_operator(inst.op)?; |
816 | 0 | self.parse_expr_binary_op_sign_adjusted( |
817 | 0 | ctx, |
818 | 0 | &mut emitter, |
819 | 0 | &mut block, |
820 | 0 | block_id, |
821 | 0 | body_idx, |
822 | 0 | operator, |
823 | 0 | SignAnchor::Result, |
824 | 0 | )?; |
825 | | } |
826 | | Op::IEqual | Op::INotEqual => { |
827 | 0 | inst.expect(5)?; |
828 | 0 | let operator = map_binary_operator(inst.op)?; |
829 | 0 | self.parse_expr_binary_op_sign_adjusted( |
830 | 0 | ctx, |
831 | 0 | &mut emitter, |
832 | 0 | &mut block, |
833 | 0 | block_id, |
834 | 0 | body_idx, |
835 | 0 | operator, |
836 | 0 | SignAnchor::Operand, |
837 | 0 | )?; |
838 | | } |
839 | | Op::FAdd => { |
840 | 0 | inst.expect(5)?; |
841 | 0 | parse_expr_op!(crate::BinaryOperator::Add, BINARY)?; |
842 | | } |
843 | | Op::FSub => { |
844 | 0 | inst.expect(5)?; |
845 | 0 | parse_expr_op!(crate::BinaryOperator::Subtract, BINARY)?; |
846 | | } |
847 | | Op::FMul => { |
848 | 0 | inst.expect(5)?; |
849 | 0 | parse_expr_op!(crate::BinaryOperator::Multiply, BINARY)?; |
850 | | } |
851 | | Op::UDiv | Op::FDiv => { |
852 | 0 | inst.expect(5)?; |
853 | 0 | parse_expr_op!(crate::BinaryOperator::Divide, BINARY)?; |
854 | | } |
855 | | Op::UMod | Op::FRem => { |
856 | 2 | inst.expect(5)?; |
857 | 2 | parse_expr_op!(crate::BinaryOperator::Modulo, BINARY)?; |
858 | | } |
859 | | Op::SMod => { |
860 | 0 | inst.expect(5)?; |
861 | | |
862 | | // x - y * int(floor(float(x) / float(y))) |
863 | | |
864 | 0 | let start = self.data_offset; |
865 | 0 | let result_type_id = self.next()?; |
866 | 0 | let result_id = self.next()?; |
867 | 0 | let p1_id = self.next()?; |
868 | 0 | let p2_id = self.next()?; |
869 | 0 | let span = self.span_from_with_op(start); |
870 | | |
871 | 0 | let p1_lexp = self.lookup_expression.lookup(p1_id)?; |
872 | 0 | let left = self.get_expr_handle( |
873 | 0 | p1_id, |
874 | 0 | p1_lexp, |
875 | 0 | ctx, |
876 | 0 | &mut emitter, |
877 | 0 | &mut block, |
878 | 0 | body_idx, |
879 | | ); |
880 | 0 | let p2_lexp = self.lookup_expression.lookup(p2_id)?; |
881 | 0 | let right = self.get_expr_handle( |
882 | 0 | p2_id, |
883 | 0 | p2_lexp, |
884 | 0 | ctx, |
885 | 0 | &mut emitter, |
886 | 0 | &mut block, |
887 | 0 | body_idx, |
888 | | ); |
889 | | |
890 | 0 | let result_ty = self.lookup_type.lookup(result_type_id)?; |
891 | 0 | let inner = &ctx.module.types[result_ty.handle].inner; |
892 | 0 | let kind = inner.scalar_kind().unwrap(); |
893 | 0 | let size = inner.size(ctx.gctx()) as u8; |
894 | | |
895 | 0 | let left_cast = ctx.expressions.append( |
896 | 0 | crate::Expression::As { |
897 | 0 | expr: left, |
898 | 0 | kind: crate::ScalarKind::Float, |
899 | 0 | convert: Some(size), |
900 | 0 | }, |
901 | 0 | span, |
902 | | ); |
903 | 0 | let right_cast = ctx.expressions.append( |
904 | 0 | crate::Expression::As { |
905 | 0 | expr: right, |
906 | 0 | kind: crate::ScalarKind::Float, |
907 | 0 | convert: Some(size), |
908 | 0 | }, |
909 | 0 | span, |
910 | | ); |
911 | 0 | let div = ctx.expressions.append( |
912 | 0 | crate::Expression::Binary { |
913 | 0 | op: crate::BinaryOperator::Divide, |
914 | 0 | left: left_cast, |
915 | 0 | right: right_cast, |
916 | 0 | }, |
917 | 0 | span, |
918 | | ); |
919 | 0 | let floor = ctx.expressions.append( |
920 | 0 | crate::Expression::Math { |
921 | 0 | fun: crate::MathFunction::Floor, |
922 | 0 | arg: div, |
923 | 0 | arg1: None, |
924 | 0 | arg2: None, |
925 | 0 | arg3: None, |
926 | 0 | }, |
927 | 0 | span, |
928 | | ); |
929 | 0 | let cast = ctx.expressions.append( |
930 | 0 | crate::Expression::As { |
931 | 0 | expr: floor, |
932 | 0 | kind, |
933 | 0 | convert: Some(size), |
934 | 0 | }, |
935 | 0 | span, |
936 | | ); |
937 | 0 | let mult = ctx.expressions.append( |
938 | 0 | crate::Expression::Binary { |
939 | 0 | op: crate::BinaryOperator::Multiply, |
940 | 0 | left: cast, |
941 | 0 | right, |
942 | 0 | }, |
943 | 0 | span, |
944 | | ); |
945 | 0 | let sub = ctx.expressions.append( |
946 | 0 | crate::Expression::Binary { |
947 | 0 | op: crate::BinaryOperator::Subtract, |
948 | 0 | left, |
949 | 0 | right: mult, |
950 | 0 | }, |
951 | 0 | span, |
952 | | ); |
953 | 0 | self.lookup_expression.insert( |
954 | 0 | result_id, |
955 | 0 | LookupExpression { |
956 | 0 | handle: sub, |
957 | 0 | type_id: result_type_id, |
958 | 0 | block_id, |
959 | 0 | }, |
960 | | ); |
961 | | } |
962 | | Op::FMod => { |
963 | 225k | inst.expect(5)?; |
964 | | |
965 | | // x - y * floor(x / y) |
966 | | |
967 | 225k | let start = self.data_offset; |
968 | 225k | let span = self.span_from_with_op(start); |
969 | | |
970 | 225k | let result_type_id = self.next()?; |
971 | 225k | let result_id = self.next()?; |
972 | 225k | let p1_id = self.next()?; |
973 | 225k | let p2_id = self.next()?; |
974 | | |
975 | 225k | let p1_lexp = self.lookup_expression.lookup(p1_id)?; |
976 | 225k | let left = self.get_expr_handle( |
977 | 225k | p1_id, |
978 | 225k | p1_lexp, |
979 | 225k | ctx, |
980 | 225k | &mut emitter, |
981 | 225k | &mut block, |
982 | 225k | body_idx, |
983 | | ); |
984 | 225k | let p2_lexp = self.lookup_expression.lookup(p2_id)?; |
985 | 225k | let right = self.get_expr_handle( |
986 | 225k | p2_id, |
987 | 225k | p2_lexp, |
988 | 225k | ctx, |
989 | 225k | &mut emitter, |
990 | 225k | &mut block, |
991 | 225k | body_idx, |
992 | | ); |
993 | | |
994 | 225k | let div = ctx.expressions.append( |
995 | 225k | crate::Expression::Binary { |
996 | 225k | op: crate::BinaryOperator::Divide, |
997 | 225k | left, |
998 | 225k | right, |
999 | 225k | }, |
1000 | 225k | span, |
1001 | | ); |
1002 | 225k | let floor = ctx.expressions.append( |
1003 | 225k | crate::Expression::Math { |
1004 | 225k | fun: crate::MathFunction::Floor, |
1005 | 225k | arg: div, |
1006 | 225k | arg1: None, |
1007 | 225k | arg2: None, |
1008 | 225k | arg3: None, |
1009 | 225k | }, |
1010 | 225k | span, |
1011 | | ); |
1012 | 225k | let mult = ctx.expressions.append( |
1013 | 225k | crate::Expression::Binary { |
1014 | 225k | op: crate::BinaryOperator::Multiply, |
1015 | 225k | left: floor, |
1016 | 225k | right, |
1017 | 225k | }, |
1018 | 225k | span, |
1019 | | ); |
1020 | 225k | let sub = ctx.expressions.append( |
1021 | 225k | crate::Expression::Binary { |
1022 | 225k | op: crate::BinaryOperator::Subtract, |
1023 | 225k | left, |
1024 | 225k | right: mult, |
1025 | 225k | }, |
1026 | 225k | span, |
1027 | | ); |
1028 | 225k | self.lookup_expression.insert( |
1029 | 225k | result_id, |
1030 | 225k | LookupExpression { |
1031 | 225k | handle: sub, |
1032 | 225k | type_id: result_type_id, |
1033 | 225k | block_id, |
1034 | 225k | }, |
1035 | | ); |
1036 | | } |
1037 | | Op::VectorTimesScalar |
1038 | | | Op::VectorTimesMatrix |
1039 | | | Op::MatrixTimesScalar |
1040 | | | Op::MatrixTimesVector |
1041 | | | Op::MatrixTimesMatrix => { |
1042 | 2.69k | inst.expect(5)?; |
1043 | 2.69k | parse_expr_op!(crate::BinaryOperator::Multiply, BINARY)?; |
1044 | | } |
1045 | | Op::Transpose => { |
1046 | 0 | inst.expect(4)?; |
1047 | | |
1048 | 0 | let result_type_id = self.next()?; |
1049 | 0 | let result_id = self.next()?; |
1050 | 0 | let matrix_id = self.next()?; |
1051 | 0 | let matrix_lexp = self.lookup_expression.lookup(matrix_id)?; |
1052 | 0 | let matrix_handle = get_expr_handle!(matrix_id, matrix_lexp); |
1053 | 0 | let expr = crate::Expression::Math { |
1054 | 0 | fun: crate::MathFunction::Transpose, |
1055 | 0 | arg: matrix_handle, |
1056 | 0 | arg1: None, |
1057 | 0 | arg2: None, |
1058 | 0 | arg3: None, |
1059 | 0 | }; |
1060 | 0 | self.lookup_expression.insert( |
1061 | 0 | result_id, |
1062 | 0 | LookupExpression { |
1063 | 0 | handle: ctx.expressions.append(expr, span), |
1064 | 0 | type_id: result_type_id, |
1065 | 0 | block_id, |
1066 | 0 | }, |
1067 | | ); |
1068 | | } |
1069 | | Op::Dot => { |
1070 | 0 | inst.expect(5)?; |
1071 | | |
1072 | 0 | let result_type_id = self.next()?; |
1073 | 0 | let result_id = self.next()?; |
1074 | 0 | let left_id = self.next()?; |
1075 | 0 | let right_id = self.next()?; |
1076 | 0 | let left_lexp = self.lookup_expression.lookup(left_id)?; |
1077 | 0 | let left_handle = get_expr_handle!(left_id, left_lexp); |
1078 | 0 | let right_lexp = self.lookup_expression.lookup(right_id)?; |
1079 | 0 | let right_handle = get_expr_handle!(right_id, right_lexp); |
1080 | 0 | let expr = crate::Expression::Math { |
1081 | 0 | fun: crate::MathFunction::Dot, |
1082 | 0 | arg: left_handle, |
1083 | 0 | arg1: Some(right_handle), |
1084 | 0 | arg2: None, |
1085 | 0 | arg3: None, |
1086 | 0 | }; |
1087 | 0 | self.lookup_expression.insert( |
1088 | 0 | result_id, |
1089 | 0 | LookupExpression { |
1090 | 0 | handle: ctx.expressions.append(expr, span), |
1091 | 0 | type_id: result_type_id, |
1092 | 0 | block_id, |
1093 | 0 | }, |
1094 | | ); |
1095 | | } |
1096 | | Op::BitFieldInsert => { |
1097 | 0 | inst.expect(7)?; |
1098 | | |
1099 | 0 | let start = self.data_offset; |
1100 | 0 | let span = self.span_from_with_op(start); |
1101 | | |
1102 | 0 | let result_type_id = self.next()?; |
1103 | 0 | let result_id = self.next()?; |
1104 | 0 | let base_id = self.next()?; |
1105 | 0 | let insert_id = self.next()?; |
1106 | 0 | let offset_id = self.next()?; |
1107 | 0 | let count_id = self.next()?; |
1108 | 0 | let base_lexp = self.lookup_expression.lookup(base_id)?; |
1109 | 0 | let base_handle = get_expr_handle!(base_id, base_lexp); |
1110 | 0 | let insert_lexp = self.lookup_expression.lookup(insert_id)?; |
1111 | 0 | let insert_handle = get_expr_handle!(insert_id, insert_lexp); |
1112 | 0 | let offset_lexp = self.lookup_expression.lookup(offset_id)?; |
1113 | 0 | let offset_handle = get_expr_handle!(offset_id, offset_lexp); |
1114 | 0 | let offset_lookup_ty = self.lookup_type.lookup(offset_lexp.type_id)?; |
1115 | 0 | let count_lexp = self.lookup_expression.lookup(count_id)?; |
1116 | 0 | let count_handle = get_expr_handle!(count_id, count_lexp); |
1117 | 0 | let count_lookup_ty = self.lookup_type.lookup(count_lexp.type_id)?; |
1118 | | |
1119 | 0 | let offset_kind = ctx.module.types[offset_lookup_ty.handle] |
1120 | 0 | .inner |
1121 | 0 | .scalar_kind() |
1122 | 0 | .unwrap(); |
1123 | 0 | let count_kind = ctx.module.types[count_lookup_ty.handle] |
1124 | 0 | .inner |
1125 | 0 | .scalar_kind() |
1126 | 0 | .unwrap(); |
1127 | | |
1128 | 0 | let offset_cast_handle = if offset_kind != crate::ScalarKind::Uint { |
1129 | 0 | ctx.expressions.append( |
1130 | 0 | crate::Expression::As { |
1131 | 0 | expr: offset_handle, |
1132 | 0 | kind: crate::ScalarKind::Uint, |
1133 | 0 | convert: None, |
1134 | 0 | }, |
1135 | 0 | span, |
1136 | | ) |
1137 | | } else { |
1138 | 0 | offset_handle |
1139 | | }; |
1140 | | |
1141 | 0 | let count_cast_handle = if count_kind != crate::ScalarKind::Uint { |
1142 | 0 | ctx.expressions.append( |
1143 | 0 | crate::Expression::As { |
1144 | 0 | expr: count_handle, |
1145 | 0 | kind: crate::ScalarKind::Uint, |
1146 | 0 | convert: None, |
1147 | 0 | }, |
1148 | 0 | span, |
1149 | | ) |
1150 | | } else { |
1151 | 0 | count_handle |
1152 | | }; |
1153 | | |
1154 | 0 | let expr = crate::Expression::Math { |
1155 | 0 | fun: crate::MathFunction::InsertBits, |
1156 | 0 | arg: base_handle, |
1157 | 0 | arg1: Some(insert_handle), |
1158 | 0 | arg2: Some(offset_cast_handle), |
1159 | 0 | arg3: Some(count_cast_handle), |
1160 | 0 | }; |
1161 | 0 | self.lookup_expression.insert( |
1162 | 0 | result_id, |
1163 | 0 | LookupExpression { |
1164 | 0 | handle: ctx.expressions.append(expr, span), |
1165 | 0 | type_id: result_type_id, |
1166 | 0 | block_id, |
1167 | 0 | }, |
1168 | | ); |
1169 | | } |
1170 | | Op::BitFieldSExtract | Op::BitFieldUExtract => { |
1171 | 0 | inst.expect(6)?; |
1172 | | |
1173 | 0 | let result_type_id = self.next()?; |
1174 | 0 | let result_id = self.next()?; |
1175 | 0 | let base_id = self.next()?; |
1176 | 0 | let offset_id = self.next()?; |
1177 | 0 | let count_id = self.next()?; |
1178 | 0 | let base_lexp = self.lookup_expression.lookup(base_id)?; |
1179 | 0 | let base_handle = get_expr_handle!(base_id, base_lexp); |
1180 | 0 | let offset_lexp = self.lookup_expression.lookup(offset_id)?; |
1181 | 0 | let offset_handle = get_expr_handle!(offset_id, offset_lexp); |
1182 | 0 | let offset_lookup_ty = self.lookup_type.lookup(offset_lexp.type_id)?; |
1183 | 0 | let count_lexp = self.lookup_expression.lookup(count_id)?; |
1184 | 0 | let count_handle = get_expr_handle!(count_id, count_lexp); |
1185 | 0 | let count_lookup_ty = self.lookup_type.lookup(count_lexp.type_id)?; |
1186 | | |
1187 | 0 | let offset_kind = ctx.module.types[offset_lookup_ty.handle] |
1188 | 0 | .inner |
1189 | 0 | .scalar_kind() |
1190 | 0 | .unwrap(); |
1191 | 0 | let count_kind = ctx.module.types[count_lookup_ty.handle] |
1192 | 0 | .inner |
1193 | 0 | .scalar_kind() |
1194 | 0 | .unwrap(); |
1195 | | |
1196 | 0 | let offset_cast_handle = if offset_kind != crate::ScalarKind::Uint { |
1197 | 0 | ctx.expressions.append( |
1198 | 0 | crate::Expression::As { |
1199 | 0 | expr: offset_handle, |
1200 | 0 | kind: crate::ScalarKind::Uint, |
1201 | 0 | convert: None, |
1202 | 0 | }, |
1203 | 0 | span, |
1204 | | ) |
1205 | | } else { |
1206 | 0 | offset_handle |
1207 | | }; |
1208 | | |
1209 | 0 | let count_cast_handle = if count_kind != crate::ScalarKind::Uint { |
1210 | 0 | ctx.expressions.append( |
1211 | 0 | crate::Expression::As { |
1212 | 0 | expr: count_handle, |
1213 | 0 | kind: crate::ScalarKind::Uint, |
1214 | 0 | convert: None, |
1215 | 0 | }, |
1216 | 0 | span, |
1217 | | ) |
1218 | | } else { |
1219 | 0 | count_handle |
1220 | | }; |
1221 | | |
1222 | 0 | let expr = crate::Expression::Math { |
1223 | 0 | fun: crate::MathFunction::ExtractBits, |
1224 | 0 | arg: base_handle, |
1225 | 0 | arg1: Some(offset_cast_handle), |
1226 | 0 | arg2: Some(count_cast_handle), |
1227 | 0 | arg3: None, |
1228 | 0 | }; |
1229 | 0 | self.lookup_expression.insert( |
1230 | 0 | result_id, |
1231 | 0 | LookupExpression { |
1232 | 0 | handle: ctx.expressions.append(expr, span), |
1233 | 0 | type_id: result_type_id, |
1234 | 0 | block_id, |
1235 | 0 | }, |
1236 | | ); |
1237 | | } |
1238 | | Op::BitReverse | Op::BitCount => { |
1239 | 0 | inst.expect(4)?; |
1240 | | |
1241 | 0 | let result_type_id = self.next()?; |
1242 | 0 | let result_id = self.next()?; |
1243 | 0 | let base_id = self.next()?; |
1244 | 0 | let base_lexp = self.lookup_expression.lookup(base_id)?; |
1245 | 0 | let base_handle = get_expr_handle!(base_id, base_lexp); |
1246 | 0 | let expr = crate::Expression::Math { |
1247 | 0 | fun: match inst.op { |
1248 | 0 | Op::BitReverse => crate::MathFunction::ReverseBits, |
1249 | 0 | Op::BitCount => crate::MathFunction::CountOneBits, |
1250 | 0 | _ => unreachable!(), |
1251 | | }, |
1252 | 0 | arg: base_handle, |
1253 | 0 | arg1: None, |
1254 | 0 | arg2: None, |
1255 | 0 | arg3: None, |
1256 | | }; |
1257 | 0 | self.lookup_expression.insert( |
1258 | 0 | result_id, |
1259 | 0 | LookupExpression { |
1260 | 0 | handle: ctx.expressions.append(expr, span), |
1261 | 0 | type_id: result_type_id, |
1262 | 0 | block_id, |
1263 | 0 | }, |
1264 | | ); |
1265 | | } |
1266 | | Op::OuterProduct => { |
1267 | 1 | inst.expect(5)?; |
1268 | | |
1269 | 1 | let result_type_id = self.next()?; |
1270 | 1 | let result_id = self.next()?; |
1271 | 1 | let left_id = self.next()?; |
1272 | 1 | let right_id = self.next()?; |
1273 | 1 | let left_lexp = self.lookup_expression.lookup(left_id)?; |
1274 | 1 | let left_handle = get_expr_handle!(left_id, left_lexp); |
1275 | 1 | let right_lexp = self.lookup_expression.lookup(right_id)?; |
1276 | 1 | let right_handle = get_expr_handle!(right_id, right_lexp); |
1277 | 1 | let expr = crate::Expression::Math { |
1278 | 1 | fun: crate::MathFunction::Outer, |
1279 | 1 | arg: left_handle, |
1280 | 1 | arg1: Some(right_handle), |
1281 | 1 | arg2: None, |
1282 | 1 | arg3: None, |
1283 | 1 | }; |
1284 | 1 | self.lookup_expression.insert( |
1285 | 1 | result_id, |
1286 | 1 | LookupExpression { |
1287 | 1 | handle: ctx.expressions.append(expr, span), |
1288 | 1 | type_id: result_type_id, |
1289 | 1 | block_id, |
1290 | 1 | }, |
1291 | | ); |
1292 | | } |
1293 | | // Bitwise instructions |
1294 | | Op::Not => { |
1295 | 0 | inst.expect(4)?; |
1296 | 0 | self.parse_expr_unary_op_sign_adjusted( |
1297 | 0 | ctx, |
1298 | 0 | &mut emitter, |
1299 | 0 | &mut block, |
1300 | 0 | block_id, |
1301 | 0 | body_idx, |
1302 | 0 | crate::UnaryOperator::BitwiseNot, |
1303 | 0 | )?; |
1304 | | } |
1305 | | Op::ShiftRightLogical => { |
1306 | 0 | inst.expect(5)?; |
1307 | | //TODO: convert input and result to unsigned |
1308 | 0 | parse_expr_op!(crate::BinaryOperator::ShiftRight, SHIFT)?; |
1309 | | } |
1310 | | Op::ShiftRightArithmetic => { |
1311 | 0 | inst.expect(5)?; |
1312 | | //TODO: convert input and result to signed |
1313 | 0 | parse_expr_op!(crate::BinaryOperator::ShiftRight, SHIFT)?; |
1314 | | } |
1315 | | Op::ShiftLeftLogical => { |
1316 | 0 | inst.expect(5)?; |
1317 | 0 | parse_expr_op!(crate::BinaryOperator::ShiftLeft, SHIFT)?; |
1318 | | } |
1319 | | // Sampling |
1320 | | Op::Image => { |
1321 | 0 | inst.expect(4)?; |
1322 | 0 | self.parse_image_uncouple(block_id)?; |
1323 | | } |
1324 | | Op::SampledImage => { |
1325 | 0 | inst.expect(5)?; |
1326 | 0 | self.parse_image_couple()?; |
1327 | | } |
1328 | | Op::ImageWrite => { |
1329 | 0 | let extra = inst.expect_at_least(4)?; |
1330 | 0 | let stmt = |
1331 | 0 | self.parse_image_write(extra, ctx, &mut emitter, &mut block, body_idx)?; |
1332 | 0 | block.extend(emitter.finish(ctx.expressions)); |
1333 | 0 | block.push(stmt, span); |
1334 | 0 | emitter.start(ctx.expressions); |
1335 | | } |
1336 | | Op::ImageFetch | Op::ImageRead => { |
1337 | 0 | let extra = inst.expect_at_least(5)?; |
1338 | 0 | self.parse_image_load( |
1339 | 0 | extra, |
1340 | 0 | ctx, |
1341 | 0 | &mut emitter, |
1342 | 0 | &mut block, |
1343 | 0 | block_id, |
1344 | 0 | body_idx, |
1345 | 0 | )?; |
1346 | | } |
1347 | | Op::ImageSampleImplicitLod | Op::ImageSampleExplicitLod => { |
1348 | 0 | let extra = inst.expect_at_least(5)?; |
1349 | 0 | let options = image::SamplingOptions { |
1350 | 0 | compare: false, |
1351 | 0 | project: false, |
1352 | 0 | gather: false, |
1353 | 0 | }; |
1354 | 0 | self.parse_image_sample( |
1355 | 0 | extra, |
1356 | 0 | options, |
1357 | 0 | ctx, |
1358 | 0 | &mut emitter, |
1359 | 0 | &mut block, |
1360 | 0 | block_id, |
1361 | 0 | body_idx, |
1362 | 0 | )?; |
1363 | | } |
1364 | | Op::ImageSampleProjImplicitLod | Op::ImageSampleProjExplicitLod => { |
1365 | 0 | let extra = inst.expect_at_least(5)?; |
1366 | 0 | let options = image::SamplingOptions { |
1367 | 0 | compare: false, |
1368 | 0 | project: true, |
1369 | 0 | gather: false, |
1370 | 0 | }; |
1371 | 0 | self.parse_image_sample( |
1372 | 0 | extra, |
1373 | 0 | options, |
1374 | 0 | ctx, |
1375 | 0 | &mut emitter, |
1376 | 0 | &mut block, |
1377 | 0 | block_id, |
1378 | 0 | body_idx, |
1379 | 0 | )?; |
1380 | | } |
1381 | | Op::ImageSampleDrefImplicitLod | Op::ImageSampleDrefExplicitLod => { |
1382 | 0 | let extra = inst.expect_at_least(6)?; |
1383 | 0 | let options = image::SamplingOptions { |
1384 | 0 | compare: true, |
1385 | 0 | project: false, |
1386 | 0 | gather: false, |
1387 | 0 | }; |
1388 | 0 | self.parse_image_sample( |
1389 | 0 | extra, |
1390 | 0 | options, |
1391 | 0 | ctx, |
1392 | 0 | &mut emitter, |
1393 | 0 | &mut block, |
1394 | 0 | block_id, |
1395 | 0 | body_idx, |
1396 | 0 | )?; |
1397 | | } |
1398 | | Op::ImageSampleProjDrefImplicitLod | Op::ImageSampleProjDrefExplicitLod => { |
1399 | 0 | let extra = inst.expect_at_least(6)?; |
1400 | 0 | let options = image::SamplingOptions { |
1401 | 0 | compare: true, |
1402 | 0 | project: true, |
1403 | 0 | gather: false, |
1404 | 0 | }; |
1405 | 0 | self.parse_image_sample( |
1406 | 0 | extra, |
1407 | 0 | options, |
1408 | 0 | ctx, |
1409 | 0 | &mut emitter, |
1410 | 0 | &mut block, |
1411 | 0 | block_id, |
1412 | 0 | body_idx, |
1413 | 0 | )?; |
1414 | | } |
1415 | | Op::ImageGather => { |
1416 | 0 | let extra = inst.expect_at_least(6)?; |
1417 | 0 | let options = image::SamplingOptions { |
1418 | 0 | compare: false, |
1419 | 0 | project: false, |
1420 | 0 | gather: true, |
1421 | 0 | }; |
1422 | 0 | self.parse_image_sample( |
1423 | 0 | extra, |
1424 | 0 | options, |
1425 | 0 | ctx, |
1426 | 0 | &mut emitter, |
1427 | 0 | &mut block, |
1428 | 0 | block_id, |
1429 | 0 | body_idx, |
1430 | 0 | )?; |
1431 | | } |
1432 | | Op::ImageDrefGather => { |
1433 | 0 | let extra = inst.expect_at_least(6)?; |
1434 | 0 | let options = image::SamplingOptions { |
1435 | 0 | compare: true, |
1436 | 0 | project: false, |
1437 | 0 | gather: true, |
1438 | 0 | }; |
1439 | 0 | self.parse_image_sample( |
1440 | 0 | extra, |
1441 | 0 | options, |
1442 | 0 | ctx, |
1443 | 0 | &mut emitter, |
1444 | 0 | &mut block, |
1445 | 0 | block_id, |
1446 | 0 | body_idx, |
1447 | 0 | )?; |
1448 | | } |
1449 | | Op::ImageQuerySize => { |
1450 | 0 | inst.expect(4)?; |
1451 | 0 | self.parse_image_query_size( |
1452 | | false, |
1453 | 0 | ctx, |
1454 | 0 | &mut emitter, |
1455 | 0 | &mut block, |
1456 | 0 | block_id, |
1457 | 0 | body_idx, |
1458 | 0 | )?; |
1459 | | } |
1460 | | Op::ImageQuerySizeLod => { |
1461 | 0 | inst.expect(5)?; |
1462 | 0 | self.parse_image_query_size( |
1463 | | true, |
1464 | 0 | ctx, |
1465 | 0 | &mut emitter, |
1466 | 0 | &mut block, |
1467 | 0 | block_id, |
1468 | 0 | body_idx, |
1469 | 0 | )?; |
1470 | | } |
1471 | | Op::ImageQueryLevels => { |
1472 | 0 | inst.expect(4)?; |
1473 | 0 | self.parse_image_query_other(crate::ImageQuery::NumLevels, ctx, block_id)?; |
1474 | | } |
1475 | | Op::ImageQuerySamples => { |
1476 | 0 | inst.expect(4)?; |
1477 | 0 | self.parse_image_query_other(crate::ImageQuery::NumSamples, ctx, block_id)?; |
1478 | | } |
1479 | | // other ops |
1480 | | Op::Select => { |
1481 | 0 | inst.expect(6)?; |
1482 | 0 | let result_type_id = self.next()?; |
1483 | 0 | let result_id = self.next()?; |
1484 | 0 | let condition = self.next()?; |
1485 | 0 | let o1_id = self.next()?; |
1486 | 0 | let o2_id = self.next()?; |
1487 | | |
1488 | 0 | let cond_lexp = self.lookup_expression.lookup(condition)?; |
1489 | 0 | let cond_handle = get_expr_handle!(condition, cond_lexp); |
1490 | 0 | let o1_lexp = self.lookup_expression.lookup(o1_id)?; |
1491 | 0 | let o1_handle = get_expr_handle!(o1_id, o1_lexp); |
1492 | 0 | let o2_lexp = self.lookup_expression.lookup(o2_id)?; |
1493 | 0 | let o2_handle = get_expr_handle!(o2_id, o2_lexp); |
1494 | | |
1495 | 0 | let expr = crate::Expression::Select { |
1496 | 0 | condition: cond_handle, |
1497 | 0 | accept: o1_handle, |
1498 | 0 | reject: o2_handle, |
1499 | 0 | }; |
1500 | 0 | self.lookup_expression.insert( |
1501 | 0 | result_id, |
1502 | 0 | LookupExpression { |
1503 | 0 | handle: ctx.expressions.append(expr, span), |
1504 | 0 | type_id: result_type_id, |
1505 | 0 | block_id, |
1506 | 0 | }, |
1507 | | ); |
1508 | | } |
1509 | | Op::VectorShuffle => { |
1510 | 0 | inst.expect_at_least(5)?; |
1511 | 0 | let result_type_id = self.next()?; |
1512 | 0 | let result_id = self.next()?; |
1513 | 0 | let v1_id = self.next()?; |
1514 | 0 | let v2_id = self.next()?; |
1515 | | |
1516 | 0 | let v1_lexp = self.lookup_expression.lookup(v1_id)?; |
1517 | 0 | let v1_lty = self.lookup_type.lookup(v1_lexp.type_id)?; |
1518 | 0 | let v1_handle = get_expr_handle!(v1_id, v1_lexp); |
1519 | 0 | let n1 = match ctx.module.types[v1_lty.handle].inner { |
1520 | 0 | crate::TypeInner::Vector { size, .. } => size as u32, |
1521 | 0 | _ => return Err(Error::InvalidInnerType(v1_lexp.type_id)), |
1522 | | }; |
1523 | 0 | let v2_lexp = self.lookup_expression.lookup(v2_id)?; |
1524 | 0 | let v2_lty = self.lookup_type.lookup(v2_lexp.type_id)?; |
1525 | 0 | let v2_handle = get_expr_handle!(v2_id, v2_lexp); |
1526 | 0 | let n2 = match ctx.module.types[v2_lty.handle].inner { |
1527 | 0 | crate::TypeInner::Vector { size, .. } => size as u32, |
1528 | 0 | _ => return Err(Error::InvalidInnerType(v2_lexp.type_id)), |
1529 | | }; |
1530 | | |
1531 | 0 | self.temp_bytes.clear(); |
1532 | 0 | let mut max_component = 0; |
1533 | 0 | for _ in 5..inst.wc as usize { |
1534 | 0 | let mut index = self.next()?; |
1535 | 0 | if index == u32::MAX { |
1536 | 0 | // treat Undefined as X |
1537 | 0 | index = 0; |
1538 | 0 | } |
1539 | 0 | max_component = max_component.max(index); |
1540 | 0 | self.temp_bytes.push(index as u8); |
1541 | | } |
1542 | | |
1543 | | // Check for swizzle first. |
1544 | 0 | let expr = if max_component < n1 { |
1545 | | use crate::SwizzleComponent as Sc; |
1546 | 0 | let size = match self.temp_bytes.len() { |
1547 | 0 | 2 => crate::VectorSize::Bi, |
1548 | 0 | 3 => crate::VectorSize::Tri, |
1549 | 0 | _ => crate::VectorSize::Quad, |
1550 | | }; |
1551 | 0 | let mut pattern = [Sc::X; 4]; |
1552 | 0 | for (pat, index) in pattern.iter_mut().zip(self.temp_bytes.drain(..)) { |
1553 | 0 | *pat = match index { |
1554 | 0 | 0 => Sc::X, |
1555 | 0 | 1 => Sc::Y, |
1556 | 0 | 2 => Sc::Z, |
1557 | 0 | _ => Sc::W, |
1558 | | }; |
1559 | | } |
1560 | 0 | crate::Expression::Swizzle { |
1561 | 0 | size, |
1562 | 0 | vector: v1_handle, |
1563 | 0 | pattern, |
1564 | 0 | } |
1565 | | } else { |
1566 | | // Fall back to access + compose |
1567 | 0 | let mut components = Vec::with_capacity(self.temp_bytes.len()); |
1568 | 0 | for index in self.temp_bytes.drain(..).map(|i| i as u32) {Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#6}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#6} |
1569 | 0 | let expr = if index < n1 { |
1570 | 0 | crate::Expression::AccessIndex { |
1571 | 0 | base: v1_handle, |
1572 | 0 | index, |
1573 | 0 | } |
1574 | 0 | } else if index < n1 + n2 { |
1575 | 0 | crate::Expression::AccessIndex { |
1576 | 0 | base: v2_handle, |
1577 | 0 | index: index - n1, |
1578 | 0 | } |
1579 | | } else { |
1580 | 0 | return Err(Error::InvalidAccessIndex(index)); |
1581 | | }; |
1582 | 0 | components.push(ctx.expressions.append(expr, span)); |
1583 | | } |
1584 | | crate::Expression::Compose { |
1585 | 0 | ty: self.lookup_type.lookup(result_type_id)?.handle, |
1586 | 0 | components, |
1587 | | } |
1588 | | }; |
1589 | | |
1590 | 0 | self.lookup_expression.insert( |
1591 | 0 | result_id, |
1592 | 0 | LookupExpression { |
1593 | 0 | handle: ctx.expressions.append(expr, span), |
1594 | 0 | type_id: result_type_id, |
1595 | 0 | block_id, |
1596 | 0 | }, |
1597 | | ); |
1598 | | } |
1599 | | Op::Bitcast |
1600 | | | Op::ConvertSToF |
1601 | | | Op::ConvertUToF |
1602 | | | Op::ConvertFToU |
1603 | | | Op::ConvertFToS |
1604 | | | Op::FConvert |
1605 | | | Op::UConvert |
1606 | | | Op::SConvert => { |
1607 | 0 | inst.expect(4)?; |
1608 | 0 | let result_type_id = self.next()?; |
1609 | 0 | let result_id = self.next()?; |
1610 | 0 | let value_id = self.next()?; |
1611 | | |
1612 | 0 | let value_lexp = self.lookup_expression.lookup(value_id)?; |
1613 | 0 | let ty_lookup = self.lookup_type.lookup(result_type_id)?; |
1614 | 0 | let scalar = match ctx.module.types[ty_lookup.handle].inner { |
1615 | 0 | crate::TypeInner::Scalar(scalar) |
1616 | 0 | | crate::TypeInner::Vector { scalar, .. } |
1617 | 0 | | crate::TypeInner::Matrix { scalar, .. } => scalar, |
1618 | 0 | _ => return Err(Error::InvalidAsType(ty_lookup.handle)), |
1619 | | }; |
1620 | | |
1621 | 0 | let expr = crate::Expression::As { |
1622 | 0 | expr: get_expr_handle!(value_id, value_lexp), |
1623 | 0 | kind: scalar.kind, |
1624 | 0 | convert: if scalar.kind == crate::ScalarKind::Bool { |
1625 | 0 | Some(crate::BOOL_WIDTH) |
1626 | 0 | } else if inst.op == Op::Bitcast { |
1627 | 0 | None |
1628 | | } else { |
1629 | 0 | Some(scalar.width) |
1630 | | }, |
1631 | | }; |
1632 | 0 | self.lookup_expression.insert( |
1633 | 0 | result_id, |
1634 | 0 | LookupExpression { |
1635 | 0 | handle: ctx.expressions.append(expr, span), |
1636 | 0 | type_id: result_type_id, |
1637 | 0 | block_id, |
1638 | 0 | }, |
1639 | | ); |
1640 | | } |
1641 | | Op::FunctionCall => { |
1642 | 2 | inst.expect_at_least(4)?; |
1643 | | |
1644 | 2 | let result_type_id = self.next()?; |
1645 | 2 | let result_id = self.next()?; |
1646 | 2 | let func_id = self.next()?; |
1647 | | |
1648 | 2 | let mut arguments = Vec::with_capacity(inst.wc as usize - 4); |
1649 | 2 | for _ in 0..arguments.capacity() { |
1650 | 2 | let arg_id = self.next()?; |
1651 | 2 | let lexp = self.lookup_expression.lookup(arg_id)?; |
1652 | 2 | arguments.push(get_expr_handle!(arg_id, lexp)); |
1653 | | } |
1654 | | |
1655 | 2 | block.extend(emitter.finish(ctx.expressions)); |
1656 | | |
1657 | | // We just need a unique handle here, nothing more. |
1658 | 2 | let function = self.add_call(ctx.function_id, func_id); |
1659 | | |
1660 | 2 | let result = if self.lookup_void_type == Some(result_type_id) { |
1661 | 0 | None |
1662 | | } else { |
1663 | 2 | let expr_handle = ctx |
1664 | 2 | .expressions |
1665 | 2 | .append(crate::Expression::CallResult(function), span); |
1666 | 2 | self.lookup_expression.insert( |
1667 | 2 | result_id, |
1668 | 2 | LookupExpression { |
1669 | 2 | handle: expr_handle, |
1670 | 2 | type_id: result_type_id, |
1671 | 2 | block_id, |
1672 | 2 | }, |
1673 | | ); |
1674 | 2 | Some(expr_handle) |
1675 | | }; |
1676 | 2 | block.push( |
1677 | 2 | crate::Statement::Call { |
1678 | 2 | function, |
1679 | 2 | arguments, |
1680 | 2 | result, |
1681 | 2 | }, |
1682 | 2 | span, |
1683 | | ); |
1684 | 2 | emitter.start(ctx.expressions); |
1685 | | } |
1686 | | Op::ExtInst => { |
1687 | | use crate::MathFunction as Mf; |
1688 | | use spirv::GLOp as Glo; |
1689 | | |
1690 | 0 | let base_wc = 5; |
1691 | 0 | inst.expect_at_least(base_wc)?; |
1692 | | |
1693 | 0 | let result_type_id = self.next()?; |
1694 | 0 | let result_id = self.next()?; |
1695 | 0 | let set_id = self.next()?; |
1696 | 0 | if Some(set_id) != self.ext_glsl_id { |
1697 | 0 | return Err(Error::UnsupportedExtInstSet(set_id)); |
1698 | 0 | } |
1699 | 0 | let inst_id = self.next()?; |
1700 | 0 | let gl_op = Glo::from_u32(inst_id).ok_or(Error::UnsupportedExtInst(inst_id))?; |
1701 | | |
1702 | 0 | let fun = match gl_op { |
1703 | 0 | Glo::Round => Mf::Round, |
1704 | 0 | Glo::RoundEven => Mf::Round, |
1705 | 0 | Glo::Trunc => Mf::Trunc, |
1706 | 0 | Glo::FAbs | Glo::SAbs => Mf::Abs, |
1707 | 0 | Glo::FSign | Glo::SSign => Mf::Sign, |
1708 | 0 | Glo::Floor => Mf::Floor, |
1709 | 0 | Glo::Ceil => Mf::Ceil, |
1710 | 0 | Glo::Fract => Mf::Fract, |
1711 | 0 | Glo::Sin => Mf::Sin, |
1712 | 0 | Glo::Cos => Mf::Cos, |
1713 | 0 | Glo::Tan => Mf::Tan, |
1714 | 0 | Glo::Asin => Mf::Asin, |
1715 | 0 | Glo::Acos => Mf::Acos, |
1716 | 0 | Glo::Atan => Mf::Atan, |
1717 | 0 | Glo::Sinh => Mf::Sinh, |
1718 | 0 | Glo::Cosh => Mf::Cosh, |
1719 | 0 | Glo::Tanh => Mf::Tanh, |
1720 | 0 | Glo::Atan2 => Mf::Atan2, |
1721 | 0 | Glo::Asinh => Mf::Asinh, |
1722 | 0 | Glo::Acosh => Mf::Acosh, |
1723 | 0 | Glo::Atanh => Mf::Atanh, |
1724 | 0 | Glo::Radians => Mf::Radians, |
1725 | 0 | Glo::Degrees => Mf::Degrees, |
1726 | 0 | Glo::Pow => Mf::Pow, |
1727 | 0 | Glo::Exp => Mf::Exp, |
1728 | 0 | Glo::Log => Mf::Log, |
1729 | 0 | Glo::Exp2 => Mf::Exp2, |
1730 | 0 | Glo::Log2 => Mf::Log2, |
1731 | 0 | Glo::Sqrt => Mf::Sqrt, |
1732 | 0 | Glo::InverseSqrt => Mf::InverseSqrt, |
1733 | 0 | Glo::MatrixInverse => Mf::Inverse, |
1734 | 0 | Glo::Determinant => Mf::Determinant, |
1735 | 0 | Glo::ModfStruct => Mf::Modf, |
1736 | 0 | Glo::FMin | Glo::UMin | Glo::SMin | Glo::NMin => Mf::Min, |
1737 | 0 | Glo::FMax | Glo::UMax | Glo::SMax | Glo::NMax => Mf::Max, |
1738 | 0 | Glo::FClamp | Glo::UClamp | Glo::SClamp | Glo::NClamp => Mf::Clamp, |
1739 | 0 | Glo::FMix => Mf::Mix, |
1740 | 0 | Glo::Step => Mf::Step, |
1741 | 0 | Glo::SmoothStep => Mf::SmoothStep, |
1742 | 0 | Glo::Fma => Mf::Fma, |
1743 | 0 | Glo::FrexpStruct => Mf::Frexp, |
1744 | 0 | Glo::Ldexp => Mf::Ldexp, |
1745 | 0 | Glo::Length => Mf::Length, |
1746 | 0 | Glo::Distance => Mf::Distance, |
1747 | 0 | Glo::Cross => Mf::Cross, |
1748 | 0 | Glo::Normalize => Mf::Normalize, |
1749 | 0 | Glo::FaceForward => Mf::FaceForward, |
1750 | 0 | Glo::Reflect => Mf::Reflect, |
1751 | 0 | Glo::Refract => Mf::Refract, |
1752 | 0 | Glo::PackUnorm4x8 => Mf::Pack4x8unorm, |
1753 | 0 | Glo::PackSnorm4x8 => Mf::Pack4x8snorm, |
1754 | 0 | Glo::PackHalf2x16 => Mf::Pack2x16float, |
1755 | 0 | Glo::PackUnorm2x16 => Mf::Pack2x16unorm, |
1756 | 0 | Glo::PackSnorm2x16 => Mf::Pack2x16snorm, |
1757 | 0 | Glo::UnpackUnorm4x8 => Mf::Unpack4x8unorm, |
1758 | 0 | Glo::UnpackSnorm4x8 => Mf::Unpack4x8snorm, |
1759 | 0 | Glo::UnpackHalf2x16 => Mf::Unpack2x16float, |
1760 | 0 | Glo::UnpackUnorm2x16 => Mf::Unpack2x16unorm, |
1761 | 0 | Glo::UnpackSnorm2x16 => Mf::Unpack2x16snorm, |
1762 | 0 | Glo::FindILsb => Mf::FirstTrailingBit, |
1763 | 0 | Glo::FindUMsb | Glo::FindSMsb => Mf::FirstLeadingBit, |
1764 | | // TODO: https://github.com/gfx-rs/naga/issues/2526 |
1765 | 0 | Glo::Modf | Glo::Frexp => return Err(Error::UnsupportedExtInst(inst_id)), |
1766 | | Glo::IMix |
1767 | | | Glo::PackDouble2x32 |
1768 | | | Glo::UnpackDouble2x32 |
1769 | | | Glo::InterpolateAtCentroid |
1770 | | | Glo::InterpolateAtSample |
1771 | | | Glo::InterpolateAtOffset => { |
1772 | 0 | return Err(Error::UnsupportedExtInst(inst_id)) |
1773 | | } |
1774 | | }; |
1775 | | |
1776 | 0 | let arg_count = fun.argument_count(); |
1777 | 0 | inst.expect(base_wc + arg_count as u16)?; |
1778 | 0 | let arg = { |
1779 | 0 | let arg_id = self.next()?; |
1780 | 0 | let lexp = self.lookup_expression.lookup(arg_id)?; |
1781 | 0 | get_expr_handle!(arg_id, lexp) |
1782 | | }; |
1783 | 0 | let arg1 = if arg_count > 1 { |
1784 | 0 | let arg_id = self.next()?; |
1785 | 0 | let lexp = self.lookup_expression.lookup(arg_id)?; |
1786 | 0 | Some(get_expr_handle!(arg_id, lexp)) |
1787 | | } else { |
1788 | 0 | None |
1789 | | }; |
1790 | 0 | let arg2 = if arg_count > 2 { |
1791 | 0 | let arg_id = self.next()?; |
1792 | 0 | let lexp = self.lookup_expression.lookup(arg_id)?; |
1793 | 0 | Some(get_expr_handle!(arg_id, lexp)) |
1794 | | } else { |
1795 | 0 | None |
1796 | | }; |
1797 | 0 | let arg3 = if arg_count > 3 { |
1798 | 0 | let arg_id = self.next()?; |
1799 | 0 | let lexp = self.lookup_expression.lookup(arg_id)?; |
1800 | 0 | Some(get_expr_handle!(arg_id, lexp)) |
1801 | | } else { |
1802 | 0 | None |
1803 | | }; |
1804 | | |
1805 | 0 | let expr = crate::Expression::Math { |
1806 | 0 | fun, |
1807 | 0 | arg, |
1808 | 0 | arg1, |
1809 | 0 | arg2, |
1810 | 0 | arg3, |
1811 | 0 | }; |
1812 | 0 | self.lookup_expression.insert( |
1813 | 0 | result_id, |
1814 | 0 | LookupExpression { |
1815 | 0 | handle: ctx.expressions.append(expr, span), |
1816 | 0 | type_id: result_type_id, |
1817 | 0 | block_id, |
1818 | 0 | }, |
1819 | | ); |
1820 | | } |
1821 | | // Relational and Logical Instructions |
1822 | | Op::LogicalNot => { |
1823 | 0 | inst.expect(4)?; |
1824 | 0 | parse_expr_op!(crate::UnaryOperator::LogicalNot, UNARY)?; |
1825 | | } |
1826 | | Op::LogicalOr => { |
1827 | 0 | inst.expect(5)?; |
1828 | 0 | parse_expr_op!(crate::BinaryOperator::LogicalOr, BINARY)?; |
1829 | | } |
1830 | | Op::LogicalAnd => { |
1831 | 0 | inst.expect(5)?; |
1832 | 0 | parse_expr_op!(crate::BinaryOperator::LogicalAnd, BINARY)?; |
1833 | | } |
1834 | | Op::SGreaterThan | Op::SGreaterThanEqual | Op::SLessThan | Op::SLessThanEqual => { |
1835 | 0 | inst.expect(5)?; |
1836 | 0 | self.parse_expr_int_comparison( |
1837 | 0 | ctx, |
1838 | 0 | &mut emitter, |
1839 | 0 | &mut block, |
1840 | 0 | block_id, |
1841 | 0 | body_idx, |
1842 | 0 | map_binary_operator(inst.op)?, |
1843 | 0 | crate::ScalarKind::Sint, |
1844 | 0 | )?; |
1845 | | } |
1846 | | Op::UGreaterThan | Op::UGreaterThanEqual | Op::ULessThan | Op::ULessThanEqual => { |
1847 | 0 | inst.expect(5)?; |
1848 | 0 | self.parse_expr_int_comparison( |
1849 | 0 | ctx, |
1850 | 0 | &mut emitter, |
1851 | 0 | &mut block, |
1852 | 0 | block_id, |
1853 | 0 | body_idx, |
1854 | 0 | map_binary_operator(inst.op)?, |
1855 | 0 | crate::ScalarKind::Uint, |
1856 | 0 | )?; |
1857 | | } |
1858 | | Op::FOrdEqual |
1859 | | | Op::FUnordEqual |
1860 | | | Op::FOrdNotEqual |
1861 | | | Op::FUnordNotEqual |
1862 | | | Op::FOrdLessThan |
1863 | | | Op::FUnordLessThan |
1864 | | | Op::FOrdGreaterThan |
1865 | | | Op::FUnordGreaterThan |
1866 | | | Op::FOrdLessThanEqual |
1867 | | | Op::FUnordLessThanEqual |
1868 | | | Op::FOrdGreaterThanEqual |
1869 | | | Op::FUnordGreaterThanEqual |
1870 | | | Op::LogicalEqual |
1871 | | | Op::LogicalNotEqual => { |
1872 | 0 | inst.expect(5)?; |
1873 | 0 | let operator = map_binary_operator(inst.op)?; |
1874 | 0 | parse_expr_op!(operator, BINARY)?; |
1875 | | } |
1876 | | Op::Any | Op::All | Op::IsNan | Op::IsInf | Op::IsFinite | Op::IsNormal => { |
1877 | 0 | inst.expect(4)?; |
1878 | 0 | let result_type_id = self.next()?; |
1879 | 0 | let result_id = self.next()?; |
1880 | 0 | let arg_id = self.next()?; |
1881 | | |
1882 | 0 | let arg_lexp = self.lookup_expression.lookup(arg_id)?; |
1883 | 0 | let arg_handle = get_expr_handle!(arg_id, arg_lexp); |
1884 | | |
1885 | 0 | let expr = crate::Expression::Relational { |
1886 | 0 | fun: map_relational_fun(inst.op)?, |
1887 | 0 | argument: arg_handle, |
1888 | | }; |
1889 | 0 | self.lookup_expression.insert( |
1890 | 0 | result_id, |
1891 | 0 | LookupExpression { |
1892 | 0 | handle: ctx.expressions.append(expr, span), |
1893 | 0 | type_id: result_type_id, |
1894 | 0 | block_id, |
1895 | 0 | }, |
1896 | | ); |
1897 | | } |
1898 | | Op::Kill => { |
1899 | 0 | inst.expect(1)?; |
1900 | 0 | break Some(crate::Statement::Kill); |
1901 | | } |
1902 | | Op::Unreachable => { |
1903 | 0 | inst.expect(1)?; |
1904 | 0 | break None; |
1905 | | } |
1906 | | Op::Return => { |
1907 | 0 | inst.expect(1)?; |
1908 | 0 | break Some(crate::Statement::Return { value: None }); |
1909 | | } |
1910 | | Op::ReturnValue => { |
1911 | 0 | inst.expect(2)?; |
1912 | 0 | let value_id = self.next()?; |
1913 | 0 | let value_lexp = self.lookup_expression.lookup(value_id)?; |
1914 | 0 | let value_handle = get_expr_handle!(value_id, value_lexp); |
1915 | 0 | break Some(crate::Statement::Return { |
1916 | 0 | value: Some(value_handle), |
1917 | 0 | }); |
1918 | | } |
1919 | | Op::Branch => { |
1920 | 0 | inst.expect(2)?; |
1921 | 0 | let target_id = self.next()?; |
1922 | | |
1923 | | // If this is a branch to a merge or continue block, then |
1924 | | // that ends the current body. |
1925 | | // |
1926 | | // Why can we count on finding an entry here when it's |
1927 | | // needed? SPIR-V requires dominators to appear before |
1928 | | // blocks they dominate, so we will have visited a |
1929 | | // structured control construct's header block before |
1930 | | // anything that could exit it. |
1931 | 0 | if let Some(info) = ctx.mergers.get(&target_id) { |
1932 | 0 | block.extend(emitter.finish(ctx.expressions)); |
1933 | 0 | ctx.blocks.insert(block_id, block); |
1934 | 0 | let body = &mut ctx.bodies[body_idx]; |
1935 | 0 | body.data.push(BodyFragment::BlockId(block_id)); |
1936 | | |
1937 | 0 | merger(body, info); |
1938 | | |
1939 | 0 | return Ok(()); |
1940 | 0 | } |
1941 | | |
1942 | | // If `target_id` has no entry in `ctx.body_for_label`, then |
1943 | | // this must be the only branch to it: |
1944 | | // |
1945 | | // - We've already established that it's not anybody's merge |
1946 | | // block. |
1947 | | // |
1948 | | // - It can't be a switch case. Only switch header blocks |
1949 | | // and other switch cases can branch to a switch case. |
1950 | | // Switch header blocks must dominate all their cases, so |
1951 | | // they must appear in the file before them, and when we |
1952 | | // see `Op::Switch` we populate `ctx.body_for_label` for |
1953 | | // every switch case. |
1954 | | // |
1955 | | // Thus, `target_id` must be a simple extension of the |
1956 | | // current block, which we dominate, so we know we'll |
1957 | | // encounter it later in the file. |
1958 | 0 | ctx.body_for_label.entry(target_id).or_insert(body_idx); |
1959 | | |
1960 | 0 | break None; |
1961 | | } |
1962 | | Op::BranchConditional => { |
1963 | 0 | inst.expect_at_least(4)?; |
1964 | | |
1965 | 0 | let condition = { |
1966 | 0 | let condition_id = self.next()?; |
1967 | 0 | let lexp = self.lookup_expression.lookup(condition_id)?; |
1968 | 0 | get_expr_handle!(condition_id, lexp) |
1969 | | }; |
1970 | | |
1971 | | // HACK(eddyb) Naga doesn't seem to have this helper, |
1972 | | // so it's declared on the fly here for convenience. |
1973 | | #[derive(Copy, Clone)] |
1974 | | struct BranchTarget { |
1975 | | label_id: spirv::Word, |
1976 | | merge_info: Option<MergeBlockInformation>, |
1977 | | } |
1978 | 0 | let branch_target = |label_id| BranchTarget { |
1979 | 0 | label_id, |
1980 | 0 | merge_info: ctx.mergers.get(&label_id).copied(), |
1981 | 0 | }; Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#7}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#7} |
1982 | | |
1983 | 0 | let true_target = branch_target(self.next()?); |
1984 | 0 | let false_target = branch_target(self.next()?); |
1985 | | |
1986 | | // Consume branch weights |
1987 | 0 | for _ in 4..inst.wc { |
1988 | 0 | let _ = self.next()?; |
1989 | | } |
1990 | | |
1991 | | // Handle `OpBranchConditional`s used at the end of a loop |
1992 | | // body's "continuing" section as a "conditional backedge", |
1993 | | // i.e. a `do`-`while` condition, or `break if` in WGSL. |
1994 | | |
1995 | | // HACK(eddyb) this has to go to the parent *twice*, because |
1996 | | // `OpLoopMerge` left the "continuing" section nested in the |
1997 | | // loop body in terms of `parent`, but not `BodyFragment`. |
1998 | 0 | let parent_body_idx = ctx.bodies[body_idx].parent; |
1999 | 0 | let parent_parent_body_idx = ctx.bodies[parent_body_idx].parent; |
2000 | 0 | match ctx.bodies[parent_parent_body_idx].data[..] { |
2001 | | // The `OpLoopMerge`'s `continuing` block and the loop's |
2002 | | // backedge block may not be the same, but they'll both |
2003 | | // belong to the same body. |
2004 | | [.., BodyFragment::Loop { |
2005 | 0 | body: loop_body_idx, |
2006 | 0 | continuing: loop_continuing_idx, |
2007 | 0 | break_if: ref mut break_if_slot @ None, |
2008 | 0 | }] if body_idx == loop_continuing_idx => { |
2009 | | // Try both orderings of break-vs-backedge, because |
2010 | | // SPIR-V is symmetrical here, unlike WGSL `break if`. |
2011 | 0 | let break_if_cond = [true, false].into_iter().find_map(|true_breaks| { |
2012 | 0 | let (break_candidate, backedge_candidate) = if true_breaks { |
2013 | 0 | (true_target, false_target) |
2014 | | } else { |
2015 | 0 | (false_target, true_target) |
2016 | | }; |
2017 | | |
2018 | 0 | if break_candidate.merge_info |
2019 | 0 | != Some(MergeBlockInformation::LoopMerge) |
2020 | | { |
2021 | 0 | return None; |
2022 | 0 | } |
2023 | | |
2024 | | // HACK(eddyb) since Naga doesn't explicitly track |
2025 | | // backedges, this is checking for the outcome of |
2026 | | // `OpLoopMerge` below (even if it looks weird). |
2027 | 0 | let backedge_candidate_is_backedge = |
2028 | 0 | backedge_candidate.merge_info.is_none() |
2029 | 0 | && ctx.body_for_label.get(&backedge_candidate.label_id) |
2030 | 0 | == Some(&loop_body_idx); |
2031 | 0 | if !backedge_candidate_is_backedge { |
2032 | 0 | return None; |
2033 | 0 | } |
2034 | | |
2035 | 0 | Some(if true_breaks { |
2036 | 0 | condition |
2037 | | } else { |
2038 | 0 | ctx.expressions.append( |
2039 | 0 | crate::Expression::Unary { |
2040 | 0 | op: crate::UnaryOperator::LogicalNot, |
2041 | 0 | expr: condition, |
2042 | 0 | }, |
2043 | 0 | span, |
2044 | | ) |
2045 | | }) |
2046 | 0 | }); Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#8}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#8} |
2047 | | |
2048 | 0 | if let Some(break_if_cond) = break_if_cond { |
2049 | 0 | *break_if_slot = Some(break_if_cond); |
2050 | | |
2051 | | // This `OpBranchConditional` ends the "continuing" |
2052 | | // section of the loop body as normal, with the |
2053 | | // `break if` condition having been stashed above. |
2054 | 0 | break None; |
2055 | 0 | } |
2056 | | } |
2057 | 0 | _ => {} |
2058 | | } |
2059 | | |
2060 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2061 | 0 | ctx.blocks.insert(block_id, block); |
2062 | 0 | let body = &mut ctx.bodies[body_idx]; |
2063 | 0 | body.data.push(BodyFragment::BlockId(block_id)); |
2064 | | |
2065 | 0 | let same_target = true_target.label_id == false_target.label_id; |
2066 | | |
2067 | | // Start a body block for the `accept` branch. |
2068 | 0 | let accept = ctx.bodies.len(); |
2069 | 0 | let mut accept_block = Body::with_parent(body_idx); |
2070 | | |
2071 | | // If the `OpBranchConditional` target is somebody else's |
2072 | | // merge or continue block, then put a `Break` or `Continue` |
2073 | | // statement in this new body block. |
2074 | 0 | if let Some(info) = true_target.merge_info { |
2075 | 0 | merger( |
2076 | 0 | match same_target { |
2077 | 0 | true => &mut ctx.bodies[body_idx], |
2078 | 0 | false => &mut accept_block, |
2079 | | }, |
2080 | 0 | &info, |
2081 | | ) |
2082 | | } else { |
2083 | | // Note the body index for the block we're branching to. |
2084 | 0 | let prev = ctx.body_for_label.insert( |
2085 | 0 | true_target.label_id, |
2086 | 0 | match same_target { |
2087 | 0 | true => body_idx, |
2088 | 0 | false => accept, |
2089 | | }, |
2090 | | ); |
2091 | 0 | debug_assert!(prev.is_none()); |
2092 | | } |
2093 | | |
2094 | 0 | if same_target { |
2095 | 0 | return Ok(()); |
2096 | 0 | } |
2097 | | |
2098 | 0 | ctx.bodies.push(accept_block); |
2099 | | |
2100 | | // Handle the `reject` branch just like the `accept` block. |
2101 | 0 | let reject = ctx.bodies.len(); |
2102 | 0 | let mut reject_block = Body::with_parent(body_idx); |
2103 | | |
2104 | 0 | if let Some(info) = false_target.merge_info { |
2105 | 0 | merger(&mut reject_block, &info) |
2106 | | } else { |
2107 | 0 | let prev = ctx.body_for_label.insert(false_target.label_id, reject); |
2108 | 0 | debug_assert!(prev.is_none()); |
2109 | | } |
2110 | | |
2111 | 0 | ctx.bodies.push(reject_block); |
2112 | | |
2113 | 0 | let body = &mut ctx.bodies[body_idx]; |
2114 | 0 | body.data.push(BodyFragment::If { |
2115 | 0 | condition, |
2116 | 0 | accept, |
2117 | 0 | reject, |
2118 | 0 | }); |
2119 | | |
2120 | 0 | return Ok(()); |
2121 | | } |
2122 | | Op::Switch => { |
2123 | 0 | inst.expect_at_least(3)?; |
2124 | 0 | let selector = self.next()?; |
2125 | 0 | let default_id = self.next()?; |
2126 | | |
2127 | | // If the previous instruction was a `OpSelectionMerge` then we must |
2128 | | // promote the `MergeBlockInformation` to a `SwitchMerge` |
2129 | 0 | if let Some(merge) = selection_merge_block { |
2130 | 0 | ctx.mergers |
2131 | 0 | .insert(merge, MergeBlockInformation::SwitchMerge); |
2132 | 0 | } |
2133 | | |
2134 | 0 | let default = ctx.bodies.len(); |
2135 | 0 | ctx.bodies.push(Body::with_parent(body_idx)); |
2136 | 0 | ctx.body_for_label.entry(default_id).or_insert(default); |
2137 | | |
2138 | 0 | let selector_lexp = &self.lookup_expression[&selector]; |
2139 | 0 | let selector_lty = self.lookup_type.lookup(selector_lexp.type_id)?; |
2140 | 0 | let selector_handle = get_expr_handle!(selector, selector_lexp); |
2141 | 0 | let selector = match ctx.module.types[selector_lty.handle].inner { |
2142 | | crate::TypeInner::Scalar(crate::Scalar { |
2143 | | kind: crate::ScalarKind::Uint, |
2144 | | width: _, |
2145 | | }) => { |
2146 | | // IR expects a signed integer, so do a bitcast |
2147 | 0 | ctx.expressions.append( |
2148 | 0 | crate::Expression::As { |
2149 | 0 | kind: crate::ScalarKind::Sint, |
2150 | 0 | expr: selector_handle, |
2151 | 0 | convert: None, |
2152 | 0 | }, |
2153 | 0 | span, |
2154 | | ) |
2155 | | } |
2156 | | crate::TypeInner::Scalar(crate::Scalar { |
2157 | | kind: crate::ScalarKind::Sint, |
2158 | | width: _, |
2159 | 0 | }) => selector_handle, |
2160 | 0 | ref other => unimplemented!("Unexpected selector {:?}", other), |
2161 | | }; |
2162 | | |
2163 | | // Clear past switch cases to prevent them from entering this one |
2164 | 0 | self.switch_cases.clear(); |
2165 | | |
2166 | 0 | for _ in 0..(inst.wc - 3) / 2 { |
2167 | 0 | let literal = self.next()?; |
2168 | 0 | let target = self.next()?; |
2169 | | |
2170 | 0 | let case_body_idx = ctx.bodies.len(); |
2171 | | |
2172 | | // Check if any previous case already used this target block id, if so |
2173 | | // group them together to reorder them later so that no weird |
2174 | | // fallthrough cases happen. |
2175 | 0 | if let Some(&mut (_, ref mut literals)) = self.switch_cases.get_mut(&target) |
2176 | | { |
2177 | 0 | literals.push(literal as i32); |
2178 | 0 | continue; |
2179 | 0 | } |
2180 | | |
2181 | 0 | let mut body = Body::with_parent(body_idx); |
2182 | | |
2183 | 0 | if let Some(info) = ctx.mergers.get(&target) { |
2184 | 0 | merger(&mut body, info); |
2185 | 0 | } |
2186 | | |
2187 | 0 | ctx.bodies.push(body); |
2188 | 0 | ctx.body_for_label.entry(target).or_insert(case_body_idx); |
2189 | | |
2190 | | // Register this target block id as already having been processed and |
2191 | | // the respective body index assigned and the first case value |
2192 | 0 | self.switch_cases |
2193 | 0 | .insert(target, (case_body_idx, vec![literal as i32])); |
2194 | | } |
2195 | | |
2196 | | // Loop through the collected target blocks creating a new case for each |
2197 | | // literal pointing to it, only one case will have the true body and all the |
2198 | | // others will be empty fallthrough so that they all execute the same body |
2199 | | // without duplicating code. |
2200 | | // |
2201 | | // Since `switch_cases` is an indexmap the order of insertion is preserved |
2202 | | // this is needed because spir-v defines fallthrough order in the switch |
2203 | | // instruction. |
2204 | 0 | let mut cases = Vec::with_capacity((inst.wc as usize - 3) / 2); |
2205 | 0 | for &(case_body_idx, ref literals) in self.switch_cases.values() { |
2206 | 0 | let value = literals[0]; |
2207 | | |
2208 | 0 | for &literal in literals.iter().skip(1) { |
2209 | 0 | let empty_body_idx = ctx.bodies.len(); |
2210 | 0 | let body = Body::with_parent(body_idx); |
2211 | 0 |
2212 | 0 | ctx.bodies.push(body); |
2213 | 0 |
2214 | 0 | cases.push((literal, empty_body_idx)); |
2215 | 0 | } |
2216 | | |
2217 | 0 | cases.push((value, case_body_idx)); |
2218 | | } |
2219 | | |
2220 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2221 | | |
2222 | 0 | let body = &mut ctx.bodies[body_idx]; |
2223 | 0 | ctx.blocks.insert(block_id, block); |
2224 | | // Make sure the vector has space for at least two more allocations |
2225 | 0 | body.data.reserve(2); |
2226 | 0 | body.data.push(BodyFragment::BlockId(block_id)); |
2227 | 0 | body.data.push(BodyFragment::Switch { |
2228 | 0 | selector, |
2229 | 0 | cases, |
2230 | 0 | default, |
2231 | 0 | }); |
2232 | | |
2233 | 0 | return Ok(()); |
2234 | | } |
2235 | | Op::SelectionMerge => { |
2236 | 0 | inst.expect(3)?; |
2237 | 0 | let merge_block_id = self.next()?; |
2238 | | // TODO: Selection Control Mask |
2239 | 0 | let _selection_control = self.next()?; |
2240 | | |
2241 | | // Indicate that the merge block is a continuation of the |
2242 | | // current `Body`. |
2243 | 0 | ctx.body_for_label.entry(merge_block_id).or_insert(body_idx); |
2244 | | |
2245 | | // Let subsequent branches to the merge block know that |
2246 | | // they've reached the end of the selection construct. |
2247 | 0 | ctx.mergers |
2248 | 0 | .insert(merge_block_id, MergeBlockInformation::SelectionMerge); |
2249 | | |
2250 | 0 | selection_merge_block = Some(merge_block_id); |
2251 | | } |
2252 | | Op::LoopMerge => { |
2253 | 0 | inst.expect_at_least(4)?; |
2254 | 0 | let merge_block_id = self.next()?; |
2255 | 0 | let continuing = self.next()?; |
2256 | | |
2257 | | // TODO: Loop Control Parameters |
2258 | 0 | for _ in 0..inst.wc - 3 { |
2259 | 0 | self.next()?; |
2260 | | } |
2261 | | |
2262 | | // Indicate that the merge block is a continuation of the |
2263 | | // current `Body`. |
2264 | 0 | ctx.body_for_label.entry(merge_block_id).or_insert(body_idx); |
2265 | | // Let subsequent branches to the merge block know that |
2266 | | // they're `Break` statements. |
2267 | 0 | ctx.mergers |
2268 | 0 | .insert(merge_block_id, MergeBlockInformation::LoopMerge); |
2269 | | |
2270 | 0 | let loop_body_idx = ctx.bodies.len(); |
2271 | 0 | ctx.bodies.push(Body::with_parent(body_idx)); |
2272 | | |
2273 | 0 | let continue_idx = ctx.bodies.len(); |
2274 | | // The continue block inherits the scope of the loop body |
2275 | 0 | ctx.bodies.push(Body::with_parent(loop_body_idx)); |
2276 | 0 | ctx.body_for_label.entry(continuing).or_insert(continue_idx); |
2277 | | // Let subsequent branches to the continue block know that |
2278 | | // they're `Continue` statements. |
2279 | 0 | ctx.mergers |
2280 | 0 | .insert(continuing, MergeBlockInformation::LoopContinue); |
2281 | | |
2282 | | // The loop header always belongs to the loop body |
2283 | 0 | ctx.body_for_label.insert(block_id, loop_body_idx); |
2284 | | |
2285 | 0 | let parent_body = &mut ctx.bodies[body_idx]; |
2286 | 0 | parent_body.data.push(BodyFragment::Loop { |
2287 | 0 | body: loop_body_idx, |
2288 | 0 | continuing: continue_idx, |
2289 | 0 | break_if: None, |
2290 | 0 | }); |
2291 | 0 | body_idx = loop_body_idx; |
2292 | | } |
2293 | | Op::DPdxCoarse => { |
2294 | 0 | parse_expr_op!( |
2295 | 0 | crate::DerivativeAxis::X, |
2296 | 0 | crate::DerivativeControl::Coarse, |
2297 | | DERIVATIVE |
2298 | 0 | )?; |
2299 | | } |
2300 | | Op::DPdyCoarse => { |
2301 | 0 | parse_expr_op!( |
2302 | 0 | crate::DerivativeAxis::Y, |
2303 | 0 | crate::DerivativeControl::Coarse, |
2304 | | DERIVATIVE |
2305 | 0 | )?; |
2306 | | } |
2307 | | Op::FwidthCoarse => { |
2308 | 0 | parse_expr_op!( |
2309 | 0 | crate::DerivativeAxis::Width, |
2310 | 0 | crate::DerivativeControl::Coarse, |
2311 | | DERIVATIVE |
2312 | 0 | )?; |
2313 | | } |
2314 | | Op::DPdxFine => { |
2315 | 0 | parse_expr_op!( |
2316 | 0 | crate::DerivativeAxis::X, |
2317 | 0 | crate::DerivativeControl::Fine, |
2318 | | DERIVATIVE |
2319 | 0 | )?; |
2320 | | } |
2321 | | Op::DPdyFine => { |
2322 | 0 | parse_expr_op!( |
2323 | 0 | crate::DerivativeAxis::Y, |
2324 | 0 | crate::DerivativeControl::Fine, |
2325 | | DERIVATIVE |
2326 | 0 | )?; |
2327 | | } |
2328 | | Op::FwidthFine => { |
2329 | 0 | parse_expr_op!( |
2330 | 0 | crate::DerivativeAxis::Width, |
2331 | 0 | crate::DerivativeControl::Fine, |
2332 | | DERIVATIVE |
2333 | 0 | )?; |
2334 | | } |
2335 | | Op::DPdx => { |
2336 | 0 | parse_expr_op!( |
2337 | 0 | crate::DerivativeAxis::X, |
2338 | 0 | crate::DerivativeControl::None, |
2339 | | DERIVATIVE |
2340 | 0 | )?; |
2341 | | } |
2342 | | Op::DPdy => { |
2343 | 0 | parse_expr_op!( |
2344 | 0 | crate::DerivativeAxis::Y, |
2345 | 0 | crate::DerivativeControl::None, |
2346 | | DERIVATIVE |
2347 | 0 | )?; |
2348 | | } |
2349 | | Op::Fwidth => { |
2350 | 0 | parse_expr_op!( |
2351 | 0 | crate::DerivativeAxis::Width, |
2352 | 0 | crate::DerivativeControl::None, |
2353 | | DERIVATIVE |
2354 | 0 | )?; |
2355 | | } |
2356 | | Op::ArrayLength => { |
2357 | 0 | inst.expect(5)?; |
2358 | 0 | let result_type_id = self.next()?; |
2359 | 0 | let result_id = self.next()?; |
2360 | 0 | let structure_id = self.next()?; |
2361 | 0 | let member_index = self.next()?; |
2362 | | |
2363 | | // We're assuming that the validation pass, if it's run, will catch if the |
2364 | | // wrong types or parameters are supplied here. |
2365 | | |
2366 | 0 | let structure_ptr = self.lookup_expression.lookup(structure_id)?; |
2367 | 0 | let structure_handle = get_expr_handle!(structure_id, structure_ptr); |
2368 | | |
2369 | 0 | let member_ptr = ctx.expressions.append( |
2370 | 0 | crate::Expression::AccessIndex { |
2371 | 0 | base: structure_handle, |
2372 | 0 | index: member_index, |
2373 | 0 | }, |
2374 | 0 | span, |
2375 | | ); |
2376 | | |
2377 | 0 | let length = ctx |
2378 | 0 | .expressions |
2379 | 0 | .append(crate::Expression::ArrayLength(member_ptr), span); |
2380 | | |
2381 | 0 | self.lookup_expression.insert( |
2382 | 0 | result_id, |
2383 | 0 | LookupExpression { |
2384 | 0 | handle: length, |
2385 | 0 | type_id: result_type_id, |
2386 | 0 | block_id, |
2387 | 0 | }, |
2388 | | ); |
2389 | | } |
2390 | | Op::CopyMemory => { |
2391 | 0 | inst.expect_at_least(3)?; |
2392 | 0 | let target_id = self.next()?; |
2393 | 0 | let source_id = self.next()?; |
2394 | 0 | let _memory_access = if inst.wc != 3 { |
2395 | 0 | inst.expect(4)?; |
2396 | 0 | spirv::MemoryAccess::from_bits(self.next()?) |
2397 | 0 | .ok_or(Error::InvalidParameter(Op::CopyMemory))? |
2398 | | } else { |
2399 | 0 | spirv::MemoryAccess::NONE |
2400 | | }; |
2401 | | |
2402 | | // TODO: check if the source and target types are the same? |
2403 | 0 | let target = self.lookup_expression.lookup(target_id)?; |
2404 | 0 | let target_handle = get_expr_handle!(target_id, target); |
2405 | 0 | let source = self.lookup_expression.lookup(source_id)?; |
2406 | 0 | let source_handle = get_expr_handle!(source_id, source); |
2407 | | |
2408 | | // This operation is practically the same as loading and then storing, I think. |
2409 | 0 | let value_expr = ctx.expressions.append( |
2410 | 0 | crate::Expression::Load { |
2411 | 0 | pointer: source_handle, |
2412 | 0 | }, |
2413 | 0 | span, |
2414 | | ); |
2415 | | |
2416 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2417 | 0 | block.push( |
2418 | 0 | crate::Statement::Store { |
2419 | 0 | pointer: target_handle, |
2420 | 0 | value: value_expr, |
2421 | 0 | }, |
2422 | 0 | span, |
2423 | | ); |
2424 | | |
2425 | 0 | emitter.start(ctx.expressions); |
2426 | | } |
2427 | | Op::ControlBarrier => { |
2428 | 0 | inst.expect(4)?; |
2429 | 0 | let exec_scope_id = self.next()?; |
2430 | 0 | let _mem_scope_raw = self.next()?; |
2431 | 0 | let semantics_id = self.next()?; |
2432 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; |
2433 | 0 | let semantics_const = self.lookup_constant.lookup(semantics_id)?; |
2434 | | |
2435 | 0 | let exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) |
2436 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; |
2437 | 0 | let semantics = resolve_constant(ctx.gctx(), &semantics_const.inner) |
2438 | 0 | .ok_or(Error::InvalidBarrierMemorySemantics(semantics_id))?; |
2439 | | |
2440 | 0 | if exec_scope == spirv::Scope::Workgroup as u32 |
2441 | 0 | || exec_scope == spirv::Scope::Subgroup as u32 |
2442 | 0 | { |
2443 | 0 | let mut flags = crate::Barrier::empty(); |
2444 | 0 | flags.set( |
2445 | 0 | crate::Barrier::STORAGE, |
2446 | 0 | semantics & spirv::MemorySemantics::UNIFORM_MEMORY.bits() != 0, |
2447 | 0 | ); |
2448 | 0 | flags.set( |
2449 | 0 | crate::Barrier::WORK_GROUP, |
2450 | 0 | semantics & (spirv::MemorySemantics::WORKGROUP_MEMORY).bits() != 0, |
2451 | 0 | ); |
2452 | 0 | flags.set( |
2453 | 0 | crate::Barrier::SUB_GROUP, |
2454 | 0 | semantics & spirv::MemorySemantics::SUBGROUP_MEMORY.bits() != 0, |
2455 | 0 | ); |
2456 | 0 | flags.set( |
2457 | 0 | crate::Barrier::TEXTURE, |
2458 | 0 | semantics & spirv::MemorySemantics::IMAGE_MEMORY.bits() != 0, |
2459 | 0 | ); |
2460 | 0 |
2461 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2462 | 0 | block.push(crate::Statement::ControlBarrier(flags), span); |
2463 | 0 | emitter.start(ctx.expressions); |
2464 | 0 | } else { |
2465 | 0 | log::warn!("Unsupported barrier execution scope: {exec_scope}"); |
2466 | | } |
2467 | | } |
2468 | | Op::MemoryBarrier => { |
2469 | 0 | inst.expect(3)?; |
2470 | 0 | let mem_scope_id = self.next()?; |
2471 | 0 | let semantics_id = self.next()?; |
2472 | 0 | let mem_scope_const = self.lookup_constant.lookup(mem_scope_id)?; |
2473 | 0 | let semantics_const = self.lookup_constant.lookup(semantics_id)?; |
2474 | | |
2475 | 0 | let mem_scope = resolve_constant(ctx.gctx(), &mem_scope_const.inner) |
2476 | 0 | .ok_or(Error::InvalidBarrierScope(mem_scope_id))?; |
2477 | 0 | let semantics = resolve_constant(ctx.gctx(), &semantics_const.inner) |
2478 | 0 | .ok_or(Error::InvalidBarrierMemorySemantics(semantics_id))?; |
2479 | | |
2480 | 0 | let mut flags = if mem_scope == spirv::Scope::Device as u32 { |
2481 | 0 | crate::Barrier::STORAGE |
2482 | 0 | } else if mem_scope == spirv::Scope::Workgroup as u32 { |
2483 | 0 | crate::Barrier::WORK_GROUP |
2484 | 0 | } else if mem_scope == spirv::Scope::Subgroup as u32 { |
2485 | 0 | crate::Barrier::SUB_GROUP |
2486 | | } else { |
2487 | 0 | crate::Barrier::empty() |
2488 | | }; |
2489 | 0 | flags.set( |
2490 | | crate::Barrier::STORAGE, |
2491 | 0 | semantics & spirv::MemorySemantics::UNIFORM_MEMORY.bits() != 0, |
2492 | | ); |
2493 | 0 | flags.set( |
2494 | | crate::Barrier::WORK_GROUP, |
2495 | 0 | semantics & (spirv::MemorySemantics::WORKGROUP_MEMORY).bits() != 0, |
2496 | | ); |
2497 | 0 | flags.set( |
2498 | | crate::Barrier::SUB_GROUP, |
2499 | 0 | semantics & spirv::MemorySemantics::SUBGROUP_MEMORY.bits() != 0, |
2500 | | ); |
2501 | 0 | flags.set( |
2502 | | crate::Barrier::TEXTURE, |
2503 | 0 | semantics & spirv::MemorySemantics::IMAGE_MEMORY.bits() != 0, |
2504 | | ); |
2505 | | |
2506 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2507 | 0 | block.push(crate::Statement::MemoryBarrier(flags), span); |
2508 | 0 | emitter.start(ctx.expressions); |
2509 | | } |
2510 | | Op::CopyObject => { |
2511 | 0 | inst.expect(4)?; |
2512 | 0 | let result_type_id = self.next()?; |
2513 | 0 | let result_id = self.next()?; |
2514 | 0 | let operand_id = self.next()?; |
2515 | | |
2516 | 0 | let lookup = self.lookup_expression.lookup(operand_id)?; |
2517 | 0 | let handle = get_expr_handle!(operand_id, lookup); |
2518 | | |
2519 | 0 | self.lookup_expression.insert( |
2520 | 0 | result_id, |
2521 | 0 | LookupExpression { |
2522 | 0 | handle, |
2523 | 0 | type_id: result_type_id, |
2524 | 0 | block_id, |
2525 | 0 | }, |
2526 | | ); |
2527 | | } |
2528 | | Op::GroupNonUniformBallot => { |
2529 | 0 | inst.expect(5)?; |
2530 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2531 | 0 | let result_type_id = self.next()?; |
2532 | 0 | let result_id = self.next()?; |
2533 | 0 | let exec_scope_id = self.next()?; |
2534 | 0 | let predicate_id = self.next()?; |
2535 | | |
2536 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; |
2537 | 0 | let _exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) |
2538 | 0 | .filter(|exec_scope| *exec_scope == spirv::Scope::Subgroup as u32) Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#9}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#9} |
2539 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; |
2540 | | |
2541 | 0 | let predicate = if self |
2542 | 0 | .lookup_constant |
2543 | 0 | .lookup(predicate_id) |
2544 | 0 | .ok() |
2545 | 0 | .filter(|predicate_const| match predicate_const.inner { |
2546 | 0 | Constant::Constant(constant) => matches!( |
2547 | 0 | ctx.gctx().global_expressions[ctx.gctx().constants[constant].init], |
2548 | | crate::Expression::Literal(crate::Literal::Bool(true)), |
2549 | | ), |
2550 | 0 | Constant::Override(_) => false, |
2551 | 0 | }) Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#10}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#10} |
2552 | 0 | .is_some() |
2553 | | { |
2554 | 0 | None |
2555 | | } else { |
2556 | 0 | let predicate_lookup = self.lookup_expression.lookup(predicate_id)?; |
2557 | 0 | let predicate_handle = get_expr_handle!(predicate_id, predicate_lookup); |
2558 | 0 | Some(predicate_handle) |
2559 | | }; |
2560 | | |
2561 | 0 | let result_handle = ctx |
2562 | 0 | .expressions |
2563 | 0 | .append(crate::Expression::SubgroupBallotResult, span); |
2564 | 0 | self.lookup_expression.insert( |
2565 | 0 | result_id, |
2566 | 0 | LookupExpression { |
2567 | 0 | handle: result_handle, |
2568 | 0 | type_id: result_type_id, |
2569 | 0 | block_id, |
2570 | 0 | }, |
2571 | | ); |
2572 | | |
2573 | 0 | block.push( |
2574 | 0 | crate::Statement::SubgroupBallot { |
2575 | 0 | result: result_handle, |
2576 | 0 | predicate, |
2577 | 0 | }, |
2578 | 0 | span, |
2579 | | ); |
2580 | 0 | emitter.start(ctx.expressions); |
2581 | | } |
2582 | | Op::GroupNonUniformAll |
2583 | | | Op::GroupNonUniformAny |
2584 | | | Op::GroupNonUniformIAdd |
2585 | | | Op::GroupNonUniformFAdd |
2586 | | | Op::GroupNonUniformIMul |
2587 | | | Op::GroupNonUniformFMul |
2588 | | | Op::GroupNonUniformSMax |
2589 | | | Op::GroupNonUniformUMax |
2590 | | | Op::GroupNonUniformFMax |
2591 | | | Op::GroupNonUniformSMin |
2592 | | | Op::GroupNonUniformUMin |
2593 | | | Op::GroupNonUniformFMin |
2594 | | | Op::GroupNonUniformBitwiseAnd |
2595 | | | Op::GroupNonUniformBitwiseOr |
2596 | | | Op::GroupNonUniformBitwiseXor |
2597 | | | Op::GroupNonUniformLogicalAnd |
2598 | | | Op::GroupNonUniformLogicalOr |
2599 | | | Op::GroupNonUniformLogicalXor => { |
2600 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2601 | 0 | inst.expect( |
2602 | 0 | if matches!(inst.op, Op::GroupNonUniformAll | Op::GroupNonUniformAny) { |
2603 | 0 | 5 |
2604 | | } else { |
2605 | 0 | 6 |
2606 | | }, |
2607 | 0 | )?; |
2608 | 0 | let result_type_id = self.next()?; |
2609 | 0 | let result_id = self.next()?; |
2610 | 0 | let exec_scope_id = self.next()?; |
2611 | 0 | let collective_op_id = match inst.op { |
2612 | | Op::GroupNonUniformAll | Op::GroupNonUniformAny => { |
2613 | 0 | crate::CollectiveOperation::Reduce |
2614 | | } |
2615 | | _ => { |
2616 | 0 | let group_op_id = self.next()?; |
2617 | 0 | match spirv::GroupOperation::from_u32(group_op_id) { |
2618 | | Some(spirv::GroupOperation::Reduce) => { |
2619 | 0 | crate::CollectiveOperation::Reduce |
2620 | | } |
2621 | | Some(spirv::GroupOperation::InclusiveScan) => { |
2622 | 0 | crate::CollectiveOperation::InclusiveScan |
2623 | | } |
2624 | | Some(spirv::GroupOperation::ExclusiveScan) => { |
2625 | 0 | crate::CollectiveOperation::ExclusiveScan |
2626 | | } |
2627 | 0 | _ => return Err(Error::UnsupportedGroupOperation(group_op_id)), |
2628 | | } |
2629 | | } |
2630 | | }; |
2631 | 0 | let argument_id = self.next()?; |
2632 | | |
2633 | 0 | let argument_lookup = self.lookup_expression.lookup(argument_id)?; |
2634 | 0 | let argument_handle = get_expr_handle!(argument_id, argument_lookup); |
2635 | | |
2636 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; |
2637 | 0 | let _exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) |
2638 | 0 | .filter(|exec_scope| *exec_scope == spirv::Scope::Subgroup as u32) Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#11}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#11} |
2639 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; |
2640 | | |
2641 | 0 | let op_id = match inst.op { |
2642 | 0 | Op::GroupNonUniformAll => crate::SubgroupOperation::All, |
2643 | 0 | Op::GroupNonUniformAny => crate::SubgroupOperation::Any, |
2644 | | Op::GroupNonUniformIAdd | Op::GroupNonUniformFAdd => { |
2645 | 0 | crate::SubgroupOperation::Add |
2646 | | } |
2647 | | Op::GroupNonUniformIMul | Op::GroupNonUniformFMul => { |
2648 | 0 | crate::SubgroupOperation::Mul |
2649 | | } |
2650 | | Op::GroupNonUniformSMax |
2651 | | | Op::GroupNonUniformUMax |
2652 | 0 | | Op::GroupNonUniformFMax => crate::SubgroupOperation::Max, |
2653 | | Op::GroupNonUniformSMin |
2654 | | | Op::GroupNonUniformUMin |
2655 | 0 | | Op::GroupNonUniformFMin => crate::SubgroupOperation::Min, |
2656 | | Op::GroupNonUniformBitwiseAnd | Op::GroupNonUniformLogicalAnd => { |
2657 | 0 | crate::SubgroupOperation::And |
2658 | | } |
2659 | | Op::GroupNonUniformBitwiseOr | Op::GroupNonUniformLogicalOr => { |
2660 | 0 | crate::SubgroupOperation::Or |
2661 | | } |
2662 | | Op::GroupNonUniformBitwiseXor | Op::GroupNonUniformLogicalXor => { |
2663 | 0 | crate::SubgroupOperation::Xor |
2664 | | } |
2665 | 0 | _ => unreachable!(), |
2666 | | }; |
2667 | | |
2668 | 0 | let result_type = self.lookup_type.lookup(result_type_id)?; |
2669 | | |
2670 | 0 | let result_handle = ctx.expressions.append( |
2671 | 0 | crate::Expression::SubgroupOperationResult { |
2672 | 0 | ty: result_type.handle, |
2673 | 0 | }, |
2674 | 0 | span, |
2675 | | ); |
2676 | 0 | self.lookup_expression.insert( |
2677 | 0 | result_id, |
2678 | 0 | LookupExpression { |
2679 | 0 | handle: result_handle, |
2680 | 0 | type_id: result_type_id, |
2681 | 0 | block_id, |
2682 | 0 | }, |
2683 | | ); |
2684 | | |
2685 | 0 | block.push( |
2686 | 0 | crate::Statement::SubgroupCollectiveOperation { |
2687 | 0 | result: result_handle, |
2688 | 0 | op: op_id, |
2689 | 0 | collective_op: collective_op_id, |
2690 | 0 | argument: argument_handle, |
2691 | 0 | }, |
2692 | 0 | span, |
2693 | | ); |
2694 | 0 | emitter.start(ctx.expressions); |
2695 | | } |
2696 | | Op::GroupNonUniformBroadcastFirst |
2697 | | | Op::GroupNonUniformBroadcast |
2698 | | | Op::GroupNonUniformShuffle |
2699 | | | Op::GroupNonUniformShuffleDown |
2700 | | | Op::GroupNonUniformShuffleUp |
2701 | | | Op::GroupNonUniformShuffleXor |
2702 | | | Op::GroupNonUniformQuadBroadcast => { |
2703 | 0 | inst.expect(if matches!(inst.op, Op::GroupNonUniformBroadcastFirst) { |
2704 | 0 | 5 |
2705 | | } else { |
2706 | 0 | 6 |
2707 | 0 | })?; |
2708 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2709 | 0 | let result_type_id = self.next()?; |
2710 | 0 | let result_id = self.next()?; |
2711 | 0 | let exec_scope_id = self.next()?; |
2712 | 0 | let argument_id = self.next()?; |
2713 | | |
2714 | 0 | let argument_lookup = self.lookup_expression.lookup(argument_id)?; |
2715 | 0 | let argument_handle = get_expr_handle!(argument_id, argument_lookup); |
2716 | | |
2717 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; |
2718 | 0 | let _exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) |
2719 | 0 | .filter(|exec_scope| *exec_scope == spirv::Scope::Subgroup as u32) Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#12}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#12} |
2720 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; |
2721 | | |
2722 | 0 | let mode = if matches!(inst.op, Op::GroupNonUniformBroadcastFirst) { |
2723 | 0 | crate::GatherMode::BroadcastFirst |
2724 | | } else { |
2725 | 0 | let index_id = self.next()?; |
2726 | 0 | let index_lookup = self.lookup_expression.lookup(index_id)?; |
2727 | 0 | let index_handle = get_expr_handle!(index_id, index_lookup); |
2728 | 0 | match inst.op { |
2729 | | Op::GroupNonUniformBroadcast => { |
2730 | 0 | crate::GatherMode::Broadcast(index_handle) |
2731 | | } |
2732 | 0 | Op::GroupNonUniformShuffle => crate::GatherMode::Shuffle(index_handle), |
2733 | | Op::GroupNonUniformShuffleDown => { |
2734 | 0 | crate::GatherMode::ShuffleDown(index_handle) |
2735 | | } |
2736 | | Op::GroupNonUniformShuffleUp => { |
2737 | 0 | crate::GatherMode::ShuffleUp(index_handle) |
2738 | | } |
2739 | | Op::GroupNonUniformShuffleXor => { |
2740 | 0 | crate::GatherMode::ShuffleXor(index_handle) |
2741 | | } |
2742 | | Op::GroupNonUniformQuadBroadcast => { |
2743 | 0 | crate::GatherMode::QuadBroadcast(index_handle) |
2744 | | } |
2745 | 0 | _ => unreachable!(), |
2746 | | } |
2747 | | }; |
2748 | | |
2749 | 0 | let result_type = self.lookup_type.lookup(result_type_id)?; |
2750 | | |
2751 | 0 | let result_handle = ctx.expressions.append( |
2752 | 0 | crate::Expression::SubgroupOperationResult { |
2753 | 0 | ty: result_type.handle, |
2754 | 0 | }, |
2755 | 0 | span, |
2756 | | ); |
2757 | 0 | self.lookup_expression.insert( |
2758 | 0 | result_id, |
2759 | 0 | LookupExpression { |
2760 | 0 | handle: result_handle, |
2761 | 0 | type_id: result_type_id, |
2762 | 0 | block_id, |
2763 | 0 | }, |
2764 | | ); |
2765 | | |
2766 | 0 | block.push( |
2767 | 0 | crate::Statement::SubgroupGather { |
2768 | 0 | result: result_handle, |
2769 | 0 | mode, |
2770 | 0 | argument: argument_handle, |
2771 | 0 | }, |
2772 | 0 | span, |
2773 | | ); |
2774 | 0 | emitter.start(ctx.expressions); |
2775 | | } |
2776 | | Op::GroupNonUniformQuadSwap => { |
2777 | 0 | inst.expect(6)?; |
2778 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2779 | 0 | let result_type_id = self.next()?; |
2780 | 0 | let result_id = self.next()?; |
2781 | 0 | let exec_scope_id = self.next()?; |
2782 | 0 | let argument_id = self.next()?; |
2783 | 0 | let direction_id = self.next()?; |
2784 | | |
2785 | 0 | let argument_lookup = self.lookup_expression.lookup(argument_id)?; |
2786 | 0 | let argument_handle = get_expr_handle!(argument_id, argument_lookup); |
2787 | | |
2788 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; |
2789 | 0 | let _exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) |
2790 | 0 | .filter(|exec_scope| *exec_scope == spirv::Scope::Subgroup as u32) Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block::{closure#13}Unexecuted instantiation: <naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block::{closure#13} |
2791 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; |
2792 | | |
2793 | 0 | let direction_const = self.lookup_constant.lookup(direction_id)?; |
2794 | 0 | let direction_const = resolve_constant(ctx.gctx(), &direction_const.inner) |
2795 | 0 | .ok_or(Error::InvalidOperand)?; |
2796 | 0 | let direction = match direction_const { |
2797 | 0 | 0 => crate::Direction::X, |
2798 | 0 | 1 => crate::Direction::Y, |
2799 | 0 | 2 => crate::Direction::Diagonal, |
2800 | 0 | _ => unreachable!(), |
2801 | | }; |
2802 | | |
2803 | 0 | let result_type = self.lookup_type.lookup(result_type_id)?; |
2804 | | |
2805 | 0 | let result_handle = ctx.expressions.append( |
2806 | 0 | crate::Expression::SubgroupOperationResult { |
2807 | 0 | ty: result_type.handle, |
2808 | 0 | }, |
2809 | 0 | span, |
2810 | | ); |
2811 | 0 | self.lookup_expression.insert( |
2812 | 0 | result_id, |
2813 | 0 | LookupExpression { |
2814 | 0 | handle: result_handle, |
2815 | 0 | type_id: result_type_id, |
2816 | 0 | block_id, |
2817 | 0 | }, |
2818 | | ); |
2819 | | |
2820 | 0 | block.push( |
2821 | 0 | crate::Statement::SubgroupGather { |
2822 | 0 | mode: crate::GatherMode::QuadSwap(direction), |
2823 | 0 | result: result_handle, |
2824 | 0 | argument: argument_handle, |
2825 | 0 | }, |
2826 | 0 | span, |
2827 | | ); |
2828 | 0 | emitter.start(ctx.expressions); |
2829 | | } |
2830 | | Op::AtomicLoad => { |
2831 | 0 | inst.expect(6)?; |
2832 | 0 | let start = self.data_offset; |
2833 | 0 | let result_type_id = self.next()?; |
2834 | 0 | let result_id = self.next()?; |
2835 | 0 | let pointer_id = self.next()?; |
2836 | 0 | let _scope_id = self.next()?; |
2837 | 0 | let _memory_semantics_id = self.next()?; |
2838 | 0 | let span = self.span_from_with_op(start); |
2839 | | |
2840 | 0 | log::trace!("\t\t\tlooking up expr {pointer_id:?}"); |
2841 | 0 | let p_lexp_handle = |
2842 | 0 | get_expr_handle!(pointer_id, self.lookup_expression.lookup(pointer_id)?); |
2843 | | |
2844 | | // Create an expression for our result |
2845 | 0 | let expr = crate::Expression::Load { |
2846 | 0 | pointer: p_lexp_handle, |
2847 | 0 | }; |
2848 | 0 | let handle = ctx.expressions.append(expr, span); |
2849 | 0 | self.lookup_expression.insert( |
2850 | 0 | result_id, |
2851 | 0 | LookupExpression { |
2852 | 0 | handle, |
2853 | 0 | type_id: result_type_id, |
2854 | 0 | block_id, |
2855 | 0 | }, |
2856 | | ); |
2857 | | |
2858 | | // Store any associated global variables so we can upgrade their types later |
2859 | 0 | self.record_atomic_access(ctx, p_lexp_handle)?; |
2860 | | } |
2861 | | Op::AtomicStore => { |
2862 | 0 | inst.expect(5)?; |
2863 | 0 | let start = self.data_offset; |
2864 | 0 | let pointer_id = self.next()?; |
2865 | 0 | let _scope_id = self.next()?; |
2866 | 0 | let _memory_semantics_id = self.next()?; |
2867 | 0 | let value_id = self.next()?; |
2868 | 0 | let span = self.span_from_with_op(start); |
2869 | | |
2870 | 0 | log::trace!("\t\t\tlooking up pointer expr {pointer_id:?}"); |
2871 | 0 | let p_lexp_handle = |
2872 | 0 | get_expr_handle!(pointer_id, self.lookup_expression.lookup(pointer_id)?); |
2873 | | |
2874 | 0 | log::trace!("\t\t\tlooking up value expr {pointer_id:?}"); |
2875 | 0 | let v_lexp_handle = |
2876 | 0 | get_expr_handle!(value_id, self.lookup_expression.lookup(value_id)?); |
2877 | | |
2878 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2879 | | // Create a statement for the op itself |
2880 | 0 | let stmt = crate::Statement::Store { |
2881 | 0 | pointer: p_lexp_handle, |
2882 | 0 | value: v_lexp_handle, |
2883 | 0 | }; |
2884 | 0 | block.push(stmt, span); |
2885 | 0 | emitter.start(ctx.expressions); |
2886 | | |
2887 | | // Store any associated global variables so we can upgrade their types later |
2888 | 0 | self.record_atomic_access(ctx, p_lexp_handle)?; |
2889 | | } |
2890 | | Op::AtomicIIncrement | Op::AtomicIDecrement => { |
2891 | 0 | inst.expect(6)?; |
2892 | 0 | let start = self.data_offset; |
2893 | 0 | let result_type_id = self.next()?; |
2894 | 0 | let result_id = self.next()?; |
2895 | 0 | let pointer_id = self.next()?; |
2896 | 0 | let _scope_id = self.next()?; |
2897 | 0 | let _memory_semantics_id = self.next()?; |
2898 | 0 | let span = self.span_from_with_op(start); |
2899 | | |
2900 | 0 | let (p_exp_h, p_base_ty_h) = self.get_exp_and_base_ty_handles( |
2901 | 0 | pointer_id, |
2902 | 0 | ctx, |
2903 | 0 | &mut emitter, |
2904 | 0 | &mut block, |
2905 | 0 | body_idx, |
2906 | 0 | )?; |
2907 | | |
2908 | 0 | block.extend(emitter.finish(ctx.expressions)); |
2909 | | // Create an expression for our result |
2910 | 0 | let r_lexp_handle = { |
2911 | 0 | let expr = crate::Expression::AtomicResult { |
2912 | 0 | ty: p_base_ty_h, |
2913 | 0 | comparison: false, |
2914 | 0 | }; |
2915 | 0 | let handle = ctx.expressions.append(expr, span); |
2916 | 0 | self.lookup_expression.insert( |
2917 | 0 | result_id, |
2918 | 0 | LookupExpression { |
2919 | 0 | handle, |
2920 | 0 | type_id: result_type_id, |
2921 | 0 | block_id, |
2922 | 0 | }, |
2923 | | ); |
2924 | 0 | handle |
2925 | | }; |
2926 | 0 | emitter.start(ctx.expressions); |
2927 | | |
2928 | | // Create a literal "1" to use as our value |
2929 | 0 | let one_lexp_handle = make_index_literal( |
2930 | 0 | ctx, |
2931 | | 1, |
2932 | 0 | &mut block, |
2933 | 0 | &mut emitter, |
2934 | 0 | p_base_ty_h, |
2935 | 0 | result_type_id, |
2936 | 0 | span, |
2937 | 0 | )?; |
2938 | | |
2939 | | // Create a statement for the op itself |
2940 | 0 | let stmt = crate::Statement::Atomic { |
2941 | 0 | pointer: p_exp_h, |
2942 | 0 | fun: match inst.op { |
2943 | 0 | Op::AtomicIIncrement => crate::AtomicFunction::Add, |
2944 | 0 | _ => crate::AtomicFunction::Subtract, |
2945 | | }, |
2946 | 0 | value: one_lexp_handle, |
2947 | 0 | result: Some(r_lexp_handle), |
2948 | | }; |
2949 | 0 | block.push(stmt, span); |
2950 | | |
2951 | | // Store any associated global variables so we can upgrade their types later |
2952 | 0 | self.record_atomic_access(ctx, p_exp_h)?; |
2953 | | } |
2954 | | Op::AtomicCompareExchange => { |
2955 | 0 | inst.expect(9)?; |
2956 | | |
2957 | 0 | let start = self.data_offset; |
2958 | 0 | let span = self.span_from_with_op(start); |
2959 | 0 | let result_type_id = self.next()?; |
2960 | 0 | let result_id = self.next()?; |
2961 | 0 | let pointer_id = self.next()?; |
2962 | 0 | let _memory_scope_id = self.next()?; |
2963 | 0 | let _equal_memory_semantics_id = self.next()?; |
2964 | 0 | let _unequal_memory_semantics_id = self.next()?; |
2965 | 0 | let value_id = self.next()?; |
2966 | 0 | let comparator_id = self.next()?; |
2967 | | |
2968 | 0 | let (p_exp_h, p_base_ty_h) = self.get_exp_and_base_ty_handles( |
2969 | 0 | pointer_id, |
2970 | 0 | ctx, |
2971 | 0 | &mut emitter, |
2972 | 0 | &mut block, |
2973 | 0 | body_idx, |
2974 | 0 | )?; |
2975 | | |
2976 | 0 | log::trace!("\t\t\tlooking up value expr {value_id:?}"); |
2977 | 0 | let v_lexp_handle = |
2978 | 0 | get_expr_handle!(value_id, self.lookup_expression.lookup(value_id)?); |
2979 | | |
2980 | 0 | log::trace!("\t\t\tlooking up comparator expr {value_id:?}"); |
2981 | 0 | let c_lexp_handle = get_expr_handle!( |
2982 | 0 | comparator_id, |
2983 | 0 | self.lookup_expression.lookup(comparator_id)? |
2984 | | ); |
2985 | | |
2986 | | // We know from the SPIR-V spec that the result type must be an integer |
2987 | | // scalar, and we'll need the type itself to get a handle to the atomic |
2988 | | // result struct. |
2989 | 0 | let crate::TypeInner::Scalar(scalar) = ctx.module.types[p_base_ty_h].inner |
2990 | | else { |
2991 | 0 | return Err( |
2992 | 0 | crate::front::atomic_upgrade::Error::CompareExchangeNonScalarBaseType |
2993 | 0 | .into(), |
2994 | 0 | ); |
2995 | | }; |
2996 | | |
2997 | | // Get a handle to the atomic result struct type. |
2998 | 0 | let atomic_result_struct_ty_h = ctx.module.generate_predeclared_type( |
2999 | 0 | crate::PredeclaredType::AtomicCompareExchangeWeakResult(scalar), |
3000 | | ); |
3001 | | |
3002 | 0 | block.extend(emitter.finish(ctx.expressions)); |
3003 | | |
3004 | | // Create an expression for our atomic result |
3005 | 0 | let atomic_lexp_handle = { |
3006 | 0 | let expr = crate::Expression::AtomicResult { |
3007 | 0 | ty: atomic_result_struct_ty_h, |
3008 | 0 | comparison: true, |
3009 | 0 | }; |
3010 | 0 | ctx.expressions.append(expr, span) |
3011 | | }; |
3012 | | |
3013 | | // Create an dot accessor to extract the value from the |
3014 | | // result struct __atomic_compare_exchange_result<T> and use that |
3015 | | // as the expression for the result_id |
3016 | 0 | { |
3017 | 0 | let expr = crate::Expression::AccessIndex { |
3018 | 0 | base: atomic_lexp_handle, |
3019 | 0 | index: 0, |
3020 | 0 | }; |
3021 | 0 | let handle = ctx.expressions.append(expr, span); |
3022 | 0 | // Use this dot accessor as the result id's expression |
3023 | 0 | let _ = self.lookup_expression.insert( |
3024 | 0 | result_id, |
3025 | 0 | LookupExpression { |
3026 | 0 | handle, |
3027 | 0 | type_id: result_type_id, |
3028 | 0 | block_id, |
3029 | 0 | }, |
3030 | 0 | ); |
3031 | 0 | } |
3032 | | |
3033 | 0 | emitter.start(ctx.expressions); |
3034 | | |
3035 | | // Create a statement for the op itself |
3036 | 0 | let stmt = crate::Statement::Atomic { |
3037 | 0 | pointer: p_exp_h, |
3038 | 0 | fun: crate::AtomicFunction::Exchange { |
3039 | 0 | compare: Some(c_lexp_handle), |
3040 | 0 | }, |
3041 | 0 | value: v_lexp_handle, |
3042 | 0 | result: Some(atomic_lexp_handle), |
3043 | 0 | }; |
3044 | 0 | block.push(stmt, span); |
3045 | | |
3046 | | // Store any associated global variables so we can upgrade their types later |
3047 | 0 | self.record_atomic_access(ctx, p_exp_h)?; |
3048 | | } |
3049 | | Op::AtomicExchange |
3050 | | | Op::AtomicIAdd |
3051 | | | Op::AtomicISub |
3052 | | | Op::AtomicSMin |
3053 | | | Op::AtomicUMin |
3054 | | | Op::AtomicSMax |
3055 | | | Op::AtomicUMax |
3056 | | | Op::AtomicAnd |
3057 | | | Op::AtomicOr |
3058 | | | Op::AtomicXor |
3059 | 0 | | Op::AtomicFAddEXT => self.parse_atomic_expr_with_value( |
3060 | 0 | inst, |
3061 | 0 | &mut emitter, |
3062 | 0 | ctx, |
3063 | 0 | &mut block, |
3064 | 0 | block_id, |
3065 | 0 | body_idx, |
3066 | 0 | match inst.op { |
3067 | 0 | Op::AtomicExchange => crate::AtomicFunction::Exchange { compare: None }, |
3068 | 0 | Op::AtomicIAdd | Op::AtomicFAddEXT => crate::AtomicFunction::Add, |
3069 | 0 | Op::AtomicISub => crate::AtomicFunction::Subtract, |
3070 | 0 | Op::AtomicSMin => crate::AtomicFunction::Min, |
3071 | 0 | Op::AtomicUMin => crate::AtomicFunction::Min, |
3072 | 0 | Op::AtomicSMax => crate::AtomicFunction::Max, |
3073 | 0 | Op::AtomicUMax => crate::AtomicFunction::Max, |
3074 | 0 | Op::AtomicAnd => crate::AtomicFunction::And, |
3075 | 0 | Op::AtomicOr => crate::AtomicFunction::InclusiveOr, |
3076 | 0 | Op::AtomicXor => crate::AtomicFunction::ExclusiveOr, |
3077 | 0 | _ => unreachable!(), |
3078 | | }, |
3079 | 0 | )?, |
3080 | | |
3081 | | _ => { |
3082 | 0 | return Err(Error::UnsupportedInstruction(self.state, inst.op)); |
3083 | | } |
3084 | | } |
3085 | | }; |
3086 | | |
3087 | 0 | block.extend(emitter.finish(ctx.expressions)); |
3088 | 0 | if let Some(stmt) = terminator { |
3089 | 0 | block.push(stmt, crate::Span::default()); |
3090 | 0 | } |
3091 | | |
3092 | | // Save this block fragment in `block_ctx.blocks`, and mark it to be |
3093 | | // incorporated into the current body at `Statement` assembly time. |
3094 | 0 | ctx.blocks.insert(block_id, block); |
3095 | 0 | let body = &mut ctx.bodies[body_idx]; |
3096 | 0 | body.data.push(BodyFragment::BlockId(block_id)); |
3097 | 0 | Ok(()) |
3098 | 13 | } Unexecuted instantiation: <naga::front::spv::Frontend<core::iter::adapters::map::Map<core::slice::iter::Chunks<u8>, naga::front::spv::parse_u8_slice::{closure#0}>>>::next_block<naga::front::spv::Frontend<alloc::vec::into_iter::IntoIter<u32>>>::next_block Line | Count | Source | 21 | 13 | pub(in crate::front::spv) fn next_block( | 22 | 13 | &mut self, | 23 | 13 | block_id: spirv::Word, | 24 | 13 | ctx: &mut BlockContext, | 25 | 13 | ) -> Result<(), Error> { | 26 | | // Extend `body` with the correct form for a branch to `target`. | 27 | | fn merger(body: &mut Body, target: &MergeBlockInformation) { | 28 | | body.data.push(match *target { | 29 | | MergeBlockInformation::LoopContinue => BodyFragment::Continue, | 30 | | MergeBlockInformation::LoopMerge | MergeBlockInformation::SwitchMerge => { | 31 | | BodyFragment::Break | 32 | | } | 33 | | | 34 | | // Finishing a selection merge means just falling off the end of | 35 | | // the `accept` or `reject` block of the `If` statement. | 36 | | MergeBlockInformation::SelectionMerge => return, | 37 | | }) | 38 | | } | 39 | | | 40 | 13 | let mut emitter = crate::proc::Emitter::default(); | 41 | 13 | emitter.start(ctx.expressions); | 42 | | | 43 | | // Find the `Body` to which this block contributes. | 44 | | // | 45 | | // If this is some SPIR-V structured control flow construct's merge | 46 | | // block, then `body_idx` will refer to the same `Body` as the header, | 47 | | // so that we simply pick up accumulating the `Body` where the header | 48 | | // left off. Each of the statements in a block dominates the next, so | 49 | | // we're sure to encounter their SPIR-V blocks in order, ensuring that | 50 | | // the `Body` will be assembled in the proper order. | 51 | | // | 52 | | // Note that, unlike every other kind of SPIR-V block, we don't know the | 53 | | // function's first block's label in advance. 
Thus, we assume that if | 54 | | // this block has no entry in `ctx.body_for_label`, it must be the | 55 | | // function's first block. This always has body index zero. | 56 | 13 | let mut body_idx = *ctx.body_for_label.entry(block_id).or_default(); | 57 | | | 58 | | // The Naga IR block this call builds. This will end up as | 59 | | // `ctx.blocks[&block_id]`, and `ctx.bodies[body_idx]` will refer to it | 60 | | // via a `BodyFragment::BlockId`. | 61 | 13 | let mut block = crate::Block::new(); | 62 | | | 63 | | // Stores the merge block as defined by a `OpSelectionMerge` otherwise is `None` | 64 | | // | 65 | | // This is used in `OpSwitch` to promote the `MergeBlockInformation` from | 66 | | // `SelectionMerge` to `SwitchMerge` to allow `Break`s this isn't desirable for | 67 | | // `LoopMerge`s because otherwise `Continue`s wouldn't be allowed | 68 | 13 | let mut selection_merge_block = None; | 69 | | | 70 | | macro_rules! get_expr_handle { | 71 | | ($id:expr, $lexp:expr) => { | 72 | | self.get_expr_handle($id, $lexp, ctx, &mut emitter, &mut block, body_idx) | 73 | | }; | 74 | | } | 75 | | macro_rules! 
parse_expr_op { | 76 | | ($op:expr, BINARY) => { | 77 | | self.parse_expr_binary_op(ctx, &mut emitter, &mut block, block_id, body_idx, $op) | 78 | | }; | 79 | | | 80 | | ($op:expr, SHIFT) => { | 81 | | self.parse_expr_shift_op(ctx, &mut emitter, &mut block, block_id, body_idx, $op) | 82 | | }; | 83 | | ($op:expr, UNARY) => { | 84 | | self.parse_expr_unary_op(ctx, &mut emitter, &mut block, block_id, body_idx, $op) | 85 | | }; | 86 | | ($axis:expr, $ctrl:expr, DERIVATIVE) => { | 87 | | self.parse_expr_derivative( | 88 | | ctx, | 89 | | &mut emitter, | 90 | | &mut block, | 91 | | block_id, | 92 | | body_idx, | 93 | | ($axis, $ctrl), | 94 | | ) | 95 | | }; | 96 | | } | 97 | | | 98 | 0 | let terminator = loop { | 99 | | use spirv::Op; | 100 | 228k | let start = self.data_offset; | 101 | 228k | let inst = self.next_inst()?; | 102 | 228k | let span = crate::Span::from(start..(start + 4 * (inst.wc as usize))); | 103 | 228k | log::debug!("\t\t{:?} [{}]", inst.op, inst.wc); | 104 | | | 105 | 228k | match inst.op { | 106 | | Op::Line => { | 107 | 0 | inst.expect(4)?; | 108 | 0 | let _file_id = self.next()?; | 109 | 0 | let _row_id = self.next()?; | 110 | 0 | let _col_id = self.next()?; | 111 | | } | 112 | 0 | Op::NoLine => inst.expect(1)?, | 113 | | Op::Undef => { | 114 | 0 | inst.expect(3)?; | 115 | 0 | let type_id = self.next()?; | 116 | 0 | let id = self.next()?; | 117 | 0 | let type_lookup = self.lookup_type.lookup(type_id)?; | 118 | 0 | let ty = type_lookup.handle; | 119 | | | 120 | 0 | self.lookup_expression.insert( | 121 | 0 | id, | 122 | 0 | LookupExpression { | 123 | 0 | handle: ctx | 124 | 0 | .expressions | 125 | 0 | .append(crate::Expression::ZeroValue(ty), span), | 126 | 0 | type_id, | 127 | 0 | block_id, | 128 | 0 | }, | 129 | | ); | 130 | | } | 131 | | Op::Variable => { | 132 | 0 | inst.expect_at_least(4)?; | 133 | 0 | block.extend(emitter.finish(ctx.expressions)); | 134 | | | 135 | 0 | let result_type_id = self.next()?; | 136 | 0 | let result_id = 
self.next()?; | 137 | 0 | let _storage_class = self.next()?; | 138 | 0 | let init = if inst.wc > 4 { | 139 | 0 | inst.expect(5)?; | 140 | 0 | let init_id = self.next()?; | 141 | 0 | let lconst = self.lookup_constant.lookup(init_id)?; | 142 | 0 | Some(ctx.expressions.append(lconst.inner.to_expr(), span)) | 143 | | } else { | 144 | 0 | None | 145 | | }; | 146 | | | 147 | 0 | let name = self | 148 | 0 | .future_decor | 149 | 0 | .remove(&result_id) | 150 | 0 | .and_then(|decor| decor.name); | 151 | 0 | if let Some(ref name) = name { | 152 | 0 | log::debug!("\t\t\tid={result_id} name={name}"); | 153 | 0 | } | 154 | 0 | let lookup_ty = self.lookup_type.lookup(result_type_id)?; | 155 | 0 | let var_handle = ctx.local_arena.append( | 156 | | crate::LocalVariable { | 157 | 0 | name, | 158 | 0 | ty: match ctx.module.types[lookup_ty.handle].inner { | 159 | 0 | crate::TypeInner::Pointer { base, .. } => base, | 160 | 0 | _ => lookup_ty.handle, | 161 | | }, | 162 | 0 | init, | 163 | | }, | 164 | 0 | span, | 165 | | ); | 166 | | | 167 | 0 | self.lookup_expression.insert( | 168 | 0 | result_id, | 169 | 0 | LookupExpression { | 170 | 0 | handle: ctx | 171 | 0 | .expressions | 172 | 0 | .append(crate::Expression::LocalVariable(var_handle), span), | 173 | 0 | type_id: result_type_id, | 174 | 0 | block_id, | 175 | 0 | }, | 176 | | ); | 177 | 0 | emitter.start(ctx.expressions); | 178 | | } | 179 | | Op::Phi => { | 180 | 0 | inst.expect_at_least(3)?; | 181 | 0 | block.extend(emitter.finish(ctx.expressions)); | 182 | | | 183 | 0 | let result_type_id = self.next()?; | 184 | 0 | let result_id = self.next()?; | 185 | | | 186 | 0 | let name = format!("phi_{result_id}"); | 187 | 0 | let local = ctx.local_arena.append( | 188 | | crate::LocalVariable { | 189 | 0 | name: Some(name), | 190 | 0 | ty: self.lookup_type.lookup(result_type_id)?.handle, | 191 | 0 | init: None, | 192 | | }, | 193 | 0 | self.span_from(start), | 194 | | ); | 195 | 0 | let pointer = ctx | 196 | 0 | .expressions | 197 | 0 | 
.append(crate::Expression::LocalVariable(local), span); | 198 | | | 199 | 0 | let in_count = (inst.wc - 3) / 2; | 200 | 0 | let mut phi = PhiExpression { | 201 | 0 | local, | 202 | 0 | expressions: Vec::with_capacity(in_count as usize), | 203 | 0 | }; | 204 | 0 | for _ in 0..in_count { | 205 | 0 | let expr = self.next()?; | 206 | 0 | let block = self.next()?; | 207 | 0 | phi.expressions.push((expr, block)); | 208 | | } | 209 | | | 210 | 0 | ctx.phis.push(phi); | 211 | 0 | emitter.start(ctx.expressions); | 212 | | | 213 | | // Associate the lookup with an actual value, which is emitted | 214 | | // into the current block. | 215 | 0 | self.lookup_expression.insert( | 216 | 0 | result_id, | 217 | 0 | LookupExpression { | 218 | 0 | handle: ctx | 219 | 0 | .expressions | 220 | 0 | .append(crate::Expression::Load { pointer }, span), | 221 | 0 | type_id: result_type_id, | 222 | 0 | block_id, | 223 | 0 | }, | 224 | | ); | 225 | | } | 226 | | Op::AccessChain | Op::InBoundsAccessChain => { | 227 | | struct AccessExpression { | 228 | | base_handle: Handle<crate::Expression>, | 229 | | type_id: spirv::Word, | 230 | | load_override: Option<LookupLoadOverride>, | 231 | | } | 232 | | | 233 | 0 | inst.expect_at_least(4)?; | 234 | | | 235 | 0 | let result_type_id = self.next()?; | 236 | 0 | let result_id = self.next()?; | 237 | 0 | let base_id = self.next()?; | 238 | 0 | log::trace!("\t\t\tlooking up expr {base_id:?}"); | 239 | | | 240 | 0 | let mut acex = { | 241 | 0 | let lexp = self.lookup_expression.lookup(base_id)?; | 242 | 0 | let lty = self.lookup_type.lookup(lexp.type_id)?; | 243 | | | 244 | | // HACK `OpAccessChain` and `OpInBoundsAccessChain` | 245 | | // require for the result type to be a pointer, but if | 246 | | // we're given a pointer to an image / sampler, it will | 247 | | // be *already* dereferenced, since we do that early | 248 | | // during `parse_type_pointer()`. 
| 249 | | // | 250 | | // This can happen only through `BindingArray`, since | 251 | | // that's the only case where one can obtain a pointer | 252 | | // to an image / sampler, and so let's match on that: | 253 | 0 | let dereference = match ctx.module.types[lty.handle].inner { | 254 | 0 | crate::TypeInner::BindingArray { .. } => false, | 255 | 0 | _ => true, | 256 | | }; | 257 | | | 258 | 0 | let type_id = if dereference { | 259 | 0 | lty.base_id.ok_or(Error::InvalidAccessType(lexp.type_id))? | 260 | | } else { | 261 | 0 | lexp.type_id | 262 | | }; | 263 | | | 264 | 0 | AccessExpression { | 265 | 0 | base_handle: get_expr_handle!(base_id, lexp), | 266 | 0 | type_id, | 267 | 0 | load_override: self.lookup_load_override.get(&base_id).cloned(), | 268 | 0 | } | 269 | | }; | 270 | | | 271 | 0 | for _ in 4..inst.wc { | 272 | 0 | let access_id = self.next()?; | 273 | 0 | log::trace!("\t\t\tlooking up index expr {access_id:?}"); | 274 | 0 | let index_expr = self.lookup_expression.lookup(access_id)?.clone(); | 275 | 0 | let index_expr_handle = get_expr_handle!(access_id, &index_expr); | 276 | 0 | let index_expr_data = &ctx.expressions[index_expr.handle]; | 277 | 0 | let index_maybe = match *index_expr_data { | 278 | 0 | crate::Expression::Constant(const_handle) => Some( | 279 | 0 | ctx.gctx() | 280 | 0 | .eval_expr_to_u32(ctx.module.constants[const_handle].init) | 281 | 0 | .map_err(|_| { | 282 | | Error::InvalidAccess(crate::Expression::Constant( | 283 | | const_handle, | 284 | | )) | 285 | 0 | })?, | 286 | | ), | 287 | 0 | _ => None, | 288 | | }; | 289 | | | 290 | 0 | log::trace!("\t\t\tlooking up type {:?}", acex.type_id); | 291 | 0 | let type_lookup = self.lookup_type.lookup(acex.type_id)?; | 292 | 0 | let ty = &ctx.module.types[type_lookup.handle]; | 293 | 0 | acex = match ty.inner { | 294 | | // can only index a struct with a constant | 295 | 0 | crate::TypeInner::Struct { ref members, .. 
} => { | 296 | 0 | let index = index_maybe | 297 | 0 | .ok_or_else(|| Error::InvalidAccess(index_expr_data.clone()))?; | 298 | | | 299 | 0 | let lookup_member = self | 300 | 0 | .lookup_member | 301 | 0 | .get(&(type_lookup.handle, index)) | 302 | 0 | .ok_or(Error::InvalidAccessType(acex.type_id))?; | 303 | 0 | let base_handle = ctx.expressions.append( | 304 | 0 | crate::Expression::AccessIndex { | 305 | 0 | base: acex.base_handle, | 306 | 0 | index, | 307 | 0 | }, | 308 | 0 | span, | 309 | | ); | 310 | | | 311 | 0 | if let Some(crate::Binding::BuiltIn(built_in)) = | 312 | 0 | members[index as usize].binding | 313 | 0 | { | 314 | 0 | self.gl_per_vertex_builtin_access.insert(built_in); | 315 | 0 | } | 316 | | | 317 | | AccessExpression { | 318 | 0 | base_handle, | 319 | 0 | type_id: lookup_member.type_id, | 320 | 0 | load_override: if lookup_member.row_major { | 321 | 0 | debug_assert!(acex.load_override.is_none()); | 322 | 0 | let sub_type_lookup = | 323 | 0 | self.lookup_type.lookup(lookup_member.type_id)?; | 324 | 0 | Some(match ctx.module.types[sub_type_lookup.handle].inner { | 325 | | // load it transposed, to match column major expectations | 326 | | crate::TypeInner::Matrix { .. } => { | 327 | 0 | let loaded = ctx.expressions.append( | 328 | 0 | crate::Expression::Load { | 329 | 0 | pointer: base_handle, | 330 | 0 | }, | 331 | 0 | span, | 332 | | ); | 333 | 0 | let transposed = ctx.expressions.append( | 334 | 0 | crate::Expression::Math { | 335 | 0 | fun: crate::MathFunction::Transpose, | 336 | 0 | arg: loaded, | 337 | 0 | arg1: None, | 338 | 0 | arg2: None, | 339 | 0 | arg3: None, | 340 | 0 | }, | 341 | 0 | span, | 342 | | ); | 343 | 0 | LookupLoadOverride::Loaded(transposed) | 344 | | } | 345 | 0 | _ => LookupLoadOverride::Pending, | 346 | | }) | 347 | | } else { | 348 | 0 | None | 349 | | }, | 350 | | } | 351 | | } | 352 | | crate::TypeInner::Matrix { .. 
} => { | 353 | 0 | let load_override = match acex.load_override { | 354 | | // We are indexing inside a row-major matrix | 355 | 0 | Some(LookupLoadOverride::Loaded(load_expr)) => { | 356 | 0 | let index = index_maybe.ok_or_else(|| { | 357 | | Error::InvalidAccess(index_expr_data.clone()) | 358 | 0 | })?; | 359 | 0 | let sub_handle = ctx.expressions.append( | 360 | 0 | crate::Expression::AccessIndex { | 361 | 0 | base: load_expr, | 362 | 0 | index, | 363 | 0 | }, | 364 | 0 | span, | 365 | | ); | 366 | 0 | Some(LookupLoadOverride::Loaded(sub_handle)) | 367 | | } | 368 | 0 | _ => None, | 369 | | }; | 370 | 0 | let sub_expr = match index_maybe { | 371 | 0 | Some(index) => crate::Expression::AccessIndex { | 372 | 0 | base: acex.base_handle, | 373 | 0 | index, | 374 | 0 | }, | 375 | 0 | None => crate::Expression::Access { | 376 | 0 | base: acex.base_handle, | 377 | 0 | index: index_expr_handle, | 378 | 0 | }, | 379 | | }; | 380 | | AccessExpression { | 381 | 0 | base_handle: ctx.expressions.append(sub_expr, span), | 382 | 0 | type_id: type_lookup | 383 | 0 | .base_id | 384 | 0 | .ok_or(Error::InvalidAccessType(acex.type_id))?, | 385 | 0 | load_override, | 386 | | } | 387 | | } | 388 | | // This must be a vector or an array. | 389 | | _ => { | 390 | 0 | let base_handle = ctx.expressions.append( | 391 | 0 | crate::Expression::Access { | 392 | 0 | base: acex.base_handle, | 393 | 0 | index: index_expr_handle, | 394 | 0 | }, | 395 | 0 | span, | 396 | | ); | 397 | 0 | let load_override = match acex.load_override { | 398 | | // If there is a load override in place, then we always end up | 399 | | // with a side-loaded value here. | 400 | 0 | Some(lookup_load_override) => { | 401 | 0 | let sub_expr = match lookup_load_override { | 402 | | // We must be indexing into the array of row-major matrices. | 403 | | // Let's load the result of indexing and transpose it. 
| 404 | | LookupLoadOverride::Pending => { | 405 | 0 | let loaded = ctx.expressions.append( | 406 | 0 | crate::Expression::Load { | 407 | 0 | pointer: base_handle, | 408 | 0 | }, | 409 | 0 | span, | 410 | | ); | 411 | 0 | ctx.expressions.append( | 412 | 0 | crate::Expression::Math { | 413 | 0 | fun: crate::MathFunction::Transpose, | 414 | 0 | arg: loaded, | 415 | 0 | arg1: None, | 416 | 0 | arg2: None, | 417 | 0 | arg3: None, | 418 | 0 | }, | 419 | 0 | span, | 420 | | ) | 421 | | } | 422 | | // We are indexing inside a row-major matrix. | 423 | 0 | LookupLoadOverride::Loaded(load_expr) => { | 424 | 0 | ctx.expressions.append( | 425 | 0 | crate::Expression::Access { | 426 | 0 | base: load_expr, | 427 | 0 | index: index_expr_handle, | 428 | 0 | }, | 429 | 0 | span, | 430 | | ) | 431 | | } | 432 | | }; | 433 | 0 | Some(LookupLoadOverride::Loaded(sub_expr)) | 434 | | } | 435 | 0 | None => None, | 436 | | }; | 437 | | AccessExpression { | 438 | 0 | base_handle, | 439 | 0 | type_id: type_lookup | 440 | 0 | .base_id | 441 | 0 | .ok_or(Error::InvalidAccessType(acex.type_id))?, | 442 | 0 | load_override, | 443 | | } | 444 | | } | 445 | | }; | 446 | | } | 447 | | | 448 | 0 | if let Some(load_expr) = acex.load_override { | 449 | 0 | self.lookup_load_override.insert(result_id, load_expr); | 450 | 0 | } | 451 | 0 | let lookup_expression = LookupExpression { | 452 | 0 | handle: acex.base_handle, | 453 | 0 | type_id: result_type_id, | 454 | 0 | block_id, | 455 | 0 | }; | 456 | 0 | self.lookup_expression.insert(result_id, lookup_expression); | 457 | | } | 458 | | Op::VectorExtractDynamic => { | 459 | 0 | inst.expect(5)?; | 460 | | | 461 | 0 | let result_type_id = self.next()?; | 462 | 0 | let id = self.next()?; | 463 | 0 | let composite_id = self.next()?; | 464 | 0 | let index_id = self.next()?; | 465 | | | 466 | 0 | let root_lexp = self.lookup_expression.lookup(composite_id)?; | 467 | 0 | let root_handle = get_expr_handle!(composite_id, root_lexp); | 468 | 0 | let 
root_type_lookup = self.lookup_type.lookup(root_lexp.type_id)?; | 469 | 0 | let index_lexp = self.lookup_expression.lookup(index_id)?; | 470 | 0 | let index_handle = get_expr_handle!(index_id, index_lexp); | 471 | 0 | let index_type = self.lookup_type.lookup(index_lexp.type_id)?.handle; | 472 | | | 473 | 0 | let num_components = match ctx.module.types[root_type_lookup.handle].inner { | 474 | 0 | crate::TypeInner::Vector { size, .. } => size as u32, | 475 | 0 | _ => return Err(Error::InvalidVectorType(root_type_lookup.handle)), | 476 | | }; | 477 | | | 478 | 0 | let mut make_index = |ctx: &mut BlockContext, index: u32| { | 479 | | make_index_literal( | 480 | | ctx, | 481 | | index, | 482 | | &mut block, | 483 | | &mut emitter, | 484 | | index_type, | 485 | | index_lexp.type_id, | 486 | | span, | 487 | | ) | 488 | | }; | 489 | | | 490 | 0 | let index_expr = make_index(ctx, 0)?; | 491 | 0 | let mut handle = ctx.expressions.append( | 492 | 0 | crate::Expression::Access { | 493 | 0 | base: root_handle, | 494 | 0 | index: index_expr, | 495 | 0 | }, | 496 | 0 | span, | 497 | | ); | 498 | 0 | for index in 1..num_components { | 499 | 0 | let index_expr = make_index(ctx, index)?; | 500 | 0 | let access_expr = ctx.expressions.append( | 501 | 0 | crate::Expression::Access { | 502 | 0 | base: root_handle, | 503 | 0 | index: index_expr, | 504 | 0 | }, | 505 | 0 | span, | 506 | | ); | 507 | 0 | let cond = ctx.expressions.append( | 508 | 0 | crate::Expression::Binary { | 509 | 0 | op: crate::BinaryOperator::Equal, | 510 | 0 | left: index_expr, | 511 | 0 | right: index_handle, | 512 | 0 | }, | 513 | 0 | span, | 514 | | ); | 515 | 0 | handle = ctx.expressions.append( | 516 | 0 | crate::Expression::Select { | 517 | 0 | condition: cond, | 518 | 0 | accept: access_expr, | 519 | 0 | reject: handle, | 520 | 0 | }, | 521 | 0 | span, | 522 | 0 | ); | 523 | | } | 524 | | | 525 | 0 | self.lookup_expression.insert( | 526 | 0 | id, | 527 | 0 | LookupExpression { | 528 | 0 | handle, | 529 | 0 | 
type_id: result_type_id, | 530 | 0 | block_id, | 531 | 0 | }, | 532 | | ); | 533 | | } | 534 | | Op::VectorInsertDynamic => { | 535 | 0 | inst.expect(6)?; | 536 | | | 537 | 0 | let result_type_id = self.next()?; | 538 | 0 | let id = self.next()?; | 539 | 0 | let composite_id = self.next()?; | 540 | 0 | let object_id = self.next()?; | 541 | 0 | let index_id = self.next()?; | 542 | | | 543 | 0 | let object_lexp = self.lookup_expression.lookup(object_id)?; | 544 | 0 | let object_handle = get_expr_handle!(object_id, object_lexp); | 545 | 0 | let root_lexp = self.lookup_expression.lookup(composite_id)?; | 546 | 0 | let root_handle = get_expr_handle!(composite_id, root_lexp); | 547 | 0 | let root_type_lookup = self.lookup_type.lookup(root_lexp.type_id)?; | 548 | 0 | let index_lexp = self.lookup_expression.lookup(index_id)?; | 549 | 0 | let index_handle = get_expr_handle!(index_id, index_lexp); | 550 | 0 | let index_type = self.lookup_type.lookup(index_lexp.type_id)?.handle; | 551 | | | 552 | 0 | let num_components = match ctx.module.types[root_type_lookup.handle].inner { | 553 | 0 | crate::TypeInner::Vector { size, .. 
} => size as u32, | 554 | 0 | _ => return Err(Error::InvalidVectorType(root_type_lookup.handle)), | 555 | | }; | 556 | | | 557 | 0 | let mut components = Vec::with_capacity(num_components as usize); | 558 | 0 | for index in 0..num_components { | 559 | 0 | let index_expr = make_index_literal( | 560 | 0 | ctx, | 561 | 0 | index, | 562 | 0 | &mut block, | 563 | 0 | &mut emitter, | 564 | 0 | index_type, | 565 | 0 | index_lexp.type_id, | 566 | 0 | span, | 567 | 0 | )?; | 568 | 0 | let access_expr = ctx.expressions.append( | 569 | 0 | crate::Expression::Access { | 570 | 0 | base: root_handle, | 571 | 0 | index: index_expr, | 572 | 0 | }, | 573 | 0 | span, | 574 | | ); | 575 | 0 | let cond = ctx.expressions.append( | 576 | 0 | crate::Expression::Binary { | 577 | 0 | op: crate::BinaryOperator::Equal, | 578 | 0 | left: index_expr, | 579 | 0 | right: index_handle, | 580 | 0 | }, | 581 | 0 | span, | 582 | | ); | 583 | 0 | let handle = ctx.expressions.append( | 584 | 0 | crate::Expression::Select { | 585 | 0 | condition: cond, | 586 | 0 | accept: object_handle, | 587 | 0 | reject: access_expr, | 588 | 0 | }, | 589 | 0 | span, | 590 | | ); | 591 | 0 | components.push(handle); | 592 | | } | 593 | 0 | let handle = ctx.expressions.append( | 594 | 0 | crate::Expression::Compose { | 595 | 0 | ty: root_type_lookup.handle, | 596 | 0 | components, | 597 | 0 | }, | 598 | 0 | span, | 599 | | ); | 600 | | | 601 | 0 | self.lookup_expression.insert( | 602 | 0 | id, | 603 | 0 | LookupExpression { | 604 | 0 | handle, | 605 | 0 | type_id: result_type_id, | 606 | 0 | block_id, | 607 | 0 | }, | 608 | | ); | 609 | | } | 610 | | Op::CompositeExtract => { | 611 | 1 | inst.expect_at_least(4)?; | 612 | | | 613 | 1 | let result_type_id = self.next()?; | 614 | 1 | let result_id = self.next()?; | 615 | 1 | let base_id = self.next()?; | 616 | 1 | log::trace!("\t\t\tlooking up expr {base_id:?}"); | 617 | 1 | let mut lexp = self.lookup_expression.lookup(base_id)?.clone(); | 618 | 1 | lexp.handle = 
get_expr_handle!(base_id, &lexp); | 619 | 1 | for _ in 4..inst.wc { | 620 | 29.7k | let index = self.next()?; | 621 | 29.7k | log::trace!("\t\t\tlooking up type {:?}", lexp.type_id); | 622 | 29.7k | let type_lookup = self.lookup_type.lookup(lexp.type_id)?; | 623 | 29.7k | let type_id = match ctx.module.types[type_lookup.handle].inner { | 624 | | crate::TypeInner::Struct { .. } => { | 625 | 0 | self.lookup_member | 626 | 0 | .get(&(type_lookup.handle, index)) | 627 | 0 | .ok_or(Error::InvalidAccessType(lexp.type_id))? | 628 | | .type_id | 629 | | } | 630 | | crate::TypeInner::Array { .. } | 631 | | | crate::TypeInner::Vector { .. } | 632 | 29.7k | | crate::TypeInner::Matrix { .. } => type_lookup | 633 | 29.7k | .base_id | 634 | 29.7k | .ok_or(Error::InvalidAccessType(lexp.type_id))?, | 635 | 0 | ref other => { | 636 | 0 | log::warn!("composite type {other:?}"); | 637 | 0 | return Err(Error::UnsupportedType(type_lookup.handle)); | 638 | | } | 639 | | }; | 640 | 29.7k | lexp = LookupExpression { | 641 | 29.7k | handle: ctx.expressions.append( | 642 | 29.7k | crate::Expression::AccessIndex { | 643 | 29.7k | base: lexp.handle, | 644 | 29.7k | index, | 645 | 29.7k | }, | 646 | 29.7k | span, | 647 | 29.7k | ), | 648 | 29.7k | type_id, | 649 | 29.7k | block_id, | 650 | 29.7k | }; | 651 | | } | 652 | | | 653 | 0 | self.lookup_expression.insert( | 654 | 0 | result_id, | 655 | 0 | LookupExpression { | 656 | 0 | handle: lexp.handle, | 657 | 0 | type_id: result_type_id, | 658 | 0 | block_id, | 659 | 0 | }, | 660 | | ); | 661 | | } | 662 | | Op::CompositeInsert => { | 663 | 0 | inst.expect_at_least(5)?; | 664 | | | 665 | 0 | let result_type_id = self.next()?; | 666 | 0 | let id = self.next()?; | 667 | 0 | let object_id = self.next()?; | 668 | 0 | let composite_id = self.next()?; | 669 | 0 | let mut selections = Vec::with_capacity(inst.wc as usize - 5); | 670 | 0 | for _ in 5..inst.wc { | 671 | 0 | selections.push(self.next()?); | 672 | | } | 673 | | | 674 | 0 | let object_lexp = 
self.lookup_expression.lookup(object_id)?.clone(); | 675 | 0 | let object_handle = get_expr_handle!(object_id, &object_lexp); | 676 | 0 | let root_lexp = self.lookup_expression.lookup(composite_id)?.clone(); | 677 | 0 | let root_handle = get_expr_handle!(composite_id, &root_lexp); | 678 | 0 | let handle = self.insert_composite( | 679 | 0 | root_handle, | 680 | 0 | result_type_id, | 681 | 0 | object_handle, | 682 | 0 | &selections, | 683 | 0 | &ctx.module.types, | 684 | 0 | ctx.expressions, | 685 | 0 | span, | 686 | 0 | )?; | 687 | | | 688 | 0 | self.lookup_expression.insert( | 689 | 0 | id, | 690 | 0 | LookupExpression { | 691 | 0 | handle, | 692 | 0 | type_id: result_type_id, | 693 | 0 | block_id, | 694 | 0 | }, | 695 | | ); | 696 | | } | 697 | | Op::CompositeConstruct => { | 698 | 0 | inst.expect_at_least(3)?; | 699 | | | 700 | 0 | let result_type_id = self.next()?; | 701 | 0 | let id = self.next()?; | 702 | 0 | let mut components = Vec::with_capacity(inst.wc as usize - 2); | 703 | 0 | for _ in 3..inst.wc { | 704 | 0 | let comp_id = self.next()?; | 705 | 0 | log::trace!("\t\t\tlooking up expr {comp_id:?}"); | 706 | 0 | let lexp = self.lookup_expression.lookup(comp_id)?; | 707 | 0 | let handle = get_expr_handle!(comp_id, lexp); | 708 | 0 | components.push(handle); | 709 | | } | 710 | 0 | let ty = self.lookup_type.lookup(result_type_id)?.handle; | 711 | 0 | let first = components[0]; | 712 | 0 | let expr = match ctx.module.types[ty].inner { | 713 | | // this is an optimization to detect the splat | 714 | 0 | crate::TypeInner::Vector { size, .. 
} | 715 | 0 | if components.len() == size as usize | 716 | 0 | && components[1..].iter().all(|&c| c == first) => | 717 | | { | 718 | 0 | crate::Expression::Splat { size, value: first } | 719 | | } | 720 | 0 | _ => crate::Expression::Compose { ty, components }, | 721 | | }; | 722 | 0 | self.lookup_expression.insert( | 723 | 0 | id, | 724 | 0 | LookupExpression { | 725 | 0 | handle: ctx.expressions.append(expr, span), | 726 | 0 | type_id: result_type_id, | 727 | 0 | block_id, | 728 | 0 | }, | 729 | | ); | 730 | | } | 731 | | Op::Load => { | 732 | 0 | inst.expect_at_least(4)?; | 733 | | | 734 | 0 | let result_type_id = self.next()?; | 735 | 0 | let result_id = self.next()?; | 736 | 0 | let pointer_id = self.next()?; | 737 | 0 | if inst.wc != 4 { | 738 | 0 | inst.expect(5)?; | 739 | 0 | let _memory_access = self.next()?; | 740 | 0 | } | 741 | | | 742 | 0 | let base_lexp = self.lookup_expression.lookup(pointer_id)?; | 743 | 0 | let base_handle = get_expr_handle!(pointer_id, base_lexp); | 744 | 0 | let type_lookup = self.lookup_type.lookup(base_lexp.type_id)?; | 745 | 0 | let handle = match ctx.module.types[type_lookup.handle].inner { | 746 | | crate::TypeInner::Image { .. } | crate::TypeInner::Sampler { .. 
} => { | 747 | 0 | base_handle | 748 | | } | 749 | 0 | _ => match self.lookup_load_override.get(&pointer_id) { | 750 | 0 | Some(&LookupLoadOverride::Loaded(handle)) => handle, | 751 | | //Note: we aren't handling `LookupLoadOverride::Pending` properly here | 752 | 0 | _ => ctx.expressions.append( | 753 | 0 | crate::Expression::Load { | 754 | 0 | pointer: base_handle, | 755 | 0 | }, | 756 | 0 | span, | 757 | | ), | 758 | | }, | 759 | | }; | 760 | | | 761 | 0 | self.lookup_expression.insert( | 762 | 0 | result_id, | 763 | 0 | LookupExpression { | 764 | 0 | handle, | 765 | 0 | type_id: result_type_id, | 766 | 0 | block_id, | 767 | 0 | }, | 768 | | ); | 769 | | } | 770 | | Op::Store => { | 771 | 0 | inst.expect_at_least(3)?; | 772 | | | 773 | 0 | let pointer_id = self.next()?; | 774 | 0 | let value_id = self.next()?; | 775 | 0 | if inst.wc != 3 { | 776 | 0 | inst.expect(4)?; | 777 | 0 | let _memory_access = self.next()?; | 778 | 0 | } | 779 | 0 | let base_expr = self.lookup_expression.lookup(pointer_id)?; | 780 | 0 | let base_handle = get_expr_handle!(pointer_id, base_expr); | 781 | 0 | let value_expr = self.lookup_expression.lookup(value_id)?; | 782 | 0 | let value_handle = get_expr_handle!(value_id, value_expr); | 783 | | | 784 | 0 | block.extend(emitter.finish(ctx.expressions)); | 785 | 0 | block.push( | 786 | 0 | crate::Statement::Store { | 787 | 0 | pointer: base_handle, | 788 | 0 | value: value_handle, | 789 | 0 | }, | 790 | 0 | span, | 791 | | ); | 792 | 0 | emitter.start(ctx.expressions); | 793 | | } | 794 | | // Arithmetic Instructions +, -, *, /, % | 795 | | Op::SNegate | Op::FNegate => { | 796 | 0 | inst.expect(4)?; | 797 | 0 | self.parse_expr_unary_op_sign_adjusted( | 798 | 0 | ctx, | 799 | 0 | &mut emitter, | 800 | 0 | &mut block, | 801 | 0 | block_id, | 802 | 0 | body_idx, | 803 | 0 | crate::UnaryOperator::Negate, | 804 | 0 | )?; | 805 | | } | 806 | | Op::IAdd | 807 | | | Op::ISub | 808 | | | Op::IMul | 809 | | | Op::BitwiseOr | 810 | | | Op::BitwiseXor | 
811 | | | Op::BitwiseAnd | 812 | | | Op::SDiv | 813 | | | Op::SRem => { | 814 | 0 | inst.expect(5)?; | 815 | 0 | let operator = map_binary_operator(inst.op)?; | 816 | 0 | self.parse_expr_binary_op_sign_adjusted( | 817 | 0 | ctx, | 818 | 0 | &mut emitter, | 819 | 0 | &mut block, | 820 | 0 | block_id, | 821 | 0 | body_idx, | 822 | 0 | operator, | 823 | 0 | SignAnchor::Result, | 824 | 0 | )?; | 825 | | } | 826 | | Op::IEqual | Op::INotEqual => { | 827 | 0 | inst.expect(5)?; | 828 | 0 | let operator = map_binary_operator(inst.op)?; | 829 | 0 | self.parse_expr_binary_op_sign_adjusted( | 830 | 0 | ctx, | 831 | 0 | &mut emitter, | 832 | 0 | &mut block, | 833 | 0 | block_id, | 834 | 0 | body_idx, | 835 | 0 | operator, | 836 | 0 | SignAnchor::Operand, | 837 | 0 | )?; | 838 | | } | 839 | | Op::FAdd => { | 840 | 0 | inst.expect(5)?; | 841 | 0 | parse_expr_op!(crate::BinaryOperator::Add, BINARY)?; | 842 | | } | 843 | | Op::FSub => { | 844 | 0 | inst.expect(5)?; | 845 | 0 | parse_expr_op!(crate::BinaryOperator::Subtract, BINARY)?; | 846 | | } | 847 | | Op::FMul => { | 848 | 0 | inst.expect(5)?; | 849 | 0 | parse_expr_op!(crate::BinaryOperator::Multiply, BINARY)?; | 850 | | } | 851 | | Op::UDiv | Op::FDiv => { | 852 | 0 | inst.expect(5)?; | 853 | 0 | parse_expr_op!(crate::BinaryOperator::Divide, BINARY)?; | 854 | | } | 855 | | Op::UMod | Op::FRem => { | 856 | 2 | inst.expect(5)?; | 857 | 2 | parse_expr_op!(crate::BinaryOperator::Modulo, BINARY)?; | 858 | | } | 859 | | Op::SMod => { | 860 | 0 | inst.expect(5)?; | 861 | | | 862 | | // x - y * int(floor(float(x) / float(y))) | 863 | | | 864 | 0 | let start = self.data_offset; | 865 | 0 | let result_type_id = self.next()?; | 866 | 0 | let result_id = self.next()?; | 867 | 0 | let p1_id = self.next()?; | 868 | 0 | let p2_id = self.next()?; | 869 | 0 | let span = self.span_from_with_op(start); | 870 | | | 871 | 0 | let p1_lexp = self.lookup_expression.lookup(p1_id)?; | 872 | 0 | let left = self.get_expr_handle( | 873 | 0 | p1_id, | 
874 | 0 | p1_lexp, | 875 | 0 | ctx, | 876 | 0 | &mut emitter, | 877 | 0 | &mut block, | 878 | 0 | body_idx, | 879 | | ); | 880 | 0 | let p2_lexp = self.lookup_expression.lookup(p2_id)?; | 881 | 0 | let right = self.get_expr_handle( | 882 | 0 | p2_id, | 883 | 0 | p2_lexp, | 884 | 0 | ctx, | 885 | 0 | &mut emitter, | 886 | 0 | &mut block, | 887 | 0 | body_idx, | 888 | | ); | 889 | | | 890 | 0 | let result_ty = self.lookup_type.lookup(result_type_id)?; | 891 | 0 | let inner = &ctx.module.types[result_ty.handle].inner; | 892 | 0 | let kind = inner.scalar_kind().unwrap(); | 893 | 0 | let size = inner.size(ctx.gctx()) as u8; | 894 | | | 895 | 0 | let left_cast = ctx.expressions.append( | 896 | 0 | crate::Expression::As { | 897 | 0 | expr: left, | 898 | 0 | kind: crate::ScalarKind::Float, | 899 | 0 | convert: Some(size), | 900 | 0 | }, | 901 | 0 | span, | 902 | | ); | 903 | 0 | let right_cast = ctx.expressions.append( | 904 | 0 | crate::Expression::As { | 905 | 0 | expr: right, | 906 | 0 | kind: crate::ScalarKind::Float, | 907 | 0 | convert: Some(size), | 908 | 0 | }, | 909 | 0 | span, | 910 | | ); | 911 | 0 | let div = ctx.expressions.append( | 912 | 0 | crate::Expression::Binary { | 913 | 0 | op: crate::BinaryOperator::Divide, | 914 | 0 | left: left_cast, | 915 | 0 | right: right_cast, | 916 | 0 | }, | 917 | 0 | span, | 918 | | ); | 919 | 0 | let floor = ctx.expressions.append( | 920 | 0 | crate::Expression::Math { | 921 | 0 | fun: crate::MathFunction::Floor, | 922 | 0 | arg: div, | 923 | 0 | arg1: None, | 924 | 0 | arg2: None, | 925 | 0 | arg3: None, | 926 | 0 | }, | 927 | 0 | span, | 928 | | ); | 929 | 0 | let cast = ctx.expressions.append( | 930 | 0 | crate::Expression::As { | 931 | 0 | expr: floor, | 932 | 0 | kind, | 933 | 0 | convert: Some(size), | 934 | 0 | }, | 935 | 0 | span, | 936 | | ); | 937 | 0 | let mult = ctx.expressions.append( | 938 | 0 | crate::Expression::Binary { | 939 | 0 | op: crate::BinaryOperator::Multiply, | 940 | 0 | left: cast, | 941 | 0 | 
right, | 942 | 0 | }, | 943 | 0 | span, | 944 | | ); | 945 | 0 | let sub = ctx.expressions.append( | 946 | 0 | crate::Expression::Binary { | 947 | 0 | op: crate::BinaryOperator::Subtract, | 948 | 0 | left, | 949 | 0 | right: mult, | 950 | 0 | }, | 951 | 0 | span, | 952 | | ); | 953 | 0 | self.lookup_expression.insert( | 954 | 0 | result_id, | 955 | 0 | LookupExpression { | 956 | 0 | handle: sub, | 957 | 0 | type_id: result_type_id, | 958 | 0 | block_id, | 959 | 0 | }, | 960 | | ); | 961 | | } | 962 | | Op::FMod => { | 963 | 225k | inst.expect(5)?; | 964 | | | 965 | | // x - y * floor(x / y) | 966 | | | 967 | 225k | let start = self.data_offset; | 968 | 225k | let span = self.span_from_with_op(start); | 969 | | | 970 | 225k | let result_type_id = self.next()?; | 971 | 225k | let result_id = self.next()?; | 972 | 225k | let p1_id = self.next()?; | 973 | 225k | let p2_id = self.next()?; | 974 | | | 975 | 225k | let p1_lexp = self.lookup_expression.lookup(p1_id)?; | 976 | 225k | let left = self.get_expr_handle( | 977 | 225k | p1_id, | 978 | 225k | p1_lexp, | 979 | 225k | ctx, | 980 | 225k | &mut emitter, | 981 | 225k | &mut block, | 982 | 225k | body_idx, | 983 | | ); | 984 | 225k | let p2_lexp = self.lookup_expression.lookup(p2_id)?; | 985 | 225k | let right = self.get_expr_handle( | 986 | 225k | p2_id, | 987 | 225k | p2_lexp, | 988 | 225k | ctx, | 989 | 225k | &mut emitter, | 990 | 225k | &mut block, | 991 | 225k | body_idx, | 992 | | ); | 993 | | | 994 | 225k | let div = ctx.expressions.append( | 995 | 225k | crate::Expression::Binary { | 996 | 225k | op: crate::BinaryOperator::Divide, | 997 | 225k | left, | 998 | 225k | right, | 999 | 225k | }, | 1000 | 225k | span, | 1001 | | ); | 1002 | 225k | let floor = ctx.expressions.append( | 1003 | 225k | crate::Expression::Math { | 1004 | 225k | fun: crate::MathFunction::Floor, | 1005 | 225k | arg: div, | 1006 | 225k | arg1: None, | 1007 | 225k | arg2: None, | 1008 | 225k | arg3: None, | 1009 | 225k | }, | 1010 | 225k | 
span, | 1011 | | ); | 1012 | 225k | let mult = ctx.expressions.append( | 1013 | 225k | crate::Expression::Binary { | 1014 | 225k | op: crate::BinaryOperator::Multiply, | 1015 | 225k | left: floor, | 1016 | 225k | right, | 1017 | 225k | }, | 1018 | 225k | span, | 1019 | | ); | 1020 | 225k | let sub = ctx.expressions.append( | 1021 | 225k | crate::Expression::Binary { | 1022 | 225k | op: crate::BinaryOperator::Subtract, | 1023 | 225k | left, | 1024 | 225k | right: mult, | 1025 | 225k | }, | 1026 | 225k | span, | 1027 | | ); | 1028 | 225k | self.lookup_expression.insert( | 1029 | 225k | result_id, | 1030 | 225k | LookupExpression { | 1031 | 225k | handle: sub, | 1032 | 225k | type_id: result_type_id, | 1033 | 225k | block_id, | 1034 | 225k | }, | 1035 | | ); | 1036 | | } | 1037 | | Op::VectorTimesScalar | 1038 | | | Op::VectorTimesMatrix | 1039 | | | Op::MatrixTimesScalar | 1040 | | | Op::MatrixTimesVector | 1041 | | | Op::MatrixTimesMatrix => { | 1042 | 2.69k | inst.expect(5)?; | 1043 | 2.69k | parse_expr_op!(crate::BinaryOperator::Multiply, BINARY)?; | 1044 | | } | 1045 | | Op::Transpose => { | 1046 | 0 | inst.expect(4)?; | 1047 | | | 1048 | 0 | let result_type_id = self.next()?; | 1049 | 0 | let result_id = self.next()?; | 1050 | 0 | let matrix_id = self.next()?; | 1051 | 0 | let matrix_lexp = self.lookup_expression.lookup(matrix_id)?; | 1052 | 0 | let matrix_handle = get_expr_handle!(matrix_id, matrix_lexp); | 1053 | 0 | let expr = crate::Expression::Math { | 1054 | 0 | fun: crate::MathFunction::Transpose, | 1055 | 0 | arg: matrix_handle, | 1056 | 0 | arg1: None, | 1057 | 0 | arg2: None, | 1058 | 0 | arg3: None, | 1059 | 0 | }; | 1060 | 0 | self.lookup_expression.insert( | 1061 | 0 | result_id, | 1062 | 0 | LookupExpression { | 1063 | 0 | handle: ctx.expressions.append(expr, span), | 1064 | 0 | type_id: result_type_id, | 1065 | 0 | block_id, | 1066 | 0 | }, | 1067 | | ); | 1068 | | } | 1069 | | Op::Dot => { | 1070 | 0 | inst.expect(5)?; | 1071 | | | 1072 | 0 | let 
result_type_id = self.next()?; | 1073 | 0 | let result_id = self.next()?; | 1074 | 0 | let left_id = self.next()?; | 1075 | 0 | let right_id = self.next()?; | 1076 | 0 | let left_lexp = self.lookup_expression.lookup(left_id)?; | 1077 | 0 | let left_handle = get_expr_handle!(left_id, left_lexp); | 1078 | 0 | let right_lexp = self.lookup_expression.lookup(right_id)?; | 1079 | 0 | let right_handle = get_expr_handle!(right_id, right_lexp); | 1080 | 0 | let expr = crate::Expression::Math { | 1081 | 0 | fun: crate::MathFunction::Dot, | 1082 | 0 | arg: left_handle, | 1083 | 0 | arg1: Some(right_handle), | 1084 | 0 | arg2: None, | 1085 | 0 | arg3: None, | 1086 | 0 | }; | 1087 | 0 | self.lookup_expression.insert( | 1088 | 0 | result_id, | 1089 | 0 | LookupExpression { | 1090 | 0 | handle: ctx.expressions.append(expr, span), | 1091 | 0 | type_id: result_type_id, | 1092 | 0 | block_id, | 1093 | 0 | }, | 1094 | | ); | 1095 | | } | 1096 | | Op::BitFieldInsert => { | 1097 | 0 | inst.expect(7)?; | 1098 | | | 1099 | 0 | let start = self.data_offset; | 1100 | 0 | let span = self.span_from_with_op(start); | 1101 | | | 1102 | 0 | let result_type_id = self.next()?; | 1103 | 0 | let result_id = self.next()?; | 1104 | 0 | let base_id = self.next()?; | 1105 | 0 | let insert_id = self.next()?; | 1106 | 0 | let offset_id = self.next()?; | 1107 | 0 | let count_id = self.next()?; | 1108 | 0 | let base_lexp = self.lookup_expression.lookup(base_id)?; | 1109 | 0 | let base_handle = get_expr_handle!(base_id, base_lexp); | 1110 | 0 | let insert_lexp = self.lookup_expression.lookup(insert_id)?; | 1111 | 0 | let insert_handle = get_expr_handle!(insert_id, insert_lexp); | 1112 | 0 | let offset_lexp = self.lookup_expression.lookup(offset_id)?; | 1113 | 0 | let offset_handle = get_expr_handle!(offset_id, offset_lexp); | 1114 | 0 | let offset_lookup_ty = self.lookup_type.lookup(offset_lexp.type_id)?; | 1115 | 0 | let count_lexp = self.lookup_expression.lookup(count_id)?; | 1116 | 0 | let count_handle = 
get_expr_handle!(count_id, count_lexp); | 1117 | 0 | let count_lookup_ty = self.lookup_type.lookup(count_lexp.type_id)?; | 1118 | | | 1119 | 0 | let offset_kind = ctx.module.types[offset_lookup_ty.handle] | 1120 | 0 | .inner | 1121 | 0 | .scalar_kind() | 1122 | 0 | .unwrap(); | 1123 | 0 | let count_kind = ctx.module.types[count_lookup_ty.handle] | 1124 | 0 | .inner | 1125 | 0 | .scalar_kind() | 1126 | 0 | .unwrap(); | 1127 | | | 1128 | 0 | let offset_cast_handle = if offset_kind != crate::ScalarKind::Uint { | 1129 | 0 | ctx.expressions.append( | 1130 | 0 | crate::Expression::As { | 1131 | 0 | expr: offset_handle, | 1132 | 0 | kind: crate::ScalarKind::Uint, | 1133 | 0 | convert: None, | 1134 | 0 | }, | 1135 | 0 | span, | 1136 | | ) | 1137 | | } else { | 1138 | 0 | offset_handle | 1139 | | }; | 1140 | | | 1141 | 0 | let count_cast_handle = if count_kind != crate::ScalarKind::Uint { | 1142 | 0 | ctx.expressions.append( | 1143 | 0 | crate::Expression::As { | 1144 | 0 | expr: count_handle, | 1145 | 0 | kind: crate::ScalarKind::Uint, | 1146 | 0 | convert: None, | 1147 | 0 | }, | 1148 | 0 | span, | 1149 | | ) | 1150 | | } else { | 1151 | 0 | count_handle | 1152 | | }; | 1153 | | | 1154 | 0 | let expr = crate::Expression::Math { | 1155 | 0 | fun: crate::MathFunction::InsertBits, | 1156 | 0 | arg: base_handle, | 1157 | 0 | arg1: Some(insert_handle), | 1158 | 0 | arg2: Some(offset_cast_handle), | 1159 | 0 | arg3: Some(count_cast_handle), | 1160 | 0 | }; | 1161 | 0 | self.lookup_expression.insert( | 1162 | 0 | result_id, | 1163 | 0 | LookupExpression { | 1164 | 0 | handle: ctx.expressions.append(expr, span), | 1165 | 0 | type_id: result_type_id, | 1166 | 0 | block_id, | 1167 | 0 | }, | 1168 | | ); | 1169 | | } | 1170 | | Op::BitFieldSExtract | Op::BitFieldUExtract => { | 1171 | 0 | inst.expect(6)?; | 1172 | | | 1173 | 0 | let result_type_id = self.next()?; | 1174 | 0 | let result_id = self.next()?; | 1175 | 0 | let base_id = self.next()?; | 1176 | 0 | let offset_id = 
self.next()?; | 1177 | 0 | let count_id = self.next()?; | 1178 | 0 | let base_lexp = self.lookup_expression.lookup(base_id)?; | 1179 | 0 | let base_handle = get_expr_handle!(base_id, base_lexp); | 1180 | 0 | let offset_lexp = self.lookup_expression.lookup(offset_id)?; | 1181 | 0 | let offset_handle = get_expr_handle!(offset_id, offset_lexp); | 1182 | 0 | let offset_lookup_ty = self.lookup_type.lookup(offset_lexp.type_id)?; | 1183 | 0 | let count_lexp = self.lookup_expression.lookup(count_id)?; | 1184 | 0 | let count_handle = get_expr_handle!(count_id, count_lexp); | 1185 | 0 | let count_lookup_ty = self.lookup_type.lookup(count_lexp.type_id)?; | 1186 | | | 1187 | 0 | let offset_kind = ctx.module.types[offset_lookup_ty.handle] | 1188 | 0 | .inner | 1189 | 0 | .scalar_kind() | 1190 | 0 | .unwrap(); | 1191 | 0 | let count_kind = ctx.module.types[count_lookup_ty.handle] | 1192 | 0 | .inner | 1193 | 0 | .scalar_kind() | 1194 | 0 | .unwrap(); | 1195 | | | 1196 | 0 | let offset_cast_handle = if offset_kind != crate::ScalarKind::Uint { | 1197 | 0 | ctx.expressions.append( | 1198 | 0 | crate::Expression::As { | 1199 | 0 | expr: offset_handle, | 1200 | 0 | kind: crate::ScalarKind::Uint, | 1201 | 0 | convert: None, | 1202 | 0 | }, | 1203 | 0 | span, | 1204 | | ) | 1205 | | } else { | 1206 | 0 | offset_handle | 1207 | | }; | 1208 | | | 1209 | 0 | let count_cast_handle = if count_kind != crate::ScalarKind::Uint { | 1210 | 0 | ctx.expressions.append( | 1211 | 0 | crate::Expression::As { | 1212 | 0 | expr: count_handle, | 1213 | 0 | kind: crate::ScalarKind::Uint, | 1214 | 0 | convert: None, | 1215 | 0 | }, | 1216 | 0 | span, | 1217 | | ) | 1218 | | } else { | 1219 | 0 | count_handle | 1220 | | }; | 1221 | | | 1222 | 0 | let expr = crate::Expression::Math { | 1223 | 0 | fun: crate::MathFunction::ExtractBits, | 1224 | 0 | arg: base_handle, | 1225 | 0 | arg1: Some(offset_cast_handle), | 1226 | 0 | arg2: Some(count_cast_handle), | 1227 | 0 | arg3: None, | 1228 | 0 | }; | 1229 | 0 | 
self.lookup_expression.insert( | 1230 | 0 | result_id, | 1231 | 0 | LookupExpression { | 1232 | 0 | handle: ctx.expressions.append(expr, span), | 1233 | 0 | type_id: result_type_id, | 1234 | 0 | block_id, | 1235 | 0 | }, | 1236 | | ); | 1237 | | } | 1238 | | Op::BitReverse | Op::BitCount => { | 1239 | 0 | inst.expect(4)?; | 1240 | | | 1241 | 0 | let result_type_id = self.next()?; | 1242 | 0 | let result_id = self.next()?; | 1243 | 0 | let base_id = self.next()?; | 1244 | 0 | let base_lexp = self.lookup_expression.lookup(base_id)?; | 1245 | 0 | let base_handle = get_expr_handle!(base_id, base_lexp); | 1246 | 0 | let expr = crate::Expression::Math { | 1247 | 0 | fun: match inst.op { | 1248 | 0 | Op::BitReverse => crate::MathFunction::ReverseBits, | 1249 | 0 | Op::BitCount => crate::MathFunction::CountOneBits, | 1250 | 0 | _ => unreachable!(), | 1251 | | }, | 1252 | 0 | arg: base_handle, | 1253 | 0 | arg1: None, | 1254 | 0 | arg2: None, | 1255 | 0 | arg3: None, | 1256 | | }; | 1257 | 0 | self.lookup_expression.insert( | 1258 | 0 | result_id, | 1259 | 0 | LookupExpression { | 1260 | 0 | handle: ctx.expressions.append(expr, span), | 1261 | 0 | type_id: result_type_id, | 1262 | 0 | block_id, | 1263 | 0 | }, | 1264 | | ); | 1265 | | } | 1266 | | Op::OuterProduct => { | 1267 | 1 | inst.expect(5)?; | 1268 | | | 1269 | 1 | let result_type_id = self.next()?; | 1270 | 1 | let result_id = self.next()?; | 1271 | 1 | let left_id = self.next()?; | 1272 | 1 | let right_id = self.next()?; | 1273 | 1 | let left_lexp = self.lookup_expression.lookup(left_id)?; | 1274 | 1 | let left_handle = get_expr_handle!(left_id, left_lexp); | 1275 | 1 | let right_lexp = self.lookup_expression.lookup(right_id)?; | 1276 | 1 | let right_handle = get_expr_handle!(right_id, right_lexp); | 1277 | 1 | let expr = crate::Expression::Math { | 1278 | 1 | fun: crate::MathFunction::Outer, | 1279 | 1 | arg: left_handle, | 1280 | 1 | arg1: Some(right_handle), | 1281 | 1 | arg2: None, | 1282 | 1 | arg3: None, | 
1283 | 1 | }; | 1284 | 1 | self.lookup_expression.insert( | 1285 | 1 | result_id, | 1286 | 1 | LookupExpression { | 1287 | 1 | handle: ctx.expressions.append(expr, span), | 1288 | 1 | type_id: result_type_id, | 1289 | 1 | block_id, | 1290 | 1 | }, | 1291 | | ); | 1292 | | } | 1293 | | // Bitwise instructions | 1294 | | Op::Not => { | 1295 | 0 | inst.expect(4)?; | 1296 | 0 | self.parse_expr_unary_op_sign_adjusted( | 1297 | 0 | ctx, | 1298 | 0 | &mut emitter, | 1299 | 0 | &mut block, | 1300 | 0 | block_id, | 1301 | 0 | body_idx, | 1302 | 0 | crate::UnaryOperator::BitwiseNot, | 1303 | 0 | )?; | 1304 | | } | 1305 | | Op::ShiftRightLogical => { | 1306 | 0 | inst.expect(5)?; | 1307 | | //TODO: convert input and result to unsigned | 1308 | 0 | parse_expr_op!(crate::BinaryOperator::ShiftRight, SHIFT)?; | 1309 | | } | 1310 | | Op::ShiftRightArithmetic => { | 1311 | 0 | inst.expect(5)?; | 1312 | | //TODO: convert input and result to signed | 1313 | 0 | parse_expr_op!(crate::BinaryOperator::ShiftRight, SHIFT)?; | 1314 | | } | 1315 | | Op::ShiftLeftLogical => { | 1316 | 0 | inst.expect(5)?; | 1317 | 0 | parse_expr_op!(crate::BinaryOperator::ShiftLeft, SHIFT)?; | 1318 | | } | 1319 | | // Sampling | 1320 | | Op::Image => { | 1321 | 0 | inst.expect(4)?; | 1322 | 0 | self.parse_image_uncouple(block_id)?; | 1323 | | } | 1324 | | Op::SampledImage => { | 1325 | 0 | inst.expect(5)?; | 1326 | 0 | self.parse_image_couple()?; | 1327 | | } | 1328 | | Op::ImageWrite => { | 1329 | 0 | let extra = inst.expect_at_least(4)?; | 1330 | 0 | let stmt = | 1331 | 0 | self.parse_image_write(extra, ctx, &mut emitter, &mut block, body_idx)?; | 1332 | 0 | block.extend(emitter.finish(ctx.expressions)); | 1333 | 0 | block.push(stmt, span); | 1334 | 0 | emitter.start(ctx.expressions); | 1335 | | } | 1336 | | Op::ImageFetch | Op::ImageRead => { | 1337 | 0 | let extra = inst.expect_at_least(5)?; | 1338 | 0 | self.parse_image_load( | 1339 | 0 | extra, | 1340 | 0 | ctx, | 1341 | 0 | &mut emitter, | 1342 | 0 | 
&mut block, | 1343 | 0 | block_id, | 1344 | 0 | body_idx, | 1345 | 0 | )?; | 1346 | | } | 1347 | | Op::ImageSampleImplicitLod | Op::ImageSampleExplicitLod => { | 1348 | 0 | let extra = inst.expect_at_least(5)?; | 1349 | 0 | let options = image::SamplingOptions { | 1350 | 0 | compare: false, | 1351 | 0 | project: false, | 1352 | 0 | gather: false, | 1353 | 0 | }; | 1354 | 0 | self.parse_image_sample( | 1355 | 0 | extra, | 1356 | 0 | options, | 1357 | 0 | ctx, | 1358 | 0 | &mut emitter, | 1359 | 0 | &mut block, | 1360 | 0 | block_id, | 1361 | 0 | body_idx, | 1362 | 0 | )?; | 1363 | | } | 1364 | | Op::ImageSampleProjImplicitLod | Op::ImageSampleProjExplicitLod => { | 1365 | 0 | let extra = inst.expect_at_least(5)?; | 1366 | 0 | let options = image::SamplingOptions { | 1367 | 0 | compare: false, | 1368 | 0 | project: true, | 1369 | 0 | gather: false, | 1370 | 0 | }; | 1371 | 0 | self.parse_image_sample( | 1372 | 0 | extra, | 1373 | 0 | options, | 1374 | 0 | ctx, | 1375 | 0 | &mut emitter, | 1376 | 0 | &mut block, | 1377 | 0 | block_id, | 1378 | 0 | body_idx, | 1379 | 0 | )?; | 1380 | | } | 1381 | | Op::ImageSampleDrefImplicitLod | Op::ImageSampleDrefExplicitLod => { | 1382 | 0 | let extra = inst.expect_at_least(6)?; | 1383 | 0 | let options = image::SamplingOptions { | 1384 | 0 | compare: true, | 1385 | 0 | project: false, | 1386 | 0 | gather: false, | 1387 | 0 | }; | 1388 | 0 | self.parse_image_sample( | 1389 | 0 | extra, | 1390 | 0 | options, | 1391 | 0 | ctx, | 1392 | 0 | &mut emitter, | 1393 | 0 | &mut block, | 1394 | 0 | block_id, | 1395 | 0 | body_idx, | 1396 | 0 | )?; | 1397 | | } | 1398 | | Op::ImageSampleProjDrefImplicitLod | Op::ImageSampleProjDrefExplicitLod => { | 1399 | 0 | let extra = inst.expect_at_least(6)?; | 1400 | 0 | let options = image::SamplingOptions { | 1401 | 0 | compare: true, | 1402 | 0 | project: true, | 1403 | 0 | gather: false, | 1404 | 0 | }; | 1405 | 0 | self.parse_image_sample( | 1406 | 0 | extra, | 1407 | 0 | options, | 1408 | 0 | ctx, 
| 1409 | 0 | &mut emitter, | 1410 | 0 | &mut block, | 1411 | 0 | block_id, | 1412 | 0 | body_idx, | 1413 | 0 | )?; | 1414 | | } | 1415 | | Op::ImageGather => { | 1416 | 0 | let extra = inst.expect_at_least(6)?; | 1417 | 0 | let options = image::SamplingOptions { | 1418 | 0 | compare: false, | 1419 | 0 | project: false, | 1420 | 0 | gather: true, | 1421 | 0 | }; | 1422 | 0 | self.parse_image_sample( | 1423 | 0 | extra, | 1424 | 0 | options, | 1425 | 0 | ctx, | 1426 | 0 | &mut emitter, | 1427 | 0 | &mut block, | 1428 | 0 | block_id, | 1429 | 0 | body_idx, | 1430 | 0 | )?; | 1431 | | } | 1432 | | Op::ImageDrefGather => { | 1433 | 0 | let extra = inst.expect_at_least(6)?; | 1434 | 0 | let options = image::SamplingOptions { | 1435 | 0 | compare: true, | 1436 | 0 | project: false, | 1437 | 0 | gather: true, | 1438 | 0 | }; | 1439 | 0 | self.parse_image_sample( | 1440 | 0 | extra, | 1441 | 0 | options, | 1442 | 0 | ctx, | 1443 | 0 | &mut emitter, | 1444 | 0 | &mut block, | 1445 | 0 | block_id, | 1446 | 0 | body_idx, | 1447 | 0 | )?; | 1448 | | } | 1449 | | Op::ImageQuerySize => { | 1450 | 0 | inst.expect(4)?; | 1451 | 0 | self.parse_image_query_size( | 1452 | | false, | 1453 | 0 | ctx, | 1454 | 0 | &mut emitter, | 1455 | 0 | &mut block, | 1456 | 0 | block_id, | 1457 | 0 | body_idx, | 1458 | 0 | )?; | 1459 | | } | 1460 | | Op::ImageQuerySizeLod => { | 1461 | 0 | inst.expect(5)?; | 1462 | 0 | self.parse_image_query_size( | 1463 | | true, | 1464 | 0 | ctx, | 1465 | 0 | &mut emitter, | 1466 | 0 | &mut block, | 1467 | 0 | block_id, | 1468 | 0 | body_idx, | 1469 | 0 | )?; | 1470 | | } | 1471 | | Op::ImageQueryLevels => { | 1472 | 0 | inst.expect(4)?; | 1473 | 0 | self.parse_image_query_other(crate::ImageQuery::NumLevels, ctx, block_id)?; | 1474 | | } | 1475 | | Op::ImageQuerySamples => { | 1476 | 0 | inst.expect(4)?; | 1477 | 0 | self.parse_image_query_other(crate::ImageQuery::NumSamples, ctx, block_id)?; | 1478 | | } | 1479 | | // other ops | 1480 | | Op::Select => { | 1481 | 
0 | inst.expect(6)?; | 1482 | 0 | let result_type_id = self.next()?; | 1483 | 0 | let result_id = self.next()?; | 1484 | 0 | let condition = self.next()?; | 1485 | 0 | let o1_id = self.next()?; | 1486 | 0 | let o2_id = self.next()?; | 1487 | | | 1488 | 0 | let cond_lexp = self.lookup_expression.lookup(condition)?; | 1489 | 0 | let cond_handle = get_expr_handle!(condition, cond_lexp); | 1490 | 0 | let o1_lexp = self.lookup_expression.lookup(o1_id)?; | 1491 | 0 | let o1_handle = get_expr_handle!(o1_id, o1_lexp); | 1492 | 0 | let o2_lexp = self.lookup_expression.lookup(o2_id)?; | 1493 | 0 | let o2_handle = get_expr_handle!(o2_id, o2_lexp); | 1494 | | | 1495 | 0 | let expr = crate::Expression::Select { | 1496 | 0 | condition: cond_handle, | 1497 | 0 | accept: o1_handle, | 1498 | 0 | reject: o2_handle, | 1499 | 0 | }; | 1500 | 0 | self.lookup_expression.insert( | 1501 | 0 | result_id, | 1502 | 0 | LookupExpression { | 1503 | 0 | handle: ctx.expressions.append(expr, span), | 1504 | 0 | type_id: result_type_id, | 1505 | 0 | block_id, | 1506 | 0 | }, | 1507 | | ); | 1508 | | } | 1509 | | Op::VectorShuffle => { | 1510 | 0 | inst.expect_at_least(5)?; | 1511 | 0 | let result_type_id = self.next()?; | 1512 | 0 | let result_id = self.next()?; | 1513 | 0 | let v1_id = self.next()?; | 1514 | 0 | let v2_id = self.next()?; | 1515 | | | 1516 | 0 | let v1_lexp = self.lookup_expression.lookup(v1_id)?; | 1517 | 0 | let v1_lty = self.lookup_type.lookup(v1_lexp.type_id)?; | 1518 | 0 | let v1_handle = get_expr_handle!(v1_id, v1_lexp); | 1519 | 0 | let n1 = match ctx.module.types[v1_lty.handle].inner { | 1520 | 0 | crate::TypeInner::Vector { size, .. 
} => size as u32, | 1521 | 0 | _ => return Err(Error::InvalidInnerType(v1_lexp.type_id)), | 1522 | | }; | 1523 | 0 | let v2_lexp = self.lookup_expression.lookup(v2_id)?; | 1524 | 0 | let v2_lty = self.lookup_type.lookup(v2_lexp.type_id)?; | 1525 | 0 | let v2_handle = get_expr_handle!(v2_id, v2_lexp); | 1526 | 0 | let n2 = match ctx.module.types[v2_lty.handle].inner { | 1527 | 0 | crate::TypeInner::Vector { size, .. } => size as u32, | 1528 | 0 | _ => return Err(Error::InvalidInnerType(v2_lexp.type_id)), | 1529 | | }; | 1530 | | | 1531 | 0 | self.temp_bytes.clear(); | 1532 | 0 | let mut max_component = 0; | 1533 | 0 | for _ in 5..inst.wc as usize { | 1534 | 0 | let mut index = self.next()?; | 1535 | 0 | if index == u32::MAX { | 1536 | 0 | // treat Undefined as X | 1537 | 0 | index = 0; | 1538 | 0 | } | 1539 | 0 | max_component = max_component.max(index); | 1540 | 0 | self.temp_bytes.push(index as u8); | 1541 | | } | 1542 | | | 1543 | | // Check for swizzle first. | 1544 | 0 | let expr = if max_component < n1 { | 1545 | | use crate::SwizzleComponent as Sc; | 1546 | 0 | let size = match self.temp_bytes.len() { | 1547 | 0 | 2 => crate::VectorSize::Bi, | 1548 | 0 | 3 => crate::VectorSize::Tri, | 1549 | 0 | _ => crate::VectorSize::Quad, | 1550 | | }; | 1551 | 0 | let mut pattern = [Sc::X; 4]; | 1552 | 0 | for (pat, index) in pattern.iter_mut().zip(self.temp_bytes.drain(..)) { | 1553 | 0 | *pat = match index { | 1554 | 0 | 0 => Sc::X, | 1555 | 0 | 1 => Sc::Y, | 1556 | 0 | 2 => Sc::Z, | 1557 | 0 | _ => Sc::W, | 1558 | | }; | 1559 | | } | 1560 | 0 | crate::Expression::Swizzle { | 1561 | 0 | size, | 1562 | 0 | vector: v1_handle, | 1563 | 0 | pattern, | 1564 | 0 | } | 1565 | | } else { | 1566 | | // Fall back to access + compose | 1567 | 0 | let mut components = Vec::with_capacity(self.temp_bytes.len()); | 1568 | 0 | for index in self.temp_bytes.drain(..).map(|i| i as u32) { | 1569 | 0 | let expr = if index < n1 { | 1570 | 0 | crate::Expression::AccessIndex { | 1571 | 0 | 
base: v1_handle, | 1572 | 0 | index, | 1573 | 0 | } | 1574 | 0 | } else if index < n1 + n2 { | 1575 | 0 | crate::Expression::AccessIndex { | 1576 | 0 | base: v2_handle, | 1577 | 0 | index: index - n1, | 1578 | 0 | } | 1579 | | } else { | 1580 | 0 | return Err(Error::InvalidAccessIndex(index)); | 1581 | | }; | 1582 | 0 | components.push(ctx.expressions.append(expr, span)); | 1583 | | } | 1584 | | crate::Expression::Compose { | 1585 | 0 | ty: self.lookup_type.lookup(result_type_id)?.handle, | 1586 | 0 | components, | 1587 | | } | 1588 | | }; | 1589 | | | 1590 | 0 | self.lookup_expression.insert( | 1591 | 0 | result_id, | 1592 | 0 | LookupExpression { | 1593 | 0 | handle: ctx.expressions.append(expr, span), | 1594 | 0 | type_id: result_type_id, | 1595 | 0 | block_id, | 1596 | 0 | }, | 1597 | | ); | 1598 | | } | 1599 | | Op::Bitcast | 1600 | | | Op::ConvertSToF | 1601 | | | Op::ConvertUToF | 1602 | | | Op::ConvertFToU | 1603 | | | Op::ConvertFToS | 1604 | | | Op::FConvert | 1605 | | | Op::UConvert | 1606 | | | Op::SConvert => { | 1607 | 0 | inst.expect(4)?; | 1608 | 0 | let result_type_id = self.next()?; | 1609 | 0 | let result_id = self.next()?; | 1610 | 0 | let value_id = self.next()?; | 1611 | | | 1612 | 0 | let value_lexp = self.lookup_expression.lookup(value_id)?; | 1613 | 0 | let ty_lookup = self.lookup_type.lookup(result_type_id)?; | 1614 | 0 | let scalar = match ctx.module.types[ty_lookup.handle].inner { | 1615 | 0 | crate::TypeInner::Scalar(scalar) | 1616 | 0 | | crate::TypeInner::Vector { scalar, .. } | 1617 | 0 | | crate::TypeInner::Matrix { scalar, .. 
} => scalar, | 1618 | 0 | _ => return Err(Error::InvalidAsType(ty_lookup.handle)), | 1619 | | }; | 1620 | | | 1621 | 0 | let expr = crate::Expression::As { | 1622 | 0 | expr: get_expr_handle!(value_id, value_lexp), | 1623 | 0 | kind: scalar.kind, | 1624 | 0 | convert: if scalar.kind == crate::ScalarKind::Bool { | 1625 | 0 | Some(crate::BOOL_WIDTH) | 1626 | 0 | } else if inst.op == Op::Bitcast { | 1627 | 0 | None | 1628 | | } else { | 1629 | 0 | Some(scalar.width) | 1630 | | }, | 1631 | | }; | 1632 | 0 | self.lookup_expression.insert( | 1633 | 0 | result_id, | 1634 | 0 | LookupExpression { | 1635 | 0 | handle: ctx.expressions.append(expr, span), | 1636 | 0 | type_id: result_type_id, | 1637 | 0 | block_id, | 1638 | 0 | }, | 1639 | | ); | 1640 | | } | 1641 | | Op::FunctionCall => { | 1642 | 2 | inst.expect_at_least(4)?; | 1643 | | | 1644 | 2 | let result_type_id = self.next()?; | 1645 | 2 | let result_id = self.next()?; | 1646 | 2 | let func_id = self.next()?; | 1647 | | | 1648 | 2 | let mut arguments = Vec::with_capacity(inst.wc as usize - 4); | 1649 | 2 | for _ in 0..arguments.capacity() { | 1650 | 2 | let arg_id = self.next()?; | 1651 | 2 | let lexp = self.lookup_expression.lookup(arg_id)?; | 1652 | 2 | arguments.push(get_expr_handle!(arg_id, lexp)); | 1653 | | } | 1654 | | | 1655 | 2 | block.extend(emitter.finish(ctx.expressions)); | 1656 | | | 1657 | | // We just need an unique handle here, nothing more. 
| 1658 | 2 | let function = self.add_call(ctx.function_id, func_id); | 1659 | | | 1660 | 2 | let result = if self.lookup_void_type == Some(result_type_id) { | 1661 | 0 | None | 1662 | | } else { | 1663 | 2 | let expr_handle = ctx | 1664 | 2 | .expressions | 1665 | 2 | .append(crate::Expression::CallResult(function), span); | 1666 | 2 | self.lookup_expression.insert( | 1667 | 2 | result_id, | 1668 | 2 | LookupExpression { | 1669 | 2 | handle: expr_handle, | 1670 | 2 | type_id: result_type_id, | 1671 | 2 | block_id, | 1672 | 2 | }, | 1673 | | ); | 1674 | 2 | Some(expr_handle) | 1675 | | }; | 1676 | 2 | block.push( | 1677 | 2 | crate::Statement::Call { | 1678 | 2 | function, | 1679 | 2 | arguments, | 1680 | 2 | result, | 1681 | 2 | }, | 1682 | 2 | span, | 1683 | | ); | 1684 | 2 | emitter.start(ctx.expressions); | 1685 | | } | 1686 | | Op::ExtInst => { | 1687 | | use crate::MathFunction as Mf; | 1688 | | use spirv::GLOp as Glo; | 1689 | | | 1690 | 0 | let base_wc = 5; | 1691 | 0 | inst.expect_at_least(base_wc)?; | 1692 | | | 1693 | 0 | let result_type_id = self.next()?; | 1694 | 0 | let result_id = self.next()?; | 1695 | 0 | let set_id = self.next()?; | 1696 | 0 | if Some(set_id) != self.ext_glsl_id { | 1697 | 0 | return Err(Error::UnsupportedExtInstSet(set_id)); | 1698 | 0 | } | 1699 | 0 | let inst_id = self.next()?; | 1700 | 0 | let gl_op = Glo::from_u32(inst_id).ok_or(Error::UnsupportedExtInst(inst_id))?; | 1701 | | | 1702 | 0 | let fun = match gl_op { | 1703 | 0 | Glo::Round => Mf::Round, | 1704 | 0 | Glo::RoundEven => Mf::Round, | 1705 | 0 | Glo::Trunc => Mf::Trunc, | 1706 | 0 | Glo::FAbs | Glo::SAbs => Mf::Abs, | 1707 | 0 | Glo::FSign | Glo::SSign => Mf::Sign, | 1708 | 0 | Glo::Floor => Mf::Floor, | 1709 | 0 | Glo::Ceil => Mf::Ceil, | 1710 | 0 | Glo::Fract => Mf::Fract, | 1711 | 0 | Glo::Sin => Mf::Sin, | 1712 | 0 | Glo::Cos => Mf::Cos, | 1713 | 0 | Glo::Tan => Mf::Tan, | 1714 | 0 | Glo::Asin => Mf::Asin, | 1715 | 0 | Glo::Acos => Mf::Acos, | 1716 | 0 | Glo::Atan 
=> Mf::Atan, | 1717 | 0 | Glo::Sinh => Mf::Sinh, | 1718 | 0 | Glo::Cosh => Mf::Cosh, | 1719 | 0 | Glo::Tanh => Mf::Tanh, | 1720 | 0 | Glo::Atan2 => Mf::Atan2, | 1721 | 0 | Glo::Asinh => Mf::Asinh, | 1722 | 0 | Glo::Acosh => Mf::Acosh, | 1723 | 0 | Glo::Atanh => Mf::Atanh, | 1724 | 0 | Glo::Radians => Mf::Radians, | 1725 | 0 | Glo::Degrees => Mf::Degrees, | 1726 | 0 | Glo::Pow => Mf::Pow, | 1727 | 0 | Glo::Exp => Mf::Exp, | 1728 | 0 | Glo::Log => Mf::Log, | 1729 | 0 | Glo::Exp2 => Mf::Exp2, | 1730 | 0 | Glo::Log2 => Mf::Log2, | 1731 | 0 | Glo::Sqrt => Mf::Sqrt, | 1732 | 0 | Glo::InverseSqrt => Mf::InverseSqrt, | 1733 | 0 | Glo::MatrixInverse => Mf::Inverse, | 1734 | 0 | Glo::Determinant => Mf::Determinant, | 1735 | 0 | Glo::ModfStruct => Mf::Modf, | 1736 | 0 | Glo::FMin | Glo::UMin | Glo::SMin | Glo::NMin => Mf::Min, | 1737 | 0 | Glo::FMax | Glo::UMax | Glo::SMax | Glo::NMax => Mf::Max, | 1738 | 0 | Glo::FClamp | Glo::UClamp | Glo::SClamp | Glo::NClamp => Mf::Clamp, | 1739 | 0 | Glo::FMix => Mf::Mix, | 1740 | 0 | Glo::Step => Mf::Step, | 1741 | 0 | Glo::SmoothStep => Mf::SmoothStep, | 1742 | 0 | Glo::Fma => Mf::Fma, | 1743 | 0 | Glo::FrexpStruct => Mf::Frexp, | 1744 | 0 | Glo::Ldexp => Mf::Ldexp, | 1745 | 0 | Glo::Length => Mf::Length, | 1746 | 0 | Glo::Distance => Mf::Distance, | 1747 | 0 | Glo::Cross => Mf::Cross, | 1748 | 0 | Glo::Normalize => Mf::Normalize, | 1749 | 0 | Glo::FaceForward => Mf::FaceForward, | 1750 | 0 | Glo::Reflect => Mf::Reflect, | 1751 | 0 | Glo::Refract => Mf::Refract, | 1752 | 0 | Glo::PackUnorm4x8 => Mf::Pack4x8unorm, | 1753 | 0 | Glo::PackSnorm4x8 => Mf::Pack4x8snorm, | 1754 | 0 | Glo::PackHalf2x16 => Mf::Pack2x16float, | 1755 | 0 | Glo::PackUnorm2x16 => Mf::Pack2x16unorm, | 1756 | 0 | Glo::PackSnorm2x16 => Mf::Pack2x16snorm, | 1757 | 0 | Glo::UnpackUnorm4x8 => Mf::Unpack4x8unorm, | 1758 | 0 | Glo::UnpackSnorm4x8 => Mf::Unpack4x8snorm, | 1759 | 0 | Glo::UnpackHalf2x16 => Mf::Unpack2x16float, | 1760 | 0 | Glo::UnpackUnorm2x16 => 
Mf::Unpack2x16unorm, | 1761 | 0 | Glo::UnpackSnorm2x16 => Mf::Unpack2x16snorm, | 1762 | 0 | Glo::FindILsb => Mf::FirstTrailingBit, | 1763 | 0 | Glo::FindUMsb | Glo::FindSMsb => Mf::FirstLeadingBit, | 1764 | | // TODO: https://github.com/gfx-rs/naga/issues/2526 | 1765 | 0 | Glo::Modf | Glo::Frexp => return Err(Error::UnsupportedExtInst(inst_id)), | 1766 | | Glo::IMix | 1767 | | | Glo::PackDouble2x32 | 1768 | | | Glo::UnpackDouble2x32 | 1769 | | | Glo::InterpolateAtCentroid | 1770 | | | Glo::InterpolateAtSample | 1771 | | | Glo::InterpolateAtOffset => { | 1772 | 0 | return Err(Error::UnsupportedExtInst(inst_id)) | 1773 | | } | 1774 | | }; | 1775 | | | 1776 | 0 | let arg_count = fun.argument_count(); | 1777 | 0 | inst.expect(base_wc + arg_count as u16)?; | 1778 | 0 | let arg = { | 1779 | 0 | let arg_id = self.next()?; | 1780 | 0 | let lexp = self.lookup_expression.lookup(arg_id)?; | 1781 | 0 | get_expr_handle!(arg_id, lexp) | 1782 | | }; | 1783 | 0 | let arg1 = if arg_count > 1 { | 1784 | 0 | let arg_id = self.next()?; | 1785 | 0 | let lexp = self.lookup_expression.lookup(arg_id)?; | 1786 | 0 | Some(get_expr_handle!(arg_id, lexp)) | 1787 | | } else { | 1788 | 0 | None | 1789 | | }; | 1790 | 0 | let arg2 = if arg_count > 2 { | 1791 | 0 | let arg_id = self.next()?; | 1792 | 0 | let lexp = self.lookup_expression.lookup(arg_id)?; | 1793 | 0 | Some(get_expr_handle!(arg_id, lexp)) | 1794 | | } else { | 1795 | 0 | None | 1796 | | }; | 1797 | 0 | let arg3 = if arg_count > 3 { | 1798 | 0 | let arg_id = self.next()?; | 1799 | 0 | let lexp = self.lookup_expression.lookup(arg_id)?; | 1800 | 0 | Some(get_expr_handle!(arg_id, lexp)) | 1801 | | } else { | 1802 | 0 | None | 1803 | | }; | 1804 | | | 1805 | 0 | let expr = crate::Expression::Math { | 1806 | 0 | fun, | 1807 | 0 | arg, | 1808 | 0 | arg1, | 1809 | 0 | arg2, | 1810 | 0 | arg3, | 1811 | 0 | }; | 1812 | 0 | self.lookup_expression.insert( | 1813 | 0 | result_id, | 1814 | 0 | LookupExpression { | 1815 | 0 | handle: 
ctx.expressions.append(expr, span), | 1816 | 0 | type_id: result_type_id, | 1817 | 0 | block_id, | 1818 | 0 | }, | 1819 | | ); | 1820 | | } | 1821 | | // Relational and Logical Instructions | 1822 | | Op::LogicalNot => { | 1823 | 0 | inst.expect(4)?; | 1824 | 0 | parse_expr_op!(crate::UnaryOperator::LogicalNot, UNARY)?; | 1825 | | } | 1826 | | Op::LogicalOr => { | 1827 | 0 | inst.expect(5)?; | 1828 | 0 | parse_expr_op!(crate::BinaryOperator::LogicalOr, BINARY)?; | 1829 | | } | 1830 | | Op::LogicalAnd => { | 1831 | 0 | inst.expect(5)?; | 1832 | 0 | parse_expr_op!(crate::BinaryOperator::LogicalAnd, BINARY)?; | 1833 | | } | 1834 | | Op::SGreaterThan | Op::SGreaterThanEqual | Op::SLessThan | Op::SLessThanEqual => { | 1835 | 0 | inst.expect(5)?; | 1836 | 0 | self.parse_expr_int_comparison( | 1837 | 0 | ctx, | 1838 | 0 | &mut emitter, | 1839 | 0 | &mut block, | 1840 | 0 | block_id, | 1841 | 0 | body_idx, | 1842 | 0 | map_binary_operator(inst.op)?, | 1843 | 0 | crate::ScalarKind::Sint, | 1844 | 0 | )?; | 1845 | | } | 1846 | | Op::UGreaterThan | Op::UGreaterThanEqual | Op::ULessThan | Op::ULessThanEqual => { | 1847 | 0 | inst.expect(5)?; | 1848 | 0 | self.parse_expr_int_comparison( | 1849 | 0 | ctx, | 1850 | 0 | &mut emitter, | 1851 | 0 | &mut block, | 1852 | 0 | block_id, | 1853 | 0 | body_idx, | 1854 | 0 | map_binary_operator(inst.op)?, | 1855 | 0 | crate::ScalarKind::Uint, | 1856 | 0 | )?; | 1857 | | } | 1858 | | Op::FOrdEqual | 1859 | | | Op::FUnordEqual | 1860 | | | Op::FOrdNotEqual | 1861 | | | Op::FUnordNotEqual | 1862 | | | Op::FOrdLessThan | 1863 | | | Op::FUnordLessThan | 1864 | | | Op::FOrdGreaterThan | 1865 | | | Op::FUnordGreaterThan | 1866 | | | Op::FOrdLessThanEqual | 1867 | | | Op::FUnordLessThanEqual | 1868 | | | Op::FOrdGreaterThanEqual | 1869 | | | Op::FUnordGreaterThanEqual | 1870 | | | Op::LogicalEqual | 1871 | | | Op::LogicalNotEqual => { | 1872 | 0 | inst.expect(5)?; | 1873 | 0 | let operator = map_binary_operator(inst.op)?; | 1874 | 0 | 
parse_expr_op!(operator, BINARY)?; | 1875 | | } | 1876 | | Op::Any | Op::All | Op::IsNan | Op::IsInf | Op::IsFinite | Op::IsNormal => { | 1877 | 0 | inst.expect(4)?; | 1878 | 0 | let result_type_id = self.next()?; | 1879 | 0 | let result_id = self.next()?; | 1880 | 0 | let arg_id = self.next()?; | 1881 | | | 1882 | 0 | let arg_lexp = self.lookup_expression.lookup(arg_id)?; | 1883 | 0 | let arg_handle = get_expr_handle!(arg_id, arg_lexp); | 1884 | | | 1885 | 0 | let expr = crate::Expression::Relational { | 1886 | 0 | fun: map_relational_fun(inst.op)?, | 1887 | 0 | argument: arg_handle, | 1888 | | }; | 1889 | 0 | self.lookup_expression.insert( | 1890 | 0 | result_id, | 1891 | 0 | LookupExpression { | 1892 | 0 | handle: ctx.expressions.append(expr, span), | 1893 | 0 | type_id: result_type_id, | 1894 | 0 | block_id, | 1895 | 0 | }, | 1896 | | ); | 1897 | | } | 1898 | | Op::Kill => { | 1899 | 0 | inst.expect(1)?; | 1900 | 0 | break Some(crate::Statement::Kill); | 1901 | | } | 1902 | | Op::Unreachable => { | 1903 | 0 | inst.expect(1)?; | 1904 | 0 | break None; | 1905 | | } | 1906 | | Op::Return => { | 1907 | 0 | inst.expect(1)?; | 1908 | 0 | break Some(crate::Statement::Return { value: None }); | 1909 | | } | 1910 | | Op::ReturnValue => { | 1911 | 0 | inst.expect(2)?; | 1912 | 0 | let value_id = self.next()?; | 1913 | 0 | let value_lexp = self.lookup_expression.lookup(value_id)?; | 1914 | 0 | let value_handle = get_expr_handle!(value_id, value_lexp); | 1915 | 0 | break Some(crate::Statement::Return { | 1916 | 0 | value: Some(value_handle), | 1917 | 0 | }); | 1918 | | } | 1919 | | Op::Branch => { | 1920 | 0 | inst.expect(2)?; | 1921 | 0 | let target_id = self.next()?; | 1922 | | | 1923 | | // If this is a branch to a merge or continue block, then | 1924 | | // that ends the current body. | 1925 | | // | 1926 | | // Why can we count on finding an entry here when it's | 1927 | | // needed? 
SPIR-V requires dominators to appear before | 1928 | | // blocks they dominate, so we will have visited a | 1929 | | // structured control construct's header block before | 1930 | | // anything that could exit it. | 1931 | 0 | if let Some(info) = ctx.mergers.get(&target_id) { | 1932 | 0 | block.extend(emitter.finish(ctx.expressions)); | 1933 | 0 | ctx.blocks.insert(block_id, block); | 1934 | 0 | let body = &mut ctx.bodies[body_idx]; | 1935 | 0 | body.data.push(BodyFragment::BlockId(block_id)); | 1936 | | | 1937 | 0 | merger(body, info); | 1938 | | | 1939 | 0 | return Ok(()); | 1940 | 0 | } | 1941 | | | 1942 | | // If `target_id` has no entry in `ctx.body_for_label`, then | 1943 | | // this must be the only branch to it: | 1944 | | // | 1945 | | // - We've already established that it's not anybody's merge | 1946 | | // block. | 1947 | | // | 1948 | | // - It can't be a switch case. Only switch header blocks | 1949 | | // and other switch cases can branch to a switch case. | 1950 | | // Switch header blocks must dominate all their cases, so | 1951 | | // they must appear in the file before them, and when we | 1952 | | // see `Op::Switch` we populate `ctx.body_for_label` for | 1953 | | // every switch case. | 1954 | | // | 1955 | | // Thus, `target_id` must be a simple extension of the | 1956 | | // current block, which we dominate, so we know we'll | 1957 | | // encounter it later in the file. | 1958 | 0 | ctx.body_for_label.entry(target_id).or_insert(body_idx); | 1959 | | | 1960 | 0 | break None; | 1961 | | } | 1962 | | Op::BranchConditional => { | 1963 | 0 | inst.expect_at_least(4)?; | 1964 | | | 1965 | 0 | let condition = { | 1966 | 0 | let condition_id = self.next()?; | 1967 | 0 | let lexp = self.lookup_expression.lookup(condition_id)?; | 1968 | 0 | get_expr_handle!(condition_id, lexp) | 1969 | | }; | 1970 | | | 1971 | | // HACK(eddyb) Naga doesn't seem to have this helper, | 1972 | | // so it's declared on the fly here for convenience. 
| 1973 | | #[derive(Copy, Clone)] | 1974 | | struct BranchTarget { | 1975 | | label_id: spirv::Word, | 1976 | | merge_info: Option<MergeBlockInformation>, | 1977 | | } | 1978 | 0 | let branch_target = |label_id| BranchTarget { | 1979 | | label_id, | 1980 | | merge_info: ctx.mergers.get(&label_id).copied(), | 1981 | | }; | 1982 | | | 1983 | 0 | let true_target = branch_target(self.next()?); | 1984 | 0 | let false_target = branch_target(self.next()?); | 1985 | | | 1986 | | // Consume branch weights | 1987 | 0 | for _ in 4..inst.wc { | 1988 | 0 | let _ = self.next()?; | 1989 | | } | 1990 | | | 1991 | | // Handle `OpBranchConditional`s used at the end of a loop | 1992 | | // body's "continuing" section as a "conditional backedge", | 1993 | | // i.e. a `do`-`while` condition, or `break if` in WGSL. | 1994 | | | 1995 | | // HACK(eddyb) this has to go to the parent *twice*, because | 1996 | | // `OpLoopMerge` left the "continuing" section nested in the | 1997 | | // loop body in terms of `parent`, but not `BodyFragment`. | 1998 | 0 | let parent_body_idx = ctx.bodies[body_idx].parent; | 1999 | 0 | let parent_parent_body_idx = ctx.bodies[parent_body_idx].parent; | 2000 | 0 | match ctx.bodies[parent_parent_body_idx].data[..] { | 2001 | | // The `OpLoopMerge`'s `continuing` block and the loop's | 2002 | | // backedge block may not be the same, but they'll both | 2003 | | // belong to the same body. | 2004 | | [.., BodyFragment::Loop { | 2005 | 0 | body: loop_body_idx, | 2006 | 0 | continuing: loop_continuing_idx, | 2007 | 0 | break_if: ref mut break_if_slot @ None, | 2008 | 0 | }] if body_idx == loop_continuing_idx => { | 2009 | | // Try both orderings of break-vs-backedge, because | 2010 | | // SPIR-V is symmetrical here, unlike WGSL `break if`. 
| 2011 | 0 | let break_if_cond = [true, false].into_iter().find_map(|true_breaks| { | 2012 | | let (break_candidate, backedge_candidate) = if true_breaks { | 2013 | | (true_target, false_target) | 2014 | | } else { | 2015 | | (false_target, true_target) | 2016 | | }; | 2017 | | | 2018 | | if break_candidate.merge_info | 2019 | | != Some(MergeBlockInformation::LoopMerge) | 2020 | | { | 2021 | | return None; | 2022 | | } | 2023 | | | 2024 | | // HACK(eddyb) since Naga doesn't explicitly track | 2025 | | // backedges, this is checking for the outcome of | 2026 | | // `OpLoopMerge` below (even if it looks weird). | 2027 | | let backedge_candidate_is_backedge = | 2028 | | backedge_candidate.merge_info.is_none() | 2029 | | && ctx.body_for_label.get(&backedge_candidate.label_id) | 2030 | | == Some(&loop_body_idx); | 2031 | | if !backedge_candidate_is_backedge { | 2032 | | return None; | 2033 | | } | 2034 | | | 2035 | | Some(if true_breaks { | 2036 | | condition | 2037 | | } else { | 2038 | | ctx.expressions.append( | 2039 | | crate::Expression::Unary { | 2040 | | op: crate::UnaryOperator::LogicalNot, | 2041 | | expr: condition, | 2042 | | }, | 2043 | | span, | 2044 | | ) | 2045 | | }) | 2046 | | }); | 2047 | | | 2048 | 0 | if let Some(break_if_cond) = break_if_cond { | 2049 | 0 | *break_if_slot = Some(break_if_cond); | 2050 | | | 2051 | | // This `OpBranchConditional` ends the "continuing" | 2052 | | // section of the loop body as normal, with the | 2053 | | // `break if` condition having been stashed above. 
| 2054 | 0 | break None; | 2055 | 0 | } | 2056 | | } | 2057 | 0 | _ => {} | 2058 | | } | 2059 | | | 2060 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2061 | 0 | ctx.blocks.insert(block_id, block); | 2062 | 0 | let body = &mut ctx.bodies[body_idx]; | 2063 | 0 | body.data.push(BodyFragment::BlockId(block_id)); | 2064 | | | 2065 | 0 | let same_target = true_target.label_id == false_target.label_id; | 2066 | | | 2067 | | // Start a body block for the `accept` branch. | 2068 | 0 | let accept = ctx.bodies.len(); | 2069 | 0 | let mut accept_block = Body::with_parent(body_idx); | 2070 | | | 2071 | | // If the `OpBranchConditional` target is somebody else's | 2072 | | // merge or continue block, then put a `Break` or `Continue` | 2073 | | // statement in this new body block. | 2074 | 0 | if let Some(info) = true_target.merge_info { | 2075 | 0 | merger( | 2076 | 0 | match same_target { | 2077 | 0 | true => &mut ctx.bodies[body_idx], | 2078 | 0 | false => &mut accept_block, | 2079 | | }, | 2080 | 0 | &info, | 2081 | | ) | 2082 | | } else { | 2083 | | // Note the body index for the block we're branching to. | 2084 | 0 | let prev = ctx.body_for_label.insert( | 2085 | 0 | true_target.label_id, | 2086 | 0 | match same_target { | 2087 | 0 | true => body_idx, | 2088 | 0 | false => accept, | 2089 | | }, | 2090 | | ); | 2091 | 0 | debug_assert!(prev.is_none()); | 2092 | | } | 2093 | | | 2094 | 0 | if same_target { | 2095 | 0 | return Ok(()); | 2096 | 0 | } | 2097 | | | 2098 | 0 | ctx.bodies.push(accept_block); | 2099 | | | 2100 | | // Handle the `reject` branch just like the `accept` block. 
| 2101 | 0 | let reject = ctx.bodies.len(); | 2102 | 0 | let mut reject_block = Body::with_parent(body_idx); | 2103 | | | 2104 | 0 | if let Some(info) = false_target.merge_info { | 2105 | 0 | merger(&mut reject_block, &info) | 2106 | | } else { | 2107 | 0 | let prev = ctx.body_for_label.insert(false_target.label_id, reject); | 2108 | 0 | debug_assert!(prev.is_none()); | 2109 | | } | 2110 | | | 2111 | 0 | ctx.bodies.push(reject_block); | 2112 | | | 2113 | 0 | let body = &mut ctx.bodies[body_idx]; | 2114 | 0 | body.data.push(BodyFragment::If { | 2115 | 0 | condition, | 2116 | 0 | accept, | 2117 | 0 | reject, | 2118 | 0 | }); | 2119 | | | 2120 | 0 | return Ok(()); | 2121 | | } | 2122 | | Op::Switch => { | 2123 | 0 | inst.expect_at_least(3)?; | 2124 | 0 | let selector = self.next()?; | 2125 | 0 | let default_id = self.next()?; | 2126 | | | 2127 | | // If the previous instruction was a `OpSelectionMerge` then we must | 2128 | | // promote the `MergeBlockInformation` to a `SwitchMerge` | 2129 | 0 | if let Some(merge) = selection_merge_block { | 2130 | 0 | ctx.mergers | 2131 | 0 | .insert(merge, MergeBlockInformation::SwitchMerge); | 2132 | 0 | } | 2133 | | | 2134 | 0 | let default = ctx.bodies.len(); | 2135 | 0 | ctx.bodies.push(Body::with_parent(body_idx)); | 2136 | 0 | ctx.body_for_label.entry(default_id).or_insert(default); | 2137 | | | 2138 | 0 | let selector_lexp = &self.lookup_expression[&selector]; | 2139 | 0 | let selector_lty = self.lookup_type.lookup(selector_lexp.type_id)?; | 2140 | 0 | let selector_handle = get_expr_handle!(selector, selector_lexp); | 2141 | 0 | let selector = match ctx.module.types[selector_lty.handle].inner { | 2142 | | crate::TypeInner::Scalar(crate::Scalar { | 2143 | | kind: crate::ScalarKind::Uint, | 2144 | | width: _, | 2145 | | }) => { | 2146 | | // IR expects a signed integer, so do a bitcast | 2147 | 0 | ctx.expressions.append( | 2148 | 0 | crate::Expression::As { | 2149 | 0 | kind: crate::ScalarKind::Sint, | 2150 | 0 | expr: 
selector_handle, | 2151 | 0 | convert: None, | 2152 | 0 | }, | 2153 | 0 | span, | 2154 | | ) | 2155 | | } | 2156 | | crate::TypeInner::Scalar(crate::Scalar { | 2157 | | kind: crate::ScalarKind::Sint, | 2158 | | width: _, | 2159 | 0 | }) => selector_handle, | 2160 | 0 | ref other => unimplemented!("Unexpected selector {:?}", other), | 2161 | | }; | 2162 | | | 2163 | | // Clear past switch cases to prevent them from entering this one | 2164 | 0 | self.switch_cases.clear(); | 2165 | | | 2166 | 0 | for _ in 0..(inst.wc - 3) / 2 { | 2167 | 0 | let literal = self.next()?; | 2168 | 0 | let target = self.next()?; | 2169 | | | 2170 | 0 | let case_body_idx = ctx.bodies.len(); | 2171 | | | 2172 | | // Check if any previous case already used this target block id, if so | 2173 | | // group them together to reorder them later so that no weird | 2174 | | // fallthrough cases happen. | 2175 | 0 | if let Some(&mut (_, ref mut literals)) = self.switch_cases.get_mut(&target) | 2176 | | { | 2177 | 0 | literals.push(literal as i32); | 2178 | 0 | continue; | 2179 | 0 | } | 2180 | | | 2181 | 0 | let mut body = Body::with_parent(body_idx); | 2182 | | | 2183 | 0 | if let Some(info) = ctx.mergers.get(&target) { | 2184 | 0 | merger(&mut body, info); | 2185 | 0 | } | 2186 | | | 2187 | 0 | ctx.bodies.push(body); | 2188 | 0 | ctx.body_for_label.entry(target).or_insert(case_body_idx); | 2189 | | | 2190 | | // Register this target block id as already having been processed and | 2191 | | // the respective body index assigned and the first case value | 2192 | 0 | self.switch_cases | 2193 | 0 | .insert(target, (case_body_idx, vec![literal as i32])); | 2194 | | } | 2195 | | | 2196 | | // Loop through the collected target blocks creating a new case for each | 2197 | | // literal pointing to it, only one case will have the true body and all the | 2198 | | // others will be empty fallthrough so that they all execute the same body | 2199 | | // without duplicating code. 
| 2200 | | // | 2201 | | // Since `switch_cases` is an indexmap the order of insertion is preserved | 2202 | | // this is needed because spir-v defines fallthrough order in the switch | 2203 | | // instruction. | 2204 | 0 | let mut cases = Vec::with_capacity((inst.wc as usize - 3) / 2); | 2205 | 0 | for &(case_body_idx, ref literals) in self.switch_cases.values() { | 2206 | 0 | let value = literals[0]; | 2207 | | | 2208 | 0 | for &literal in literals.iter().skip(1) { | 2209 | 0 | let empty_body_idx = ctx.bodies.len(); | 2210 | 0 | let body = Body::with_parent(body_idx); | 2211 | 0 |
| 2212 | 0 | ctx.bodies.push(body); | 2213 | 0 |
| 2214 | 0 | cases.push((literal, empty_body_idx)); | 2215 | 0 | } | 2216 | | | 2217 | 0 | cases.push((value, case_body_idx)); | 2218 | | } | 2219 | | | 2220 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2221 | | | 2222 | 0 | let body = &mut ctx.bodies[body_idx]; | 2223 | 0 | ctx.blocks.insert(block_id, block); | 2224 | | // Make sure the vector has space for at least two more allocations | 2225 | 0 | body.data.reserve(2); | 2226 | 0 | body.data.push(BodyFragment::BlockId(block_id)); | 2227 | 0 | body.data.push(BodyFragment::Switch { | 2228 | 0 | selector, | 2229 | 0 | cases, | 2230 | 0 | default, | 2231 | 0 | }); | 2232 | | | 2233 | 0 | return Ok(()); | 2234 | | } | 2235 | | Op::SelectionMerge => { | 2236 | 0 | inst.expect(3)?; | 2237 | 0 | let merge_block_id = self.next()?; | 2238 | | // TODO: Selection Control Mask | 2239 | 0 | let _selection_control = self.next()?; | 2240 | | | 2241 | | // Indicate that the merge block is a continuation of the | 2242 | | // current `Body`. | 2243 | 0 | ctx.body_for_label.entry(merge_block_id).or_insert(body_idx); | 2244 | | | 2245 | | // Let subsequent branches to the merge block know that | 2246 | | // they've reached the end of the selection construct. | 2247 | 0 | ctx.mergers | 2248 | 0 | .insert(merge_block_id, MergeBlockInformation::SelectionMerge); | 2249 | | | 2250 | 0 | selection_merge_block = Some(merge_block_id); | 2251 | | } | 2252 | | Op::LoopMerge => { | 2253 | 0 | inst.expect_at_least(4)?; | 2254 | 0 | let merge_block_id = self.next()?; | 2255 | 0 | let continuing = self.next()?; | 2256 | | | 2257 | | // TODO: Loop Control Parameters | 2258 | 0 | for _ in 0..inst.wc - 3 { | 2259 | 0 | self.next()?; | 2260 | | } | 2261 | | | 2262 | | // Indicate that the merge block is a continuation of the | 2263 | | // current `Body`. | 2264 | 0 | ctx.body_for_label.entry(merge_block_id).or_insert(body_idx); | 2265 | | // Let subsequent branches to the merge block know that | 2266 | | // they're `Break` statements. 
| 2267 | 0 | ctx.mergers | 2268 | 0 | .insert(merge_block_id, MergeBlockInformation::LoopMerge); | 2269 | | | 2270 | 0 | let loop_body_idx = ctx.bodies.len(); | 2271 | 0 | ctx.bodies.push(Body::with_parent(body_idx)); | 2272 | | | 2273 | 0 | let continue_idx = ctx.bodies.len(); | 2274 | | // The continue block inherits the scope of the loop body | 2275 | 0 | ctx.bodies.push(Body::with_parent(loop_body_idx)); | 2276 | 0 | ctx.body_for_label.entry(continuing).or_insert(continue_idx); | 2277 | | // Let subsequent branches to the continue block know that | 2278 | | // they're `Continue` statements. | 2279 | 0 | ctx.mergers | 2280 | 0 | .insert(continuing, MergeBlockInformation::LoopContinue); | 2281 | | | 2282 | | // The loop header always belongs to the loop body | 2283 | 0 | ctx.body_for_label.insert(block_id, loop_body_idx); | 2284 | | | 2285 | 0 | let parent_body = &mut ctx.bodies[body_idx]; | 2286 | 0 | parent_body.data.push(BodyFragment::Loop { | 2287 | 0 | body: loop_body_idx, | 2288 | 0 | continuing: continue_idx, | 2289 | 0 | break_if: None, | 2290 | 0 | }); | 2291 | 0 | body_idx = loop_body_idx; | 2292 | | } | 2293 | | Op::DPdxCoarse => { | 2294 | 0 | parse_expr_op!( | 2295 | 0 | crate::DerivativeAxis::X, | 2296 | 0 | crate::DerivativeControl::Coarse, | 2297 | | DERIVATIVE | 2298 | 0 | )?; | 2299 | | } | 2300 | | Op::DPdyCoarse => { | 2301 | 0 | parse_expr_op!( | 2302 | 0 | crate::DerivativeAxis::Y, | 2303 | 0 | crate::DerivativeControl::Coarse, | 2304 | | DERIVATIVE | 2305 | 0 | )?; | 2306 | | } | 2307 | | Op::FwidthCoarse => { | 2308 | 0 | parse_expr_op!( | 2309 | 0 | crate::DerivativeAxis::Width, | 2310 | 0 | crate::DerivativeControl::Coarse, | 2311 | | DERIVATIVE | 2312 | 0 | )?; | 2313 | | } | 2314 | | Op::DPdxFine => { | 2315 | 0 | parse_expr_op!( | 2316 | 0 | crate::DerivativeAxis::X, | 2317 | 0 | crate::DerivativeControl::Fine, | 2318 | | DERIVATIVE | 2319 | 0 | )?; | 2320 | | } | 2321 | | Op::DPdyFine => { | 2322 | 0 | parse_expr_op!( | 2323 | 0 | 
crate::DerivativeAxis::Y, | 2324 | 0 | crate::DerivativeControl::Fine, | 2325 | | DERIVATIVE | 2326 | 0 | )?; | 2327 | | } | 2328 | | Op::FwidthFine => { | 2329 | 0 | parse_expr_op!( | 2330 | 0 | crate::DerivativeAxis::Width, | 2331 | 0 | crate::DerivativeControl::Fine, | 2332 | | DERIVATIVE | 2333 | 0 | )?; | 2334 | | } | 2335 | | Op::DPdx => { | 2336 | 0 | parse_expr_op!( | 2337 | 0 | crate::DerivativeAxis::X, | 2338 | 0 | crate::DerivativeControl::None, | 2339 | | DERIVATIVE | 2340 | 0 | )?; | 2341 | | } | 2342 | | Op::DPdy => { | 2343 | 0 | parse_expr_op!( | 2344 | 0 | crate::DerivativeAxis::Y, | 2345 | 0 | crate::DerivativeControl::None, | 2346 | | DERIVATIVE | 2347 | 0 | )?; | 2348 | | } | 2349 | | Op::Fwidth => { | 2350 | 0 | parse_expr_op!( | 2351 | 0 | crate::DerivativeAxis::Width, | 2352 | 0 | crate::DerivativeControl::None, | 2353 | | DERIVATIVE | 2354 | 0 | )?; | 2355 | | } | 2356 | | Op::ArrayLength => { | 2357 | 0 | inst.expect(5)?; | 2358 | 0 | let result_type_id = self.next()?; | 2359 | 0 | let result_id = self.next()?; | 2360 | 0 | let structure_id = self.next()?; | 2361 | 0 | let member_index = self.next()?; | 2362 | | | 2363 | | // We're assuming that the validation pass, if it's run, will catch if the | 2364 | | // wrong types or parameters are supplied here. 
| 2365 | | | 2366 | 0 | let structure_ptr = self.lookup_expression.lookup(structure_id)?; | 2367 | 0 | let structure_handle = get_expr_handle!(structure_id, structure_ptr); | 2368 | | | 2369 | 0 | let member_ptr = ctx.expressions.append( | 2370 | 0 | crate::Expression::AccessIndex { | 2371 | 0 | base: structure_handle, | 2372 | 0 | index: member_index, | 2373 | 0 | }, | 2374 | 0 | span, | 2375 | | ); | 2376 | | | 2377 | 0 | let length = ctx | 2378 | 0 | .expressions | 2379 | 0 | .append(crate::Expression::ArrayLength(member_ptr), span); | 2380 | | | 2381 | 0 | self.lookup_expression.insert( | 2382 | 0 | result_id, | 2383 | 0 | LookupExpression { | 2384 | 0 | handle: length, | 2385 | 0 | type_id: result_type_id, | 2386 | 0 | block_id, | 2387 | 0 | }, | 2388 | | ); | 2389 | | } | 2390 | | Op::CopyMemory => { | 2391 | 0 | inst.expect_at_least(3)?; | 2392 | 0 | let target_id = self.next()?; | 2393 | 0 | let source_id = self.next()?; | 2394 | 0 | let _memory_access = if inst.wc != 3 { | 2395 | 0 | inst.expect(4)?; | 2396 | 0 | spirv::MemoryAccess::from_bits(self.next()?) | 2397 | 0 | .ok_or(Error::InvalidParameter(Op::CopyMemory))? | 2398 | | } else { | 2399 | 0 | spirv::MemoryAccess::NONE | 2400 | | }; | 2401 | | | 2402 | | // TODO: check if the source and target types are the same? | 2403 | 0 | let target = self.lookup_expression.lookup(target_id)?; | 2404 | 0 | let target_handle = get_expr_handle!(target_id, target); | 2405 | 0 | let source = self.lookup_expression.lookup(source_id)?; | 2406 | 0 | let source_handle = get_expr_handle!(source_id, source); | 2407 | | | 2408 | | // This operation is practically the same as loading and then storing, I think. 
| 2409 | 0 | let value_expr = ctx.expressions.append( | 2410 | 0 | crate::Expression::Load { | 2411 | 0 | pointer: source_handle, | 2412 | 0 | }, | 2413 | 0 | span, | 2414 | | ); | 2415 | | | 2416 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2417 | 0 | block.push( | 2418 | 0 | crate::Statement::Store { | 2419 | 0 | pointer: target_handle, | 2420 | 0 | value: value_expr, | 2421 | 0 | }, | 2422 | 0 | span, | 2423 | | ); | 2424 | | | 2425 | 0 | emitter.start(ctx.expressions); | 2426 | | } | 2427 | | Op::ControlBarrier => { | 2428 | 0 | inst.expect(4)?; | 2429 | 0 | let exec_scope_id = self.next()?; | 2430 | 0 | let _mem_scope_raw = self.next()?; | 2431 | 0 | let semantics_id = self.next()?; | 2432 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; | 2433 | 0 | let semantics_const = self.lookup_constant.lookup(semantics_id)?; | 2434 | | | 2435 | 0 | let exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) | 2436 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; | 2437 | 0 | let semantics = resolve_constant(ctx.gctx(), &semantics_const.inner) | 2438 | 0 | .ok_or(Error::InvalidBarrierMemorySemantics(semantics_id))?; | 2439 | | | 2440 | 0 | if exec_scope == spirv::Scope::Workgroup as u32 | 2441 | 0 | || exec_scope == spirv::Scope::Subgroup as u32 | 2442 | 0 | { | 2443 | 0 | let mut flags = crate::Barrier::empty(); | 2444 | 0 | flags.set( | 2445 | 0 | crate::Barrier::STORAGE, | 2446 | 0 | semantics & spirv::MemorySemantics::UNIFORM_MEMORY.bits() != 0, | 2447 | 0 | ); | 2448 | 0 | flags.set( | 2449 | 0 | crate::Barrier::WORK_GROUP, | 2450 | 0 | semantics & (spirv::MemorySemantics::WORKGROUP_MEMORY).bits() != 0, | 2451 | 0 | ); | 2452 | 0 | flags.set( | 2453 | 0 | crate::Barrier::SUB_GROUP, | 2454 | 0 | semantics & spirv::MemorySemantics::SUBGROUP_MEMORY.bits() != 0, | 2455 | 0 | ); | 2456 | 0 | flags.set( | 2457 | 0 | crate::Barrier::TEXTURE, | 2458 | 0 | semantics & spirv::MemorySemantics::IMAGE_MEMORY.bits() != 0, | 
2459 | 0 | ); | 2460 | 0 |
| 2461 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2462 | 0 | block.push(crate::Statement::ControlBarrier(flags), span); | 2463 | 0 | emitter.start(ctx.expressions); | 2464 | 0 | } else { | 2465 | 0 | log::warn!("Unsupported barrier execution scope: {exec_scope}"); | 2466 | | } | 2467 | | } | 2468 | | Op::MemoryBarrier => { | 2469 | 0 | inst.expect(3)?; | 2470 | 0 | let mem_scope_id = self.next()?; | 2471 | 0 | let semantics_id = self.next()?; | 2472 | 0 | let mem_scope_const = self.lookup_constant.lookup(mem_scope_id)?; | 2473 | 0 | let semantics_const = self.lookup_constant.lookup(semantics_id)?; | 2474 | | | 2475 | 0 | let mem_scope = resolve_constant(ctx.gctx(), &mem_scope_const.inner) | 2476 | 0 | .ok_or(Error::InvalidBarrierScope(mem_scope_id))?; | 2477 | 0 | let semantics = resolve_constant(ctx.gctx(), &semantics_const.inner) | 2478 | 0 | .ok_or(Error::InvalidBarrierMemorySemantics(semantics_id))?; | 2479 | | | 2480 | 0 | let mut flags = if mem_scope == spirv::Scope::Device as u32 { | 2481 | 0 | crate::Barrier::STORAGE | 2482 | 0 | } else if mem_scope == spirv::Scope::Workgroup as u32 { | 2483 | 0 | crate::Barrier::WORK_GROUP | 2484 | 0 | } else if mem_scope == spirv::Scope::Subgroup as u32 { | 2485 | 0 | crate::Barrier::SUB_GROUP | 2486 | | } else { | 2487 | 0 | crate::Barrier::empty() | 2488 | | }; | 2489 | 0 | flags.set( | 2490 | | crate::Barrier::STORAGE, | 2491 | 0 | semantics & spirv::MemorySemantics::UNIFORM_MEMORY.bits() != 0, | 2492 | | ); | 2493 | 0 | flags.set( | 2494 | | crate::Barrier::WORK_GROUP, | 2495 | 0 | semantics & (spirv::MemorySemantics::WORKGROUP_MEMORY).bits() != 0, | 2496 | | ); | 2497 | 0 | flags.set( | 2498 | | crate::Barrier::SUB_GROUP, | 2499 | 0 | semantics & spirv::MemorySemantics::SUBGROUP_MEMORY.bits() != 0, | 2500 | | ); | 2501 | 0 | flags.set( | 2502 | | crate::Barrier::TEXTURE, | 2503 | 0 | semantics & spirv::MemorySemantics::IMAGE_MEMORY.bits() != 0, | 2504 | | ); | 2505 | | | 2506 | 0 | 
block.extend(emitter.finish(ctx.expressions)); | 2507 | 0 | block.push(crate::Statement::MemoryBarrier(flags), span); | 2508 | 0 | emitter.start(ctx.expressions); | 2509 | | } | 2510 | | Op::CopyObject => { | 2511 | 0 | inst.expect(4)?; | 2512 | 0 | let result_type_id = self.next()?; | 2513 | 0 | let result_id = self.next()?; | 2514 | 0 | let operand_id = self.next()?; | 2515 | | | 2516 | 0 | let lookup = self.lookup_expression.lookup(operand_id)?; | 2517 | 0 | let handle = get_expr_handle!(operand_id, lookup); | 2518 | | | 2519 | 0 | self.lookup_expression.insert( | 2520 | 0 | result_id, | 2521 | 0 | LookupExpression { | 2522 | 0 | handle, | 2523 | 0 | type_id: result_type_id, | 2524 | 0 | block_id, | 2525 | 0 | }, | 2526 | | ); | 2527 | | } | 2528 | | Op::GroupNonUniformBallot => { | 2529 | 0 | inst.expect(5)?; | 2530 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2531 | 0 | let result_type_id = self.next()?; | 2532 | 0 | let result_id = self.next()?; | 2533 | 0 | let exec_scope_id = self.next()?; | 2534 | 0 | let predicate_id = self.next()?; | 2535 | | | 2536 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; | 2537 | 0 | let _exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) | 2538 | 0 | .filter(|exec_scope| *exec_scope == spirv::Scope::Subgroup as u32) | 2539 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; | 2540 | | | 2541 | 0 | let predicate = if self | 2542 | 0 | .lookup_constant | 2543 | 0 | .lookup(predicate_id) | 2544 | 0 | .ok() | 2545 | 0 | .filter(|predicate_const| match predicate_const.inner { | 2546 | | Constant::Constant(constant) => matches!( | 2547 | | ctx.gctx().global_expressions[ctx.gctx().constants[constant].init], | 2548 | | crate::Expression::Literal(crate::Literal::Bool(true)), | 2549 | | ), | 2550 | | Constant::Override(_) => false, | 2551 | | }) | 2552 | 0 | .is_some() | 2553 | | { | 2554 | 0 | None | 2555 | | } else { | 2556 | 0 | let predicate_lookup = 
self.lookup_expression.lookup(predicate_id)?; | 2557 | 0 | let predicate_handle = get_expr_handle!(predicate_id, predicate_lookup); | 2558 | 0 | Some(predicate_handle) | 2559 | | }; | 2560 | | | 2561 | 0 | let result_handle = ctx | 2562 | 0 | .expressions | 2563 | 0 | .append(crate::Expression::SubgroupBallotResult, span); | 2564 | 0 | self.lookup_expression.insert( | 2565 | 0 | result_id, | 2566 | 0 | LookupExpression { | 2567 | 0 | handle: result_handle, | 2568 | 0 | type_id: result_type_id, | 2569 | 0 | block_id, | 2570 | 0 | }, | 2571 | | ); | 2572 | | | 2573 | 0 | block.push( | 2574 | 0 | crate::Statement::SubgroupBallot { | 2575 | 0 | result: result_handle, | 2576 | 0 | predicate, | 2577 | 0 | }, | 2578 | 0 | span, | 2579 | | ); | 2580 | 0 | emitter.start(ctx.expressions); | 2581 | | } | 2582 | | Op::GroupNonUniformAll | 2583 | | | Op::GroupNonUniformAny | 2584 | | | Op::GroupNonUniformIAdd | 2585 | | | Op::GroupNonUniformFAdd | 2586 | | | Op::GroupNonUniformIMul | 2587 | | | Op::GroupNonUniformFMul | 2588 | | | Op::GroupNonUniformSMax | 2589 | | | Op::GroupNonUniformUMax | 2590 | | | Op::GroupNonUniformFMax | 2591 | | | Op::GroupNonUniformSMin | 2592 | | | Op::GroupNonUniformUMin | 2593 | | | Op::GroupNonUniformFMin | 2594 | | | Op::GroupNonUniformBitwiseAnd | 2595 | | | Op::GroupNonUniformBitwiseOr | 2596 | | | Op::GroupNonUniformBitwiseXor | 2597 | | | Op::GroupNonUniformLogicalAnd | 2598 | | | Op::GroupNonUniformLogicalOr | 2599 | | | Op::GroupNonUniformLogicalXor => { | 2600 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2601 | 0 | inst.expect( | 2602 | 0 | if matches!(inst.op, Op::GroupNonUniformAll | Op::GroupNonUniformAny) { | 2603 | 0 | 5 | 2604 | | } else { | 2605 | 0 | 6 | 2606 | | }, | 2607 | 0 | )?; | 2608 | 0 | let result_type_id = self.next()?; | 2609 | 0 | let result_id = self.next()?; | 2610 | 0 | let exec_scope_id = self.next()?; | 2611 | 0 | let collective_op_id = match inst.op { | 2612 | | Op::GroupNonUniformAll | 
Op::GroupNonUniformAny => { | 2613 | 0 | crate::CollectiveOperation::Reduce | 2614 | | } | 2615 | | _ => { | 2616 | 0 | let group_op_id = self.next()?; | 2617 | 0 | match spirv::GroupOperation::from_u32(group_op_id) { | 2618 | | Some(spirv::GroupOperation::Reduce) => { | 2619 | 0 | crate::CollectiveOperation::Reduce | 2620 | | } | 2621 | | Some(spirv::GroupOperation::InclusiveScan) => { | 2622 | 0 | crate::CollectiveOperation::InclusiveScan | 2623 | | } | 2624 | | Some(spirv::GroupOperation::ExclusiveScan) => { | 2625 | 0 | crate::CollectiveOperation::ExclusiveScan | 2626 | | } | 2627 | 0 | _ => return Err(Error::UnsupportedGroupOperation(group_op_id)), | 2628 | | } | 2629 | | } | 2630 | | }; | 2631 | 0 | let argument_id = self.next()?; | 2632 | | | 2633 | 0 | let argument_lookup = self.lookup_expression.lookup(argument_id)?; | 2634 | 0 | let argument_handle = get_expr_handle!(argument_id, argument_lookup); | 2635 | | | 2636 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; | 2637 | 0 | let _exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) | 2638 | 0 | .filter(|exec_scope| *exec_scope == spirv::Scope::Subgroup as u32) | 2639 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; | 2640 | | | 2641 | 0 | let op_id = match inst.op { | 2642 | 0 | Op::GroupNonUniformAll => crate::SubgroupOperation::All, | 2643 | 0 | Op::GroupNonUniformAny => crate::SubgroupOperation::Any, | 2644 | | Op::GroupNonUniformIAdd | Op::GroupNonUniformFAdd => { | 2645 | 0 | crate::SubgroupOperation::Add | 2646 | | } | 2647 | | Op::GroupNonUniformIMul | Op::GroupNonUniformFMul => { | 2648 | 0 | crate::SubgroupOperation::Mul | 2649 | | } | 2650 | | Op::GroupNonUniformSMax | 2651 | | | Op::GroupNonUniformUMax | 2652 | 0 | | Op::GroupNonUniformFMax => crate::SubgroupOperation::Max, | 2653 | | Op::GroupNonUniformSMin | 2654 | | | Op::GroupNonUniformUMin | 2655 | 0 | | Op::GroupNonUniformFMin => crate::SubgroupOperation::Min, | 2656 | | 
Op::GroupNonUniformBitwiseAnd | Op::GroupNonUniformLogicalAnd => { | 2657 | 0 | crate::SubgroupOperation::And | 2658 | | } | 2659 | | Op::GroupNonUniformBitwiseOr | Op::GroupNonUniformLogicalOr => { | 2660 | 0 | crate::SubgroupOperation::Or | 2661 | | } | 2662 | | Op::GroupNonUniformBitwiseXor | Op::GroupNonUniformLogicalXor => { | 2663 | 0 | crate::SubgroupOperation::Xor | 2664 | | } | 2665 | 0 | _ => unreachable!(), | 2666 | | }; | 2667 | | | 2668 | 0 | let result_type = self.lookup_type.lookup(result_type_id)?; | 2669 | | | 2670 | 0 | let result_handle = ctx.expressions.append( | 2671 | 0 | crate::Expression::SubgroupOperationResult { | 2672 | 0 | ty: result_type.handle, | 2673 | 0 | }, | 2674 | 0 | span, | 2675 | | ); | 2676 | 0 | self.lookup_expression.insert( | 2677 | 0 | result_id, | 2678 | 0 | LookupExpression { | 2679 | 0 | handle: result_handle, | 2680 | 0 | type_id: result_type_id, | 2681 | 0 | block_id, | 2682 | 0 | }, | 2683 | | ); | 2684 | | | 2685 | 0 | block.push( | 2686 | 0 | crate::Statement::SubgroupCollectiveOperation { | 2687 | 0 | result: result_handle, | 2688 | 0 | op: op_id, | 2689 | 0 | collective_op: collective_op_id, | 2690 | 0 | argument: argument_handle, | 2691 | 0 | }, | 2692 | 0 | span, | 2693 | | ); | 2694 | 0 | emitter.start(ctx.expressions); | 2695 | | } | 2696 | | Op::GroupNonUniformBroadcastFirst | 2697 | | | Op::GroupNonUniformBroadcast | 2698 | | | Op::GroupNonUniformShuffle | 2699 | | | Op::GroupNonUniformShuffleDown | 2700 | | | Op::GroupNonUniformShuffleUp | 2701 | | | Op::GroupNonUniformShuffleXor | 2702 | | | Op::GroupNonUniformQuadBroadcast => { | 2703 | 0 | inst.expect(if matches!(inst.op, Op::GroupNonUniformBroadcastFirst) { | 2704 | 0 | 5 | 2705 | | } else { | 2706 | 0 | 6 | 2707 | 0 | })?; | 2708 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2709 | 0 | let result_type_id = self.next()?; | 2710 | 0 | let result_id = self.next()?; | 2711 | 0 | let exec_scope_id = self.next()?; | 2712 | 0 | let argument_id = 
self.next()?; | 2713 | | | 2714 | 0 | let argument_lookup = self.lookup_expression.lookup(argument_id)?; | 2715 | 0 | let argument_handle = get_expr_handle!(argument_id, argument_lookup); | 2716 | | | 2717 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; | 2718 | 0 | let _exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) | 2719 | 0 | .filter(|exec_scope| *exec_scope == spirv::Scope::Subgroup as u32) | 2720 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; | 2721 | | | 2722 | 0 | let mode = if matches!(inst.op, Op::GroupNonUniformBroadcastFirst) { | 2723 | 0 | crate::GatherMode::BroadcastFirst | 2724 | | } else { | 2725 | 0 | let index_id = self.next()?; | 2726 | 0 | let index_lookup = self.lookup_expression.lookup(index_id)?; | 2727 | 0 | let index_handle = get_expr_handle!(index_id, index_lookup); | 2728 | 0 | match inst.op { | 2729 | | Op::GroupNonUniformBroadcast => { | 2730 | 0 | crate::GatherMode::Broadcast(index_handle) | 2731 | | } | 2732 | 0 | Op::GroupNonUniformShuffle => crate::GatherMode::Shuffle(index_handle), | 2733 | | Op::GroupNonUniformShuffleDown => { | 2734 | 0 | crate::GatherMode::ShuffleDown(index_handle) | 2735 | | } | 2736 | | Op::GroupNonUniformShuffleUp => { | 2737 | 0 | crate::GatherMode::ShuffleUp(index_handle) | 2738 | | } | 2739 | | Op::GroupNonUniformShuffleXor => { | 2740 | 0 | crate::GatherMode::ShuffleXor(index_handle) | 2741 | | } | 2742 | | Op::GroupNonUniformQuadBroadcast => { | 2743 | 0 | crate::GatherMode::QuadBroadcast(index_handle) | 2744 | | } | 2745 | 0 | _ => unreachable!(), | 2746 | | } | 2747 | | }; | 2748 | | | 2749 | 0 | let result_type = self.lookup_type.lookup(result_type_id)?; | 2750 | | | 2751 | 0 | let result_handle = ctx.expressions.append( | 2752 | 0 | crate::Expression::SubgroupOperationResult { | 2753 | 0 | ty: result_type.handle, | 2754 | 0 | }, | 2755 | 0 | span, | 2756 | | ); | 2757 | 0 | self.lookup_expression.insert( | 2758 | 0 | result_id, | 2759 | 0 | 
LookupExpression { | 2760 | 0 | handle: result_handle, | 2761 | 0 | type_id: result_type_id, | 2762 | 0 | block_id, | 2763 | 0 | }, | 2764 | | ); | 2765 | | | 2766 | 0 | block.push( | 2767 | 0 | crate::Statement::SubgroupGather { | 2768 | 0 | result: result_handle, | 2769 | 0 | mode, | 2770 | 0 | argument: argument_handle, | 2771 | 0 | }, | 2772 | 0 | span, | 2773 | | ); | 2774 | 0 | emitter.start(ctx.expressions); | 2775 | | } | 2776 | | Op::GroupNonUniformQuadSwap => { | 2777 | 0 | inst.expect(6)?; | 2778 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2779 | 0 | let result_type_id = self.next()?; | 2780 | 0 | let result_id = self.next()?; | 2781 | 0 | let exec_scope_id = self.next()?; | 2782 | 0 | let argument_id = self.next()?; | 2783 | 0 | let direction_id = self.next()?; | 2784 | | | 2785 | 0 | let argument_lookup = self.lookup_expression.lookup(argument_id)?; | 2786 | 0 | let argument_handle = get_expr_handle!(argument_id, argument_lookup); | 2787 | | | 2788 | 0 | let exec_scope_const = self.lookup_constant.lookup(exec_scope_id)?; | 2789 | 0 | let _exec_scope = resolve_constant(ctx.gctx(), &exec_scope_const.inner) | 2790 | 0 | .filter(|exec_scope| *exec_scope == spirv::Scope::Subgroup as u32) | 2791 | 0 | .ok_or(Error::InvalidBarrierScope(exec_scope_id))?; | 2792 | | | 2793 | 0 | let direction_const = self.lookup_constant.lookup(direction_id)?; | 2794 | 0 | let direction_const = resolve_constant(ctx.gctx(), &direction_const.inner) | 2795 | 0 | .ok_or(Error::InvalidOperand)?; | 2796 | 0 | let direction = match direction_const { | 2797 | 0 | 0 => crate::Direction::X, | 2798 | 0 | 1 => crate::Direction::Y, | 2799 | 0 | 2 => crate::Direction::Diagonal, | 2800 | 0 | _ => unreachable!(), | 2801 | | }; | 2802 | | | 2803 | 0 | let result_type = self.lookup_type.lookup(result_type_id)?; | 2804 | | | 2805 | 0 | let result_handle = ctx.expressions.append( | 2806 | 0 | crate::Expression::SubgroupOperationResult { | 2807 | 0 | ty: result_type.handle, | 2808 | 0 | 
}, | 2809 | 0 | span, | 2810 | | ); | 2811 | 0 | self.lookup_expression.insert( | 2812 | 0 | result_id, | 2813 | 0 | LookupExpression { | 2814 | 0 | handle: result_handle, | 2815 | 0 | type_id: result_type_id, | 2816 | 0 | block_id, | 2817 | 0 | }, | 2818 | | ); | 2819 | | | 2820 | 0 | block.push( | 2821 | 0 | crate::Statement::SubgroupGather { | 2822 | 0 | mode: crate::GatherMode::QuadSwap(direction), | 2823 | 0 | result: result_handle, | 2824 | 0 | argument: argument_handle, | 2825 | 0 | }, | 2826 | 0 | span, | 2827 | | ); | 2828 | 0 | emitter.start(ctx.expressions); | 2829 | | } | 2830 | | Op::AtomicLoad => { | 2831 | 0 | inst.expect(6)?; | 2832 | 0 | let start = self.data_offset; | 2833 | 0 | let result_type_id = self.next()?; | 2834 | 0 | let result_id = self.next()?; | 2835 | 0 | let pointer_id = self.next()?; | 2836 | 0 | let _scope_id = self.next()?; | 2837 | 0 | let _memory_semantics_id = self.next()?; | 2838 | 0 | let span = self.span_from_with_op(start); | 2839 | | | 2840 | 0 | log::trace!("\t\t\tlooking up expr {pointer_id:?}"); | 2841 | 0 | let p_lexp_handle = | 2842 | 0 | get_expr_handle!(pointer_id, self.lookup_expression.lookup(pointer_id)?); | 2843 | | | 2844 | | // Create an expression for our result | 2845 | 0 | let expr = crate::Expression::Load { | 2846 | 0 | pointer: p_lexp_handle, | 2847 | 0 | }; | 2848 | 0 | let handle = ctx.expressions.append(expr, span); | 2849 | 0 | self.lookup_expression.insert( | 2850 | 0 | result_id, | 2851 | 0 | LookupExpression { | 2852 | 0 | handle, | 2853 | 0 | type_id: result_type_id, | 2854 | 0 | block_id, | 2855 | 0 | }, | 2856 | | ); | 2857 | | | 2858 | | // Store any associated global variables so we can upgrade their types later | 2859 | 0 | self.record_atomic_access(ctx, p_lexp_handle)?; | 2860 | | } | 2861 | | Op::AtomicStore => { | 2862 | 0 | inst.expect(5)?; | 2863 | 0 | let start = self.data_offset; | 2864 | 0 | let pointer_id = self.next()?; | 2865 | 0 | let _scope_id = self.next()?; | 2866 | 0 | let 
_memory_semantics_id = self.next()?; | 2867 | 0 | let value_id = self.next()?; | 2868 | 0 | let span = self.span_from_with_op(start); | 2869 | | | 2870 | 0 | log::trace!("\t\t\tlooking up pointer expr {pointer_id:?}"); | 2871 | 0 | let p_lexp_handle = | 2872 | 0 | get_expr_handle!(pointer_id, self.lookup_expression.lookup(pointer_id)?); | 2873 | | | 2874 | 0 | log::trace!("\t\t\tlooking up value expr {pointer_id:?}"); | 2875 | 0 | let v_lexp_handle = | 2876 | 0 | get_expr_handle!(value_id, self.lookup_expression.lookup(value_id)?); | 2877 | | | 2878 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2879 | | // Create a statement for the op itself | 2880 | 0 | let stmt = crate::Statement::Store { | 2881 | 0 | pointer: p_lexp_handle, | 2882 | 0 | value: v_lexp_handle, | 2883 | 0 | }; | 2884 | 0 | block.push(stmt, span); | 2885 | 0 | emitter.start(ctx.expressions); | 2886 | | | 2887 | | // Store any associated global variables so we can upgrade their types later | 2888 | 0 | self.record_atomic_access(ctx, p_lexp_handle)?; | 2889 | | } | 2890 | | Op::AtomicIIncrement | Op::AtomicIDecrement => { | 2891 | 0 | inst.expect(6)?; | 2892 | 0 | let start = self.data_offset; | 2893 | 0 | let result_type_id = self.next()?; | 2894 | 0 | let result_id = self.next()?; | 2895 | 0 | let pointer_id = self.next()?; | 2896 | 0 | let _scope_id = self.next()?; | 2897 | 0 | let _memory_semantics_id = self.next()?; | 2898 | 0 | let span = self.span_from_with_op(start); | 2899 | | | 2900 | 0 | let (p_exp_h, p_base_ty_h) = self.get_exp_and_base_ty_handles( | 2901 | 0 | pointer_id, | 2902 | 0 | ctx, | 2903 | 0 | &mut emitter, | 2904 | 0 | &mut block, | 2905 | 0 | body_idx, | 2906 | 0 | )?; | 2907 | | | 2908 | 0 | block.extend(emitter.finish(ctx.expressions)); | 2909 | | // Create an expression for our result | 2910 | 0 | let r_lexp_handle = { | 2911 | 0 | let expr = crate::Expression::AtomicResult { | 2912 | 0 | ty: p_base_ty_h, | 2913 | 0 | comparison: false, | 2914 | 0 | }; | 2915 | 0 | 
let handle = ctx.expressions.append(expr, span); | 2916 | 0 | self.lookup_expression.insert( | 2917 | 0 | result_id, | 2918 | 0 | LookupExpression { | 2919 | 0 | handle, | 2920 | 0 | type_id: result_type_id, | 2921 | 0 | block_id, | 2922 | 0 | }, | 2923 | | ); | 2924 | 0 | handle | 2925 | | }; | 2926 | 0 | emitter.start(ctx.expressions); | 2927 | | | 2928 | | // Create a literal "1" to use as our value | 2929 | 0 | let one_lexp_handle = make_index_literal( | 2930 | 0 | ctx, | 2931 | | 1, | 2932 | 0 | &mut block, | 2933 | 0 | &mut emitter, | 2934 | 0 | p_base_ty_h, | 2935 | 0 | result_type_id, | 2936 | 0 | span, | 2937 | 0 | )?; | 2938 | | | 2939 | | // Create a statement for the op itself | 2940 | 0 | let stmt = crate::Statement::Atomic { | 2941 | 0 | pointer: p_exp_h, | 2942 | 0 | fun: match inst.op { | 2943 | 0 | Op::AtomicIIncrement => crate::AtomicFunction::Add, | 2944 | 0 | _ => crate::AtomicFunction::Subtract, | 2945 | | }, | 2946 | 0 | value: one_lexp_handle, | 2947 | 0 | result: Some(r_lexp_handle), | 2948 | | }; | 2949 | 0 | block.push(stmt, span); | 2950 | | | 2951 | | // Store any associated global variables so we can upgrade their types later | 2952 | 0 | self.record_atomic_access(ctx, p_exp_h)?; | 2953 | | } | 2954 | | Op::AtomicCompareExchange => { | 2955 | 0 | inst.expect(9)?; | 2956 | | | 2957 | 0 | let start = self.data_offset; | 2958 | 0 | let span = self.span_from_with_op(start); | 2959 | 0 | let result_type_id = self.next()?; | 2960 | 0 | let result_id = self.next()?; | 2961 | 0 | let pointer_id = self.next()?; | 2962 | 0 | let _memory_scope_id = self.next()?; | 2963 | 0 | let _equal_memory_semantics_id = self.next()?; | 2964 | 0 | let _unequal_memory_semantics_id = self.next()?; | 2965 | 0 | let value_id = self.next()?; | 2966 | 0 | let comparator_id = self.next()?; | 2967 | | | 2968 | 0 | let (p_exp_h, p_base_ty_h) = self.get_exp_and_base_ty_handles( | 2969 | 0 | pointer_id, | 2970 | 0 | ctx, | 2971 | 0 | &mut emitter, | 2972 | 0 | &mut block, 
| 2973 | 0 | body_idx, | 2974 | 0 | )?; | 2975 | | | 2976 | 0 | log::trace!("\t\t\tlooking up value expr {value_id:?}"); | 2977 | 0 | let v_lexp_handle = | 2978 | 0 | get_expr_handle!(value_id, self.lookup_expression.lookup(value_id)?); | 2979 | | | 2980 | 0 | log::trace!("\t\t\tlooking up comparator expr {value_id:?}"); | 2981 | 0 | let c_lexp_handle = get_expr_handle!( | 2982 | 0 | comparator_id, | 2983 | 0 | self.lookup_expression.lookup(comparator_id)? | 2984 | | ); | 2985 | | | 2986 | | // We know from the SPIR-V spec that the result type must be an integer | 2987 | | // scalar, and we'll need the type itself to get a handle to the atomic | 2988 | | // result struct. | 2989 | 0 | let crate::TypeInner::Scalar(scalar) = ctx.module.types[p_base_ty_h].inner | 2990 | | else { | 2991 | 0 | return Err( | 2992 | 0 | crate::front::atomic_upgrade::Error::CompareExchangeNonScalarBaseType | 2993 | 0 | .into(), | 2994 | 0 | ); | 2995 | | }; | 2996 | | | 2997 | | // Get a handle to the atomic result struct type. 
| 2998 | 0 | let atomic_result_struct_ty_h = ctx.module.generate_predeclared_type( | 2999 | 0 | crate::PredeclaredType::AtomicCompareExchangeWeakResult(scalar), | 3000 | | ); | 3001 | | | 3002 | 0 | block.extend(emitter.finish(ctx.expressions)); | 3003 | | | 3004 | | // Create an expression for our atomic result | 3005 | 0 | let atomic_lexp_handle = { | 3006 | 0 | let expr = crate::Expression::AtomicResult { | 3007 | 0 | ty: atomic_result_struct_ty_h, | 3008 | 0 | comparison: true, | 3009 | 0 | }; | 3010 | 0 | ctx.expressions.append(expr, span) | 3011 | | }; | 3012 | | | 3013 | | // Create an dot accessor to extract the value from the | 3014 | | // result struct __atomic_compare_exchange_result<T> and use that | 3015 | | // as the expression for the result_id | 3016 | 0 | { | 3017 | 0 | let expr = crate::Expression::AccessIndex { | 3018 | 0 | base: atomic_lexp_handle, | 3019 | 0 | index: 0, | 3020 | 0 | }; | 3021 | 0 | let handle = ctx.expressions.append(expr, span); | 3022 | 0 | // Use this dot accessor as the result id's expression | 3023 | 0 | let _ = self.lookup_expression.insert( | 3024 | 0 | result_id, | 3025 | 0 | LookupExpression { | 3026 | 0 | handle, | 3027 | 0 | type_id: result_type_id, | 3028 | 0 | block_id, | 3029 | 0 | }, | 3030 | 0 | ); | 3031 | 0 | } | 3032 | | | 3033 | 0 | emitter.start(ctx.expressions); | 3034 | | | 3035 | | // Create a statement for the op itself | 3036 | 0 | let stmt = crate::Statement::Atomic { | 3037 | 0 | pointer: p_exp_h, | 3038 | 0 | fun: crate::AtomicFunction::Exchange { | 3039 | 0 | compare: Some(c_lexp_handle), | 3040 | 0 | }, | 3041 | 0 | value: v_lexp_handle, | 3042 | 0 | result: Some(atomic_lexp_handle), | 3043 | 0 | }; | 3044 | 0 | block.push(stmt, span); | 3045 | | | 3046 | | // Store any associated global variables so we can upgrade their types later | 3047 | 0 | self.record_atomic_access(ctx, p_exp_h)?; | 3048 | | } | 3049 | | Op::AtomicExchange | 3050 | | | Op::AtomicIAdd | 3051 | | | Op::AtomicISub | 3052 | | | 
Op::AtomicSMin | 3053 | | | Op::AtomicUMin | 3054 | | | Op::AtomicSMax | 3055 | | | Op::AtomicUMax | 3056 | | | Op::AtomicAnd | 3057 | | | Op::AtomicOr | 3058 | | | Op::AtomicXor | 3059 | 0 | | Op::AtomicFAddEXT => self.parse_atomic_expr_with_value( | 3060 | 0 | inst, | 3061 | 0 | &mut emitter, | 3062 | 0 | ctx, | 3063 | 0 | &mut block, | 3064 | 0 | block_id, | 3065 | 0 | body_idx, | 3066 | 0 | match inst.op { | 3067 | 0 | Op::AtomicExchange => crate::AtomicFunction::Exchange { compare: None }, | 3068 | 0 | Op::AtomicIAdd | Op::AtomicFAddEXT => crate::AtomicFunction::Add, | 3069 | 0 | Op::AtomicISub => crate::AtomicFunction::Subtract, | 3070 | 0 | Op::AtomicSMin => crate::AtomicFunction::Min, | 3071 | 0 | Op::AtomicUMin => crate::AtomicFunction::Min, | 3072 | 0 | Op::AtomicSMax => crate::AtomicFunction::Max, | 3073 | 0 | Op::AtomicUMax => crate::AtomicFunction::Max, | 3074 | 0 | Op::AtomicAnd => crate::AtomicFunction::And, | 3075 | 0 | Op::AtomicOr => crate::AtomicFunction::InclusiveOr, | 3076 | 0 | Op::AtomicXor => crate::AtomicFunction::ExclusiveOr, | 3077 | 0 | _ => unreachable!(), | 3078 | | }, | 3079 | 0 | )?, | 3080 | | | 3081 | | _ => { | 3082 | 0 | return Err(Error::UnsupportedInstruction(self.state, inst.op)); | 3083 | | } | 3084 | | } | 3085 | | }; | 3086 | | | 3087 | 0 | block.extend(emitter.finish(ctx.expressions)); | 3088 | 0 | if let Some(stmt) = terminator { | 3089 | 0 | block.push(stmt, crate::Span::default()); | 3090 | 0 | } | 3091 | | | 3092 | | // Save this block fragment in `block_ctx.blocks`, and mark it to be | 3093 | | // incorporated into the current body at `Statement` assembly time. | 3094 | 0 | ctx.blocks.insert(block_id, block); | 3095 | 0 | let body = &mut ctx.bodies[body_idx]; | 3096 | 0 | body.data.push(BodyFragment::BlockId(block_id)); | 3097 | 0 | Ok(()) | 3098 | 13 | } |
|
3099 | | } |
3100 | | |
3101 | 0 | fn make_index_literal( |
3102 | 0 | ctx: &mut BlockContext, |
3103 | 0 | index: u32, |
3104 | 0 | block: &mut crate::Block, |
3105 | 0 | emitter: &mut crate::proc::Emitter, |
3106 | 0 | index_type: Handle<crate::Type>, |
3107 | 0 | index_type_id: spirv::Word, |
3108 | 0 | span: crate::Span, |
3109 | 0 | ) -> Result<Handle<crate::Expression>, Error> { |
3110 | 0 | block.extend(emitter.finish(ctx.expressions)); |
3111 | | |
3112 | 0 | let literal = match ctx.module.types[index_type].inner.scalar_kind() { |
3113 | 0 | Some(crate::ScalarKind::Uint) => crate::Literal::U32(index), |
3114 | 0 | Some(crate::ScalarKind::Sint) => crate::Literal::I32(index as i32), |
3115 | 0 | _ => return Err(Error::InvalidIndexType(index_type_id)), |
3116 | | }; |
3117 | 0 | let expr = ctx |
3118 | 0 | .expressions |
3119 | 0 | .append(crate::Expression::Literal(literal), span); |
3120 | | |
3121 | 0 | emitter.start(ctx.expressions); |
3122 | 0 | Ok(expr) |
3123 | 0 | } |