Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include <atomic>
6 : #include <type_traits>
7 :
8 : #include "src/wasm/wasm-interpreter.h"
9 :
10 : #include "src/assembler-inl.h"
11 : #include "src/base/overflowing-math.h"
12 : #include "src/boxed-float.h"
13 : #include "src/compiler/wasm-compiler.h"
14 : #include "src/conversions.h"
15 : #include "src/identity-map.h"
16 : #include "src/objects-inl.h"
17 : #include "src/trap-handler/trap-handler.h"
18 : #include "src/utils.h"
19 : #include "src/wasm/decoder.h"
20 : #include "src/wasm/function-body-decoder-impl.h"
21 : #include "src/wasm/function-body-decoder.h"
22 : #include "src/wasm/memory-tracing.h"
23 : #include "src/wasm/wasm-engine.h"
24 : #include "src/wasm/wasm-external-refs.h"
25 : #include "src/wasm/wasm-limits.h"
26 : #include "src/wasm/wasm-module.h"
27 : #include "src/wasm/wasm-objects-inl.h"
28 :
29 : #include "src/zone/accounting-allocator.h"
30 : #include "src/zone/zone-containers.h"
31 :
32 : namespace v8 {
33 : namespace internal {
34 : namespace wasm {
35 :
36 : #define TRACE(...) \
37 : do { \
38 : if (FLAG_trace_wasm_interpreter) PrintF(__VA_ARGS__); \
39 : } while (false)
40 :
41 : #if V8_TARGET_BIG_ENDIAN
42 : #define LANE(i, type) ((sizeof(type.val) / sizeof(type.val[0])) - (i)-1)
43 : #else
44 : #define LANE(i, type) (i)
45 : #endif
46 :
47 : #define FOREACH_INTERNAL_OPCODE(V) V(Breakpoint, 0xFF)
48 :
49 : #define WASM_CTYPES(V) \
50 : V(I32, int32_t) V(I64, int64_t) V(F32, float) V(F64, double) V(S128, Simd128)
51 :
52 : #define FOREACH_SIMPLE_BINOP(V) \
53 : V(I32Add, uint32_t, +) \
54 : V(I32Sub, uint32_t, -) \
55 : V(I32Mul, uint32_t, *) \
56 : V(I32And, uint32_t, &) \
57 : V(I32Ior, uint32_t, |) \
58 : V(I32Xor, uint32_t, ^) \
59 : V(I32Eq, uint32_t, ==) \
60 : V(I32Ne, uint32_t, !=) \
61 : V(I32LtU, uint32_t, <) \
62 : V(I32LeU, uint32_t, <=) \
63 : V(I32GtU, uint32_t, >) \
64 : V(I32GeU, uint32_t, >=) \
65 : V(I32LtS, int32_t, <) \
66 : V(I32LeS, int32_t, <=) \
67 : V(I32GtS, int32_t, >) \
68 : V(I32GeS, int32_t, >=) \
69 : V(I64Add, uint64_t, +) \
70 : V(I64Sub, uint64_t, -) \
71 : V(I64Mul, uint64_t, *) \
72 : V(I64And, uint64_t, &) \
73 : V(I64Ior, uint64_t, |) \
74 : V(I64Xor, uint64_t, ^) \
75 : V(I64Eq, uint64_t, ==) \
76 : V(I64Ne, uint64_t, !=) \
77 : V(I64LtU, uint64_t, <) \
78 : V(I64LeU, uint64_t, <=) \
79 : V(I64GtU, uint64_t, >) \
80 : V(I64GeU, uint64_t, >=) \
81 : V(I64LtS, int64_t, <) \
82 : V(I64LeS, int64_t, <=) \
83 : V(I64GtS, int64_t, >) \
84 : V(I64GeS, int64_t, >=) \
85 : V(F32Add, float, +) \
86 : V(F32Sub, float, -) \
87 : V(F32Eq, float, ==) \
88 : V(F32Ne, float, !=) \
89 : V(F32Lt, float, <) \
90 : V(F32Le, float, <=) \
91 : V(F32Gt, float, >) \
92 : V(F32Ge, float, >=) \
93 : V(F64Add, double, +) \
94 : V(F64Sub, double, -) \
95 : V(F64Eq, double, ==) \
96 : V(F64Ne, double, !=) \
97 : V(F64Lt, double, <) \
98 : V(F64Le, double, <=) \
99 : V(F64Gt, double, >) \
100 : V(F64Ge, double, >=) \
101 : V(F32Mul, float, *) \
102 : V(F64Mul, double, *) \
103 : V(F32Div, float, /) \
104 : V(F64Div, double, /)
105 :
106 : #define FOREACH_OTHER_BINOP(V) \
107 : V(I32DivS, int32_t) \
108 : V(I32DivU, uint32_t) \
109 : V(I32RemS, int32_t) \
110 : V(I32RemU, uint32_t) \
111 : V(I32Shl, uint32_t) \
112 : V(I32ShrU, uint32_t) \
113 : V(I32ShrS, int32_t) \
114 : V(I64DivS, int64_t) \
115 : V(I64DivU, uint64_t) \
116 : V(I64RemS, int64_t) \
117 : V(I64RemU, uint64_t) \
118 : V(I64Shl, uint64_t) \
119 : V(I64ShrU, uint64_t) \
120 : V(I64ShrS, int64_t) \
121 : V(I32Ror, int32_t) \
122 : V(I32Rol, int32_t) \
123 : V(I64Ror, int64_t) \
124 : V(I64Rol, int64_t) \
125 : V(F32Min, float) \
126 : V(F32Max, float) \
127 : V(F64Min, double) \
128 : V(F64Max, double) \
129 : V(I32AsmjsDivS, int32_t) \
130 : V(I32AsmjsDivU, uint32_t) \
131 : V(I32AsmjsRemS, int32_t) \
132 : V(I32AsmjsRemU, uint32_t) \
133 : V(F32CopySign, Float32) \
134 : V(F64CopySign, Float64)
135 :
136 : #define FOREACH_I32CONV_FLOATOP(V) \
137 : V(I32SConvertF32, int32_t, float) \
138 : V(I32SConvertF64, int32_t, double) \
139 : V(I32UConvertF32, uint32_t, float) \
140 : V(I32UConvertF64, uint32_t, double)
141 :
142 : #define FOREACH_OTHER_UNOP(V) \
143 : V(I32Clz, uint32_t) \
144 : V(I32Ctz, uint32_t) \
145 : V(I32Popcnt, uint32_t) \
146 : V(I32Eqz, uint32_t) \
147 : V(I64Clz, uint64_t) \
148 : V(I64Ctz, uint64_t) \
149 : V(I64Popcnt, uint64_t) \
150 : V(I64Eqz, uint64_t) \
151 : V(F32Abs, Float32) \
152 : V(F32Neg, Float32) \
153 : V(F32Ceil, float) \
154 : V(F32Floor, float) \
155 : V(F32Trunc, float) \
156 : V(F32NearestInt, float) \
157 : V(F64Abs, Float64) \
158 : V(F64Neg, Float64) \
159 : V(F64Ceil, double) \
160 : V(F64Floor, double) \
161 : V(F64Trunc, double) \
162 : V(F64NearestInt, double) \
163 : V(I32ConvertI64, int64_t) \
164 : V(I64SConvertF32, float) \
165 : V(I64SConvertF64, double) \
166 : V(I64UConvertF32, float) \
167 : V(I64UConvertF64, double) \
168 : V(I64SConvertI32, int32_t) \
169 : V(I64UConvertI32, uint32_t) \
170 : V(F32SConvertI32, int32_t) \
171 : V(F32UConvertI32, uint32_t) \
172 : V(F32SConvertI64, int64_t) \
173 : V(F32UConvertI64, uint64_t) \
174 : V(F32ConvertF64, double) \
175 : V(F32ReinterpretI32, int32_t) \
176 : V(F64SConvertI32, int32_t) \
177 : V(F64UConvertI32, uint32_t) \
178 : V(F64SConvertI64, int64_t) \
179 : V(F64UConvertI64, uint64_t) \
180 : V(F64ConvertF32, float) \
181 : V(F64ReinterpretI64, int64_t) \
182 : V(I32AsmjsSConvertF32, float) \
183 : V(I32AsmjsUConvertF32, float) \
184 : V(I32AsmjsSConvertF64, double) \
185 : V(I32AsmjsUConvertF64, double) \
186 : V(F32Sqrt, float) \
187 : V(F64Sqrt, double)
188 :
189 : namespace {
190 :
// Masks selecting only the IEEE-754 sign bit of a 32-bit / 64-bit float.
// Used below for bit-level abs, neg and copysign (preserves NaN payloads).
constexpr uint32_t kFloat32SignBitMask = uint32_t{1} << 31;
constexpr uint64_t kFloat64SignBitMask = uint64_t{1} << 63;
193 :
194 : inline int32_t ExecuteI32DivS(int32_t a, int32_t b, TrapReason* trap) {
195 41556 : if (b == 0) {
196 : *trap = kTrapDivByZero;
197 : return 0;
198 : }
199 38204 : if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
200 : *trap = kTrapDivUnrepresentable;
201 : return 0;
202 : }
203 38192 : return a / b;
204 : }
205 :
206 : inline uint32_t ExecuteI32DivU(uint32_t a, uint32_t b, TrapReason* trap) {
207 40932 : if (b == 0) {
208 : *trap = kTrapDivByZero;
209 : return 0;
210 : }
211 37656 : return a / b;
212 : }
213 :
214 : inline int32_t ExecuteI32RemS(int32_t a, int32_t b, TrapReason* trap) {
215 40964 : if (b == 0) {
216 : *trap = kTrapRemByZero;
217 : return 0;
218 : }
219 37672 : if (b == -1) return 0;
220 34400 : return a % b;
221 : }
222 :
223 : inline uint32_t ExecuteI32RemU(uint32_t a, uint32_t b, TrapReason* trap) {
224 40932 : if (b == 0) {
225 : *trap = kTrapRemByZero;
226 : return 0;
227 : }
228 37656 : return a % b;
229 : }
230 :
// Wasm shift semantics: only the low 5 bits of the shift count are used for
// 32-bit operands (count taken mod 32), so no count can cause undefined
// behavior. The {trap} parameter is unused; it keeps the signature uniform
// with the other binops dispatched via FOREACH_OTHER_BINOP.
inline uint32_t ExecuteI32Shl(uint32_t a, uint32_t b, TrapReason* trap) {
  return a << (b & 0x1F);
}

// Logical (zero-filling) right shift.
inline uint32_t ExecuteI32ShrU(uint32_t a, uint32_t b, TrapReason* trap) {
  return a >> (b & 0x1F);
}

// Arithmetic (sign-propagating) right shift.
inline int32_t ExecuteI32ShrS(int32_t a, int32_t b, TrapReason* trap) {
  return a >> (b & 0x1F);
}
242 :
243 : inline int64_t ExecuteI64DivS(int64_t a, int64_t b, TrapReason* trap) {
244 54316 : if (b == 0) {
245 : *trap = kTrapDivByZero;
246 : return 0;
247 : }
248 49672 : if (b == -1 && a == std::numeric_limits<int64_t>::min()) {
249 : *trap = kTrapDivUnrepresentable;
250 : return 0;
251 : }
252 49668 : return a / b;
253 : }
254 :
// Unsigned 64-bit division; only division by zero traps.
inline uint64_t ExecuteI64DivU(uint64_t a, uint64_t b, TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapDivByZero;
    return 0;
  }
  return a / b;
}

// Signed 64-bit remainder. Divisor -1 is special cased to result 0 so that
// INT64_MIN % -1 cannot overflow in hardware.
inline int64_t ExecuteI64RemS(int64_t a, int64_t b, TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapRemByZero;
    return 0;
  }
  if (b == -1) return 0;
  return a % b;
}

// Unsigned 64-bit remainder; only a zero divisor traps.
inline uint64_t ExecuteI64RemU(uint64_t a, uint64_t b, TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapRemByZero;
    return 0;
  }
  return a % b;
}
279 :
// 64-bit shifts: wasm uses only the low 6 bits of the shift count (count
// taken mod 64), so no count can cause undefined behavior.
inline uint64_t ExecuteI64Shl(uint64_t a, uint64_t b, TrapReason* trap) {
  return a << (b & 0x3F);
}

// Logical (zero-filling) right shift.
inline uint64_t ExecuteI64ShrU(uint64_t a, uint64_t b, TrapReason* trap) {
  return a >> (b & 0x3F);
}

// Arithmetic (sign-propagating) right shift.
inline int64_t ExecuteI64ShrS(int64_t a, int64_t b, TrapReason* trap) {
  return a >> (b & 0x3F);
}
291 :
// Rotations. The second mask (on the opposite-direction shift) keeps that
// shift count in [0, width), so a rotation amount of 0 (or a multiple of the
// width) never produces a full-width shift, which would be undefined
// behavior in C++.
inline uint32_t ExecuteI32Ror(uint32_t a, uint32_t b, TrapReason* trap) {
  return (a >> (b & 0x1F)) | (a << ((32 - b) & 0x1F));
}

inline uint32_t ExecuteI32Rol(uint32_t a, uint32_t b, TrapReason* trap) {
  return (a << (b & 0x1F)) | (a >> ((32 - b) & 0x1F));
}

inline uint64_t ExecuteI64Ror(uint64_t a, uint64_t b, TrapReason* trap) {
  return (a >> (b & 0x3F)) | (a << ((64 - b) & 0x3F));
}

inline uint64_t ExecuteI64Rol(uint64_t a, uint64_t b, TrapReason* trap) {
  return (a << (b & 0x3F)) | (a >> ((64 - b) & 0x3F));
}
307 :
// Float min/max delegate to JSMin/JSMax (src/utils.h), which implement the
// JS/wasm semantics for NaN and signed-zero operands rather than plain
// operator< comparisons.
inline float ExecuteF32Min(float a, float b, TrapReason* trap) {
  return JSMin(a, b);
}

inline float ExecuteF32Max(float a, float b, TrapReason* trap) {
  return JSMax(a, b);
}

// copysign is done on the raw bit patterns (boxed Float32) so NaN payloads
// pass through unchanged: result = magnitude bits of {a}, sign bit of {b}.
inline Float32 ExecuteF32CopySign(Float32 a, Float32 b, TrapReason* trap) {
  return Float32::FromBits((a.get_bits() & ~kFloat32SignBitMask) |
                           (b.get_bits() & kFloat32SignBitMask));
}

inline double ExecuteF64Min(double a, double b, TrapReason* trap) {
  return JSMin(a, b);
}

inline double ExecuteF64Max(double a, double b, TrapReason* trap) {
  return JSMax(a, b);
}

// Same bit-level copysign as the f32 variant above.
inline Float64 ExecuteF64CopySign(Float64 a, Float64 b, TrapReason* trap) {
  return Float64::FromBits((a.get_bits() & ~kFloat64SignBitMask) |
                           (b.get_bits() & kFloat64SignBitMask));
}
333 :
// asm.js integer division never traps: division by zero yields 0, and the
// overflowing case INT32_MIN / -1 yields INT32_MIN (the wrapped result).
inline int32_t ExecuteI32AsmjsDivS(int32_t a, int32_t b, TrapReason* trap) {
  if (b == 0) return 0;
  if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
    return std::numeric_limits<int32_t>::min();
  }
  return a / b;
}

inline uint32_t ExecuteI32AsmjsDivU(uint32_t a, uint32_t b, TrapReason* trap) {
  if (b == 0) return 0;
  return a / b;
}

// asm.js remainder: zero divisor yields 0; divisor -1 is special cased to 0
// so INT32_MIN % -1 cannot overflow in hardware.
inline int32_t ExecuteI32AsmjsRemS(int32_t a, int32_t b, TrapReason* trap) {
  if (b == 0) return 0;
  if (b == -1) return 0;
  return a % b;
}

inline uint32_t ExecuteI32AsmjsRemU(uint32_t a, uint32_t b, TrapReason* trap) {
  if (b == 0) return 0;
  return a % b;
}
357 :
// asm.js float->int conversions never trap; they go through DoubleToInt32 /
// DoubleToUint32 (JS ToInt32/ToUint32 behavior — NaN becomes 0, out-of-range
// values wrap modulo 2^32) instead of the trapping wasm conversions.
inline int32_t ExecuteI32AsmjsSConvertF32(float a, TrapReason* trap) {
  return DoubleToInt32(a);
}

inline uint32_t ExecuteI32AsmjsUConvertF32(float a, TrapReason* trap) {
  return DoubleToUint32(a);
}

inline int32_t ExecuteI32AsmjsSConvertF64(double a, TrapReason* trap) {
  return DoubleToInt32(a);
}

inline uint32_t ExecuteI32AsmjsUConvertF64(double a, TrapReason* trap) {
  return DoubleToUint32(a);
}
373 :
// Bit-counting unops, all delegating to src/base/bits. For clz/ctz a zero
// input yields the full bit width (32 resp. 64). The {trap} parameter is
// unused; it keeps the signature uniform for the unop dispatch macros.
int32_t ExecuteI32Clz(uint32_t val, TrapReason* trap) {
  return base::bits::CountLeadingZeros(val);
}

uint32_t ExecuteI32Ctz(uint32_t val, TrapReason* trap) {
  return base::bits::CountTrailingZeros(val);
}

uint32_t ExecuteI32Popcnt(uint32_t val, TrapReason* trap) {
  return base::bits::CountPopulation(val);
}

// i32.eqz: boolean test producing 0 or 1.
inline uint32_t ExecuteI32Eqz(uint32_t val, TrapReason* trap) {
  return val == 0 ? 1 : 0;
}

int64_t ExecuteI64Clz(uint64_t val, TrapReason* trap) {
  return base::bits::CountLeadingZeros(val);
}

inline uint64_t ExecuteI64Ctz(uint64_t val, TrapReason* trap) {
  return base::bits::CountTrailingZeros(val);
}

inline int64_t ExecuteI64Popcnt(uint64_t val, TrapReason* trap) {
  return base::bits::CountPopulation(val);
}

// i64.eqz produces an i32 result per the wasm spec.
inline int32_t ExecuteI64Eqz(uint64_t val, TrapReason* trap) {
  return val == 0 ? 1 : 0;
}
405 :
// f32 unops. Abs/Neg operate on the raw bit pattern (boxed Float32) so NaN
// payloads are preserved; the remaining ops use the C library rounding
// functions, which match the wasm round-to-nearest-ties-to-even semantics
// for nearbyintf in the default rounding mode.
inline Float32 ExecuteF32Abs(Float32 a, TrapReason* trap) {
  return Float32::FromBits(a.get_bits() & ~kFloat32SignBitMask);
}

inline Float32 ExecuteF32Neg(Float32 a, TrapReason* trap) {
  return Float32::FromBits(a.get_bits() ^ kFloat32SignBitMask);
}

inline float ExecuteF32Ceil(float a, TrapReason* trap) { return ceilf(a); }

inline float ExecuteF32Floor(float a, TrapReason* trap) { return floorf(a); }

inline float ExecuteF32Trunc(float a, TrapReason* trap) { return truncf(a); }

inline float ExecuteF32NearestInt(float a, TrapReason* trap) {
  return nearbyintf(a);
}

inline float ExecuteF32Sqrt(float a, TrapReason* trap) {
  float result = sqrtf(a);
  return result;
}
428 :
// f64 unops, mirroring the f32 versions above: bit-level abs/neg to preserve
// NaN payloads, C library functions for rounding and sqrt.
inline Float64 ExecuteF64Abs(Float64 a, TrapReason* trap) {
  return Float64::FromBits(a.get_bits() & ~kFloat64SignBitMask);
}

inline Float64 ExecuteF64Neg(Float64 a, TrapReason* trap) {
  return Float64::FromBits(a.get_bits() ^ kFloat64SignBitMask);
}

inline double ExecuteF64Ceil(double a, TrapReason* trap) { return ceil(a); }

inline double ExecuteF64Floor(double a, TrapReason* trap) { return floor(a); }

inline double ExecuteF64Trunc(double a, TrapReason* trap) { return trunc(a); }

inline double ExecuteF64NearestInt(double a, TrapReason* trap) {
  return nearbyint(a);
}

inline double ExecuteF64Sqrt(double a, TrapReason* trap) { return sqrt(a); }
448 :
// Trapping float->int truncation. If {a} (after truncation toward zero) is
// representable in int_type, returns the converted value; otherwise —
// including NaN — sets kTrapFloatUnrepresentable and returns 0.
template <typename int_type, typename float_type>
int_type ExecuteConvert(float_type a, TrapReason* trap) {
  if (is_inbounds<int_type>(a)) {
    return static_cast<int_type>(a);
  }
  *trap = kTrapFloatUnrepresentable;
  return 0;
}
457 :
458 : template <typename int_type, typename float_type>
459 : int_type ExecuteConvertSaturate(float_type a) {
460 : TrapReason base_trap = kTrapCount;
461 : int32_t val = ExecuteConvert<int_type>(a, &base_trap);
462 1312 : if (base_trap == kTrapCount) {
463 : return val;
464 : }
465 : return std::isnan(a) ? 0
466 : : (a < static_cast<float_type>(0.0)
467 : ? std::numeric_limits<int_type>::min()
468 604 : : std::numeric_limits<int_type>::max());
469 : }
470 :
471 : template <typename dst_type, typename src_type, void (*fn)(Address)>
472 604 : inline dst_type CallExternalIntToFloatFunction(src_type input) {
473 604 : uint8_t data[std::max(sizeof(dst_type), sizeof(src_type))];
474 604 : Address data_addr = reinterpret_cast<Address>(data);
475 : WriteUnalignedValue<src_type>(data_addr, input);
476 604 : fn(data_addr);
477 1208 : return ReadUnalignedValue<dst_type>(data_addr);
478 : }
479 :
480 : template <typename dst_type, typename src_type, int32_t (*fn)(Address)>
481 2956 : inline dst_type CallExternalFloatToIntFunction(src_type input,
482 : TrapReason* trap) {
483 2956 : uint8_t data[std::max(sizeof(dst_type), sizeof(src_type))];
484 2956 : Address data_addr = reinterpret_cast<Address>(data);
485 : WriteUnalignedValue<src_type>(data_addr, input);
486 2956 : if (!fn(data_addr)) *trap = kTrapFloatUnrepresentable;
487 5912 : return ReadUnalignedValue<dst_type>(data_addr);
488 : }
489 :
490 : inline uint32_t ExecuteI32ConvertI64(int64_t a, TrapReason* trap) {
491 158032 : return static_cast<uint32_t>(a & 0xFFFFFFFF);
492 : }
493 :
// Trapping float->i64 signed conversions, routed through out-of-line C++
// wrappers (some 32-bit targets lack a direct instruction for these).
int64_t ExecuteI64SConvertF32(float a, TrapReason* trap) {
  return CallExternalFloatToIntFunction<int64_t, float,
                                        float32_to_int64_wrapper>(a, trap);
}

// Saturating variant: delegates to the trapping conversion, then replaces a
// trap by the clamped result (NaN -> 0, negative overflow -> INT64_MIN,
// positive overflow -> INT64_MAX).
int64_t ExecuteI64SConvertSatF32(float a) {
  TrapReason base_trap = kTrapCount;
  int64_t val = ExecuteI64SConvertF32(a, &base_trap);
  if (base_trap == kTrapCount) {
    return val;
  }
  return std::isnan(a) ? 0
                       : (a < 0.0 ? std::numeric_limits<int64_t>::min()
                                  : std::numeric_limits<int64_t>::max());
}

int64_t ExecuteI64SConvertF64(double a, TrapReason* trap) {
  return CallExternalFloatToIntFunction<int64_t, double,
                                        float64_to_int64_wrapper>(a, trap);
}

// Saturating variant of the f64 conversion, same fallback logic as above.
int64_t ExecuteI64SConvertSatF64(double a) {
  TrapReason base_trap = kTrapCount;
  int64_t val = ExecuteI64SConvertF64(a, &base_trap);
  if (base_trap == kTrapCount) {
    return val;
  }
  return std::isnan(a) ? 0
                       : (a < 0.0 ? std::numeric_limits<int64_t>::min()
                                  : std::numeric_limits<int64_t>::max());
}
525 :
// Trapping float->i64 unsigned conversions via out-of-line wrappers.
uint64_t ExecuteI64UConvertF32(float a, TrapReason* trap) {
  return CallExternalFloatToIntFunction<uint64_t, float,
                                        float32_to_uint64_wrapper>(a, trap);
}

// Saturating variant: a trap becomes the clamped result (NaN -> 0,
// negative -> 0 via uint64_t's min, positive overflow -> UINT64_MAX).
uint64_t ExecuteI64UConvertSatF32(float a) {
  TrapReason base_trap = kTrapCount;
  uint64_t val = ExecuteI64UConvertF32(a, &base_trap);
  if (base_trap == kTrapCount) {
    return val;
  }
  return std::isnan(a) ? 0
                       : (a < 0.0 ? std::numeric_limits<uint64_t>::min()
                                  : std::numeric_limits<uint64_t>::max());
}

uint64_t ExecuteI64UConvertF64(double a, TrapReason* trap) {
  return CallExternalFloatToIntFunction<uint64_t, double,
                                        float64_to_uint64_wrapper>(a, trap);
}
546 :
547 196 : uint64_t ExecuteI64UConvertSatF64(double a) {
548 196 : TrapReason base_trap = kTrapCount;
549 : int64_t val = ExecuteI64UConvertF64(a, &base_trap);
550 196 : if (base_trap == kTrapCount) {
551 : return val;
552 : }
553 : return std::isnan(a) ? 0
554 : : (a < 0.0 ? std::numeric_limits<uint64_t>::min()
555 80 : : std::numeric_limits<uint64_t>::max());
556 : }
557 :
// Simple width/signedness conversions and bit reinterpretations. The 64-bit
// unsigned-to-float cases go through out-of-line wrappers (see
// CallExternalIntToFloatFunction); everything else is a plain cast.
inline int64_t ExecuteI64SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<int64_t>(a);
}

// Zero-extension: the cast to uint64_t avoids sign-extending {a}.
inline int64_t ExecuteI64UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<uint64_t>(a);
}

inline float ExecuteF32SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

inline float ExecuteF32UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

inline float ExecuteF32SConvertI64(int64_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

inline float ExecuteF32UConvertI64(uint64_t a, TrapReason* trap) {
  return CallExternalIntToFloatFunction<float, uint64_t,
                                        uint64_to_float32_wrapper>(a);
}

inline float ExecuteF32ConvertF64(double a, TrapReason* trap) {
  return static_cast<float>(a);
}

// Reinterpretations return boxed floats so NaN bit patterns survive intact.
inline Float32 ExecuteF32ReinterpretI32(int32_t a, TrapReason* trap) {
  return Float32::FromBits(a);
}

inline double ExecuteF64SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

inline double ExecuteF64UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

inline double ExecuteF64SConvertI64(int64_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

inline double ExecuteF64UConvertI64(uint64_t a, TrapReason* trap) {
  return CallExternalIntToFloatFunction<double, uint64_t,
                                        uint64_to_float64_wrapper>(a);
}

inline double ExecuteF64ConvertF32(float a, TrapReason* trap) {
  return static_cast<double>(a);
}

inline Float64 ExecuteF64ReinterpretI64(int64_t a, TrapReason* trap) {
  return Float64::FromBits(a);
}

// These take the boxed WasmValue directly so the exact float bits (including
// NaN payloads) are read, not a canonicalized double.
inline int32_t ExecuteI32ReinterpretF32(WasmValue a) {
  return a.to_f32_boxed().get_bits();
}

inline int64_t ExecuteI64ReinterpretF64(WasmValue a) {
  return a.to_f64_boxed().get_bits();
}
623 :
// Interpreter-internal opcodes, expanded from FOREACH_INTERNAL_OPCODE.
// Currently only the breakpoint marker (0xFF), which is patched into the
// copied bytecode in place of the original instruction.
enum InternalOpcode {
#define DECL_INTERNAL_ENUM(name, value) kInternal##name = value,
  FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_ENUM)
#undef DECL_INTERNAL_ENUM
};
629 :
// Returns a printable name for {val}: interpreter-internal opcodes get an
// "Internal" prefix, everything else falls through to the regular wasm
// opcode name table.
const char* OpcodeName(uint32_t val) {
  switch (val) {
#define DECL_INTERNAL_CASE(name, value) \
  case kInternal##name:                 \
    return "Internal" #name;
    FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_CASE)
#undef DECL_INTERNAL_CASE
  }
  return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(val));
}
640 :
641 : constexpr uint32_t kCatchInArity = 1;
642 :
643 : } // namespace
644 :
645 : class SideTable;
646 :
// Code and metadata needed to execute a function. {start}/{end} point at a
// mutable copy of the bytecode (so breakpoints can be patched in), while
// {orig_start}/{orig_end} keep the pristine original.
struct InterpreterCode {
  const WasmFunction* function;  // wasm function
  BodyLocalDecls locals;         // local declarations
  const byte* orig_start;        // start of original code
  const byte* orig_end;          // end of original code
  byte* start;                   // start of (maybe altered) code
  byte* end;                     // end of (maybe altered) code
  SideTable* side_table;         // precomputed side table for control flow.

  // Pointer into the (possibly patched) code at offset {pc}.
  const byte* at(pc_t pc) { return start + pc; }
};
659 :
660 : // A helper class to compute the control transfers for each bytecode offset.
661 : // Control transfers allow Br, BrIf, BrTable, If, Else, and End bytecodes to
662 : // be directly executed without the need to dynamically track blocks.
663 30 : class SideTable : public ZoneObject {
664 : public:
665 : ControlTransferMap map_;
666 : uint32_t max_stack_height_ = 0;
667 :
668 368155 : SideTable(Zone* zone, const WasmModule* module, InterpreterCode* code)
669 368155 : : map_(zone) {
670 : // Create a zone for all temporary objects.
671 736310 : Zone control_transfer_zone(zone->allocator(), ZONE_NAME);
672 :
673 : // Represents a control flow label.
674 : class CLabel : public ZoneObject {
675 : explicit CLabel(Zone* zone, uint32_t target_stack_height, uint32_t arity)
676 : : target_stack_height(target_stack_height),
677 : arity(arity),
678 391801 : refs(zone) {}
679 :
680 : public:
681 : struct Ref {
682 : const byte* from_pc;
683 : const uint32_t stack_height;
684 : };
685 : const byte* target = nullptr;
686 : uint32_t target_stack_height;
687 : // Arity when branching to this label.
688 : const uint32_t arity;
689 : ZoneVector<Ref> refs;
690 :
691 : static CLabel* New(Zone* zone, uint32_t stack_height, uint32_t arity) {
692 : return new (zone) CLabel(zone, stack_height, arity);
693 : }
694 :
695 : // Bind this label to the given PC.
696 : void Bind(const byte* pc) {
697 : DCHECK_NULL(target);
698 391801 : target = pc;
699 : }
700 :
701 : // Reference this label from the given location.
702 : void Ref(const byte* from_pc, uint32_t stack_height) {
703 : // Target being bound before a reference means this is a loop.
704 : DCHECK_IMPLIES(target, *target == kExprLoop);
705 38100 : refs.push_back({from_pc, stack_height});
706 : }
707 :
708 391801 : void Finish(ControlTransferMap* map, const byte* start) {
709 : DCHECK_NOT_NULL(target);
710 410851 : for (auto ref : refs) {
711 19050 : size_t offset = static_cast<size_t>(ref.from_pc - start);
712 19050 : auto pcdiff = static_cast<pcdiff_t>(target - ref.from_pc);
713 : DCHECK_GE(ref.stack_height, target_stack_height);
714 : spdiff_t spdiff =
715 19050 : static_cast<spdiff_t>(ref.stack_height - target_stack_height);
716 : TRACE("control transfer @%zu: Δpc %d, stack %u->%u = -%u\n", offset,
717 : pcdiff, ref.stack_height, target_stack_height, spdiff);
718 19050 : ControlTransferEntry& entry = (*map)[offset];
719 19050 : entry.pc_diff = pcdiff;
720 19050 : entry.sp_diff = spdiff;
721 19050 : entry.target_arity = arity;
722 : }
723 391801 : }
724 : };
725 :
726 : // An entry in the control stack.
727 : struct Control {
728 : const byte* pc;
729 : CLabel* end_label;
730 : CLabel* else_label;
731 : // Arity (number of values on the stack) when exiting this control
732 : // structure via |end|.
733 : uint32_t exit_arity;
734 : // Track whether this block was already left, i.e. all further
735 : // instructions are unreachable.
736 : bool unreachable = false;
737 :
738 : Control(const byte* pc, CLabel* end_label, CLabel* else_label,
739 : uint32_t exit_arity)
740 : : pc(pc),
741 : end_label(end_label),
742 : else_label(else_label),
743 390761 : exit_arity(exit_arity) {}
744 : Control(const byte* pc, CLabel* end_label, uint32_t exit_arity)
745 : : Control(pc, end_label, nullptr, exit_arity) {}
746 :
747 390761 : void Finish(ControlTransferMap* map, const byte* start) {
748 390761 : end_label->Finish(map, start);
749 390761 : if (else_label) else_label->Finish(map, start);
750 390761 : }
751 : };
752 :
753 : // Compute the ControlTransfer map.
754 : // This algorithm maintains a stack of control constructs similar to the
755 : // AST decoder. The {control_stack} allows matching {br,br_if,br_table}
756 : // bytecodes with their target, as well as determining whether the current
757 : // bytecodes are within the true or false block of an else.
758 : ZoneVector<Control> control_stack(&control_transfer_zone);
759 : // It also maintains a stack of all nested {try} blocks to resolve local
760 : // handler targets for potentially throwing operations. These exceptional
761 : // control transfers are treated just like other branches in the resulting
762 : // map. This stack contains indices into the above control stack.
763 : ZoneVector<size_t> exception_stack(zone);
764 : uint32_t stack_height = 0;
765 : uint32_t func_arity =
766 368155 : static_cast<uint32_t>(code->function->sig->return_count());
767 : CLabel* func_label =
768 368155 : CLabel::New(&control_transfer_zone, stack_height, func_arity);
769 368155 : control_stack.emplace_back(code->orig_start, func_label, func_arity);
770 : auto control_parent = [&]() -> Control& {
771 : DCHECK_LE(2, control_stack.size());
772 46757 : return control_stack[control_stack.size() - 2];
773 368155 : };
774 : auto copy_unreachable = [&] {
775 23121 : control_stack.back().unreachable = control_parent().unreachable;
776 : };
777 2378274 : for (BytecodeIterator i(code->orig_start, code->orig_end, &code->locals);
778 2010119 : i.has_next(); i.next()) {
779 : WasmOpcode opcode = i.current();
780 : uint32_t exceptional_stack_height = 0;
781 2010119 : if (WasmOpcodes::IsPrefixOpcode(opcode)) opcode = i.prefixed_opcode();
782 2010119 : bool unreachable = control_stack.back().unreachable;
783 2010119 : if (unreachable) {
784 : TRACE("@%u: %s (is unreachable)\n", i.pc_offset(),
785 : WasmOpcodes::OpcodeName(opcode));
786 : } else {
787 : auto stack_effect =
788 1987016 : StackEffect(module, code->function->sig, i.pc(), i.end());
789 : TRACE("@%u: %s (sp %d - %d + %d)\n", i.pc_offset(),
790 : WasmOpcodes::OpcodeName(opcode), stack_height, stack_effect.first,
791 : stack_effect.second);
792 : DCHECK_GE(stack_height, stack_effect.first);
793 : DCHECK_GE(kMaxUInt32, static_cast<uint64_t>(stack_height) -
794 : stack_effect.first + stack_effect.second);
795 1987016 : exceptional_stack_height = stack_height - stack_effect.first;
796 1987016 : stack_height = stack_height - stack_effect.first + stack_effect.second;
797 1987016 : if (stack_height > max_stack_height_) max_stack_height_ = stack_height;
798 : }
799 2010119 : if (!exception_stack.empty() && WasmOpcodes::IsThrowingOpcode(opcode)) {
800 : // Record exceptional control flow from potentially throwing opcodes to
801 : // the local handler if one is present. The stack height at the throw
802 : // point is assumed to have popped all operands and not pushed any yet.
803 : DCHECK_GE(control_stack.size() - 1, exception_stack.back());
804 356 : const Control* c = &control_stack[exception_stack.back()];
805 356 : if (!unreachable) c->else_label->Ref(i.pc(), exceptional_stack_height);
806 356 : if (exceptional_stack_height + kCatchInArity > max_stack_height_) {
807 64 : max_stack_height_ = exceptional_stack_height + kCatchInArity;
808 : }
809 : TRACE("handler @%u: %s -> try @%u\n", i.pc_offset(), OpcodeName(opcode),
810 : static_cast<uint32_t>(c->pc - code->start));
811 : }
812 2010119 : switch (opcode) {
813 : case kExprBlock:
814 : case kExprLoop: {
815 : bool is_loop = opcode == kExprLoop;
816 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
817 21566 : i.pc());
818 21566 : if (imm.type == kWasmVar) {
819 16 : imm.sig = module->signatures[imm.sig_index];
820 : }
821 : TRACE("control @%u: %s, arity %d->%d\n", i.pc_offset(),
822 : is_loop ? "Loop" : "Block", imm.in_arity(), imm.out_arity());
823 : CLabel* label =
824 21566 : CLabel::New(&control_transfer_zone, stack_height,
825 21566 : is_loop ? imm.in_arity() : imm.out_arity());
826 21566 : control_stack.emplace_back(i.pc(), label, imm.out_arity());
827 : copy_unreachable();
828 21566 : if (is_loop) label->Bind(i.pc());
829 : break;
830 : }
831 : case kExprIf: {
832 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
833 740 : i.pc());
834 740 : if (imm.type == kWasmVar) {
835 8 : imm.sig = module->signatures[imm.sig_index];
836 : }
837 : TRACE("control @%u: If, arity %d->%d\n", i.pc_offset(),
838 : imm.in_arity(), imm.out_arity());
839 : CLabel* end_label = CLabel::New(&control_transfer_zone, stack_height,
840 740 : imm.out_arity());
841 : CLabel* else_label =
842 740 : CLabel::New(&control_transfer_zone, stack_height, 0);
843 1480 : control_stack.emplace_back(i.pc(), end_label, else_label,
844 740 : imm.out_arity());
845 : copy_unreachable();
846 740 : if (!unreachable) else_label->Ref(i.pc(), stack_height);
847 : break;
848 : }
849 : case kExprElse: {
850 : Control* c = &control_stack.back();
851 : copy_unreachable();
852 : TRACE("control @%u: Else\n", i.pc_offset());
853 215 : if (!control_parent().unreachable) {
854 210 : c->end_label->Ref(i.pc(), stack_height);
855 : }
856 : DCHECK_NOT_NULL(c->else_label);
857 215 : c->else_label->Bind(i.pc() + 1);
858 215 : c->else_label->Finish(&map_, code->orig_start);
859 215 : c->else_label = nullptr;
860 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
861 215 : stack_height = c->end_label->target_stack_height;
862 215 : break;
863 : }
864 : case kExprTry: {
865 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
866 300 : i.pc());
867 300 : if (imm.type == kWasmVar) {
868 0 : imm.sig = module->signatures[imm.sig_index];
869 : }
870 : TRACE("control @%u: Try, arity %d->%d\n", i.pc_offset(),
871 : imm.in_arity(), imm.out_arity());
872 : CLabel* end_label = CLabel::New(&control_transfer_zone, stack_height,
873 300 : imm.out_arity());
874 : CLabel* catch_label =
875 300 : CLabel::New(&control_transfer_zone, stack_height, kCatchInArity);
876 600 : control_stack.emplace_back(i.pc(), end_label, catch_label,
877 300 : imm.out_arity());
878 600 : exception_stack.push_back(control_stack.size() - 1);
879 : copy_unreachable();
880 : break;
881 : }
882 : case kExprCatch: {
883 : DCHECK_EQ(control_stack.size() - 1, exception_stack.back());
884 : Control* c = &control_stack.back();
885 : exception_stack.pop_back();
886 : copy_unreachable();
887 : TRACE("control @%u: Catch\n", i.pc_offset());
888 300 : if (!control_parent().unreachable) {
889 300 : c->end_label->Ref(i.pc(), stack_height);
890 : }
891 : DCHECK_NOT_NULL(c->else_label);
892 300 : c->else_label->Bind(i.pc() + 1);
893 300 : c->else_label->Finish(&map_, code->orig_start);
894 300 : c->else_label = nullptr;
895 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
896 300 : stack_height = c->end_label->target_stack_height + kCatchInArity;
897 300 : break;
898 : }
899 : case kExprBrOnExn: {
900 192 : BranchOnExceptionImmediate<Decoder::kNoValidate> imm(&i, i.pc());
901 192 : uint32_t depth = imm.depth.depth; // Extracted for convenience.
902 384 : imm.index.exception = &module->exceptions[imm.index.index];
903 : DCHECK_EQ(0, imm.index.exception->sig->return_count());
904 192 : size_t params = imm.index.exception->sig->parameter_count();
905 : // Taken branches pop the exception and push the encoded values.
906 192 : uint32_t height = stack_height - 1 + static_cast<uint32_t>(params);
907 : TRACE("control @%u: BrOnExn[depth=%u]\n", i.pc_offset(), depth);
908 192 : Control* c = &control_stack[control_stack.size() - depth - 1];
909 192 : if (!unreachable) c->end_label->Ref(i.pc(), height);
910 : break;
911 : }
912 : case kExprEnd: {
913 : Control* c = &control_stack.back();
914 : TRACE("control @%u: End\n", i.pc_offset());
915 : // Only loops have bound labels.
916 : DCHECK_IMPLIES(c->end_label->target, *c->pc == kExprLoop);
917 390761 : if (!c->end_label->target) {
918 390592 : if (c->else_label) c->else_label->Bind(i.pc());
919 390592 : c->end_label->Bind(i.pc() + 1);
920 : }
921 390761 : c->Finish(&map_, code->orig_start);
922 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
923 390761 : stack_height = c->end_label->target_stack_height + c->exit_arity;
924 : control_stack.pop_back();
925 : break;
926 : }
927 : case kExprBr: {
928 : BranchDepthImmediate<Decoder::kNoValidate> imm(&i, i.pc());
929 : TRACE("control @%u: Br[depth=%u]\n", i.pc_offset(), imm.depth);
930 430 : Control* c = &control_stack[control_stack.size() - imm.depth - 1];
931 430 : if (!unreachable) c->end_label->Ref(i.pc(), stack_height);
932 : break;
933 : }
934 : case kExprBrIf: {
935 : BranchDepthImmediate<Decoder::kNoValidate> imm(&i, i.pc());
936 : TRACE("control @%u: BrIf[depth=%u]\n", i.pc_offset(), imm.depth);
937 97 : Control* c = &control_stack[control_stack.size() - imm.depth - 1];
938 97 : if (!unreachable) c->end_label->Ref(i.pc(), stack_height);
939 : break;
940 : }
941 : case kExprBrTable: {
942 4205 : BranchTableImmediate<Decoder::kNoValidate> imm(&i, i.pc());
943 : BranchTableIterator<Decoder::kNoValidate> iterator(&i, imm);
944 : TRACE("control @%u: BrTable[count=%u]\n", i.pc_offset(),
945 : imm.table_count);
946 4205 : if (!unreachable) {
947 20945 : while (iterator.has_next()) {
948 : uint32_t j = iterator.cur_index();
949 16740 : uint32_t target = iterator.next();
950 16740 : Control* c = &control_stack[control_stack.size() - target - 1];
951 16740 : c->end_label->Ref(i.pc() + j, stack_height);
952 : }
953 : }
954 : break;
955 : }
956 : default:
957 : break;
958 : }
959 2010119 : if (WasmOpcodes::IsUnconditionalJump(opcode)) {
960 21876 : control_stack.back().unreachable = true;
961 : }
962 : }
963 : DCHECK_EQ(0, control_stack.size());
964 : DCHECK_EQ(func_arity, stack_height);
965 368155 : }
966 :
967 : bool HasEntryAt(pc_t from) {
968 : auto result = map_.find(from);
969 : return result != map_.end();
970 : }
971 :
972 : ControlTransferEntry& Lookup(pc_t from) {
973 : auto result = map_.find(from);
974 : DCHECK(result != map_.end());
975 : return result->second;
976 : }
977 : };
978 :
979 : // The main storage for interpreter code. It maps {WasmFunction} to the
980 : // metadata needed to execute each function.
981 366609 : class CodeMap {
982 : Zone* zone_;
983 : const WasmModule* module_;
984 : ZoneVector<InterpreterCode> interpreter_code_;
985 :
986 : public:
987 366609 : CodeMap(const WasmModule* module, const uint8_t* module_start, Zone* zone)
988 366609 : : zone_(zone), module_(module), interpreter_code_(zone) {
989 366609 : if (module == nullptr) return;
990 366609 : interpreter_code_.reserve(module->functions.size());
991 370431 : for (const WasmFunction& function : module->functions) {
992 3822 : if (function.imported) {
993 : DCHECK(!function.code.is_set());
994 1727 : AddFunction(&function, nullptr, nullptr);
995 : } else {
996 2095 : AddFunction(&function, module_start + function.code.offset(),
997 2095 : module_start + function.code.end_offset());
998 : }
999 : }
1000 : }
1001 :
1002 : const WasmModule* module() const { return module_; }
1003 :
1004 : InterpreterCode* GetCode(const WasmFunction* function) {
1005 : InterpreterCode* code = GetCode(function->func_index);
1006 : DCHECK_EQ(function, code->function);
1007 : return code;
1008 : }
1009 :
1010 : InterpreterCode* GetCode(uint32_t function_index) {
1011 : DCHECK_LT(function_index, interpreter_code_.size());
1012 10683272 : return Preprocess(&interpreter_code_[function_index]);
1013 : }
1014 :
1015 5707752 : InterpreterCode* Preprocess(InterpreterCode* code) {
1016 : DCHECK_EQ(code->function->imported, code->start == nullptr);
1017 5707752 : if (!code->side_table && code->start) {
1018 : // Compute the control targets map and the local declarations.
1019 736250 : code->side_table = new (zone_) SideTable(zone_, module_, code);
1020 : }
1021 5707752 : return code;
1022 : }
1023 :
1024 371418 : void AddFunction(const WasmFunction* function, const byte* code_start,
1025 : const byte* code_end) {
1026 : InterpreterCode code = {
1027 : function, BodyLocalDecls(zone_), code_start,
1028 : code_end, const_cast<byte*>(code_start), const_cast<byte*>(code_end),
1029 742836 : nullptr};
1030 :
1031 : DCHECK_EQ(interpreter_code_.size(), function->func_index);
1032 371418 : interpreter_code_.push_back(code);
1033 371418 : }
1034 :
1035 : void SetFunctionCode(const WasmFunction* function, const byte* start,
1036 : const byte* end) {
1037 : DCHECK_LT(function->func_index, interpreter_code_.size());
1038 366116 : InterpreterCode* code = &interpreter_code_[function->func_index];
1039 : DCHECK_EQ(function, code->function);
1040 366116 : code->orig_start = start;
1041 366116 : code->orig_end = end;
1042 366116 : code->start = const_cast<byte*>(start);
1043 366116 : code->end = const_cast<byte*>(end);
1044 366116 : code->side_table = nullptr;
1045 366116 : Preprocess(code);
1046 : }
1047 : };
1048 :
1049 : namespace {
1050 :
1051 : struct ExternalCallResult {
1052 : enum Type {
1053 : // The function should be executed inside this interpreter.
1054 : INTERNAL,
1055 : // For indirect calls: Table or function does not exist.
1056 : INVALID_FUNC,
1057 : // For indirect calls: Signature does not match expected signature.
1058 : SIGNATURE_MISMATCH,
1059 : // The function was executed and returned normally.
1060 : EXTERNAL_RETURNED,
1061 : // The function was executed, threw an exception, and the stack was unwound.
1062 : EXTERNAL_UNWOUND,
1063 : // The function was executed and threw an exception that was locally caught.
1064 : EXTERNAL_CAUGHT
1065 : };
1066 : Type type;
1067 : // If type is INTERNAL, this field holds the function to call internally.
1068 : InterpreterCode* interpreter_code;
1069 :
1070 : ExternalCallResult(Type type) : type(type) { // NOLINT
1071 : DCHECK_NE(INTERNAL, type);
1072 : }
1073 : ExternalCallResult(Type type, InterpreterCode* code)
1074 : : type(type), interpreter_code(code) {
1075 : DCHECK_EQ(INTERNAL, type);
1076 : }
1077 : };
1078 :
1079 : // Like a static_cast from src to dst, but specialized for boxed floats.
1080 : template <typename dst, typename src>
1081 : struct converter {
1082 512 : dst operator()(src val) const { return static_cast<dst>(val); }
1083 : };
1084 : template <>
1085 : struct converter<Float64, uint64_t> {
1086 : Float64 operator()(uint64_t val) const { return Float64::FromBits(val); }
1087 : };
1088 : template <>
1089 : struct converter<Float32, uint32_t> {
1090 : Float32 operator()(uint32_t val) const { return Float32::FromBits(val); }
1091 : };
1092 : template <>
1093 : struct converter<uint64_t, Float64> {
1094 : uint64_t operator()(Float64 val) const { return val.get_bits(); }
1095 : };
1096 : template <>
1097 : struct converter<uint32_t, Float32> {
1098 : uint32_t operator()(Float32 val) const { return val.get_bits(); }
1099 : };
1100 :
1101 : template <typename T>
1102 : V8_INLINE bool has_nondeterminism(T val) {
1103 : static_assert(!std::is_floating_point<T>::value, "missing specialization");
1104 : return false;
1105 : }
1106 : template <>
1107 : V8_INLINE bool has_nondeterminism<float>(float val) {
1108 : return std::isnan(val);
1109 : }
1110 : template <>
1111 : V8_INLINE bool has_nondeterminism<double>(double val) {
1112 : return std::isnan(val);
1113 : }
1114 :
1115 : } // namespace
1116 :
1117 : // Responsible for executing code directly.
1118 366609 : class ThreadImpl {
  // Bookkeeping for one activation: the frame stack height ({fp}) and value
  // stack height ({sp}) recorded when the activation was started (see
  // {StartActivation}).
  struct Activation {
    uint32_t fp;
    sp_t sp;
    Activation(uint32_t fp, sp_t sp) : fp(fp), sp(sp) {}
  };

 public:
  ThreadImpl(Zone* zone, CodeMap* codemap,
             Handle<WasmInstanceObject> instance_object,
             Handle<Cell> reference_stack_cell)
      : codemap_(codemap),
        isolate_(instance_object->GetIsolate()),
        instance_object_(instance_object),
        reference_stack_cell_(reference_stack_cell),
        frames_(zone),
        activations_(zone) {}
1135 :
  //==========================================================================
  // Implementation of public interface for WasmInterpreter::Thread.
  //==========================================================================

  WasmInterpreter::State state() { return state_; }

  // Pushes {args} onto the value stack and a new frame for {function};
  // execution begins on the next call to {Run}.
  void InitFrame(const WasmFunction* function, WasmValue* args) {
    DCHECK_EQ(current_activation().fp, frames_.size());
    InterpreterCode* code = codemap()->GetCode(function);
    size_t num_params = function->sig->parameter_count();
    EnsureStackSpace(num_params);
    Push(args, num_params);
    PushFrame(code);
  }

  // Executes up to {num_steps} steps (unbounded for -1), resuming at the
  // topmost frame's saved pc. Returns the resulting thread state.
  WasmInterpreter::State Run(int num_steps = -1) {
    DCHECK(state_ == WasmInterpreter::STOPPED ||
           state_ == WasmInterpreter::PAUSED);
    DCHECK(num_steps == -1 || num_steps > 0);
    if (num_steps == -1) {
      TRACE("  => Run()\n");
    } else if (num_steps == 1) {
      TRACE("  => Step()\n");
    } else {
      TRACE("  => Run(%d)\n", num_steps);
    }
    state_ = WasmInterpreter::RUNNING;
    Execute(frames_.back().code, frames_.back().pc, num_steps);
    // If state_ is STOPPED, the current activation must be fully unwound.
    DCHECK_IMPLIES(state_ == WasmInterpreter::STOPPED,
                   current_activation().fp == frames_.size());
    return state_;
  }

  void Pause() { UNIMPLEMENTED(); }

  // Drops all frames and value-stack contents and returns to STOPPED.
  void Reset() {
    TRACE("----- RESET -----\n");
    sp_ = stack_.get();
    frames_.clear();
    state_ = WasmInterpreter::STOPPED;
    trap_reason_ = kTrapCount;
    possible_nondeterminism_ = false;
  }

  int GetFrameCount() {
    DCHECK_GE(kMaxInt, frames_.size());
    return static_cast<int>(frames_.size());
  }

  // Returns the {index}th return value of the finished activation, or a
  // dummy value (0xDEADBEEF) if execution trapped.
  WasmValue GetReturnValue(uint32_t index) {
    if (state_ == WasmInterpreter::TRAPPED) return WasmValue(0xDEADBEEF);
    DCHECK_EQ(WasmInterpreter::FINISHED, state_);
    Activation act = current_activation();
    // Current activation must be finished.
    DCHECK_EQ(act.fp, frames_.size());
    return GetStackValue(act.sp + index);
  }

  WasmValue GetStackValue(sp_t index) {
    DCHECK_GT(StackHeight(), index);
    return stack_[index].ExtractValue(this, index);
  }

  void SetStackValue(sp_t index, WasmValue value) {
    DCHECK_GT(StackHeight(), index);
    stack_[index] = StackValue(value, this, index);
  }

  TrapReason GetTrapReason() { return trap_reason_; }

  pc_t GetBreakpointPc() { return break_pc_; }

  bool PossibleNondeterminism() { return possible_nondeterminism_; }

  uint64_t NumInterpretedCalls() { return num_interpreted_calls_; }

  void AddBreakFlags(uint8_t flags) { break_flags_ |= flags; }

  void ClearBreakFlags() { break_flags_ = WasmInterpreter::BreakFlag::None; }

  Handle<Cell> reference_stack_cell() const { return reference_stack_cell_; }

  uint32_t NumActivations() {
    return static_cast<uint32_t>(activations_.size());
  }
1222 :
  // Opens a new activation on top of the current frame and value stacks and
  // returns its id (its index in {activations_}).
  uint32_t StartActivation() {
    TRACE("----- START ACTIVATION %zu -----\n", activations_.size());
    // If you use activations, use them consistently:
    DCHECK_IMPLIES(activations_.empty(), frames_.empty());
    DCHECK_IMPLIES(activations_.empty(), StackHeight() == 0);
    uint32_t activation_id = static_cast<uint32_t>(activations_.size());
    activations_.emplace_back(static_cast<uint32_t>(frames_.size()),
                              StackHeight());
    state_ = WasmInterpreter::STOPPED;
    return activation_id;
  }

  // Closes the topmost activation and resets the value stack to the height
  // recorded at its start.
  void FinishActivation(uint32_t id) {
    TRACE("----- FINISH ACTIVATION %zu -----\n", activations_.size() - 1);
    DCHECK_LT(0, activations_.size());
    DCHECK_EQ(activations_.size() - 1, id);
    // Stack height must match the start of this activation (otherwise unwind
    // first).
    DCHECK_EQ(activations_.back().fp, frames_.size());
    DCHECK_LE(activations_.back().sp, StackHeight());
    sp_ = stack_.get() + activations_.back().sp;
    activations_.pop_back();
  }

  // Returns the index of the first frame belonging to activation {id}.
  uint32_t ActivationFrameBase(uint32_t id) {
    DCHECK_GT(activations_.size(), id);
    return activations_[id].fp;
  }

  // Makes {exception} pending on the isolate and tries to handle it within
  // this activation; see {HandleException} for the unwinding logic.
  WasmInterpreter::Thread::ExceptionHandlingResult RaiseException(
      Isolate* isolate, Handle<Object> exception) {
    DCHECK_EQ(WasmInterpreter::TRAPPED, state_);
    isolate->Throw(*exception);  // Will check that none is pending.
    if (HandleException(isolate) == WasmInterpreter::Thread::UNWOUND) {
      DCHECK_EQ(WasmInterpreter::STOPPED, state_);
      return WasmInterpreter::Thread::UNWOUND;
    }
    state_ = WasmInterpreter::PAUSED;
    return WasmInterpreter::Thread::HANDLED;
  }
1263 :
 private:
  // Handle a thrown exception. Returns whether the exception was handled inside
  // the current activation. Unwinds the interpreted stack accordingly.
  WasmInterpreter::Thread::ExceptionHandlingResult HandleException(
      Isolate* isolate) {
    DCHECK(isolate->has_pending_exception());
    DCHECK_LT(0, activations_.size());
    Activation& act = activations_.back();
    // Walk frames innermost-first, stopping at the first frame whose side
    // table has a handler entry for the frame's pc.
    while (frames_.size() > act.fp) {
      Frame& frame = frames_.back();
      InterpreterCode* code = frame.code;
      if (code->side_table->HasEntryAt(frame.pc)) {
        TRACE("----- HANDLE -----\n");
        // Push the pending exception onto the operand stack, clear it, and
        // jump to the handler.
        Push(WasmValue(handle(isolate->pending_exception(), isolate)));
        isolate->clear_pending_exception();
        frame.pc += JumpToHandlerDelta(code, frame.pc);
        TRACE("  => handler #%zu (#%u @%zu)\n", frames_.size() - 1,
              code->function->func_index, frame.pc);
        return WasmInterpreter::Thread::HANDLED;
      }
      // No handler here: drop the frame and its portion of the value stack.
      TRACE("  => drop frame #%zu (#%u @%zu)\n", frames_.size() - 1,
            code->function->func_index, frame.pc);
      sp_ = stack_.get() + frame.sp;
      frames_.pop_back();
    }
    TRACE("----- UNWIND -----\n");
    DCHECK_EQ(act.fp, frames_.size());
    DCHECK_EQ(act.sp, StackHeight());
    state_ = WasmInterpreter::STOPPED;
    return WasmInterpreter::Thread::UNWOUND;
  }
1295 :
  // Entries on the stack of functions being evaluated.
  struct Frame {
    InterpreterCode* code;
    pc_t pc;  // pc within {code} at which to resume execution.
    sp_t sp;  // Value stack height at function entry (start of parameters).

    // Limit of parameters.
    sp_t plimit() { return sp + code->function->sig->parameter_count(); }
    // Limit of locals.
    sp_t llimit() { return plimit() + code->locals.type_list.size(); }
  };
1307 :
  // Safety wrapper for values on the operand stack represented as {WasmValue}.
  // Most values are stored directly on the stack, only reference values are
  // kept in a separate on-heap reference stack to make the GC trace them.
  // TODO(mstarzinger): Optimize simple stack operations (like "get_local",
  // "set_local", and "tee_local") so that they don't require a handle scope.
  // TODO(mstarzinger): Ensure unused slots on the reference stack are cleared
  // so that they don't keep alive old/stale references unnecessarily long.
  // TODO(mstarzinger): Consider optimizing activations that use no reference
  // values to avoid allocating the reference stack entirely.
  class StackValue {
   public:
    StackValue() = default;  // Only needed for resizing the stack.
    StackValue(WasmValue v, ThreadImpl* thread, sp_t index) : value_(v) {
      if (IsReferenceValue()) {
        // Keep only a null sentinel inline; the actual reference lives in
        // the on-heap reference stack at the same {index}.
        value_ = WasmValue(Handle<Object>::null());
        int ref_index = static_cast<int>(index);
        thread->reference_stack()->set(ref_index, *v.to_anyref());
      }
    }

    // Re-materializes the stored value; reference values are read back from
    // the on-heap reference stack.
    WasmValue ExtractValue(ThreadImpl* thread, sp_t index) {
      if (!IsReferenceValue()) return value_;
      DCHECK(value_.to_anyref().is_null());
      int ref_index = static_cast<int>(index);
      Isolate* isolate = thread->isolate_;
      Handle<Object> ref(thread->reference_stack()->get(ref_index), isolate);
      return WasmValue(ref);
    }

    bool IsReferenceValue() const { return value_.type() == kWasmAnyRef; }

   private:
    WasmValue value_;
  };
1342 :
  friend class InterpretedFrameImpl;

  CodeMap* codemap_;
  Isolate* isolate_;
  Handle<WasmInstanceObject> instance_object_;
  std::unique_ptr<StackValue[]> stack_;
  StackValue* stack_limit_ = nullptr;  // End of allocated stack space.
  StackValue* sp_ = nullptr;           // Current stack pointer.
  // The reference stack is pointed to by a {Cell} to be able to replace the
  // underlying {FixedArray} when growing the stack. This avoids having to
  // recreate or update the global handle keeping this object alive.
  Handle<Cell> reference_stack_cell_;  // References are on an on-heap stack.
  ZoneVector<Frame> frames_;
  WasmInterpreter::State state_ = WasmInterpreter::STOPPED;
  pc_t break_pc_ = kInvalidPc;
  TrapReason trap_reason_ = kTrapCount;
  bool possible_nondeterminism_ = false;
  uint8_t break_flags_ = 0;  // a combination of WasmInterpreter::BreakFlag
  uint64_t num_interpreted_calls_ = 0;
  // Store the stack height of each activation (for unwind and frame
  // inspection).
  ZoneVector<Activation> activations_;

  // Convenience accessors.
  CodeMap* codemap() const { return codemap_; }
  const WasmModule* module() const { return codemap_->module(); }
  FixedArray reference_stack() const {
    return FixedArray::cast(reference_stack_cell_->value());
  }
1371 :
  // Records a trap: sets the thread state and reason, and saves the faulting
  // pc into the topmost frame.
  void DoTrap(TrapReason trap, pc_t pc) {
    TRACE("TRAP: %s\n", WasmOpcodes::TrapReasonMessage(trap));
    state_ = WasmInterpreter::TRAPPED;
    trap_reason_ = trap;
    CommitPc(pc);
  }

  // Check if there is room for a function's activation.
  void EnsureStackSpaceForCall(InterpreterCode* code) {
    EnsureStackSpace(code->side_table->max_stack_height_ +
                     code->locals.type_list.size());
    DCHECK_GE(StackHeight(), code->function->sig->parameter_count());
  }

  // Push a frame with arguments already on the stack.
  void PushFrame(InterpreterCode* code) {
    DCHECK_NOT_NULL(code);
    DCHECK_NOT_NULL(code->side_table);
    EnsureStackSpaceForCall(code);

    ++num_interpreted_calls_;
    size_t arity = code->function->sig->parameter_count();
    // The parameters will overlap the arguments already on the stack.
    DCHECK_GE(StackHeight(), arity);

    frames_.push_back({code, 0, StackHeight() - arity});
    // {InitLocals} pushes default values for the locals and returns the pc
    // just past the encoded local declarations.
    frames_.back().pc = InitLocals(code);
    TRACE("  => PushFrame #%zu (#%u @%zu)\n", frames_.size() - 1,
          code->function->func_index, frames_.back().pc);
  }
1402 :
  // Pushes a default-initialized value for every declared local and returns
  // the offset of the first instruction after the encoded local declarations.
  pc_t InitLocals(InterpreterCode* code) {
    for (auto p : code->locals.type_list) {
      WasmValue val;
      switch (p) {
#define CASE_TYPE(wasm, ctype) \
  case kWasm##wasm:            \
    val = WasmValue(ctype{});  \
    break;
        WASM_CTYPES(CASE_TYPE)
#undef CASE_TYPE
        case kWasmAnyRef:
        case kWasmAnyFunc:
        case kWasmExceptRef: {
          // Reference locals start out as null.
          val = WasmValue(isolate_->factory()->null_value());
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
      Push(val);
    }
    return code->locals.encoded_size;
  }

  // Saves {pc} into the topmost frame (e.g. before trapping, see {DoTrap}).
  void CommitPc(pc_t pc) {
    DCHECK(!frames_.empty());
    frames_.back().pc = pc;
  }

  // Returns true exactly once for the breakpoint being resumed from, so that
  // execution does not immediately re-trigger it.
  bool SkipBreakpoint(InterpreterCode* code, pc_t pc) {
    if (pc == break_pc_) {
      // Skip the previously hit breakpoint when resuming.
      break_pc_ = kInvalidPc;
      return true;
    }
    return false;
  }
1441 :
  // Refreshes the cached decoder/code/pc/limit from the topmost frame after
  // exception handling transferred control to a different frame.
  void ReloadFromFrameOnException(Decoder* decoder, InterpreterCode** code,
                                  pc_t* pc, pc_t* limit) {
    Frame* top = &frames_.back();
    *code = top->code;
    *pc = top->pc;
    *limit = top->code->end - top->code->start;
    decoder->Reset(top->code->start, top->code->end);
  }

  // Returns the pc delta recorded in the side table for the transfer at {pc}.
  int LookupTargetDelta(InterpreterCode* code, pc_t pc) {
    return static_cast<int>(code->side_table->Lookup(pc).pc_diff);
  }

  // Stack transfer for entering an exception handler: besides {sp_diff},
  // {kCatchInArity} extra slots (the pushed exception) are accounted for.
  int JumpToHandlerDelta(InterpreterCode* code, pc_t pc) {
    ControlTransferEntry& control_transfer_entry = code->side_table->Lookup(pc);
    DoStackTransfer(sp_ - (control_transfer_entry.sp_diff + kCatchInArity),
                    control_transfer_entry.target_arity);
    return control_transfer_entry.pc_diff;
  }

  // Executes a taken branch: transfers the branch arity down the stack and
  // returns the pc delta to the target.
  // NOTE(review): {depth} is unused here; the side table entry for {pc}
  // already encodes the target.
  int DoBreak(InterpreterCode* code, pc_t pc, size_t depth) {
    ControlTransferEntry& control_transfer_entry = code->side_table->Lookup(pc);
    DoStackTransfer(sp_ - control_transfer_entry.sp_diff,
                    control_transfer_entry.target_arity);
    return control_transfer_entry.pc_diff;
  }
1468 :
  // Computes the pc at which the caller resumes: the instruction after the
  // call (direct or indirect) found at {pc} in the caller's code.
  pc_t ReturnPc(Decoder* decoder, InterpreterCode* code, pc_t pc) {
    switch (code->orig_start[pc]) {
      case kExprCallFunction: {
        CallFunctionImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
        return pc + 1 + imm.length;
      }
      case kExprCallIndirect: {
        CallIndirectImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures,
                                                        decoder, code->at(pc));
        return pc + 1 + imm.length;
      }
      default:
        UNREACHABLE();
    }
  }

  // Pops the topmost frame and moves its {arity} return values down to the
  // frame's base. Returns false if this finished the current activation,
  // true if control returned to an interpreted caller frame.
  bool DoReturn(Decoder* decoder, InterpreterCode** code, pc_t* pc, pc_t* limit,
                size_t arity) {
    DCHECK_GT(frames_.size(), 0);
    StackValue* sp_dest = stack_.get() + frames_.back().sp;
    frames_.pop_back();
    if (frames_.size() == current_activation().fp) {
      // A return from the last frame terminates the execution.
      state_ = WasmInterpreter::FINISHED;
      DoStackTransfer(sp_dest, arity);
      TRACE("  => finish\n");
      return false;
    } else {
      // Return to caller frame.
      Frame* top = &frames_.back();
      *code = top->code;
      decoder->Reset((*code)->start, (*code)->end);
      *pc = ReturnPc(decoder, *code, top->pc);
      *limit = top->code->end - top->code->start;
      TRACE("  => Return to #%zu (#%u @%zu)\n", frames_.size() - 1,
            (*code)->function->func_index, *pc);
      DoStackTransfer(sp_dest, arity);
      return true;
    }
  }
1509 :
  // Returns true if the call was successful, false if the stack check failed
  // and the current activation was fully unwound.
  bool DoCall(Decoder* decoder, InterpreterCode* target, pc_t* pc,
              pc_t* limit) V8_WARN_UNUSED_RESULT {
    frames_.back().pc = *pc;
    PushFrame(target);
    if (!DoStackCheck()) return false;
    *pc = frames_.back().pc;
    *limit = target->end - target->start;
    decoder->Reset(target->start, target->end);
    return true;
  }

  // Returns true if the tail call was successful, false if the stack check
  // failed.
  // NOTE(review): as written this always returns true -- the caller's frame
  // is reused, so no new frame (and no stack check) is involved; confirm
  // whether the comment above is stale.
  bool DoReturnCall(Decoder* decoder, InterpreterCode* target, pc_t* pc,
                    pc_t* limit) V8_WARN_UNUSED_RESULT {
    DCHECK_NOT_NULL(target);
    DCHECK_NOT_NULL(target->side_table);
    EnsureStackSpaceForCall(target);

    ++num_interpreted_calls_;

    Frame* top = &frames_.back();

    // Drop everything except current parameters.
    StackValue* sp_dest = stack_.get() + top->sp;
    size_t arity = target->function->sig->parameter_count();

    DoStackTransfer(sp_dest, arity);

    *limit = target->end - target->start;
    decoder->Reset(target->start, target->end);

    // Rebuild current frame to look like a call to callee.
    top->code = target;
    top->pc = 0;
    top->sp = StackHeight() - arity;
    top->pc = InitLocals(target);

    *pc = top->pc;

    TRACE("  => ReturnCall #%zu (#%u @%zu)\n", frames_.size() - 1,
          target->function->func_index, top->pc);

    return true;
  }
1557 :
  // Copies {arity} values on the top of the stack down the stack to {dest},
  // dropping the values in-between.
  void DoStackTransfer(StackValue* dest, size_t arity) {
    // before: |---------------| pop_count | arity |
    //         ^ 0             ^ dest              ^ sp_
    //
    // after:  |---------------| arity |
    //         ^ 0                     ^ sp_
    DCHECK_LE(dest, sp_);
    DCHECK_LE(dest + arity, sp_);
    if (arity && (dest != sp_ - arity)) {
      memmove(dest, sp_ - arity, arity * sizeof(*sp_));
      // Also move elements on the reference stack accordingly.
      // TODO(mstarzinger): Refactor the interface so that we don't have to
      // recompute values here which are already known at the call-site.
      int dst = static_cast<int>(StackHeight() - (sp_ - dest));
      int src = static_cast<int>(StackHeight() - arity);
      int len = static_cast<int>(arity);
      isolate_->heap()->MoveElements(reference_stack(), dst, src, len);
    }
    sp_ = dest + arity;
  }
1580 :
  inline Address EffectiveAddress(uint32_t index) {
    // Compute the effective address of the access, making sure to condition
    // the index even in the in-bounds case.
    return reinterpret_cast<Address>(instance_object_->memory_start()) +
           (index & instance_object_->memory_mask());
  }

  // Bounds-checks an access of sizeof(mtype) bytes at {offset + index},
  // guarding against 32-bit wraparound. Returns the effective address, or
  // kNullAddress if the access is out of bounds.
  template <typename mtype>
  inline Address BoundsCheckMem(uint32_t offset, uint32_t index) {
    uint32_t effective_index = offset + index;
    if (effective_index < index) {
      return kNullAddress;  // wraparound => oob
    }
    if (!IsInBounds(effective_index, sizeof(mtype),
                    instance_object_->memory_size())) {
      return kNullAddress;  // oob
    }
    return EffectiveAddress(effective_index);
  }

  // Clamps {size} to the memory bounds; returns whether the full range was
  // in bounds. {out_address} receives the start address either way.
  inline bool BoundsCheckMemRange(uint32_t index, uint32_t* size,
                                  Address* out_address) {
    bool ok = ClampToBounds(
        index, size, static_cast<uint32_t>(instance_object_->memory_size()));
    *out_address = EffectiveAddress(index);
    return ok;
  }
1608 :
  // Pops an index, loads an {mtype} from linear memory at {imm.offset +
  // index}, converts it to {ctype} (see {converter}), and pushes the result.
  // Returns false (after trapping) on an out-of-bounds access; {len} receives
  // the instruction length on success.
  template <typename ctype, typename mtype>
  bool ExecuteLoad(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len,
                   MachineRepresentation rep) {
    MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc),
                                                    sizeof(ctype));
    uint32_t index = Pop().to<uint32_t>();
    Address addr = BoundsCheckMem<mtype>(imm.offset, index);
    if (!addr) {
      DoTrap(kTrapMemOutOfBounds, pc);
      return false;
    }
    WasmValue result(
        converter<ctype, mtype>{}(ReadLittleEndianValue<mtype>(addr)));

    Push(result);
    len = 1 + imm.length;

    if (FLAG_trace_wasm_memory) {
      MemoryTracingInfo info(imm.offset + index, false, rep);
      TraceMemoryOperation(ExecutionTier::kInterpreter, &info,
                           code->function->func_index, static_cast<int>(pc),
                           instance_object_->memory_start());
    }

    return true;
  }
1635 :
  // Pops a {ctype} value and an index, converts the value to {mtype} (see
  // {converter}), and stores it to linear memory at {imm.offset + index}.
  // Returns false (after trapping) on an out-of-bounds access; {len} receives
  // the instruction length on success.
  template <typename ctype, typename mtype>
  bool ExecuteStore(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len,
                    MachineRepresentation rep) {
    MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc),
                                                    sizeof(ctype));
    ctype val = Pop().to<ctype>();

    uint32_t index = Pop().to<uint32_t>();
    Address addr = BoundsCheckMem<mtype>(imm.offset, index);
    if (!addr) {
      DoTrap(kTrapMemOutOfBounds, pc);
      return false;
    }
    WriteLittleEndianValue<mtype>(addr, converter<mtype, ctype>{}(val));
    len = 1 + imm.length;

    if (FLAG_trace_wasm_memory) {
      MemoryTracingInfo info(imm.offset + index, true, rep);
      TraceMemoryOperation(ExecutionTier::kInterpreter, &info,
                           code->function->func_index, static_cast<int>(pc),
                           instance_object_->memory_start());
    }

    return true;
  }
1661 :
  // Returns true if data segment {index} has not been dropped on this
  // instance; otherwise traps with kTrapDataSegmentDropped.
  bool CheckDataSegmentIsPassiveAndNotDropped(uint32_t index, pc_t pc) {
    DCHECK_LT(index, module()->num_declared_data_segments);
    if (instance_object_->dropped_data_segments()[index]) {
      DoTrap(kTrapDataSegmentDropped, pc);
      return false;
    }
    return true;
  }

  // Same as above, for element segments.
  bool CheckElemSegmentIsPassiveAndNotDropped(uint32_t index, pc_t pc) {
    DCHECK_LT(index, module()->elem_segments.size());
    if (instance_object_->dropped_elem_segments()[index]) {
      DoTrap(kTrapElemSegmentDropped, pc);
      return false;
    }
    return true;
  }

  // Pops the operands of an atomic memory op ({val2}, then {val}, then the
  // index), bounds-checks the access, and computes the effective {address}
  // and total instruction length {len}. Returns false (after trapping) when
  // the access is out of bounds.
  template <typename type, typename op_type>
  bool ExtractAtomicOpParams(Decoder* decoder, InterpreterCode* code,
                             Address& address, pc_t pc, int& len,
                             type* val = nullptr, type* val2 = nullptr) {
    MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc + 1),
                                                    sizeof(type));
    if (val2) *val2 = static_cast<type>(Pop().to<op_type>());
    if (val) *val = static_cast<type>(Pop().to<op_type>());
    uint32_t index = Pop().to<uint32_t>();
    address = BoundsCheckMem<type>(imm.offset, index);
    if (!address) {
      DoTrap(kTrapMemOutOfBounds, pc);
      return false;
    }
    len = 2 + imm.length;
    return true;
  }
1697 :
  // Executes one opcode from the "numeric" prefix: saturating float-to-int
  // conversions and the bulk-memory/table operations (memory.init, data.drop,
  // memory.copy, memory.fill, table.init, elem.drop, table.copy). Returns
  // false if the operation trapped; {len} is incremented by the length of any
  // decoded immediates.
  bool ExecuteNumericOp(WasmOpcode opcode, Decoder* decoder,
                        InterpreterCode* code, pc_t pc, int& len) {
    switch (opcode) {
      case kExprI32SConvertSatF32:
        Push(WasmValue(ExecuteConvertSaturate<int32_t>(Pop().to<float>())));
        return true;
      case kExprI32UConvertSatF32:
        Push(WasmValue(ExecuteConvertSaturate<uint32_t>(Pop().to<float>())));
        return true;
      case kExprI32SConvertSatF64:
        Push(WasmValue(ExecuteConvertSaturate<int32_t>(Pop().to<double>())));
        return true;
      case kExprI32UConvertSatF64:
        Push(WasmValue(ExecuteConvertSaturate<uint32_t>(Pop().to<double>())));
        return true;
      case kExprI64SConvertSatF32:
        Push(WasmValue(ExecuteI64SConvertSatF32(Pop().to<float>())));
        return true;
      case kExprI64UConvertSatF32:
        Push(WasmValue(ExecuteI64UConvertSatF32(Pop().to<float>())));
        return true;
      case kExprI64SConvertSatF64:
        Push(WasmValue(ExecuteI64SConvertSatF64(Pop().to<double>())));
        return true;
      case kExprI64UConvertSatF64:
        Push(WasmValue(ExecuteI64UConvertSatF64(Pop().to<double>())));
        return true;
      case kExprMemoryInit: {
        MemoryInitImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
        DCHECK_LT(imm.data_segment_index, module()->num_declared_data_segments);
        // {len} is bumped before the segment check so the pc advances
        // consistently even on trap.
        len += imm.length;
        if (!CheckDataSegmentIsPassiveAndNotDropped(imm.data_segment_index,
                                                    pc)) {
          return false;
        }
        auto size = Pop().to<uint32_t>();
        auto src = Pop().to<uint32_t>();
        auto dst = Pop().to<uint32_t>();
        Address dst_addr;
        // Note: the copy is performed with the clamped size even when the
        // range check fails; the trap is raised afterwards.
        bool ok = BoundsCheckMemRange(dst, &size, &dst_addr);
        auto src_max =
            instance_object_->data_segment_sizes()[imm.data_segment_index];
        // Use & instead of && so the clamp is not short-circuited.
        ok &= ClampToBounds(src, &size, src_max);
        Address src_addr =
            instance_object_->data_segment_starts()[imm.data_segment_index] +
            src;
        memory_copy_wrapper(dst_addr, src_addr, size);
        if (!ok) DoTrap(kTrapMemOutOfBounds, pc);
        return ok;
      }
      case kExprDataDrop: {
        DataDropImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
        len += imm.length;
        if (!CheckDataSegmentIsPassiveAndNotDropped(imm.index, pc)) {
          return false;
        }
        instance_object_->dropped_data_segments()[imm.index] = 1;
        return true;
      }
      case kExprMemoryCopy: {
        MemoryCopyImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
        auto size = Pop().to<uint32_t>();
        auto src = Pop().to<uint32_t>();
        auto dst = Pop().to<uint32_t>();
        Address dst_addr;
        bool copy_backward = src < dst && dst - src < size;
        bool ok = BoundsCheckMemRange(dst, &size, &dst_addr);
        // Trap without copying any bytes if we are copying backward and the
        // copy is partially out-of-bounds. We only need to check that the dst
        // region is out-of-bounds, because we know that {src < dst}, so the src
        // region is always out of bounds if the dst region is.
        if (ok || !copy_backward) {
          Address src_addr;
          // Use & instead of && so the bounds check is not short-circuited.
          ok &= BoundsCheckMemRange(src, &size, &src_addr);
          memory_copy_wrapper(dst_addr, src_addr, size);
        }
        if (!ok) DoTrap(kTrapMemOutOfBounds, pc);
        len += imm.length;
        return ok;
      }
      case kExprMemoryFill: {
        MemoryIndexImmediate<Decoder::kNoValidate> imm(decoder,
                                                       code->at(pc + 1));
        auto size = Pop().to<uint32_t>();
        auto value = Pop().to<uint32_t>();
        auto dst = Pop().to<uint32_t>();
        Address dst_addr;
        // The fill is performed with the clamped size even when the range
        // check fails; the trap is raised afterwards.
        bool ok = BoundsCheckMemRange(dst, &size, &dst_addr);
        memory_fill_wrapper(dst_addr, value, size);
        if (!ok) DoTrap(kTrapMemOutOfBounds, pc);
        len += imm.length;
        return ok;
      }
      case kExprTableInit: {
        TableInitImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
        len += imm.length;
        if (!CheckElemSegmentIsPassiveAndNotDropped(imm.elem_segment_index,
                                                    pc)) {
          return false;
        }
        auto size = Pop().to<uint32_t>();
        auto src = Pop().to<uint32_t>();
        auto dst = Pop().to<uint32_t>();
        HandleScope scope(isolate_);  // Avoid leaking handles.
        bool ok = WasmInstanceObject::InitTableEntries(
            instance_object_->GetIsolate(), instance_object_, imm.table.index,
            imm.elem_segment_index, dst, src, size);
        if (!ok) DoTrap(kTrapTableOutOfBounds, pc);
        return ok;
      }
      case kExprElemDrop: {
        ElemDropImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
        len += imm.length;
        if (!CheckElemSegmentIsPassiveAndNotDropped(imm.index, pc)) {
          return false;
        }
        instance_object_->dropped_elem_segments()[imm.index] = 1;
        return true;
      }
      case kExprTableCopy: {
        TableCopyImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
        auto size = Pop().to<uint32_t>();
        auto src = Pop().to<uint32_t>();
        auto dst = Pop().to<uint32_t>();
        HandleScope handle_scope(isolate_);  // Avoid leaking handles.
        bool ok = WasmInstanceObject::CopyTableEntries(
            isolate_, instance_object_, imm.table_dst.index,
            imm.table_src.index, dst, src, size);
        if (!ok) DoTrap(kTrapTableOutOfBounds, pc);
        len += imm.length;
        return ok;
      }
      default:
        FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
              OpcodeName(code->start[pc]));
        UNREACHABLE();
    }
    return false;
  }
1839 :
  // Performs an atomic read-modify-write of {op} on a big-endian target.
  // Wasm memory is little-endian, so the loaded value is byte-reversed before
  // applying {op} and the result is byte-reversed again before being written
  // back. The write is done with a compare-exchange loop that retries until
  // the memory word has not changed between load and store. Returns the
  // previous value in native (op_type) representation.
  template <typename type, typename op_type, typename func>
  op_type ExecuteAtomicBinopBE(type val, Address addr, func op) {
    type old_val;
    type new_val;
    old_val = ReadUnalignedValue<type>(addr);
    do {
      // On CAS failure, compare_exchange_strong reloads {old_val}, so the
      // new value is recomputed from the fresh contents each iteration.
      new_val =
          ByteReverse(static_cast<type>(op(ByteReverse<type>(old_val), val)));
    } while (!(std::atomic_compare_exchange_strong(
        reinterpret_cast<std::atomic<type>*>(addr), &old_val, new_val)));
    return static_cast<op_type>(ByteReverse<type>(old_val));
  }
1852 :
  // Converts {param} between native and little-endian byte order. Wasm memory
  // is little-endian, so on big-endian targets the bytes are reversed; on
  // little-endian targets this is the identity.
  template <typename type>
  type AdjustByteOrder(type param) {
#if V8_TARGET_BIG_ENDIAN
    return ByteReverse(param);
#else
    return param;
#endif
  }
1861 :
  // Executes one opcode from the "atomic" prefix (threads proposal):
  // read-modify-write binops, compare-exchange, atomic loads and stores, in
  // 8/16/32/64-bit widths. Operands/address are extracted (and bounds-checked)
  // by ExtractAtomicOpParams; returns false if that check trapped. On
  // big-endian targets, values are byte-reversed around the std::atomic
  // operations since wasm memory is little-endian.
  bool ExecuteAtomicOp(WasmOpcode opcode, Decoder* decoder,
                       InterpreterCode* code, pc_t pc, int& len) {
#if V8_TARGET_BIG_ENDIAN
    constexpr bool kBigEndian = true;
#else
    constexpr bool kBigEndian = false;
#endif
    WasmValue result;
    switch (opcode) {
// Read-modify-write: fetch-add/sub/and/or/xor and exchange. {type} is the
// memory width, {op_type} the wasm value type pushed back on the stack.
#define ATOMIC_BINOP_CASE(name, type, op_type, operation, op)               \
  case kExpr##name: {                                                       \
    type val;                                                               \
    Address addr;                                                           \
    op_type result;                                                         \
    if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
                                              &val)) {                      \
      return false;                                                         \
    }                                                                       \
    static_assert(sizeof(std::atomic<type>) == sizeof(type),                \
                  "Size mismatch for types std::atomic<" #type              \
                  ">, and " #type);                                         \
    if (kBigEndian) {                                                       \
      auto oplambda = [](type a, type b) { return a op b; };                \
      result = ExecuteAtomicBinopBE<type, op_type>(val, addr, oplambda);    \
    } else {                                                                \
      result = static_cast<op_type>(                                        \
          std::operation(reinterpret_cast<std::atomic<type>*>(addr), val)); \
    }                                                                       \
    Push(WasmValue(result));                                                \
    break;                                                                  \
  }
      ATOMIC_BINOP_CASE(I32AtomicAdd, uint32_t, uint32_t, atomic_fetch_add, +);
      ATOMIC_BINOP_CASE(I32AtomicAdd8U, uint8_t, uint32_t, atomic_fetch_add, +);
      ATOMIC_BINOP_CASE(I32AtomicAdd16U, uint16_t, uint32_t, atomic_fetch_add,
                        +);
      ATOMIC_BINOP_CASE(I32AtomicSub, uint32_t, uint32_t, atomic_fetch_sub, -);
      ATOMIC_BINOP_CASE(I32AtomicSub8U, uint8_t, uint32_t, atomic_fetch_sub, -);
      ATOMIC_BINOP_CASE(I32AtomicSub16U, uint16_t, uint32_t, atomic_fetch_sub,
                        -);
      ATOMIC_BINOP_CASE(I32AtomicAnd, uint32_t, uint32_t, atomic_fetch_and, &);
      ATOMIC_BINOP_CASE(I32AtomicAnd8U, uint8_t, uint32_t, atomic_fetch_and, &);
      ATOMIC_BINOP_CASE(I32AtomicAnd16U, uint16_t, uint32_t,
                        atomic_fetch_and, &);
      ATOMIC_BINOP_CASE(I32AtomicOr, uint32_t, uint32_t, atomic_fetch_or, |);
      ATOMIC_BINOP_CASE(I32AtomicOr8U, uint8_t, uint32_t, atomic_fetch_or, |);
      ATOMIC_BINOP_CASE(I32AtomicOr16U, uint16_t, uint32_t, atomic_fetch_or, |);
      ATOMIC_BINOP_CASE(I32AtomicXor, uint32_t, uint32_t, atomic_fetch_xor, ^);
      ATOMIC_BINOP_CASE(I32AtomicXor8U, uint8_t, uint32_t, atomic_fetch_xor, ^);
      ATOMIC_BINOP_CASE(I32AtomicXor16U, uint16_t, uint32_t, atomic_fetch_xor,
                        ^);
      ATOMIC_BINOP_CASE(I32AtomicExchange, uint32_t, uint32_t, atomic_exchange,
                        =);
      ATOMIC_BINOP_CASE(I32AtomicExchange8U, uint8_t, uint32_t, atomic_exchange,
                        =);
      ATOMIC_BINOP_CASE(I32AtomicExchange16U, uint16_t, uint32_t,
                        atomic_exchange, =);
      ATOMIC_BINOP_CASE(I64AtomicAdd, uint64_t, uint64_t, atomic_fetch_add, +);
      ATOMIC_BINOP_CASE(I64AtomicAdd8U, uint8_t, uint64_t, atomic_fetch_add, +);
      ATOMIC_BINOP_CASE(I64AtomicAdd16U, uint16_t, uint64_t, atomic_fetch_add,
                        +);
      ATOMIC_BINOP_CASE(I64AtomicAdd32U, uint32_t, uint64_t, atomic_fetch_add,
                        +);
      ATOMIC_BINOP_CASE(I64AtomicSub, uint64_t, uint64_t, atomic_fetch_sub, -);
      ATOMIC_BINOP_CASE(I64AtomicSub8U, uint8_t, uint64_t, atomic_fetch_sub, -);
      ATOMIC_BINOP_CASE(I64AtomicSub16U, uint16_t, uint64_t, atomic_fetch_sub,
                        -);
      ATOMIC_BINOP_CASE(I64AtomicSub32U, uint32_t, uint64_t, atomic_fetch_sub,
                        -);
      ATOMIC_BINOP_CASE(I64AtomicAnd, uint64_t, uint64_t, atomic_fetch_and, &);
      ATOMIC_BINOP_CASE(I64AtomicAnd8U, uint8_t, uint64_t, atomic_fetch_and, &);
      ATOMIC_BINOP_CASE(I64AtomicAnd16U, uint16_t, uint64_t,
                        atomic_fetch_and, &);
      ATOMIC_BINOP_CASE(I64AtomicAnd32U, uint32_t, uint64_t,
                        atomic_fetch_and, &);
      ATOMIC_BINOP_CASE(I64AtomicOr, uint64_t, uint64_t, atomic_fetch_or, |);
      ATOMIC_BINOP_CASE(I64AtomicOr8U, uint8_t, uint64_t, atomic_fetch_or, |);
      ATOMIC_BINOP_CASE(I64AtomicOr16U, uint16_t, uint64_t, atomic_fetch_or, |);
      ATOMIC_BINOP_CASE(I64AtomicOr32U, uint32_t, uint64_t, atomic_fetch_or, |);
      ATOMIC_BINOP_CASE(I64AtomicXor, uint64_t, uint64_t, atomic_fetch_xor, ^);
      ATOMIC_BINOP_CASE(I64AtomicXor8U, uint8_t, uint64_t, atomic_fetch_xor, ^);
      ATOMIC_BINOP_CASE(I64AtomicXor16U, uint16_t, uint64_t, atomic_fetch_xor,
                        ^);
      ATOMIC_BINOP_CASE(I64AtomicXor32U, uint32_t, uint64_t, atomic_fetch_xor,
                        ^);
      ATOMIC_BINOP_CASE(I64AtomicExchange, uint64_t, uint64_t, atomic_exchange,
                        =);
      ATOMIC_BINOP_CASE(I64AtomicExchange8U, uint8_t, uint64_t, atomic_exchange,
                        =);
      ATOMIC_BINOP_CASE(I64AtomicExchange16U, uint16_t, uint64_t,
                        atomic_exchange, =);
      ATOMIC_BINOP_CASE(I64AtomicExchange32U, uint32_t, uint64_t,
                        atomic_exchange, =);
#undef ATOMIC_BINOP_CASE
// Compare-exchange: pops expected ({old_val}) and replacement ({new_val});
// pushes the previously-stored value, adjusted to native byte order.
#define ATOMIC_COMPARE_EXCHANGE_CASE(name, type, op_type)                   \
  case kExpr##name: {                                                       \
    type old_val;                                                           \
    type new_val;                                                           \
    Address addr;                                                           \
    if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
                                              &old_val, &new_val)) {        \
      return false;                                                         \
    }                                                                       \
    static_assert(sizeof(std::atomic<type>) == sizeof(type),                \
                  "Size mismatch for types std::atomic<" #type              \
                  ">, and " #type);                                         \
    old_val = AdjustByteOrder<type>(old_val);                               \
    new_val = AdjustByteOrder<type>(new_val);                               \
    std::atomic_compare_exchange_strong(                                    \
        reinterpret_cast<std::atomic<type>*>(addr), &old_val, new_val);     \
    Push(WasmValue(static_cast<op_type>(AdjustByteOrder<type>(old_val))));  \
    break;                                                                  \
  }
      ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange, uint32_t,
                                   uint32_t);
      ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange8U, uint8_t,
                                   uint32_t);
      ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange16U, uint16_t,
                                   uint32_t);
      ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange, uint64_t,
                                   uint64_t);
      ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange8U, uint8_t,
                                   uint64_t);
      ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange16U, uint16_t,
                                   uint64_t);
      ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange32U, uint32_t,
                                   uint64_t);
#undef ATOMIC_COMPARE_EXCHANGE_CASE
// Atomic load: pushes the loaded value, zero-extended to {op_type}.
#define ATOMIC_LOAD_CASE(name, type, op_type, operation)                       \
  case kExpr##name: {                                                          \
    Address addr;                                                              \
    if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len)) { \
      return false;                                                            \
    }                                                                          \
    static_assert(sizeof(std::atomic<type>) == sizeof(type),                   \
                  "Size mismatch for types std::atomic<" #type                 \
                  ">, and " #type);                                            \
    result = WasmValue(static_cast<op_type>(AdjustByteOrder<type>(             \
        std::operation(reinterpret_cast<std::atomic<type>*>(addr)))));         \
    Push(result);                                                              \
    break;                                                                     \
  }
      ATOMIC_LOAD_CASE(I32AtomicLoad, uint32_t, uint32_t, atomic_load);
      ATOMIC_LOAD_CASE(I32AtomicLoad8U, uint8_t, uint32_t, atomic_load);
      ATOMIC_LOAD_CASE(I32AtomicLoad16U, uint16_t, uint32_t, atomic_load);
      ATOMIC_LOAD_CASE(I64AtomicLoad, uint64_t, uint64_t, atomic_load);
      ATOMIC_LOAD_CASE(I64AtomicLoad8U, uint8_t, uint64_t, atomic_load);
      ATOMIC_LOAD_CASE(I64AtomicLoad16U, uint16_t, uint64_t, atomic_load);
      ATOMIC_LOAD_CASE(I64AtomicLoad32U, uint32_t, uint64_t, atomic_load);
#undef ATOMIC_LOAD_CASE
// Atomic store: pops the value (truncated to {type}) and stores it in
// little-endian byte order; pushes nothing.
#define ATOMIC_STORE_CASE(name, type, op_type, operation)                   \
  case kExpr##name: {                                                       \
    type val;                                                               \
    Address addr;                                                           \
    if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
                                              &val)) {                      \
      return false;                                                         \
    }                                                                       \
    static_assert(sizeof(std::atomic<type>) == sizeof(type),                \
                  "Size mismatch for types std::atomic<" #type              \
                  ">, and " #type);                                         \
    std::operation(reinterpret_cast<std::atomic<type>*>(addr),              \
                   AdjustByteOrder<type>(val));                             \
    break;                                                                  \
  }
      ATOMIC_STORE_CASE(I32AtomicStore, uint32_t, uint32_t, atomic_store);
      ATOMIC_STORE_CASE(I32AtomicStore8U, uint8_t, uint32_t, atomic_store);
      ATOMIC_STORE_CASE(I32AtomicStore16U, uint16_t, uint32_t, atomic_store);
      ATOMIC_STORE_CASE(I64AtomicStore, uint64_t, uint64_t, atomic_store);
      ATOMIC_STORE_CASE(I64AtomicStore8U, uint8_t, uint64_t, atomic_store);
      ATOMIC_STORE_CASE(I64AtomicStore16U, uint16_t, uint64_t, atomic_store);
      ATOMIC_STORE_CASE(I64AtomicStore32U, uint32_t, uint64_t, atomic_store);
#undef ATOMIC_STORE_CASE
      default:
        UNREACHABLE();
        return false;
    }
    return true;
  }
2040 :
2041 : byte* GetGlobalPtr(const WasmGlobal* global) {
2042 : DCHECK(!ValueTypes::IsReferenceType(global->type));
2043 808296 : if (global->mutability && global->imported) {
2044 : return reinterpret_cast<byte*>(
2045 0 : instance_object_->imported_mutable_globals()[global->index]);
2046 : } else {
2047 808296 : return instance_object_->globals_start() + global->offset;
2048 : }
2049 : }
2050 :
2051 592 : void GetGlobalBufferAndIndex(const WasmGlobal* global,
2052 : Handle<FixedArray>* buffer, uint32_t* index) {
2053 : DCHECK(ValueTypes::IsReferenceType(global->type));
2054 592 : if (global->mutability && global->imported) {
2055 : *buffer =
2056 : handle(FixedArray::cast(
2057 : instance_object_->imported_mutable_globals_buffers()->get(
2058 176 : global->index)),
2059 528 : isolate_);
2060 176 : Address idx = instance_object_->imported_mutable_globals()[global->index];
2061 : DCHECK_LE(idx, std::numeric_limits<uint32_t>::max());
2062 176 : *index = static_cast<uint32_t>(idx);
2063 : } else {
2064 832 : *buffer = handle(instance_object_->tagged_globals_buffer(), isolate_);
2065 416 : *index = global->offset;
2066 : }
2067 592 : }
2068 :
  // Executes one opcode from the SIMD prefix. Returns true if the opcode was
  // handled, false for unrecognized SIMD opcodes (the caller treats that as an
  // error). {len} is incremented for opcodes that carry immediates (lane
  // index, shift amount, shuffle bytes). The LANE() macro maps a wasm lane
  // index to the host-order array index (reversed on big-endian targets).
  bool ExecuteSimdOp(WasmOpcode opcode, Decoder* decoder, InterpreterCode* code,
                     pc_t pc, int& len) {
    switch (opcode) {
// Broadcast one scalar into all {num} lanes.
#define SPLAT_CASE(format, sType, valType, num) \
  case kExpr##format##Splat: {                  \
    WasmValue val = Pop();                      \
    valType v = val.to<valType>();              \
    sType s;                                    \
    for (int i = 0; i < num; i++) s.val[i] = v; \
    Push(WasmValue(Simd128(s)));                \
    return true;                                \
  }
      SPLAT_CASE(I32x4, int4, int32_t, 4)
      SPLAT_CASE(F32x4, float4, float, 4)
      SPLAT_CASE(I16x8, int8, int32_t, 8)
      SPLAT_CASE(I8x16, int16, int32_t, 16)
#undef SPLAT_CASE
// Extract a single lane (lane index is an immediate).
#define EXTRACT_LANE_CASE(format, name)                                 \
  case kExpr##format##ExtractLane: {                                    \
    SimdLaneImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
    ++len;                                                              \
    WasmValue val = Pop();                                              \
    Simd128 s = val.to_s128();                                          \
    auto ss = s.to_##name();                                            \
    Push(WasmValue(ss.val[LANE(imm.lane, ss)]));                        \
    return true;                                                        \
  }
      EXTRACT_LANE_CASE(I32x4, i32x4)
      EXTRACT_LANE_CASE(F32x4, f32x4)
      EXTRACT_LANE_CASE(I16x8, i16x8)
      EXTRACT_LANE_CASE(I8x16, i8x16)
#undef EXTRACT_LANE_CASE
// Lane-wise binary op: {expr} computes one result lane from lanes a and b.
#define BINOP_CASE(op, name, stype, count, expr) \
  case kExpr##op: {                              \
    WasmValue v2 = Pop();                        \
    WasmValue v1 = Pop();                        \
    stype s1 = v1.to_s128().to_##name();         \
    stype s2 = v2.to_s128().to_##name();         \
    stype res;                                   \
    for (size_t i = 0; i < count; ++i) {         \
      auto a = s1.val[LANE(i, s1)];              \
      auto b = s2.val[LANE(i, s1)];              \
      res.val[LANE(i, s1)] = expr;               \
    }                                            \
    Push(WasmValue(Simd128(res)));               \
    return true;                                 \
  }
      BINOP_CASE(F32x4Add, f32x4, float4, 4, a + b)
      BINOP_CASE(F32x4Sub, f32x4, float4, 4, a - b)
      BINOP_CASE(F32x4Mul, f32x4, float4, 4, a * b)
      // NOTE(review): `a < b ? a : b` does not propagate NaN the way
      // IEEE min/max with NaN-propagation would — confirm this matches the
      // intended min/max semantics for this interpreter revision.
      BINOP_CASE(F32x4Min, f32x4, float4, 4, a < b ? a : b)
      BINOP_CASE(F32x4Max, f32x4, float4, 4, a > b ? a : b)
      BINOP_CASE(I32x4Add, i32x4, int4, 4, base::AddWithWraparound(a, b))
      BINOP_CASE(I32x4Sub, i32x4, int4, 4, base::SubWithWraparound(a, b))
      BINOP_CASE(I32x4Mul, i32x4, int4, 4, base::MulWithWraparound(a, b))
      BINOP_CASE(I32x4MinS, i32x4, int4, 4, a < b ? a : b)
      BINOP_CASE(I32x4MinU, i32x4, int4, 4,
                 static_cast<uint32_t>(a) < static_cast<uint32_t>(b) ? a : b)
      BINOP_CASE(I32x4MaxS, i32x4, int4, 4, a > b ? a : b)
      BINOP_CASE(I32x4MaxU, i32x4, int4, 4,
                 static_cast<uint32_t>(a) > static_cast<uint32_t>(b) ? a : b)
      BINOP_CASE(S128And, i32x4, int4, 4, a & b)
      BINOP_CASE(S128Or, i32x4, int4, 4, a | b)
      BINOP_CASE(S128Xor, i32x4, int4, 4, a ^ b)
      BINOP_CASE(I16x8Add, i16x8, int8, 8, base::AddWithWraparound(a, b))
      BINOP_CASE(I16x8Sub, i16x8, int8, 8, base::SubWithWraparound(a, b))
      BINOP_CASE(I16x8Mul, i16x8, int8, 8, base::MulWithWraparound(a, b))
      BINOP_CASE(I16x8MinS, i16x8, int8, 8, a < b ? a : b)
      BINOP_CASE(I16x8MinU, i16x8, int8, 8,
                 static_cast<uint16_t>(a) < static_cast<uint16_t>(b) ? a : b)
      BINOP_CASE(I16x8MaxS, i16x8, int8, 8, a > b ? a : b)
      BINOP_CASE(I16x8MaxU, i16x8, int8, 8,
                 static_cast<uint16_t>(a) > static_cast<uint16_t>(b) ? a : b)
      BINOP_CASE(I16x8AddSaturateS, i16x8, int8, 8, SaturateAdd<int16_t>(a, b))
      BINOP_CASE(I16x8AddSaturateU, i16x8, int8, 8, SaturateAdd<uint16_t>(a, b))
      BINOP_CASE(I16x8SubSaturateS, i16x8, int8, 8, SaturateSub<int16_t>(a, b))
      BINOP_CASE(I16x8SubSaturateU, i16x8, int8, 8, SaturateSub<uint16_t>(a, b))
      BINOP_CASE(I8x16Add, i8x16, int16, 16, base::AddWithWraparound(a, b))
      BINOP_CASE(I8x16Sub, i8x16, int16, 16, base::SubWithWraparound(a, b))
      BINOP_CASE(I8x16Mul, i8x16, int16, 16, base::MulWithWraparound(a, b))
      BINOP_CASE(I8x16MinS, i8x16, int16, 16, a < b ? a : b)
      BINOP_CASE(I8x16MinU, i8x16, int16, 16,
                 static_cast<uint8_t>(a) < static_cast<uint8_t>(b) ? a : b)
      BINOP_CASE(I8x16MaxS, i8x16, int16, 16, a > b ? a : b)
      BINOP_CASE(I8x16MaxU, i8x16, int16, 16,
                 static_cast<uint8_t>(a) > static_cast<uint8_t>(b) ? a : b)
      BINOP_CASE(I8x16AddSaturateS, i8x16, int16, 16, SaturateAdd<int8_t>(a, b))
      BINOP_CASE(I8x16AddSaturateU, i8x16, int16, 16,
                 SaturateAdd<uint8_t>(a, b))
      BINOP_CASE(I8x16SubSaturateS, i8x16, int16, 16, SaturateSub<int8_t>(a, b))
      BINOP_CASE(I8x16SubSaturateU, i8x16, int16, 16,
                 SaturateSub<uint8_t>(a, b))
#undef BINOP_CASE
// Lane-wise unary op.
#define UNOP_CASE(op, name, stype, count, expr) \
  case kExpr##op: {                             \
    WasmValue v = Pop();                        \
    stype s = v.to_s128().to_##name();          \
    stype res;                                  \
    for (size_t i = 0; i < count; ++i) {        \
      auto a = s.val[i];                        \
      res.val[i] = expr;                        \
    }                                           \
    Push(WasmValue(Simd128(res)));              \
    return true;                                \
  }
      UNOP_CASE(F32x4Abs, f32x4, float4, 4, std::abs(a))
      UNOP_CASE(F32x4Neg, f32x4, float4, 4, -a)
      UNOP_CASE(F32x4RecipApprox, f32x4, float4, 4, base::Recip(a))
      UNOP_CASE(F32x4RecipSqrtApprox, f32x4, float4, 4, base::RecipSqrt(a))
      UNOP_CASE(I32x4Neg, i32x4, int4, 4, base::NegateWithWraparound(a))
      UNOP_CASE(S128Not, i32x4, int4, 4, ~a)
      UNOP_CASE(I16x8Neg, i16x8, int8, 8, base::NegateWithWraparound(a))
      UNOP_CASE(I8x16Neg, i8x16, int16, 16, base::NegateWithWraparound(a))
#undef UNOP_CASE
// Lane-wise comparison: each result lane is all-ones (-1) or all-zeros.
#define CMPOP_CASE(op, name, stype, out_stype, count, expr) \
  case kExpr##op: {                                         \
    WasmValue v2 = Pop();                                   \
    WasmValue v1 = Pop();                                   \
    stype s1 = v1.to_s128().to_##name();                    \
    stype s2 = v2.to_s128().to_##name();                    \
    out_stype res;                                          \
    for (size_t i = 0; i < count; ++i) {                    \
      auto a = s1.val[i];                                   \
      auto b = s2.val[i];                                   \
      res.val[i] = expr ? -1 : 0;                           \
    }                                                       \
    Push(WasmValue(Simd128(res)));                          \
    return true;                                            \
  }
      CMPOP_CASE(F32x4Eq, f32x4, float4, int4, 4, a == b)
      CMPOP_CASE(F32x4Ne, f32x4, float4, int4, 4, a != b)
      CMPOP_CASE(F32x4Gt, f32x4, float4, int4, 4, a > b)
      CMPOP_CASE(F32x4Ge, f32x4, float4, int4, 4, a >= b)
      CMPOP_CASE(F32x4Lt, f32x4, float4, int4, 4, a < b)
      CMPOP_CASE(F32x4Le, f32x4, float4, int4, 4, a <= b)
      CMPOP_CASE(I32x4Eq, i32x4, int4, int4, 4, a == b)
      CMPOP_CASE(I32x4Ne, i32x4, int4, int4, 4, a != b)
      CMPOP_CASE(I32x4GtS, i32x4, int4, int4, 4, a > b)
      CMPOP_CASE(I32x4GeS, i32x4, int4, int4, 4, a >= b)
      CMPOP_CASE(I32x4LtS, i32x4, int4, int4, 4, a < b)
      CMPOP_CASE(I32x4LeS, i32x4, int4, int4, 4, a <= b)
      CMPOP_CASE(I32x4GtU, i32x4, int4, int4, 4,
                 static_cast<uint32_t>(a) > static_cast<uint32_t>(b))
      CMPOP_CASE(I32x4GeU, i32x4, int4, int4, 4,
                 static_cast<uint32_t>(a) >= static_cast<uint32_t>(b))
      CMPOP_CASE(I32x4LtU, i32x4, int4, int4, 4,
                 static_cast<uint32_t>(a) < static_cast<uint32_t>(b))
      CMPOP_CASE(I32x4LeU, i32x4, int4, int4, 4,
                 static_cast<uint32_t>(a) <= static_cast<uint32_t>(b))
      CMPOP_CASE(I16x8Eq, i16x8, int8, int8, 8, a == b)
      CMPOP_CASE(I16x8Ne, i16x8, int8, int8, 8, a != b)
      CMPOP_CASE(I16x8GtS, i16x8, int8, int8, 8, a > b)
      CMPOP_CASE(I16x8GeS, i16x8, int8, int8, 8, a >= b)
      CMPOP_CASE(I16x8LtS, i16x8, int8, int8, 8, a < b)
      CMPOP_CASE(I16x8LeS, i16x8, int8, int8, 8, a <= b)
      CMPOP_CASE(I16x8GtU, i16x8, int8, int8, 8,
                 static_cast<uint16_t>(a) > static_cast<uint16_t>(b))
      CMPOP_CASE(I16x8GeU, i16x8, int8, int8, 8,
                 static_cast<uint16_t>(a) >= static_cast<uint16_t>(b))
      CMPOP_CASE(I16x8LtU, i16x8, int8, int8, 8,
                 static_cast<uint16_t>(a) < static_cast<uint16_t>(b))
      CMPOP_CASE(I16x8LeU, i16x8, int8, int8, 8,
                 static_cast<uint16_t>(a) <= static_cast<uint16_t>(b))
      CMPOP_CASE(I8x16Eq, i8x16, int16, int16, 16, a == b)
      CMPOP_CASE(I8x16Ne, i8x16, int16, int16, 16, a != b)
      CMPOP_CASE(I8x16GtS, i8x16, int16, int16, 16, a > b)
      CMPOP_CASE(I8x16GeS, i8x16, int16, int16, 16, a >= b)
      CMPOP_CASE(I8x16LtS, i8x16, int16, int16, 16, a < b)
      CMPOP_CASE(I8x16LeS, i8x16, int16, int16, 16, a <= b)
      CMPOP_CASE(I8x16GtU, i8x16, int16, int16, 16,
                 static_cast<uint8_t>(a) > static_cast<uint8_t>(b))
      CMPOP_CASE(I8x16GeU, i8x16, int16, int16, 16,
                 static_cast<uint8_t>(a) >= static_cast<uint8_t>(b))
      CMPOP_CASE(I8x16LtU, i8x16, int16, int16, 16,
                 static_cast<uint8_t>(a) < static_cast<uint8_t>(b))
      CMPOP_CASE(I8x16LeU, i8x16, int16, int16, 16,
                 static_cast<uint8_t>(a) <= static_cast<uint8_t>(b))
#undef CMPOP_CASE
// Replace a single lane (lane index is an immediate).
#define REPLACE_LANE_CASE(format, name, stype, ctype)                   \
  case kExpr##format##ReplaceLane: {                                    \
    SimdLaneImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
    ++len;                                                              \
    WasmValue new_val = Pop();                                          \
    WasmValue simd_val = Pop();                                         \
    stype s = simd_val.to_s128().to_##name();                           \
    s.val[LANE(imm.lane, s)] = new_val.to<ctype>();                     \
    Push(WasmValue(Simd128(s)));                                        \
    return true;                                                        \
  }
      REPLACE_LANE_CASE(F32x4, f32x4, float4, float)
      REPLACE_LANE_CASE(I32x4, i32x4, int4, int32_t)
      REPLACE_LANE_CASE(I16x8, i16x8, int8, int32_t)
      REPLACE_LANE_CASE(I8x16, i8x16, int16, int32_t)
#undef REPLACE_LANE_CASE
      case kExprS128LoadMem:
        return ExecuteLoad<Simd128, Simd128>(decoder, code, pc, len,
                                             MachineRepresentation::kSimd128);
      case kExprS128StoreMem:
        return ExecuteStore<Simd128, Simd128>(decoder, code, pc, len,
                                              MachineRepresentation::kSimd128);
// Lane-wise shift by an immediate shift amount.
#define SHIFT_CASE(op, name, stype, count, expr)                         \
  case kExpr##op: {                                                      \
    SimdShiftImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
    ++len;                                                               \
    WasmValue v = Pop();                                                 \
    stype s = v.to_s128().to_##name();                                   \
    stype res;                                                           \
    for (size_t i = 0; i < count; ++i) {                                 \
      auto a = s.val[i];                                                 \
      res.val[i] = expr;                                                 \
    }                                                                    \
    Push(WasmValue(Simd128(res)));                                       \
    return true;                                                         \
  }
      SHIFT_CASE(I32x4Shl, i32x4, int4, 4,
                 static_cast<uint32_t>(a) << imm.shift)
      SHIFT_CASE(I32x4ShrS, i32x4, int4, 4, a >> imm.shift)
      SHIFT_CASE(I32x4ShrU, i32x4, int4, 4,
                 static_cast<uint32_t>(a) >> imm.shift)
      SHIFT_CASE(I16x8Shl, i16x8, int8, 8,
                 static_cast<uint16_t>(a) << imm.shift)
      SHIFT_CASE(I16x8ShrS, i16x8, int8, 8, a >> imm.shift)
      SHIFT_CASE(I16x8ShrU, i16x8, int8, 8,
                 static_cast<uint16_t>(a) >> imm.shift)
      SHIFT_CASE(I8x16Shl, i8x16, int16, 16,
                 static_cast<uint8_t>(a) << imm.shift)
      SHIFT_CASE(I8x16ShrS, i8x16, int16, 16, a >> imm.shift)
      SHIFT_CASE(I8x16ShrU, i8x16, int16, 16,
                 static_cast<uint8_t>(a) >> imm.shift)
#undef SHIFT_CASE
// Lane-wise conversion from {src_type} lanes (starting at {start_index}) to
// {dst_type} lanes. The saturating float-to-int ternaries below are
// clang-format-mangled but parse as: isnan ? 0 : clamp-to-range(a).
#define CONVERT_CASE(op, src_type, name, dst_type, count, start_index, ctype, \
                     expr)                                                    \
  case kExpr##op: {                                                           \
    WasmValue v = Pop();                                                      \
    src_type s = v.to_s128().to_##name();                                     \
    dst_type res;                                                             \
    for (size_t i = 0; i < count; ++i) {                                      \
      ctype a = s.val[LANE(start_index + i, s)];                              \
      res.val[LANE(i, res)] = expr;                                           \
    }                                                                         \
    Push(WasmValue(Simd128(res)));                                            \
    return true;                                                              \
  }
      CONVERT_CASE(F32x4SConvertI32x4, int4, i32x4, float4, 4, 0, int32_t,
                   static_cast<float>(a))
      CONVERT_CASE(F32x4UConvertI32x4, int4, i32x4, float4, 4, 0, uint32_t,
                   static_cast<float>(a))
      CONVERT_CASE(I32x4SConvertF32x4, float4, f32x4, int4, 4, 0, double,
                   std::isnan(a) ? 0
                                 : a<kMinInt ? kMinInt : a> kMaxInt
                                       ? kMaxInt
                                       : static_cast<int32_t>(a))
      CONVERT_CASE(I32x4UConvertF32x4, float4, f32x4, int4, 4, 0, double,
                   std::isnan(a)
                       ? 0
                       : a<0 ? 0 : a> kMaxUInt32 ? kMaxUInt32
                                                 : static_cast<uint32_t>(a))
      CONVERT_CASE(I32x4SConvertI16x8High, int8, i16x8, int4, 4, 4, int16_t,
                   a)
      CONVERT_CASE(I32x4UConvertI16x8High, int8, i16x8, int4, 4, 4, uint16_t,
                   a)
      CONVERT_CASE(I32x4SConvertI16x8Low, int8, i16x8, int4, 4, 0, int16_t, a)
      CONVERT_CASE(I32x4UConvertI16x8Low, int8, i16x8, int4, 4, 0, uint16_t,
                   a)
      CONVERT_CASE(I16x8SConvertI8x16High, int16, i8x16, int8, 8, 8, int8_t,
                   a)
      CONVERT_CASE(I16x8UConvertI8x16High, int16, i8x16, int8, 8, 8, uint8_t,
                   a)
      CONVERT_CASE(I16x8SConvertI8x16Low, int16, i8x16, int8, 8, 0, int8_t, a)
      CONVERT_CASE(I16x8UConvertI8x16Low, int16, i8x16, int8, 8, 0, uint8_t,
                   a)
#undef CONVERT_CASE
// Narrowing pack with saturation: first half of the result lanes come from
// {s1}, second half from {s2}, each clamped to the range of {ctype}.
#define PACK_CASE(op, src_type, name, dst_type, count, ctype, dst_ctype,  \
                  is_unsigned)                                            \
  case kExpr##op: {                                                       \
    WasmValue v2 = Pop();                                                 \
    WasmValue v1 = Pop();                                                 \
    src_type s1 = v1.to_s128().to_##name();                               \
    src_type s2 = v2.to_s128().to_##name();                               \
    dst_type res;                                                         \
    int64_t min = std::numeric_limits<ctype>::min();                      \
    int64_t max = std::numeric_limits<ctype>::max();                      \
    for (size_t i = 0; i < count; ++i) {                                  \
      int32_t v = i < count / 2 ? s1.val[LANE(i, s1)]                     \
                                : s2.val[LANE(i - count / 2, s2)];        \
      int64_t a = is_unsigned ? static_cast<int64_t>(v & 0xFFFFFFFFu) : v; \
      res.val[LANE(i, res)] =                                             \
          static_cast<dst_ctype>(std::max(min, std::min(max, a)));        \
    }                                                                     \
    Push(WasmValue(Simd128(res)));                                        \
    return true;                                                          \
  }
      PACK_CASE(I16x8SConvertI32x4, int4, i32x4, int8, 8, int16_t, int16_t,
                false)
      PACK_CASE(I16x8UConvertI32x4, int4, i32x4, int8, 8, uint16_t, int16_t,
                true)
      PACK_CASE(I8x16SConvertI16x8, int8, i16x8, int16, 16, int8_t, int8_t,
                false)
      PACK_CASE(I8x16UConvertI16x8, int8, i16x8, int16, 16, uint8_t, int8_t,
                true)
#undef PACK_CASE
      case kExprS128Select: {
        int4 bool_val = Pop().to_s128().to_i32x4();
        int4 v2 = Pop().to_s128().to_i32x4();
        int4 v1 = Pop().to_s128().to_i32x4();
        int4 res;
        for (size_t i = 0; i < 4; ++i) {
          // Bitwise select: take bits from v1 where bool_val has 1-bits,
          // else from v2.
          res.val[i] = v2.val[i] ^ ((v1.val[i] ^ v2.val[i]) & bool_val.val[i]);
        }
        Push(WasmValue(Simd128(res)));
        return true;
      }
// Horizontal (pairwise) add: first half of the result comes from adjacent
// pairs of {s1}, second half from adjacent pairs of {s2}.
#define ADD_HORIZ_CASE(op, name, stype, count)                   \
  case kExpr##op: {                                              \
    WasmValue v2 = Pop();                                        \
    WasmValue v1 = Pop();                                        \
    stype s1 = v1.to_s128().to_##name();                         \
    stype s2 = v2.to_s128().to_##name();                         \
    stype res;                                                   \
    for (size_t i = 0; i < count / 2; ++i) {                     \
      res.val[LANE(i, s1)] =                                     \
          s1.val[LANE(i * 2, s1)] + s1.val[LANE(i * 2 + 1, s1)]; \
      res.val[LANE(i + count / 2, s1)] =                         \
          s2.val[LANE(i * 2, s1)] + s2.val[LANE(i * 2 + 1, s1)]; \
    }                                                            \
    Push(WasmValue(Simd128(res)));                               \
    return true;                                                 \
  }
      ADD_HORIZ_CASE(I32x4AddHoriz, i32x4, int4, 4)
      ADD_HORIZ_CASE(F32x4AddHoriz, f32x4, float4, 4)
      ADD_HORIZ_CASE(I16x8AddHoriz, i16x8, int8, 8)
#undef ADD_HORIZ_CASE
      case kExprS8x16Shuffle: {
        Simd8x16ShuffleImmediate<Decoder::kNoValidate> imm(decoder,
                                                           code->at(pc));
        len += 16;  // 16 immediate bytes select the result lanes.
        int16 v2 = Pop().to_s128().to_i8x16();
        int16 v1 = Pop().to_s128().to_i8x16();
        int16 res;
        for (size_t i = 0; i < kSimd128Size; ++i) {
          // Shuffle indices 0..15 select from v1, 16..31 from v2.
          int lane = imm.shuffle[i];
          res.val[LANE(i, v1)] = lane < kSimd128Size
                                     ? v1.val[LANE(lane, v1)]
                                     : v2.val[LANE(lane - kSimd128Size, v1)];
        }
        Push(WasmValue(Simd128(res)));
        return true;
      }
      case kExprS1x4AnyTrue:
      case kExprS1x8AnyTrue:
      case kExprS1x16AnyTrue: {
        // Any-true only needs "is any bit set", so all widths can be checked
        // by OR-ing the four 32-bit words.
        int4 s = Pop().to_s128().to_i32x4();
        bool res = s.val[0] | s.val[1] | s.val[2] | s.val[3];
        Push(WasmValue((res)));
        return true;
      }
// All-true: every lane of the given width must be non-zero.
#define REDUCTION_CASE(op, name, stype, count, operation) \
  case kExpr##op: {                                       \
    stype s = Pop().to_s128().to_##name();                \
    bool res = true;                                      \
    for (size_t i = 0; i < count; ++i) {                  \
      res = res & static_cast<bool>(s.val[i]);            \
    }                                                     \
    Push(WasmValue(res));                                 \
    return true;                                          \
  }
      REDUCTION_CASE(S1x4AllTrue, i32x4, int4, 4, &)
      REDUCTION_CASE(S1x8AllTrue, i16x8, int8, 8, &)
      REDUCTION_CASE(S1x16AllTrue, i8x16, int16, 16, &)
#undef REDUCTION_CASE
      default:
        return false;
    }
  }
2443 :
2444 : // Check if our control stack (frames_) exceeds the limit. Trigger stack
2445 : // overflow if it does, and unwinding the current frame.
2446 : // Returns true if execution can continue, false if the current activation was
2447 : // fully unwound.
2448 : // Do call this function immediately *after* pushing a new frame. The pc of
2449 : // the top frame will be reset to 0 if the stack check fails.
2450 446700 : bool DoStackCheck() V8_WARN_UNUSED_RESULT {
2451 : // The goal of this stack check is not to prevent actual stack overflows,
2452 : // but to simulate stack overflows during the execution of compiled code.
2453 : // That is why this function uses FLAG_stack_size, even though the value
2454 : // stack actually lies in zone memory.
2455 446700 : const size_t stack_size_limit = FLAG_stack_size * KB;
2456 : // Sum up the value stack size and the control stack size.
2457 893400 : const size_t current_stack_size = (sp_ - stack_.get()) * sizeof(*sp_) +
2458 446700 : frames_.size() * sizeof(frames_[0]);
2459 446700 : if (V8_LIKELY(current_stack_size <= stack_size_limit)) {
2460 : return true;
2461 : }
2462 : // The pc of the top frame is initialized to the first instruction. We reset
2463 : // it to 0 here such that we report the same position as in compiled code.
2464 16 : frames_.back().pc = 0;
2465 16 : isolate_->StackOverflow();
2466 16 : return HandleException(isolate_) == WasmInterpreter::Thread::HANDLED;
2467 : }
2468 :
2469 472 : void EncodeI32ExceptionValue(Handle<FixedArray> encoded_values,
2470 : uint32_t* encoded_index, uint32_t value) {
2471 944 : encoded_values->set((*encoded_index)++, Smi::FromInt(value >> 16));
2472 944 : encoded_values->set((*encoded_index)++, Smi::FromInt(value & 0xffff));
2473 472 : }
2474 :
2475 72 : void EncodeI64ExceptionValue(Handle<FixedArray> encoded_values,
2476 : uint32_t* encoded_index, uint64_t value) {
2477 72 : EncodeI32ExceptionValue(encoded_values, encoded_index,
2478 144 : static_cast<uint32_t>(value >> 32));
2479 72 : EncodeI32ExceptionValue(encoded_values, encoded_index,
2480 72 : static_cast<uint32_t>(value));
2481 72 : }
2482 :
// Allocate, initialize and throw a new exception. The exception values are
// being popped off the operand stack. Returns true if the exception is being
// handled locally by the interpreter, false otherwise (interpreter exits).
bool DoThrowException(const WasmException* exception,
                      uint32_t index) V8_WARN_UNUSED_RESULT {
  HandleScope handle_scope(isolate_);  // Avoid leaking handles.
  // Fetch the tag for this exception from the instance's exception table.
  Handle<WasmExceptionTag> exception_tag(
      WasmExceptionTag::cast(
          instance_object_->exceptions_table()->get(index)),
      isolate_);
  uint32_t encoded_size = WasmExceptionPackage::GetEncodedSize(exception);
  Handle<Object> exception_object =
      WasmExceptionPackage::New(isolate_, exception_tag, encoded_size);
  Handle<FixedArray> encoded_values = Handle<FixedArray>::cast(
      WasmExceptionPackage::GetExceptionValues(isolate_, exception_object));
  // Encode the exception values on the operand stack into the exception
  // package allocated above. This encoding has to be in sync with other
  // backends so that exceptions can be passed between them.
  const WasmExceptionSig* sig = exception->sig;
  uint32_t encoded_index = 0;
  // The values to encode are the topmost |parameter_count| stack slots;
  // they are only dropped after encoding succeeds (see Drop below).
  sp_t base_index = StackHeight() - sig->parameter_count();
  for (size_t i = 0; i < sig->parameter_count(); ++i) {
    WasmValue value = GetStackValue(base_index + i);
    switch (sig->GetParam(i)) {
      case kWasmI32: {
        uint32_t u32 = value.to_u32();
        EncodeI32ExceptionValue(encoded_values, &encoded_index, u32);
        break;
      }
      case kWasmF32: {
        // Floats are encoded via their raw bit pattern, preserving NaN
        // payloads exactly.
        uint32_t f32 = value.to_f32_boxed().get_bits();
        EncodeI32ExceptionValue(encoded_values, &encoded_index, f32);
        break;
      }
      case kWasmI64: {
        uint64_t u64 = value.to_u64();
        EncodeI64ExceptionValue(encoded_values, &encoded_index, u64);
        break;
      }
      case kWasmF64: {
        // Same bit-pattern encoding as f32, but 64 bits wide.
        uint64_t f64 = value.to_f64_boxed().get_bits();
        EncodeI64ExceptionValue(encoded_values, &encoded_index, f64);
        break;
      }
      case kWasmS128: {
        // A SIMD value is encoded lane-wise as four 32-bit values.
        int4 s128 = value.to_s128().to_i32x4();
        EncodeI32ExceptionValue(encoded_values, &encoded_index, s128.val[0]);
        EncodeI32ExceptionValue(encoded_values, &encoded_index, s128.val[1]);
        EncodeI32ExceptionValue(encoded_values, &encoded_index, s128.val[2]);
        EncodeI32ExceptionValue(encoded_values, &encoded_index, s128.val[3]);
        break;
      }
      case kWasmAnyRef: {
        // References are stored directly as objects; no bit-splitting.
        Handle<Object> anyref = value.to_anyref();
        encoded_values->set(encoded_index++, *anyref);
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  DCHECK_EQ(encoded_size, encoded_index);
  Drop(static_cast<int>(sig->parameter_count()));
  // Now that the exception is ready, set it as pending.
  isolate_->Throw(*exception_object);
  return HandleException(isolate_) == WasmInterpreter::Thread::HANDLED;
}
2550 :
2551 : // Throw a given existing exception. Returns true if the exception is being
2552 : // handled locally by the interpreter, false otherwise (interpreter exits).
2553 152 : bool DoRethrowException(WasmValue exception) {
2554 152 : isolate_->ReThrow(*exception.to_anyref());
2555 152 : return HandleException(isolate_) == WasmInterpreter::Thread::HANDLED;
2556 : }
2557 :
2558 : // Determines whether the given exception has a tag matching the expected tag
2559 : // for the given index within the exception table of the current instance.
2560 416 : bool MatchingExceptionTag(Handle<Object> exception_object, uint32_t index) {
2561 : Handle<Object> caught_tag =
2562 416 : WasmExceptionPackage::GetExceptionTag(isolate_, exception_object);
2563 : Handle<Object> expected_tag =
2564 416 : handle(instance_object_->exceptions_table()->get(index), isolate_);
2565 : DCHECK(expected_tag->IsWasmExceptionTag());
2566 416 : return expected_tag.is_identical_to(caught_tag);
2567 : }
2568 :
2569 304 : void DecodeI32ExceptionValue(Handle<FixedArray> encoded_values,
2570 : uint32_t* encoded_index, uint32_t* value) {
2571 608 : uint32_t msb = Smi::cast(encoded_values->get((*encoded_index)++)).value();
2572 608 : uint32_t lsb = Smi::cast(encoded_values->get((*encoded_index)++)).value();
2573 304 : *value = (msb << 16) | (lsb & 0xffff);
2574 304 : }
2575 :
2576 40 : void DecodeI64ExceptionValue(Handle<FixedArray> encoded_values,
2577 : uint32_t* encoded_index, uint64_t* value) {
2578 40 : uint32_t lsb = 0, msb = 0;
2579 40 : DecodeI32ExceptionValue(encoded_values, encoded_index, &msb);
2580 40 : DecodeI32ExceptionValue(encoded_values, encoded_index, &lsb);
2581 40 : *value = (static_cast<uint64_t>(msb) << 32) | static_cast<uint64_t>(lsb);
2582 40 : }
2583 :
// Unpack the values encoded in the given exception. The exception values are
// pushed onto the operand stack. Callers must perform a tag check to ensure
// the encoded values match the expected signature of the exception.
void DoUnpackException(const WasmException* exception,
                       Handle<Object> exception_object) {
  Handle<FixedArray> encoded_values = Handle<FixedArray>::cast(
      WasmExceptionPackage::GetExceptionValues(isolate_, exception_object));
  // Decode the exception values from the given exception package and push
  // them onto the operand stack. This encoding has to be in sync with other
  // backends so that exceptions can be passed between them.
  const WasmExceptionSig* sig = exception->sig;
  uint32_t encoded_index = 0;
  for (size_t i = 0; i < sig->parameter_count(); ++i) {
    WasmValue value;
    switch (sig->GetParam(i)) {
      case kWasmI32: {
        uint32_t u32 = 0;
        DecodeI32ExceptionValue(encoded_values, &encoded_index, &u32);
        value = WasmValue(u32);
        break;
      }
      case kWasmF32: {
        // Floats were encoded by bit pattern; reconstruct from raw bits so
        // NaN payloads round-trip exactly.
        uint32_t f32_bits = 0;
        DecodeI32ExceptionValue(encoded_values, &encoded_index, &f32_bits);
        value = WasmValue(Float32::FromBits(f32_bits));
        break;
      }
      case kWasmI64: {
        uint64_t u64 = 0;
        DecodeI64ExceptionValue(encoded_values, &encoded_index, &u64);
        value = WasmValue(u64);
        break;
      }
      case kWasmF64: {
        uint64_t f64_bits = 0;
        DecodeI64ExceptionValue(encoded_values, &encoded_index, &f64_bits);
        value = WasmValue(Float64::FromBits(f64_bits));
        break;
      }
      case kWasmS128: {
        // SIMD values were encoded lane-wise as four 32-bit values; decode
        // into the int4 storage through a uint32_t view of the lanes.
        int4 s128 = {0, 0, 0, 0};
        uint32_t* vals = reinterpret_cast<uint32_t*>(s128.val);
        DecodeI32ExceptionValue(encoded_values, &encoded_index, &vals[0]);
        DecodeI32ExceptionValue(encoded_values, &encoded_index, &vals[1]);
        DecodeI32ExceptionValue(encoded_values, &encoded_index, &vals[2]);
        DecodeI32ExceptionValue(encoded_values, &encoded_index, &vals[3]);
        value = WasmValue(Simd128(s128));
        break;
      }
      case kWasmAnyRef: {
        // References were stored directly; read them back as handles.
        Handle<Object> anyref(encoded_values->get(encoded_index++), isolate_);
        value = WasmValue(anyref);
        break;
      }
      default:
        UNREACHABLE();
    }
    Push(value);
  }
  // Every encoded slot must have been consumed exactly once.
  DCHECK_EQ(WasmExceptionPackage::GetEncodedSize(exception), encoded_index);
}
2645 :
2646 4745635 : void Execute(InterpreterCode* code, pc_t pc, int max) {
2647 : DCHECK_NOT_NULL(code->side_table);
2648 : DCHECK(!frames_.empty());
2649 : // There must be enough space on the stack to hold the arguments, locals,
2650 : // and the value stack.
2651 : DCHECK_LE(code->function->sig->parameter_count() +
2652 : code->locals.type_list.size() +
2653 : code->side_table->max_stack_height_,
2654 : stack_limit_ - stack_.get() - frames_.back().sp);
2655 : // Seal the surrounding {HandleScope} to ensure that all cases within the
2656 : // interpreter switch below which deal with handles open their own scope.
2657 : // This avoids leaking / accumulating handles in the surrounding scope.
2658 : SealHandleScope shs(isolate_);
2659 :
2660 4745635 : Decoder decoder(code->start, code->end);
2661 4745635 : pc_t limit = code->end - code->start;
2662 : bool hit_break = false;
2663 :
2664 : while (true) {
2665 : #define PAUSE_IF_BREAK_FLAG(flag) \
2666 : if (V8_UNLIKELY(break_flags_ & WasmInterpreter::BreakFlag::flag)) { \
2667 : hit_break = true; \
2668 : max = 0; \
2669 : }
2670 :
2671 : DCHECK_GT(limit, pc);
2672 : DCHECK_NOT_NULL(code->start);
2673 :
2674 : // Do first check for a breakpoint, in order to set hit_break correctly.
2675 : const char* skip = " ";
2676 44024707 : int len = 1;
2677 44024707 : byte orig = code->start[pc];
2678 44024707 : WasmOpcode opcode = static_cast<WasmOpcode>(orig);
2679 44024707 : if (WasmOpcodes::IsPrefixOpcode(opcode)) {
2680 2737448 : opcode = static_cast<WasmOpcode>(opcode << 8 | code->start[pc + 1]);
2681 : }
2682 44024707 : if (V8_UNLIKELY(orig == kInternalBreakpoint)) {
2683 6000 : orig = code->orig_start[pc];
2684 6000 : if (WasmOpcodes::IsPrefixOpcode(static_cast<WasmOpcode>(orig))) {
2685 : opcode =
2686 0 : static_cast<WasmOpcode>(orig << 8 | code->orig_start[pc + 1]);
2687 : }
2688 12000 : if (SkipBreakpoint(code, pc)) {
2689 : // skip breakpoint by switching on original code.
2690 : skip = "[skip] ";
2691 : } else {
2692 : TRACE("@%-3zu: [break] %-24s:", pc, WasmOpcodes::OpcodeName(opcode));
2693 : TraceValueStack();
2694 : TRACE("\n");
2695 : hit_break = true;
2696 5433 : break;
2697 : }
2698 : }
2699 :
2700 : // If max is 0, break. If max is positive (a limit is set), decrement it.
2701 44021707 : if (max == 0) break;
2702 44019274 : if (max > 0) --max;
2703 :
2704 : USE(skip);
2705 : TRACE("@%-3zu: %s%-24s:", pc, skip, WasmOpcodes::OpcodeName(opcode));
2706 : TraceValueStack();
2707 : TRACE("\n");
2708 :
2709 : #ifdef DEBUG
2710 : // Compute the stack effect of this opcode, and verify later that the
2711 : // stack was modified accordingly.
2712 : std::pair<uint32_t, uint32_t> stack_effect =
2713 : StackEffect(codemap_->module(), frames_.back().code->function->sig,
2714 : code->orig_start + pc, code->orig_end);
2715 : sp_t expected_new_stack_height =
2716 : StackHeight() - stack_effect.first + stack_effect.second;
2717 : #endif
2718 :
2719 44019274 : switch (orig) {
2720 : case kExprNop:
2721 : break;
2722 : case kExprBlock:
2723 : case kExprLoop:
2724 : case kExprTry: {
2725 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures,
2726 2338096 : &decoder, code->at(pc));
2727 1169048 : len = 1 + imm.length;
2728 : break;
2729 : }
2730 : case kExprIf: {
2731 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures,
2732 334432 : &decoder, code->at(pc));
2733 167216 : WasmValue cond = Pop();
2734 : bool is_true = cond.to<uint32_t>() != 0;
2735 167216 : if (is_true) {
2736 : // fall through to the true block.
2737 19622 : len = 1 + imm.length;
2738 : TRACE(" true => fallthrough\n");
2739 : } else {
2740 295188 : len = LookupTargetDelta(code, pc);
2741 : TRACE(" false => @%zu\n", pc + len);
2742 : }
2743 : break;
2744 : }
2745 : case kExprElse:
2746 : case kExprCatch: {
2747 29968 : len = LookupTargetDelta(code, pc);
2748 : TRACE(" end => @%zu\n", pc + len);
2749 14984 : break;
2750 : }
2751 : case kExprThrow: {
2752 : ExceptionIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2753 604 : code->at(pc));
2754 604 : CommitPc(pc); // Needed for local unwinding.
2755 604 : const WasmException* exception = &module()->exceptions[imm.index];
2756 756 : if (!DoThrowException(exception, imm.index)) return;
2757 452 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2758 452 : continue; // Do not bump pc.
2759 : }
2760 : case kExprRethrow: {
2761 152 : HandleScope handle_scope(isolate_); // Avoid leaking handles.
2762 152 : WasmValue ex = Pop();
2763 152 : CommitPc(pc); // Needed for local unwinding.
2764 152 : if (!DoRethrowException(ex)) return;
2765 32 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2766 : continue; // Do not bump pc.
2767 : }
2768 : case kExprBrOnExn: {
2769 : BranchOnExceptionImmediate<Decoder::kNoValidate> imm(&decoder,
2770 832 : code->at(pc));
2771 416 : HandleScope handle_scope(isolate_); // Avoid leaking handles.
2772 416 : WasmValue ex = Pop();
2773 416 : Handle<Object> exception = ex.to_anyref();
2774 416 : if (MatchingExceptionTag(exception, imm.index.index)) {
2775 640 : imm.index.exception = &module()->exceptions[imm.index.index];
2776 320 : DoUnpackException(imm.index.exception, exception);
2777 320 : len = DoBreak(code, pc, imm.depth.depth);
2778 : TRACE(" match => @%zu\n", pc + len);
2779 : } else {
2780 96 : Push(ex); // Exception remains on stack.
2781 : TRACE(" false => fallthrough\n");
2782 96 : len = 1 + imm.length;
2783 : }
2784 : break;
2785 : }
2786 : case kExprSelect: {
2787 932 : WasmValue cond = Pop();
2788 932 : WasmValue fval = Pop();
2789 932 : WasmValue tval = Pop();
2790 932 : Push(cond.to<int32_t>() != 0 ? tval : fval);
2791 : break;
2792 : }
2793 : case kExprBr: {
2794 : BranchDepthImmediate<Decoder::kNoValidate> imm(&decoder,
2795 18054 : code->at(pc));
2796 18054 : len = DoBreak(code, pc, imm.depth);
2797 : TRACE(" br => @%zu\n", pc + len);
2798 : break;
2799 : }
2800 : case kExprBrIf: {
2801 : BranchDepthImmediate<Decoder::kNoValidate> imm(&decoder,
2802 28736 : code->at(pc));
2803 28736 : WasmValue cond = Pop();
2804 : bool is_true = cond.to<uint32_t>() != 0;
2805 28736 : if (is_true) {
2806 14680 : len = DoBreak(code, pc, imm.depth);
2807 : TRACE(" br_if => @%zu\n", pc + len);
2808 : } else {
2809 : TRACE(" false => fallthrough\n");
2810 14056 : len = 1 + imm.length;
2811 : }
2812 : break;
2813 : }
2814 : case kExprBrTable: {
2815 : BranchTableImmediate<Decoder::kNoValidate> imm(&decoder,
2816 443848 : code->at(pc));
2817 : BranchTableIterator<Decoder::kNoValidate> iterator(&decoder, imm);
2818 221924 : uint32_t key = Pop().to<uint32_t>();
2819 : uint32_t depth = 0;
2820 221924 : if (key >= imm.table_count) key = imm.table_count;
2821 1941548 : for (uint32_t i = 0; i <= key; i++) {
2822 : DCHECK(iterator.has_next());
2823 859812 : depth = iterator.next();
2824 : }
2825 221924 : len = key + DoBreak(code, pc + key, static_cast<size_t>(depth));
2826 : TRACE(" br[%u] => @%zu\n", key, pc + key + len);
2827 : break;
2828 : }
2829 : case kExprReturn: {
2830 222296 : size_t arity = code->function->sig->return_count();
2831 222296 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
2832 12 : PAUSE_IF_BREAK_FLAG(AfterReturn);
2833 : continue; // Do not bump pc.
2834 : }
2835 : case kExprUnreachable: {
2836 76 : return DoTrap(kTrapUnreachable, pc);
2837 : }
2838 : case kExprEnd: {
2839 : break;
2840 : }
2841 : case kExprI32Const: {
2842 7764438 : ImmI32Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2843 7764438 : Push(WasmValue(imm.value));
2844 7764438 : len = 1 + imm.length;
2845 : break;
2846 : }
2847 : case kExprI64Const: {
2848 7048 : ImmI64Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2849 7048 : Push(WasmValue(imm.value));
2850 7048 : len = 1 + imm.length;
2851 : break;
2852 : }
2853 : case kExprF32Const: {
2854 300 : ImmF32Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2855 300 : Push(WasmValue(imm.value));
2856 300 : len = 1 + imm.length;
2857 : break;
2858 : }
2859 : case kExprF64Const: {
2860 1684 : ImmF64Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2861 1684 : Push(WasmValue(imm.value));
2862 1684 : len = 1 + imm.length;
2863 : break;
2864 : }
2865 : case kExprRefNull: {
2866 160 : Push(WasmValue(isolate_->factory()->null_value()));
2867 80 : break;
2868 : }
2869 : case kExprGetLocal: {
2870 12276007 : LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2871 12276007 : HandleScope handle_scope(isolate_); // Avoid leaking handles.
2872 24552014 : Push(GetStackValue(frames_.back().sp + imm.index));
2873 12276007 : len = 1 + imm.length;
2874 : break;
2875 : }
2876 : case kExprSetLocal: {
2877 3877618 : LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2878 3877618 : HandleScope handle_scope(isolate_); // Avoid leaking handles.
2879 3877618 : WasmValue val = Pop();
2880 3877618 : SetStackValue(frames_.back().sp + imm.index, val);
2881 3877618 : len = 1 + imm.length;
2882 : break;
2883 : }
2884 : case kExprTeeLocal: {
2885 2596 : LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2886 2596 : HandleScope handle_scope(isolate_); // Avoid leaking handles.
2887 2596 : WasmValue val = Pop();
2888 2596 : SetStackValue(frames_.back().sp + imm.index, val);
2889 2596 : Push(val);
2890 2596 : len = 1 + imm.length;
2891 : break;
2892 : }
2893 : case kExprDrop: {
2894 : Drop();
2895 : break;
2896 : }
2897 : case kExprCallFunction: {
2898 : CallFunctionImmediate<Decoder::kNoValidate> imm(&decoder,
2899 454557 : code->at(pc));
2900 : InterpreterCode* target = codemap()->GetCode(imm.index);
2901 454557 : if (target->function->imported) {
2902 8173 : CommitPc(pc);
2903 : ExternalCallResult result =
2904 8173 : CallImportedFunction(target->function->func_index);
2905 8173 : switch (result.type) {
2906 : case ExternalCallResult::INTERNAL:
2907 : // The import is a function of this instance. Call it directly.
2908 : DCHECK(!result.interpreter_code->function->imported);
2909 : break;
2910 : case ExternalCallResult::INVALID_FUNC:
2911 : case ExternalCallResult::SIGNATURE_MISMATCH:
2912 : // Direct calls are checked statically.
2913 0 : UNREACHABLE();
2914 : case ExternalCallResult::EXTERNAL_RETURNED:
2915 5983 : PAUSE_IF_BREAK_FLAG(AfterCall);
2916 5983 : len = 1 + imm.length;
2917 5983 : break;
2918 : case ExternalCallResult::EXTERNAL_UNWOUND:
2919 2134 : return;
2920 : case ExternalCallResult::EXTERNAL_CAUGHT:
2921 56 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2922 56 : continue; // Do not bump pc.
2923 : }
2924 5983 : if (result.type != ExternalCallResult::INTERNAL) break;
2925 : }
2926 : // Execute an internal call.
2927 446384 : if (!DoCall(&decoder, target, &pc, &limit)) return;
2928 446368 : code = target;
2929 446368 : PAUSE_IF_BREAK_FLAG(AfterCall);
2930 : continue; // Do not bump pc.
2931 : } break;
2932 :
2933 : case kExprCallIndirect: {
2934 : CallIndirectImmediate<Decoder::kNoValidate> imm(
2935 1056 : kAllWasmFeatures, &decoder, code->at(pc));
2936 528 : uint32_t entry_index = Pop().to<uint32_t>();
2937 : // Assume only one table for now.
2938 : DCHECK_LE(module()->tables.size(), 1u);
2939 528 : CommitPc(pc); // TODO(wasm): Be more disciplined about committing PC.
2940 : ExternalCallResult result =
2941 528 : CallIndirectFunction(0, entry_index, imm.sig_index);
2942 528 : switch (result.type) {
2943 : case ExternalCallResult::INTERNAL:
2944 : // The import is a function of this instance. Call it directly.
2945 316 : if (!DoCall(&decoder, result.interpreter_code, &pc, &limit))
2946 180 : return;
2947 316 : code = result.interpreter_code;
2948 316 : PAUSE_IF_BREAK_FLAG(AfterCall);
2949 316 : continue; // Do not bump pc.
2950 : case ExternalCallResult::INVALID_FUNC:
2951 44 : return DoTrap(kTrapFuncInvalid, pc);
2952 : case ExternalCallResult::SIGNATURE_MISMATCH:
2953 120 : return DoTrap(kTrapFuncSigMismatch, pc);
2954 : case ExternalCallResult::EXTERNAL_RETURNED:
2955 32 : PAUSE_IF_BREAK_FLAG(AfterCall);
2956 32 : len = 1 + imm.length;
2957 32 : break;
2958 : case ExternalCallResult::EXTERNAL_UNWOUND:
2959 : return;
2960 : case ExternalCallResult::EXTERNAL_CAUGHT:
2961 0 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2962 0 : continue; // Do not bump pc.
2963 : }
2964 32 : } break;
2965 :
2966 : case kExprReturnCall: {
2967 : CallFunctionImmediate<Decoder::kNoValidate> imm(&decoder,
2968 104820 : code->at(pc));
2969 : InterpreterCode* target = codemap()->GetCode(imm.index);
2970 :
2971 104820 : if (!target->function->imported) {
2972 : // Enter internal found function.
2973 104820 : if (!DoReturnCall(&decoder, target, &pc, &limit)) return;
2974 104812 : code = target;
2975 104812 : PAUSE_IF_BREAK_FLAG(AfterCall);
2976 :
2977 104812 : continue; // Do not bump pc.
2978 : }
2979 : // Function is imported.
2980 8 : CommitPc(pc);
2981 : ExternalCallResult result =
2982 8 : CallImportedFunction(target->function->func_index);
2983 8 : switch (result.type) {
2984 : case ExternalCallResult::INTERNAL:
2985 : // Cannot import internal functions.
2986 : case ExternalCallResult::INVALID_FUNC:
2987 : case ExternalCallResult::SIGNATURE_MISMATCH:
2988 : // Direct calls are checked statically.
2989 0 : UNREACHABLE();
2990 : case ExternalCallResult::EXTERNAL_RETURNED:
2991 8 : len = 1 + imm.length;
2992 8 : break;
2993 : case ExternalCallResult::EXTERNAL_UNWOUND:
2994 : return;
2995 : case ExternalCallResult::EXTERNAL_CAUGHT:
2996 0 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2997 0 : continue;
2998 : }
2999 8 : size_t arity = code->function->sig->return_count();
3000 8 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
3001 0 : PAUSE_IF_BREAK_FLAG(AfterReturn);
3002 : continue;
3003 : } break;
3004 :
3005 : case kExprReturnCallIndirect: {
3006 : CallIndirectImmediate<Decoder::kNoValidate> imm(
3007 80424 : kAllWasmFeatures, &decoder, code->at(pc));
3008 40212 : uint32_t entry_index = Pop().to<uint32_t>();
3009 : // Assume only one table for now.
3010 : DCHECK_LE(module()->tables.size(), 1u);
3011 40212 : CommitPc(pc); // TODO(wasm): Be more disciplined about committing PC.
3012 :
3013 : // TODO(wasm): Calling functions needs some refactoring to avoid
3014 : // multi-exit code like this.
3015 : ExternalCallResult result =
3016 40212 : CallIndirectFunction(0, entry_index, imm.sig_index);
3017 40212 : switch (result.type) {
3018 : case ExternalCallResult::INTERNAL: {
3019 : InterpreterCode* target = result.interpreter_code;
3020 :
3021 : DCHECK(!target->function->imported);
3022 :
3023 : // The function belongs to this instance. Enter it directly.
3024 40212 : if (!DoReturnCall(&decoder, target, &pc, &limit)) return;
3025 40204 : code = result.interpreter_code;
3026 40204 : PAUSE_IF_BREAK_FLAG(AfterCall);
3027 40204 : continue; // Do not bump pc.
3028 : }
3029 : case ExternalCallResult::INVALID_FUNC:
3030 0 : return DoTrap(kTrapFuncInvalid, pc);
3031 : case ExternalCallResult::SIGNATURE_MISMATCH:
3032 0 : return DoTrap(kTrapFuncSigMismatch, pc);
3033 : case ExternalCallResult::EXTERNAL_RETURNED: {
3034 8 : len = 1 + imm.length;
3035 :
3036 8 : size_t arity = code->function->sig->return_count();
3037 8 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
3038 0 : PAUSE_IF_BREAK_FLAG(AfterCall);
3039 : break;
3040 : }
3041 : case ExternalCallResult::EXTERNAL_UNWOUND:
3042 : return;
3043 :
3044 : case ExternalCallResult::EXTERNAL_CAUGHT:
3045 0 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
3046 0 : break;
3047 : }
3048 0 : } break;
3049 :
3050 : case kExprGetGlobal: {
3051 : GlobalIndexImmediate<Decoder::kNoValidate> imm(&decoder,
3052 15936 : code->at(pc));
3053 15936 : const WasmGlobal* global = &module()->globals[imm.index];
3054 15936 : switch (global->type) {
3055 : #define CASE_TYPE(wasm, ctype) \
3056 : case kWasm##wasm: { \
3057 : byte* ptr = GetGlobalPtr(global); \
3058 : Push(WasmValue( \
3059 : ReadLittleEndianValue<ctype>(reinterpret_cast<Address>(ptr)))); \
3060 : break; \
3061 : }
3062 15552 : WASM_CTYPES(CASE_TYPE)
3063 : #undef CASE_TYPE
3064 : case kWasmAnyRef:
3065 : case kWasmAnyFunc:
3066 : case kWasmExceptRef: {
3067 384 : HandleScope handle_scope(isolate_); // Avoid leaking handles.
3068 : Handle<FixedArray> global_buffer; // The buffer of the global.
3069 384 : uint32_t global_index = 0; // The index into the buffer.
3070 384 : GetGlobalBufferAndIndex(global, &global_buffer, &global_index);
3071 384 : Handle<Object> value(global_buffer->get(global_index), isolate_);
3072 384 : Push(WasmValue(value));
3073 : break;
3074 : }
3075 : default:
3076 0 : UNREACHABLE();
3077 : }
3078 15936 : len = 1 + imm.length;
3079 : break;
3080 : }
3081 : case kExprSetGlobal: {
3082 : GlobalIndexImmediate<Decoder::kNoValidate> imm(&decoder,
3083 792952 : code->at(pc));
3084 792952 : const WasmGlobal* global = &module()->globals[imm.index];
3085 792952 : switch (global->type) {
3086 : #define CASE_TYPE(wasm, ctype) \
3087 : case kWasm##wasm: { \
3088 : byte* ptr = GetGlobalPtr(global); \
3089 : WriteLittleEndianValue<ctype>(reinterpret_cast<Address>(ptr), \
3090 : Pop().to<ctype>()); \
3091 : break; \
3092 : }
3093 1585488 : WASM_CTYPES(CASE_TYPE)
3094 : #undef CASE_TYPE
3095 : case kWasmAnyRef:
3096 : case kWasmAnyFunc:
3097 : case kWasmExceptRef: {
3098 208 : HandleScope handle_scope(isolate_); // Avoid leaking handles.
3099 : Handle<FixedArray> global_buffer; // The buffer of the global.
3100 208 : uint32_t global_index = 0; // The index into the buffer.
3101 208 : GetGlobalBufferAndIndex(global, &global_buffer, &global_index);
3102 624 : global_buffer->set(global_index, *Pop().to_anyref());
3103 : break;
3104 : }
3105 : default:
3106 0 : UNREACHABLE();
3107 : }
3108 792952 : len = 1 + imm.length;
3109 : break;
3110 : }
3111 :
3112 : #define LOAD_CASE(name, ctype, mtype, rep) \
3113 : case kExpr##name: { \
3114 : if (!ExecuteLoad<ctype, mtype>(&decoder, code, pc, len, \
3115 : MachineRepresentation::rep)) \
3116 : return; \
3117 : break; \
3118 : }
3119 :
3120 262372 : LOAD_CASE(I32LoadMem8S, int32_t, int8_t, kWord8);
3121 262380 : LOAD_CASE(I32LoadMem8U, int32_t, uint8_t, kWord8);
3122 131300 : LOAD_CASE(I32LoadMem16S, int32_t, int16_t, kWord16);
3123 131300 : LOAD_CASE(I32LoadMem16U, int32_t, uint16_t, kWord16);
3124 96 : LOAD_CASE(I64LoadMem8S, int64_t, int8_t, kWord8);
3125 0 : LOAD_CASE(I64LoadMem8U, int64_t, uint8_t, kWord16);
3126 96 : LOAD_CASE(I64LoadMem16S, int64_t, int16_t, kWord16);
3127 0 : LOAD_CASE(I64LoadMem16U, int64_t, uint16_t, kWord16);
3128 96 : LOAD_CASE(I64LoadMem32S, int64_t, int32_t, kWord32);
3129 0 : LOAD_CASE(I64LoadMem32U, int64_t, uint32_t, kWord32);
3130 803250 : LOAD_CASE(I32LoadMem, int32_t, int32_t, kWord32);
3131 1542892 : LOAD_CASE(I64LoadMem, int64_t, int64_t, kWord64);
3132 9452 : LOAD_CASE(F32LoadMem, Float32, uint32_t, kFloat32);
3133 29940 : LOAD_CASE(F64LoadMem, Float64, uint64_t, kFloat64);
3134 : #undef LOAD_CASE
3135 :
3136 : #define STORE_CASE(name, ctype, mtype, rep) \
3137 : case kExpr##name: { \
3138 : if (!ExecuteStore<ctype, mtype>(&decoder, code, pc, len, \
3139 : MachineRepresentation::rep)) \
3140 : return; \
3141 : break; \
3142 : }
3143 :
3144 420 : STORE_CASE(I32StoreMem8, int32_t, int8_t, kWord8);
3145 396 : STORE_CASE(I32StoreMem16, int32_t, int16_t, kWord16);
3146 96 : STORE_CASE(I64StoreMem8, int64_t, int8_t, kWord8);
3147 92 : STORE_CASE(I64StoreMem16, int64_t, int16_t, kWord16);
3148 84 : STORE_CASE(I64StoreMem32, int64_t, int32_t, kWord32);
3149 852096 : STORE_CASE(I32StoreMem, int32_t, int32_t, kWord32);
3150 1708428 : STORE_CASE(I64StoreMem, int64_t, int64_t, kWord64);
3151 1200 : STORE_CASE(F32StoreMem, Float32, uint32_t, kFloat32);
3152 10924 : STORE_CASE(F64StoreMem, Float64, uint64_t, kFloat64);
3153 : #undef STORE_CASE
3154 :
3155 : #define ASMJS_LOAD_CASE(name, ctype, mtype, defval) \
3156 : case kExpr##name: { \
3157 : uint32_t index = Pop().to<uint32_t>(); \
3158 : ctype result; \
3159 : Address addr = BoundsCheckMem<mtype>(0, index); \
3160 : if (!addr) { \
3161 : result = defval; \
3162 : } else { \
3163 : /* TODO(titzer): alignment for asmjs load mem? */ \
3164 : result = static_cast<ctype>(*reinterpret_cast<mtype*>(addr)); \
3165 : } \
3166 : Push(WasmValue(result)); \
3167 : break; \
3168 : }
3169 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8S, int32_t, int8_t, 0);
3170 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8U, int32_t, uint8_t, 0);
3171 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16S, int32_t, int16_t, 0);
3172 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16U, int32_t, uint16_t, 0);
3173 200 : ASMJS_LOAD_CASE(I32AsmjsLoadMem, int32_t, int32_t, 0);
3174 200 : ASMJS_LOAD_CASE(F32AsmjsLoadMem, float, float,
3175 : std::numeric_limits<float>::quiet_NaN());
3176 272 : ASMJS_LOAD_CASE(F64AsmjsLoadMem, double, double,
3177 : std::numeric_limits<double>::quiet_NaN());
3178 : #undef ASMJS_LOAD_CASE
3179 :
3180 : #define ASMJS_STORE_CASE(name, ctype, mtype) \
3181 : case kExpr##name: { \
3182 : WasmValue val = Pop(); \
3183 : uint32_t index = Pop().to<uint32_t>(); \
3184 : Address addr = BoundsCheckMem<mtype>(0, index); \
3185 : if (addr) { \
3186 : *(reinterpret_cast<mtype*>(addr)) = static_cast<mtype>(val.to<ctype>()); \
3187 : } \
3188 : Push(val); \
3189 : break; \
3190 : }
3191 :
3192 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem8, int32_t, int8_t);
3193 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem16, int32_t, int16_t);
3194 932 : ASMJS_STORE_CASE(I32AsmjsStoreMem, int32_t, int32_t);
3195 0 : ASMJS_STORE_CASE(F32AsmjsStoreMem, float, float);
3196 0 : ASMJS_STORE_CASE(F64AsmjsStoreMem, double, double);
3197 : #undef ASMJS_STORE_CASE
3198 : case kExprMemoryGrow: {
3199 : MemoryIndexImmediate<Decoder::kNoValidate> imm(&decoder,
3200 48 : code->at(pc));
3201 48 : uint32_t delta_pages = Pop().to<uint32_t>();
3202 48 : HandleScope handle_scope(isolate_); // Avoid leaking handles.
3203 : Handle<WasmMemoryObject> memory(instance_object_->memory_object(),
3204 48 : isolate_);
3205 : int32_t result =
3206 48 : WasmMemoryObject::Grow(isolate_, memory, delta_pages);
3207 48 : Push(WasmValue(result));
3208 48 : len = 1 + imm.length;
3209 : // Treat one grow_memory instruction like 1000 other instructions,
3210 : // because it is a really expensive operation.
3211 48 : if (max > 0) max = std::max(0, max - 1000);
3212 : break;
3213 : }
3214 : case kExprMemorySize: {
3215 : MemoryIndexImmediate<Decoder::kNoValidate> imm(&decoder,
3216 0 : code->at(pc));
3217 0 : Push(WasmValue(static_cast<uint32_t>(instance_object_->memory_size() /
3218 0 : kWasmPageSize)));
3219 0 : len = 1 + imm.length;
3220 : break;
3221 : }
3222 : // We need to treat kExprI32ReinterpretF32 and kExprI64ReinterpretF64
3223 : // specially to guarantee that the quiet bit of a NaN is preserved on
3224 : // ia32 by the reinterpret casts.
3225 : case kExprI32ReinterpretF32: {
3226 256 : WasmValue val = Pop();
3227 256 : Push(WasmValue(ExecuteI32ReinterpretF32(val)));
3228 : break;
3229 : }
3230 : case kExprI64ReinterpretF64: {
3231 244 : WasmValue val = Pop();
3232 244 : Push(WasmValue(ExecuteI64ReinterpretF64(val)));
3233 : break;
3234 : }
3235 : #define SIGN_EXTENSION_CASE(name, wtype, ntype) \
3236 : case kExpr##name: { \
3237 : ntype val = static_cast<ntype>(Pop().to<wtype>()); \
3238 : Push(WasmValue(static_cast<wtype>(val))); \
3239 : break; \
3240 : }
3241 40 : SIGN_EXTENSION_CASE(I32SExtendI8, int32_t, int8_t);
3242 40 : SIGN_EXTENSION_CASE(I32SExtendI16, int32_t, int16_t);
3243 40 : SIGN_EXTENSION_CASE(I64SExtendI8, int64_t, int8_t);
3244 40 : SIGN_EXTENSION_CASE(I64SExtendI16, int64_t, int16_t);
3245 40 : SIGN_EXTENSION_CASE(I64SExtendI32, int64_t, int32_t);
3246 : #undef SIGN_EXTENSION_CASE
3247 : case kExprRefIsNull: {
3248 84 : HandleScope handle_scope(isolate_); // Avoid leaking handles.
3249 168 : uint32_t result = Pop().to_anyref()->IsNull() ? 1 : 0;
3250 84 : Push(WasmValue(result));
3251 : break;
3252 : }
3253 : case kNumericPrefix: {
3254 3724 : ++len;
3255 3724 : if (!ExecuteNumericOp(opcode, &decoder, code, pc, len)) return;
3256 : break;
3257 : }
3258 : case kAtomicPrefix: {
3259 384116 : if (!ExecuteAtomicOp(opcode, &decoder, code, pc, len)) return;
3260 : break;
3261 : }
3262 : case kSimdPrefix: {
3263 2349608 : ++len;
3264 2349608 : if (!ExecuteSimdOp(opcode, &decoder, code, pc, len)) return;
3265 : break;
3266 : }
3267 :
3268 : #define EXECUTE_SIMPLE_BINOP(name, ctype, op) \
3269 : case kExpr##name: { \
3270 : WasmValue rval = Pop(); \
3271 : WasmValue lval = Pop(); \
3272 : auto result = lval.to<ctype>() op rval.to<ctype>(); \
3273 : possible_nondeterminism_ |= has_nondeterminism(result); \
3274 : Push(WasmValue(result)); \
3275 : break; \
3276 : }
3277 2349340 : FOREACH_SIMPLE_BINOP(EXECUTE_SIMPLE_BINOP)
3278 : #undef EXECUTE_SIMPLE_BINOP
3279 :
3280 : #define EXECUTE_OTHER_BINOP(name, ctype) \
3281 : case kExpr##name: { \
3282 : TrapReason trap = kTrapCount; \
3283 : ctype rval = Pop().to<ctype>(); \
3284 : ctype lval = Pop().to<ctype>(); \
3285 : auto result = Execute##name(lval, rval, &trap); \
3286 : possible_nondeterminism_ |= has_nondeterminism(result); \
3287 : if (trap != kTrapCount) return DoTrap(trap, pc); \
3288 : Push(WasmValue(result)); \
3289 : break; \
3290 : }
3291 1559760 : FOREACH_OTHER_BINOP(EXECUTE_OTHER_BINOP)
3292 : #undef EXECUTE_OTHER_BINOP
3293 :
3294 : #define EXECUTE_UNOP(name, ctype, exec_fn) \
3295 : case kExpr##name: { \
3296 : TrapReason trap = kTrapCount; \
3297 : ctype val = Pop().to<ctype>(); \
3298 : auto result = exec_fn(val, &trap); \
3299 : possible_nondeterminism_ |= has_nondeterminism(result); \
3300 : if (trap != kTrapCount) return DoTrap(trap, pc); \
3301 : Push(WasmValue(result)); \
3302 : break; \
3303 : }
3304 :
3305 : #define EXECUTE_OTHER_UNOP(name, ctype) EXECUTE_UNOP(name, ctype, Execute##name)
3306 209584 : FOREACH_OTHER_UNOP(EXECUTE_OTHER_UNOP)
3307 : #undef EXECUTE_OTHER_UNOP
3308 :
3309 : #define EXECUTE_I32CONV_FLOATOP(name, out_type, in_type) \
3310 : EXECUTE_UNOP(name, in_type, ExecuteConvert<out_type>)
3311 2852 : FOREACH_I32CONV_FLOATOP(EXECUTE_I32CONV_FLOATOP)
3312 : #undef EXECUTE_I32CONV_FLOATOP
3313 : #undef EXECUTE_UNOP
3314 :
3315 : default:
3316 0 : FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
3317 0 : OpcodeName(code->start[pc]));
3318 : UNREACHABLE();
3319 : }
3320 :
3321 : #ifdef DEBUG
3322 : if (!WasmOpcodes::IsControlOpcode(opcode)) {
3323 : DCHECK_EQ(expected_new_stack_height, StackHeight());
3324 : }
3325 : #endif
3326 :
3327 43165040 : pc += len;
3328 43165040 : if (pc == limit) {
3329 : // Fell off end of code; do an implicit return.
3330 : TRACE("@%-3zu: ImplicitReturn\n", pc);
3331 4588908 : size_t arity = code->function->sig->return_count();
3332 : DCHECK_EQ(StackHeight() - arity, frames_.back().llimit());
3333 4588908 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
3334 110688 : PAUSE_IF_BREAK_FLAG(AfterReturn);
3335 : }
3336 : #undef PAUSE_IF_BREAK_FLAG
3337 : }
3338 :
3339 5433 : state_ = WasmInterpreter::PAUSED;
3340 8473 : break_pc_ = hit_break ? pc : kInvalidPc;
3341 5433 : CommitPc(pc);
3342 : }
3343 :
3344 23994864 : WasmValue Pop() {
3345 : DCHECK_GT(frames_.size(), 0);
3346 : DCHECK_GT(StackHeight(), frames_.back().llimit()); // can't pop into locals
3347 23994864 : StackValue stack_value = *--sp_;
3348 : // Note that {StackHeight} depends on the current {sp} value, hence this
3349 : // operation is split into two statements to ensure proper evaluation order.
3350 23994864 : return stack_value.ExtractValue(this, StackHeight());
3351 : }
3352 :
  // Discards the topmost {n} values from the operand stack without reading
  // them. Must not drop below the current frame's local slots.
  void Drop(int n = 1) {
    DCHECK_GE(StackHeight(), n);
    DCHECK_GT(frames_.size(), 0);
    // Check that we don't pop into locals.
    DCHECK_GE(StackHeight() - n, frames_.back().llimit());
    sp_ -= n;
  }
3360 :
  // Pops the single result value for {arity} == 1, or returns the default
  // (void) WasmValue for {arity} == 0. Multi-value results are not supported
  // on this path (CHECKed).
  WasmValue PopArity(size_t arity) {
    if (arity == 0) return WasmValue();
    CHECK_EQ(1, arity);
    return Pop();
  }
3366 :
  // Pushes one value onto the operand stack. The caller must have reserved
  // space beforehand (see {EnsureStackSpace}); only DCHECKed here.
  void Push(WasmValue val) {
    DCHECK_NE(kWasmStmt, val.type());
    DCHECK_LE(1, stack_limit_ - sp_);
    StackValue stack_value(val, this, StackHeight());
    // Note that {StackHeight} depends on the current {sp} value, hence this
    // operation is split into two statements to ensure proper evaluation order.
    *sp_++ = stack_value;
  }
3375 :
3376 : void Push(WasmValue* vals, size_t arity) {
3377 : DCHECK_LE(arity, stack_limit_ - sp_);
3378 11576739 : for (WasmValue *val = vals, *end = vals + arity; val != end; ++val) {
3379 : DCHECK_NE(kWasmStmt, val->type());
3380 6836552 : Push(*val);
3381 : }
3382 : }
3383 :
  // Grows the operand stack so that at least {size} more values can be
  // pushed. Capacity at least doubles on each growth to amortize the cost.
  // The parallel on-heap reference stack is grown to the same size so slot
  // indices stay aligned between the two stacks.
  void EnsureStackSpace(size_t size) {
    if (V8_LIKELY(static_cast<size_t>(stack_limit_ - sp_) >= size)) return;
    size_t old_size = stack_limit_ - stack_.get();
    size_t requested_size =
        base::bits::RoundUpToPowerOfTwo64((sp_ - stack_.get()) + size);
    size_t new_size = Max(size_t{8}, Max(2 * old_size, requested_size));
    std::unique_ptr<StackValue[]> new_stack(new StackValue[new_size]);
    if (old_size > 0) {
      memcpy(new_stack.get(), stack_.get(), old_size * sizeof(*sp_));
    }
    // Rebase {sp_} onto the new allocation before releasing the old one.
    sp_ = new_stack.get() + (sp_ - stack_.get());
    stack_ = std::move(new_stack);
    stack_limit_ = stack_.get() + new_size;
    // Also resize the reference stack to the same size.
    int grow_by = static_cast<int>(new_size - old_size);
    HandleScope handle_scope(isolate_);  // Avoid leaking handles.
    Handle<FixedArray> old_ref_stack(reference_stack(), isolate_);
    Handle<FixedArray> new_ref_stack =
        isolate_->factory()->CopyFixedArrayAndGrow(old_ref_stack, grow_by);
    reference_stack_cell_->set_value(*new_ref_stack);
  }
3405 :
  // Number of values currently on the operand stack (including locals).
  sp_t StackHeight() { return sp_ - stack_.get(); }
3407 :
  // Prints the topmost frame's stack slots (parameters 'p', locals 'l', and
  // operand values 's') when --trace-wasm-interpreter is set. Debug-only;
  // compiles to nothing in release builds.
  void TraceValueStack() {
#ifdef DEBUG
    if (!FLAG_trace_wasm_interpreter) return;
    HandleScope handle_scope(isolate_);  // Avoid leaking handles.
    Frame* top = frames_.size() > 0 ? &frames_.back() : nullptr;
    sp_t sp = top ? top->sp : 0;
    sp_t plimit = top ? top->plimit() : 0;
    sp_t llimit = top ? top->llimit() : 0;
    for (size_t i = sp; i < StackHeight(); ++i) {
      if (i < plimit)
        PrintF(" p%zu:", i);
      else if (i < llimit)
        PrintF(" l%zu:", i);
      else
        PrintF(" s%zu:", i);
      WasmValue val = GetStackValue(i);
      switch (val.type()) {
        case kWasmI32:
          PrintF("i32:%d", val.to<int32_t>());
          break;
        case kWasmI64:
          PrintF("i64:%" PRId64 "", val.to<int64_t>());
          break;
        case kWasmF32:
          PrintF("f32:%f", val.to<float>());
          break;
        case kWasmF64:
          PrintF("f64:%lf", val.to<double>());
          break;
        case kWasmS128: {
          // This defaults to tracing all S128 values as i32x4 values for now,
          // when there is more state to know what type of values are on the
          // stack, the right format should be printed here.
          int4 s = val.to_s128().to_i32x4();
          PrintF("i32x4:%d,%d,%d,%d", s.val[0], s.val[1], s.val[2], s.val[3]);
          break;
        }
        case kWasmAnyRef: {
          Handle<Object> ref = val.to_anyref();
          if (ref->IsNull()) {
            PrintF("ref:null");
          } else {
            PrintF("ref:0x%" V8PRIxPTR, ref->ptr());
          }
          break;
        }
        case kWasmStmt:
          PrintF("void");
          break;
        default:
          UNREACHABLE();
          break;
      }
    }
#endif  // DEBUG
  }
3464 :
3465 : ExternalCallResult TryHandleException(Isolate* isolate) {
3466 : DCHECK(isolate->has_pending_exception()); // Assume exceptional return.
3467 2206 : if (HandleException(isolate) == WasmInterpreter::Thread::UNWOUND) {
3468 : return {ExternalCallResult::EXTERNAL_UNWOUND};
3469 : }
3470 : return {ExternalCallResult::EXTERNAL_CAUGHT};
3471 : }
3472 :
  // Calls a wasm function that cannot be executed by this interpreter
  // directly (a wasm-to-JS wrapper, or a function belonging to another
  // instance) through the C wasm entry stub. Arguments are taken from the
  // operand stack, marshaled into a flat byte buffer, and the (single)
  // return value, if any, is pushed back onto the stack.
  ExternalCallResult CallExternalWasmFunction(Isolate* isolate,
                                              Handle<Object> object_ref,
                                              const WasmCode* code,
                                              FunctionSig* sig) {
    int num_args = static_cast<int>(sig->parameter_count());
    WasmFeatures enabled_features = WasmFeaturesFromIsolate(isolate);

    // Calling a JS import with a signature JS cannot represent (checked
    // against BigInt support) traps with a type error.
    if (code->kind() == WasmCode::kWasmToJsWrapper &&
        !IsJSCompatibleSignature(sig, enabled_features.bigint)) {
      sp_ -= num_args;  // Pop arguments before throwing.
      isolate->Throw(*isolate->factory()->NewTypeError(
          MessageTemplate::kWasmTrapTypeError));
      return TryHandleException(isolate);
    }

    Handle<WasmDebugInfo> debug_info(instance_object_->debug_info(), isolate);
    Handle<JSFunction> wasm_entry =
        WasmDebugInfo::GetCWasmEntry(debug_info, sig);

    TRACE(" => Calling external wasm function\n");

    // Copy the arguments to one buffer.
    // TODO(clemensh): Introduce a helper for all argument buffer
    // con-/destruction.
    std::vector<uint8_t> arg_buffer(num_args * 8);
    size_t offset = 0;
    sp_t base_index = StackHeight() - num_args;
    for (int i = 0; i < num_args; ++i) {
      int param_size = ValueTypes::ElementSizeInBytes(sig->GetParam(i));
      if (arg_buffer.size() < offset + param_size) {
        arg_buffer.resize(std::max(2 * arg_buffer.size(), offset + param_size));
      }
      Address address = reinterpret_cast<Address>(arg_buffer.data()) + offset;
      WasmValue arg = GetStackValue(base_index + i);
      switch (sig->GetParam(i)) {
        case kWasmI32:
          WriteUnalignedValue(address, arg.to<uint32_t>());
          break;
        case kWasmI64:
          WriteUnalignedValue(address, arg.to<uint64_t>());
          break;
        case kWasmF32:
          WriteUnalignedValue(address, arg.to<float>());
          break;
        case kWasmF64:
          WriteUnalignedValue(address, arg.to<double>());
          break;
        case kWasmAnyRef:
        case kWasmAnyFunc:
        case kWasmExceptRef:
          DCHECK_EQ(kSystemPointerSize, param_size);
          WriteUnalignedValue<Object>(address, *arg.to_anyref());
          break;
        default:
          UNIMPLEMENTED();
      }
      offset += param_size;
    }

    // Ensure that there is enough space in the arg_buffer to hold the return
    // value(s).
    size_t return_size = 0;
    for (ValueType t : sig->returns()) {
      return_size += ValueTypes::ElementSizeInBytes(t);
    }
    if (arg_buffer.size() < return_size) {
      arg_buffer.resize(return_size);
    }

    // Wrap the arg_buffer and the code target data pointers in handles. As
    // these are aligned pointers, to the GC it will look like Smis.
    Handle<Object> arg_buffer_obj(
        Object(reinterpret_cast<Address>(arg_buffer.data())), isolate);
    DCHECK(!arg_buffer_obj->IsHeapObject());
    Handle<Object> code_entry_obj(Object(code->instruction_start()), isolate);
    DCHECK(!code_entry_obj->IsHeapObject());

    static_assert(compiler::CWasmEntryParameters::kNumParameters == 3,
                  "code below needs adaption");
    Handle<Object> args[compiler::CWasmEntryParameters::kNumParameters];
    args[compiler::CWasmEntryParameters::kCodeEntry] = code_entry_obj;
    args[compiler::CWasmEntryParameters::kObjectRef] = object_ref;
    args[compiler::CWasmEntryParameters::kArgumentsBuffer] = arg_buffer_obj;

    Handle<Object> receiver = isolate->factory()->undefined_value();
    trap_handler::SetThreadInWasm();
    MaybeHandle<Object> maybe_retval =
        Execution::Call(isolate, wasm_entry, receiver, arraysize(args), args);
    TRACE(" => External wasm function returned%s\n",
          maybe_retval.is_null() ? " with exception" : "");

    // Pop arguments off the stack.
    sp_ -= num_args;

    if (maybe_retval.is_null()) {
      // JSEntry may throw a stack overflow before we actually get to wasm code
      // or back to the interpreter, meaning the thread-in-wasm flag won't be
      // cleared.
      if (trap_handler::IsThreadInWasm()) {
        trap_handler::ClearThreadInWasm();
      }
      return TryHandleException(isolate);
    }

    trap_handler::ClearThreadInWasm();

    // Push return values.
    if (sig->return_count() > 0) {
      // TODO(wasm): Handle multiple returns.
      DCHECK_EQ(1, sig->return_count());
      Address address = reinterpret_cast<Address>(arg_buffer.data());
      switch (sig->GetReturn()) {
        case kWasmI32:
          Push(WasmValue(ReadUnalignedValue<uint32_t>(address)));
          break;
        case kWasmI64:
          Push(WasmValue(ReadUnalignedValue<uint64_t>(address)));
          break;
        case kWasmF32:
          Push(WasmValue(ReadUnalignedValue<float>(address)));
          break;
        case kWasmF64:
          Push(WasmValue(ReadUnalignedValue<double>(address)));
          break;
        case kWasmAnyRef:
        case kWasmAnyFunc:
        case kWasmExceptRef: {
          Handle<Object> ref(ReadUnalignedValue<Object>(address), isolate);
          Push(WasmValue(ref));
          break;
        }
        default:
          UNIMPLEMENTED();
      }
    }
    return {ExternalCallResult::EXTERNAL_RETURNED};
  }
3610 :
3611 48757 : static WasmCode* GetTargetCode(WasmCodeManager* code_manager,
3612 : Address target) {
3613 48757 : NativeModule* native_module = code_manager->LookupNativeModule(target);
3614 48757 : if (native_module->is_jump_table_slot(target)) {
3615 : uint32_t func_index =
3616 40624 : native_module->GetFunctionIndexFromJumpTableSlot(target);
3617 40624 : return native_module->GetCode(func_index);
3618 : }
3619 8133 : WasmCode* code = native_module->Lookup(target);
3620 : DCHECK_EQ(code->instruction_start(), target);
3621 8133 : return code;
3622 : }
3623 :
  // Calls the imported function with index {function_index}. Imports are
  // always executed through the external call path; the arguments are
  // expected on the operand stack.
  ExternalCallResult CallImportedFunction(uint32_t function_index) {
    DCHECK_GT(module()->num_imported_functions, function_index);
    HandleScope handle_scope(isolate_);  // Avoid leaking handles.

    ImportedFunctionEntry entry(instance_object_, function_index);
    Handle<Object> object_ref(entry.object_ref(), isolate_);
    WasmCode* code =
        GetTargetCode(isolate_->wasm_engine()->code_manager(), entry.target());
    FunctionSig* sig = module()->functions[function_index].sig;
    return CallExternalWasmFunction(isolate_, object_ref, code, sig);
  }
3635 :
  // Executes a call_indirect: bounds-checks {entry_index} against the
  // dispatch table, verifies the canonical signature id, then dispatches
  // either internally (function of this instance) or through the external
  // call path (import or cross-instance call).
  ExternalCallResult CallIndirectFunction(uint32_t table_index,
                                          uint32_t entry_index,
                                          uint32_t sig_index) {
    uint32_t expected_sig_id = module()->signature_ids[sig_index];
    DCHECK_EQ(expected_sig_id,
              module()->signature_map.Find(*module()->signatures[sig_index]));

    // The function table is stored in the instance.
    // TODO(wasm): the wasm interpreter currently supports only one table.
    CHECK_EQ(0, table_index);
    // Bounds check against table size.
    if (entry_index >= instance_object_->indirect_function_table_size()) {
      return {ExternalCallResult::INVALID_FUNC};
    }

    IndirectFunctionTableEntry entry(instance_object_, entry_index);
    // Signature check.
    if (entry.sig_id() != static_cast<int32_t>(expected_sig_id)) {
      return {ExternalCallResult::SIGNATURE_MISMATCH};
    }

    HandleScope handle_scope(isolate_);  // Avoid leaking handles.
    FunctionSig* signature = module()->signatures[sig_index];
    Handle<Object> object_ref = handle(entry.object_ref(), isolate_);
    WasmCode* code =
        GetTargetCode(isolate_->wasm_engine()->code_manager(), entry.target());

    if (!object_ref->IsWasmInstanceObject() || /* call to an import */
        !instance_object_.is_identical_to(object_ref) /* cross-instance */) {
      return CallExternalWasmFunction(isolate_, object_ref, code, signature);
    }

    DCHECK(code->kind() == WasmCode::kInterpreterEntry ||
           code->kind() == WasmCode::kFunction);
    return {ExternalCallResult::INTERNAL, codemap()->GetCode(code->index())};
  }
3672 :
3673 : inline Activation current_activation() {
3674 9511248 : return activations_.empty() ? Activation(0, 0) : activations_.back();
3675 : }
3676 : };
3677 :
// Read-only view of one interpreter frame, identified by its index in the
// thread's frame vector. Backs the public InterpretedFrame interface (see the
// reinterpret_cast converters below).
class InterpretedFrameImpl {
 public:
  InterpretedFrameImpl(ThreadImpl* thread, int index)
      : thread_(thread), index_(index) {
    DCHECK_LE(0, index);
  }

  // The wasm function executing in this frame.
  const WasmFunction* function() const { return frame()->code->function; }

  // Current program counter within the function body.
  int pc() const {
    DCHECK_LE(0, frame()->pc);
    DCHECK_GE(kMaxInt, frame()->pc);
    return static_cast<int>(frame()->pc);
  }

  int GetParameterCount() const {
    DCHECK_GE(kMaxInt, function()->sig->parameter_count());
    return static_cast<int>(function()->sig->parameter_count());
  }

  // Locals include the parameters followed by the declared locals.
  int GetLocalCount() const {
    size_t num_locals = function()->sig->parameter_count() +
                        frame()->code->locals.type_list.size();
    DCHECK_GE(kMaxInt, num_locals);
    return static_cast<int>(num_locals);
  }

  // Number of operand-stack values in this frame, excluding locals. For a
  // non-top frame, the frame ends where the next frame's slots begin.
  int GetStackHeight() const {
    bool is_top_frame =
        static_cast<size_t>(index_) + 1 == thread_->frames_.size();
    size_t stack_limit =
        is_top_frame ? thread_->StackHeight() : thread_->frames_[index_ + 1].sp;
    DCHECK_LE(frame()->sp, stack_limit);
    size_t frame_size = stack_limit - frame()->sp;
    DCHECK_LE(GetLocalCount(), frame_size);
    return static_cast<int>(frame_size) - GetLocalCount();
  }

  WasmValue GetLocalValue(int index) const {
    DCHECK_LE(0, index);
    DCHECK_GT(GetLocalCount(), index);
    return thread_->GetStackValue(static_cast<int>(frame()->sp) + index);
  }

  WasmValue GetStackValue(int index) const {
    DCHECK_LE(0, index);
    // Index must be within the number of stack values of this frame.
    DCHECK_GT(GetStackHeight(), index);
    // Stack values live after the locals in this frame's slot range.
    return thread_->GetStackValue(static_cast<int>(frame()->sp) +
                                  GetLocalCount() + index);
  }

 private:
  ThreadImpl* thread_;
  int index_;

  ThreadImpl::Frame* frame() const {
    DCHECK_GT(thread_->frames_.size(), index_);
    return &thread_->frames_[index_];
  }
};
3739 :
3740 : namespace {
3741 :
3742 : // Converters between WasmInterpreter::Thread and WasmInterpreter::ThreadImpl.
3743 : // Thread* is the public interface, without knowledge of the object layout.
3744 : // This cast is potentially risky, but as long as we always cast it back before
3745 : // accessing any data, it should be fine. UBSan is not complaining.
3746 : WasmInterpreter::Thread* ToThread(ThreadImpl* impl) {
3747 : return reinterpret_cast<WasmInterpreter::Thread*>(impl);
3748 : }
3749 : ThreadImpl* ToImpl(WasmInterpreter::Thread* thread) {
3750 : return reinterpret_cast<ThreadImpl*>(thread);
3751 : }
3752 :
3753 : // Same conversion for InterpretedFrame and InterpretedFrameImpl.
3754 : InterpretedFrame* ToFrame(InterpretedFrameImpl* impl) {
3755 : return reinterpret_cast<InterpretedFrame*>(impl);
3756 : }
3757 : const InterpretedFrameImpl* ToImpl(const InterpretedFrame* frame) {
3758 : return reinterpret_cast<const InterpretedFrameImpl*>(frame);
3759 : }
3760 :
3761 : } // namespace
3762 :
3763 : //============================================================================
3764 : // Implementation of the pimpl idiom for WasmInterpreter::Thread.
3765 : // Instead of placing a pointer to the ThreadImpl inside of the Thread object,
3766 : // we just reinterpret_cast them. ThreadImpls are only allocated inside this
3767 : // translation unit anyway.
3768 : //============================================================================
// All Thread methods forward to the ThreadImpl obtained by the pimpl cast
// above; they carry no logic of their own.
WasmInterpreter::State WasmInterpreter::Thread::state() {
  return ToImpl(this)->state();
}
void WasmInterpreter::Thread::InitFrame(const WasmFunction* function,
                                        WasmValue* args) {
  ToImpl(this)->InitFrame(function, args);
}
WasmInterpreter::State WasmInterpreter::Thread::Run(int num_steps) {
  return ToImpl(this)->Run(num_steps);
}
void WasmInterpreter::Thread::Pause() { return ToImpl(this)->Pause(); }
void WasmInterpreter::Thread::Reset() { return ToImpl(this)->Reset(); }
WasmInterpreter::Thread::ExceptionHandlingResult
WasmInterpreter::Thread::RaiseException(Isolate* isolate,
                                        Handle<Object> exception) {
  return ToImpl(this)->RaiseException(isolate, exception);
}
pc_t WasmInterpreter::Thread::GetBreakpointPc() {
  return ToImpl(this)->GetBreakpointPc();
}
int WasmInterpreter::Thread::GetFrameCount() {
  return ToImpl(this)->GetFrameCount();
}
// The returned FramePtr owns the heap-allocated frame wrapper; it is released
// by InterpretedFrameDeleter.
WasmInterpreter::FramePtr WasmInterpreter::Thread::GetFrame(int index) {
  DCHECK_LE(0, index);
  DCHECK_GT(GetFrameCount(), index);
  return FramePtr(ToFrame(new InterpretedFrameImpl(ToImpl(this), index)));
}
WasmValue WasmInterpreter::Thread::GetReturnValue(int index) {
  return ToImpl(this)->GetReturnValue(index);
}
TrapReason WasmInterpreter::Thread::GetTrapReason() {
  return ToImpl(this)->GetTrapReason();
}
bool WasmInterpreter::Thread::PossibleNondeterminism() {
  return ToImpl(this)->PossibleNondeterminism();
}
uint64_t WasmInterpreter::Thread::NumInterpretedCalls() {
  return ToImpl(this)->NumInterpretedCalls();
}
void WasmInterpreter::Thread::AddBreakFlags(uint8_t flags) {
  ToImpl(this)->AddBreakFlags(flags);
}
void WasmInterpreter::Thread::ClearBreakFlags() {
  ToImpl(this)->ClearBreakFlags();
}
uint32_t WasmInterpreter::Thread::NumActivations() {
  return ToImpl(this)->NumActivations();
}
uint32_t WasmInterpreter::Thread::StartActivation() {
  return ToImpl(this)->StartActivation();
}
void WasmInterpreter::Thread::FinishActivation(uint32_t id) {
  ToImpl(this)->FinishActivation(id);
}
uint32_t WasmInterpreter::Thread::ActivationFrameBase(uint32_t id) {
  return ToImpl(this)->ActivationFrameBase(id);
}
3828 : //============================================================================
3829 : // The implementation details of the interpreter.
3830 : //============================================================================
// Zone-allocated state behind the public WasmInterpreter interface: the
// (copied) module bytes, the interpreter code map, and the single thread.
class WasmInterpreterInternals : public ZoneObject {
 public:
  // Create a copy of the module bytes for the interpreter, since the passed
  // pointer might be invalidated after constructing the interpreter.
  const ZoneVector<uint8_t> module_bytes_;
  CodeMap codemap_;
  ZoneVector<ThreadImpl> threads_;

  WasmInterpreterInternals(Zone* zone, const WasmModule* module,
                           const ModuleWireBytes& wire_bytes,
                           Handle<WasmInstanceObject> instance_object)
      : module_bytes_(wire_bytes.start(), wire_bytes.end(), zone),
        codemap_(module, module_bytes_.data(), zone),
        threads_(zone) {
    Isolate* isolate = instance_object->GetIsolate();
    // The thread's reference stack is a Cell, kept alive via a global handle
    // which is destroyed again in the destructor below.
    Handle<Cell> reference_stack = isolate->global_handles()->Create(
        *isolate->factory()->NewCell(isolate->factory()->empty_fixed_array()));
    threads_.emplace_back(zone, &codemap_, instance_object, reference_stack);
  }

  ~WasmInterpreterInternals() {
    DCHECK_EQ(1, threads_.size());
    GlobalHandles::Destroy(threads_[0].reference_stack_cell().location());
  }
};
3856 :
namespace {
// Weak callback that only destroys the global handle itself; invoked once the
// referenced instance object has died.
void NopFinalizer(const v8::WeakCallbackInfo<void>& data) {
  Address* global_handle_location =
      reinterpret_cast<Address*>(data.GetParameter());
  GlobalHandles::Destroy(global_handle_location);
}

// Wraps {instance_object} in a weak global handle, so that holding it in the
// interpreter does not keep the instance alive.
Handle<WasmInstanceObject> MakeWeak(
    Isolate* isolate, Handle<WasmInstanceObject> instance_object) {
  Handle<WasmInstanceObject> weak_instance =
      isolate->global_handles()->Create<WasmInstanceObject>(*instance_object);
  Address* global_handle_location = weak_instance.location();
  GlobalHandles::MakeWeak(global_handle_location, global_handle_location,
                          &NopFinalizer, v8::WeakCallbackType::kParameter);
  return weak_instance;
}
}  // namespace
3874 :
3875 : //============================================================================
3876 : // Implementation of the public interface of the interpreter.
3877 : //============================================================================
// The internals are allocated inside the interpreter's own zone; the instance
// object is held weakly (see {MakeWeak}) so the interpreter does not keep it
// alive.
WasmInterpreter::WasmInterpreter(Isolate* isolate, const WasmModule* module,
                                 const ModuleWireBytes& wire_bytes,
                                 Handle<WasmInstanceObject> instance_object)
    : zone_(isolate->allocator(), ZONE_NAME),
      internals_(new (&zone_) WasmInterpreterInternals(
          &zone_, module, wire_bytes, MakeWeak(isolate, instance_object))) {}
3884 :
// The internals are zone-allocated, so only the destructor is run explicitly;
// the memory itself is released together with the zone.
WasmInterpreter::~WasmInterpreter() { internals_->~WasmInterpreterInternals(); }

void WasmInterpreter::Run() { internals_->threads_[0].Run(); }

void WasmInterpreter::Pause() { internals_->threads_[0].Pause(); }
3890 :
// Sets ({enabled} == true) or clears a breakpoint at {pc} inside {function}.
// Returns whether a breakpoint was previously set at that position. Enabling
// the first breakpoint in a function copies its code, so the original bytes
// remain available for restoring cleared breakpoints.
bool WasmInterpreter::SetBreakpoint(const WasmFunction* function, pc_t pc,
                                    bool enabled) {
  InterpreterCode* code = internals_->codemap_.GetCode(function);
  size_t size = static_cast<size_t>(code->end - code->start);
  // Check bounds for {pc}.
  if (pc < code->locals.encoded_size || pc >= size) return false;
  // Make a copy of the code before enabling a breakpoint.
  if (enabled && code->orig_start == code->start) {
    code->start = reinterpret_cast<byte*>(zone_.New(size));
    memcpy(code->start, code->orig_start, size);
    code->end = code->start + size;
  }
  bool prev = code->start[pc] == kInternalBreakpoint;
  if (enabled) {
    code->start[pc] = kInternalBreakpoint;
  } else {
    // Restore the original byte at the breakpoint position.
    code->start[pc] = code->orig_start[pc];
  }
  return prev;
}
3911 :
3912 0 : bool WasmInterpreter::GetBreakpoint(const WasmFunction* function, pc_t pc) {
3913 0 : InterpreterCode* code = internals_->codemap_.GetCode(function);
3914 0 : size_t size = static_cast<size_t>(code->end - code->start);
3915 : // Check bounds for {pc}.
3916 0 : if (pc < code->locals.encoded_size || pc >= size) return false;
3917 : // Check if a breakpoint is present at that place in the code.
3918 0 : return code->start[pc] == kInternalBreakpoint;
3919 : }
3920 :
// Not implemented; aborts if called. The trailing return satisfies the
// signature on configurations where UNIMPLEMENTED does not terminate flow.
bool WasmInterpreter::SetTracing(const WasmFunction* function, bool enabled) {
  UNIMPLEMENTED();
  return false;
}
3925 :
// The interpreter currently maintains exactly one thread.
int WasmInterpreter::GetThreadCount() {
  return 1;  // only one thread for now.
}
3929 :
// Returns the (single) interpreter thread as its public pimpl type.
WasmInterpreter::Thread* WasmInterpreter::GetThread(int id) {
  CHECK_EQ(0, id);  // only one thread for now.
  return ToThread(&internals_->threads_[id]);
}
3934 :
// Test-only: registers {function} in the code map without a body.
void WasmInterpreter::AddFunctionForTesting(const WasmFunction* function) {
  internals_->codemap_.AddFunction(function, nullptr, nullptr);
}

// Test-only: installs the code bytes [start, end) for {function}.
void WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function,
                                                const byte* start,
                                                const byte* end) {
  internals_->codemap_.SetFunctionCode(function, start, end);
}
3944 :
// Test-only: computes the side table (control-transfer map) for the code in
// [start, end) using dummy function/signature structures.
ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
    Zone* zone, const WasmModule* module, const byte* start, const byte* end) {
  // Create some dummy structures, to avoid special-casing the implementation
  // just for testing.
  FunctionSig sig(0, 0, nullptr);
  WasmFunction function{&sig, 0, 0, {0, 0}, false, false};
  InterpreterCode code{
      &function, BodyLocalDecls(zone), start, end, nullptr, nullptr, nullptr};

  // Now compute and return the control transfers.
  SideTable side_table(zone, module, &code);
  return side_table.map_;
}
3958 :
3959 : //============================================================================
3960 : // Implementation of the frame inspection interface.
3961 : //============================================================================
3962 679101 : const WasmFunction* InterpretedFrame::function() const {
3963 679101 : return ToImpl(this)->function();
3964 : }
3965 1357018 : int InterpretedFrame::pc() const { return ToImpl(this)->pc(); }
3966 464 : int InterpretedFrame::GetParameterCount() const {
3967 464 : return ToImpl(this)->GetParameterCount();
3968 : }
3969 992 : int InterpretedFrame::GetLocalCount() const {
3970 992 : return ToImpl(this)->GetLocalCount();
3971 : }
3972 704 : int InterpretedFrame::GetStackHeight() const {
3973 704 : return ToImpl(this)->GetStackHeight();
3974 : }
3975 944 : WasmValue InterpretedFrame::GetLocalValue(int index) const {
3976 944 : return ToImpl(this)->GetLocalValue(index);
3977 : }
3978 264 : WasmValue InterpretedFrame::GetStackValue(int index) const {
3979 264 : return ToImpl(this)->GetStackValue(index);
3980 : }
3981 680273 : void InterpretedFrameDeleter::operator()(InterpretedFrame* ptr) {
3982 680273 : delete ToImpl(ptr);
3983 680273 : }
3984 :
3985 : #undef TRACE
3986 : #undef LANE
3987 : #undef FOREACH_INTERNAL_OPCODE
3988 : #undef WASM_CTYPES
3989 : #undef FOREACH_SIMPLE_BINOP
3990 : #undef FOREACH_OTHER_BINOP
3991 : #undef FOREACH_I32CONV_FLOATOP
3992 : #undef FOREACH_OTHER_UNOP
3993 :
3994 : } // namespace wasm
3995 : } // namespace internal
3996 122004 : } // namespace v8
|