1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include <atomic>
6 : #include <type_traits>
7 :
8 : #include "src/wasm/wasm-interpreter.h"
9 :
10 : #include "src/assembler-inl.h"
11 : #include "src/base/overflowing-math.h"
12 : #include "src/boxed-float.h"
13 : #include "src/compiler/wasm-compiler.h"
14 : #include "src/conversions.h"
15 : #include "src/identity-map.h"
16 : #include "src/objects-inl.h"
17 : #include "src/trap-handler/trap-handler.h"
18 : #include "src/utils.h"
19 : #include "src/wasm/decoder.h"
20 : #include "src/wasm/function-body-decoder-impl.h"
21 : #include "src/wasm/function-body-decoder.h"
22 : #include "src/wasm/memory-tracing.h"
23 : #include "src/wasm/wasm-engine.h"
24 : #include "src/wasm/wasm-external-refs.h"
25 : #include "src/wasm/wasm-limits.h"
26 : #include "src/wasm/wasm-module.h"
27 : #include "src/wasm/wasm-objects-inl.h"
28 :
29 : #include "src/zone/accounting-allocator.h"
30 : #include "src/zone/zone-containers.h"
31 :
32 : namespace v8 {
33 : namespace internal {
34 : namespace wasm {
35 :
36 : #define TRACE(...) \
37 : do { \
38 : if (FLAG_trace_wasm_interpreter) PrintF(__VA_ARGS__); \
39 : } while (false)
40 :
41 : #if V8_TARGET_BIG_ENDIAN
42 : #define LANE(i, type) ((sizeof(type.val) / sizeof(type.val[0])) - (i)-1)
43 : #else
44 : #define LANE(i, type) (i)
45 : #endif
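// For example, for a value whose .val array has four lanes, a big-endian
// target maps LANE(0, v) to element 3 and LANE(3, v) to element 0, so the
// wasm-level lane numbering is independent of the host byte order.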
46 :
47 : #define FOREACH_INTERNAL_OPCODE(V) V(Breakpoint, 0xFF)
48 :
49 : #define WASM_CTYPES(V) \
50 : V(I32, int32_t) V(I64, int64_t) V(F32, float) V(F64, double) V(S128, Simd128)
51 :
52 : #define FOREACH_SIMPLE_BINOP(V) \
53 : V(I32Add, uint32_t, +) \
54 : V(I32Sub, uint32_t, -) \
55 : V(I32Mul, uint32_t, *) \
56 : V(I32And, uint32_t, &) \
57 : V(I32Ior, uint32_t, |) \
58 : V(I32Xor, uint32_t, ^) \
59 : V(I32Eq, uint32_t, ==) \
60 : V(I32Ne, uint32_t, !=) \
61 : V(I32LtU, uint32_t, <) \
62 : V(I32LeU, uint32_t, <=) \
63 : V(I32GtU, uint32_t, >) \
64 : V(I32GeU, uint32_t, >=) \
65 : V(I32LtS, int32_t, <) \
66 : V(I32LeS, int32_t, <=) \
67 : V(I32GtS, int32_t, >) \
68 : V(I32GeS, int32_t, >=) \
69 : V(I64Add, uint64_t, +) \
70 : V(I64Sub, uint64_t, -) \
71 : V(I64Mul, uint64_t, *) \
72 : V(I64And, uint64_t, &) \
73 : V(I64Ior, uint64_t, |) \
74 : V(I64Xor, uint64_t, ^) \
75 : V(I64Eq, uint64_t, ==) \
76 : V(I64Ne, uint64_t, !=) \
77 : V(I64LtU, uint64_t, <) \
78 : V(I64LeU, uint64_t, <=) \
79 : V(I64GtU, uint64_t, >) \
80 : V(I64GeU, uint64_t, >=) \
81 : V(I64LtS, int64_t, <) \
82 : V(I64LeS, int64_t, <=) \
83 : V(I64GtS, int64_t, >) \
84 : V(I64GeS, int64_t, >=) \
85 : V(F32Add, float, +) \
86 : V(F32Sub, float, -) \
87 : V(F32Eq, float, ==) \
88 : V(F32Ne, float, !=) \
89 : V(F32Lt, float, <) \
90 : V(F32Le, float, <=) \
91 : V(F32Gt, float, >) \
92 : V(F32Ge, float, >=) \
93 : V(F64Add, double, +) \
94 : V(F64Sub, double, -) \
95 : V(F64Eq, double, ==) \
96 : V(F64Ne, double, !=) \
97 : V(F64Lt, double, <) \
98 : V(F64Le, double, <=) \
99 : V(F64Gt, double, >) \
100 : V(F64Ge, double, >=) \
101 : V(F32Mul, float, *) \
102 : V(F64Mul, double, *) \
103 : V(F32Div, float, /) \
104 : V(F64Div, double, /)
105 :
106 : #define FOREACH_OTHER_BINOP(V) \
107 : V(I32DivS, int32_t) \
108 : V(I32DivU, uint32_t) \
109 : V(I32RemS, int32_t) \
110 : V(I32RemU, uint32_t) \
111 : V(I32Shl, uint32_t) \
112 : V(I32ShrU, uint32_t) \
113 : V(I32ShrS, int32_t) \
114 : V(I64DivS, int64_t) \
115 : V(I64DivU, uint64_t) \
116 : V(I64RemS, int64_t) \
117 : V(I64RemU, uint64_t) \
118 : V(I64Shl, uint64_t) \
119 : V(I64ShrU, uint64_t) \
120 : V(I64ShrS, int64_t) \
121 : V(I32Ror, int32_t) \
122 : V(I32Rol, int32_t) \
123 : V(I64Ror, int64_t) \
124 : V(I64Rol, int64_t) \
125 : V(F32Min, float) \
126 : V(F32Max, float) \
127 : V(F64Min, double) \
128 : V(F64Max, double) \
129 : V(I32AsmjsDivS, int32_t) \
130 : V(I32AsmjsDivU, uint32_t) \
131 : V(I32AsmjsRemS, int32_t) \
132 : V(I32AsmjsRemU, uint32_t) \
133 : V(F32CopySign, Float32) \
134 : V(F64CopySign, Float64)
135 :
136 : #define FOREACH_I32CONV_FLOATOP(V) \
137 : V(I32SConvertF32, int32_t, float) \
138 : V(I32SConvertF64, int32_t, double) \
139 : V(I32UConvertF32, uint32_t, float) \
140 : V(I32UConvertF64, uint32_t, double)
141 :
142 : #define FOREACH_OTHER_UNOP(V) \
143 : V(I32Clz, uint32_t) \
144 : V(I32Ctz, uint32_t) \
145 : V(I32Popcnt, uint32_t) \
146 : V(I32Eqz, uint32_t) \
147 : V(I64Clz, uint64_t) \
148 : V(I64Ctz, uint64_t) \
149 : V(I64Popcnt, uint64_t) \
150 : V(I64Eqz, uint64_t) \
151 : V(F32Abs, Float32) \
152 : V(F32Neg, Float32) \
153 : V(F32Ceil, float) \
154 : V(F32Floor, float) \
155 : V(F32Trunc, float) \
156 : V(F32NearestInt, float) \
157 : V(F64Abs, Float64) \
158 : V(F64Neg, Float64) \
159 : V(F64Ceil, double) \
160 : V(F64Floor, double) \
161 : V(F64Trunc, double) \
162 : V(F64NearestInt, double) \
163 : V(I32ConvertI64, int64_t) \
164 : V(I64SConvertF32, float) \
165 : V(I64SConvertF64, double) \
166 : V(I64UConvertF32, float) \
167 : V(I64UConvertF64, double) \
168 : V(I64SConvertI32, int32_t) \
169 : V(I64UConvertI32, uint32_t) \
170 : V(F32SConvertI32, int32_t) \
171 : V(F32UConvertI32, uint32_t) \
172 : V(F32SConvertI64, int64_t) \
173 : V(F32UConvertI64, uint64_t) \
174 : V(F32ConvertF64, double) \
175 : V(F32ReinterpretI32, int32_t) \
176 : V(F64SConvertI32, int32_t) \
177 : V(F64UConvertI32, uint32_t) \
178 : V(F64SConvertI64, int64_t) \
179 : V(F64UConvertI64, uint64_t) \
180 : V(F64ConvertF32, float) \
181 : V(F64ReinterpretI64, int64_t) \
182 : V(I32AsmjsSConvertF32, float) \
183 : V(I32AsmjsUConvertF32, float) \
184 : V(I32AsmjsSConvertF64, double) \
185 : V(I32AsmjsUConvertF64, double) \
186 : V(F32Sqrt, float) \
187 : V(F64Sqrt, double)
188 :
189 : namespace {
190 :
191 : constexpr uint32_t kFloat32SignBitMask = uint32_t{1} << 31;
192 : constexpr uint64_t kFloat64SignBitMask = uint64_t{1} << 63;
193 :
194 : inline int32_t ExecuteI32DivS(int32_t a, int32_t b, TrapReason* trap) {
195 41556 : if (b == 0) {
196 : *trap = kTrapDivByZero;
197 : return 0;
198 : }
199 38204 : if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
200 : *trap = kTrapDivUnrepresentable;
201 : return 0;
202 : }
203 38192 : return a / b;
204 : }
205 :
206 : inline uint32_t ExecuteI32DivU(uint32_t a, uint32_t b, TrapReason* trap) {
207 40932 : if (b == 0) {
208 : *trap = kTrapDivByZero;
209 : return 0;
210 : }
211 37656 : return a / b;
212 : }
213 :
214 : inline int32_t ExecuteI32RemS(int32_t a, int32_t b, TrapReason* trap) {
215 40964 : if (b == 0) {
216 : *trap = kTrapRemByZero;
217 : return 0;
218 : }
219 37672 : if (b == -1) return 0;
220 34400 : return a % b;
221 : }
222 :
223 : inline uint32_t ExecuteI32RemU(uint32_t a, uint32_t b, TrapReason* trap) {
224 40932 : if (b == 0) {
225 : *trap = kTrapRemByZero;
226 : return 0;
227 : }
228 37656 : return a % b;
229 : }
230 :
231 : inline uint32_t ExecuteI32Shl(uint32_t a, uint32_t b, TrapReason* trap) {
232 40912 : return a << (b & 0x1F);
233 : }
234 :
235 : inline uint32_t ExecuteI32ShrU(uint32_t a, uint32_t b, TrapReason* trap) {
236 40912 : return a >> (b & 0x1F);
237 : }
238 :
239 : inline int32_t ExecuteI32ShrS(int32_t a, int32_t b, TrapReason* trap) {
240 40912 : return a >> (b & 0x1F);
241 : }
242 :
243 : inline int64_t ExecuteI64DivS(int64_t a, int64_t b, TrapReason* trap) {
244 54316 : if (b == 0) {
245 : *trap = kTrapDivByZero;
246 : return 0;
247 : }
248 49672 : if (b == -1 && a == std::numeric_limits<int64_t>::min()) {
249 : *trap = kTrapDivUnrepresentable;
250 : return 0;
251 : }
252 49668 : return a / b;
253 : }
254 :
255 : inline uint64_t ExecuteI64DivU(uint64_t a, uint64_t b, TrapReason* trap) {
256 53708 : if (b == 0) {
257 : *trap = kTrapDivByZero;
258 : return 0;
259 : }
260 49128 : return a / b;
261 : }
262 :
263 : inline int64_t ExecuteI64RemS(int64_t a, int64_t b, TrapReason* trap) {
264 53712 : if (b == 0) {
265 : *trap = kTrapRemByZero;
266 : return 0;
267 : }
268 49132 : if (b == -1) return 0;
269 44884 : return a % b;
270 : }
271 :
272 : inline uint64_t ExecuteI64RemU(uint64_t a, uint64_t b, TrapReason* trap) {
273 53708 : if (b == 0) {
274 : *trap = kTrapRemByZero;
275 : return 0;
276 : }
277 49128 : return a % b;
278 : }
279 :
280 : inline uint64_t ExecuteI64Shl(uint64_t a, uint64_t b, TrapReason* trap) {
281 81232 : return a << (b & 0x3F);
282 : }
283 :
284 : inline uint64_t ExecuteI64ShrU(uint64_t a, uint64_t b, TrapReason* trap) {
285 81232 : return a >> (b & 0x3F);
286 : }
287 :
288 : inline int64_t ExecuteI64ShrS(int64_t a, int64_t b, TrapReason* trap) {
289 81232 : return a >> (b & 0x3F);
290 : }
291 :
292 : inline uint32_t ExecuteI32Ror(uint32_t a, uint32_t b, TrapReason* trap) {
293 26912 : return (a >> (b & 0x1F)) | (a << ((32 - b) & 0x1F));
294 : }
295 :
296 : inline uint32_t ExecuteI32Rol(uint32_t a, uint32_t b, TrapReason* trap) {
297 26912 : return (a << (b & 0x1F)) | (a >> ((32 - b) & 0x1F));
298 : }
299 :
300 : inline uint64_t ExecuteI64Ror(uint64_t a, uint64_t b, TrapReason* trap) {
301 26260 : return (a >> (b & 0x3F)) | (a << ((64 - b) & 0x3F));
302 : }
303 :
304 : inline uint64_t ExecuteI64Rol(uint64_t a, uint64_t b, TrapReason* trap) {
305 26260 : return (a << (b & 0x3F)) | (a >> ((64 - b) & 0x3F));
306 : }
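// Masking each shift amount (0x1F for 32 bit, 0x3F for 64 bit) keeps both
// shifts in range, so a rotation count that is a multiple of the width
// degenerates to (a >> 0) | (a << 0) == a rather than a shift by the full
// width, which would be undefined behavior in C++.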
307 :
308 : inline float ExecuteF32Min(float a, float b, TrapReason* trap) {
309 52912 : return JSMin(a, b);
310 : }
311 :
312 : inline float ExecuteF32Max(float a, float b, TrapReason* trap) {
313 52904 : return JSMax(a, b);
314 : }
315 :
316 : inline Float32 ExecuteF32CopySign(Float32 a, Float32 b, TrapReason* trap) {
317 52908 : return Float32::FromBits((a.get_bits() & ~kFloat32SignBitMask) |
318 52908 : (b.get_bits() & kFloat32SignBitMask));
319 : }
320 :
321 : inline double ExecuteF64Min(double a, double b, TrapReason* trap) {
322 9608 : return JSMin(a, b);
323 : }
324 :
325 : inline double ExecuteF64Max(double a, double b, TrapReason* trap) {
326 9616 : return JSMax(a, b);
327 : }
328 :
329 : inline Float64 ExecuteF64CopySign(Float64 a, Float64 b, TrapReason* trap) {
330 9612 : return Float64::FromBits((a.get_bits() & ~kFloat64SignBitMask) |
331 9612 : (b.get_bits() & kFloat64SignBitMask));
332 : }
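// Both copysign variants work on the raw bit representation: the result takes
// every bit of |a| except the sign bit, which is taken from |b|. For example,
// ExecuteF32CopySign(1.5f, -0.0f) yields -1.5f without performing a
// floating-point operation, so any NaN payload in |a| is preserved.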
333 :
334 : inline int32_t ExecuteI32AsmjsDivS(int32_t a, int32_t b, TrapReason* trap) {
335 2340 : if (b == 0) return 0;
336 2096 : if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
337 : return std::numeric_limits<int32_t>::min();
338 : }
339 2088 : return a / b;
340 : }
341 :
342 : inline uint32_t ExecuteI32AsmjsDivU(uint32_t a, uint32_t b, TrapReason* trap) {
343 20 : if (b == 0) return 0;
344 8 : return a / b;
345 : }
346 :
347 : inline int32_t ExecuteI32AsmjsRemS(int32_t a, int32_t b, TrapReason* trap) {
348 2340 : if (b == 0) return 0;
349 2096 : if (b == -1) return 0;
350 1860 : return a % b;
351 : }
352 :
353 : inline uint32_t ExecuteI32AsmjsRemU(uint32_t a, uint32_t b, TrapReason* trap) {
354 20 : if (b == 0) return 0;
355 8 : return a % b;
356 : }
357 :
358 : inline int32_t ExecuteI32AsmjsSConvertF32(float a, TrapReason* trap) {
359 460 : return DoubleToInt32(a);
360 : }
361 :
362 : inline uint32_t ExecuteI32AsmjsUConvertF32(float a, TrapReason* trap) {
363 460 : return DoubleToUint32(a);
364 : }
365 :
366 : inline int32_t ExecuteI32AsmjsSConvertF64(double a, TrapReason* trap) {
367 196 : return DoubleToInt32(a);
368 : }
369 :
370 : inline uint32_t ExecuteI32AsmjsUConvertF64(double a, TrapReason* trap) {
371 : return DoubleToUint32(a);
372 : }
373 :
374 : int32_t ExecuteI32Clz(uint32_t val, TrapReason* trap) {
375 : return base::bits::CountLeadingZeros(val);
376 : }
377 :
378 : uint32_t ExecuteI32Ctz(uint32_t val, TrapReason* trap) {
379 : return base::bits::CountTrailingZeros(val);
380 : }
381 :
382 : uint32_t ExecuteI32Popcnt(uint32_t val, TrapReason* trap) {
383 : return base::bits::CountPopulation(val);
384 : }
385 :
386 : inline uint32_t ExecuteI32Eqz(uint32_t val, TrapReason* trap) {
387 568 : return val == 0 ? 1 : 0;
388 : }
389 :
390 : int64_t ExecuteI64Clz(uint64_t val, TrapReason* trap) {
391 : return base::bits::CountLeadingZeros(val);
392 : }
393 :
394 : inline uint64_t ExecuteI64Ctz(uint64_t val, TrapReason* trap) {
395 260 : return base::bits::CountTrailingZeros(val);
396 : }
397 :
398 : inline int64_t ExecuteI64Popcnt(uint64_t val, TrapReason* trap) {
399 : return base::bits::CountPopulation(val);
400 : }
401 :
402 : inline int32_t ExecuteI64Eqz(uint64_t val, TrapReason* trap) {
403 332 : return val == 0 ? 1 : 0;
404 : }
405 :
406 : inline Float32 ExecuteF32Abs(Float32 a, TrapReason* trap) {
407 16 : return Float32::FromBits(a.get_bits() & ~kFloat32SignBitMask);
408 : }
409 :
410 : inline Float32 ExecuteF32Neg(Float32 a, TrapReason* trap) {
411 468 : return Float32::FromBits(a.get_bits() ^ kFloat32SignBitMask);
412 : }
413 :
414 460 : inline float ExecuteF32Ceil(float a, TrapReason* trap) { return ceilf(a); }
415 :
416 460 : inline float ExecuteF32Floor(float a, TrapReason* trap) { return floorf(a); }
417 :
418 460 : inline float ExecuteF32Trunc(float a, TrapReason* trap) { return truncf(a); }
419 :
420 : inline float ExecuteF32NearestInt(float a, TrapReason* trap) {
421 460 : return nearbyintf(a);
422 : }
423 :
424 : inline float ExecuteF32Sqrt(float a, TrapReason* trap) {
425 8 : float result = sqrtf(a);
426 : return result;
427 : }
428 :
429 : inline Float64 ExecuteF64Abs(Float64 a, TrapReason* trap) {
430 16 : return Float64::FromBits(a.get_bits() & ~kFloat64SignBitMask);
431 : }
432 :
433 : inline Float64 ExecuteF64Neg(Float64 a, TrapReason* trap) {
434 204 : return Float64::FromBits(a.get_bits() ^ kFloat64SignBitMask);
435 : }
436 :
437 196 : inline double ExecuteF64Ceil(double a, TrapReason* trap) { return ceil(a); }
438 :
439 196 : inline double ExecuteF64Floor(double a, TrapReason* trap) { return floor(a); }
440 :
441 196 : inline double ExecuteF64Trunc(double a, TrapReason* trap) { return trunc(a); }
442 :
443 : inline double ExecuteF64NearestInt(double a, TrapReason* trap) {
444 196 : return nearbyint(a);
445 : }
446 :
447 8 : inline double ExecuteF64Sqrt(double a, TrapReason* trap) { return sqrt(a); }
448 :
449 : template <typename int_type, typename float_type>
450 : int_type ExecuteConvert(float_type a, TrapReason* trap) {
451 2760 : if (is_inbounds<int_type>(a)) {
452 1552 : return static_cast<int_type>(a);
453 : }
454 : *trap = kTrapFloatUnrepresentable;
455 : return 0;
456 : }
457 :
458 : template <typename int_type, typename float_type>
459 : int_type ExecuteConvertSaturate(float_type a) {
460 : TrapReason base_trap = kTrapCount;
461 : int32_t val = ExecuteConvert<int_type>(a, &base_trap);
462 1312 : if (base_trap == kTrapCount) {
463 : return val;
464 : }
465 : return std::isnan(a) ? 0
466 : : (a < static_cast<float_type>(0.0)
467 : ? std::numeric_limits<int_type>::min()
468 604 : : std::numeric_limits<int_type>::max());
469 : }
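// Saturating semantics: out-of-range inputs clamp to the integer type's
// minimum or maximum instead of trapping, and NaN converts to 0. For example,
// ExecuteConvertSaturate<int32_t>(3e10f) returns
// std::numeric_limits<int32_t>::max(), while
// ExecuteConvertSaturate<int32_t>(std::nanf("")) returns 0.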
470 :
471 : template <typename dst_type, typename src_type, void (*fn)(Address)>
472 604 : inline dst_type CallExternalIntToFloatFunction(src_type input) {
473 604 : uint8_t data[std::max(sizeof(dst_type), sizeof(src_type))];
474 604 : Address data_addr = reinterpret_cast<Address>(data);
475 : WriteUnalignedValue<src_type>(data_addr, input);
476 604 : fn(data_addr);
477 1208 : return ReadUnalignedValue<dst_type>(data_addr);
478 : }
479 :
480 : template <typename dst_type, typename src_type, int32_t (*fn)(Address)>
481 2956 : inline dst_type CallExternalFloatToIntFunction(src_type input,
482 : TrapReason* trap) {
483 2956 : uint8_t data[std::max(sizeof(dst_type), sizeof(src_type))];
484 2956 : Address data_addr = reinterpret_cast<Address>(data);
485 : WriteUnalignedValue<src_type>(data_addr, input);
486 2956 : if (!fn(data_addr)) *trap = kTrapFloatUnrepresentable;
487 5912 : return ReadUnalignedValue<dst_type>(data_addr);
488 : }
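// Both call helpers pass values to the external C wrappers through a small
// on-stack scratch buffer: the input is written unaligned, the wrapper
// overwrites it with the result, and the converted value is read back. For
// the float-to-int wrappers a zero return value indicates that the input was
// not representable, which is reported as kTrapFloatUnrepresentable.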
489 :
490 : inline uint32_t ExecuteI32ConvertI64(int64_t a, TrapReason* trap) {
491 158004 : return static_cast<uint32_t>(a & 0xFFFFFFFF);
492 : }
493 :
494 : int64_t ExecuteI64SConvertF32(float a, TrapReason* trap) {
495 : return CallExternalFloatToIntFunction<int64_t, float,
496 920 : float32_to_int64_wrapper>(a, trap);
497 : }
498 :
499 460 : int64_t ExecuteI64SConvertSatF32(float a) {
500 460 : TrapReason base_trap = kTrapCount;
501 : int64_t val = ExecuteI64SConvertF32(a, &base_trap);
502 460 : if (base_trap == kTrapCount) {
503 : return val;
504 : }
505 : return std::isnan(a) ? 0
506 : : (a < 0.0 ? std::numeric_limits<int64_t>::min()
507 128 : : std::numeric_limits<int64_t>::max());
508 : }
509 :
510 : int64_t ExecuteI64SConvertF64(double a, TrapReason* trap) {
511 : return CallExternalFloatToIntFunction<int64_t, double,
512 724 : float64_to_int64_wrapper>(a, trap);
513 : }
514 :
515 196 : int64_t ExecuteI64SConvertSatF64(double a) {
516 196 : TrapReason base_trap = kTrapCount;
517 : int64_t val = ExecuteI64SConvertF64(a, &base_trap);
518 196 : if (base_trap == kTrapCount) {
519 : return val;
520 : }
521 : return std::isnan(a) ? 0
522 : : (a < 0.0 ? std::numeric_limits<int64_t>::min()
523 44 : : std::numeric_limits<int64_t>::max());
524 : }
525 :
526 : uint64_t ExecuteI64UConvertF32(float a, TrapReason* trap) {
527 : return CallExternalFloatToIntFunction<uint64_t, float,
528 920 : float32_to_uint64_wrapper>(a, trap);
529 : }
530 :
531 460 : uint64_t ExecuteI64UConvertSatF32(float a) {
532 460 : TrapReason base_trap = kTrapCount;
533 : uint64_t val = ExecuteI64UConvertF32(a, &base_trap);
534 460 : if (base_trap == kTrapCount) {
535 : return val;
536 : }
537 : return std::isnan(a) ? 0
538 : : (a < 0.0 ? std::numeric_limits<uint64_t>::min()
539 256 : : std::numeric_limits<uint64_t>::max());
540 : }
541 :
542 : uint64_t ExecuteI64UConvertF64(double a, TrapReason* trap) {
543 : return CallExternalFloatToIntFunction<uint64_t, double,
544 392 : float64_to_uint64_wrapper>(a, trap);
545 : }
546 :
547 196 : uint64_t ExecuteI64UConvertSatF64(double a) {
548 196 : TrapReason base_trap = kTrapCount;
549 : int64_t val = ExecuteI64UConvertF64(a, &base_trap);
550 196 : if (base_trap == kTrapCount) {
551 : return val;
552 : }
553 : return std::isnan(a) ? 0
554 : : (a < 0.0 ? std::numeric_limits<uint64_t>::min()
555 80 : : std::numeric_limits<uint64_t>::max());
556 : }
557 :
558 : inline int64_t ExecuteI64SConvertI32(int32_t a, TrapReason* trap) {
559 : return static_cast<int64_t>(a);
560 : }
561 :
562 : inline int64_t ExecuteI64UConvertI32(uint32_t a, TrapReason* trap) {
563 : return static_cast<uint64_t>(a);
564 : }
565 :
566 : inline float ExecuteF32SConvertI32(int32_t a, TrapReason* trap) {
567 24 : return static_cast<float>(a);
568 : }
569 :
570 : inline float ExecuteF32UConvertI32(uint32_t a, TrapReason* trap) {
571 8 : return static_cast<float>(a);
572 : }
573 :
574 : inline float ExecuteF32SConvertI64(int64_t a, TrapReason* trap) {
575 324 : return static_cast<float>(a);
576 : }
577 :
578 : inline float ExecuteF32UConvertI64(uint64_t a, TrapReason* trap) {
579 : return CallExternalIntToFloatFunction<float, uint64_t,
580 304 : uint64_to_float32_wrapper>(a);
581 : }
582 :
583 : inline float ExecuteF32ConvertF64(double a, TrapReason* trap) {
584 8 : return static_cast<float>(a);
585 : }
586 :
587 : inline Float32 ExecuteF32ReinterpretI32(int32_t a, TrapReason* trap) {
588 : return Float32::FromBits(a);
589 : }
590 :
591 : inline double ExecuteF64SConvertI32(int32_t a, TrapReason* trap) {
592 936 : return static_cast<double>(a);
593 : }
594 :
595 : inline double ExecuteF64UConvertI32(uint32_t a, TrapReason* trap) {
596 8 : return static_cast<double>(a);
597 : }
598 :
599 : inline double ExecuteF64SConvertI64(int64_t a, TrapReason* trap) {
600 15372 : return static_cast<double>(a);
601 : }
602 :
603 : inline double ExecuteF64UConvertI64(uint64_t a, TrapReason* trap) {
604 : return CallExternalIntToFloatFunction<double, uint64_t,
605 300 : uint64_to_float64_wrapper>(a);
606 : }
607 :
608 : inline double ExecuteF64ConvertF32(float a, TrapReason* trap) {
609 1396 : return static_cast<double>(a);
610 : }
611 :
612 : inline Float64 ExecuteF64ReinterpretI64(int64_t a, TrapReason* trap) {
613 : return Float64::FromBits(a);
614 : }
615 :
616 : inline int32_t ExecuteI32ReinterpretF32(WasmValue a) {
617 : return a.to_f32_boxed().get_bits();
618 : }
619 :
620 : inline int64_t ExecuteI64ReinterpretF64(WasmValue a) {
621 : return a.to_f64_boxed().get_bits();
622 : }
623 :
624 : enum InternalOpcode {
625 : #define DECL_INTERNAL_ENUM(name, value) kInternal##name = value,
626 : FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_ENUM)
627 : #undef DECL_INTERNAL_ENUM
628 : };
629 :
630 : const char* OpcodeName(uint32_t val) {
631 0 : switch (val) {
632 : #define DECL_INTERNAL_CASE(name, value) \
633 : case kInternal##name: \
634 : return "Internal" #name;
635 : FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_CASE)
636 : #undef DECL_INTERNAL_CASE
637 : }
638 0 : return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(val));
639 : }
640 :
641 : constexpr uint32_t kCatchInArity = 1;
642 :
643 : } // namespace
644 :
645 : class SideTable;
646 :
647 : // Code and metadata needed to execute a function.
648 1117800 : struct InterpreterCode {
649 : const WasmFunction* function; // wasm function
650 : BodyLocalDecls locals; // local declarations
651 : const byte* orig_start; // start of original code
652 : const byte* orig_end; // end of original code
653 : byte* start; // start of (maybe altered) code
654 : byte* end; // end of (maybe altered) code
655 : SideTable* side_table; // precomputed side table for control flow.
656 :
657 33201681 : const byte* at(pc_t pc) { return start + pc; }
658 : };
659 :
660 : // A helper class to compute the control transfers for each bytecode offset.
661 : // Control transfers allow Br, BrIf, BrTable, If, Else, and End bytecodes to
662 : // be directly executed without the need to dynamically track blocks.
663 30 : class SideTable : public ZoneObject {
664 : public:
665 : ControlTransferMap map_;
666 : uint32_t max_stack_height_ = 0;
667 :
668 367008 : SideTable(Zone* zone, const WasmModule* module, InterpreterCode* code)
669 367008 : : map_(zone) {
670 : // Create a zone for all temporary objects.
671 734016 : Zone control_transfer_zone(zone->allocator(), ZONE_NAME);
672 :
673 : // Represents a control flow label.
674 : class CLabel : public ZoneObject {
675 : explicit CLabel(Zone* zone, uint32_t target_stack_height, uint32_t arity)
676 : : target_stack_height(target_stack_height),
677 : arity(arity),
678 389598 : refs(zone) {}
679 :
680 : public:
681 : struct Ref {
682 : const byte* from_pc;
683 : const uint32_t stack_height;
684 : };
685 : const byte* target = nullptr;
686 : uint32_t target_stack_height;
687 : // Arity when branching to this label.
688 : const uint32_t arity;
689 : ZoneVector<Ref> refs;
690 :
691 : static CLabel* New(Zone* zone, uint32_t stack_height, uint32_t arity) {
692 : return new (zone) CLabel(zone, stack_height, arity);
693 : }
694 :
695 : // Bind this label to the given PC.
696 : void Bind(const byte* pc) {
697 : DCHECK_NULL(target);
698 389598 : target = pc;
699 : }
700 :
701 : // Reference this label from the given location.
702 : void Ref(const byte* from_pc, uint32_t stack_height) {
703 : // Target being bound before a reference means this is a loop.
704 : DCHECK_IMPLIES(target, *target == kExprLoop);
705 35988 : refs.push_back({from_pc, stack_height});
706 : }
707 :
708 389598 : void Finish(ControlTransferMap* map, const byte* start) {
709 : DCHECK_NOT_NULL(target);
710 407592 : for (auto ref : refs) {
711 17994 : size_t offset = static_cast<size_t>(ref.from_pc - start);
712 17994 : auto pcdiff = static_cast<pcdiff_t>(target - ref.from_pc);
713 : DCHECK_GE(ref.stack_height, target_stack_height);
714 : spdiff_t spdiff =
715 17994 : static_cast<spdiff_t>(ref.stack_height - target_stack_height);
716 : TRACE("control transfer @%zu: Δpc %d, stack %u->%u = -%u\n", offset,
717 : pcdiff, ref.stack_height, target_stack_height, spdiff);
718 17994 : ControlTransferEntry& entry = (*map)[offset];
719 17994 : entry.pc_diff = pcdiff;
720 17994 : entry.sp_diff = spdiff;
721 17994 : entry.target_arity = arity;
722 : }
723 389598 : }
724 : };
725 :
726 : // An entry in the control stack.
727 : struct Control {
728 : const byte* pc;
729 : CLabel* end_label;
730 : CLabel* else_label;
731 : // Arity (number of values on the stack) when exiting this control
732 : // structure via |end|.
733 : uint32_t exit_arity;
734 : // Track whether this block was already left, i.e. all further
735 : // instructions are unreachable.
736 : bool unreachable = false;
737 :
738 : Control(const byte* pc, CLabel* end_label, CLabel* else_label,
739 : uint32_t exit_arity)
740 : : pc(pc),
741 : end_label(end_label),
742 : else_label(else_label),
743 389054 : exit_arity(exit_arity) {}
744 : Control(const byte* pc, CLabel* end_label, uint32_t exit_arity)
745 : : Control(pc, end_label, nullptr, exit_arity) {}
746 :
747 389054 : void Finish(ControlTransferMap* map, const byte* start) {
748 389054 : end_label->Finish(map, start);
749 389054 : if (else_label) else_label->Finish(map, start);
750 389054 : }
751 : };
752 :
753 : // Compute the ControlTransfer map.
754 : // This algorithm maintains a stack of control constructs similar to the
755 : // AST decoder. The {control_stack} allows matching {br,br_if,br_table}
756 : // bytecodes with their target, as well as determining whether the current
757 : // bytecodes are within the true or false block of an else.
758 : ZoneVector<Control> control_stack(&control_transfer_zone);
759 : // It also maintains a stack of all nested {try} blocks to resolve local
760 : // handler targets for potentially throwing operations. These exceptional
761 : // control transfers are treated just like other branches in the resulting
762 : // map. This stack contains indices into the above control stack.
763 : ZoneVector<size_t> exception_stack(zone);
764 : uint32_t stack_height = 0;
765 : uint32_t func_arity =
766 367008 : static_cast<uint32_t>(code->function->sig->return_count());
767 : CLabel* func_label =
768 367008 : CLabel::New(&control_transfer_zone, stack_height, func_arity);
769 367008 : control_stack.emplace_back(code->orig_start, func_label, func_arity);
770 : auto control_parent = [&]() -> Control& {
771 : DCHECK_LE(2, control_stack.size());
772 44701 : return control_stack[control_stack.size() - 2];
773 367008 : };
774 : auto copy_unreachable = [&] {
775 22249 : control_stack.back().unreachable = control_parent().unreachable;
776 : };
777 2369423 : for (BytecodeIterator i(code->orig_start, code->orig_end, &code->locals);
778 2002415 : i.has_next(); i.next()) {
779 : WasmOpcode opcode = i.current();
780 : uint32_t exceptional_stack_height = 0;
781 2002415 : if (WasmOpcodes::IsPrefixOpcode(opcode)) opcode = i.prefixed_opcode();
782 2002415 : bool unreachable = control_stack.back().unreachable;
783 2002415 : if (unreachable) {
784 : TRACE("@%u: %s (is unreachable)\n", i.pc_offset(),
785 : WasmOpcodes::OpcodeName(opcode));
786 : } else {
787 : auto stack_effect =
788 1979600 : StackEffect(module, code->function->sig, i.pc(), i.end());
789 : TRACE("@%u: %s (sp %d - %d + %d)\n", i.pc_offset(),
790 : WasmOpcodes::OpcodeName(opcode), stack_height, stack_effect.first,
791 : stack_effect.second);
792 : DCHECK_GE(stack_height, stack_effect.first);
793 : DCHECK_GE(kMaxUInt32, static_cast<uint64_t>(stack_height) -
794 : stack_effect.first + stack_effect.second);
795 1979600 : exceptional_stack_height = stack_height - stack_effect.first;
796 1979600 : stack_height = stack_height - stack_effect.first + stack_effect.second;
797 1979600 : if (stack_height > max_stack_height_) max_stack_height_ = stack_height;
798 : }
799 2002415 : if (!exception_stack.empty() && WasmOpcodes::IsThrowingOpcode(opcode)) {
800 : // Record exceptional control flow from potentially throwing opcodes to
801 : // the local handler if one is present. The stack height at the throw
802 : // point is assumed to have popped all operands and not pushed any yet.
803 : DCHECK_GE(control_stack.size() - 1, exception_stack.back());
804 36 : const Control* c = &control_stack[exception_stack.back()];
805 36 : if (!unreachable) c->else_label->Ref(i.pc(), exceptional_stack_height);
806 : TRACE("handler @%u: %s -> try @%u\n", i.pc_offset(), OpcodeName(opcode),
807 : static_cast<uint32_t>(c->pc - code->start));
808 : }
809 2002415 : switch (opcode) {
810 : case kExprBlock:
811 : case kExprLoop: {
812 : bool is_loop = opcode == kExprLoop;
813 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
814 21502 : i.pc());
815 21502 : if (imm.type == kWasmVar) {
816 16 : imm.sig = module->signatures[imm.sig_index];
817 : }
818 : TRACE("control @%u: %s, arity %d->%d\n", i.pc_offset(),
819 : is_loop ? "Loop" : "Block", imm.in_arity(), imm.out_arity());
820 : CLabel* label =
821 21502 : CLabel::New(&control_transfer_zone, stack_height,
822 21502 : is_loop ? imm.in_arity() : imm.out_arity());
823 21502 : control_stack.emplace_back(i.pc(), label, imm.out_arity());
824 : copy_unreachable();
825 21502 : if (is_loop) label->Bind(i.pc());
826 : break;
827 : }
828 : case kExprIf: {
829 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
830 508 : i.pc());
831 508 : if (imm.type == kWasmVar) {
832 8 : imm.sig = module->signatures[imm.sig_index];
833 : }
834 : TRACE("control @%u: If, arity %d->%d\n", i.pc_offset(),
835 : imm.in_arity(), imm.out_arity());
836 : CLabel* end_label = CLabel::New(&control_transfer_zone, stack_height,
837 508 : imm.out_arity());
838 : CLabel* else_label =
839 508 : CLabel::New(&control_transfer_zone, stack_height, 0);
840 1016 : control_stack.emplace_back(i.pc(), end_label, else_label,
841 508 : imm.out_arity());
842 : copy_unreachable();
843 508 : if (!unreachable) else_label->Ref(i.pc(), stack_height);
844 : break;
845 : }
846 : case kExprElse: {
847 : Control* c = &control_stack.back();
848 : copy_unreachable();
849 : TRACE("control @%u: Else\n", i.pc_offset());
850 167 : if (!control_parent().unreachable) {
851 162 : c->end_label->Ref(i.pc(), stack_height);
852 : }
853 : DCHECK_NOT_NULL(c->else_label);
854 167 : c->else_label->Bind(i.pc() + 1);
855 167 : c->else_label->Finish(&map_, code->orig_start);
856 167 : c->else_label = nullptr;
857 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
858 167 : stack_height = c->end_label->target_stack_height;
859 167 : break;
860 : }
861 : case kExprTry: {
862 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
863 36 : i.pc());
864 36 : if (imm.type == kWasmVar) {
865 0 : imm.sig = module->signatures[imm.sig_index];
866 : }
867 : TRACE("control @%u: Try, arity %d->%d\n", i.pc_offset(),
868 : imm.in_arity(), imm.out_arity());
869 : CLabel* end_label = CLabel::New(&control_transfer_zone, stack_height,
870 36 : imm.out_arity());
871 : CLabel* catch_label =
872 36 : CLabel::New(&control_transfer_zone, stack_height, kCatchInArity);
873 72 : control_stack.emplace_back(i.pc(), end_label, catch_label,
874 36 : imm.out_arity());
875 72 : exception_stack.push_back(control_stack.size() - 1);
876 : copy_unreachable();
877 : break;
878 : }
879 : case kExprCatch: {
880 : DCHECK_EQ(control_stack.size() - 1, exception_stack.back());
881 : Control* c = &control_stack.back();
882 : exception_stack.pop_back();
883 : copy_unreachable();
884 : TRACE("control @%u: Catch\n", i.pc_offset());
885 36 : if (!control_parent().unreachable) {
886 36 : c->end_label->Ref(i.pc(), stack_height);
887 : }
888 : DCHECK_NOT_NULL(c->else_label);
889 36 : c->else_label->Bind(i.pc() + 1);
890 36 : c->else_label->Finish(&map_, code->orig_start);
891 36 : c->else_label = nullptr;
892 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
893 36 : stack_height = c->end_label->target_stack_height + kCatchInArity;
894 36 : break;
895 : }
896 : case kExprEnd: {
897 : Control* c = &control_stack.back();
898 : TRACE("control @%u: End\n", i.pc_offset());
899 : // Only loops have bound labels.
900 : DCHECK_IMPLIES(c->end_label->target, *c->pc == kExprLoop);
901 389054 : if (!c->end_label->target) {
902 388885 : if (c->else_label) c->else_label->Bind(i.pc());
903 388885 : c->end_label->Bind(i.pc() + 1);
904 : }
905 389054 : c->Finish(&map_, code->orig_start);
906 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
907 389054 : stack_height = c->end_label->target_stack_height + c->exit_arity;
908 : control_stack.pop_back();
909 : break;
910 : }
911 : case kExprBr: {
912 : BranchDepthImmediate<Decoder::kNoValidate> imm(&i, i.pc());
913 : TRACE("control @%u: Br[depth=%u]\n", i.pc_offset(), imm.depth);
914 430 : Control* c = &control_stack[control_stack.size() - imm.depth - 1];
915 430 : if (!unreachable) c->end_label->Ref(i.pc(), stack_height);
916 : break;
917 : }
918 : case kExprBrIf: {
919 : BranchDepthImmediate<Decoder::kNoValidate> imm(&i, i.pc());
920 : TRACE("control @%u: BrIf[depth=%u]\n", i.pc_offset(), imm.depth);
921 97 : Control* c = &control_stack[control_stack.size() - imm.depth - 1];
922 97 : if (!unreachable) c->end_label->Ref(i.pc(), stack_height);
923 : break;
924 : }
925 : case kExprBrTable: {
926 4205 : BranchTableImmediate<Decoder::kNoValidate> imm(&i, i.pc());
927 : BranchTableIterator<Decoder::kNoValidate> iterator(&i, imm);
928 : TRACE("control @%u: BrTable[count=%u]\n", i.pc_offset(),
929 : imm.table_count);
930 4205 : if (!unreachable) {
931 20945 : while (iterator.has_next()) {
932 : uint32_t j = iterator.cur_index();
933 16740 : uint32_t target = iterator.next();
934 16740 : Control* c = &control_stack[control_stack.size() - target - 1];
935 16740 : c->end_label->Ref(i.pc() + j, stack_height);
936 : }
937 : }
938 : break;
939 : }
940 : default:
941 : break;
942 : }
943 2002415 : if (WasmOpcodes::IsUnconditionalJump(opcode)) {
944 21588 : control_stack.back().unreachable = true;
945 : }
946 : }
947 : DCHECK_EQ(0, control_stack.size());
948 : DCHECK_EQ(func_arity, stack_height);
949 367008 : }
950 :
951 : bool HasEntryAt(pc_t from) {
952 : auto result = map_.find(from);
953 : return result != map_.end();
954 : }
955 :
956 : ControlTransferEntry& Lookup(pc_t from) {
957 : auto result = map_.find(from);
958 : DCHECK(result != map_.end());
959 : return result->second;
960 : }
961 : };
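// Example of what the side table encodes: for a body such as
//   block (result i32) ... br 0 ... end
// the ControlTransferEntry recorded at the br's offset holds a pc_diff that
// jumps just past the matching end, an sp_diff that drops the values pushed
// since the block was entered, and target_arity == 1 so the block's result is
// carried across the branch. DoBreak below consumes exactly these three
// fields, so no control stack needs to be maintained at execution time.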
962 :
963 : // The main storage for interpreter code. It maps {WasmFunction} to the
964 : // metadata needed to execute each function.
965 365686 : class CodeMap {
966 : Zone* zone_;
967 : const WasmModule* module_;
968 : ZoneVector<InterpreterCode> interpreter_code_;
969 :
970 : public:
971 365686 : CodeMap(const WasmModule* module, const uint8_t* module_start, Zone* zone)
972 365686 : : zone_(zone), module_(module), interpreter_code_(zone) {
973 365686 : if (module == nullptr) return;
974 365686 : interpreter_code_.reserve(module->functions.size());
975 368196 : for (const WasmFunction& function : module->functions) {
976 2510 : if (function.imported) {
977 : DCHECK(!function.code.is_set());
978 1364 : AddFunction(&function, nullptr, nullptr);
979 : } else {
980 1146 : AddFunction(&function, module_start + function.code.offset(),
981 1146 : module_start + function.code.end_offset());
982 : }
983 : }
984 : }
985 :
986 : const WasmModule* module() const { return module_; }
987 :
988 : InterpreterCode* GetCode(const WasmFunction* function) {
989 : InterpreterCode* code = GetCode(function->func_index);
990 : DCHECK_EQ(function, code->function);
991 : return code;
992 : }
993 :
994 : InterpreterCode* GetCode(uint32_t function_index) {
995 : DCHECK_LT(function_index, interpreter_code_.size());
996 10670400 : return Preprocess(&interpreter_code_[function_index]);
997 : }
998 :
999 5701128 : InterpreterCode* Preprocess(InterpreterCode* code) {
1000 : DCHECK_EQ(code->function->imported, code->start == nullptr);
1001 5701128 : if (!code->side_table && code->start) {
1002 : // Compute the control targets map and the local declarations.
1003 733956 : code->side_table = new (zone_) SideTable(zone_, module_, code);
1004 : }
1005 5701128 : return code;
1006 : }
1007 :
1008 369918 : void AddFunction(const WasmFunction* function, const byte* code_start,
1009 : const byte* code_end) {
1010 : InterpreterCode code = {
1011 : function, BodyLocalDecls(zone_), code_start,
1012 : code_end, const_cast<byte*>(code_start), const_cast<byte*>(code_end),
1013 739836 : nullptr};
1014 :
1015 : DCHECK_EQ(interpreter_code_.size(), function->func_index);
1016 369918 : interpreter_code_.push_back(code);
1017 369918 : }
1018 :
1019 : void SetFunctionCode(const WasmFunction* function, const byte* start,
1020 : const byte* end) {
1021 : DCHECK_LT(function->func_index, interpreter_code_.size());
1022 365928 : InterpreterCode* code = &interpreter_code_[function->func_index];
1023 : DCHECK_EQ(function, code->function);
1024 365928 : code->orig_start = start;
1025 365928 : code->orig_end = end;
1026 365928 : code->start = const_cast<byte*>(start);
1027 365928 : code->end = const_cast<byte*>(end);
1028 365928 : code->side_table = nullptr;
1029 365928 : Preprocess(code);
1030 : }
1031 : };
1032 :
1033 : namespace {
1034 :
1035 : struct ExternalCallResult {
1036 : enum Type {
1037 : // The function should be executed inside this interpreter.
1038 : INTERNAL,
1039 : // For indirect calls: Table or function does not exist.
1040 : INVALID_FUNC,
1041 : // For indirect calls: Signature does not match expected signature.
1042 : SIGNATURE_MISMATCH,
1043 : // The function was executed and returned normally.
1044 : EXTERNAL_RETURNED,
1045 : // The function was executed, threw an exception, and the stack was unwound.
1046 : EXTERNAL_UNWOUND,
1047 : // The function was executed and threw an exception that was locally caught.
1048 : EXTERNAL_CAUGHT
1049 : };
1050 : Type type;
1051 : // If type is INTERNAL, this field holds the function to call internally.
1052 : InterpreterCode* interpreter_code;
1053 :
1054 : ExternalCallResult(Type type) : type(type) { // NOLINT
1055 : DCHECK_NE(INTERNAL, type);
1056 : }
1057 : ExternalCallResult(Type type, InterpreterCode* code)
1058 : : type(type), interpreter_code(code) {
1059 : DCHECK_EQ(INTERNAL, type);
1060 : }
1061 : };
1062 :
1063 : // Like a static_cast from src to dst, but specialized for boxed floats.
1064 : template <typename dst, typename src>
1065 : struct converter {
1066 512 : dst operator()(src val) const { return static_cast<dst>(val); }
1067 : };
1068 : template <>
1069 : struct converter<Float64, uint64_t> {
1070 : Float64 operator()(uint64_t val) const { return Float64::FromBits(val); }
1071 : };
1072 : template <>
1073 : struct converter<Float32, uint32_t> {
1074 : Float32 operator()(uint32_t val) const { return Float32::FromBits(val); }
1075 : };
1076 : template <>
1077 : struct converter<uint64_t, Float64> {
1078 : uint64_t operator()(Float64 val) const { return val.get_bits(); }
1079 : };
1080 : template <>
1081 : struct converter<uint32_t, Float32> {
1082 : uint32_t operator()(Float32 val) const { return val.get_bits(); }
1083 : };
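// The boxed-float specializations convert via the raw bit pattern rather than
// a numeric static_cast, so reinterpret-style conversions keep the exact bits
// (including NaN payloads) instead of potentially canonicalizing them.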
1084 :
1085 : template <typename T>
1086 : V8_INLINE bool has_nondeterminism(T val) {
1087 : static_assert(!std::is_floating_point<T>::value, "missing specialization");
1088 : return false;
1089 : }
1090 : template <>
1091 : V8_INLINE bool has_nondeterminism<float>(float val) {
1092 : return std::isnan(val);
1093 : }
1094 : template <>
1095 : V8_INLINE bool has_nondeterminism<double>(double val) {
1096 : return std::isnan(val);
1097 : }
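// A NaN result is flagged as possible nondeterminism because wasm does not
// fully specify the bit pattern of generated NaNs, so an interpreted run and
// a compiled run may legitimately observe different NaN bits.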
1098 :
1099 : } // namespace
1100 :
1101 : // Responsible for executing code directly.
1102 365686 : class ThreadImpl {
1103 : struct Activation {
1104 : uint32_t fp;
1105 : sp_t sp;
1106 34383 : Activation(uint32_t fp, sp_t sp) : fp(fp), sp(sp) {}
1107 : };
1108 :
1109 : public:
1110 : ThreadImpl(Zone* zone, CodeMap* codemap,
1111 : Handle<WasmInstanceObject> instance_object)
1112 : : codemap_(codemap),
1113 : instance_object_(instance_object),
1114 : frames_(zone),
1115 1097058 : activations_(zone) {}
1116 :
1117 : //==========================================================================
1118 : // Implementation of public interface for WasmInterpreter::Thread.
1119 : //==========================================================================
1120 :
1121 : WasmInterpreter::State state() { return state_; }
1122 :
1123 4735487 : void InitFrame(const WasmFunction* function, WasmValue* args) {
1124 : DCHECK_EQ(current_activation().fp, frames_.size());
1125 : InterpreterCode* code = codemap()->GetCode(function);
1126 4735487 : size_t num_params = function->sig->parameter_count();
1127 4735487 : EnsureStackSpace(num_params);
1128 : Push(args, num_params);
1129 4735487 : PushFrame(code);
1130 4735487 : }
1131 :
1132 : WasmInterpreter::State Run(int num_steps = -1) {
1133 : DCHECK(state_ == WasmInterpreter::STOPPED ||
1134 : state_ == WasmInterpreter::PAUSED);
1135 : DCHECK(num_steps == -1 || num_steps > 0);
1136 : if (num_steps == -1) {
1137 : TRACE(" => Run()\n");
1138 : } else if (num_steps == 1) {
1139 : TRACE(" => Step()\n");
1140 : } else {
1141 : TRACE(" => Run(%d)\n", num_steps);
1142 : }
1143 4740931 : state_ = WasmInterpreter::RUNNING;
1144 4740931 : Execute(frames_.back().code, frames_.back().pc, num_steps);
1145 : // If state_ is STOPPED, the current activation must be fully unwound.
1146 : DCHECK_IMPLIES(state_ == WasmInterpreter::STOPPED,
1147 : current_activation().fp == frames_.size());
1148 4740931 : return state_;
1149 : }
1150 :
1151 0 : void Pause() { UNIMPLEMENTED(); }
1152 :
1153 : void Reset() {
1154 : TRACE("----- RESET -----\n");
1155 4701106 : sp_ = stack_.get();
1156 : frames_.clear();
1157 4701106 : state_ = WasmInterpreter::STOPPED;
1158 4701106 : trap_reason_ = kTrapCount;
1159 4701106 : possible_nondeterminism_ = false;
1160 : }
1161 :
1162 : int GetFrameCount() {
1163 : DCHECK_GE(kMaxInt, frames_.size());
1164 6736 : return static_cast<int>(frames_.size());
1165 : }
1166 :
1167 : WasmValue GetReturnValue(uint32_t index) {
1168 4697217 : if (state_ == WasmInterpreter::TRAPPED) return WasmValue(0xDEADBEEF);
1169 : DCHECK_EQ(WasmInterpreter::FINISHED, state_);
1170 : Activation act = current_activation();
1171 : // Current activation must be finished.
1172 : DCHECK_EQ(act.fp, frames_.size());
1173 4697217 : return GetStackValue(act.sp + index);
1174 : }
1175 :
1176 : WasmValue GetStackValue(sp_t index) {
1177 : DCHECK_GT(StackHeight(), index);
1178 16964477 : return stack_[index];
1179 : }
1180 :
1181 : void SetStackValue(sp_t index, WasmValue value) {
1182 : DCHECK_GT(StackHeight(), index);
1183 3876618 : stack_[index] = value;
1184 : }
1185 :
1186 : TrapReason GetTrapReason() { return trap_reason_; }
1187 :
1188 : pc_t GetBreakpointPc() { return break_pc_; }
1189 :
1190 : bool PossibleNondeterminism() { return possible_nondeterminism_; }
1191 :
1192 : uint64_t NumInterpretedCalls() { return num_interpreted_calls_; }
1193 :
1194 40 : void AddBreakFlags(uint8_t flags) { break_flags_ |= flags; }
1195 :
1196 0 : void ClearBreakFlags() { break_flags_ = WasmInterpreter::BreakFlag::None; }
1197 :
1198 : uint32_t NumActivations() {
1199 24 : return static_cast<uint32_t>(activations_.size());
1200 : }
1201 :
1202 34383 : uint32_t StartActivation() {
1203 : TRACE("----- START ACTIVATION %zu -----\n", activations_.size());
1204 : // If you use activations, use them consistently:
1205 : DCHECK_IMPLIES(activations_.empty(), frames_.empty());
1206 : DCHECK_IMPLIES(activations_.empty(), StackHeight() == 0);
1207 34383 : uint32_t activation_id = static_cast<uint32_t>(activations_.size());
1208 103149 : activations_.emplace_back(static_cast<uint32_t>(frames_.size()),
1209 34383 : StackHeight());
1210 34383 : state_ = WasmInterpreter::STOPPED;
1211 34383 : return activation_id;
1212 : }
1213 :
1214 : void FinishActivation(uint32_t id) {
1215 : TRACE("----- FINISH ACTIVATION %zu -----\n", activations_.size() - 1);
1216 : DCHECK_LT(0, activations_.size());
1217 : DCHECK_EQ(activations_.size() - 1, id);
1218 : // Stack height must match the start of this activation (otherwise unwind
1219 : // first).
1220 : DCHECK_EQ(activations_.back().fp, frames_.size());
1221 : DCHECK_LE(activations_.back().sp, StackHeight());
1222 34381 : sp_ = stack_.get() + activations_.back().sp;
1223 : activations_.pop_back();
1224 : }
1225 :
1226 : uint32_t ActivationFrameBase(uint32_t id) {
1227 : DCHECK_GT(activations_.size(), id);
1228 11544 : return activations_[id].fp;
1229 : }
1230 :
1231 788 : WasmInterpreter::Thread::ExceptionHandlingResult RaiseException(
1232 : Isolate* isolate, Handle<Object> exception) {
1233 : DCHECK_EQ(WasmInterpreter::TRAPPED, state_);
1234 788 : isolate->Throw(*exception); // Will check that none is pending.
1235 788 : if (HandleException(isolate) == WasmInterpreter::Thread::UNWOUND) {
1236 : DCHECK_EQ(WasmInterpreter::STOPPED, state_);
1237 : return WasmInterpreter::Thread::UNWOUND;
1238 : }
1239 16 : state_ = WasmInterpreter::PAUSED;
1240 16 : return WasmInterpreter::Thread::HANDLED;
1241 : }
1242 :
1243 : private:
1244 : // Handle a thrown exception. Returns whether the exception was handled inside
1245 : // the current activation. Unwinds the interpreted stack accordingly.
1246 1805 : WasmInterpreter::Thread::ExceptionHandlingResult HandleException(
1247 : Isolate* isolate) {
1248 : DCHECK(isolate->has_pending_exception());
1249 : DCHECK_LT(0, activations_.size());
1250 : Activation& act = activations_.back();
1251 339526 : while (frames_.size() > act.fp) {
1252 : Frame& frame = frames_.back();
1253 337757 : InterpreterCode* code = frame.code;
1254 675514 : if (code->side_table->HasEntryAt(frame.pc)) {
1255 : TRACE("----- HANDLE -----\n");
1256 : // TODO(mstarzinger): Push a reference to the pending exception instead
1257 : // of a bogus {int32_t(0)} value here once the interpreter supports it.
1258 : USE(isolate->pending_exception());
1259 36 : Push(WasmValue(int32_t{0}));
1260 : isolate->clear_pending_exception();
1261 36 : frame.pc += JumpToHandlerDelta(code, frame.pc);
1262 : TRACE(" => handler #%zu (#%u @%zu)\n", frames_.size() - 1,
1263 : code->function->func_index, frame.pc);
1264 36 : return WasmInterpreter::Thread::HANDLED;
1265 : }
1266 : TRACE(" => drop frame #%zu (#%u @%zu)\n", frames_.size() - 1,
1267 : code->function->func_index, frame.pc);
1268 337721 : sp_ = stack_.get() + frame.sp;
1269 : frames_.pop_back();
1270 : }
1271 : TRACE("----- UNWIND -----\n");
1272 : DCHECK_EQ(act.fp, frames_.size());
1273 : DCHECK_EQ(act.sp, StackHeight());
1274 1769 : state_ = WasmInterpreter::STOPPED;
1275 1769 : return WasmInterpreter::Thread::UNWOUND;
1276 : }
1277 :
1278 : // Entries on the stack of functions being evaluated.
1279 : struct Frame {
1280 : InterpreterCode* code;
1281 : pc_t pc;
1282 : sp_t sp;
1283 :
1284 : // Limit of parameters.
1285 : sp_t plimit() { return sp + code->function->sig->parameter_count(); }
1286 : // Limit of locals.
1287 : sp_t llimit() { return plimit() + code->locals.type_list.size(); }
1288 : };
1289 :
1290 : friend class InterpretedFrameImpl;
1291 :
1292 : CodeMap* codemap_;
1293 : Handle<WasmInstanceObject> instance_object_;
1294 : std::unique_ptr<WasmValue[]> stack_;
1295 : WasmValue* stack_limit_ = nullptr; // End of allocated stack space.
1296 : WasmValue* sp_ = nullptr; // Current stack pointer.
1297 : ZoneVector<Frame> frames_;
1298 : WasmInterpreter::State state_ = WasmInterpreter::STOPPED;
1299 : pc_t break_pc_ = kInvalidPc;
1300 : TrapReason trap_reason_ = kTrapCount;
1301 : bool possible_nondeterminism_ = false;
1302 : uint8_t break_flags_ = 0; // a combination of WasmInterpreter::BreakFlag
1303 : uint64_t num_interpreted_calls_ = 0;
1304 : // Store the stack height of each activation (for unwind and frame
1305 : // inspection).
1306 : ZoneVector<Activation> activations_;
1307 :
1308 : CodeMap* codemap() const { return codemap_; }
1309 : const WasmModule* module() const { return codemap_->module(); }
1310 :
1311 : void DoTrap(TrapReason trap, pc_t pc) {
1312 : TRACE("TRAP: %s\n", WasmOpcodes::TrapReasonMessage(trap));
1313 37072 : state_ = WasmInterpreter::TRAPPED;
1314 37072 : trap_reason_ = trap;
1315 : CommitPc(pc);
1316 : }
1317 :
1318 : // Check if there is room for a function's activation.
1319 : void EnsureStackSpaceForCall(InterpreterCode* code) {
1320 10654054 : EnsureStackSpace(code->side_table->max_stack_height_ +
1321 5327027 : code->locals.type_list.size());
1322 : DCHECK_GE(StackHeight(), code->function->sig->parameter_count());
1323 : }
1324 :
1325 : // Push a frame with arguments already on the stack.
1326 5182011 : void PushFrame(InterpreterCode* code) {
1327 : DCHECK_NOT_NULL(code);
1328 : DCHECK_NOT_NULL(code->side_table);
1329 : EnsureStackSpaceForCall(code);
1330 :
1331 5182011 : ++num_interpreted_calls_;
1332 5182011 : size_t arity = code->function->sig->parameter_count();
1333 : // The parameters will overlap the arguments already on the stack.
1334 : DCHECK_GE(StackHeight(), arity);
1335 :
1336 15546033 : frames_.push_back({code, 0, StackHeight() - arity});
1337 5182011 : frames_.back().pc = InitLocals(code);
1338 : TRACE(" => PushFrame #%zu (#%u @%zu)\n", frames_.size() - 1,
1339 : code->function->func_index, frames_.back().pc);
1340 5182011 : }
1341 :
1342 5327027 : pc_t InitLocals(InterpreterCode* code) {
1343 9989816 : for (auto p : code->locals.type_list) {
1344 : WasmValue val;
1345 4662789 : switch (p) {
1346 : #define CASE_TYPE(wasm, ctype) \
1347 : case kWasm##wasm: \
1348 : val = WasmValue(ctype{}); \
1349 : break;
1350 4662789 : WASM_CTYPES(CASE_TYPE)
1351 : #undef CASE_TYPE
1352 : default:
1353 0 : UNREACHABLE();
1354 : break;
1355 : }
1356 : Push(val);
1357 : }
1358 5327027 : return code->locals.encoded_size;
1359 : }
1360 :
1361 : void CommitPc(pc_t pc) {
1362 : DCHECK(!frames_.empty());
1363 89642 : frames_.back().pc = pc;
1364 : }
1365 :
1366 : bool SkipBreakpoint(InterpreterCode* code, pc_t pc) {
1367 5992 : if (pc == break_pc_) {
1368 : // Skip the previously hit breakpoint when resuming.
1369 2996 : break_pc_ = kInvalidPc;
1370 : return true;
1371 : }
1372 : return false;
1373 : }
1374 :
1375 20 : void ReloadFromFrameOnException(Decoder* decoder, InterpreterCode** code,
1376 : pc_t* pc, pc_t* limit) {
1377 : Frame* top = &frames_.back();
1378 20 : *code = top->code;
1379 20 : *pc = top->pc;
1380 20 : *limit = top->code->end - top->code->start;
1381 20 : decoder->Reset(top->code->start, top->code->end);
1382 20 : }
1383 :
1384 : int LookupTargetDelta(InterpreterCode* code, pc_t pc) {
1385 162210 : return static_cast<int>(code->side_table->Lookup(pc).pc_diff);
1386 : }
1387 :
1388 36 : int JumpToHandlerDelta(InterpreterCode* code, pc_t pc) {
1389 36 : ControlTransferEntry& control_transfer_entry = code->side_table->Lookup(pc);
1390 36 : DoStackTransfer(sp_ - (control_transfer_entry.sp_diff + kCatchInArity),
1391 72 : control_transfer_entry.target_arity);
1392 36 : return control_transfer_entry.pc_diff;
1393 : }
1394 :
1395 254658 : int DoBreak(InterpreterCode* code, pc_t pc, size_t depth) {
1396 254658 : ControlTransferEntry& control_transfer_entry = code->side_table->Lookup(pc);
1397 254658 : DoStackTransfer(sp_ - control_transfer_entry.sp_diff,
1398 509316 : control_transfer_entry.target_arity);
1399 254658 : return control_transfer_entry.pc_diff;
1400 : }
1401 :
1402 110572 : pc_t ReturnPc(Decoder* decoder, InterpreterCode* code, pc_t pc) {
1403 110572 : switch (code->orig_start[pc]) {
1404 : case kExprCallFunction: {
1405 : CallFunctionImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
1406 110420 : return pc + 1 + imm.length;
1407 : }
1408 : case kExprCallIndirect: {
1409 : CallIndirectImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures,
1410 152 : decoder, code->at(pc));
1411 152 : return pc + 1 + imm.length;
1412 : }
1413 : default:
1414 0 : UNREACHABLE();
1415 : }
1416 : }
1417 :
1418 4808005 : bool DoReturn(Decoder* decoder, InterpreterCode** code, pc_t* pc, pc_t* limit,
1419 : size_t arity) {
1420 : DCHECK_GT(frames_.size(), 0);
1421 4808005 : WasmValue* sp_dest = stack_.get() + frames_.back().sp;
1422 : frames_.pop_back();
1423 4808005 : if (frames_.size() == current_activation().fp) {
1424 : // A return from the last frame terminates the execution.
1425 4697433 : state_ = WasmInterpreter::FINISHED;
1426 4697433 : DoStackTransfer(sp_dest, arity);
1427 : TRACE(" => finish\n");
1428 4697433 : return false;
1429 : } else {
1430 : // Return to caller frame.
1431 : Frame* top = &frames_.back();
1432 110572 : *code = top->code;
1433 110572 : decoder->Reset((*code)->start, (*code)->end);
1434 110572 : *pc = ReturnPc(decoder, *code, top->pc);
1435 110572 : *limit = top->code->end - top->code->start;
1436 : TRACE(" => Return to #%zu (#%u @%zu)\n", frames_.size() - 1,
1437 : (*code)->function->func_index, *pc);
1438 110572 : DoStackTransfer(sp_dest, arity);
1439 110572 : return true;
1440 : }
1441 : }
1442 :
1443 : // Returns true if the call was successful, false if the stack check failed
1444 : // and the current activation was fully unwound.
1445 446524 : bool DoCall(Decoder* decoder, InterpreterCode* target, pc_t* pc,
1446 : pc_t* limit) V8_WARN_UNUSED_RESULT {
1447 446524 : frames_.back().pc = *pc;
1448 446524 : PushFrame(target);
1449 446524 : if (!DoStackCheck()) return false;
1450 446508 : *pc = frames_.back().pc;
1451 446508 : *limit = target->end - target->start;
1452 446508 : decoder->Reset(target->start, target->end);
1453 446508 : return true;
1454 : }
1455 :
1456 : // Returns true if the tail call was successful, false if the stack check
1457 : // failed.
1458 145016 : bool DoReturnCall(Decoder* decoder, InterpreterCode* target, pc_t* pc,
1459 : pc_t* limit) V8_WARN_UNUSED_RESULT {
1460 : DCHECK_NOT_NULL(target);
1461 : DCHECK_NOT_NULL(target->side_table);
1462 : EnsureStackSpaceForCall(target);
1463 :
1464 145016 : ++num_interpreted_calls_;
1465 :
1466 : Frame* top = &frames_.back();
1467 :
1468 : // Drop everything except current parameters.
1469 145016 : WasmValue* sp_dest = stack_.get() + top->sp;
1470 145016 : size_t arity = target->function->sig->parameter_count();
1471 :
1472 145016 : DoStackTransfer(sp_dest, arity);
1473 :
1474 145016 : *limit = target->end - target->start;
1475 145016 : decoder->Reset(target->start, target->end);
1476 :
1477 : // Rebuild current frame to look like a call to callee.
1478 145016 : top->code = target;
1479 145016 : top->pc = 0;
1480 145016 : top->sp = StackHeight() - arity;
1481 145016 : top->pc = InitLocals(target);
1482 :
1483 145016 : *pc = top->pc;
1484 :
1485 : TRACE(" => ReturnCall #%zu (#%u @%zu)\n", frames_.size() - 1,
1486 : target->function->func_index, top->pc);
1487 :
1488 145016 : return true;
1489 : }
1490 :
1491 : // Copies {arity} values on the top of the stack down the stack to {dest},
1492 : // dropping the values in-between.
1493 5207715 : void DoStackTransfer(WasmValue* dest, size_t arity) {
1494 : // before: |---------------| pop_count | arity |
1495 : // ^ 0 ^ dest ^ sp_
1496 : //
1497 : // after: |---------------| arity |
1498 : // ^ 0 ^ sp_
1499 : DCHECK_LE(dest, sp_);
1500 : DCHECK_LE(dest + arity, sp_);
1501 5207715 : if (arity) memmove(dest, sp_ - arity, arity * sizeof(*sp_));
1502 5207715 : sp_ = dest + arity;
1503 5207715 : }
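// Concrete example: with stack values [a, b, c, d, e], sp_ one past e, dest
// pointing at b, and arity == 2, the memmove copies d and e over b and c,
// leaving [a, d, e] with sp_ one past the relocated e.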
1504 :
1505 : inline Address EffectiveAddress(uint32_t index) {
1506 : // Compute the effective address of the access, making sure to condition
1507 : // the index even in the in-bounds case.
1508 : return reinterpret_cast<Address>(instance_object_->memory_start()) +
1509 6128026 : (index & instance_object_->memory_mask());
1510 : }
1511 :
1512 : template <typename mtype>
1513 6132266 : inline Address BoundsCheckMem(uint32_t offset, uint32_t index) {
1514 6132266 : uint32_t effective_index = offset + index;
1515 6132266 : if (effective_index < index) {
1516 : return kNullAddress; // wraparound => oob
1517 : }
1518 12264532 : if (!IsInBounds(effective_index, sizeof(mtype),
1519 : instance_object_->memory_size())) {
1520 : return kNullAddress; // oob
1521 : }
1522 6127586 : return EffectiveAddress(effective_index);
1523 : }
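// The (effective_index < index) test catches 32-bit wraparound of
// offset + index: if the unsigned sum overflows, the truncated result is
// necessarily smaller than index, so the access is rejected before the size
// check runs.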
1524 :
1525 : inline bool BoundsCheckMemRange(uint32_t index, uint32_t* size,
1526 : Address* out_address) {
1527 188 : bool ok = ClampToBounds(
1528 : index, size, static_cast<uint32_t>(instance_object_->memory_size()));
1529 : *out_address = EffectiveAddress(index);
1530 : return ok;
1531 : }
1532 :
1533 : template <typename ctype, typename mtype>
1534 3173174 : bool ExecuteLoad(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len,
1535 : MachineRepresentation rep) {
1536 : MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc),
1537 3173174 : sizeof(ctype));
1538 : uint32_t index = Pop().to<uint32_t>();
1539 3173174 : Address addr = BoundsCheckMem<mtype>(imm.offset, index);
1540 3173174 : if (!addr) {
1541 : DoTrap(kTrapMemOutOfBounds, pc);
1542 2136 : return false;
1543 : }
1544 : WasmValue result(
1545 0 : converter<ctype, mtype>{}(ReadLittleEndianValue<mtype>(addr)));
1546 :
1547 : Push(result);
1548 3171038 : len = 1 + imm.length;
1549 :
1550 3171038 : if (FLAG_trace_wasm_memory) {
1551 28 : MemoryTracingInfo info(imm.offset + index, false, rep);
1552 28 : TraceMemoryOperation(ExecutionTier::kInterpreter, &info,
1553 : code->function->func_index, static_cast<int>(pc),
1554 : instance_object_->memory_start());
1555 : }
1556 :
1557 : return true;
1558 : }
1559 :
1560 : template <typename ctype, typename mtype>
1561 2573736 : bool ExecuteStore(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len,
1562 : MachineRepresentation rep) {
1563 : MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc),
1564 2573736 : sizeof(ctype));
1565 0 : ctype val = Pop().to<ctype>();
1566 :
1567 : uint32_t index = Pop().to<uint32_t>();
1568 2573736 : Address addr = BoundsCheckMem<mtype>(imm.offset, index);
1569 2573736 : if (!addr) {
1570 : DoTrap(kTrapMemOutOfBounds, pc);
1571 1296 : return false;
1572 : }
1573 : WriteLittleEndianValue<mtype>(addr, converter<mtype, ctype>{}(val));
1574 2572440 : len = 1 + imm.length;
1575 :
1576 2572440 : if (FLAG_trace_wasm_memory) {
1577 8 : MemoryTracingInfo info(imm.offset + index, true, rep);
1578 8 : TraceMemoryOperation(ExecutionTier::kInterpreter, &info,
1579 : code->function->func_index, static_cast<int>(pc),
1580 : instance_object_->memory_start());
1581 : }
1582 :
1583 : return true;
1584 : }
1585 :
1586 : bool CheckDataSegmentIsPassiveAndNotDropped(uint32_t index, pc_t pc) {
1587 : DCHECK_LT(index, module()->num_declared_data_segments);
1588 84 : if (instance_object_->dropped_data_segments()[index]) {
1589 : DoTrap(kTrapDataSegmentDropped, pc);
1590 : return false;
1591 : }
1592 : return true;
1593 : }
1594 :
1595 : template <typename type, typename op_type>
1596 384088 : bool ExtractAtomicOpParams(Decoder* decoder, InterpreterCode* code,
1597 : Address& address, pc_t pc, int& len,
1598 : type* val = nullptr, type* val2 = nullptr) {
1599 : MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc + 1),
1600 768176 : sizeof(type));
1601 438564 : if (val2) *val2 = static_cast<type>(Pop().to<op_type>());
1602 766300 : if (val) *val = static_cast<type>(Pop().to<op_type>());
1603 : uint32_t index = Pop().to<uint32_t>();
1604 384088 : address = BoundsCheckMem<type>(imm.offset, index);
1605 384088 : if (!address) {
1606 : DoTrap(kTrapMemOutOfBounds, pc);
1607 0 : return false;
1608 : }
1609 384088 : len = 2 + imm.length;
1610 384088 : return true;
1611 : }
1612 :
1613 3596 : bool ExecuteNumericOp(WasmOpcode opcode, Decoder* decoder,
1614 : InterpreterCode* code, pc_t pc, int& len) {
1615 3596 : switch (opcode) {
1616 : case kExprI32SConvertSatF32:
1617 460 : Push(WasmValue(ExecuteConvertSaturate<int32_t>(Pop().to<float>())));
1618 460 : return true;
1619 : case kExprI32UConvertSatF32:
1620 460 : Push(WasmValue(ExecuteConvertSaturate<uint32_t>(Pop().to<float>())));
1621 460 : return true;
1622 : case kExprI32SConvertSatF64:
1623 196 : Push(WasmValue(ExecuteConvertSaturate<int32_t>(Pop().to<double>())));
1624 196 : return true;
1625 : case kExprI32UConvertSatF64:
1626 196 : Push(WasmValue(ExecuteConvertSaturate<uint32_t>(Pop().to<double>())));
1627 196 : return true;
1628 : case kExprI64SConvertSatF32:
1629 920 : Push(WasmValue(ExecuteI64SConvertSatF32(Pop().to<float>())));
1630 460 : return true;
1631 : case kExprI64UConvertSatF32:
1632 920 : Push(WasmValue(ExecuteI64UConvertSatF32(Pop().to<float>())));
1633 460 : return true;
1634 : case kExprI64SConvertSatF64:
1635 392 : Push(WasmValue(ExecuteI64SConvertSatF64(Pop().to<double>())));
1636 196 : return true;
1637 : case kExprI64UConvertSatF64:
1638 392 : Push(WasmValue(ExecuteI64UConvertSatF64(Pop().to<double>())));
1639 196 : return true;
1640 : case kExprMemoryInit: {
1641 72 : MemoryInitImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
1642 : DCHECK_LT(imm.data_segment_index, module()->num_declared_data_segments);
1643 72 : len += imm.length;
1644 144 : if (!CheckDataSegmentIsPassiveAndNotDropped(imm.data_segment_index,
1645 : pc)) {
1646 : return false;
1647 : }
1648 : auto size = Pop().to<uint32_t>();
1649 : auto src = Pop().to<uint32_t>();
1650 : auto dst = Pop().to<uint32_t>();
1651 : Address dst_addr;
1652 : bool ok = BoundsCheckMemRange(dst, &size, &dst_addr);
1653 : auto src_max =
1654 68 : instance_object_->data_segment_sizes()[imm.data_segment_index];
1655 : // Use & instead of && so the clamp is not short-circuited.
1656 : ok &= ClampToBounds(src, &size, src_max);
1657 : Address src_addr =
1658 68 : instance_object_->data_segment_starts()[imm.data_segment_index] +
1659 68 : src;
1660 68 : memory_copy_wrapper(dst_addr, src_addr, size);
1661 68 : if (!ok) DoTrap(kTrapMemOutOfBounds, pc);
1662 : return ok;
1663 : }
1664 : case kExprDataDrop: {
1665 : DataDropImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
1666 12 : len += imm.length;
1667 12 : if (!CheckDataSegmentIsPassiveAndNotDropped(imm.index, pc)) {
1668 : return false;
1669 : }
1670 8 : instance_object_->dropped_data_segments()[imm.index] = 1;
1671 8 : return true;
1672 : }
1673 : case kExprMemoryCopy: {
1674 : MemoryCopyImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
1675 : auto size = Pop().to<uint32_t>();
1676 : auto src = Pop().to<uint32_t>();
1677 : auto dst = Pop().to<uint32_t>();
1678 : Address dst_addr;
1679 76 : bool copy_backward = src < dst && dst - src < size;
1680 : bool ok = BoundsCheckMemRange(dst, &size, &dst_addr);
1681 : // Trap without copying any bytes if we are copying backward and the
1682 : // copy is partially out-of-bounds. It suffices to check the dst
1683 : // region, because {src < dst} implies that the src region is in
1684 : // bounds whenever the dst region is.
1685 76 : if (ok || !copy_backward) {
1686 : Address src_addr;
1687 : // Use & instead of && so the bounds check is not short-circuited.
1688 : ok &= BoundsCheckMemRange(src, &size, &src_addr);
1689 64 : memory_copy_wrapper(dst_addr, src_addr, size);
1690 : }
1691 76 : if (!ok) DoTrap(kTrapMemOutOfBounds, pc);
1692 76 : len += imm.length;
1693 : return ok;
1694 : }
1695 : case kExprMemoryFill: {
1696 : MemoryIndexImmediate<Decoder::kNoValidate> imm(decoder,
1697 : code->at(pc + 1));
1698 : auto size = Pop().to<uint32_t>();
1699 : auto value = Pop().to<uint32_t>();
1700 : auto dst = Pop().to<uint32_t>();
1701 : Address dst_addr;
1702 : bool ok = BoundsCheckMemRange(dst, &size, &dst_addr);
1703 44 : memory_fill_wrapper(dst_addr, value, size);
1704 44 : if (!ok) DoTrap(kTrapMemOutOfBounds, pc);
1705 44 : len += imm.length;
1706 : return ok;
1707 : }
1708 : case kExprTableCopy: {
1709 : TableCopyImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
1710 : auto size = Pop().to<uint32_t>();
1711 : auto src = Pop().to<uint32_t>();
1712 : auto dst = Pop().to<uint32_t>();
1713 : bool ok = WasmInstanceObject::CopyTableEntries(
1714 : instance_object_->GetIsolate(), instance_object_,
1715 768 : imm.table_dst.index, imm.table_src.index, dst, src, size);
1716 768 : if (!ok) DoTrap(kTrapTableOutOfBounds, pc);
1717 768 : len += imm.length;
1718 : return ok;
1719 : }
1720 : default:
1721 0 : FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
1722 0 : OpcodeName(code->start[pc]));
1723 : UNREACHABLE();
1724 : }
1725 : return false;
1726 : }
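
  // A minimal sketch of the overlap test used by the memory.copy case above:
  // the copy must be treated as "backward" exactly when the destination
  // starts inside the source range, so copying low-to-high would clobber
  // source bytes before they are read. The name
  // {IsBackwardOverlappingCopySketch} is hypothetical.
  static bool IsBackwardOverlappingCopySketch(uint32_t dst, uint32_t src,
                                              uint32_t size) {
    return src < dst && dst - src < size;
  }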
1727 :
1728 : template <typename type, typename op_type, typename func>
1729 : op_type ExecuteAtomicBinopBE(type val, Address addr, func op) {
1730 : type old_val;
1731 : type new_val;
1732 : old_val = ReadUnalignedValue<type>(addr);
1733 : do {
1734 : new_val =
1735 : ByteReverse(static_cast<type>(op(ByteReverse<type>(old_val), val)));
1736 : } while (!(std::atomic_compare_exchange_strong(
1737 : reinterpret_cast<std::atomic<type>*>(addr), &old_val, new_val)));
1738 : return static_cast<op_type>(ByteReverse<type>(old_val));
1739 : }
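
  // A minimal sketch of the compare-exchange retry loop used above, shown
  // without the byte reversal for a plain fetch-add; the name
  // {FetchAddViaCasSketch} is hypothetical.
  template <typename T>
  static T FetchAddViaCasSketch(std::atomic<T>* location, T operand) {
    T old_val = location->load();
    T new_val;
    do {
      new_val = static_cast<T>(old_val + operand);
      // On failure, compare_exchange_strong reloads {old_val} with the value
      // currently stored, so the next iteration recomputes from fresh data.
    } while (!location->compare_exchange_strong(old_val, new_val));
    return old_val;  // the value observed immediately before the update
  }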
1740 :
1741 : template <typename type>
1742 : type AdjustByteOrder(type param) {
1743 : #if V8_TARGET_BIG_ENDIAN
1744 : return ByteReverse(param);
1745 : #else
1746 : return param;
1747 : #endif
1748 : }
1749 :
1750 384088 : bool ExecuteAtomicOp(WasmOpcode opcode, Decoder* decoder,
1751 : InterpreterCode* code, pc_t pc, int& len) {
1752 : #if V8_TARGET_BIG_ENDIAN
1753 : constexpr bool kBigEndian = true;
1754 : #else
1755 : constexpr bool kBigEndian = false;
1756 : #endif
1757 : WasmValue result;
1758 384088 : switch (opcode) {
1759 : #define ATOMIC_BINOP_CASE(name, type, op_type, operation, op) \
1760 : case kExpr##name: { \
1761 : type val; \
1762 : Address addr; \
1763 : op_type result; \
1764 : if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
1765 : &val)) { \
1766 : return false; \
1767 : } \
1768 : static_assert(sizeof(std::atomic<type>) == sizeof(type), \
1769 : "Size mismatch for types std::atomic<" #type \
1770 : ">, and " #type); \
1771 : if (kBigEndian) { \
1772 : auto oplambda = [](type a, type b) { return a op b; }; \
1773 : result = ExecuteAtomicBinopBE<type, op_type>(val, addr, oplambda); \
1774 : } else { \
1775 : result = static_cast<op_type>( \
1776 : std::operation(reinterpret_cast<std::atomic<type>*>(addr), val)); \
1777 : } \
1778 : Push(WasmValue(result)); \
1779 : break; \
1780 : }
1781 26920 : ATOMIC_BINOP_CASE(I32AtomicAdd, uint32_t, uint32_t, atomic_fetch_add, +);
1782 972 : ATOMIC_BINOP_CASE(I32AtomicAdd8U, uint8_t, uint32_t, atomic_fetch_add, +);
1783 972 : ATOMIC_BINOP_CASE(I32AtomicAdd16U, uint16_t, uint32_t, atomic_fetch_add,
1784 : +);
1785 26912 : ATOMIC_BINOP_CASE(I32AtomicSub, uint32_t, uint32_t, atomic_fetch_sub, -);
1786 972 : ATOMIC_BINOP_CASE(I32AtomicSub8U, uint8_t, uint32_t, atomic_fetch_sub, -);
1787 972 : ATOMIC_BINOP_CASE(I32AtomicSub16U, uint16_t, uint32_t, atomic_fetch_sub,
1788 : -);
1789 26912 : ATOMIC_BINOP_CASE(I32AtomicAnd, uint32_t, uint32_t, atomic_fetch_and, &);
1790 972 : ATOMIC_BINOP_CASE(I32AtomicAnd8U, uint8_t, uint32_t, atomic_fetch_and, &);
1791 972 : ATOMIC_BINOP_CASE(I32AtomicAnd16U, uint16_t, uint32_t,
1792 : atomic_fetch_and, &);
1793 26912 : ATOMIC_BINOP_CASE(I32AtomicOr, uint32_t, uint32_t, atomic_fetch_or, |);
1794 972 : ATOMIC_BINOP_CASE(I32AtomicOr8U, uint8_t, uint32_t, atomic_fetch_or, |);
1795 972 : ATOMIC_BINOP_CASE(I32AtomicOr16U, uint16_t, uint32_t, atomic_fetch_or, |);
1796 26912 : ATOMIC_BINOP_CASE(I32AtomicXor, uint32_t, uint32_t, atomic_fetch_xor, ^);
1797 972 : ATOMIC_BINOP_CASE(I32AtomicXor8U, uint8_t, uint32_t, atomic_fetch_xor, ^);
1798 972 : ATOMIC_BINOP_CASE(I32AtomicXor16U, uint16_t, uint32_t, atomic_fetch_xor,
1799 : ^);
1800 26912 : ATOMIC_BINOP_CASE(I32AtomicExchange, uint32_t, uint32_t, atomic_exchange,
1801 : =);
1802 972 : ATOMIC_BINOP_CASE(I32AtomicExchange8U, uint8_t, uint32_t, atomic_exchange,
1803 : =);
1804 972 : ATOMIC_BINOP_CASE(I32AtomicExchange16U, uint16_t, uint32_t,
1805 : atomic_exchange, =);
1806 52512 : ATOMIC_BINOP_CASE(I64AtomicAdd, uint64_t, uint64_t, atomic_fetch_add, +);
1807 972 : ATOMIC_BINOP_CASE(I64AtomicAdd8U, uint8_t, uint64_t, atomic_fetch_add, +);
1808 972 : ATOMIC_BINOP_CASE(I64AtomicAdd16U, uint16_t, uint64_t, atomic_fetch_add,
1809 : +);
1810 40380 : ATOMIC_BINOP_CASE(I64AtomicAdd32U, uint32_t, uint64_t, atomic_fetch_add,
1811 : +);
1812 52504 : ATOMIC_BINOP_CASE(I64AtomicSub, uint64_t, uint64_t, atomic_fetch_sub, -);
1813 972 : ATOMIC_BINOP_CASE(I64AtomicSub8U, uint8_t, uint64_t, atomic_fetch_sub, -);
1814 984 : ATOMIC_BINOP_CASE(I64AtomicSub16U, uint16_t, uint64_t, atomic_fetch_sub,
1815 : -);
1816 40380 : ATOMIC_BINOP_CASE(I64AtomicSub32U, uint32_t, uint64_t, atomic_fetch_sub,
1817 : -);
1818 52504 : ATOMIC_BINOP_CASE(I64AtomicAnd, uint64_t, uint64_t, atomic_fetch_and, &);
1819 972 : ATOMIC_BINOP_CASE(I64AtomicAnd8U, uint8_t, uint64_t, atomic_fetch_and, &);
1820 972 : ATOMIC_BINOP_CASE(I64AtomicAnd16U, uint16_t, uint64_t,
1821 : atomic_fetch_and, &);
1822 40380 : ATOMIC_BINOP_CASE(I64AtomicAnd32U, uint32_t, uint64_t,
1823 : atomic_fetch_and, &);
1824 52504 : ATOMIC_BINOP_CASE(I64AtomicOr, uint64_t, uint64_t, atomic_fetch_or, |);
1825 972 : ATOMIC_BINOP_CASE(I64AtomicOr8U, uint8_t, uint64_t, atomic_fetch_or, |);
1826 972 : ATOMIC_BINOP_CASE(I64AtomicOr16U, uint16_t, uint64_t, atomic_fetch_or, |);
1827 40380 : ATOMIC_BINOP_CASE(I64AtomicOr32U, uint32_t, uint64_t, atomic_fetch_or, |);
1828 52504 : ATOMIC_BINOP_CASE(I64AtomicXor, uint64_t, uint64_t, atomic_fetch_xor, ^);
1829 972 : ATOMIC_BINOP_CASE(I64AtomicXor8U, uint8_t, uint64_t, atomic_fetch_xor, ^);
1830 972 : ATOMIC_BINOP_CASE(I64AtomicXor16U, uint16_t, uint64_t, atomic_fetch_xor,
1831 : ^);
1832 40380 : ATOMIC_BINOP_CASE(I64AtomicXor32U, uint32_t, uint64_t, atomic_fetch_xor,
1833 : ^);
1834 52504 : ATOMIC_BINOP_CASE(I64AtomicExchange, uint64_t, uint64_t, atomic_exchange,
1835 : =);
1836 972 : ATOMIC_BINOP_CASE(I64AtomicExchange8U, uint8_t, uint64_t, atomic_exchange,
1837 : =);
1838 972 : ATOMIC_BINOP_CASE(I64AtomicExchange16U, uint16_t, uint64_t,
1839 : atomic_exchange, =);
1840 40380 : ATOMIC_BINOP_CASE(I64AtomicExchange32U, uint32_t, uint64_t,
1841 : atomic_exchange, =);
1842 : #undef ATOMIC_BINOP_CASE
1843 : #define ATOMIC_COMPARE_EXCHANGE_CASE(name, type, op_type) \
1844 : case kExpr##name: { \
1845 : type old_val; \
1846 : type new_val; \
1847 : Address addr; \
1848 : if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
1849 : &old_val, &new_val)) { \
1850 : return false; \
1851 : } \
1852 : static_assert(sizeof(std::atomic<type>) == sizeof(type), \
1853 : "Size mismatch for types std::atomic<" #type \
1854 : ">, and " #type); \
1855 : old_val = AdjustByteOrder<type>(old_val); \
1856 : new_val = AdjustByteOrder<type>(new_val); \
1857 : std::atomic_compare_exchange_strong( \
1858 : reinterpret_cast<std::atomic<type>*>(addr), &old_val, new_val); \
1859 : Push(WasmValue(static_cast<op_type>(AdjustByteOrder<type>(old_val)))); \
1860 : break; \
1861 : }
1862 40380 : ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange, uint32_t,
1863 : uint32_t);
1864 972 : ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange8U, uint8_t,
1865 : uint32_t);
1866 972 : ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange16U, uint16_t,
1867 : uint32_t);
1868 78768 : ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange, uint64_t,
1869 : uint64_t);
1870 972 : ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange8U, uint8_t,
1871 : uint64_t);
1872 984 : ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange16U, uint16_t,
1873 : uint64_t);
1874 40380 : ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange32U, uint32_t,
1875 : uint64_t);
1876 : #undef ATOMIC_COMPARE_EXCHANGE_CASE
1877 : #define ATOMIC_LOAD_CASE(name, type, op_type, operation) \
1878 : case kExpr##name: { \
1879 : Address addr; \
1880 : if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len)) { \
1881 : return false; \
1882 : } \
1883 : static_assert(sizeof(std::atomic<type>) == sizeof(type), \
1884 : "Size mismatch for types std::atomic<" #type \
1885 : ">, and " #type); \
1886 : result = WasmValue(static_cast<op_type>(AdjustByteOrder<type>( \
1887 : std::operation(reinterpret_cast<std::atomic<type>*>(addr))))); \
1888 : Push(result); \
1889 : break; \
1890 : }
1891 1392 : ATOMIC_LOAD_CASE(I32AtomicLoad, uint32_t, uint32_t, atomic_load);
1892 288 : ATOMIC_LOAD_CASE(I32AtomicLoad8U, uint8_t, uint32_t, atomic_load);
1893 288 : ATOMIC_LOAD_CASE(I32AtomicLoad16U, uint16_t, uint32_t, atomic_load);
1894 1968 : ATOMIC_LOAD_CASE(I64AtomicLoad, uint64_t, uint64_t, atomic_load);
1895 304 : ATOMIC_LOAD_CASE(I64AtomicLoad8U, uint8_t, uint64_t, atomic_load);
1896 288 : ATOMIC_LOAD_CASE(I64AtomicLoad16U, uint16_t, uint64_t, atomic_load);
1897 1856 : ATOMIC_LOAD_CASE(I64AtomicLoad32U, uint32_t, uint64_t, atomic_load);
1898 : #undef ATOMIC_LOAD_CASE
1899 : #define ATOMIC_STORE_CASE(name, type, op_type, operation) \
1900 : case kExpr##name: { \
1901 : type val; \
1902 : Address addr; \
1903 : if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
1904 : &val)) { \
1905 : return false; \
1906 : } \
1907 : static_assert(sizeof(std::atomic<type>) == sizeof(type), \
1908 : "Size mismatch for types std::atomic<" #type \
1909 : ">, and " #type); \
1910 : std::operation(reinterpret_cast<std::atomic<type>*>(addr), \
1911 : AdjustByteOrder<type>(val)); \
1912 : break; \
1913 : }
1914 472 : ATOMIC_STORE_CASE(I32AtomicStore, uint32_t, uint32_t, atomic_store);
1915 72 : ATOMIC_STORE_CASE(I32AtomicStore8U, uint8_t, uint32_t, atomic_store);
1916 72 : ATOMIC_STORE_CASE(I32AtomicStore16U, uint16_t, uint32_t, atomic_store);
1917 656 : ATOMIC_STORE_CASE(I64AtomicStore, uint64_t, uint64_t, atomic_store);
1918 72 : ATOMIC_STORE_CASE(I64AtomicStore8U, uint8_t, uint64_t, atomic_store);
1919 72 : ATOMIC_STORE_CASE(I64AtomicStore16U, uint16_t, uint64_t, atomic_store);
1920 464 : ATOMIC_STORE_CASE(I64AtomicStore32U, uint32_t, uint64_t, atomic_store);
1921 : #undef ATOMIC_STORE_CASE
1922 : default:
1923 0 : UNREACHABLE();
1924 : return false;
1925 : }
1926 : return true;
1927 : }
1928 :
1929 : byte* GetGlobalPtr(const WasmGlobal* global) {
1930 807020 : if (global->mutability && global->imported) {
1931 : return reinterpret_cast<byte*>(
1932 0 : instance_object_->imported_mutable_globals()[global->index]);
1933 : } else {
1934 807020 : return instance_object_->globals_start() + global->offset;
1935 : }
1936 : }
1937 :
1938 2345000 : bool ExecuteSimdOp(WasmOpcode opcode, Decoder* decoder, InterpreterCode* code,
1939 : pc_t pc, int& len) {
1940 2345000 : switch (opcode) {
1941 : #define SPLAT_CASE(format, sType, valType, num) \
1942 : case kExpr##format##Splat: { \
1943 : WasmValue val = Pop(); \
1944 : valType v = val.to<valType>(); \
1945 : sType s; \
1946 : for (int i = 0; i < num; i++) s.val[i] = v; \
1947 : Push(WasmValue(Simd128(s))); \
1948 : return true; \
1949 : }
1950 3925628 : SPLAT_CASE(I32x4, int4, int32_t, 4)
1951 6683964 : SPLAT_CASE(F32x4, float4, float, 4)
1952 169180 : SPLAT_CASE(I16x8, int8, int32_t, 8)
1953 275120 : SPLAT_CASE(I8x16, int16, int32_t, 16)
1954 : #undef SPLAT_CASE
1955 : #define EXTRACT_LANE_CASE(format, name) \
1956 : case kExpr##format##ExtractLane: { \
1957 : SimdLaneImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
1958 : ++len; \
1959 : WasmValue val = Pop(); \
1960 : Simd128 s = val.to_s128(); \
1961 : auto ss = s.to_##name(); \
1962 : Push(WasmValue(ss.val[LANE(imm.lane, ss)])); \
1963 : return true; \
1964 : }
1965 156 : EXTRACT_LANE_CASE(I32x4, i32x4)
1966 96 : EXTRACT_LANE_CASE(F32x4, f32x4)
1967 0 : EXTRACT_LANE_CASE(I16x8, i16x8)
1968 0 : EXTRACT_LANE_CASE(I8x16, i8x16)
1969 : #undef EXTRACT_LANE_CASE
1970 : #define BINOP_CASE(op, name, stype, count, expr) \
1971 : case kExpr##op: { \
1972 : WasmValue v2 = Pop(); \
1973 : WasmValue v1 = Pop(); \
1974 : stype s1 = v1.to_s128().to_##name(); \
1975 : stype s2 = v2.to_s128().to_##name(); \
1976 : stype res; \
1977 : for (size_t i = 0; i < count; ++i) { \
1978 : auto a = s1.val[LANE(i, s1)]; \
1979 : auto b = s2.val[LANE(i, s1)]; \
1980 : res.val[LANE(i, s1)] = expr; \
1981 : } \
1982 : Push(WasmValue(Simd128(res))); \
1983 : return true; \
1984 : }
1985 211664 : BINOP_CASE(F32x4Add, f32x4, float4, 4, a + b)
1986 211600 : BINOP_CASE(F32x4Sub, f32x4, float4, 4, a - b)
1987 211600 : BINOP_CASE(F32x4Mul, f32x4, float4, 4, a * b)
1988 0 : BINOP_CASE(F32x4Min, f32x4, float4, 4, a < b ? a : b)
1989 0 : BINOP_CASE(F32x4Max, f32x4, float4, 4, a > b ? a : b)
1990 107840 : BINOP_CASE(I32x4Add, i32x4, int4, 4, base::AddWithWraparound(a, b))
1991 107648 : BINOP_CASE(I32x4Sub, i32x4, int4, 4, base::SubWithWraparound(a, b))
1992 107648 : BINOP_CASE(I32x4Mul, i32x4, int4, 4, base::MulWithWraparound(a, b))
1993 53824 : BINOP_CASE(I32x4MinS, i32x4, int4, 4, a < b ? a : b)
1994 53824 : BINOP_CASE(I32x4MinU, i32x4, int4, 4,
1995 : static_cast<uint32_t>(a) < static_cast<uint32_t>(b) ? a : b)
1996 53824 : BINOP_CASE(I32x4MaxS, i32x4, int4, 4, a > b ? a : b)
1997 53824 : BINOP_CASE(I32x4MaxU, i32x4, int4, 4,
1998 : static_cast<uint32_t>(a) > static_cast<uint32_t>(b) ? a : b)
1999 53824 : BINOP_CASE(S128And, i32x4, int4, 4, a & b)
2000 53824 : BINOP_CASE(S128Or, i32x4, int4, 4, a | b)
2001 53824 : BINOP_CASE(S128Xor, i32x4, int4, 4, a ^ b)
2002 3888 : BINOP_CASE(I16x8Add, i16x8, int8, 8, base::AddWithWraparound(a, b))
2003 3888 : BINOP_CASE(I16x8Sub, i16x8, int8, 8, base::SubWithWraparound(a, b))
2004 3888 : BINOP_CASE(I16x8Mul, i16x8, int8, 8, base::MulWithWraparound(a, b))
2005 1296 : BINOP_CASE(I16x8MinS, i16x8, int8, 8, a < b ? a : b)
2006 1296 : BINOP_CASE(I16x8MinU, i16x8, int8, 8,
2007 : static_cast<uint16_t>(a) < static_cast<uint16_t>(b) ? a : b)
2008 1296 : BINOP_CASE(I16x8MaxS, i16x8, int8, 8, a > b ? a : b)
2009 1296 : BINOP_CASE(I16x8MaxU, i16x8, int8, 8,
2010 : static_cast<uint16_t>(a) > static_cast<uint16_t>(b) ? a : b)
2011 3888 : BINOP_CASE(I16x8AddSaturateS, i16x8, int8, 8, SaturateAdd<int16_t>(a, b))
2012 3888 : BINOP_CASE(I16x8AddSaturateU, i16x8, int8, 8, SaturateAdd<uint16_t>(a, b))
2013 3888 : BINOP_CASE(I16x8SubSaturateS, i16x8, int8, 8, SaturateSub<int16_t>(a, b))
2014 3888 : BINOP_CASE(I16x8SubSaturateU, i16x8, int8, 8, SaturateSub<uint16_t>(a, b))
2015 6480 : BINOP_CASE(I8x16Add, i8x16, int16, 16, base::AddWithWraparound(a, b))
2016 6480 : BINOP_CASE(I8x16Sub, i8x16, int16, 16, base::SubWithWraparound(a, b))
2017 6480 : BINOP_CASE(I8x16Mul, i8x16, int16, 16, base::MulWithWraparound(a, b))
2018 1296 : BINOP_CASE(I8x16MinS, i8x16, int16, 16, a < b ? a : b)
2019 1296 : BINOP_CASE(I8x16MinU, i8x16, int16, 16,
2020 : static_cast<uint8_t>(a) < static_cast<uint8_t>(b) ? a : b)
2021 1296 : BINOP_CASE(I8x16MaxS, i8x16, int16, 16, a > b ? a : b)
2022 1296 : BINOP_CASE(I8x16MaxU, i8x16, int16, 16,
2023 : static_cast<uint8_t>(a) > static_cast<uint8_t>(b) ? a : b)
2024 6480 : BINOP_CASE(I8x16AddSaturateS, i8x16, int16, 16, SaturateAdd<int8_t>(a, b))
2025 6480 : BINOP_CASE(I8x16AddSaturateU, i8x16, int16, 16,
2026 : SaturateAdd<uint8_t>(a, b))
2027 6480 : BINOP_CASE(I8x16SubSaturateS, i8x16, int16, 16, SaturateSub<int8_t>(a, b))
2028 6480 : BINOP_CASE(I8x16SubSaturateU, i8x16, int16, 16,
2029 : SaturateSub<uint8_t>(a, b))
2030 : #undef BINOP_CASE
2031 : #define UNOP_CASE(op, name, stype, count, expr) \
2032 : case kExpr##op: { \
2033 : WasmValue v = Pop(); \
2034 : stype s = v.to_s128().to_##name(); \
2035 : stype res; \
2036 : for (size_t i = 0; i < count; ++i) { \
2037 : auto a = s.val[i]; \
2038 : res.val[i] = expr; \
2039 : } \
2040 : Push(WasmValue(Simd128(res))); \
2041 : return true; \
2042 : }
2043 3220 : UNOP_CASE(F32x4Abs, f32x4, float4, 4, std::abs(a))
2044 1380 : UNOP_CASE(F32x4Neg, f32x4, float4, 4, -a)
2045 2716 : UNOP_CASE(F32x4RecipApprox, f32x4, float4, 4, base::Recip(a))
2046 1164 : UNOP_CASE(F32x4RecipSqrtApprox, f32x4, float4, 4, base::RecipSqrt(a))
2047 1624 : UNOP_CASE(I32x4Neg, i32x4, int4, 4, base::NegateWithWraparound(a))
2048 696 : UNOP_CASE(S128Not, i32x4, int4, 4, ~a)
2049 396 : UNOP_CASE(I16x8Neg, i16x8, int8, 8, base::NegateWithWraparound(a))
2050 684 : UNOP_CASE(I8x16Neg, i8x16, int16, 16, base::NegateWithWraparound(a))
2051 : #undef UNOP_CASE
2052 : #define CMPOP_CASE(op, name, stype, out_stype, count, expr) \
2053 : case kExpr##op: { \
2054 : WasmValue v2 = Pop(); \
2055 : WasmValue v1 = Pop(); \
2056 : stype s1 = v1.to_s128().to_##name(); \
2057 : stype s2 = v2.to_s128().to_##name(); \
2058 : out_stype res; \
2059 : for (size_t i = 0; i < count; ++i) { \
2060 : auto a = s1.val[i]; \
2061 : auto b = s2.val[i]; \
2062 : res.val[i] = expr ? -1 : 0; \
2063 : } \
2064 : Push(WasmValue(Simd128(res))); \
2065 : return true; \
2066 : }
2067 211600 : CMPOP_CASE(F32x4Eq, f32x4, float4, int4, 4, a == b)
2068 211600 : CMPOP_CASE(F32x4Ne, f32x4, float4, int4, 4, a != b)
2069 211600 : CMPOP_CASE(F32x4Gt, f32x4, float4, int4, 4, a > b)
2070 211600 : CMPOP_CASE(F32x4Ge, f32x4, float4, int4, 4, a >= b)
2071 211600 : CMPOP_CASE(F32x4Lt, f32x4, float4, int4, 4, a < b)
2072 211600 : CMPOP_CASE(F32x4Le, f32x4, float4, int4, 4, a <= b)
2073 53888 : CMPOP_CASE(I32x4Eq, i32x4, int4, int4, 4, a == b)
2074 53888 : CMPOP_CASE(I32x4Ne, i32x4, int4, int4, 4, a != b)
2075 53824 : CMPOP_CASE(I32x4GtS, i32x4, int4, int4, 4, a > b)
2076 53824 : CMPOP_CASE(I32x4GeS, i32x4, int4, int4, 4, a >= b)
2077 53824 : CMPOP_CASE(I32x4LtS, i32x4, int4, int4, 4, a < b)
2078 53824 : CMPOP_CASE(I32x4LeS, i32x4, int4, int4, 4, a <= b)
2079 53824 : CMPOP_CASE(I32x4GtU, i32x4, int4, int4, 4,
2080 : static_cast<uint32_t>(a) > static_cast<uint32_t>(b))
2081 53824 : CMPOP_CASE(I32x4GeU, i32x4, int4, int4, 4,
2082 : static_cast<uint32_t>(a) >= static_cast<uint32_t>(b))
2083 53824 : CMPOP_CASE(I32x4LtU, i32x4, int4, int4, 4,
2084 : static_cast<uint32_t>(a) < static_cast<uint32_t>(b))
2085 53824 : CMPOP_CASE(I32x4LeU, i32x4, int4, int4, 4,
2086 : static_cast<uint32_t>(a) <= static_cast<uint32_t>(b))
2087 1360 : CMPOP_CASE(I16x8Eq, i16x8, int8, int8, 8, a == b)
2088 1360 : CMPOP_CASE(I16x8Ne, i16x8, int8, int8, 8, a != b)
2089 1296 : CMPOP_CASE(I16x8GtS, i16x8, int8, int8, 8, a > b)
2090 1296 : CMPOP_CASE(I16x8GeS, i16x8, int8, int8, 8, a >= b)
2091 1296 : CMPOP_CASE(I16x8LtS, i16x8, int8, int8, 8, a < b)
2092 1296 : CMPOP_CASE(I16x8LeS, i16x8, int8, int8, 8, a <= b)
2093 1296 : CMPOP_CASE(I16x8GtU, i16x8, int8, int8, 8,
2094 : static_cast<uint16_t>(a) > static_cast<uint16_t>(b))
2095 1296 : CMPOP_CASE(I16x8GeU, i16x8, int8, int8, 8,
2096 : static_cast<uint16_t>(a) >= static_cast<uint16_t>(b))
2097 1296 : CMPOP_CASE(I16x8LtU, i16x8, int8, int8, 8,
2098 : static_cast<uint16_t>(a) < static_cast<uint16_t>(b))
2099 1296 : CMPOP_CASE(I16x8LeU, i16x8, int8, int8, 8,
2100 : static_cast<uint16_t>(a) <= static_cast<uint16_t>(b))
2101 1360 : CMPOP_CASE(I8x16Eq, i8x16, int16, int16, 16, a == b)
2102 1360 : CMPOP_CASE(I8x16Ne, i8x16, int16, int16, 16, a != b)
2103 1296 : CMPOP_CASE(I8x16GtS, i8x16, int16, int16, 16, a > b)
2104 1296 : CMPOP_CASE(I8x16GeS, i8x16, int16, int16, 16, a >= b)
2105 1296 : CMPOP_CASE(I8x16LtS, i8x16, int16, int16, 16, a < b)
2106 1296 : CMPOP_CASE(I8x16LeS, i8x16, int16, int16, 16, a <= b)
2107 1296 : CMPOP_CASE(I8x16GtU, i8x16, int16, int16, 16,
2108 : static_cast<uint8_t>(a) > static_cast<uint8_t>(b))
2109 1296 : CMPOP_CASE(I8x16GeU, i8x16, int16, int16, 16,
2110 : static_cast<uint8_t>(a) >= static_cast<uint8_t>(b))
2111 1296 : CMPOP_CASE(I8x16LtU, i8x16, int16, int16, 16,
2112 : static_cast<uint8_t>(a) < static_cast<uint8_t>(b))
2113 1296 : CMPOP_CASE(I8x16LeU, i8x16, int16, int16, 16,
2114 : static_cast<uint8_t>(a) <= static_cast<uint8_t>(b))
2115 : #undef CMPOP_CASE
2116 : #define REPLACE_LANE_CASE(format, name, stype, ctype) \
2117 : case kExpr##format##ReplaceLane: { \
2118 : SimdLaneImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
2119 : ++len; \
2120 : WasmValue new_val = Pop(); \
2121 : WasmValue simd_val = Pop(); \
2122 : stype s = simd_val.to_s128().to_##name(); \
2123 : s.val[LANE(imm.lane, s)] = new_val.to<ctype>(); \
2124 : Push(WasmValue(Simd128(s))); \
2125 : return true; \
2126 : }
2127 128 : REPLACE_LANE_CASE(F32x4, f32x4, float4, float)
2128 160 : REPLACE_LANE_CASE(I32x4, i32x4, int4, int32_t)
2129 144 : REPLACE_LANE_CASE(I16x8, i16x8, int8, int32_t)
2130 272 : REPLACE_LANE_CASE(I8x16, i8x16, int16, int32_t)
2131 : #undef REPLACE_LANE_CASE
2132 : case kExprS128LoadMem:
2133 : return ExecuteLoad<Simd128, Simd128>(decoder, code, pc, len,
2134 0 : MachineRepresentation::kSimd128);
2135 : case kExprS128StoreMem:
2136 : return ExecuteStore<Simd128, Simd128>(decoder, code, pc, len,
2137 0 : MachineRepresentation::kSimd128);
2138 : #define SHIFT_CASE(op, name, stype, count, expr) \
2139 : case kExpr##op: { \
2140 : SimdShiftImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
2141 : ++len; \
2142 : WasmValue v = Pop(); \
2143 : stype s = v.to_s128().to_##name(); \
2144 : stype res; \
2145 : for (size_t i = 0; i < count; ++i) { \
2146 : auto a = s.val[i]; \
2147 : res.val[i] = expr; \
2148 : } \
2149 : Push(WasmValue(Simd128(res))); \
2150 : return true; \
2151 : }
2152 28768 : SHIFT_CASE(I32x4Shl, i32x4, int4, 4,
2153 : static_cast<uint32_t>(a) << imm.shift)
2154 28768 : SHIFT_CASE(I32x4ShrS, i32x4, int4, 4, a >> imm.shift)
2155 28768 : SHIFT_CASE(I32x4ShrU, i32x4, int4, 4,
2156 : static_cast<uint32_t>(a) >> imm.shift)
2157 2160 : SHIFT_CASE(I16x8Shl, i16x8, int8, 8,
2158 : static_cast<uint16_t>(a) << imm.shift)
2159 2160 : SHIFT_CASE(I16x8ShrS, i16x8, int8, 8, a >> imm.shift)
2160 2160 : SHIFT_CASE(I16x8ShrU, i16x8, int8, 8,
2161 : static_cast<uint16_t>(a) >> imm.shift)
2162 1008 : SHIFT_CASE(I8x16Shl, i8x16, int16, 16,
2163 : static_cast<uint8_t>(a) << imm.shift)
2164 1008 : SHIFT_CASE(I8x16ShrS, i8x16, int16, 16, a >> imm.shift)
2165 1008 : SHIFT_CASE(I8x16ShrU, i8x16, int16, 16,
2166 : static_cast<uint8_t>(a) >> imm.shift)
2167 : #undef SHIFT_CASE
2168 : #define CONVERT_CASE(op, src_type, name, dst_type, count, start_index, ctype, \
2169 : expr) \
2170 : case kExpr##op: { \
2171 : WasmValue v = Pop(); \
2172 : src_type s = v.to_s128().to_##name(); \
2173 : dst_type res; \
2174 : for (size_t i = 0; i < count; ++i) { \
2175 : ctype a = s.val[LANE(start_index + i, s)]; \
2176 : res.val[LANE(i, res)] = expr; \
2177 : } \
2178 : Push(WasmValue(Simd128(res))); \
2179 : return true; \
2180 : }
2181 0 : CONVERT_CASE(F32x4SConvertI32x4, int4, i32x4, float4, 4, 0, int32_t,
2182 : static_cast<float>(a))
2183 0 : CONVERT_CASE(F32x4UConvertI32x4, int4, i32x4, float4, 4, 0, uint32_t,
2184 : static_cast<float>(a))
2185 1380 : CONVERT_CASE(I32x4SConvertF32x4, float4, f32x4, int4, 4, 0, double,
2186 : std::isnan(a)    ? 0
2187 : : a < kMinInt    ? kMinInt
2188 : : a > kMaxInt    ? kMaxInt
2189 : : static_cast<int32_t>(a))
2190 1380 : CONVERT_CASE(I32x4UConvertF32x4, float4, f32x4, int4, 4, 0, double,
2191 : std::isnan(a)    ? 0
2192 : : a < 0          ? 0
2193 : : a > kMaxUInt32 ? kMaxUInt32
2194 : : static_cast<uint32_t>(a))
2195 108 : CONVERT_CASE(I32x4SConvertI16x8High, int8, i16x8, int4, 4, 4, int16_t,
2196 : a)
2197 108 : CONVERT_CASE(I32x4UConvertI16x8High, int8, i16x8, int4, 4, 4, uint16_t,
2198 : a)
2199 108 : CONVERT_CASE(I32x4SConvertI16x8Low, int8, i16x8, int4, 4, 0, int16_t, a)
2200 108 : CONVERT_CASE(I32x4UConvertI16x8Low, int8, i16x8, int4, 4, 0, uint16_t,
2201 : a)
2202 108 : CONVERT_CASE(I16x8SConvertI8x16High, int16, i8x16, int8, 8, 8, int8_t,
2203 : a)
2204 108 : CONVERT_CASE(I16x8UConvertI8x16High, int16, i8x16, int8, 8, 8, uint8_t,
2205 : a)
2206 108 : CONVERT_CASE(I16x8SConvertI8x16Low, int16, i8x16, int8, 8, 0, int8_t, a)
2207 108 : CONVERT_CASE(I16x8UConvertI8x16Low, int16, i8x16, int8, 8, 0, uint8_t,
2208 : a)
2209 : #undef CONVERT_CASE
2210 : #define PACK_CASE(op, src_type, name, dst_type, count, ctype, dst_ctype, \
2211 : is_unsigned) \
2212 : case kExpr##op: { \
2213 : WasmValue v2 = Pop(); \
2214 : WasmValue v1 = Pop(); \
2215 : src_type s1 = v1.to_s128().to_##name(); \
2216 : src_type s2 = v2.to_s128().to_##name(); \
2217 : dst_type res; \
2218 : int64_t min = std::numeric_limits<ctype>::min(); \
2219 : int64_t max = std::numeric_limits<ctype>::max(); \
2220 : for (size_t i = 0; i < count; ++i) { \
2221 : int32_t v = i < count / 2 ? s1.val[LANE(i, s1)] \
2222 : : s2.val[LANE(i - count / 2, s2)]; \
2223 : int64_t a = is_unsigned ? static_cast<int64_t>(v & 0xFFFFFFFFu) : v; \
2224 : res.val[LANE(i, res)] = \
2225 : static_cast<dst_ctype>(std::max(min, std::min(max, a))); \
2226 : } \
2227 : Push(WasmValue(Simd128(res))); \
2228 : return true; \
2229 : }
2230 2784 : PACK_CASE(I16x8SConvertI32x4, int4, i32x4, int8, 8, int16_t, int16_t,
2231 : false)
2232 2784 : PACK_CASE(I16x8UConvertI32x4, int4, i32x4, int8, 8, uint16_t, int16_t,
2233 : true)
2234 720 : PACK_CASE(I8x16SConvertI16x8, int8, i16x8, int16, 16, int8_t, int8_t,
2235 : false)
2236 720 : PACK_CASE(I8x16UConvertI16x8, int8, i16x8, int16, 16, uint8_t, int8_t,
2237 : true)
2238 : #undef PACK_CASE
2239 : case kExprS128Select: {
2240 0 : int4 v2 = Pop().to_s128().to_i32x4();
2241 0 : int4 v1 = Pop().to_s128().to_i32x4();
2242 0 : int4 bool_val = Pop().to_s128().to_i32x4();
2243 : int4 res;
2244 0 : for (size_t i = 0; i < 4; ++i) {
2245 0 : res.val[i] = v2.val[i] ^ ((v1.val[i] ^ v2.val[i]) & bool_val.val[i]);
2246 : }
2247 0 : Push(WasmValue(Simd128(res)));
2248 : return true;
2249 : }
2250 : #define ADD_HORIZ_CASE(op, name, stype, count) \
2251 : case kExpr##op: { \
2252 : WasmValue v2 = Pop(); \
2253 : WasmValue v1 = Pop(); \
2254 : stype s1 = v1.to_s128().to_##name(); \
2255 : stype s2 = v2.to_s128().to_##name(); \
2256 : stype res; \
2257 : for (size_t i = 0; i < count / 2; ++i) { \
2258 : res.val[LANE(i, s1)] = \
2259 : s1.val[LANE(i * 2, s1)] + s1.val[LANE(i * 2 + 1, s1)]; \
2260 : res.val[LANE(i + count / 2, s1)] = \
2261 : s2.val[LANE(i * 2, s1)] + s2.val[LANE(i * 2 + 1, s1)]; \
2262 : } \
2263 : Push(WasmValue(Simd128(res))); \
2264 : return true; \
2265 : }
2266 16 : ADD_HORIZ_CASE(I32x4AddHoriz, i32x4, int4, 4)
2267 16 : ADD_HORIZ_CASE(F32x4AddHoriz, f32x4, float4, 4)
2268 16 : ADD_HORIZ_CASE(I16x8AddHoriz, i16x8, int8, 8)
2269 : #undef ADD_HORIZ_CASE
2270 : case kExprS8x16Shuffle: {
2271 : Simd8x16ShuffleImmediate<Decoder::kNoValidate> imm(decoder,
2272 : code->at(pc));
2273 12224 : len += 16;
2274 12224 : int16 v2 = Pop().to_s128().to_i8x16();
2275 12224 : int16 v1 = Pop().to_s128().to_i8x16();
2276 : int16 res;
2277 403392 : for (size_t i = 0; i < kSimd128Size; ++i) {
2278 195584 : int lane = imm.shuffle[i];
2279 : res.val[LANE(i, v1)] = lane < kSimd128Size
2280 : ? v1.val[LANE(lane, v1)]
2281 195584 : : v2.val[LANE(lane - kSimd128Size, v1)];
2282 : }
2283 24448 : Push(WasmValue(Simd128(res)));
2284 : return true;
2285 : }
2286 : #define REDUCTION_CASE(op, name, stype, count, operation) \
2287 : case kExpr##op: { \
2288 : stype s = Pop().to_s128().to_##name(); \
2289 : int32_t res = s.val[0]; \
2290 : for (size_t i = 1; i < count; ++i) { \
2291 : res = res operation static_cast<int32_t>(s.val[i]); \
2292 : } \
2293 : Push(WasmValue(res)); \
2294 : return true; \
2295 : }
2296 32 : REDUCTION_CASE(S1x4AnyTrue, i32x4, int4, 4, |)
2297 32 : REDUCTION_CASE(S1x4AllTrue, i32x4, int4, 4, &)
2298 32 : REDUCTION_CASE(S1x8AnyTrue, i16x8, int8, 8, |)
2299 32 : REDUCTION_CASE(S1x8AllTrue, i16x8, int8, 8, &)
2300 32 : REDUCTION_CASE(S1x16AnyTrue, i8x16, int16, 16, |)
2301 32 : REDUCTION_CASE(S1x16AllTrue, i8x16, int16, 16, &)
2302 : #undef REDUCTION_CASE
2303 : default:
2304 : return false;
2305 : }
2306 : }
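
  // A minimal sketch of the lane-wise pattern shared by the SIMD cases above,
  // shown for a 4-lane i32 add with wraparound semantics; {Int4Sketch} and
  // {LaneWiseAddSketch} are hypothetical stand-ins for the interpreter's
  // vector types.
  struct Int4Sketch {
    int32_t val[4];
  };
  static Int4Sketch LaneWiseAddSketch(Int4Sketch a, Int4Sketch b) {
    Int4Sketch res;
    for (int i = 0; i < 4; ++i) {
      // Adding on the unsigned representation wraps instead of overflowing.
      res.val[i] = static_cast<int32_t>(static_cast<uint32_t>(a.val[i]) +
                                        static_cast<uint32_t>(b.val[i]));
    }
    return res;
  }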
2307 :
2308 : // Check if our control stack (frames_) exceeds the limit. Trigger stack
2309 : // overflow if it does, and unwind the current frame.
2310 : // Returns true if execution can continue, false if the current activation was
2311 : // fully unwound.
2312 : // Do call this function immediately *after* pushing a new frame. The pc of
2313 : // the top frame will be reset to 0 if the stack check fails.
2314 446524 : bool DoStackCheck() V8_WARN_UNUSED_RESULT {
2315 : // The goal of this stack check is not to prevent actual stack overflows,
2316 : // but to simulate stack overflows during the execution of compiled code.
2317 : // That is why this function uses FLAG_stack_size, even though the value
2318 : // stack actually lies in zone memory.
2319 446524 : const size_t stack_size_limit = FLAG_stack_size * KB;
2320 : // Sum up the value stack size and the control stack size.
2321 893048 : const size_t current_stack_size = (sp_ - stack_.get()) * sizeof(*sp_) +
2322 446524 : frames_.size() * sizeof(frames_[0]);
2323 446524 : if (V8_LIKELY(current_stack_size <= stack_size_limit)) {
2324 : return true;
2325 : }
2326 : // The pc of the top frame is initialized to the first instruction. We reset
2327 : // it to 0 here such that we report the same position as in compiled code.
2328 16 : frames_.back().pc = 0;
2329 : Isolate* isolate = instance_object_->GetIsolate();
2330 : HandleScope handle_scope(isolate);
2331 16 : isolate->StackOverflow();
2332 16 : return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
2333 : }
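
  // A minimal sketch of the comparison behind the check above, assuming the
  // value-stack and control-stack byte counts are passed in and the limit is
  // given in KB, like FLAG_stack_size; the name {StackCheckSketch} is
  // hypothetical.
  static bool StackCheckSketch(size_t value_stack_bytes,
                               size_t control_stack_bytes,
                               size_t stack_size_kb) {
    // Both the value stack and the control stack count against the limit.
    return value_stack_bytes + control_stack_bytes <= stack_size_kb * 1024;
  }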
2334 :
2335 0 : void EncodeI32ExceptionValue(Handle<FixedArray> encoded_values,
2336 : uint32_t* encoded_index, uint32_t value) {
2337 0 : encoded_values->set((*encoded_index)++, Smi::FromInt(value >> 16));
2338 0 : encoded_values->set((*encoded_index)++, Smi::FromInt(value & 0xffff));
2339 0 : }
2340 :
2341 0 : void EncodeI64ExceptionValue(Handle<FixedArray> encoded_values,
2342 : uint32_t* encoded_index, uint64_t value) {
2343 0 : EncodeI32ExceptionValue(encoded_values, encoded_index,
2344 0 : static_cast<uint32_t>(value >> 32));
2345 0 : EncodeI32ExceptionValue(encoded_values, encoded_index,
2346 0 : static_cast<uint32_t>(value));
2347 0 : }
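
  // A minimal sketch of the inverse of the encoding above, assuming the
  // 16-bit halves are read back in the order they were written (an i32 is two
  // halves, high first; an i64 is four); the names {DecodeI32Sketch} and
  // {DecodeI64Sketch} are hypothetical, and uint16_t stands in for the Smi
  // fields of the encoded FixedArray.
  static uint32_t DecodeI32Sketch(uint16_t hi, uint16_t lo) {
    return (static_cast<uint32_t>(hi) << 16) | lo;
  }
  static uint64_t DecodeI64Sketch(uint16_t a, uint16_t b, uint16_t c,
                                  uint16_t d) {
    return (static_cast<uint64_t>(DecodeI32Sketch(a, b)) << 32) |
           DecodeI32Sketch(c, d);
  }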
2348 :
2349 : // Allocate, initialize and throw a new exception. The exception values are
2350 : // being popped off the operand stack. Returns true if the exception is being
2351 : // handled locally by the interpreter, false otherwise (interpreter exits).
2352 12 : bool DoThrowException(const WasmException* exception,
2353 : uint32_t index) V8_WARN_UNUSED_RESULT {
2354 : Isolate* isolate = instance_object_->GetIsolate();
2355 : Handle<WasmExceptionTag> exception_tag(
2356 : WasmExceptionTag::cast(
2357 : instance_object_->exceptions_table()->get(index)),
2358 12 : isolate);
2359 12 : uint32_t encoded_size = WasmExceptionPackage::GetEncodedSize(exception);
2360 : Handle<Object> exception_object =
2361 12 : WasmExceptionPackage::New(isolate, exception_tag, encoded_size);
2362 : Handle<FixedArray> encoded_values = Handle<FixedArray>::cast(
2363 12 : WasmExceptionPackage::GetExceptionValues(isolate, exception_object));
2364 : // Encode the exception values on the operand stack into the exception
2365 : // package allocated above. This encoding has to be in sync with other
2366 : // backends so that exceptions can be passed between them.
2367 12 : const WasmExceptionSig* sig = exception->sig;
2368 12 : uint32_t encoded_index = 0;
2369 12 : for (size_t i = 0; i < sig->parameter_count(); ++i) {
2370 0 : WasmValue value = sp_[i - sig->parameter_count()];
2371 0 : switch (sig->GetParam(i)) {
2372 : case kWasmI32: {
2373 : uint32_t u32 = value.to_u32();
2374 0 : EncodeI32ExceptionValue(encoded_values, &encoded_index, u32);
2375 0 : break;
2376 : }
2377 : case kWasmF32: {
2378 : uint32_t f32 = value.to_f32_boxed().get_bits();
2379 0 : EncodeI32ExceptionValue(encoded_values, &encoded_index, f32);
2380 0 : break;
2381 : }
2382 : case kWasmI64: {
2383 : uint64_t u64 = value.to_u64();
2384 0 : EncodeI64ExceptionValue(encoded_values, &encoded_index, u64);
2385 0 : break;
2386 : }
2387 : case kWasmF64: {
2388 : uint64_t f64 = value.to_f64_boxed().get_bits();
2389 0 : EncodeI64ExceptionValue(encoded_values, &encoded_index, f64);
2390 0 : break;
2391 : }
2392 : case kWasmAnyRef:
2393 0 : UNIMPLEMENTED();
2394 : break;
2395 : default:
2396 0 : UNREACHABLE();
2397 : }
2398 : }
2399 : DCHECK_EQ(encoded_size, encoded_index);
2400 12 : PopN(static_cast<int>(sig->parameter_count()));
2401 : // Now that the exception is ready, set it as pending.
2402 12 : isolate->Throw(*exception_object);
2403 12 : return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
2404 : }
2405 :
2406 : // Throw a given existing exception. Returns true if the exception is being
2407 : // handled locally by the interpreter, false otherwise (interpreter exits).
2408 0 : bool DoRethrowException(WasmValue* exception) {
2409 : Isolate* isolate = instance_object_->GetIsolate();
2410 : // TODO(mstarzinger): Use the passed {exception} here once reference types
2411 : // as values on the operand stack are supported by the interpreter.
2412 0 : isolate->ReThrow(*isolate->factory()->undefined_value());
2413 0 : return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
2414 : }
2415 :
2416 4740931 : void Execute(InterpreterCode* code, pc_t pc, int max) {
2417 : DCHECK_NOT_NULL(code->side_table);
2418 : DCHECK(!frames_.empty());
2419 : // There must be enough space on the stack to hold the arguments, locals,
2420 : // and the value stack.
2421 : DCHECK_LE(code->function->sig->parameter_count() +
2422 : code->locals.type_list.size() +
2423 : code->side_table->max_stack_height_,
2424 : stack_limit_ - stack_.get() - frames_.back().sp);
2425 :
2426 4740931 : Decoder decoder(code->start, code->end);
2427 4740931 : pc_t limit = code->end - code->start;
2428 : bool hit_break = false;
2429 :
2430 : while (true) {
2431 : #define PAUSE_IF_BREAK_FLAG(flag) \
2432 : if (V8_UNLIKELY(break_flags_ & WasmInterpreter::BreakFlag::flag)) { \
2433 : hit_break = true; \
2434 : max = 0; \
2435 : }
2436 :
2437 : DCHECK_GT(limit, pc);
2438 : DCHECK_NOT_NULL(code->start);
2439 :
2440 : // Do first check for a breakpoint, in order to set hit_break correctly.
2441 : const char* skip = " ";
2442 43993261 : int len = 1;
2443 43993261 : byte orig = code->start[pc];
2444 43993261 : WasmOpcode opcode = static_cast<WasmOpcode>(orig);
2445 43993261 : if (WasmOpcodes::IsPrefixOpcode(opcode)) {
2446 2732684 : opcode = static_cast<WasmOpcode>(opcode << 8 | code->start[pc + 1]);
2447 : }
2448 43993261 : if (V8_UNLIKELY(orig == kInternalBreakpoint)) {
2449 5992 : orig = code->orig_start[pc];
2450 5992 : if (WasmOpcodes::IsPrefixOpcode(static_cast<WasmOpcode>(orig))) {
2451 : opcode =
2452 0 : static_cast<WasmOpcode>(orig << 8 | code->orig_start[pc + 1]);
2453 : }
2454 11984 : if (SkipBreakpoint(code, pc)) {
2455 : // skip breakpoint by switching on original code.
2456 : skip = "[skip] ";
2457 : } else {
2458 : TRACE("@%-3zu: [break] %-24s:", pc, WasmOpcodes::OpcodeName(opcode));
2459 : TraceValueStack();
2460 : TRACE("\n");
2461 : hit_break = true;
2462 5429 : break;
2463 : }
2464 : }
2465 :
2466 : // If max is 0, break. If max is positive (a limit is set), decrement it.
2467 43990265 : if (max == 0) break;
2468 43987832 : if (max > 0) --max;
2469 :
2470 : USE(skip);
2471 : TRACE("@%-3zu: %s%-24s:", pc, skip, WasmOpcodes::OpcodeName(opcode));
2472 : TraceValueStack();
2473 : TRACE("\n");
2474 :
2475 : #ifdef DEBUG
2476 : // Compute the stack effect of this opcode, and verify later that the
2477 : // stack was modified accordingly.
2478 : std::pair<uint32_t, uint32_t> stack_effect =
2479 : StackEffect(codemap_->module(), frames_.back().code->function->sig,
2480 : code->orig_start + pc, code->orig_end);
2481 : sp_t expected_new_stack_height =
2482 : StackHeight() - stack_effect.first + stack_effect.second;
2483 : #endif
2484 :
2485 43987832 : switch (orig) {
2486 : case kExprNop:
2487 : break;
2488 : case kExprBlock:
2489 : case kExprLoop:
2490 : case kExprTry: {
2491 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures,
2492 2336528 : &decoder, code->at(pc));
2493 1168264 : len = 1 + imm.length;
2494 : break;
2495 : }
2496 : case kExprIf: {
2497 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures,
2498 333584 : &decoder, code->at(pc));
2499 : WasmValue cond = Pop();
2500 : bool is_true = cond.to<uint32_t>() != 0;
2501 166792 : if (is_true) {
2502 : // fall through to the true block.
2503 19470 : len = 1 + imm.length;
2504 : TRACE(" true => fallthrough\n");
2505 : } else {
2506 294644 : len = LookupTargetDelta(code, pc);
2507 : TRACE(" false => @%zu\n", pc + len);
2508 : }
2509 : break;
2510 : }
2511 : case kExprElse:
2512 : case kExprCatch: {
2513 29776 : len = LookupTargetDelta(code, pc);
2514 : TRACE(" end => @%zu\n", pc + len);
2515 14888 : break;
2516 : }
2517 : case kExprThrow: {
2518 : ExceptionIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2519 12 : code->at(pc));
2520 12 : CommitPc(pc); // Needed for local unwinding.
2521 12 : const WasmException* exception = &module()->exceptions[imm.index];
2522 12 : if (!DoThrowException(exception, imm.index)) return;
2523 12 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2524 12 : continue; // Do not bump pc.
2525 : }
2526 : case kExprRethrow: {
2527 : WasmValue ex = Pop();
2528 0 : CommitPc(pc); // Needed for local unwinding.
2529 0 : if (!DoRethrowException(&ex)) return;
2530 0 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2531 0 : continue; // Do not bump pc.
2532 : }
2533 : case kExprSelect: {
2534 : WasmValue cond = Pop();
2535 : WasmValue fval = Pop();
2536 : WasmValue tval = Pop();
2537 932 : Push(cond.to<int32_t>() != 0 ? tval : fval);
2538 : break;
2539 : }
2540 : case kExprBr: {
2541 : BranchDepthImmediate<Decoder::kNoValidate> imm(&decoder,
2542 18054 : code->at(pc));
2543 18054 : len = DoBreak(code, pc, imm.depth);
2544 : TRACE(" br => @%zu\n", pc + len);
2545 : break;
2546 : }
2547 : case kExprBrIf: {
2548 : BranchDepthImmediate<Decoder::kNoValidate> imm(&decoder,
2549 28736 : code->at(pc));
2550 : WasmValue cond = Pop();
2551 : bool is_true = cond.to<uint32_t>() != 0;
2552 28736 : if (is_true) {
2553 14680 : len = DoBreak(code, pc, imm.depth);
2554 : TRACE(" br_if => @%zu\n", pc + len);
2555 : } else {
2556 : TRACE(" false => fallthrough\n");
2557 14056 : len = 1 + imm.length;
2558 : }
2559 : break;
2560 : }
2561 : case kExprBrTable: {
2562 : BranchTableImmediate<Decoder::kNoValidate> imm(&decoder,
2563 443848 : code->at(pc));
2564 : BranchTableIterator<Decoder::kNoValidate> iterator(&decoder, imm);
2565 221924 : uint32_t key = Pop().to<uint32_t>();
2566 : uint32_t depth = 0;
2567 221924 : if (key >= imm.table_count) key = imm.table_count;
2568 1941548 : for (uint32_t i = 0; i <= key; i++) {
2569 : DCHECK(iterator.has_next());
2570 859812 : depth = iterator.next();
2571 : }
2572 221924 : len = key + DoBreak(code, pc + key, static_cast<size_t>(depth));
2573 : TRACE(" br[%u] => @%zu\n", key, pc + key + len);
2574 : break;
2575 : }
2576 : case kExprReturn: {
2577 222248 : size_t arity = code->function->sig->return_count();
2578 222248 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
2579 12 : PAUSE_IF_BREAK_FLAG(AfterReturn);
2580 : continue; // Do not bump pc.
2581 : }
2582 : case kExprUnreachable: {
2583 76 : return DoTrap(kTrapUnreachable, pc);
2584 : }
2585 : case kExprEnd: {
2586 : break;
2587 : }
2588 : case kExprI32Const: {
2589 7762050 : ImmI32Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2590 7762050 : Push(WasmValue(imm.value));
2591 7762050 : len = 1 + imm.length;
2592 : break;
2593 : }
2594 : case kExprI64Const: {
2595 6976 : ImmI64Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2596 6976 : Push(WasmValue(imm.value));
2597 6976 : len = 1 + imm.length;
2598 : break;
2599 : }
2600 : case kExprF32Const: {
2601 300 : ImmF32Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2602 300 : Push(WasmValue(imm.value));
2603 300 : len = 1 + imm.length;
2604 : break;
2605 : }
2606 : case kExprF64Const: {
2607 1684 : ImmF64Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2608 1684 : Push(WasmValue(imm.value));
2609 1684 : len = 1 + imm.length;
2610 : break;
2611 : }
2612 : case kExprGetLocal: {
2613 12266052 : LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2614 12266052 : Push(GetStackValue(frames_.back().sp + imm.index));
2615 12266052 : len = 1 + imm.length;
2616 : break;
2617 : }
2618 : case kExprSetLocal: {
2619 3874066 : LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2620 : WasmValue val = Pop();
2621 3874066 : SetStackValue(frames_.back().sp + imm.index, val);
2622 3874066 : len = 1 + imm.length;
2623 : break;
2624 : }
2625 : case kExprTeeLocal: {
2626 2552 : LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2627 : WasmValue val = Pop();
2628 2552 : SetStackValue(frames_.back().sp + imm.index, val);
2629 : Push(val);
2630 2552 : len = 1 + imm.length;
2631 : break;
2632 : }
2633 : case kExprDrop: {
2634 14496 : Pop();
2635 14496 : break;
2636 : }
2637 : case kExprCallFunction: {
2638 : CallFunctionImmediate<Decoder::kNoValidate> imm(&decoder,
2639 452969 : code->at(pc));
2640 : InterpreterCode* target = codemap()->GetCode(imm.index);
2641 452969 : if (target->function->imported) {
2642 6617 : CommitPc(pc);
2643 : ExternalCallResult result =
2644 6617 : CallImportedFunction(target->function->func_index);
2645 6617 : switch (result.type) {
2646 : case ExternalCallResult::INTERNAL:
2647 : // The import is a function of this instance. Call it directly.
2648 : DCHECK(!result.interpreter_code->function->imported);
2649 : break;
2650 : case ExternalCallResult::INVALID_FUNC:
2651 : case ExternalCallResult::SIGNATURE_MISMATCH:
2652 : // Direct calls are checked statically.
2653 0 : UNREACHABLE();
2654 : case ExternalCallResult::EXTERNAL_RETURNED:
2655 5644 : PAUSE_IF_BREAK_FLAG(AfterCall);
2656 5644 : len = 1 + imm.length;
2657 5644 : break;
2658 : case ExternalCallResult::EXTERNAL_UNWOUND:
2659 965 : return;
2660 : case ExternalCallResult::EXTERNAL_CAUGHT:
2661 8 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2662 8 : continue; // Do not bump pc.
2663 : }
2664 5644 : if (result.type != ExternalCallResult::INTERNAL) break;
2665 : }
2666 : // Execute an internal call.
2667 446352 : if (!DoCall(&decoder, target, &pc, &limit)) return;
2668 446336 : code = target;
2669 446336 : PAUSE_IF_BREAK_FLAG(AfterCall);
2670 : continue; // Do not bump pc.
2671 : } break;
2672 :
2673 : case kExprCallIndirect: {
2674 : CallIndirectImmediate<Decoder::kNoValidate> imm(
2675 584 : kAllWasmFeatures, &decoder, code->at(pc));
2676 292 : uint32_t entry_index = Pop().to<uint32_t>();
2677 : // Assume only one table for now.
2678 : DCHECK_LE(module()->tables.size(), 1u);
2679 292 : CommitPc(pc); // TODO(wasm): Be more disciplined about committing PC.
2680 : ExternalCallResult result =
2681 292 : CallIndirectFunction(0, entry_index, imm.sig_index);
2682 292 : switch (result.type) {
2683 : case ExternalCallResult::INTERNAL:
2684 : // The target is a function of this instance. Call it directly.
2685 172 : if (!DoCall(&decoder, result.interpreter_code, &pc, &limit))
2686 88 : return;
2687 172 : code = result.interpreter_code;
2688 172 : PAUSE_IF_BREAK_FLAG(AfterCall);
2689 172 : continue; // Do not bump pc.
2690 : case ExternalCallResult::INVALID_FUNC:
2691 44 : return DoTrap(kTrapFuncInvalid, pc);
2692 : case ExternalCallResult::SIGNATURE_MISMATCH:
2693 28 : return DoTrap(kTrapFuncSigMismatch, pc);
2694 : case ExternalCallResult::EXTERNAL_RETURNED:
2695 32 : PAUSE_IF_BREAK_FLAG(AfterCall);
2696 32 : len = 1 + imm.length;
2697 32 : break;
2698 : case ExternalCallResult::EXTERNAL_UNWOUND:
2699 : return;
2700 : case ExternalCallResult::EXTERNAL_CAUGHT:
2701 0 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2702 0 : continue; // Do not bump pc.
2703 : }
2704 32 : } break;
2705 :
2706 : case kExprReturnCall: {
2707 : CallFunctionImmediate<Decoder::kNoValidate> imm(&decoder,
2708 104820 : code->at(pc));
2709 : InterpreterCode* target = codemap()->GetCode(imm.index);
2710 :
2711 104820 : if (!target->function->imported) {
2712 : // Enter internal found function.
2713 104820 : if (!DoReturnCall(&decoder, target, &pc, &limit)) return;
2714 104812 : code = target;
2715 104812 : PAUSE_IF_BREAK_FLAG(AfterCall);
2716 :
2717 104812 : continue; // Do not bump pc.
2718 : }
2719 : // Function is imported.
2720 8 : CommitPc(pc);
2721 : ExternalCallResult result =
2722 8 : CallImportedFunction(target->function->func_index);
2723 8 : switch (result.type) {
2724 : case ExternalCallResult::INTERNAL:
2725 : // Cannot import internal functions.
2726 : case ExternalCallResult::INVALID_FUNC:
2727 : case ExternalCallResult::SIGNATURE_MISMATCH:
2728 : // Direct calls are checked statically.
2729 0 : UNREACHABLE();
2730 : case ExternalCallResult::EXTERNAL_RETURNED:
2731 8 : len = 1 + imm.length;
2732 8 : break;
2733 : case ExternalCallResult::EXTERNAL_UNWOUND:
2734 : return;
2735 : case ExternalCallResult::EXTERNAL_CAUGHT:
2736 0 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2737 0 : continue;
2738 : }
2739 8 : size_t arity = code->function->sig->return_count();
2740 8 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
2741 0 : PAUSE_IF_BREAK_FLAG(AfterReturn);
2742 : continue;
2743 : } break;
2744 :
2745 : case kExprReturnCallIndirect: {
2746 : CallIndirectImmediate<Decoder::kNoValidate> imm(
2747 80424 : kAllWasmFeatures, &decoder, code->at(pc));
2748 40212 : uint32_t entry_index = Pop().to<uint32_t>();
2749 : // Assume only one table for now.
2750 : DCHECK_LE(module()->tables.size(), 1u);
2751 40212 : CommitPc(pc); // TODO(wasm): Be more disciplined about committing PC.
2752 :
2753 : // TODO(wasm): Calling functions needs some refactoring to avoid
2754 : // multi-exit code like this.
2755 : ExternalCallResult result =
2756 40212 : CallIndirectFunction(0, entry_index, imm.sig_index);
2757 40212 : switch (result.type) {
2758 : case ExternalCallResult::INTERNAL: {
2759 : InterpreterCode* target = result.interpreter_code;
2760 :
2761 : DCHECK(!target->function->imported);
2762 :
2763 : // The function belongs to this instance. Enter it directly.
2764 40212 : if (!DoReturnCall(&decoder, target, &pc, &limit)) return;
2765 40204 : code = result.interpreter_code;
2766 40204 : PAUSE_IF_BREAK_FLAG(AfterCall);
2767 40204 : continue; // Do not bump pc.
2768 : }
2769 : case ExternalCallResult::INVALID_FUNC:
2770 0 : return DoTrap(kTrapFuncInvalid, pc);
2771 : case ExternalCallResult::SIGNATURE_MISMATCH:
2772 0 : return DoTrap(kTrapFuncSigMismatch, pc);
2773 : case ExternalCallResult::EXTERNAL_RETURNED: {
2774 8 : len = 1 + imm.length;
2775 :
2776 8 : size_t arity = code->function->sig->return_count();
2777 8 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
2778 0 : PAUSE_IF_BREAK_FLAG(AfterCall);
2779 : break;
2780 : }
2781 : case ExternalCallResult::EXTERNAL_UNWOUND:
2782 : return;
2783 :
2784 : case ExternalCallResult::EXTERNAL_CAUGHT:
2785 0 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2786 0 : break;
2787 : }
2788 0 : } break;
2789 :
2790 : case kExprGetGlobal: {
2791 : GlobalIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2792 15904 : code->at(pc));
2793 15904 : const WasmGlobal* global = &module()->globals[imm.index];
2794 : byte* ptr = GetGlobalPtr(global);
2795 : WasmValue val;
2796 15904 : switch (global->type) {
2797 : #define CASE_TYPE(wasm, ctype) \
2798 : case kWasm##wasm: \
2799 : val = WasmValue( \
2800 : ReadLittleEndianValue<ctype>(reinterpret_cast<Address>(ptr))); \
2801 : break;
2802 224 : WASM_CTYPES(CASE_TYPE)
2803 : #undef CASE_TYPE
2804 : default:
2805 0 : UNREACHABLE();
2806 : }
2807 : Push(val);
2808 15904 : len = 1 + imm.length;
2809 : break;
2810 : }
2811 : case kExprSetGlobal: {
2812 : GlobalIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2813 791116 : code->at(pc));
2814 791116 : const WasmGlobal* global = &module()->globals[imm.index];
2815 : byte* ptr = GetGlobalPtr(global);
2816 : WasmValue val = Pop();
2817 791116 : switch (global->type) {
2818 : #define CASE_TYPE(wasm, ctype) \
2819 : case kWasm##wasm: \
2820 : WriteLittleEndianValue<ctype>(reinterpret_cast<Address>(ptr), \
2821 : val.to<ctype>()); \
2822 : break;
2823 : WASM_CTYPES(CASE_TYPE)
2824 : #undef CASE_TYPE
2825 : default:
2826 0 : UNREACHABLE();
2827 : }
2828 791116 : len = 1 + imm.length;
2829 : break;
2830 : }
2831 :
2832 : #define LOAD_CASE(name, ctype, mtype, rep) \
2833 : case kExpr##name: { \
2834 : if (!ExecuteLoad<ctype, mtype>(&decoder, code, pc, len, \
2835 : MachineRepresentation::rep)) \
2836 : return; \
2837 : break; \
2838 : }
2839 :
2840 262372 : LOAD_CASE(I32LoadMem8S, int32_t, int8_t, kWord8);
2841 262380 : LOAD_CASE(I32LoadMem8U, int32_t, uint8_t, kWord8);
2842 131300 : LOAD_CASE(I32LoadMem16S, int32_t, int16_t, kWord16);
2843 131300 : LOAD_CASE(I32LoadMem16U, int32_t, uint16_t, kWord16);
2844 96 : LOAD_CASE(I64LoadMem8S, int64_t, int8_t, kWord8);
2845 0 : LOAD_CASE(I64LoadMem8U, int64_t, uint8_t, kWord16);
2846 96 : LOAD_CASE(I64LoadMem16S, int64_t, int16_t, kWord16);
2847 0 : LOAD_CASE(I64LoadMem16U, int64_t, uint16_t, kWord16);
2848 96 : LOAD_CASE(I64LoadMem32S, int64_t, int32_t, kWord32);
2849 0 : LOAD_CASE(I64LoadMem32U, int64_t, uint32_t, kWord32);
2850 803250 : LOAD_CASE(I32LoadMem, int32_t, int32_t, kWord32);
2851 1542892 : LOAD_CASE(I64LoadMem, int64_t, int64_t, kWord64);
2852 9452 : LOAD_CASE(F32LoadMem, Float32, uint32_t, kFloat32);
2853 29940 : LOAD_CASE(F64LoadMem, Float64, uint64_t, kFloat64);
2854 : #undef LOAD_CASE
2855 :
2856 : #define STORE_CASE(name, ctype, mtype, rep) \
2857 : case kExpr##name: { \
2858 : if (!ExecuteStore<ctype, mtype>(&decoder, code, pc, len, \
2859 : MachineRepresentation::rep)) \
2860 : return; \
2861 : break; \
2862 : }
2863 :
2864 420 : STORE_CASE(I32StoreMem8, int32_t, int8_t, kWord8);
2865 396 : STORE_CASE(I32StoreMem16, int32_t, int16_t, kWord16);
2866 96 : STORE_CASE(I64StoreMem8, int64_t, int8_t, kWord8);
2867 92 : STORE_CASE(I64StoreMem16, int64_t, int16_t, kWord16);
2868 84 : STORE_CASE(I64StoreMem32, int64_t, int32_t, kWord32);
2869 852096 : STORE_CASE(I32StoreMem, int32_t, int32_t, kWord32);
2870 1708428 : STORE_CASE(I64StoreMem, int64_t, int64_t, kWord64);
2871 1200 : STORE_CASE(F32StoreMem, Float32, uint32_t, kFloat32);
2872 10924 : STORE_CASE(F64StoreMem, Float64, uint64_t, kFloat64);
2873 : #undef STORE_CASE
2874 :
2875 : #define ASMJS_LOAD_CASE(name, ctype, mtype, defval) \
2876 : case kExpr##name: { \
2877 : uint32_t index = Pop().to<uint32_t>(); \
2878 : ctype result; \
2879 : Address addr = BoundsCheckMem<mtype>(0, index); \
2880 : if (!addr) { \
2881 : result = defval; \
2882 : } else { \
2883 : /* TODO(titzer): alignment for asmjs load mem? */ \
2884 : result = static_cast<ctype>(*reinterpret_cast<mtype*>(addr)); \
2885 : } \
2886 : Push(WasmValue(result)); \
2887 : break; \
2888 : }
2889 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8S, int32_t, int8_t, 0);
2890 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8U, int32_t, uint8_t, 0);
2891 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16S, int32_t, int16_t, 0);
2892 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16U, int32_t, uint16_t, 0);
2893 200 : ASMJS_LOAD_CASE(I32AsmjsLoadMem, int32_t, int32_t, 0);
2894 200 : ASMJS_LOAD_CASE(F32AsmjsLoadMem, float, float,
2895 : std::numeric_limits<float>::quiet_NaN());
2896 272 : ASMJS_LOAD_CASE(F64AsmjsLoadMem, double, double,
2897 : std::numeric_limits<double>::quiet_NaN());
2898 : #undef ASMJS_LOAD_CASE
2899 :
2900 : #define ASMJS_STORE_CASE(name, ctype, mtype) \
2901 : case kExpr##name: { \
2902 : WasmValue val = Pop(); \
2903 : uint32_t index = Pop().to<uint32_t>(); \
2904 : Address addr = BoundsCheckMem<mtype>(0, index); \
2905 : if (addr) { \
2906 : *(reinterpret_cast<mtype*>(addr)) = static_cast<mtype>(val.to<ctype>()); \
2907 : } \
2908 : Push(val); \
2909 : break; \
2910 : }
2911 :
2912 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem8, int32_t, int8_t);
2913 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem16, int32_t, int16_t);
2914 932 : ASMJS_STORE_CASE(I32AsmjsStoreMem, int32_t, int32_t);
2915 0 : ASMJS_STORE_CASE(F32AsmjsStoreMem, float, float);
2916 0 : ASMJS_STORE_CASE(F64AsmjsStoreMem, double, double);
2917 : #undef ASMJS_STORE_CASE
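             : // Editorial note (not part of the original source): unlike the wasm
             : // load/store cases above, these asm.js variants do not trap on an
             : // out-of-bounds access; a failed bounds check yields the default value
             : // (0 or NaN) for loads and silently skips the write for stores.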
2918 : case kExprMemoryGrow: {
2919 : MemoryIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2920 48 : code->at(pc));
2921 48 : uint32_t delta_pages = Pop().to<uint32_t>();
2922 : Handle<WasmMemoryObject> memory(instance_object_->memory_object(),
2923 : instance_object_->GetIsolate());
2924 : Isolate* isolate = memory->GetIsolate();
2925 48 : int32_t result = WasmMemoryObject::Grow(isolate, memory, delta_pages);
2926 48 : Push(WasmValue(result));
2927 48 : len = 1 + imm.length;
2928 : // Treat one memory.grow instruction like 1000 other instructions,
2929 : // because it is a really expensive operation.
2930 48 : if (max > 0) max = std::max(0, max - 1000);
2931 : break;
2932 : }
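             : // Editorial note (not part of the original source): following wasm
             : // memory.grow semantics, the pushed result is the previous memory size
             : // in pages on success, or -1 if the memory could not be grown.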
2933 : case kExprMemorySize: {
2934 : MemoryIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2935 0 : code->at(pc));
2936 0 : Push(WasmValue(static_cast<uint32_t>(instance_object_->memory_size() /
2937 : kWasmPageSize)));
2938 0 : len = 1 + imm.length;
2939 : break;
2940 : }
2941 : // We need to treat kExprI32ReinterpretF32 and kExprI64ReinterpretF64
2942 : // specially to guarantee that the reinterpret casts preserve the quiet
2943 : // bit of a NaN on ia32.
2944 : case kExprI32ReinterpretF32: {
2945 : WasmValue val = Pop();
2946 256 : Push(WasmValue(ExecuteI32ReinterpretF32(val)));
2947 : break;
2948 : }
2949 : case kExprI64ReinterpretF64: {
2950 : WasmValue val = Pop();
2951 244 : Push(WasmValue(ExecuteI64ReinterpretF64(val)));
2952 : break;
2953 : }
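             : // Editorial note (not part of the original source): the concern is e.g.
             : // an f32 with the signaling-NaN bit pattern 0x7FA00000, which must
             : // reinterpret to exactly 0x7FA00000 as an i32; if the value were passed
             : // around as a plain C++ float, ia32 FPU code paths could set the quiet
             : // bit and change the observable result.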
2954 : #define SIGN_EXTENSION_CASE(name, wtype, ntype) \
2955 : case kExpr##name: { \
2956 : ntype val = static_cast<ntype>(Pop().to<wtype>()); \
2957 : Push(WasmValue(static_cast<wtype>(val))); \
2958 : break; \
2959 : }
2960 40 : SIGN_EXTENSION_CASE(I32SExtendI8, int32_t, int8_t);
2961 40 : SIGN_EXTENSION_CASE(I32SExtendI16, int32_t, int16_t);
2962 40 : SIGN_EXTENSION_CASE(I64SExtendI8, int64_t, int8_t);
2963 40 : SIGN_EXTENSION_CASE(I64SExtendI16, int64_t, int16_t);
2964 40 : SIGN_EXTENSION_CASE(I64SExtendI32, int64_t, int32_t);
2965 : #undef SIGN_EXTENSION_CASE
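             : // Editorial note (not part of the original source): the double cast above
             : // implements sign extension. E.g. for I32SExtendI8 with input 0x00000080,
             : // the narrowing cast to int8_t gives -128 and the cast back to int32_t
             : // gives 0xFFFFFF80.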
2966 : case kNumericPrefix: {
2967 3596 : ++len;
2968 3596 : if (!ExecuteNumericOp(opcode, &decoder, code, pc, len)) return;
2969 : break;
2970 : }
2971 : case kAtomicPrefix: {
2972 384088 : if (!ExecuteAtomicOp(opcode, &decoder, code, pc, len)) return;
2973 : break;
2974 : }
2975 : case kSimdPrefix: {
2976 2345000 : ++len;
2977 2345000 : if (!ExecuteSimdOp(opcode, &decoder, code, pc, len)) return;
2978 : break;
2979 : }
2980 :
2981 : #define EXECUTE_SIMPLE_BINOP(name, ctype, op) \
2982 : case kExpr##name: { \
2983 : WasmValue rval = Pop(); \
2984 : WasmValue lval = Pop(); \
2985 : auto result = lval.to<ctype>() op rval.to<ctype>(); \
2986 : possible_nondeterminism_ |= has_nondeterminism(result); \
2987 : Push(WasmValue(result)); \
2988 : break; \
2989 : }
2990 4384112 : FOREACH_SIMPLE_BINOP(EXECUTE_SIMPLE_BINOP)
2991 : #undef EXECUTE_SIMPLE_BINOP
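             : // Editorial note (not part of the original source): the
             : // possible_nondeterminism_ flag presumably records NaN-producing float
             : // results, since wasm does not fully specify NaN payload bits and the
             : // interpreter's result may differ from compiled code.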
2992 :
2993 : #define EXECUTE_OTHER_BINOP(name, ctype) \
2994 : case kExpr##name: { \
2995 : TrapReason trap = kTrapCount; \
2996 : ctype rval = Pop().to<ctype>(); \
2997 : ctype lval = Pop().to<ctype>(); \
2998 : auto result = Execute##name(lval, rval, &trap); \
2999 : possible_nondeterminism_ |= has_nondeterminism(result); \
3000 : if (trap != kTrapCount) return DoTrap(trap, pc); \
3001 : Push(WasmValue(result)); \
3002 : break; \
3003 : }
3004 3607924 : FOREACH_OTHER_BINOP(EXECUTE_OTHER_BINOP)
3005 : #undef EXECUTE_OTHER_BINOP
3006 :
3007 : #define EXECUTE_UNOP(name, ctype, exec_fn) \
3008 : case kExpr##name: { \
3009 : TrapReason trap = kTrapCount; \
3010 : ctype val = Pop().to<ctype>(); \
3011 : auto result = exec_fn(val, &trap); \
3012 : possible_nondeterminism_ |= has_nondeterminism(result); \
3013 : if (trap != kTrapCount) return DoTrap(trap, pc); \
3014 : Push(WasmValue(result)); \
3015 : break; \
3016 : }
3017 :
3018 : #define EXECUTE_OTHER_UNOP(name, ctype) EXECUTE_UNOP(name, ctype, Execute##name)
3019 395188 : FOREACH_OTHER_UNOP(EXECUTE_OTHER_UNOP)
3020 : #undef EXECUTE_OTHER_UNOP
3021 :
3022 : #define EXECUTE_I32CONV_FLOATOP(name, out_type, in_type) \
3023 : EXECUTE_UNOP(name, in_type, ExecuteConvert<out_type>)
3024 3740 : FOREACH_I32CONV_FLOATOP(EXECUTE_I32CONV_FLOATOP)
3025 : #undef EXECUTE_I32CONV_FLOATOP
3026 : #undef EXECUTE_UNOP
3027 :
3028 : default:
3029 0 : FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
3030 0 : OpcodeName(code->start[pc]));
3031 : UNREACHABLE();
3032 : }
3033 :
3034 : #ifdef DEBUG
3035 : if (!WasmOpcodes::IsControlOpcode(opcode)) {
3036 : DCHECK_EQ(expected_new_stack_height, StackHeight());
3037 : }
3038 : #endif
3039 :
3040 43135955 : pc += len;
3041 43135955 : if (pc == limit) {
3042 : // Fell off the end of the code; do an implicit return.
3043 : TRACE("@%-3zu: ImplicitReturn\n", pc);
3044 4585741 : size_t arity = code->function->sig->return_count();
3045 : DCHECK_EQ(StackHeight() - arity, frames_.back().llimit());
3046 4585741 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
3047 110560 : PAUSE_IF_BREAK_FLAG(AfterReturn);
3048 : }
3049 : #undef PAUSE_IF_BREAK_FLAG
3050 : }
3051 :
3052 5429 : state_ = WasmInterpreter::PAUSED;
3053 8465 : break_pc_ = hit_break ? pc : kInvalidPc;
3054 5429 : CommitPc(pc);
3055 : }
3056 :
3057 : WasmValue Pop() {
3058 : DCHECK_GT(frames_.size(), 0);
3059 : DCHECK_GT(StackHeight(), frames_.back().llimit()); // can't pop into locals
3060 18502916 : return *--sp_;
3061 : }
3062 :
3063 : void PopN(int n) {
3064 : DCHECK_GE(StackHeight(), n);
3065 : DCHECK_GT(frames_.size(), 0);
3066 : // Check that we don't pop into locals.
3067 : DCHECK_GE(StackHeight() - n, frames_.back().llimit());
3068 12 : sp_ -= n;
3069 : }
3070 :
3071 : WasmValue PopArity(size_t arity) {
3072 : if (arity == 0) return WasmValue();
3073 : CHECK_EQ(1, arity);
3074 : return Pop();
3075 : }
3076 :
3077 : void Push(WasmValue val) {
3078 : DCHECK_NE(kWasmStmt, val.type());
3079 : DCHECK_LE(1, stack_limit_ - sp_);
3080 33982389 : *sp_++ = val;
3081 : }
3082 :
3083 : void Push(WasmValue* vals, size_t arity) {
3084 : DCHECK_LE(arity, stack_limit_ - sp_);
3085 4735487 : for (WasmValue *val = vals, *end = vals + arity; val != end; ++val) {
3086 : DCHECK_NE(kWasmStmt, val->type());
3087 : }
3088 4735487 : if (arity > 0) {
3089 3962314 : memcpy(sp_, vals, arity * sizeof(*sp_));
3090 : }
3091 4735487 : sp_ += arity;
3092 : }
3093 :
3094 10062514 : void EnsureStackSpace(size_t size) {
3095 10062514 : if (V8_LIKELY(static_cast<size_t>(stack_limit_ - sp_) >= size)) return;
3096 366554 : size_t old_size = stack_limit_ - stack_.get();
3097 : size_t requested_size =
3098 366554 : base::bits::RoundUpToPowerOfTwo64((sp_ - stack_.get()) + size);
3099 366554 : size_t new_size = Max(size_t{8}, Max(2 * old_size, requested_size));
3100 4949546 : std::unique_ptr<WasmValue[]> new_stack(new WasmValue[new_size]);
3101 366554 : if (old_size > 0) {
3102 1116 : memcpy(new_stack.get(), stack_.get(), old_size * sizeof(*sp_));
3103 : }
3104 733108 : sp_ = new_stack.get() + (sp_ - stack_.get());
3105 : stack_ = std::move(new_stack);
3106 366554 : stack_limit_ = stack_.get() + new_size;
3107 : }
3108 :
3109 10724092 : sp_t StackHeight() { return sp_ - stack_.get(); }
3110 :
3111 : void TraceValueStack() {
3112 : #ifdef DEBUG
3113 : if (!FLAG_trace_wasm_interpreter) return;
3114 : Frame* top = frames_.size() > 0 ? &frames_.back() : nullptr;
3115 : sp_t sp = top ? top->sp : 0;
3116 : sp_t plimit = top ? top->plimit() : 0;
3117 : sp_t llimit = top ? top->llimit() : 0;
3118 : for (size_t i = sp; i < StackHeight(); ++i) {
3119 : if (i < plimit)
3120 : PrintF(" p%zu:", i);
3121 : else if (i < llimit)
3122 : PrintF(" l%zu:", i);
3123 : else
3124 : PrintF(" s%zu:", i);
3125 : WasmValue val = GetStackValue(i);
3126 : switch (val.type()) {
3127 : case kWasmI32:
3128 : PrintF("i32:%d", val.to<int32_t>());
3129 : break;
3130 : case kWasmI64:
3131 : PrintF("i64:%" PRId64 "", val.to<int64_t>());
3132 : break;
3133 : case kWasmF32:
3134 : PrintF("f32:%f", val.to<float>());
3135 : break;
3136 : case kWasmF64:
3137 : PrintF("f64:%lf", val.to<double>());
3138 : break;
3139 : case kWasmStmt:
3140 : PrintF("void");
3141 : break;
3142 : default:
3143 : UNREACHABLE();
3144 : break;
3145 : }
3146 : }
3147 : #endif // DEBUG
3148 : }
3149 :
3150 : ExternalCallResult TryHandleException(Isolate* isolate) {
3151 : DCHECK(isolate->has_pending_exception()); // Assume exceptional return.
3152 989 : if (HandleException(isolate) == WasmInterpreter::Thread::UNWOUND) {
3153 : return {ExternalCallResult::EXTERNAL_UNWOUND};
3154 : }
3155 : return {ExternalCallResult::EXTERNAL_CAUGHT};
3156 : }
3157 :
3158 6681 : ExternalCallResult CallExternalWasmFunction(Isolate* isolate,
3159 : Handle<Object> object_ref,
3160 : const WasmCode* code,
3161 : FunctionSig* sig) {
3162 6681 : int num_args = static_cast<int>(sig->parameter_count());
3163 6681 : WasmFeatures enabled_features = WasmFeaturesFromIsolate(isolate);
3164 :
3165 13258 : if (code->kind() == WasmCode::kWasmToJsWrapper &&
3166 6577 : !IsJSCompatibleSignature(sig, enabled_features.bigint)) {
3167 20 : sp_ -= num_args; // Pop arguments before throwing.
3168 40 : isolate->Throw(*isolate->factory()->NewTypeError(
3169 40 : MessageTemplate::kWasmTrapTypeError));
3170 : return TryHandleException(isolate);
3171 : }
3172 :
3173 : Handle<WasmDebugInfo> debug_info(instance_object_->debug_info(), isolate);
3174 : Handle<JSFunction> wasm_entry =
3175 6661 : WasmDebugInfo::GetCWasmEntry(debug_info, sig);
3176 :
3177 : TRACE(" => Calling external wasm function\n");
3178 :
3179 : // Copy the arguments to one buffer.
3180 : // TODO(clemensh): Introduce a helper for all argument buffer
3181 : // con-/destruction.
3182 6661 : std::vector<uint8_t> arg_buffer(num_args * 8);
3183 : size_t offset = 0;
3184 6661 : WasmValue* wasm_args = sp_ - num_args;
3185 26591 : for (int i = 0; i < num_args; ++i) {
3186 19930 : int param_size = ValueTypes::ElementSizeInBytes(sig->GetParam(i));
3187 9965 : if (arg_buffer.size() < offset + param_size) {
3188 0 : arg_buffer.resize(std::max(2 * arg_buffer.size(), offset + param_size));
3189 : }
3190 9965 : Address address = reinterpret_cast<Address>(arg_buffer.data()) + offset;
3191 9965 : switch (sig->GetParam(i)) {
3192 : case kWasmI32:
3193 6005 : WriteUnalignedValue(address, wasm_args[i].to<uint32_t>());
3194 : break;
3195 : case kWasmI64:
3196 0 : WriteUnalignedValue(address, wasm_args[i].to<uint64_t>());
3197 : break;
3198 : case kWasmF32:
3199 0 : WriteUnalignedValue(address, wasm_args[i].to<float>());
3200 : break;
3201 : case kWasmF64:
3202 3960 : WriteUnalignedValue(address, wasm_args[i].to<double>());
3203 : break;
3204 : default:
3205 0 : UNIMPLEMENTED();
3206 : }
3207 : offset += param_size;
3208 : }
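             : // Editorial note (not part of the original source): arguments are packed
             : // back-to-back at their wasm value sizes. E.g. for a signature
             : // (i32, f64) -> i32 the buffer holds 4 bytes of i32 at offset 0 and
             : // 8 bytes of f64 at offset 4; the return value is later read back from
             : // offset 0 of the same buffer.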
3209 :
3210 : // Ensure that there is enough space in the arg_buffer to hold the return
3211 : // value(s).
3212 : size_t return_size = 0;
3213 17989 : for (ValueType t : sig->returns()) {
3214 5664 : return_size += ValueTypes::ElementSizeInBytes(t);
3215 : }
3216 6661 : if (arg_buffer.size() < return_size) {
3217 64 : arg_buffer.resize(return_size);
3218 : }
3219 :
3220 : // Wrap the arg_buffer and the code target data pointers in handles. As
3221 : // these are aligned pointers, they will look like Smis to the GC.
3222 : Handle<Object> arg_buffer_obj(
3223 6661 : Object(reinterpret_cast<Address>(arg_buffer.data())), isolate);
3224 : DCHECK(!arg_buffer_obj->IsHeapObject());
3225 : Handle<Object> code_entry_obj(Object(code->instruction_start()), isolate);
3226 : DCHECK(!code_entry_obj->IsHeapObject());
3227 :
3228 : static_assert(compiler::CWasmEntryParameters::kNumParameters == 3,
3229 : "code below needs adaptation");
3230 46627 : Handle<Object> args[compiler::CWasmEntryParameters::kNumParameters];
3231 6661 : args[compiler::CWasmEntryParameters::kCodeEntry] = code_entry_obj;
3232 6661 : args[compiler::CWasmEntryParameters::kObjectRef] = object_ref;
3233 6661 : args[compiler::CWasmEntryParameters::kArgumentsBuffer] = arg_buffer_obj;
3234 :
3235 : Handle<Object> receiver = isolate->factory()->undefined_value();
3236 : trap_handler::SetThreadInWasm();
3237 : MaybeHandle<Object> maybe_retval =
3238 6661 : Execution::Call(isolate, wasm_entry, receiver, arraysize(args), args);
3239 : TRACE(" => External wasm function returned%s\n",
3240 : maybe_retval.is_null() ? " with exception" : "");
3241 :
3242 : // Pop arguments off the stack.
3243 6661 : sp_ -= num_args;
3244 :
3245 6661 : if (maybe_retval.is_null()) {
3246 : // JSEntry may throw a stack overflow before we actually get to wasm code
3247 : // or back to the interpreter, meaning the thread-in-wasm flag won't be
3248 : // cleared.
3249 969 : if (trap_handler::IsThreadInWasm()) {
3250 : trap_handler::ClearThreadInWasm();
3251 : }
3252 : return TryHandleException(isolate);
3253 : }
3254 :
3255 : trap_handler::ClearThreadInWasm();
3256 :
3257 : // Push return values.
3258 5692 : if (sig->return_count() > 0) {
3259 : // TODO(wasm): Handle multiple returns.
3260 : DCHECK_EQ(1, sig->return_count());
3261 : Address address = reinterpret_cast<Address>(arg_buffer.data());
3262 5588 : switch (sig->GetReturn()) {
3263 : case kWasmI32:
3264 4924 : Push(WasmValue(ReadUnalignedValue<uint32_t>(address)));
3265 4924 : break;
3266 : case kWasmI64:
3267 8 : Push(WasmValue(ReadUnalignedValue<uint64_t>(address)));
3268 8 : break;
3269 : case kWasmF32:
3270 8 : Push(WasmValue(ReadUnalignedValue<float>(address)));
3271 8 : break;
3272 : case kWasmF64:
3273 648 : Push(WasmValue(ReadUnalignedValue<double>(address)));
3274 648 : break;
3275 : default:
3276 0 : UNIMPLEMENTED();
3277 : }
3278 : }
3279 5692 : return {ExternalCallResult::EXTERNAL_RETURNED};
3280 : }
3281 :
3282 47057 : static WasmCode* GetTargetCode(WasmCodeManager* code_manager,
3283 : Address target) {
3284 47057 : NativeModule* native_module = code_manager->LookupNativeModule(target);
3285 47057 : if (native_module->is_jump_table_slot(target)) {
3286 : uint32_t func_index =
3287 40480 : native_module->GetFunctionIndexFromJumpTableSlot(target);
3288 40480 : return native_module->code(func_index);
3289 : }
3290 6577 : WasmCode* code = native_module->Lookup(target);
3291 : DCHECK_EQ(code->instruction_start(), target);
3292 6577 : return code;
3293 : }
3294 :
3295 6625 : ExternalCallResult CallImportedFunction(uint32_t function_index) {
3296 : DCHECK_GT(module()->num_imported_functions, function_index);
3297 : // Use a new HandleScope to avoid leaking / accumulating handles in the
3298 : // outer scope.
3299 : Isolate* isolate = instance_object_->GetIsolate();
3300 : HandleScope handle_scope(isolate);
3301 :
3302 6625 : ImportedFunctionEntry entry(instance_object_, function_index);
3303 6625 : Handle<Object> object_ref(entry.object_ref(), isolate);
3304 : WasmCode* code =
3305 13250 : GetTargetCode(isolate->wasm_engine()->code_manager(), entry.target());
3306 13250 : FunctionSig* sig = module()->functions[function_index].sig;
3307 13250 : return CallExternalWasmFunction(isolate, object_ref, code, sig);
3308 : }
3309 :
3310 40504 : ExternalCallResult CallIndirectFunction(uint32_t table_index,
3311 : uint32_t entry_index,
3312 : uint32_t sig_index) {
3313 : Isolate* isolate = instance_object_->GetIsolate();
3314 81008 : uint32_t expected_sig_id = module()->signature_ids[sig_index];
3315 : DCHECK_EQ(expected_sig_id,
3316 : module()->signature_map.Find(*module()->signatures[sig_index]));
3317 :
3318 : // The function table is stored in the instance.
3319 : // TODO(wasm): the wasm interpreter currently supports only one table.
3320 40504 : CHECK_EQ(0, table_index);
3321 : // Bounds check against table size.
3322 40504 : if (entry_index >= instance_object_->indirect_function_table_size()) {
3323 44 : return {ExternalCallResult::INVALID_FUNC};
3324 : }
3325 :
3326 40460 : IndirectFunctionTableEntry entry(instance_object_, entry_index);
3327 : // Signature check.
3328 40460 : if (entry.sig_id() != static_cast<int32_t>(expected_sig_id)) {
3329 28 : return {ExternalCallResult::SIGNATURE_MISMATCH};
3330 : }
3331 :
3332 : HandleScope scope(isolate);
3333 40432 : FunctionSig* signature = module()->signatures[sig_index];
3334 40432 : Handle<Object> object_ref = handle(entry.object_ref(), isolate);
3335 : WasmCode* code =
3336 80864 : GetTargetCode(isolate->wasm_engine()->code_manager(), entry.target());
3337 :
3338 80832 : if (!object_ref->IsWasmInstanceObject() || /* call to an import */
3339 : !instance_object_.is_identical_to(object_ref) /* cross-instance */) {
3340 56 : return CallExternalWasmFunction(isolate, object_ref, code, signature);
3341 : }
3342 :
3343 : DCHECK(code->kind() == WasmCode::kInterpreterEntry ||
3344 : code->kind() == WasmCode::kFunction);
3345 40376 : return {ExternalCallResult::INTERNAL, codemap()->GetCode(code->index())};
3346 : }
3347 :
3348 : inline Activation current_activation() {
3349 9505222 : return activations_.empty() ? Activation(0, 0) : activations_.back();
3350 : }
3351 : };
3352 :
3353 : class InterpretedFrameImpl {
3354 : public:
3355 : InterpretedFrameImpl(ThreadImpl* thread, int index)
3356 678792 : : thread_(thread), index_(index) {
3357 : DCHECK_LE(0, index);
3358 : }
3359 :
3360 680044 : const WasmFunction* function() const { return frame()->code->function; }
3361 :
3362 : int pc() const {
3363 : DCHECK_LE(0, frame()->pc);
3364 : DCHECK_GE(kMaxInt, frame()->pc);
3365 677040 : return static_cast<int>(frame()->pc);
3366 : }
3367 :
3368 : int GetParameterCount() const {
3369 : DCHECK_GE(kMaxInt, function()->sig->parameter_count());
3370 460 : return static_cast<int>(function()->sig->parameter_count());
3371 : }
3372 :
3373 : int GetLocalCount() const {
3374 1952 : size_t num_locals = function()->sig->parameter_count() +
3375 1952 : frame()->code->locals.type_list.size();
3376 : DCHECK_GE(kMaxInt, num_locals);
3377 1952 : return static_cast<int>(num_locals);
3378 : }
3379 :
3380 700 : int GetStackHeight() const {
3381 : bool is_top_frame =
3382 700 : static_cast<size_t>(index_) + 1 == thread_->frames_.size();
3383 : size_t stack_limit =
3384 828 : is_top_frame ? thread_->StackHeight() : thread_->frames_[index_ + 1].sp;
3385 : DCHECK_LE(frame()->sp, stack_limit);
3386 700 : size_t frame_size = stack_limit - frame()->sp;
3387 : DCHECK_LE(GetLocalCount(), frame_size);
3388 1400 : return static_cast<int>(frame_size) - GetLocalCount();
3389 : }
3390 :
3391 : WasmValue GetLocalValue(int index) const {
3392 : DCHECK_LE(0, index);
3393 : DCHECK_GT(GetLocalCount(), index);
3394 944 : return thread_->GetStackValue(static_cast<int>(frame()->sp) + index);
3395 : }
3396 :
3397 264 : WasmValue GetStackValue(int index) const {
3398 : DCHECK_LE(0, index);
3399 : // Index must be within the number of stack values of this frame.
3400 : DCHECK_GT(GetStackHeight(), index);
3401 528 : return thread_->GetStackValue(static_cast<int>(frame()->sp) +
3402 528 : GetLocalCount() + index);
3403 : }
3404 :
3405 : private:
3406 : ThreadImpl* thread_;
3407 : int index_;
3408 :
3409 : ThreadImpl::Frame* frame() const {
3410 : DCHECK_GT(thread_->frames_.size(), index_);
3411 1357328 : return &thread_->frames_[index_];
3412 : }
3413 : };
3414 :
3415 : namespace {
3416 :
3417 : // Converters between WasmInterpreter::Thread and WasmInterpreter::ThreadImpl.
3418 : // Thread* is the public interface, without knowledge of the object layout.
3419 : // This cast is potentially risky, but as long as we always cast it back before
3420 : // accessing any data, it should be fine. UBSan does not complain about it.
3421 : WasmInterpreter::Thread* ToThread(ThreadImpl* impl) {
3422 : return reinterpret_cast<WasmInterpreter::Thread*>(impl);
3423 : }
3424 : ThreadImpl* ToImpl(WasmInterpreter::Thread* thread) {
3425 : return reinterpret_cast<ThreadImpl*>(thread);
3426 : }
3427 :
3428 : // Same conversion for InterpretedFrame and InterpretedFrameImpl.
3429 : InterpretedFrame* ToFrame(InterpretedFrameImpl* impl) {
3430 : return reinterpret_cast<InterpretedFrame*>(impl);
3431 : }
3432 : const InterpretedFrameImpl* ToImpl(const InterpretedFrame* frame) {
3433 : return reinterpret_cast<const InterpretedFrameImpl*>(frame);
3434 : }
3435 :
3436 : } // namespace
3437 :
3438 : //============================================================================
3439 : // Implementation of the pimpl idiom for WasmInterpreter::Thread.
3440 : // Instead of placing a pointer to the ThreadImpl inside the Thread object,
3441 : // we just reinterpret_cast them. ThreadImpls are only allocated inside this
3442 : // translation unit anyway.
3443 : //============================================================================
3444 4742261 : WasmInterpreter::State WasmInterpreter::Thread::state() {
3445 4742261 : return ToImpl(this)->state();
3446 : }
3447 4735487 : void WasmInterpreter::Thread::InitFrame(const WasmFunction* function,
3448 : WasmValue* args) {
3449 4735487 : ToImpl(this)->InitFrame(function, args);
3450 4735487 : }
3451 4740931 : WasmInterpreter::State WasmInterpreter::Thread::Run(int num_steps) {
3452 4740931 : return ToImpl(this)->Run(num_steps);
3453 : }
3454 0 : void WasmInterpreter::Thread::Pause() { return ToImpl(this)->Pause(); }
3455 9402212 : void WasmInterpreter::Thread::Reset() { return ToImpl(this)->Reset(); }
3456 : WasmInterpreter::Thread::ExceptionHandlingResult
3457 788 : WasmInterpreter::Thread::RaiseException(Isolate* isolate,
3458 : Handle<Object> exception) {
3459 788 : return ToImpl(this)->RaiseException(isolate, exception);
3460 : }
3461 2784 : pc_t WasmInterpreter::Thread::GetBreakpointPc() {
3462 2784 : return ToImpl(this)->GetBreakpointPc();
3463 : }
3464 6736 : int WasmInterpreter::Thread::GetFrameCount() {
3465 6736 : return ToImpl(this)->GetFrameCount();
3466 : }
3467 678792 : WasmInterpreter::FramePtr WasmInterpreter::Thread::GetFrame(int index) {
3468 : DCHECK_LE(0, index);
3469 : DCHECK_GT(GetFrameCount(), index);
3470 1357584 : return FramePtr(ToFrame(new InterpretedFrameImpl(ToImpl(this), index)));
3471 : }
3472 4697217 : WasmValue WasmInterpreter::Thread::GetReturnValue(int index) {
3473 9394434 : return ToImpl(this)->GetReturnValue(index);
3474 : }
3475 780 : TrapReason WasmInterpreter::Thread::GetTrapReason() {
3476 780 : return ToImpl(this)->GetTrapReason();
3477 : }
3478 4662037 : bool WasmInterpreter::Thread::PossibleNondeterminism() {
3479 4662037 : return ToImpl(this)->PossibleNondeterminism();
3480 : }
3481 4752188 : uint64_t WasmInterpreter::Thread::NumInterpretedCalls() {
3482 4752188 : return ToImpl(this)->NumInterpretedCalls();
3483 : }
3484 40 : void WasmInterpreter::Thread::AddBreakFlags(uint8_t flags) {
3485 : ToImpl(this)->AddBreakFlags(flags);
3486 40 : }
3487 0 : void WasmInterpreter::Thread::ClearBreakFlags() {
3488 : ToImpl(this)->ClearBreakFlags();
3489 0 : }
3490 24 : uint32_t WasmInterpreter::Thread::NumActivations() {
3491 24 : return ToImpl(this)->NumActivations();
3492 : }
3493 34383 : uint32_t WasmInterpreter::Thread::StartActivation() {
3494 34383 : return ToImpl(this)->StartActivation();
3495 : }
3496 34381 : void WasmInterpreter::Thread::FinishActivation(uint32_t id) {
3497 : ToImpl(this)->FinishActivation(id);
3498 34381 : }
3499 5772 : uint32_t WasmInterpreter::Thread::ActivationFrameBase(uint32_t id) {
3500 5772 : return ToImpl(this)->ActivationFrameBase(id);
3501 : }
3502 :
3503 : //============================================================================
3504 : // The implementation details of the interpreter.
3505 : //============================================================================
3506 365686 : class WasmInterpreterInternals : public ZoneObject {
3507 : public:
3508 : // Create a copy of the module bytes for the interpreter, since the passed
3509 : // pointer might be invalidated after constructing the interpreter.
3510 : const ZoneVector<uint8_t> module_bytes_;
3511 : CodeMap codemap_;
3512 : ZoneVector<ThreadImpl> threads_;
3513 :
3514 365686 : WasmInterpreterInternals(Zone* zone, const WasmModule* module,
3515 : const ModuleWireBytes& wire_bytes,
3516 : Handle<WasmInstanceObject> instance_object)
3517 : : module_bytes_(wire_bytes.start(), wire_bytes.end(), zone),
3518 : codemap_(module, module_bytes_.data(), zone),
3519 731372 : threads_(zone) {
3520 365686 : threads_.emplace_back(zone, &codemap_, instance_object);
3521 365686 : }
3522 : };
3523 :
3524 : namespace {
3525 333740 : void NopFinalizer(const v8::WeakCallbackInfo<void>& data) {
3526 : Address* global_handle_location =
3527 : reinterpret_cast<Address*>(data.GetParameter());
3528 333740 : GlobalHandles::Destroy(global_handle_location);
3529 333740 : }
3530 :
3531 365686 : Handle<WasmInstanceObject> MakeWeak(
3532 : Isolate* isolate, Handle<WasmInstanceObject> instance_object) {
3533 : Handle<WasmInstanceObject> weak_instance =
3534 : isolate->global_handles()->Create<WasmInstanceObject>(*instance_object);
3535 : Address* global_handle_location = weak_instance.location();
3536 : GlobalHandles::MakeWeak(global_handle_location, global_handle_location,
3537 365686 : &NopFinalizer, v8::WeakCallbackType::kParameter);
3538 365686 : return weak_instance;
3539 : }
3540 : } // namespace
3541 :
3542 : //============================================================================
3543 : // Implementation of the public interface of the interpreter.
3544 : //============================================================================
3545 365686 : WasmInterpreter::WasmInterpreter(Isolate* isolate, const WasmModule* module,
3546 : const ModuleWireBytes& wire_bytes,
3547 : Handle<WasmInstanceObject> instance_object)
3548 : : zone_(isolate->allocator(), ZONE_NAME),
3549 : internals_(new (&zone_) WasmInterpreterInternals(
3550 731372 : &zone_, module, wire_bytes, MakeWeak(isolate, instance_object))) {}
3551 :
3552 731372 : WasmInterpreter::~WasmInterpreter() { internals_->~WasmInterpreterInternals(); }
3553 :
3554 0 : void WasmInterpreter::Run() { internals_->threads_[0].Run(); }
3555 :
3556 0 : void WasmInterpreter::Pause() { internals_->threads_[0].Pause(); }
3557 :
3558 1548 : bool WasmInterpreter::SetBreakpoint(const WasmFunction* function, pc_t pc,
3559 : bool enabled) {
3560 1548 : InterpreterCode* code = internals_->codemap_.GetCode(function);
3561 1548 : size_t size = static_cast<size_t>(code->end - code->start);
3562 : // Check bounds for {pc}.
3563 1548 : if (pc < code->locals.encoded_size || pc >= size) return false;
3564 : // Make a copy of the code before enabling a breakpoint.
3565 1548 : if (enabled && code->orig_start == code->start) {
3566 64 : code->start = reinterpret_cast<byte*>(zone_.New(size));
3567 64 : memcpy(code->start, code->orig_start, size);
3568 64 : code->end = code->start + size;
3569 : }
3570 1548 : bool prev = code->start[pc] == kInternalBreakpoint;
3571 1548 : if (enabled) {
3572 852 : code->start[pc] = kInternalBreakpoint;
3573 : } else {
3574 696 : code->start[pc] = code->orig_start[pc];
3575 : }
3576 : return prev;
3577 : }
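             : // Usage sketch (editorial, not part of the original source; the names
             : // {interp}, {func} and {offset} are illustrative):
             : //   bool was_set = interp->SetBreakpoint(func, offset, true);  // patch in kInternalBreakpoint
             : //   interp->SetBreakpoint(func, offset, false);                // restore the original byte
             : // The return value reports whether a breakpoint was already set at {pc};
             : // the first call on a function copies its code so orig_start stays intact.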
3578 :
3579 0 : bool WasmInterpreter::GetBreakpoint(const WasmFunction* function, pc_t pc) {
3580 0 : InterpreterCode* code = internals_->codemap_.GetCode(function);
3581 0 : size_t size = static_cast<size_t>(code->end - code->start);
3582 : // Check bounds for {pc}.
3583 0 : if (pc < code->locals.encoded_size || pc >= size) return false;
3584 : // Check if a breakpoint is present at that place in the code.
3585 0 : return code->start[pc] == kInternalBreakpoint;
3586 : }
3587 :
3588 0 : bool WasmInterpreter::SetTracing(const WasmFunction* function, bool enabled) {
3589 0 : UNIMPLEMENTED();
3590 : return false;
3591 : }
3592 :
3593 0 : int WasmInterpreter::GetThreadCount() {
3594 0 : return 1; // only one thread for now.
3595 : }
3596 :
3597 4861913 : WasmInterpreter::Thread* WasmInterpreter::GetThread(int id) {
3598 4861913 : CHECK_EQ(0, id); // only one thread for now.
3599 9723826 : return ToThread(&internals_->threads_[id]);
3600 : }
3601 :
3602 367408 : void WasmInterpreter::AddFunctionForTesting(const WasmFunction* function) {
3603 367408 : internals_->codemap_.AddFunction(function, nullptr, nullptr);
3604 367408 : }
3605 :
3606 365928 : void WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function,
3607 : const byte* start,
3608 : const byte* end) {
3609 365928 : internals_->codemap_.SetFunctionCode(function, start, end);
3610 365928 : }
3611 :
3612 30 : ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
3613 : Zone* zone, const WasmModule* module, const byte* start, const byte* end) {
3614 : // Create some dummy structures, to avoid special-casing the implementation
3615 : // just for testing.
3616 30 : FunctionSig sig(0, 0, nullptr);
3617 30 : WasmFunction function{&sig, 0, 0, {0, 0}, false, false};
3618 : InterpreterCode code{
3619 60 : &function, BodyLocalDecls(zone), start, end, nullptr, nullptr, nullptr};
3620 :
3621 : // Now compute and return the control transfers.
3622 30 : SideTable side_table(zone, module, &code);
3623 30 : return side_table.map_;
3624 : }
3625 :
3626 : //============================================================================
3627 : // Implementation of the frame inspection interface.
3628 : //============================================================================
3629 677632 : const WasmFunction* InterpretedFrame::function() const {
3630 677632 : return ToImpl(this)->function();
3631 : }
3632 1354080 : int InterpretedFrame::pc() const { return ToImpl(this)->pc(); }
3633 460 : int InterpretedFrame::GetParameterCount() const {
3634 460 : return ToImpl(this)->GetParameterCount();
3635 : }
3636 988 : int InterpretedFrame::GetLocalCount() const {
3637 988 : return ToImpl(this)->GetLocalCount();
3638 : }
3639 700 : int InterpretedFrame::GetStackHeight() const {
3640 700 : return ToImpl(this)->GetStackHeight();
3641 : }
3642 944 : WasmValue InterpretedFrame::GetLocalValue(int index) const {
3643 944 : return ToImpl(this)->GetLocalValue(index);
3644 : }
3645 264 : WasmValue InterpretedFrame::GetStackValue(int index) const {
3646 264 : return ToImpl(this)->GetStackValue(index);
3647 : }
3648 678792 : void InterpretedFrameDeleter::operator()(InterpretedFrame* ptr) {
3649 678792 : delete ToImpl(ptr);
3650 678792 : }
3651 :
3652 : #undef TRACE
3653 : #undef LANE
3654 : #undef FOREACH_INTERNAL_OPCODE
3655 : #undef WASM_CTYPES
3656 : #undef FOREACH_SIMPLE_BINOP
3657 : #undef FOREACH_OTHER_BINOP
3658 : #undef FOREACH_I32CONV_FLOATOP
3659 : #undef FOREACH_OTHER_UNOP
3660 :
3661 : } // namespace wasm
3662 : } // namespace internal
3663 120216 : } // namespace v8
|