Line | Line data (execution count) | Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include <atomic>
6 : #include <type_traits>
7 :
8 : #include "src/wasm/wasm-interpreter.h"
9 :
10 : #include "src/assembler-inl.h"
11 : #include "src/base/overflowing-math.h"
12 : #include "src/boxed-float.h"
13 : #include "src/compiler/wasm-compiler.h"
14 : #include "src/conversions.h"
15 : #include "src/identity-map.h"
16 : #include "src/objects-inl.h"
17 : #include "src/trap-handler/trap-handler.h"
18 : #include "src/utils.h"
19 : #include "src/wasm/decoder.h"
20 : #include "src/wasm/function-body-decoder-impl.h"
21 : #include "src/wasm/function-body-decoder.h"
22 : #include "src/wasm/memory-tracing.h"
23 : #include "src/wasm/wasm-engine.h"
24 : #include "src/wasm/wasm-external-refs.h"
25 : #include "src/wasm/wasm-limits.h"
26 : #include "src/wasm/wasm-module.h"
27 : #include "src/wasm/wasm-objects-inl.h"
28 :
29 : #include "src/zone/accounting-allocator.h"
30 : #include "src/zone/zone-containers.h"
31 :
32 : namespace v8 {
33 : namespace internal {
34 : namespace wasm {
35 :
36 : #define TRACE(...) \
37 : do { \
38 : if (FLAG_trace_wasm_interpreter) PrintF(__VA_ARGS__); \
39 : } while (false)
40 :
41 : #if V8_TARGET_BIG_ENDIAN
42 : #define LANE(i, type) ((sizeof(type.val) / sizeof(type.val[0])) - (i)-1)
43 : #else
44 : #define LANE(i, type) (i)
45 : #endif
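// Note: LANE() remaps a logical SIMD lane index to the physical array index.
// As an illustration with a stand-in 4-lane value (hypothetical type, not the
// real Simd128):
//   struct DemoInt4 { int32_t val[4]; } v;
// a big-endian target stores lane 0 at the highest index, so LANE(0, v) == 3
// and LANE(3, v) == 0, while on little-endian targets LANE(i, v) == i.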
46 :
47 : #define FOREACH_INTERNAL_OPCODE(V) V(Breakpoint, 0xFF)
48 :
49 : #define WASM_CTYPES(V) \
50 : V(I32, int32_t) V(I64, int64_t) V(F32, float) V(F64, double) V(S128, Simd128)
51 :
52 : #define FOREACH_SIMPLE_BINOP(V) \
53 : V(I32Add, uint32_t, +) \
54 : V(I32Sub, uint32_t, -) \
55 : V(I32Mul, uint32_t, *) \
56 : V(I32And, uint32_t, &) \
57 : V(I32Ior, uint32_t, |) \
58 : V(I32Xor, uint32_t, ^) \
59 : V(I32Eq, uint32_t, ==) \
60 : V(I32Ne, uint32_t, !=) \
61 : V(I32LtU, uint32_t, <) \
62 : V(I32LeU, uint32_t, <=) \
63 : V(I32GtU, uint32_t, >) \
64 : V(I32GeU, uint32_t, >=) \
65 : V(I32LtS, int32_t, <) \
66 : V(I32LeS, int32_t, <=) \
67 : V(I32GtS, int32_t, >) \
68 : V(I32GeS, int32_t, >=) \
69 : V(I64Add, uint64_t, +) \
70 : V(I64Sub, uint64_t, -) \
71 : V(I64Mul, uint64_t, *) \
72 : V(I64And, uint64_t, &) \
73 : V(I64Ior, uint64_t, |) \
74 : V(I64Xor, uint64_t, ^) \
75 : V(I64Eq, uint64_t, ==) \
76 : V(I64Ne, uint64_t, !=) \
77 : V(I64LtU, uint64_t, <) \
78 : V(I64LeU, uint64_t, <=) \
79 : V(I64GtU, uint64_t, >) \
80 : V(I64GeU, uint64_t, >=) \
81 : V(I64LtS, int64_t, <) \
82 : V(I64LeS, int64_t, <=) \
83 : V(I64GtS, int64_t, >) \
84 : V(I64GeS, int64_t, >=) \
85 : V(F32Add, float, +) \
86 : V(F32Sub, float, -) \
87 : V(F32Eq, float, ==) \
88 : V(F32Ne, float, !=) \
89 : V(F32Lt, float, <) \
90 : V(F32Le, float, <=) \
91 : V(F32Gt, float, >) \
92 : V(F32Ge, float, >=) \
93 : V(F64Add, double, +) \
94 : V(F64Sub, double, -) \
95 : V(F64Eq, double, ==) \
96 : V(F64Ne, double, !=) \
97 : V(F64Lt, double, <) \
98 : V(F64Le, double, <=) \
99 : V(F64Gt, double, >) \
100 : V(F64Ge, double, >=) \
101 : V(F32Mul, float, *) \
102 : V(F64Mul, double, *) \
103 : V(F32Div, float, /) \
104 : V(F64Div, double, /)
105 :
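// Note on the X-macro pattern: each V(name, ctype, op) row above is one
// entry; the execution loop later passes its own macro as V to stamp out one
// handler per row. For instance, V(I32Add, uint32_t, +) expands to a case
// that pops two operands, evaluates them as uint32_t with +, and pushes the
// result. Passing a different macro turns the same table into opcode lists,
// name tables, and so on.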
106 : #define FOREACH_OTHER_BINOP(V) \
107 : V(I32DivS, int32_t) \
108 : V(I32DivU, uint32_t) \
109 : V(I32RemS, int32_t) \
110 : V(I32RemU, uint32_t) \
111 : V(I32Shl, uint32_t) \
112 : V(I32ShrU, uint32_t) \
113 : V(I32ShrS, int32_t) \
114 : V(I64DivS, int64_t) \
115 : V(I64DivU, uint64_t) \
116 : V(I64RemS, int64_t) \
117 : V(I64RemU, uint64_t) \
118 : V(I64Shl, uint64_t) \
119 : V(I64ShrU, uint64_t) \
120 : V(I64ShrS, int64_t) \
121 : V(I32Ror, int32_t) \
122 : V(I32Rol, int32_t) \
123 : V(I64Ror, int64_t) \
124 : V(I64Rol, int64_t) \
125 : V(F32Min, float) \
126 : V(F32Max, float) \
127 : V(F64Min, double) \
128 : V(F64Max, double) \
129 : V(I32AsmjsDivS, int32_t) \
130 : V(I32AsmjsDivU, uint32_t) \
131 : V(I32AsmjsRemS, int32_t) \
132 : V(I32AsmjsRemU, uint32_t) \
133 : V(F32CopySign, Float32) \
134 : V(F64CopySign, Float64)
135 :
136 : #define FOREACH_I32CONV_FLOATOP(V) \
137 : V(I32SConvertF32, int32_t, float) \
138 : V(I32SConvertF64, int32_t, double) \
139 : V(I32UConvertF32, uint32_t, float) \
140 : V(I32UConvertF64, uint32_t, double)
141 :
142 : #define FOREACH_OTHER_UNOP(V) \
143 : V(I32Clz, uint32_t) \
144 : V(I32Ctz, uint32_t) \
145 : V(I32Popcnt, uint32_t) \
146 : V(I32Eqz, uint32_t) \
147 : V(I64Clz, uint64_t) \
148 : V(I64Ctz, uint64_t) \
149 : V(I64Popcnt, uint64_t) \
150 : V(I64Eqz, uint64_t) \
151 : V(F32Abs, Float32) \
152 : V(F32Neg, Float32) \
153 : V(F32Ceil, float) \
154 : V(F32Floor, float) \
155 : V(F32Trunc, float) \
156 : V(F32NearestInt, float) \
157 : V(F64Abs, Float64) \
158 : V(F64Neg, Float64) \
159 : V(F64Ceil, double) \
160 : V(F64Floor, double) \
161 : V(F64Trunc, double) \
162 : V(F64NearestInt, double) \
163 : V(I32ConvertI64, int64_t) \
164 : V(I64SConvertF32, float) \
165 : V(I64SConvertF64, double) \
166 : V(I64UConvertF32, float) \
167 : V(I64UConvertF64, double) \
168 : V(I64SConvertI32, int32_t) \
169 : V(I64UConvertI32, uint32_t) \
170 : V(F32SConvertI32, int32_t) \
171 : V(F32UConvertI32, uint32_t) \
172 : V(F32SConvertI64, int64_t) \
173 : V(F32UConvertI64, uint64_t) \
174 : V(F32ConvertF64, double) \
175 : V(F32ReinterpretI32, int32_t) \
176 : V(F64SConvertI32, int32_t) \
177 : V(F64UConvertI32, uint32_t) \
178 : V(F64SConvertI64, int64_t) \
179 : V(F64UConvertI64, uint64_t) \
180 : V(F64ConvertF32, float) \
181 : V(F64ReinterpretI64, int64_t) \
182 : V(I32AsmjsSConvertF32, float) \
183 : V(I32AsmjsUConvertF32, float) \
184 : V(I32AsmjsSConvertF64, double) \
185 : V(I32AsmjsUConvertF64, double) \
186 : V(F32Sqrt, float) \
187 : V(F64Sqrt, double)
188 :
189 : namespace {
190 :
191 : constexpr uint32_t kFloat32SignBitMask = uint32_t{1} << 31;
192 : constexpr uint64_t kFloat64SignBitMask = uint64_t{1} << 63;
193 :
194 : inline int32_t ExecuteI32DivS(int32_t a, int32_t b, TrapReason* trap) {
195 41556 : if (b == 0) {
196 : *trap = kTrapDivByZero;
197 : return 0;
198 : }
199 38204 : if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
200 : *trap = kTrapDivUnrepresentable;
201 : return 0;
202 : }
203 38192 : return a / b;
204 : }
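// Worked example of the cases handled above (results follow directly from
// the code):
//   ExecuteI32DivS(7, -2, &trap)          == -3 (C++ '/' truncates toward
//                                                zero, matching i32.div_s)
//   ExecuteI32DivS(1, 0, &trap)           == 0, trap = kTrapDivByZero
//   ExecuteI32DivS(INT32_MIN, -1, &trap)  == 0, trap = kTrapDivUnrepresentable
//                                            (the quotient 2^31 does not fit
//                                             in int32_t)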
205 :
206 : inline uint32_t ExecuteI32DivU(uint32_t a, uint32_t b, TrapReason* trap) {
207 40932 : if (b == 0) {
208 : *trap = kTrapDivByZero;
209 : return 0;
210 : }
211 37656 : return a / b;
212 : }
213 :
214 : inline int32_t ExecuteI32RemS(int32_t a, int32_t b, TrapReason* trap) {
215 40964 : if (b == 0) {
216 : *trap = kTrapRemByZero;
217 : return 0;
218 : }
219 37672 : if (b == -1) return 0;
220 34400 : return a % b;
221 : }
222 :
223 : inline uint32_t ExecuteI32RemU(uint32_t a, uint32_t b, TrapReason* trap) {
224 40932 : if (b == 0) {
225 : *trap = kTrapRemByZero;
226 : return 0;
227 : }
228 37656 : return a % b;
229 : }
230 :
231 : inline uint32_t ExecuteI32Shl(uint32_t a, uint32_t b, TrapReason* trap) {
232 40912 : return a << (b & 0x1F);
233 : }
234 :
235 : inline uint32_t ExecuteI32ShrU(uint32_t a, uint32_t b, TrapReason* trap) {
236 40912 : return a >> (b & 0x1F);
237 : }
238 :
239 : inline int32_t ExecuteI32ShrS(int32_t a, int32_t b, TrapReason* trap) {
240 40912 : return a >> (b & 0x1F);
241 : }
242 :
243 : inline int64_t ExecuteI64DivS(int64_t a, int64_t b, TrapReason* trap) {
244 54316 : if (b == 0) {
245 : *trap = kTrapDivByZero;
246 : return 0;
247 : }
248 49672 : if (b == -1 && a == std::numeric_limits<int64_t>::min()) {
249 : *trap = kTrapDivUnrepresentable;
250 : return 0;
251 : }
252 49668 : return a / b;
253 : }
254 :
255 : inline uint64_t ExecuteI64DivU(uint64_t a, uint64_t b, TrapReason* trap) {
256 53708 : if (b == 0) {
257 : *trap = kTrapDivByZero;
258 : return 0;
259 : }
260 49128 : return a / b;
261 : }
262 :
263 : inline int64_t ExecuteI64RemS(int64_t a, int64_t b, TrapReason* trap) {
264 53712 : if (b == 0) {
265 : *trap = kTrapRemByZero;
266 : return 0;
267 : }
268 49132 : if (b == -1) return 0;
269 44884 : return a % b;
270 : }
271 :
272 : inline uint64_t ExecuteI64RemU(uint64_t a, uint64_t b, TrapReason* trap) {
273 53708 : if (b == 0) {
274 : *trap = kTrapRemByZero;
275 : return 0;
276 : }
277 49128 : return a % b;
278 : }
279 :
280 : inline uint64_t ExecuteI64Shl(uint64_t a, uint64_t b, TrapReason* trap) {
281 81232 : return a << (b & 0x3F);
282 : }
283 :
284 : inline uint64_t ExecuteI64ShrU(uint64_t a, uint64_t b, TrapReason* trap) {
285 81232 : return a >> (b & 0x3F);
286 : }
287 :
288 : inline int64_t ExecuteI64ShrS(int64_t a, int64_t b, TrapReason* trap) {
289 81232 : return a >> (b & 0x3F);
290 : }
291 :
292 : inline uint32_t ExecuteI32Ror(uint32_t a, uint32_t b, TrapReason* trap) {
293 26912 : return (a >> (b & 0x1F)) | (a << ((32 - b) & 0x1F));
294 : }
295 :
296 : inline uint32_t ExecuteI32Rol(uint32_t a, uint32_t b, TrapReason* trap) {
297 26912 : return (a << (b & 0x1F)) | (a >> ((32 - b) & 0x1F));
298 : }
299 :
300 : inline uint64_t ExecuteI64Ror(uint64_t a, uint64_t b, TrapReason* trap) {
301 26260 : return (a >> (b & 0x3F)) | (a << ((64 - b) & 0x3F));
302 : }
303 :
304 : inline uint64_t ExecuteI64Rol(uint64_t a, uint64_t b, TrapReason* trap) {
305 26260 : return (a << (b & 0x3F)) | (a >> ((64 - b) & 0x3F));
306 : }
307 :
308 : inline float ExecuteF32Min(float a, float b, TrapReason* trap) {
309 52912 : return JSMin(a, b);
310 : }
311 :
312 : inline float ExecuteF32Max(float a, float b, TrapReason* trap) {
313 52904 : return JSMax(a, b);
314 : }
315 :
316 : inline Float32 ExecuteF32CopySign(Float32 a, Float32 b, TrapReason* trap) {
317 52908 : return Float32::FromBits((a.get_bits() & ~kFloat32SignBitMask) |
318 52908 : (b.get_bits() & kFloat32SignBitMask));
319 : }
320 :
321 : inline double ExecuteF64Min(double a, double b, TrapReason* trap) {
322 9608 : return JSMin(a, b);
323 : }
324 :
325 : inline double ExecuteF64Max(double a, double b, TrapReason* trap) {
326 9616 : return JSMax(a, b);
327 : }
328 :
329 : inline Float64 ExecuteF64CopySign(Float64 a, Float64 b, TrapReason* trap) {
330 9612 : return Float64::FromBits((a.get_bits() & ~kFloat64SignBitMask) |
331 9612 : (b.get_bits() & kFloat64SignBitMask));
332 : }
333 :
334 : inline int32_t ExecuteI32AsmjsDivS(int32_t a, int32_t b, TrapReason* trap) {
335 2340 : if (b == 0) return 0;
336 2096 : if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
337 : return std::numeric_limits<int32_t>::min();
338 : }
339 2088 : return a / b;
340 : }
341 :
342 : inline uint32_t ExecuteI32AsmjsDivU(uint32_t a, uint32_t b, TrapReason* trap) {
343 20 : if (b == 0) return 0;
344 8 : return a / b;
345 : }
346 :
347 : inline int32_t ExecuteI32AsmjsRemS(int32_t a, int32_t b, TrapReason* trap) {
348 2340 : if (b == 0) return 0;
349 2096 : if (b == -1) return 0;
350 1860 : return a % b;
351 : }
352 :
353 : inline uint32_t ExecuteI32AsmjsRemU(uint32_t a, uint32_t b, TrapReason* trap) {
354 20 : if (b == 0) return 0;
355 8 : return a % b;
356 : }
357 :
358 : inline int32_t ExecuteI32AsmjsSConvertF32(float a, TrapReason* trap) {
359 460 : return DoubleToInt32(a);
360 : }
361 :
362 : inline uint32_t ExecuteI32AsmjsUConvertF32(float a, TrapReason* trap) {
363 460 : return DoubleToUint32(a);
364 : }
365 :
366 : inline int32_t ExecuteI32AsmjsSConvertF64(double a, TrapReason* trap) {
367 196 : return DoubleToInt32(a);
368 : }
369 :
370 : inline uint32_t ExecuteI32AsmjsUConvertF64(double a, TrapReason* trap) {
371 : return DoubleToUint32(a);
372 : }
373 :
374 : int32_t ExecuteI32Clz(uint32_t val, TrapReason* trap) {
375 264 : return base::bits::CountLeadingZeros(val);
376 : }
377 :
378 : uint32_t ExecuteI32Ctz(uint32_t val, TrapReason* trap) {
379 : return base::bits::CountTrailingZeros(val);
380 : }
381 :
382 : uint32_t ExecuteI32Popcnt(uint32_t val, TrapReason* trap) {
383 : return base::bits::CountPopulation(val);
384 : }
385 :
386 : inline uint32_t ExecuteI32Eqz(uint32_t val, TrapReason* trap) {
387 568 : return val == 0 ? 1 : 0;
388 : }
389 :
390 : int64_t ExecuteI64Clz(uint64_t val, TrapReason* trap) {
391 260 : return base::bits::CountLeadingZeros(val);
392 : }
393 :
394 : inline uint64_t ExecuteI64Ctz(uint64_t val, TrapReason* trap) {
395 260 : return base::bits::CountTrailingZeros(val);
396 : }
397 :
398 : inline int64_t ExecuteI64Popcnt(uint64_t val, TrapReason* trap) {
399 40 : return base::bits::CountPopulation(val);
400 : }
401 :
402 : inline int32_t ExecuteI64Eqz(uint64_t val, TrapReason* trap) {
403 332 : return val == 0 ? 1 : 0;
404 : }
405 :
406 : inline Float32 ExecuteF32Abs(Float32 a, TrapReason* trap) {
407 16 : return Float32::FromBits(a.get_bits() & ~kFloat32SignBitMask);
408 : }
409 :
410 : inline Float32 ExecuteF32Neg(Float32 a, TrapReason* trap) {
411 468 : return Float32::FromBits(a.get_bits() ^ kFloat32SignBitMask);
412 : }
413 :
414 460 : inline float ExecuteF32Ceil(float a, TrapReason* trap) { return ceilf(a); }
415 :
416 460 : inline float ExecuteF32Floor(float a, TrapReason* trap) { return floorf(a); }
417 :
418 460 : inline float ExecuteF32Trunc(float a, TrapReason* trap) { return truncf(a); }
419 :
420 : inline float ExecuteF32NearestInt(float a, TrapReason* trap) {
421 460 : return nearbyintf(a);
422 : }
423 :
424 : inline float ExecuteF32Sqrt(float a, TrapReason* trap) {
425 8 : float result = sqrtf(a);
426 : return result;
427 : }
428 :
429 : inline Float64 ExecuteF64Abs(Float64 a, TrapReason* trap) {
430 16 : return Float64::FromBits(a.get_bits() & ~kFloat64SignBitMask);
431 : }
432 :
433 : inline Float64 ExecuteF64Neg(Float64 a, TrapReason* trap) {
434 204 : return Float64::FromBits(a.get_bits() ^ kFloat64SignBitMask);
435 : }
436 :
437 196 : inline double ExecuteF64Ceil(double a, TrapReason* trap) { return ceil(a); }
438 :
439 196 : inline double ExecuteF64Floor(double a, TrapReason* trap) { return floor(a); }
440 :
441 196 : inline double ExecuteF64Trunc(double a, TrapReason* trap) { return trunc(a); }
442 :
443 : inline double ExecuteF64NearestInt(double a, TrapReason* trap) {
444 196 : return nearbyint(a);
445 : }
446 :
447 8 : inline double ExecuteF64Sqrt(double a, TrapReason* trap) { return sqrt(a); }
448 :
449 : template <typename int_type, typename float_type>
450 : int_type ExecuteConvert(float_type a, TrapReason* trap) {
451 2760 : if (is_inbounds<int_type>(a)) {
452 1552 : return static_cast<int_type>(a);
453 : }
454 : *trap = kTrapFloatUnrepresentable;
455 : return 0;
456 : }
457 :
458 : template <typename int_type, typename float_type>
459 : int_type ExecuteConvertSaturate(float_type a) {
460 : TrapReason base_trap = kTrapCount;
461 :   int_type val = ExecuteConvert<int_type>(a, &base_trap);
462 1312 : if (base_trap == kTrapCount) {
463 : return val;
464 : }
465 : return std::isnan(a) ? 0
466 : : (a < static_cast<float_type>(0.0)
467 : ? std::numeric_limits<int_type>::min()
468 604 : : std::numeric_limits<int_type>::max());
469 : }
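// Worked example of the saturating semantics above:
//   ExecuteConvertSaturate<int32_t>(std::nanf(""))  == 0
//   ExecuteConvertSaturate<int32_t>(3.0e9f)         == std::numeric_limits<int32_t>::max()
//   ExecuteConvertSaturate<int32_t>(-3.0e9f)        == std::numeric_limits<int32_t>::min()
//   ExecuteConvertSaturate<uint32_t>(-2.0f)         == 0 (min() of the unsigned type)
// In-range inputs take the non-trapping path through ExecuteConvert and are
// truncated toward zero.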
470 :
471 : template <typename dst_type, typename src_type, void (*fn)(Address)>
472 604 : inline dst_type CallExternalIntToFloatFunction(src_type input) {
473 604 : uint8_t data[std::max(sizeof(dst_type), sizeof(src_type))];
474 604 : Address data_addr = reinterpret_cast<Address>(data);
475 : WriteUnalignedValue<src_type>(data_addr, input);
476 604 : fn(data_addr);
477 604 : return ReadUnalignedValue<dst_type>(data_addr);
478 : }
479 :
480 : template <typename dst_type, typename src_type, int32_t (*fn)(Address)>
481 2956 : inline dst_type CallExternalFloatToIntFunction(src_type input,
482 : TrapReason* trap) {
483 2956 : uint8_t data[std::max(sizeof(dst_type), sizeof(src_type))];
484 2956 : Address data_addr = reinterpret_cast<Address>(data);
485 : WriteUnalignedValue<src_type>(data_addr, input);
486 2956 : if (!fn(data_addr)) *trap = kTrapFloatUnrepresentable;
487 2956 : return ReadUnalignedValue<dst_type>(data_addr);
488 : }
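// Illustrative sketch of the calling convention assumed by the two helpers
// above: the external function reads its operand from the scratch buffer,
// writes the result back into the same buffer and, in the float-to-int case,
// returns non-zero only if the value was representable. DemoFloat64ToInt32 is
// a hypothetical stand-in, not one of V8's real external references.
inline int32_t DemoFloat64ToInt32(Address data) {
  double in = ReadUnalignedValue<double>(data);
  // Reject NaN and out-of-range inputs; the caller converts 0 into a trap.
  if (!(in > -2147483649.0 && in < 2147483648.0)) return 0;
  WriteUnalignedValue<int32_t>(data, static_cast<int32_t>(in));
  return 1;
}
// Usage sketch:
//   CallExternalFloatToIntFunction<int32_t, double, DemoFloat64ToInt32>(x, &trap);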
489 :
490 : inline uint32_t ExecuteI32ConvertI64(int64_t a, TrapReason* trap) {
491 158004 : return static_cast<uint32_t>(a & 0xFFFFFFFF);
492 : }
493 :
494 : int64_t ExecuteI64SConvertF32(float a, TrapReason* trap) {
495 : return CallExternalFloatToIntFunction<int64_t, float,
496 920 : float32_to_int64_wrapper>(a, trap);
497 : }
498 :
499 460 : int64_t ExecuteI64SConvertSatF32(float a) {
500 460 : TrapReason base_trap = kTrapCount;
501 : int64_t val = ExecuteI64SConvertF32(a, &base_trap);
502 460 : if (base_trap == kTrapCount) {
503 : return val;
504 : }
505 : return std::isnan(a) ? 0
506 : : (a < 0.0 ? std::numeric_limits<int64_t>::min()
507 128 : : std::numeric_limits<int64_t>::max());
508 : }
509 :
510 : int64_t ExecuteI64SConvertF64(double a, TrapReason* trap) {
511 : return CallExternalFloatToIntFunction<int64_t, double,
512 724 : float64_to_int64_wrapper>(a, trap);
513 : }
514 :
515 196 : int64_t ExecuteI64SConvertSatF64(double a) {
516 196 : TrapReason base_trap = kTrapCount;
517 : int64_t val = ExecuteI64SConvertF64(a, &base_trap);
518 196 : if (base_trap == kTrapCount) {
519 : return val;
520 : }
521 : return std::isnan(a) ? 0
522 : : (a < 0.0 ? std::numeric_limits<int64_t>::min()
523 44 : : std::numeric_limits<int64_t>::max());
524 : }
525 :
526 : uint64_t ExecuteI64UConvertF32(float a, TrapReason* trap) {
527 : return CallExternalFloatToIntFunction<uint64_t, float,
528 920 : float32_to_uint64_wrapper>(a, trap);
529 : }
530 :
531 460 : uint64_t ExecuteI64UConvertSatF32(float a) {
532 460 : TrapReason base_trap = kTrapCount;
533 : uint64_t val = ExecuteI64UConvertF32(a, &base_trap);
534 460 : if (base_trap == kTrapCount) {
535 : return val;
536 : }
537 : return std::isnan(a) ? 0
538 : : (a < 0.0 ? std::numeric_limits<uint64_t>::min()
539 256 : : std::numeric_limits<uint64_t>::max());
540 : }
541 :
542 : uint64_t ExecuteI64UConvertF64(double a, TrapReason* trap) {
543 : return CallExternalFloatToIntFunction<uint64_t, double,
544 392 : float64_to_uint64_wrapper>(a, trap);
545 : }
546 :
547 196 : uint64_t ExecuteI64UConvertSatF64(double a) {
548 196 : TrapReason base_trap = kTrapCount;
549 :   uint64_t val = ExecuteI64UConvertF64(a, &base_trap);
550 196 : if (base_trap == kTrapCount) {
551 : return val;
552 : }
553 : return std::isnan(a) ? 0
554 : : (a < 0.0 ? std::numeric_limits<uint64_t>::min()
555 80 : : std::numeric_limits<uint64_t>::max());
556 : }
557 :
558 : inline int64_t ExecuteI64SConvertI32(int32_t a, TrapReason* trap) {
559 248 : return static_cast<int64_t>(a);
560 : }
561 :
562 : inline int64_t ExecuteI64UConvertI32(uint32_t a, TrapReason* trap) {
563 232 : return static_cast<uint64_t>(a);
564 : }
565 :
566 : inline float ExecuteF32SConvertI32(int32_t a, TrapReason* trap) {
567 24 : return static_cast<float>(a);
568 : }
569 :
570 : inline float ExecuteF32UConvertI32(uint32_t a, TrapReason* trap) {
571 8 : return static_cast<float>(a);
572 : }
573 :
574 : inline float ExecuteF32SConvertI64(int64_t a, TrapReason* trap) {
575 324 : return static_cast<float>(a);
576 : }
577 :
578 : inline float ExecuteF32UConvertI64(uint64_t a, TrapReason* trap) {
579 : return CallExternalIntToFloatFunction<float, uint64_t,
580 304 : uint64_to_float32_wrapper>(a);
581 : }
582 :
583 : inline float ExecuteF32ConvertF64(double a, TrapReason* trap) {
584 8 : return static_cast<float>(a);
585 : }
586 :
587 : inline Float32 ExecuteF32ReinterpretI32(int32_t a, TrapReason* trap) {
588 244 : return Float32::FromBits(a);
589 : }
590 :
591 : inline double ExecuteF64SConvertI32(int32_t a, TrapReason* trap) {
592 936 : return static_cast<double>(a);
593 : }
594 :
595 : inline double ExecuteF64UConvertI32(uint32_t a, TrapReason* trap) {
596 8 : return static_cast<double>(a);
597 : }
598 :
599 : inline double ExecuteF64SConvertI64(int64_t a, TrapReason* trap) {
600 15372 : return static_cast<double>(a);
601 : }
602 :
603 : inline double ExecuteF64UConvertI64(uint64_t a, TrapReason* trap) {
604 : return CallExternalIntToFloatFunction<double, uint64_t,
605 300 : uint64_to_float64_wrapper>(a);
606 : }
607 :
608 : inline double ExecuteF64ConvertF32(float a, TrapReason* trap) {
609 1396 : return static_cast<double>(a);
610 : }
611 :
612 : inline Float64 ExecuteF64ReinterpretI64(int64_t a, TrapReason* trap) {
613 232 : return Float64::FromBits(a);
614 : }
615 :
616 : inline int32_t ExecuteI32ReinterpretF32(WasmValue a) {
617 256 : return a.to_f32_boxed().get_bits();
618 : }
619 :
620 : inline int64_t ExecuteI64ReinterpretF64(WasmValue a) {
621 244 : return a.to_f64_boxed().get_bits();
622 : }
623 :
624 : enum InternalOpcode {
625 : #define DECL_INTERNAL_ENUM(name, value) kInternal##name = value,
626 : FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_ENUM)
627 : #undef DECL_INTERNAL_ENUM
628 : };
629 :
630 : const char* OpcodeName(uint32_t val) {
631 0 : switch (val) {
632 : #define DECL_INTERNAL_CASE(name, value) \
633 : case kInternal##name: \
634 : return "Internal" #name;
635 : FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_CASE)
636 : #undef DECL_INTERNAL_CASE
637 : }
638 0 : return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(val));
639 : }
640 :
641 : constexpr uint32_t kCatchInArity = 1;
642 :
643 : } // namespace
644 :
645 : class SideTable;
646 :
647 : // Code and metadata needed to execute a function.
648 747012 : struct InterpreterCode {
649 : const WasmFunction* function; // wasm function
650 : BodyLocalDecls locals; // local declarations
651 : const byte* orig_start; // start of original code
652 : const byte* orig_end; // end of original code
653 : byte* start; // start of (maybe altered) code
654 : byte* end; // end of (maybe altered) code
655 : SideTable* side_table; // precomputed side table for control flow.
656 :
657 44123003 : const byte* at(pc_t pc) { return start + pc; }
658 : };
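// Note: "maybe altered" refers to the interpreter patching its private copy
// of the bytecode, e.g. replacing an instruction byte with the internal
// breakpoint opcode declared above (kInternalBreakpoint, 0xFF), while
// orig_start/orig_end keep the unmodified bytes.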
659 :
660 : // A helper class to compute the control transfers for each bytecode offset.
661 : // Control transfers allow Br, BrIf, BrTable, If, Else, and End bytecodes to
662 : // be directly executed without the need to dynamically track blocks.
663 : class SideTable : public ZoneObject {
664 : public:
665 : ControlTransferMap map_;
666 : uint32_t max_stack_height_ = 0;
667 :
668 733424 : SideTable(Zone* zone, const WasmModule* module, InterpreterCode* code)
669 366712 : : map_(zone) {
670 : // Create a zone for all temporary objects.
671 366712 : Zone control_transfer_zone(zone->allocator(), ZONE_NAME);
672 :
673 : // Represents a control flow label.
674 : class CLabel : public ZoneObject {
675 : explicit CLabel(Zone* zone, uint32_t target_stack_height, uint32_t arity)
676 : : target_stack_height(target_stack_height),
677 : arity(arity),
678 407126 : refs(zone) {}
679 :
680 : public:
681 : struct Ref {
682 : const byte* from_pc;
683 : const uint32_t stack_height;
684 : };
685 : const byte* target = nullptr;
686 : uint32_t target_stack_height;
687 : // Arity when branching to this label.
688 : const uint32_t arity;
689 : ZoneVector<Ref> refs;
690 :
691 407126 : static CLabel* New(Zone* zone, uint32_t stack_height, uint32_t arity) {
692 407126 : return new (zone) CLabel(zone, stack_height, arity);
693 : }
694 :
695 : // Bind this label to the given PC.
696 : void Bind(const byte* pc) {
697 : DCHECK_NULL(target);
698 407126 : target = pc;
699 : }
700 :
701 : // Reference this label from the given location.
702 : void Ref(const byte* from_pc, uint32_t stack_height) {
703 : // Target being bound before a reference means this is a loop.
704 : DCHECK_IMPLIES(target, *target == kExprLoop);
705 53716 : refs.push_back({from_pc, stack_height});
706 : }
707 :
708 407126 : void Finish(ControlTransferMap* map, const byte* start) {
709 : DCHECK_NOT_NULL(target);
710 841110 : for (auto ref : refs) {
711 26858 : size_t offset = static_cast<size_t>(ref.from_pc - start);
712 26858 : auto pcdiff = static_cast<pcdiff_t>(target - ref.from_pc);
713 : DCHECK_GE(ref.stack_height, target_stack_height);
714 : spdiff_t spdiff =
715 26858 : static_cast<spdiff_t>(ref.stack_height - target_stack_height);
716 : TRACE("control transfer @%zu: Δpc %d, stack %u->%u = -%u\n", offset,
717 : pcdiff, ref.stack_height, target_stack_height, spdiff);
718 26858 : ControlTransferEntry& entry = (*map)[offset];
719 26858 : entry.pc_diff = pcdiff;
720 26858 : entry.sp_diff = spdiff;
721 26858 : entry.target_arity = arity;
722 : }
723 407126 : }
724 : };
725 :
726 : // An entry in the control stack.
727 : struct Control {
728 : const byte* pc;
729 : CLabel* end_label;
730 : CLabel* else_label;
731 : // Arity (number of values on the stack) when exiting this control
732 : // structure via |end|.
733 : uint32_t exit_arity;
734 : // Track whether this block was already left, i.e. all further
735 : // instructions are unreachable.
736 : bool unreachable = false;
737 :
738 : Control(const byte* pc, CLabel* end_label, CLabel* else_label,
739 : uint32_t exit_arity)
740 : : pc(pc),
741 : end_label(end_label),
742 : else_label(else_label),
743 397670 : exit_arity(exit_arity) {}
744 : Control(const byte* pc, CLabel* end_label, uint32_t exit_arity)
745 : : Control(pc, end_label, nullptr, exit_arity) {}
746 :
747 397670 : void Finish(ControlTransferMap* map, const byte* start) {
748 397670 : end_label->Finish(map, start);
749 397670 : if (else_label) else_label->Finish(map, start);
750 397670 : }
751 : };
752 :
753 : // Compute the ControlTransfer map.
754 : // This algorithm maintains a stack of control constructs similar to the
755 : // AST decoder. The {control_stack} allows matching {br,br_if,br_table}
756 : // bytecodes with their target, as well as determining whether the current
757 : // bytecodes are within the true or false block of an else.
758 : ZoneVector<Control> control_stack(&control_transfer_zone);
759 : // It also maintains a stack of all nested {try} blocks to resolve local
760 : // handler targets for potentially throwing operations. These exceptional
761 : // control transfers are treated just like other branches in the resulting
762 : // map. This stack contains indices into the above control stack.
763 : ZoneVector<size_t> exception_stack(zone);
764 : uint32_t stack_height = 0;
765 : uint32_t func_arity =
766 366712 : static_cast<uint32_t>(code->function->sig->return_count());
767 : CLabel* func_label =
768 366712 : CLabel::New(&control_transfer_zone, stack_height, func_arity);
769 366712 : control_stack.emplace_back(code->orig_start, func_label, func_arity);
770 : auto control_parent = [&]() -> Control& {
771 : DCHECK_LE(2, control_stack.size());
772 62536 : return control_stack[control_stack.size() - 2];
773 366712 : };
774 : auto copy_unreachable = [&] {
775 31113 : control_stack.back().unreachable = control_parent().unreachable;
776 : };
777 2805511 : for (BytecodeIterator i(code->orig_start, code->orig_end, &code->locals);
778 2072087 : i.has_next(); i.next()) {
779 : WasmOpcode opcode = i.current();
780 : uint32_t exceptional_stack_height = 0;
781 2072087 : if (WasmOpcodes::IsPrefixOpcode(opcode)) opcode = i.prefixed_opcode();
782 2072087 : bool unreachable = control_stack.back().unreachable;
783 2072087 : if (unreachable) {
784 : TRACE("@%u: %s (is unreachable)\n", i.pc_offset(),
785 : WasmOpcodes::OpcodeName(opcode));
786 : } else {
787 : auto stack_effect =
788 2040308 : StackEffect(module, code->function->sig, i.pc(), i.end());
789 : TRACE("@%u: %s (sp %d - %d + %d)\n", i.pc_offset(),
790 : WasmOpcodes::OpcodeName(opcode), stack_height, stack_effect.first,
791 : stack_effect.second);
792 : DCHECK_GE(stack_height, stack_effect.first);
793 : DCHECK_GE(kMaxUInt32, static_cast<uint64_t>(stack_height) -
794 : stack_effect.first + stack_effect.second);
795 2040308 : exceptional_stack_height = stack_height - stack_effect.first;
796 2040308 : stack_height = stack_height - stack_effect.first + stack_effect.second;
797 2040308 : if (stack_height > max_stack_height_) max_stack_height_ = stack_height;
798 : }
799 2072087 : if (!exception_stack.empty() && WasmOpcodes::IsThrowingOpcode(opcode)) {
800 : // Record exceptional control flow from potentially throwing opcodes to
801 : // the local handler if one is present. The stack height at the throw
802 : // point is assumed to have popped all operands and not pushed any yet.
803 : DCHECK_GE(control_stack.size() - 1, exception_stack.back());
804 36 : const Control* c = &control_stack[exception_stack.back()];
805 36 : if (!unreachable) c->else_label->Ref(i.pc(), exceptional_stack_height);
806 : TRACE("handler @%u: %s -> try @%u\n", i.pc_offset(), OpcodeName(opcode),
807 : static_cast<uint32_t>(c->pc - code->start));
808 : }
809 2072087 : switch (opcode) {
810 : case kExprBlock:
811 : case kExprLoop: {
812 21502 : bool is_loop = opcode == kExprLoop;
813 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
814 21502 : i.pc());
815 21502 : if (imm.type == kWasmVar) {
816 20 : imm.sig = module->signatures[imm.sig_index];
817 : }
818 : TRACE("control @%u: %s, arity %d->%d\n", i.pc_offset(),
819 : is_loop ? "Loop" : "Block", imm.in_arity(), imm.out_arity());
820 : CLabel* label =
821 : CLabel::New(&control_transfer_zone, stack_height,
822 43004 : is_loop ? imm.in_arity() : imm.out_arity());
823 43004 : control_stack.emplace_back(i.pc(), label, imm.out_arity());
824 : copy_unreachable();
825 21502 : if (is_loop) label->Bind(i.pc());
826 : break;
827 : }
828 : case kExprIf: {
829 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
830 9420 : i.pc());
831 9420 : if (imm.type == kWasmVar) {
832 8 : imm.sig = module->signatures[imm.sig_index];
833 : }
834 : TRACE("control @%u: If, arity %d->%d\n", i.pc_offset(),
835 : imm.in_arity(), imm.out_arity());
836 : CLabel* end_label = CLabel::New(&control_transfer_zone, stack_height,
837 9420 : imm.out_arity());
838 : CLabel* else_label =
839 9420 : CLabel::New(&control_transfer_zone, stack_height, 0);
840 9420 : control_stack.emplace_back(i.pc(), end_label, else_label,
841 28260 : imm.out_arity());
842 : copy_unreachable();
843 9420 : if (!unreachable) else_label->Ref(i.pc(), stack_height);
844 : break;
845 : }
846 : case kExprElse: {
847 : Control* c = &control_stack.back();
848 : copy_unreachable();
849 : TRACE("control @%u: Else\n", i.pc_offset());
850 119 : if (!control_parent().unreachable) {
851 114 : c->end_label->Ref(i.pc(), stack_height);
852 : }
853 : DCHECK_NOT_NULL(c->else_label);
854 119 : c->else_label->Bind(i.pc() + 1);
855 119 : c->else_label->Finish(&map_, code->orig_start);
856 119 : c->else_label = nullptr;
857 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
858 119 : stack_height = c->end_label->target_stack_height;
859 119 : break;
860 : }
861 : case kExprTry: {
862 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
863 36 : i.pc());
864 36 : if (imm.type == kWasmVar) {
865 0 : imm.sig = module->signatures[imm.sig_index];
866 : }
867 : TRACE("control @%u: Try, arity %d->%d\n", i.pc_offset(),
868 : imm.in_arity(), imm.out_arity());
869 : CLabel* end_label = CLabel::New(&control_transfer_zone, stack_height,
870 36 : imm.out_arity());
871 : CLabel* catch_label =
872 36 : CLabel::New(&control_transfer_zone, stack_height, kCatchInArity);
873 36 : control_stack.emplace_back(i.pc(), end_label, catch_label,
874 108 : imm.out_arity());
875 108 : exception_stack.push_back(control_stack.size() - 1);
876 : copy_unreachable();
877 : break;
878 : }
879 : case kExprCatch: {
880 : DCHECK_EQ(control_stack.size() - 1, exception_stack.back());
881 : Control* c = &control_stack.back();
882 : exception_stack.pop_back();
883 : copy_unreachable();
884 : TRACE("control @%u: Catch\n", i.pc_offset());
885 36 : if (!control_parent().unreachable) {
886 36 : c->end_label->Ref(i.pc(), stack_height);
887 : }
888 : DCHECK_NOT_NULL(c->else_label);
889 36 : c->else_label->Bind(i.pc() + 1);
890 36 : c->else_label->Finish(&map_, code->orig_start);
891 36 : c->else_label = nullptr;
892 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
893 36 : stack_height = c->end_label->target_stack_height + kCatchInArity;
894 36 : break;
895 : }
896 : case kExprEnd: {
897 397670 : Control* c = &control_stack.back();
898 : TRACE("control @%u: End\n", i.pc_offset());
899 : // Only loops have bound labels.
900 : DCHECK_IMPLIES(c->end_label->target, *c->pc == kExprLoop);
901 397670 : if (!c->end_label->target) {
902 397501 : if (c->else_label) c->else_label->Bind(i.pc());
903 397501 : c->end_label->Bind(i.pc() + 1);
904 : }
905 795340 : c->Finish(&map_, code->orig_start);
906 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
907 397670 : stack_height = c->end_label->target_stack_height + c->exit_arity;
908 : control_stack.pop_back();
909 : break;
910 : }
911 : case kExprBr: {
912 430 : BranchDepthImmediate<Decoder::kNoValidate> imm(&i, i.pc());
913 : TRACE("control @%u: Br[depth=%u]\n", i.pc_offset(), imm.depth);
914 860 : Control* c = &control_stack[control_stack.size() - imm.depth - 1];
915 430 : if (!unreachable) c->end_label->Ref(i.pc(), stack_height);
916 : break;
917 : }
918 : case kExprBrIf: {
919 97 : BranchDepthImmediate<Decoder::kNoValidate> imm(&i, i.pc());
920 : TRACE("control @%u: BrIf[depth=%u]\n", i.pc_offset(), imm.depth);
921 194 : Control* c = &control_stack[control_stack.size() - imm.depth - 1];
922 97 : if (!unreachable) c->end_label->Ref(i.pc(), stack_height);
923 : break;
924 : }
925 : case kExprBrTable: {
926 4205 : BranchTableImmediate<Decoder::kNoValidate> imm(&i, i.pc());
927 : BranchTableIterator<Decoder::kNoValidate> iterator(&i, imm);
928 : TRACE("control @%u: BrTable[count=%u]\n", i.pc_offset(),
929 : imm.table_count);
930 4205 : if (!unreachable) {
931 41890 : while (iterator.has_next()) {
932 : uint32_t j = iterator.cur_index();
933 16740 : uint32_t target = iterator.next();
934 33480 : Control* c = &control_stack[control_stack.size() - target - 1];
935 16740 : c->end_label->Ref(i.pc() + j, stack_height);
936 : }
937 : }
938 : break;
939 : }
940 : default:
941 : break;
942 : }
943 2072087 : if (WasmOpcodes::IsUnconditionalJump(opcode)) {
944 30552 : control_stack.back().unreachable = true;
945 : }
946 : }
947 : DCHECK_EQ(0, control_stack.size());
948 366712 : DCHECK_EQ(func_arity, stack_height);
949 366712 : }
950 :
951 : bool HasEntryAt(pc_t from) {
952 : auto result = map_.find(from);
953 : return result != map_.end();
954 : }
955 :
956 : ControlTransferEntry& Lookup(pc_t from) {
957 : auto result = map_.find(from);
958 : DCHECK(result != map_.end());
959 : return result->second;
960 : }
961 : };
962 :
963 : // The main storage for interpreter code. It maps {WasmFunction} to the
964 : // metadata needed to execute each function.
965 : class CodeMap {
966 : Zone* zone_;
967 : const WasmModule* module_;
968 : ZoneVector<InterpreterCode> interpreter_code_;
969 : // TODO(wasm): Remove this testing wart. It is needed because interpreter
970 :   // entry stubs are not generated when testing the interpreter in cctests.
971 : bool call_indirect_through_module_ = false;
972 :
973 : public:
974 365590 : CodeMap(const WasmModule* module, const uint8_t* module_start, Zone* zone)
975 731180 : : zone_(zone), module_(module), interpreter_code_(zone) {
976 731180 : if (module == nullptr) return;
977 731180 : interpreter_code_.reserve(module->functions.size());
978 733678 : for (const WasmFunction& function : module->functions) {
979 2498 : if (function.imported) {
980 : DCHECK(!function.code.is_set());
981 1352 : AddFunction(&function, nullptr, nullptr);
982 : } else {
983 : AddFunction(&function, module_start + function.code.offset(),
984 1146 : module_start + function.code.end_offset());
985 : }
986 : }
987 : }
988 :
989 : bool call_indirect_through_module() { return call_indirect_through_module_; }
990 :
991 : void set_call_indirect_through_module(bool val) {
992 364662 : call_indirect_through_module_ = val;
993 : }
994 :
995 : const WasmModule* module() const { return module_; }
996 :
997 : InterpreterCode* GetCode(const WasmFunction* function) {
998 : InterpreterCode* code = GetCode(function->func_index);
999 : DCHECK_EQ(function, code->function);
1000 : return code;
1001 : }
1002 :
1003 : InterpreterCode* GetCode(uint32_t function_index) {
1004 : DCHECK_LT(function_index, interpreter_code_.size());
1005 10237764 : return Preprocess(&interpreter_code_[function_index]);
1006 : }
1007 :
1008 108 : InterpreterCode* GetIndirectCode(uint32_t table_index, uint32_t entry_index) {
1009 : uint32_t saved_index;
1010 : USE(saved_index);
1011 216 : if (table_index >= module_->tables.size()) return nullptr;
1012 : // Mask table index for SSCA mitigation.
1013 : saved_index = table_index;
1014 : table_index &= static_cast<int32_t>((table_index - module_->tables.size()) &
1015 108 : ~static_cast<int32_t>(table_index)) >>
1016 108 : 31;
1017 : DCHECK_EQ(table_index, saved_index);
1018 108 : const WasmTable* table = &module_->tables[table_index];
1019 216 : if (entry_index >= table->values.size()) return nullptr;
1020 : // Mask entry_index for SSCA mitigation.
1021 : saved_index = entry_index;
1022 : entry_index &= static_cast<int32_t>((entry_index - table->values.size()) &
1023 72 : ~static_cast<int32_t>(entry_index)) >>
1024 72 : 31;
1025 : DCHECK_EQ(entry_index, saved_index);
1026 144 : uint32_t index = table->values[entry_index];
1027 144 : if (index >= interpreter_code_.size()) return nullptr;
1028 : // Mask index for SSCA mitigation.
1029 : saved_index = index;
1030 : index &= static_cast<int32_t>((index - interpreter_code_.size()) &
1031 72 : ~static_cast<int32_t>(index)) >>
1032 72 : 31;
1033 : DCHECK_EQ(index, saved_index);
1034 :
1035 72 : return GetCode(index);
1036 : }
1037 :
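  // Illustrative sketch of the masking pattern used above, assuming indices
  // and sizes fit in int32_t (DemoMaskIndex is a hypothetical helper, not
  // part of the original code): the sign bit of (index - size) & ~index is
  // set only when index < size and index's own top bit is clear, so the
  // arithmetic shift yields an all-ones mask (keep the index) or zero
  // (collapse a speculatively out-of-bounds index to 0).
  static uint32_t DemoMaskIndex(uint32_t index, uint32_t size) {
    uint32_t mask = static_cast<int32_t>((index - size) &
                                         ~static_cast<int32_t>(index)) >>
                    31;
    return index & mask;
  }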
1038 5484586 : InterpreterCode* Preprocess(InterpreterCode* code) {
1039 : DCHECK_EQ(code->function->imported, code->start == nullptr);
1040 5484586 : if (!code->side_table && code->start) {
1041 : // Compute the control targets map and the local declarations.
1042 733364 : code->side_table = new (zone_) SideTable(zone_, module_, code);
1043 : }
1044 5484586 : return code;
1045 : }
1046 :
1047 369670 : void AddFunction(const WasmFunction* function, const byte* code_start,
1048 : const byte* code_end) {
1049 : InterpreterCode code = {
1050 : function, BodyLocalDecls(zone_), code_start,
1051 : code_end, const_cast<byte*>(code_start), const_cast<byte*>(code_end),
1052 739340 : nullptr};
1053 :
1054 : DCHECK_EQ(interpreter_code_.size(), function->func_index);
1055 369670 : interpreter_code_.push_back(code);
1056 369670 : }
1057 :
1058 365704 : void SetFunctionCode(const WasmFunction* function, const byte* start,
1059 : const byte* end) {
1060 : DCHECK_LT(function->func_index, interpreter_code_.size());
1061 365704 : InterpreterCode* code = &interpreter_code_[function->func_index];
1062 : DCHECK_EQ(function, code->function);
1063 365704 : code->orig_start = start;
1064 365704 : code->orig_end = end;
1065 365704 : code->start = const_cast<byte*>(start);
1066 365704 : code->end = const_cast<byte*>(end);
1067 365704 : code->side_table = nullptr;
1068 365704 : Preprocess(code);
1069 365704 : }
1070 : };
1071 :
1072 : namespace {
1073 :
1074 : struct ExternalCallResult {
1075 : enum Type {
1076 : // The function should be executed inside this interpreter.
1077 : INTERNAL,
1078 : // For indirect calls: Table or function does not exist.
1079 : INVALID_FUNC,
1080 : // For indirect calls: Signature does not match expected signature.
1081 : SIGNATURE_MISMATCH,
1082 : // The function was executed and returned normally.
1083 : EXTERNAL_RETURNED,
1084 : // The function was executed, threw an exception, and the stack was unwound.
1085 : EXTERNAL_UNWOUND,
1086 : // The function was executed and threw an exception that was locally caught.
1087 : EXTERNAL_CAUGHT
1088 : };
1089 : Type type;
1090 : // If type is INTERNAL, this field holds the function to call internally.
1091 : InterpreterCode* interpreter_code;
1092 :
1093 : ExternalCallResult(Type type) : type(type) { // NOLINT
1094 : DCHECK_NE(INTERNAL, type);
1095 : }
1096 : ExternalCallResult(Type type, InterpreterCode* code)
1097 : : type(type), interpreter_code(code) {
1098 : DCHECK_EQ(INTERNAL, type);
1099 : }
1100 : };
1101 :
1102 : // Like a static_cast from src to dst, but specialized for boxed floats.
1103 : template <typename dst, typename src>
1104 : struct converter {
1105 787864 : dst operator()(src val) const { return static_cast<dst>(val); }
1106 : };
1107 : template <>
1108 : struct converter<Float64, uint64_t> {
1109 : Float64 operator()(uint64_t val) const { return Float64::FromBits(val); }
1110 : };
1111 : template <>
1112 : struct converter<Float32, uint32_t> {
1113 : Float32 operator()(uint32_t val) const { return Float32::FromBits(val); }
1114 : };
1115 : template <>
1116 : struct converter<uint64_t, Float64> {
1117 : uint64_t operator()(Float64 val) const { return val.get_bits(); }
1118 : };
1119 : template <>
1120 : struct converter<uint32_t, Float32> {
1121 : uint32_t operator()(Float32 val) const { return val.get_bits(); }
1122 : };
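// Note: routing f32/f64 values through these bit-pattern converters (rather
// than through a value cast) keeps NaN payloads intact. For example
// (hypothetical values):
//   Float32 f = converter<Float32, uint32_t>{}(0x7FC00001u);  // quiet NaN, payload 1
//   converter<uint32_t, Float32>{}(f) == 0x7FC00001u          // bits round-trip exactly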
1123 :
1124 : template <typename T>
1125 : V8_INLINE bool has_nondeterminism(T val) {
1126 : static_assert(!std::is_floating_point<T>::value, "missing specialization");
1127 : return false;
1128 : }
1129 : template <>
1130 : V8_INLINE bool has_nondeterminism<float>(float val) {
1131 161328 : return std::isnan(val);
1132 : }
1133 : template <>
1134 : V8_INLINE bool has_nondeterminism<double>(double val) {
1135 61016 : return std::isnan(val);
1136 : }
1137 :
1138 : } // namespace
1139 :
1140 : // Responsible for executing code directly.
1141 0 : class ThreadImpl {
1142 : struct Activation {
1143 : uint32_t fp;
1144 : sp_t sp;
1145 33462 : Activation(uint32_t fp, sp_t sp) : fp(fp), sp(sp) {}
1146 : };
1147 :
1148 : public:
1149 : ThreadImpl(Zone* zone, CodeMap* codemap,
1150 : Handle<WasmInstanceObject> instance_object)
1151 : : codemap_(codemap),
1152 : instance_object_(instance_object),
1153 : frames_(zone),
1154 731180 : activations_(zone) {}
1155 :
1156 : //==========================================================================
1157 : // Implementation of public interface for WasmInterpreter::Thread.
1158 : //==========================================================================
1159 :
1160 : WasmInterpreter::State state() { return state_; }
1161 :
1162 4664254 : void InitFrame(const WasmFunction* function, WasmValue* args) {
1163 : DCHECK_EQ(current_activation().fp, frames_.size());
1164 : InterpreterCode* code = codemap()->GetCode(function);
1165 4664254 : size_t num_params = function->sig->parameter_count();
1166 4664254 : EnsureStackSpace(num_params);
1167 4664254 : Push(args, num_params);
1168 4664254 : PushFrame(code);
1169 4664254 : }
1170 :
1171 0 : WasmInterpreter::State Run(int num_steps = -1) {
1172 : DCHECK(state_ == WasmInterpreter::STOPPED ||
1173 : state_ == WasmInterpreter::PAUSED);
1174 : DCHECK(num_steps == -1 || num_steps > 0);
1175 : if (num_steps == -1) {
1176 : TRACE(" => Run()\n");
1177 : } else if (num_steps == 1) {
1178 : TRACE(" => Step()\n");
1179 : } else {
1180 : TRACE(" => Run(%d)\n", num_steps);
1181 : }
1182 4669698 : state_ = WasmInterpreter::RUNNING;
1183 4669698 : Execute(frames_.back().code, frames_.back().pc, num_steps);
1184 : // If state_ is STOPPED, the current activation must be fully unwound.
1185 : DCHECK_IMPLIES(state_ == WasmInterpreter::STOPPED,
1186 : current_activation().fp == frames_.size());
1187 4669698 : return state_;
1188 : }
1189 :
1190 0 : void Pause() { UNIMPLEMENTED(); }
1191 :
1192 : void Reset() {
1193 : TRACE("----- RESET -----\n");
1194 4630794 : sp_ = stack_.get();
1195 : frames_.clear();
1196 4630794 : state_ = WasmInterpreter::STOPPED;
1197 4630794 : trap_reason_ = kTrapCount;
1198 4630794 : possible_nondeterminism_ = false;
1199 : }
1200 :
1201 : int GetFrameCount() {
1202 : DCHECK_GE(kMaxInt, frames_.size());
1203 12120 : return static_cast<int>(frames_.size());
1204 : }
1205 :
1206 4626777 : WasmValue GetReturnValue(uint32_t index) {
1207 4626777 : if (state_ == WasmInterpreter::TRAPPED) return WasmValue(0xDEADBEEF);
1208 : DCHECK_EQ(WasmInterpreter::FINISHED, state_);
1209 : Activation act = current_activation();
1210 : // Current activation must be finished.
1211 : DCHECK_EQ(act.fp, frames_.size());
1212 4626777 : return GetStackValue(act.sp + index);
1213 : }
1214 :
1215 : WasmValue GetStackValue(sp_t index) {
1216 : DCHECK_GT(StackHeight(), index);
1217 22319460 : return stack_[index];
1218 : }
1219 :
1220 : void SetStackValue(sp_t index, WasmValue value) {
1221 : DCHECK_GT(StackHeight(), index);
1222 4468810 : stack_[index] = value;
1223 : }
1224 :
1225 : TrapReason GetTrapReason() { return trap_reason_; }
1226 :
1227 : pc_t GetBreakpointPc() { return break_pc_; }
1228 :
1229 : bool PossibleNondeterminism() { return possible_nondeterminism_; }
1230 :
1231 : uint64_t NumInterpretedCalls() { return num_interpreted_calls_; }
1232 :
1233 40 : void AddBreakFlags(uint8_t flags) { break_flags_ |= flags; }
1234 :
1235 0 : void ClearBreakFlags() { break_flags_ = WasmInterpreter::BreakFlag::None; }
1236 :
1237 : uint32_t NumActivations() {
1238 48 : return static_cast<uint32_t>(activations_.size());
1239 : }
1240 :
1241 33462 : uint32_t StartActivation() {
1242 33462 : TRACE("----- START ACTIVATION %zu -----\n", activations_.size());
1243 : // If you use activations, use them consistently:
1244 : DCHECK_IMPLIES(activations_.empty(), frames_.empty());
1245 : DCHECK_IMPLIES(activations_.empty(), StackHeight() == 0);
1246 33462 : uint32_t activation_id = static_cast<uint32_t>(activations_.size());
1247 33462 : activations_.emplace_back(static_cast<uint32_t>(frames_.size()),
1248 66924 : StackHeight());
1249 33462 : state_ = WasmInterpreter::STOPPED;
1250 33462 : return activation_id;
1251 : }
1252 :
1253 : void FinishActivation(uint32_t id) {
1254 : TRACE("----- FINISH ACTIVATION %zu -----\n", activations_.size() - 1);
1255 : DCHECK_LT(0, activations_.size());
1256 : DCHECK_EQ(activations_.size() - 1, id);
1257 : // Stack height must match the start of this activation (otherwise unwind
1258 : // first).
1259 : DCHECK_EQ(activations_.back().fp, frames_.size());
1260 : DCHECK_LE(activations_.back().sp, StackHeight());
1261 33460 : sp_ = stack_.get() + activations_.back().sp;
1262 : activations_.pop_back();
1263 : }
1264 :
1265 : uint32_t ActivationFrameBase(uint32_t id) {
1266 : DCHECK_GT(activations_.size(), id);
1267 10192 : return activations_[id].fp;
1268 : }
1269 :
1270 104 : WasmInterpreter::Thread::ExceptionHandlingResult RaiseException(
1271 : Isolate* isolate, Handle<Object> exception) {
1272 : DCHECK_EQ(WasmInterpreter::TRAPPED, state_);
1273 104 : isolate->Throw(*exception); // Will check that none is pending.
1274 104 : if (HandleException(isolate) == WasmInterpreter::Thread::UNWOUND) {
1275 : DCHECK_EQ(WasmInterpreter::STOPPED, state_);
1276 : return WasmInterpreter::Thread::UNWOUND;
1277 : }
1278 16 : state_ = WasmInterpreter::PAUSED;
1279 16 : return WasmInterpreter::Thread::HANDLED;
1280 : }
1281 :
1282 : private:
1283 : // Handle a thrown exception. Returns whether the exception was handled inside
1284 : // the current activation. Unwinds the interpreted stack accordingly.
1285 1144 : WasmInterpreter::Thread::ExceptionHandlingResult HandleException(
1286 : Isolate* isolate) {
1287 : DCHECK(isolate->has_pending_exception());
1288 : DCHECK_LT(0, activations_.size());
1289 : Activation& act = activations_.back();
1290 339500 : while (frames_.size() > act.fp) {
1291 338356 : Frame& frame = frames_.back();
1292 337284 : InterpreterCode* code = frame.code;
1293 674496 : if (code->side_table->HasEntryAt(frame.pc)) {
1294 : TRACE("----- HANDLE -----\n");
1295 : // TODO(mstarzinger): Push a reference to the pending exception instead
1296 : // of a bogus {int32_t(0)} value here once the interpreter supports it.
1297 : USE(isolate->pending_exception());
1298 72 : Push(WasmValue(int32_t{0}));
1299 36 : isolate->clear_pending_exception();
1300 72 : frame.pc += JumpToHandlerDelta(code, frame.pc);
1301 : TRACE(" => handler #%zu (#%u @%zu)\n", frames_.size() - 1,
1302 : code->function->func_index, frame.pc);
1303 36 : return WasmInterpreter::Thread::HANDLED;
1304 : }
1305 : TRACE(" => drop frame #%zu (#%u @%zu)\n", frames_.size() - 1,
1306 : code->function->func_index, frame.pc);
1307 337212 : sp_ = stack_.get() + frame.sp;
1308 : frames_.pop_back();
1309 : }
1310 : TRACE("----- UNWIND -----\n");
1311 : DCHECK_EQ(act.fp, frames_.size());
1312 : DCHECK_EQ(act.sp, StackHeight());
1313 1108 : state_ = WasmInterpreter::STOPPED;
1314 1108 : return WasmInterpreter::Thread::UNWOUND;
1315 : }
1316 :
1317 : // Entries on the stack of functions being evaluated.
1318 : struct Frame {
1319 : InterpreterCode* code;
1320 : pc_t pc;
1321 : sp_t sp;
1322 :
1323 : // Limit of parameters.
1324 : sp_t plimit() { return sp + code->function->sig->parameter_count(); }
1325 : // Limit of locals.
1326 : sp_t llimit() { return plimit() + code->locals.type_list.size(); }
1327 : };
1328 :
1329 : friend class InterpretedFrameImpl;
1330 :
1331 : CodeMap* codemap_;
1332 : Handle<WasmInstanceObject> instance_object_;
1333 : std::unique_ptr<WasmValue[]> stack_;
1334 : WasmValue* stack_limit_ = nullptr; // End of allocated stack space.
1335 : WasmValue* sp_ = nullptr; // Current stack pointer.
1336 : ZoneVector<Frame> frames_;
1337 : WasmInterpreter::State state_ = WasmInterpreter::STOPPED;
1338 : pc_t break_pc_ = kInvalidPc;
1339 : TrapReason trap_reason_ = kTrapCount;
1340 : bool possible_nondeterminism_ = false;
1341 : uint8_t break_flags_ = 0; // a combination of WasmInterpreter::BreakFlag
1342 : uint64_t num_interpreted_calls_ = 0;
1343 : // Store the stack height of each activation (for unwind and frame
1344 : // inspection).
1345 : ZoneVector<Activation> activations_;
1346 :
1347 : CodeMap* codemap() const { return codemap_; }
1348 26044 : const WasmModule* module() const { return codemap_->module(); }
1349 :
1350 : void DoTrap(TrapReason trap, pc_t pc) {
1351 : TRACE("TRAP: %s\n", WasmOpcodes::TrapReasonMessage(trap));
1352 36264 : state_ = WasmInterpreter::TRAPPED;
1353 36264 : trap_reason_ = trap;
1354 : CommitPc(pc);
1355 : }
1356 :
1357 : // Push a frame with arguments already on the stack.
1358 5110698 : void PushFrame(InterpreterCode* code) {
1359 : DCHECK_NOT_NULL(code);
1360 : DCHECK_NOT_NULL(code->side_table);
1361 : EnsureStackSpace(code->side_table->max_stack_height_ +
1362 10221396 : code->locals.type_list.size());
1363 :
1364 5110698 : ++num_interpreted_calls_;
1365 5110698 : size_t arity = code->function->sig->parameter_count();
1366 : // The parameters will overlap the arguments already on the stack.
1367 : DCHECK_GE(StackHeight(), arity);
1368 15332094 : frames_.push_back({code, 0, StackHeight() - arity});
1369 5110698 : frames_.back().pc = InitLocals(code);
1370 : TRACE(" => PushFrame #%zu (#%u @%zu)\n", frames_.size() - 1,
1371 : code->function->func_index, frames_.back().pc);
1372 5110698 : }
1373 :
1374 5110698 : pc_t InitLocals(InterpreterCode* code) {
1375 22293137 : for (auto p : code->locals.type_list) {
1376 : WasmValue val;
1377 12071741 : switch (p) {
1378 : #define CASE_TYPE(wasm, ctype) \
1379 : case kWasm##wasm: \
1380 : val = WasmValue(ctype{}); \
1381 : break;
1382 2207649 : WASM_CTYPES(CASE_TYPE)
1383 : #undef CASE_TYPE
1384 : default:
1385 0 : UNREACHABLE();
1386 : break;
1387 : }
1388 : Push(val);
1389 : }
1390 5110698 : return code->locals.encoded_size;
1391 : }
1392 :
1393 : void CommitPc(pc_t pc) {
1394 : DCHECK(!frames_.empty());
1395 48541 : frames_.back().pc = pc;
1396 : }
1397 :
1398 : bool SkipBreakpoint(InterpreterCode* code, pc_t pc) {
1399 5992 : if (pc == break_pc_) {
1400 : // Skip the previously hit breakpoint when resuming.
1401 2996 : break_pc_ = kInvalidPc;
1402 : return true;
1403 : }
1404 : return false;
1405 : }
1406 :
1407 20 : void ReloadFromFrameOnException(Decoder* decoder, InterpreterCode** code,
1408 : pc_t* pc, pc_t* limit) {
1409 : Frame* top = &frames_.back();
1410 20 : *code = top->code;
1411 20 : *pc = top->pc;
1412 20 : *limit = top->code->end - top->code->start;
1413 20 : decoder->Reset(top->code->start, top->code->end);
1414 20 : }
1415 :
1416 : int LookupTargetDelta(InterpreterCode* code, pc_t pc) {
1417 3189058 : return static_cast<int>(code->side_table->Lookup(pc).pc_diff);
1418 : }
1419 :
1420 36 : int JumpToHandlerDelta(InterpreterCode* code, pc_t pc) {
1421 36 : ControlTransferEntry& control_transfer_entry = code->side_table->Lookup(pc);
1422 36 : DoStackTransfer(sp_ - (control_transfer_entry.sp_diff + kCatchInArity),
1423 72 : control_transfer_entry.target_arity);
1424 36 : return control_transfer_entry.pc_diff;
1425 : }
1426 :
1427 254658 : int DoBreak(InterpreterCode* code, pc_t pc, size_t depth) {
1428 254658 : ControlTransferEntry& control_transfer_entry = code->side_table->Lookup(pc);
1429 254658 : DoStackTransfer(sp_ - control_transfer_entry.sp_diff,
1430 509316 : control_transfer_entry.target_arity);
1431 254658 : return control_transfer_entry.pc_diff;
1432 : }
1433 :
1434 220680 : pc_t ReturnPc(Decoder* decoder, InterpreterCode* code, pc_t pc) {
1435 110340 : switch (code->orig_start[pc]) {
1436 : case kExprCallFunction: {
1437 : CallFunctionImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
1438 110268 : return pc + 1 + imm.length;
1439 : }
1440 : case kExprCallIndirect: {
1441 72 : CallIndirectImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
1442 72 : return pc + 1 + imm.length;
1443 : }
1444 : default:
1445 0 : UNREACHABLE();
1446 : }
1447 : }
1448 :
1449 4737325 : bool DoReturn(Decoder* decoder, InterpreterCode** code, pc_t* pc, pc_t* limit,
1450 : size_t arity) {
1451 : DCHECK_GT(frames_.size(), 0);
1452 9474650 : WasmValue* sp_dest = stack_.get() + frames_.back().sp;
1453 : frames_.pop_back();
1454 4737325 : if (frames_.size() == current_activation().fp) {
1455 : // A return from the last frame terminates the execution.
1456 4626985 : state_ = WasmInterpreter::FINISHED;
1457 4626985 : DoStackTransfer(sp_dest, arity);
1458 : TRACE(" => finish\n");
1459 4626985 : return false;
1460 : } else {
1461 : // Return to caller frame.
1462 : Frame* top = &frames_.back();
1463 110340 : *code = top->code;
1464 110340 : decoder->Reset((*code)->start, (*code)->end);
1465 110340 : *pc = ReturnPc(decoder, *code, top->pc);
1466 110340 : *limit = top->code->end - top->code->start;
1467 : TRACE(" => Return to #%zu (#%u @%zu)\n", frames_.size() - 1,
1468 : (*code)->function->func_index, *pc);
1469 110340 : DoStackTransfer(sp_dest, arity);
1470 110340 : return true;
1471 : }
1472 : }
1473 :
1474 : // Returns true if the call was successful, false if the stack check failed
1475 : // and the current activation was fully unwound.
1476 446444 : bool DoCall(Decoder* decoder, InterpreterCode* target, pc_t* pc,
1477 : pc_t* limit) V8_WARN_UNUSED_RESULT {
1478 446444 : frames_.back().pc = *pc;
1479 446444 : PushFrame(target);
1480 446444 : if (!DoStackCheck()) return false;
1481 446428 : *pc = frames_.back().pc;
1482 446428 : *limit = target->end - target->start;
1483 446428 : decoder->Reset(target->start, target->end);
1484 446428 : return true;
1485 : }
1486 :
1487 : // Copies {arity} values on the top of the stack down the stack to {dest},
1488 : // dropping the values in-between.
1489 4992019 : void DoStackTransfer(WasmValue* dest, size_t arity) {
1490 : // before: |---------------| pop_count | arity |
1491 : // ^ 0 ^ dest ^ sp_
1492 : //
1493 : // after: |---------------| arity |
1494 : // ^ 0 ^ sp_
1495 : DCHECK_LE(dest, sp_);
1496 : DCHECK_LE(dest + arity, sp_);
1497 4992019 : if (arity) memmove(dest, sp_ - arity, arity * sizeof(*sp_));
1498 4992019 : sp_ = dest + arity;
1499 4992019 : }
1500 :
1501 : template <typename mtype>
1502 6132266 : inline Address BoundsCheckMem(uint32_t offset, uint32_t index) {
1503 6132266 : uint32_t effective_index = offset + index;
1504 6132266 : if (effective_index < index) {
1505 : return kNullAddress; // wraparound => oob
1506 : }
1507 6132266 : if (!IsInBounds(effective_index, sizeof(mtype),
1508 6132266 : instance_object_->memory_size())) {
1509 : return kNullAddress; // oob
1510 : }
1511 : // Compute the effective address of the access, making sure to condition
1512 : // the index even in the in-bounds case.
1513 : return reinterpret_cast<Address>(instance_object_->memory_start()) +
1514 12255172 : (effective_index & instance_object_->memory_mask());
1515 : }
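  // Sketch of what the bounds check above amounts to: an overflow-safe test
  // that the access ends inside memory, after which the index is additionally
  // masked with memory_mask() so that even a mispredicted check cannot yield
  // an address outside the allocation. DemoIsInBounds is a hypothetical
  // helper for illustration, not V8's IsInBounds.
  static bool DemoIsInBounds(uint64_t index, uint64_t access_size,
                             uint64_t memory_size) {
    return access_size <= memory_size && index <= memory_size - access_size;
  }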
1516 :
1517 : template <typename ctype, typename mtype>
1518 3173174 : bool ExecuteLoad(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len,
1519 : MachineRepresentation rep) {
1520 : MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc),
1521 3173174 : sizeof(ctype));
1522 : uint32_t index = Pop().to<uint32_t>();
1523 3173174 : Address addr = BoundsCheckMem<mtype>(imm.offset, index);
1524 3173174 : if (!addr) {
1525 : DoTrap(kTrapMemOutOfBounds, pc);
1526 2136 : return false;
1527 : }
1528 : WasmValue result(
1529 3171038 : converter<ctype, mtype>{}(ReadLittleEndianValue<mtype>(addr)));
1530 :
1531 : Push(result);
1532 3171038 : len = 1 + imm.length;
1533 :
1534 3171038 : if (FLAG_trace_wasm_memory) {
1535 28 : MemoryTracingInfo info(imm.offset + index, false, rep);
1536 28 : TraceMemoryOperation(ExecutionTier::kInterpreter, &info,
1537 : code->function->func_index, static_cast<int>(pc),
1538 28 : instance_object_->memory_start());
1539 : }
1540 :
1541 : return true;
1542 : }
1543 :
1544 : template <typename ctype, typename mtype>
1545 2573736 : bool ExecuteStore(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len,
1546 : MachineRepresentation rep) {
1547 : MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc),
1548 2573736 : sizeof(ctype));
1549 0 : ctype val = Pop().to<ctype>();
1550 :
1551 : uint32_t index = Pop().to<uint32_t>();
1552 2573736 : Address addr = BoundsCheckMem<mtype>(imm.offset, index);
1553 2573736 : if (!addr) {
1554 : DoTrap(kTrapMemOutOfBounds, pc);
1555 1296 : return false;
1556 : }
1557 : WriteLittleEndianValue<mtype>(addr, converter<mtype, ctype>{}(val));
1558 2572440 : len = 1 + imm.length;
1559 :
1560 2572440 : if (FLAG_trace_wasm_memory) {
1561 8 : MemoryTracingInfo info(imm.offset + index, true, rep);
1562 8 : TraceMemoryOperation(ExecutionTier::kInterpreter, &info,
1563 : code->function->func_index, static_cast<int>(pc),
1564 8 : instance_object_->memory_start());
1565 : }
1566 :
1567 : return true;
1568 : }
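  // ExecuteLoad and ExecuteStore above funnel every access through
  // ReadLittleEndianValue / WriteLittleEndianValue so that memory is always
  // read and written in wasm's little-endian byte order. A minimal stand-in
  // for that idea (hypothetical names, assumes a little-endian host and
  // <cstring>; the real helpers also byte-swap on big-endian hosts):
  template <typename T>
  static T ReadLittleEndianSketch(const byte* p) {
    T value;
    memcpy(&value, p, sizeof(T));  // memcpy tolerates unaligned addresses.
    return value;
  }
  template <typename T>
  static void WriteLittleEndianSketch(byte* p, T value) {
    memcpy(p, &value, sizeof(T));
  }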
1569 :
1570 : template <typename type, typename op_type>
1571 768176 : bool ExtractAtomicOpParams(Decoder* decoder, InterpreterCode* code,
1572 : Address& address, pc_t pc, int& len,
1573 : type* val = nullptr, type* val2 = nullptr) {
1574 : MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc + 1),
1575 768176 : sizeof(type));
1576 438564 : if (val2) *val2 = static_cast<type>(Pop().to<op_type>());
1577 766300 : if (val) *val = static_cast<type>(Pop().to<op_type>());
1578 : uint32_t index = Pop().to<uint32_t>();
1579 384088 : address = BoundsCheckMem<type>(imm.offset, index);
1580 384088 : if (!address) {
1581 : DoTrap(kTrapMemOutOfBounds, pc);
1582 0 : return false;
1583 : }
1584 384088 : len = 2 + imm.length;
1585 384088 : return true;
1586 : }
1587 :
1588 2624 : bool ExecuteNumericOp(WasmOpcode opcode, Decoder* decoder,
1589 : InterpreterCode* code, pc_t pc, int& len) {
1590 2624 : switch (opcode) {
1591 : case kExprI32SConvertSatF32:
1592 920 : Push(WasmValue(ExecuteConvertSaturate<int32_t>(Pop().to<float>())));
1593 460 : return true;
1594 : case kExprI32UConvertSatF32:
1595 920 : Push(WasmValue(ExecuteConvertSaturate<uint32_t>(Pop().to<float>())));
1596 460 : return true;
1597 : case kExprI32SConvertSatF64:
1598 392 : Push(WasmValue(ExecuteConvertSaturate<int32_t>(Pop().to<double>())));
1599 196 : return true;
1600 : case kExprI32UConvertSatF64:
1601 392 : Push(WasmValue(ExecuteConvertSaturate<uint32_t>(Pop().to<double>())));
1602 196 : return true;
1603 : case kExprI64SConvertSatF32:
1604 920 : Push(WasmValue(ExecuteI64SConvertSatF32(Pop().to<float>())));
1605 460 : return true;
1606 : case kExprI64UConvertSatF32:
1607 920 : Push(WasmValue(ExecuteI64UConvertSatF32(Pop().to<float>())));
1608 460 : return true;
1609 : case kExprI64SConvertSatF64:
1610 392 : Push(WasmValue(ExecuteI64SConvertSatF64(Pop().to<double>())));
1611 196 : return true;
1612 : case kExprI64UConvertSatF64:
1613 392 : Push(WasmValue(ExecuteI64UConvertSatF64(Pop().to<double>())));
1614 196 : return true;
1615 : default:
1616 0 : FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
1617 0 : OpcodeName(code->start[pc]));
1618 : UNREACHABLE();
1619 : }
1620 : return false;
1621 : }
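  // The *ConvertSat* opcodes above clamp out-of-range inputs instead of
  // trapping. The general shape of such a saturating truncation, as a
  // simplified sketch (hypothetical name; the real ExecuteConvertSaturate and
  // ExecuteI64*ConvertSat* helpers are defined elsewhere in this file):
  template <typename int_type, typename float_type>
  static int_type SaturatingTruncSketch(float_type value) {
    if (std::isnan(value)) return 0;  // NaN saturates to zero.
    if (value <= static_cast<float_type>(std::numeric_limits<int_type>::min()))
      return std::numeric_limits<int_type>::min();
    if (value >= static_cast<float_type>(std::numeric_limits<int_type>::max()))
      return std::numeric_limits<int_type>::max();
    return static_cast<int_type>(value);  // In range: plain truncation.
  }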
1622 :
1623 : template <typename type, typename op_type, typename func>
1624 : op_type ExecuteAtomicBinopBE(type val, Address addr, func op) {
1625 : type old_val;
1626 : type new_val;
1627 : old_val = ReadUnalignedValue<type>(addr);
1628 : do {
1629 : new_val =
1630 : ByteReverse(static_cast<type>(op(ByteReverse<type>(old_val), val)));
1631 : } while (!(std::atomic_compare_exchange_strong(
1632 : reinterpret_cast<std::atomic<type>*>(addr), &old_val, new_val)));
1633 : return static_cast<op_type>(ByteReverse<type>(old_val));
1634 : }
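  // The loop above is the standard way to build an arbitrary read-modify-write
  // out of compare-and-swap: recompute the new value from the latest observed
  // old value until the exchange succeeds. The same pattern without the
  // byte-reversal, as a simplified sketch (hypothetical name):
  template <typename type, typename func>
  static type AtomicRmwSketch(std::atomic<type>* cell, type operand, func op) {
    type old_val = cell->load();
    type new_val;
    do {
      new_val = op(old_val, operand);
      // On failure, compare_exchange_weak refreshes old_val with the value it
      // found, so the next iteration recomputes new_val from current data.
    } while (!cell->compare_exchange_weak(old_val, new_val));
    return old_val;  // Like std::atomic_fetch_*, return the previous value.
  }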
1635 :
1636 : template <typename type>
1637 : type AdjustByteOrder(type param) {
1638 : #if V8_TARGET_BIG_ENDIAN
1639 : return ByteReverse(param);
1640 : #else
1641 : return param;
1642 : #endif
1643 : }
1644 :
1645 384088 : bool ExecuteAtomicOp(WasmOpcode opcode, Decoder* decoder,
1646 : InterpreterCode* code, pc_t pc, int& len) {
1647 : #if V8_TARGET_BIG_ENDIAN
1648 : constexpr bool kBigEndian = true;
1649 : #else
1650 : constexpr bool kBigEndian = false;
1651 : #endif
1652 : WasmValue result;
1653 384088 : switch (opcode) {
1654 : #define ATOMIC_BINOP_CASE(name, type, op_type, operation, op) \
1655 : case kExpr##name: { \
1656 : type val; \
1657 : Address addr; \
1658 : op_type result; \
1659 : if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
1660 : &val)) { \
1661 : return false; \
1662 : } \
1663 : static_assert(sizeof(std::atomic<type>) == sizeof(type), \
1664 : "Size mismatch for types std::atomic<" #type \
1665 : ">, and " #type); \
1666 : if (kBigEndian) { \
1667 : auto oplambda = [](type a, type b) { return a op b; }; \
1668 : result = ExecuteAtomicBinopBE<type, op_type>(val, addr, oplambda); \
1669 : } else { \
1670 : result = static_cast<op_type>( \
1671 : std::operation(reinterpret_cast<std::atomic<type>*>(addr), val)); \
1672 : } \
1673 : Push(WasmValue(result)); \
1674 : break; \
1675 : }
1676 40380 : ATOMIC_BINOP_CASE(I32AtomicAdd, uint32_t, uint32_t, atomic_fetch_add, +);
1677 972 : ATOMIC_BINOP_CASE(I32AtomicAdd8U, uint8_t, uint32_t, atomic_fetch_add, +);
1678 972 : ATOMIC_BINOP_CASE(I32AtomicAdd16U, uint16_t, uint32_t, atomic_fetch_add,
1679 : +);
1680 40368 : ATOMIC_BINOP_CASE(I32AtomicSub, uint32_t, uint32_t, atomic_fetch_sub, -);
1681 972 : ATOMIC_BINOP_CASE(I32AtomicSub8U, uint8_t, uint32_t, atomic_fetch_sub, -);
1682 972 : ATOMIC_BINOP_CASE(I32AtomicSub16U, uint16_t, uint32_t, atomic_fetch_sub,
1683 : -);
1684 40368 : ATOMIC_BINOP_CASE(I32AtomicAnd, uint32_t, uint32_t, atomic_fetch_and, &);
1685 972 : ATOMIC_BINOP_CASE(I32AtomicAnd8U, uint8_t, uint32_t, atomic_fetch_and, &);
1686 972 : ATOMIC_BINOP_CASE(I32AtomicAnd16U, uint16_t, uint32_t,
1687 : atomic_fetch_and, &);
1688 40368 : ATOMIC_BINOP_CASE(I32AtomicOr, uint32_t, uint32_t, atomic_fetch_or, |);
1689 972 : ATOMIC_BINOP_CASE(I32AtomicOr8U, uint8_t, uint32_t, atomic_fetch_or, |);
1690 972 : ATOMIC_BINOP_CASE(I32AtomicOr16U, uint16_t, uint32_t, atomic_fetch_or, |);
1691 40368 : ATOMIC_BINOP_CASE(I32AtomicXor, uint32_t, uint32_t, atomic_fetch_xor, ^);
1692 972 : ATOMIC_BINOP_CASE(I32AtomicXor8U, uint8_t, uint32_t, atomic_fetch_xor, ^);
1693 972 : ATOMIC_BINOP_CASE(I32AtomicXor16U, uint16_t, uint32_t, atomic_fetch_xor,
1694 : ^);
1695 40368 : ATOMIC_BINOP_CASE(I32AtomicExchange, uint32_t, uint32_t, atomic_exchange,
1696 : =);
1697 972 : ATOMIC_BINOP_CASE(I32AtomicExchange8U, uint8_t, uint32_t, atomic_exchange,
1698 : =);
1699 972 : ATOMIC_BINOP_CASE(I32AtomicExchange16U, uint16_t, uint32_t,
1700 : atomic_exchange, =);
1701 78768 : ATOMIC_BINOP_CASE(I64AtomicAdd, uint64_t, uint64_t, atomic_fetch_add, +);
1702 972 : ATOMIC_BINOP_CASE(I64AtomicAdd8U, uint8_t, uint64_t, atomic_fetch_add, +);
1703 972 : ATOMIC_BINOP_CASE(I64AtomicAdd16U, uint16_t, uint64_t, atomic_fetch_add,
1704 : +);
1705 40380 : ATOMIC_BINOP_CASE(I64AtomicAdd32U, uint32_t, uint64_t, atomic_fetch_add,
1706 : +);
1707 78756 : ATOMIC_BINOP_CASE(I64AtomicSub, uint64_t, uint64_t, atomic_fetch_sub, -);
1708 972 : ATOMIC_BINOP_CASE(I64AtomicSub8U, uint8_t, uint64_t, atomic_fetch_sub, -);
1709 984 : ATOMIC_BINOP_CASE(I64AtomicSub16U, uint16_t, uint64_t, atomic_fetch_sub,
1710 : -);
1711 40380 : ATOMIC_BINOP_CASE(I64AtomicSub32U, uint32_t, uint64_t, atomic_fetch_sub,
1712 : -);
1713 78756 : ATOMIC_BINOP_CASE(I64AtomicAnd, uint64_t, uint64_t, atomic_fetch_and, &);
1714 972 : ATOMIC_BINOP_CASE(I64AtomicAnd8U, uint8_t, uint64_t, atomic_fetch_and, &);
1715 972 : ATOMIC_BINOP_CASE(I64AtomicAnd16U, uint16_t, uint64_t,
1716 : atomic_fetch_and, &);
1717 40380 : ATOMIC_BINOP_CASE(I64AtomicAnd32U, uint32_t, uint64_t,
1718 : atomic_fetch_and, &);
1719 78756 : ATOMIC_BINOP_CASE(I64AtomicOr, uint64_t, uint64_t, atomic_fetch_or, |);
1720 972 : ATOMIC_BINOP_CASE(I64AtomicOr8U, uint8_t, uint64_t, atomic_fetch_or, |);
1721 972 : ATOMIC_BINOP_CASE(I64AtomicOr16U, uint16_t, uint64_t, atomic_fetch_or, |);
1722 40380 : ATOMIC_BINOP_CASE(I64AtomicOr32U, uint32_t, uint64_t, atomic_fetch_or, |);
1723 78756 : ATOMIC_BINOP_CASE(I64AtomicXor, uint64_t, uint64_t, atomic_fetch_xor, ^);
1724 972 : ATOMIC_BINOP_CASE(I64AtomicXor8U, uint8_t, uint64_t, atomic_fetch_xor, ^);
1725 972 : ATOMIC_BINOP_CASE(I64AtomicXor16U, uint16_t, uint64_t, atomic_fetch_xor,
1726 : ^);
1727 40380 : ATOMIC_BINOP_CASE(I64AtomicXor32U, uint32_t, uint64_t, atomic_fetch_xor,
1728 : ^);
1729 78756 : ATOMIC_BINOP_CASE(I64AtomicExchange, uint64_t, uint64_t, atomic_exchange,
1730 : =);
1731 972 : ATOMIC_BINOP_CASE(I64AtomicExchange8U, uint8_t, uint64_t, atomic_exchange,
1732 : =);
1733 972 : ATOMIC_BINOP_CASE(I64AtomicExchange16U, uint16_t, uint64_t,
1734 : atomic_exchange, =);
1735 40380 : ATOMIC_BINOP_CASE(I64AtomicExchange32U, uint32_t, uint64_t,
1736 : atomic_exchange, =);
1737 : #undef ATOMIC_BINOP_CASE
1738 : #define ATOMIC_COMPARE_EXCHANGE_CASE(name, type, op_type) \
1739 : case kExpr##name: { \
1740 : type old_val; \
1741 : type new_val; \
1742 : Address addr; \
1743 : if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
1744 : &old_val, &new_val)) { \
1745 : return false; \
1746 : } \
1747 : static_assert(sizeof(std::atomic<type>) == sizeof(type), \
1748 : "Size mismatch for types std::atomic<" #type \
1749 : ">, and " #type); \
1750 : old_val = AdjustByteOrder<type>(old_val); \
1751 : new_val = AdjustByteOrder<type>(new_val); \
1752 : std::atomic_compare_exchange_strong( \
1753 : reinterpret_cast<std::atomic<type>*>(addr), &old_val, new_val); \
1754 : Push(WasmValue(static_cast<op_type>(AdjustByteOrder<type>(old_val)))); \
1755 : break; \
1756 : }
1757 40380 : ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange, uint32_t,
1758 : uint32_t);
1759 972 : ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange8U, uint8_t,
1760 : uint32_t);
1761 972 : ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange16U, uint16_t,
1762 : uint32_t);
1763 78768 : ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange, uint64_t,
1764 : uint64_t);
1765 972 : ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange8U, uint8_t,
1766 : uint64_t);
1767 984 : ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange16U, uint16_t,
1768 : uint64_t);
1769 40380 : ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange32U, uint32_t,
1770 : uint64_t);
1771 : #undef ATOMIC_COMPARE_EXCHANGE_CASE
1772 : #define ATOMIC_LOAD_CASE(name, type, op_type, operation) \
1773 : case kExpr##name: { \
1774 : Address addr; \
1775 : if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len)) { \
1776 : return false; \
1777 : } \
1778 : static_assert(sizeof(std::atomic<type>) == sizeof(type), \
1779 : "Size mismatch for types std::atomic<" #type \
1780 : ">, and " #type); \
1781 : result = WasmValue(static_cast<op_type>(AdjustByteOrder<type>( \
1782 : std::operation(reinterpret_cast<std::atomic<type>*>(addr))))); \
1783 : Push(result); \
1784 : break; \
1785 : }
1786 1392 : ATOMIC_LOAD_CASE(I32AtomicLoad, uint32_t, uint32_t, atomic_load);
1787 216 : ATOMIC_LOAD_CASE(I32AtomicLoad8U, uint8_t, uint32_t, atomic_load);
1788 216 : ATOMIC_LOAD_CASE(I32AtomicLoad16U, uint16_t, uint32_t, atomic_load);
1789 1968 : ATOMIC_LOAD_CASE(I64AtomicLoad, uint64_t, uint64_t, atomic_load);
1790 228 : ATOMIC_LOAD_CASE(I64AtomicLoad8U, uint8_t, uint64_t, atomic_load);
1791 216 : ATOMIC_LOAD_CASE(I64AtomicLoad16U, uint16_t, uint64_t, atomic_load);
1792 1392 : ATOMIC_LOAD_CASE(I64AtomicLoad32U, uint32_t, uint64_t, atomic_load);
1793 : #undef ATOMIC_LOAD_CASE
1794 : #define ATOMIC_STORE_CASE(name, type, op_type, operation) \
1795 : case kExpr##name: { \
1796 : type val; \
1797 : Address addr; \
1798 : if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
1799 : &val)) { \
1800 : return false; \
1801 : } \
1802 : static_assert(sizeof(std::atomic<type>) == sizeof(type), \
1803 : "Size mismatch for types std::atomic<" #type \
1804 : ">, and " #type); \
1805 : std::operation(reinterpret_cast<std::atomic<type>*>(addr), \
1806 : AdjustByteOrder<type>(val)); \
1807 : break; \
1808 : }
1809 472 : ATOMIC_STORE_CASE(I32AtomicStore, uint32_t, uint32_t, atomic_store);
1810 72 : ATOMIC_STORE_CASE(I32AtomicStore8U, uint8_t, uint32_t, atomic_store);
1811 72 : ATOMIC_STORE_CASE(I32AtomicStore16U, uint16_t, uint32_t, atomic_store);
1812 656 : ATOMIC_STORE_CASE(I64AtomicStore, uint64_t, uint64_t, atomic_store);
1813 72 : ATOMIC_STORE_CASE(I64AtomicStore8U, uint8_t, uint64_t, atomic_store);
1814 72 : ATOMIC_STORE_CASE(I64AtomicStore16U, uint16_t, uint64_t, atomic_store);
1815 464 : ATOMIC_STORE_CASE(I64AtomicStore32U, uint32_t, uint64_t, atomic_store);
1816 : #undef ATOMIC_STORE_CASE
1817 : default:
1818 0 : UNREACHABLE();
1819 : return false;
1820 : }
1821 : return true;
1822 : }
1823 :
1824 19228 : byte* GetGlobalPtr(const WasmGlobal* global) {
1825 19228 : if (global->mutability && global->imported) {
1826 : return reinterpret_cast<byte*>(
1827 0 : instance_object_->imported_mutable_globals()[global->index]);
1828 : } else {
1829 19228 : return instance_object_->globals_start() + global->offset;
1830 : }
1831 : }
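  // Globals thus live in one of two places: an imported mutable global is
  // reached through a per-instance pointer table (so all importing instances
  // share the same storage), while every other global sits at a fixed offset
  // inside this instance's own globals area. A schematic of that lookup with
  // hypothetical names, not the real instance layout:
  static byte* GlobalAddressSketch(byte** imported_mutable_table,
                                   byte* own_globals_start,
                                   bool is_imported_mutable,
                                   uint32_t import_index, uint32_t offset) {
    return is_imported_mutable ? imported_mutable_table[import_index]
                               : own_globals_start + offset;
  }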
1832 :
1833 8529684 : bool ExecuteSimdOp(WasmOpcode opcode, Decoder* decoder, InterpreterCode* code,
1834 : pc_t pc, int& len) {
1835 5321416 : switch (opcode) {
1836 : #define SPLAT_CASE(format, sType, valType, num) \
1837 : case kExpr##format##Splat: { \
1838 : WasmValue val = Pop(); \
1839 : valType v = val.to<valType>(); \
1840 : sType s; \
1841 : for (int i = 0; i < num; i++) s.val[i] = v; \
1842 : Push(WasmValue(Simd128(s))); \
1843 : return true; \
1844 : }
1845 1762452 : SPLAT_CASE(I32x4, int4, int32_t, 4)
1846 2365632 : SPLAT_CASE(F32x4, float4, float, 4)
1847 47976 : SPLAT_CASE(I16x8, int8, int32_t, 8)
1848 43440 : SPLAT_CASE(I8x16, int16, int32_t, 16)
1849 : #undef SPLAT_CASE
1850 : #define EXTRACT_LANE_CASE(format, name) \
1851 : case kExpr##format##ExtractLane: { \
1852 : SimdLaneImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
1853 : ++len; \
1854 : WasmValue val = Pop(); \
1855 : Simd128 s = val.to_s128(); \
1856 : auto ss = s.to_##name(); \
1857 : Push(WasmValue(ss.val[LANE(imm.lane, ss)])); \
1858 : return true; \
1859 : }
1860 8143056 : EXTRACT_LANE_CASE(I32x4, i32x4)
1861 2873792 : EXTRACT_LANE_CASE(F32x4, f32x4)
1862 1138688 : EXTRACT_LANE_CASE(I16x8, i16x8)
1863 532864 : EXTRACT_LANE_CASE(I8x16, i8x16)
1864 : #undef EXTRACT_LANE_CASE
1865 : #define BINOP_CASE(op, name, stype, count, expr) \
1866 : case kExpr##op: { \
1867 : WasmValue v2 = Pop(); \
1868 : WasmValue v1 = Pop(); \
1869 : stype s1 = v1.to_s128().to_##name(); \
1870 : stype s2 = v2.to_s128().to_##name(); \
1871 : stype res; \
1872 : for (size_t i = 0; i < count; ++i) { \
1873 : auto a = s1.val[LANE(i, s1)]; \
1874 : auto b = s2.val[LANE(i, s1)]; \
1875 : res.val[LANE(i, s1)] = expr; \
1876 : } \
1877 : Push(WasmValue(Simd128(res))); \
1878 : return true; \
1879 : }
1880 216696 : BINOP_CASE(F32x4Add, f32x4, float4, 4, a + b)
1881 216600 : BINOP_CASE(F32x4Sub, f32x4, float4, 4, a - b)
1882 193344 : BINOP_CASE(F32x4Mul, f32x4, float4, 4, a * b)
1883 216600 : BINOP_CASE(F32x4Min, f32x4, float4, 4, a < b ? a : b)
1884 216600 : BINOP_CASE(F32x4Max, f32x4, float4, 4, a > b ? a : b)
1885 134800 : BINOP_CASE(I32x4Add, i32x4, int4, 4, base::AddWithWraparound(a, b))
1886 134560 : BINOP_CASE(I32x4Sub, i32x4, int4, 4, base::SubWithWraparound(a, b))
1887 134560 : BINOP_CASE(I32x4Mul, i32x4, int4, 4, base::MulWithWraparound(a, b))
1888 80736 : BINOP_CASE(I32x4MinS, i32x4, int4, 4, a < b ? a : b)
1889 80736 : BINOP_CASE(I32x4MinU, i32x4, int4, 4,
1890 : static_cast<uint32_t>(a) < static_cast<uint32_t>(b) ? a : b)
1891 80736 : BINOP_CASE(I32x4MaxS, i32x4, int4, 4, a > b ? a : b)
1892 80736 : BINOP_CASE(I32x4MaxU, i32x4, int4, 4,
1893 : static_cast<uint32_t>(a) > static_cast<uint32_t>(b) ? a : b)
1894 80736 : BINOP_CASE(S128And, i32x4, int4, 4, a & b)
1895 80736 : BINOP_CASE(S128Or, i32x4, int4, 4, a | b)
1896 80736 : BINOP_CASE(S128Xor, i32x4, int4, 4, a ^ b)
1897 4536 : BINOP_CASE(I16x8Add, i16x8, int8, 8, base::AddWithWraparound(a, b))
1898 4536 : BINOP_CASE(I16x8Sub, i16x8, int8, 8, base::SubWithWraparound(a, b))
1899 4536 : BINOP_CASE(I16x8Mul, i16x8, int8, 8, base::MulWithWraparound(a, b))
1900 1944 : BINOP_CASE(I16x8MinS, i16x8, int8, 8, a < b ? a : b)
1901 1944 : BINOP_CASE(I16x8MinU, i16x8, int8, 8,
1902 : static_cast<uint16_t>(a) < static_cast<uint16_t>(b) ? a : b)
1903 1944 : BINOP_CASE(I16x8MaxS, i16x8, int8, 8, a > b ? a : b)
1904 1944 : BINOP_CASE(I16x8MaxU, i16x8, int8, 8,
1905 : static_cast<uint16_t>(a) > static_cast<uint16_t>(b) ? a : b)
1906 1944 : BINOP_CASE(I16x8AddSaturateS, i16x8, int8, 8, SaturateAdd<int16_t>(a, b))
1907 4536 : BINOP_CASE(I16x8AddSaturateU, i16x8, int8, 8, SaturateAdd<uint16_t>(a, b))
1908 1944 : BINOP_CASE(I16x8SubSaturateS, i16x8, int8, 8, SaturateSub<int16_t>(a, b))
1909 4536 : BINOP_CASE(I16x8SubSaturateU, i16x8, int8, 8, SaturateSub<uint16_t>(a, b))
1910 7128 : BINOP_CASE(I8x16Add, i8x16, int16, 16, base::AddWithWraparound(a, b))
1911 7128 : BINOP_CASE(I8x16Sub, i8x16, int16, 16, base::SubWithWraparound(a, b))
1912 7128 : BINOP_CASE(I8x16Mul, i8x16, int16, 16, base::MulWithWraparound(a, b))
1913 1944 : BINOP_CASE(I8x16MinS, i8x16, int16, 16, a < b ? a : b)
1914 1944 : BINOP_CASE(I8x16MinU, i8x16, int16, 16,
1915 : static_cast<uint8_t>(a) < static_cast<uint8_t>(b) ? a : b)
1916 1944 : BINOP_CASE(I8x16MaxS, i8x16, int16, 16, a > b ? a : b)
1917 1944 : BINOP_CASE(I8x16MaxU, i8x16, int16, 16,
1918 : static_cast<uint8_t>(a) > static_cast<uint8_t>(b) ? a : b)
1919 1944 : BINOP_CASE(I8x16AddSaturateS, i8x16, int16, 16, SaturateAdd<int8_t>(a, b))
1920 7128 : BINOP_CASE(I8x16AddSaturateU, i8x16, int16, 16,
1921 : SaturateAdd<uint8_t>(a, b))
1922 1944 : BINOP_CASE(I8x16SubSaturateS, i8x16, int16, 16, SaturateSub<int8_t>(a, b))
1923 7128 : BINOP_CASE(I8x16SubSaturateU, i8x16, int16, 16,
1924 : SaturateSub<uint8_t>(a, b))
1925 : #undef BINOP_CASE
1926 : #define UNOP_CASE(op, name, stype, count, expr) \
1927 : case kExpr##op: { \
1928 : WasmValue v = Pop(); \
1929 : stype s = v.to_s128().to_##name(); \
1930 : stype res; \
1931 : for (size_t i = 0; i < count; ++i) { \
1932 : auto a = s.val[i]; \
1933 : res.val[i] = expr; \
1934 : } \
1935 : Push(WasmValue(Simd128(res))); \
1936 : return true; \
1937 : }
1938 3040 : UNOP_CASE(F32x4Abs, f32x4, float4, 4, std::abs(a))
1939 1520 : UNOP_CASE(F32x4Neg, f32x4, float4, 4, -a)
1940 2976 : UNOP_CASE(F32x4RecipApprox, f32x4, float4, 4, base::Recip(a))
1941 640 : UNOP_CASE(F32x4RecipSqrtApprox, f32x4, float4, 4, base::RecipSqrt(a))
1942 1856 : UNOP_CASE(I32x4Neg, i32x4, int4, 4, base::NegateWithWraparound(a))
1943 928 : UNOP_CASE(S128Not, i32x4, int4, 4, ~a)
1944 432 : UNOP_CASE(I16x8Neg, i16x8, int8, 8, base::NegateWithWraparound(a))
1945 720 : UNOP_CASE(I8x16Neg, i8x16, int16, 16, base::NegateWithWraparound(a))
1946 : #undef UNOP_CASE
1947 : #define CMPOP_CASE(op, name, stype, out_stype, count, expr) \
1948 : case kExpr##op: { \
1949 : WasmValue v2 = Pop(); \
1950 : WasmValue v1 = Pop(); \
1951 : stype s1 = v1.to_s128().to_##name(); \
1952 : stype s2 = v2.to_s128().to_##name(); \
1953 : out_stype res; \
1954 : for (size_t i = 0; i < count; ++i) { \
1955 : auto a = s1.val[i]; \
1956 : auto b = s2.val[i]; \
1957 : res.val[i] = expr ? -1 : 0; \
1958 : } \
1959 : Push(WasmValue(Simd128(res))); \
1960 : return true; \
1961 : }
1962 216600 : CMPOP_CASE(F32x4Eq, f32x4, float4, int4, 4, a == b)
1963 216600 : CMPOP_CASE(F32x4Ne, f32x4, float4, int4, 4, a != b)
1964 216600 : CMPOP_CASE(F32x4Gt, f32x4, float4, int4, 4, a > b)
1965 216600 : CMPOP_CASE(F32x4Ge, f32x4, float4, int4, 4, a >= b)
1966 216600 : CMPOP_CASE(F32x4Lt, f32x4, float4, int4, 4, a < b)
1967 216600 : CMPOP_CASE(F32x4Le, f32x4, float4, int4, 4, a <= b)
1968 80832 : CMPOP_CASE(I32x4Eq, i32x4, int4, int4, 4, a == b)
1969 80832 : CMPOP_CASE(I32x4Ne, i32x4, int4, int4, 4, a != b)
1970 80736 : CMPOP_CASE(I32x4GtS, i32x4, int4, int4, 4, a > b)
1971 80736 : CMPOP_CASE(I32x4GeS, i32x4, int4, int4, 4, a >= b)
1972 80736 : CMPOP_CASE(I32x4LtS, i32x4, int4, int4, 4, a < b)
1973 80736 : CMPOP_CASE(I32x4LeS, i32x4, int4, int4, 4, a <= b)
1974 80736 : CMPOP_CASE(I32x4GtU, i32x4, int4, int4, 4,
1975 : static_cast<uint32_t>(a) > static_cast<uint32_t>(b))
1976 80736 : CMPOP_CASE(I32x4GeU, i32x4, int4, int4, 4,
1977 : static_cast<uint32_t>(a) >= static_cast<uint32_t>(b))
1978 80736 : CMPOP_CASE(I32x4LtU, i32x4, int4, int4, 4,
1979 : static_cast<uint32_t>(a) < static_cast<uint32_t>(b))
1980 80736 : CMPOP_CASE(I32x4LeU, i32x4, int4, int4, 4,
1981 : static_cast<uint32_t>(a) <= static_cast<uint32_t>(b))
1982 2040 : CMPOP_CASE(I16x8Eq, i16x8, int8, int8, 8, a == b)
1983 2040 : CMPOP_CASE(I16x8Ne, i16x8, int8, int8, 8, a != b)
1984 1944 : CMPOP_CASE(I16x8GtS, i16x8, int8, int8, 8, a > b)
1985 1944 : CMPOP_CASE(I16x8GeS, i16x8, int8, int8, 8, a >= b)
1986 1944 : CMPOP_CASE(I16x8LtS, i16x8, int8, int8, 8, a < b)
1987 1944 : CMPOP_CASE(I16x8LeS, i16x8, int8, int8, 8, a <= b)
1988 1944 : CMPOP_CASE(I16x8GtU, i16x8, int8, int8, 8,
1989 : static_cast<uint16_t>(a) > static_cast<uint16_t>(b))
1990 1944 : CMPOP_CASE(I16x8GeU, i16x8, int8, int8, 8,
1991 : static_cast<uint16_t>(a) >= static_cast<uint16_t>(b))
1992 1944 : CMPOP_CASE(I16x8LtU, i16x8, int8, int8, 8,
1993 : static_cast<uint16_t>(a) < static_cast<uint16_t>(b))
1994 1944 : CMPOP_CASE(I16x8LeU, i16x8, int8, int8, 8,
1995 : static_cast<uint16_t>(a) <= static_cast<uint16_t>(b))
1996 2040 : CMPOP_CASE(I8x16Eq, i8x16, int16, int16, 16, a == b)
1997 2040 : CMPOP_CASE(I8x16Ne, i8x16, int16, int16, 16, a != b)
1998 1944 : CMPOP_CASE(I8x16GtS, i8x16, int16, int16, 16, a > b)
1999 1944 : CMPOP_CASE(I8x16GeS, i8x16, int16, int16, 16, a >= b)
2000 1944 : CMPOP_CASE(I8x16LtS, i8x16, int16, int16, 16, a < b)
2001 1944 : CMPOP_CASE(I8x16LeS, i8x16, int16, int16, 16, a <= b)
2002 1944 : CMPOP_CASE(I8x16GtU, i8x16, int16, int16, 16,
2003 : static_cast<uint8_t>(a) > static_cast<uint8_t>(b))
2004 1944 : CMPOP_CASE(I8x16GeU, i8x16, int16, int16, 16,
2005 : static_cast<uint8_t>(a) >= static_cast<uint8_t>(b))
2006 1944 : CMPOP_CASE(I8x16LtU, i8x16, int16, int16, 16,
2007 : static_cast<uint8_t>(a) < static_cast<uint8_t>(b))
2008 1944 : CMPOP_CASE(I8x16LeU, i8x16, int16, int16, 16,
2009 : static_cast<uint8_t>(a) <= static_cast<uint8_t>(b))
2010 : #undef CMPOP_CASE
2011 : #define REPLACE_LANE_CASE(format, name, stype, ctype) \
2012 : case kExpr##format##ReplaceLane: { \
2013 : SimdLaneImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
2014 : ++len; \
2015 : WasmValue new_val = Pop(); \
2016 : WasmValue simd_val = Pop(); \
2017 : stype s = simd_val.to_s128().to_##name(); \
2018 : s.val[LANE(imm.lane, s)] = new_val.to<ctype>(); \
2019 : Push(WasmValue(Simd128(s))); \
2020 : return true; \
2021 : }
2022 160 : REPLACE_LANE_CASE(F32x4, f32x4, float4, float)
2023 200 : REPLACE_LANE_CASE(I32x4, i32x4, int4, int32_t)
2024 360 : REPLACE_LANE_CASE(I16x8, i16x8, int8, int32_t)
2025 520 : REPLACE_LANE_CASE(I8x16, i8x16, int16, int32_t)
2026 : #undef REPLACE_LANE_CASE
2027 : case kExprS128LoadMem:
2028 : return ExecuteLoad<Simd128, Simd128>(decoder, code, pc, len,
2029 0 : MachineRepresentation::kSimd128);
2030 : case kExprS128StoreMem:
2031 : return ExecuteStore<Simd128, Simd128>(decoder, code, pc, len,
2032 0 : MachineRepresentation::kSimd128);
2033 : #define SHIFT_CASE(op, name, stype, count, expr) \
2034 : case kExpr##op: { \
2035 : SimdShiftImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
2036 : ++len; \
2037 : WasmValue v = Pop(); \
2038 : stype s = v.to_s128().to_##name(); \
2039 : stype res; \
2040 : for (size_t i = 0; i < count; ++i) { \
2041 : auto a = s.val[i]; \
2042 : res.val[i] = expr; \
2043 : } \
2044 : Push(WasmValue(Simd128(res))); \
2045 : return true; \
2046 : }
2047 35960 : SHIFT_CASE(I32x4Shl, i32x4, int4, 4,
2048 : static_cast<uint32_t>(a) << imm.shift)
2049 35960 : SHIFT_CASE(I32x4ShrS, i32x4, int4, 4, a >> imm.shift)
2050 35960 : SHIFT_CASE(I32x4ShrU, i32x4, int4, 4,
2051 : static_cast<uint32_t>(a) >> imm.shift)
2052 2700 : SHIFT_CASE(I16x8Shl, i16x8, int8, 8,
2053 : static_cast<uint16_t>(a) << imm.shift)
2054 2700 : SHIFT_CASE(I16x8ShrS, i16x8, int8, 8, a >> imm.shift)
2055 2700 : SHIFT_CASE(I16x8ShrU, i16x8, int8, 8,
2056 : static_cast<uint16_t>(a) >> imm.shift)
2057 1260 : SHIFT_CASE(I8x16Shl, i8x16, int16, 16,
2058 : static_cast<uint8_t>(a) << imm.shift)
2059 1260 : SHIFT_CASE(I8x16ShrS, i8x16, int16, 16, a >> imm.shift)
2060 1260 : SHIFT_CASE(I8x16ShrU, i8x16, int16, 16,
2061 : static_cast<uint8_t>(a) >> imm.shift)
2062 : #undef SHIFT_CASE
2063 : #define CONVERT_CASE(op, src_type, name, dst_type, count, start_index, ctype, \
2064 : expr) \
2065 : case kExpr##op: { \
2066 : WasmValue v = Pop(); \
2067 : src_type s = v.to_s128().to_##name(); \
2068 : dst_type res; \
2069 : for (size_t i = 0; i < count; ++i) { \
2070 : ctype a = s.val[LANE(start_index + i, s)]; \
2071 : res.val[LANE(i, res)] = expr; \
2072 : } \
2073 : Push(WasmValue(Simd128(res))); \
2074 : return true; \
2075 : }
2076 0 : CONVERT_CASE(F32x4SConvertI32x4, int4, i32x4, float4, 4, 0, int32_t,
2077 : static_cast<float>(a))
2078 0 : CONVERT_CASE(F32x4UConvertI32x4, int4, i32x4, float4, 4, 0, uint32_t,
2079 : static_cast<float>(a))
2080 1552 : CONVERT_CASE(I32x4SConvertF32x4, float4, f32x4, int4, 4, 0, double,
2081 : std::isnan(a) ? 0
2082             :                      : a < kMinInt ? kMinInt : a > kMaxInt
2083 : ? kMaxInt
2084 : : static_cast<int32_t>(a))
2085 1552 : CONVERT_CASE(I32x4UConvertF32x4, float4, f32x4, int4, 4, 0, double,
2086 : std::isnan(a)
2087 : ? 0
2088             :                          : a < 0 ? 0 : a > kMaxUInt32 ? kMaxUInt32
2089 : : static_cast<uint32_t>(a))
2090 144 : CONVERT_CASE(I32x4SConvertI16x8High, int8, i16x8, int4, 4, 4, int16_t,
2091 : a)
2092 144 : CONVERT_CASE(I32x4UConvertI16x8High, int8, i16x8, int4, 4, 4, uint16_t,
2093 : a)
2094 144 : CONVERT_CASE(I32x4SConvertI16x8Low, int8, i16x8, int4, 4, 0, int16_t, a)
2095 144 : CONVERT_CASE(I32x4UConvertI16x8Low, int8, i16x8, int4, 4, 0, uint16_t,
2096 : a)
2097 144 : CONVERT_CASE(I16x8SConvertI8x16High, int16, i8x16, int8, 8, 8, int8_t,
2098 : a)
2099 144 : CONVERT_CASE(I16x8UConvertI8x16High, int16, i8x16, int8, 8, 8, uint8_t,
2100 : a)
2101 144 : CONVERT_CASE(I16x8SConvertI8x16Low, int16, i8x16, int8, 8, 0, int8_t, a)
2102 144 : CONVERT_CASE(I16x8UConvertI8x16Low, int16, i8x16, int8, 8, 0, uint8_t,
2103 : a)
2104 : #undef CONVERT_CASE
2105 : #define PACK_CASE(op, src_type, name, dst_type, count, ctype, dst_ctype, \
2106 : is_unsigned) \
2107 : case kExpr##op: { \
2108 : WasmValue v2 = Pop(); \
2109 : WasmValue v1 = Pop(); \
2110 : src_type s1 = v1.to_s128().to_##name(); \
2111 : src_type s2 = v2.to_s128().to_##name(); \
2112 : dst_type res; \
2113 : int64_t min = std::numeric_limits<ctype>::min(); \
2114 : int64_t max = std::numeric_limits<ctype>::max(); \
2115 : for (size_t i = 0; i < count; ++i) { \
2116 : int32_t v = i < count / 2 ? s1.val[LANE(i, s1)] \
2117 : : s2.val[LANE(i - count / 2, s2)]; \
2118 : int64_t a = is_unsigned ? static_cast<int64_t>(v & 0xFFFFFFFFu) : v; \
2119 : res.val[LANE(i, res)] = \
2120 : static_cast<dst_ctype>(std::max(min, std::min(max, a))); \
2121 : } \
2122 : Push(WasmValue(Simd128(res))); \
2123 : return true; \
2124 : }
2125 188384 : PACK_CASE(I16x8SConvertI32x4, int4, i32x4, int8, 8, int16_t, int16_t,
2126 : false)
2127 188384 : PACK_CASE(I16x8UConvertI32x4, int4, i32x4, int8, 8, uint16_t, int16_t,
2128 : true)
2129 7128 : PACK_CASE(I8x16SConvertI16x8, int8, i16x8, int16, 16, int8_t, int8_t,
2130 : false)
2131 7128 : PACK_CASE(I8x16UConvertI16x8, int8, i16x8, int16, 16, uint8_t, int8_t,
2132 : true)
2133 : #undef PACK_CASE
2134 : case kExprS128Select: {
2135 0 : int4 v2 = Pop().to_s128().to_i32x4();
2136 0 : int4 v1 = Pop().to_s128().to_i32x4();
2137 0 : int4 bool_val = Pop().to_s128().to_i32x4();
2138 : int4 res;
2139 0 : for (size_t i = 0; i < 4; ++i) {
2140 0 : res.val[i] = v2.val[i] ^ ((v1.val[i] ^ v2.val[i]) & bool_val.val[i]);
2141 : }
2142 0 : Push(WasmValue(Simd128(res)));
2143 : return true;
2144 : }
2145 : #define ADD_HORIZ_CASE(op, name, stype, count) \
2146 : case kExpr##op: { \
2147 : WasmValue v2 = Pop(); \
2148 : WasmValue v1 = Pop(); \
2149 : stype s1 = v1.to_s128().to_##name(); \
2150 : stype s2 = v2.to_s128().to_##name(); \
2151 : stype res; \
2152 : for (size_t i = 0; i < count / 2; ++i) { \
2153 : res.val[LANE(i, s1)] = \
2154 : s1.val[LANE(i * 2, s1)] + s1.val[LANE(i * 2 + 1, s1)]; \
2155 : res.val[LANE(i + count / 2, s1)] = \
2156 : s2.val[LANE(i * 2, s1)] + s2.val[LANE(i * 2 + 1, s1)]; \
2157 : } \
2158 : Push(WasmValue(Simd128(res))); \
2159 : return true; \
2160 : }
2161 24 : ADD_HORIZ_CASE(I32x4AddHoriz, i32x4, int4, 4)
2162 24 : ADD_HORIZ_CASE(F32x4AddHoriz, f32x4, float4, 4)
2163 24 : ADD_HORIZ_CASE(I16x8AddHoriz, i16x8, int8, 8)
2164 : #undef ADD_HORIZ_CASE
2165 : case kExprS8x16Shuffle: {
2166 : Simd8x16ShuffleImmediate<Decoder::kNoValidate> imm(decoder,
2167 : code->at(pc));
2168 11968 : len += 16;
2169 23936 : int16 v2 = Pop().to_s128().to_i8x16();
2170 23936 : int16 v1 = Pop().to_s128().to_i8x16();
2171 : int16 res;
2172 203456 : for (size_t i = 0; i < kSimd128Size; ++i) {
2173 191488 : int lane = imm.shuffle[i];
2174 : res.val[LANE(i, v1)] = lane < kSimd128Size
2175 : ? v1.val[LANE(lane, v1)]
2176 191488 : : v2.val[LANE(lane - kSimd128Size, v1)];
2177 : }
2178 23936 : Push(WasmValue(Simd128(res)));
2179 : return true;
2180 : }
2181 : #define REDUCTION_CASE(op, name, stype, count, operation) \
2182 : case kExpr##op: { \
2183 : stype s = Pop().to_s128().to_##name(); \
2184 : int32_t res = s.val[0]; \
2185 : for (size_t i = 1; i < count; ++i) { \
2186 : res = res operation static_cast<int32_t>(s.val[i]); \
2187 : } \
2188 : Push(WasmValue(res)); \
2189 : return true; \
2190 : }
2191 48 : REDUCTION_CASE(S1x4AnyTrue, i32x4, int4, 4, |)
2192 48 : REDUCTION_CASE(S1x4AllTrue, i32x4, int4, 4, &)
2193 48 : REDUCTION_CASE(S1x8AnyTrue, i16x8, int8, 8, |)
2194 48 : REDUCTION_CASE(S1x8AllTrue, i16x8, int8, 8, &)
2195 48 : REDUCTION_CASE(S1x16AnyTrue, i8x16, int16, 16, |)
2196 48 : REDUCTION_CASE(S1x16AllTrue, i8x16, int16, 16, &)
2197 : #undef REDUCTION_CASE
2198 : default:
2199 : return false;
2200 : }
2201 : }
2202 :
2203             :   // Check if our control stack (frames_) exceeds the limit. Trigger a stack
2204             :   // overflow if it does, and unwind the current frame.
2205 : // Returns true if execution can continue, false if the current activation was
2206 : // fully unwound.
2207 : // Do call this function immediately *after* pushing a new frame. The pc of
2208 : // the top frame will be reset to 0 if the stack check fails.
2209 446444 : bool DoStackCheck() V8_WARN_UNUSED_RESULT {
2210 : // The goal of this stack check is not to prevent actual stack overflows,
2211 : // but to simulate stack overflows during the execution of compiled code.
2212 : // That is why this function uses FLAG_stack_size, even though the value
2213 : // stack actually lies in zone memory.
2214 446444 : const size_t stack_size_limit = FLAG_stack_size * KB;
2215 : // Sum up the value stack size and the control stack size.
2216 : const size_t current_stack_size =
2217 1339332 : (sp_ - stack_.get()) + frames_.size() * sizeof(Frame);
2218 446444 : if (V8_LIKELY(current_stack_size <= stack_size_limit)) {
2219 : return true;
2220 : }
2221 : // The pc of the top frame is initialized to the first instruction. We reset
2222 : // it to 0 here such that we report the same position as in compiled code.
2223 16 : frames_.back().pc = 0;
2224 : Isolate* isolate = instance_object_->GetIsolate();
2225 : HandleScope handle_scope(isolate);
2226 16 : isolate->StackOverflow();
2227 16 : return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
2228 : }
2229 :
2230 0 : void EncodeI32ExceptionValue(Handle<FixedArray> encoded_values,
2231 : uint32_t* encoded_index, uint32_t value) {
2232 0 : encoded_values->set((*encoded_index)++, Smi::FromInt(value >> 16));
2233 0 : encoded_values->set((*encoded_index)++, Smi::FromInt(value & 0xffff));
2234 0 : }
2235 :
2236 0 : void EncodeI64ExceptionValue(Handle<FixedArray> encoded_values,
2237 : uint32_t* encoded_index, uint64_t value) {
2238 : EncodeI32ExceptionValue(encoded_values, encoded_index,
2239 0 : static_cast<uint32_t>(value >> 32));
2240 : EncodeI32ExceptionValue(encoded_values, encoded_index,
2241 0 : static_cast<uint32_t>(value));
2242 0 : }
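  // The two encoders above split each value into 16-bit chunks, most
  // significant chunk first, so every stored element fits comfortably in a
  // Smi. A round trip of the decoding side, as a standalone sketch
  // (hypothetical name; the real decoder lives with the other backends):
  static uint64_t DecodeI64ExceptionValueSketch(const uint16_t chunks[4]) {
    uint64_t value = 0;
    for (int i = 0; i < 4; ++i) {
      value = (value << 16) | chunks[i];  // chunks[0] holds bits 63..48.
    }
    return value;
  }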
2243 :
2244             :   // Allocate, initialize and throw a new exception. The exception values are
2245             :   // popped off the operand stack. Returns true if the exception is handled
2246             :   // locally by the interpreter, false otherwise (the interpreter exits).
2247 12 : bool DoThrowException(const WasmException* exception,
2248 : uint32_t index) V8_WARN_UNUSED_RESULT {
2249 : Isolate* isolate = instance_object_->GetIsolate();
2250 : Handle<WasmExceptionTag> exception_tag(
2251 : WasmExceptionTag::cast(
2252 24 : instance_object_->exceptions_table()->get(index)),
2253 36 : isolate);
2254 12 : uint32_t encoded_size = WasmExceptionPackage::GetEncodedSize(exception);
2255 : Handle<Object> exception_object =
2256 12 : WasmExceptionPackage::New(isolate, exception_tag, encoded_size);
2257 : Handle<FixedArray> encoded_values = Handle<FixedArray>::cast(
2258 12 : WasmExceptionPackage::GetExceptionValues(isolate, exception_object));
2259 : // Encode the exception values on the operand stack into the exception
2260 : // package allocated above. This encoding has to be in sync with other
2261 : // backends so that exceptions can be passed between them.
2262 24 : const wasm::WasmExceptionSig* sig = exception->sig;
2263 12 : uint32_t encoded_index = 0;
2264 24 : for (size_t i = 0; i < sig->parameter_count(); ++i) {
2265 0 : WasmValue value = sp_[i - sig->parameter_count()];
2266 0 : switch (sig->GetParam(i)) {
2267 : case wasm::kWasmI32: {
2268 : uint32_t u32 = value.to_u32();
2269 0 : EncodeI32ExceptionValue(encoded_values, &encoded_index, u32);
2270 0 : break;
2271 : }
2272 : case wasm::kWasmF32: {
2273 : uint32_t f32 = value.to_f32_boxed().get_bits();
2274 0 : EncodeI32ExceptionValue(encoded_values, &encoded_index, f32);
2275 0 : break;
2276 : }
2277 : case wasm::kWasmI64: {
2278 : uint64_t u64 = value.to_u64();
2279 0 : EncodeI64ExceptionValue(encoded_values, &encoded_index, u64);
2280 0 : break;
2281 : }
2282 : case wasm::kWasmF64: {
2283 : uint64_t f64 = value.to_f64_boxed().get_bits();
2284 0 : EncodeI64ExceptionValue(encoded_values, &encoded_index, f64);
2285 0 : break;
2286 : }
2287 : case wasm::kWasmAnyRef:
2288 0 : UNIMPLEMENTED();
2289 : break;
2290 : default:
2291 0 : UNREACHABLE();
2292 : }
2293 : }
2294 : DCHECK_EQ(encoded_size, encoded_index);
2295 12 : PopN(static_cast<int>(sig->parameter_count()));
2296 : // Now that the exception is ready, set it as pending.
2297 12 : isolate->Throw(*exception_object);
2298 12 : return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
2299 : }
2300 :
2301             :   // Throw a given existing exception. Returns true if the exception is handled
2302             :   // locally by the interpreter, false otherwise (the interpreter exits).
2303 0 : bool DoRethrowException(WasmValue* exception) {
2304 : Isolate* isolate = instance_object_->GetIsolate();
2305 : // TODO(mstarzinger): Use the passed {exception} here once reference types
2306 : // as values on the operand stack are supported by the interpreter.
2307 0 : isolate->ReThrow(*isolate->factory()->undefined_value());
2308 0 : return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
2309 : }
2310 :
2311 5141922 : void Execute(InterpreterCode* code, pc_t pc, int max) {
2312 : DCHECK_NOT_NULL(code->side_table);
2313 : DCHECK(!frames_.empty());
2314 : // There must be enough space on the stack to hold the arguments, locals,
2315 : // and the value stack.
2316 : DCHECK_LE(code->function->sig->parameter_count() +
2317 : code->locals.type_list.size() +
2318 : code->side_table->max_stack_height_,
2319 : stack_limit_ - stack_.get() - frames_.back().sp);
2320 :
2321 42786811 : Decoder decoder(code->start, code->end);
2322 4669698 : pc_t limit = code->end - code->start;
2323 : bool hit_break = false;
2324 :
2325 : while (true) {
2326 : #define PAUSE_IF_BREAK_FLAG(flag) \
2327 : if (V8_UNLIKELY(break_flags_ & WasmInterpreter::BreakFlag::flag)) { \
2328 : hit_break = true; \
2329 : max = 0; \
2330 : }
2331 :
2332 : DCHECK_GT(limit, pc);
2333 : DCHECK_NOT_NULL(code->start);
2334 :
2335             :       // First, check for a breakpoint, in order to set hit_break correctly.
2336 : const char* skip = " ";
2337 60557095 : int len = 1;
2338 60557095 : byte orig = code->start[pc];
2339 60557095 : WasmOpcode opcode = static_cast<WasmOpcode>(orig);
2340 60557095 : if (WasmOpcodes::IsPrefixOpcode(opcode)) {
2341 5708128 : opcode = static_cast<WasmOpcode>(opcode << 8 | code->start[pc + 1]);
2342 : }
2343 60557095 : if (V8_UNLIKELY(orig == kInternalBreakpoint)) {
2344 5992 : orig = code->orig_start[pc];
2345 5992 : if (WasmOpcodes::IsPrefixOpcode(static_cast<WasmOpcode>(orig))) {
2346 : opcode =
2347 0 : static_cast<WasmOpcode>(orig << 8 | code->orig_start[pc + 1]);
2348 : }
2349 11984 : if (SkipBreakpoint(code, pc)) {
2350             :           // Skip the breakpoint by dispatching on the original (unpatched) code.
2351 : skip = "[skip] ";
2352 : } else {
2353 : TRACE("@%-3zu: [break] %-24s:", pc, WasmOpcodes::OpcodeName(opcode));
2354 : TraceValueStack();
2355 : TRACE("\n");
2356 : hit_break = true;
2357 5429 : break;
2358 : }
2359 : }
2360 :
2361 : // If max is 0, break. If max is positive (a limit is set), decrement it.
2362 60554099 : if (max == 0) break;
2363 60551666 : if (max > 0) --max;
2364 :
2365 : USE(skip);
2366 : TRACE("@%-3zu: %s%-24s:", pc, skip, WasmOpcodes::OpcodeName(opcode));
2367 : TraceValueStack();
2368 : TRACE("\n");
2369 :
2370 : #ifdef DEBUG
2371 : // Compute the stack effect of this opcode, and verify later that the
2372 : // stack was modified accordingly.
2373 : std::pair<uint32_t, uint32_t> stack_effect =
2374 : StackEffect(codemap_->module(), frames_.back().code->function->sig,
2375 : code->orig_start + pc, code->orig_end);
2376 : sp_t expected_new_stack_height =
2377 : StackHeight() - stack_effect.first + stack_effect.second;
2378 : #endif
2379 :
2380 60551666 : switch (orig) {
2381 : case kExprNop:
2382 : break;
2383 : case kExprBlock:
2384 : case kExprLoop:
2385 : case kExprTry: {
2386 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures,
2387 2336528 : &decoder, code->at(pc));
2388 1168264 : len = 1 + imm.length;
2389 : break;
2390 : }
2391 : case kExprIf: {
2392 : BlockTypeImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures,
2393 6387280 : &decoder, code->at(pc));
2394 : WasmValue cond = Pop();
2395 : bool is_true = cond.to<uint32_t>() != 0;
2396 3193640 : if (is_true) {
2397 : // fall through to the true block.
2398 19278 : len = 1 + imm.length;
2399 : TRACE(" true => fallthrough\n");
2400 : } else {
2401 6348724 : len = LookupTargetDelta(code, pc);
2402 : TRACE(" false => @%zu\n", pc + len);
2403 : }
2404 : break;
2405 : }
2406 : case kExprElse:
2407 : case kExprCatch: {
2408 29392 : len = LookupTargetDelta(code, pc);
2409 : TRACE(" end => @%zu\n", pc + len);
2410 14696 : break;
2411 : }
2412 : case kExprThrow: {
2413 : ExceptionIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2414 12 : code->at(pc));
2415 12 : CommitPc(pc); // Needed for local unwinding.
2416 24 : const WasmException* exception = &module()->exceptions[imm.index];
2417 12 : if (!DoThrowException(exception, imm.index)) return;
2418 12 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2419 12 : continue; // Do not bump pc.
2420 : }
2421 : case kExprRethrow: {
2422 : WasmValue ex = Pop();
2423 0 : CommitPc(pc); // Needed for local unwinding.
2424 0 : if (!DoRethrowException(&ex)) return;
2425 0 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2426 : continue; // Do not bump pc.
2427 : }
2428 : case kExprSelect: {
2429 : WasmValue cond = Pop();
2430 : WasmValue fval = Pop();
2431 : WasmValue tval = Pop();
2432 932 : Push(cond.to<int32_t>() != 0 ? tval : fval);
2433 : break;
2434 : }
2435 : case kExprBr: {
2436 : BranchDepthImmediate<Decoder::kNoValidate> imm(&decoder,
2437 18054 : code->at(pc));
2438 36108 : len = DoBreak(code, pc, imm.depth);
2439 : TRACE(" br => @%zu\n", pc + len);
2440 : break;
2441 : }
2442 : case kExprBrIf: {
2443 : BranchDepthImmediate<Decoder::kNoValidate> imm(&decoder,
2444 28736 : code->at(pc));
2445 : WasmValue cond = Pop();
2446 : bool is_true = cond.to<uint32_t>() != 0;
2447 28736 : if (is_true) {
2448 29360 : len = DoBreak(code, pc, imm.depth);
2449 : TRACE(" br_if => @%zu\n", pc + len);
2450 : } else {
2451 : TRACE(" false => fallthrough\n");
2452 14056 : len = 1 + imm.length;
2453 : }
2454 : break;
2455 : }
2456 : case kExprBrTable: {
2457 : BranchTableImmediate<Decoder::kNoValidate> imm(&decoder,
2458 443848 : code->at(pc));
2459 : BranchTableIterator<Decoder::kNoValidate> iterator(&decoder, imm);
2460 : uint32_t key = Pop().to<uint32_t>();
2461 : uint32_t depth = 0;
2462 221924 : if (key >= imm.table_count) key = imm.table_count;
2463 1081736 : for (uint32_t i = 0; i <= key; i++) {
2464 : DCHECK(iterator.has_next());
2465 859812 : depth = iterator.next();
2466 : }
2467 443848 : len = key + DoBreak(code, pc + key, static_cast<size_t>(depth));
2468 : TRACE(" br[%u] => @%zu\n", key, pc + key + len);
2469 : break;
2470 : }
2471 : case kExprReturn: {
2472 4737325 : size_t arity = code->function->sig->return_count();
2473 400548 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
2474 12 : PAUSE_IF_BREAK_FLAG(AfterReturn);
2475 : continue; // Do not bump pc.
2476 : }
2477 : case kExprUnreachable: {
2478 52 : return DoTrap(kTrapUnreachable, pc);
2479 : }
2480 : case kExprEnd: {
2481 : break;
2482 : }
2483 : case kExprI32Const: {
2484 7401154 : ImmI32Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2485 14802308 : Push(WasmValue(imm.value));
2486 7401154 : len = 1 + imm.length;
2487 : break;
2488 : }
2489 : case kExprI64Const: {
2490 6948 : ImmI64Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2491 13896 : Push(WasmValue(imm.value));
2492 6948 : len = 1 + imm.length;
2493 : break;
2494 : }
2495 : case kExprF32Const: {
2496 280 : ImmF32Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2497 560 : Push(WasmValue(imm.value));
2498 280 : len = 1 + imm.length;
2499 : break;
2500 : }
2501 : case kExprF64Const: {
2502 1684 : ImmF64Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2503 3368 : Push(WasmValue(imm.value));
2504 1684 : len = 1 + imm.length;
2505 : break;
2506 : }
2507 : case kExprGetLocal: {
2508 17691475 : LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2509 17691475 : Push(GetStackValue(frames_.back().sp + imm.index));
2510 17691475 : len = 1 + imm.length;
2511 : break;
2512 : }
2513 : case kExprSetLocal: {
2514 4466258 : LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2515 : WasmValue val = Pop();
2516 4466258 : SetStackValue(frames_.back().sp + imm.index, val);
2517 4466258 : len = 1 + imm.length;
2518 : break;
2519 : }
2520 : case kExprTeeLocal: {
2521 2552 : LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
2522 : WasmValue val = Pop();
2523 2552 : SetStackValue(frames_.back().sp + imm.index, val);
2524 : Push(val);
2525 2552 : len = 1 + imm.length;
2526 : break;
2527 : }
2528 : case kExprDrop: {
2529 : Pop();
2530 14496 : break;
2531 : }
2532 : case kExprCallFunction: {
2533 : CallFunctionImmediate<Decoder::kNoValidate> imm(&decoder,
2534 452984 : code->at(pc));
2535 : InterpreterCode* target = codemap()->GetCode(imm.index);
2536 452984 : if (target->function->imported) {
2537 6632 : CommitPc(pc);
2538 : ExternalCallResult result =
2539 6632 : CallImportedFunction(target->function->func_index);
2540 6632 : switch (result.type) {
2541 : case ExternalCallResult::INTERNAL:
2542 : // The import is a function of this instance. Call it directly.
2543 0 : target = result.interpreter_code;
2544 : DCHECK(!target->function->imported);
2545 0 : break;
2546 : case ExternalCallResult::INVALID_FUNC:
2547 : case ExternalCallResult::SIGNATURE_MISMATCH:
2548 : // Direct calls are checked statically.
2549 0 : UNREACHABLE();
2550 : case ExternalCallResult::EXTERNAL_RETURNED:
2551 5636 : PAUSE_IF_BREAK_FLAG(AfterCall);
2552 5636 : len = 1 + imm.length;
2553 5636 : break;
2554 : case ExternalCallResult::EXTERNAL_UNWOUND:
2555 988 : return;
2556 : case ExternalCallResult::EXTERNAL_CAUGHT:
2557 8 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2558 8 : continue; // Do not bump pc.
2559 : }
2560 5636 : if (result.type != ExternalCallResult::INTERNAL) break;
2561 : }
2562 : // Execute an internal call.
2563 446352 : if (!DoCall(&decoder, target, &pc, &limit)) return;
2564 446336 : code = target;
2565 446336 : PAUSE_IF_BREAK_FLAG(AfterCall);
2566 : continue; // Do not bump pc.
2567 : } break;
2568 : case kExprCallIndirect: {
2569 : CallIndirectImmediate<Decoder::kNoValidate> imm(&decoder,
2570 408 : code->at(pc));
2571 : uint32_t entry_index = Pop().to<uint32_t>();
2572 : // Assume only one table for now.
2573 : DCHECK_LE(module()->tables.size(), 1u);
2574 204 : CommitPc(pc); // TODO(wasm): Be more disciplined about committing PC.
2575 : ExternalCallResult result =
2576 204 : CallIndirectFunction(0, entry_index, imm.sig_index);
2577 204 : switch (result.type) {
2578 : case ExternalCallResult::INTERNAL:
2579 : // The import is a function of this instance. Call it directly.
2580 92 : if (!DoCall(&decoder, result.interpreter_code, &pc, &limit))
2581 88 : return;
2582 92 : code = result.interpreter_code;
2583 92 : PAUSE_IF_BREAK_FLAG(AfterCall);
2584 92 : continue; // Do not bump pc.
2585 : case ExternalCallResult::INVALID_FUNC:
2586 44 : return DoTrap(kTrapFuncInvalid, pc);
2587 : case ExternalCallResult::SIGNATURE_MISMATCH:
2588 28 : return DoTrap(kTrapFuncSigMismatch, pc);
2589 : case ExternalCallResult::EXTERNAL_RETURNED:
2590 24 : PAUSE_IF_BREAK_FLAG(AfterCall);
2591 24 : len = 1 + imm.length;
2592 24 : break;
2593 : case ExternalCallResult::EXTERNAL_UNWOUND:
2594 : return;
2595 : case ExternalCallResult::EXTERNAL_CAUGHT:
2596 0 : ReloadFromFrameOnException(&decoder, &code, &pc, &limit);
2597 0 : continue; // Do not bump pc.
2598 : }
2599 24 : } break;
2600 : case kExprGetGlobal: {
2601 : GlobalIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2602 15648 : code->at(pc));
2603 31296 : const WasmGlobal* global = &module()->globals[imm.index];
2604 15648 : byte* ptr = GetGlobalPtr(global);
2605 : WasmValue val;
2606 15648 : switch (global->type) {
2607 : #define CASE_TYPE(wasm, ctype) \
2608 : case kWasm##wasm: \
2609 : val = WasmValue( \
2610 : ReadLittleEndianValue<ctype>(reinterpret_cast<Address>(ptr))); \
2611 : break;
2612 15648 : WASM_CTYPES(CASE_TYPE)
2613 : #undef CASE_TYPE
2614 : default:
2615 0 : UNREACHABLE();
2616 : }
2617 : Push(val);
2618 15648 : len = 1 + imm.length;
2619 : break;
2620 : }
2621 : case kExprSetGlobal: {
2622 : GlobalIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2623 3580 : code->at(pc));
2624 7160 : const WasmGlobal* global = &module()->globals[imm.index];
2625 3580 : byte* ptr = GetGlobalPtr(global);
2626 : WasmValue val = Pop();
2627 3580 : switch (global->type) {
2628 : #define CASE_TYPE(wasm, ctype) \
2629 : case kWasm##wasm: \
2630 : WriteLittleEndianValue<ctype>(reinterpret_cast<Address>(ptr), \
2631 : val.to<ctype>()); \
2632 : break;
2633 3420 : WASM_CTYPES(CASE_TYPE)
2634 : #undef CASE_TYPE
2635 : default:
2636 0 : UNREACHABLE();
2637 : }
2638 3580 : len = 1 + imm.length;
2639 : break;
2640 : }
2641 :
2642 : #define LOAD_CASE(name, ctype, mtype, rep) \
2643 : case kExpr##name: { \
2644 : if (!ExecuteLoad<ctype, mtype>(&decoder, code, pc, len, \
2645 : MachineRepresentation::rep)) \
2646 : return; \
2647 : break; \
2648 : }
2649 :
2650 262372 : LOAD_CASE(I32LoadMem8S, int32_t, int8_t, kWord8);
2651 262380 : LOAD_CASE(I32LoadMem8U, int32_t, uint8_t, kWord8);
2652 131300 : LOAD_CASE(I32LoadMem16S, int32_t, int16_t, kWord16);
2653 131300 : LOAD_CASE(I32LoadMem16U, int32_t, uint16_t, kWord16);
2654 96 : LOAD_CASE(I64LoadMem8S, int64_t, int8_t, kWord8);
2655 0 : LOAD_CASE(I64LoadMem8U, int64_t, uint8_t, kWord16);
2656 96 : LOAD_CASE(I64LoadMem16S, int64_t, int16_t, kWord16);
2657 0 : LOAD_CASE(I64LoadMem16U, int64_t, uint16_t, kWord16);
2658 96 : LOAD_CASE(I64LoadMem32S, int64_t, int32_t, kWord32);
2659 0 : LOAD_CASE(I64LoadMem32U, int64_t, uint32_t, kWord32);
2660 803250 : LOAD_CASE(I32LoadMem, int32_t, int32_t, kWord32);
2661 1542892 : LOAD_CASE(I64LoadMem, int64_t, int64_t, kWord64);
2662 9452 : LOAD_CASE(F32LoadMem, Float32, uint32_t, kFloat32);
2663 29940 : LOAD_CASE(F64LoadMem, Float64, uint64_t, kFloat64);
2664 : #undef LOAD_CASE
2665 :
2666 : #define STORE_CASE(name, ctype, mtype, rep) \
2667 : case kExpr##name: { \
2668 : if (!ExecuteStore<ctype, mtype>(&decoder, code, pc, len, \
2669 : MachineRepresentation::rep)) \
2670 : return; \
2671 : break; \
2672 : }
2673 :
2674 420 : STORE_CASE(I32StoreMem8, int32_t, int8_t, kWord8);
2675 396 : STORE_CASE(I32StoreMem16, int32_t, int16_t, kWord16);
2676 96 : STORE_CASE(I64StoreMem8, int64_t, int8_t, kWord8);
2677 92 : STORE_CASE(I64StoreMem16, int64_t, int16_t, kWord16);
2678 84 : STORE_CASE(I64StoreMem32, int64_t, int32_t, kWord32);
2679 852096 : STORE_CASE(I32StoreMem, int32_t, int32_t, kWord32);
2680 1708428 : STORE_CASE(I64StoreMem, int64_t, int64_t, kWord64);
2681 1200 : STORE_CASE(F32StoreMem, Float32, uint32_t, kFloat32);
2682 10924 : STORE_CASE(F64StoreMem, Float64, uint64_t, kFloat64);
2683 : #undef STORE_CASE
2684 :
2685 : #define ASMJS_LOAD_CASE(name, ctype, mtype, defval) \
2686 : case kExpr##name: { \
2687 : uint32_t index = Pop().to<uint32_t>(); \
2688 : ctype result; \
2689 : Address addr = BoundsCheckMem<mtype>(0, index); \
2690 : if (!addr) { \
2691 : result = defval; \
2692 : } else { \
2693 : /* TODO(titzer): alignment for asmjs load mem? */ \
2694 : result = static_cast<ctype>(*reinterpret_cast<mtype*>(addr)); \
2695 : } \
2696 : Push(WasmValue(result)); \
2697 : break; \
2698 : }
2699 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8S, int32_t, int8_t, 0);
2700 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8U, int32_t, uint8_t, 0);
2701 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16S, int32_t, int16_t, 0);
2702 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16U, int32_t, uint16_t, 0);
2703 200 : ASMJS_LOAD_CASE(I32AsmjsLoadMem, int32_t, int32_t, 0);
2704 200 : ASMJS_LOAD_CASE(F32AsmjsLoadMem, float, float,
2705 : std::numeric_limits<float>::quiet_NaN());
2706 272 : ASMJS_LOAD_CASE(F64AsmjsLoadMem, double, double,
2707 : std::numeric_limits<double>::quiet_NaN());
2708 : #undef ASMJS_LOAD_CASE
2709 :
2710 : #define ASMJS_STORE_CASE(name, ctype, mtype) \
2711 : case kExpr##name: { \
2712 : WasmValue val = Pop(); \
2713 : uint32_t index = Pop().to<uint32_t>(); \
2714 : Address addr = BoundsCheckMem<mtype>(0, index); \
2715 : if (addr) { \
2716 : *(reinterpret_cast<mtype*>(addr)) = static_cast<mtype>(val.to<ctype>()); \
2717 : } \
2718 : Push(val); \
2719 : break; \
2720 : }
2721 :
2722 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem8, int32_t, int8_t);
2723 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem16, int32_t, int16_t);
2724 932 : ASMJS_STORE_CASE(I32AsmjsStoreMem, int32_t, int32_t);
2725 0 : ASMJS_STORE_CASE(F32AsmjsStoreMem, float, float);
2726 0 : ASMJS_STORE_CASE(F64AsmjsStoreMem, double, double);
2727 : #undef ASMJS_STORE_CASE
2728 : case kExprMemoryGrow: {
2729 : MemoryIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2730 : code->at(pc));
2731 : uint32_t delta_pages = Pop().to<uint32_t>();
2732 : Handle<WasmMemoryObject> memory(instance_object_->memory_object(),
2733 96 : instance_object_->GetIsolate());
2734 : Isolate* isolate = memory->GetIsolate();
2735 48 : int32_t result = WasmMemoryObject::Grow(isolate, memory, delta_pages);
2736 96 : Push(WasmValue(result));
2737 48 : len = 1 + imm.length;
2738 : // Treat one grow_memory instruction like 1000 other instructions,
2739 : // because it is a really expensive operation.
2740 48 : if (max > 0) max = std::max(0, max - 1000);
2741 : break;
2742 : }
2743 : case kExprMemorySize: {
2744 : MemoryIndexImmediate<Decoder::kNoValidate> imm(&decoder,
2745 : code->at(pc));
2746 0 : Push(WasmValue(static_cast<uint32_t>(instance_object_->memory_size() /
2747 0 : kWasmPageSize)));
2748 0 : len = 1 + imm.length;
2749 : break;
2750 : }
2751 : // We need to treat kExprI32ReinterpretF32 and kExprI64ReinterpretF64
2752 : // specially to guarantee that the quiet bit of a NaN is preserved on
2753 : // ia32 by the reinterpret casts.
2754 : case kExprI32ReinterpretF32: {
2755 : WasmValue val = Pop();
2756 512 : Push(WasmValue(ExecuteI32ReinterpretF32(val)));
2757 : break;
2758 : }
2759 : case kExprI64ReinterpretF64: {
2760 : WasmValue val = Pop();
2761 488 : Push(WasmValue(ExecuteI64ReinterpretF64(val)));
2762 : break;
2763 : }
2764 : #define SIGN_EXTENSION_CASE(name, wtype, ntype) \
2765 : case kExpr##name: { \
2766 : ntype val = static_cast<ntype>(Pop().to<wtype>()); \
2767 : Push(WasmValue(static_cast<wtype>(val))); \
2768 : break; \
2769 : }
2770 40 : SIGN_EXTENSION_CASE(I32SExtendI8, int32_t, int8_t);
2771 40 : SIGN_EXTENSION_CASE(I32SExtendI16, int32_t, int16_t);
2772 40 : SIGN_EXTENSION_CASE(I64SExtendI8, int64_t, int8_t);
2773 40 : SIGN_EXTENSION_CASE(I64SExtendI16, int64_t, int16_t);
2774 40 : SIGN_EXTENSION_CASE(I64SExtendI32, int64_t, int32_t);
2775 : #undef SIGN_EXTENSION_CASE
2776 : case kNumericPrefix: {
2777 2624 : ++len;
2778 2624 : if (!ExecuteNumericOp(opcode, &decoder, code, pc, len)) return;
2779 : break;
2780 : }
2781 : case kAtomicPrefix: {
2782 384088 : if (!ExecuteAtomicOp(opcode, &decoder, code, pc, len)) return;
2783 : break;
2784 : }
2785 : case kSimdPrefix: {
2786 5321416 : ++len;
2787 5321416 : if (!ExecuteSimdOp(opcode, &decoder, code, pc, len)) return;
2788 : break;
2789 : }
2790 :
2791 : #define EXECUTE_SIMPLE_BINOP(name, ctype, op) \
2792 : case kExpr##name: { \
2793 : WasmValue rval = Pop(); \
2794 : WasmValue lval = Pop(); \
2795 : auto result = lval.to<ctype>() op rval.to<ctype>(); \
2796 : possible_nondeterminism_ |= has_nondeterminism(result); \
2797 : Push(WasmValue(result)); \
2798 : break; \
2799 : }
2800 9857904 : FOREACH_SIMPLE_BINOP(EXECUTE_SIMPLE_BINOP)
2801 : #undef EXECUTE_SIMPLE_BINOP
2802 :
2803 : #define EXECUTE_OTHER_BINOP(name, ctype) \
2804 : case kExpr##name: { \
2805 : TrapReason trap = kTrapCount; \
2806 : ctype rval = Pop().to<ctype>(); \
2807 : ctype lval = Pop().to<ctype>(); \
2808 : auto result = Execute##name(lval, rval, &trap); \
2809 : possible_nondeterminism_ |= has_nondeterminism(result); \
2810 : if (trap != kTrapCount) return DoTrap(trap, pc); \
2811 : Push(WasmValue(result)); \
2812 : break; \
2813 : }
2814 2512748 : FOREACH_OTHER_BINOP(EXECUTE_OTHER_BINOP)
2815 : #undef EXECUTE_OTHER_BINOP
2816 :
2817 : #define EXECUTE_UNOP(name, ctype, exec_fn) \
2818 : case kExpr##name: { \
2819 : TrapReason trap = kTrapCount; \
2820 : ctype val = Pop().to<ctype>(); \
2821 : auto result = exec_fn(val, &trap); \
2822 : possible_nondeterminism_ |= has_nondeterminism(result); \
2823 : if (trap != kTrapCount) return DoTrap(trap, pc); \
2824 : Push(WasmValue(result)); \
2825 : break; \
2826 : }
2827 :
2828 : #define EXECUTE_OTHER_UNOP(name, ctype) EXECUTE_UNOP(name, ctype, Execute##name)
2829 371960 : FOREACH_OTHER_UNOP(EXECUTE_OTHER_UNOP)
2830 : #undef EXECUTE_OTHER_UNOP
2831 :
2832 : #define EXECUTE_I32CONV_FLOATOP(name, out_type, in_type) \
2833 : EXECUTE_UNOP(name, in_type, ExecuteConvert<out_type>)
2834 3136 : FOREACH_I32CONV_FLOATOP(EXECUTE_I32CONV_FLOATOP)
2835 : #undef EXECUTE_I32CONV_FLOATOP
2836 : #undef EXECUTE_UNOP
2837 :
2838 : default:
2839 0 : FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
2840 0 : OpcodeName(code->start[pc]));
2841 : UNREACHABLE();
2842 : }
2843 :
2844 : #ifdef DEBUG
2845 : if (!WasmOpcodes::IsControlOpcode(opcode)) {
2846 : DCHECK_EQ(expected_new_stack_height, StackHeight());
2847 : }
2848 : #endif
2849 :
2850 59667386 : pc += len;
2851 59667386 : if (pc == limit) {
2852 : // Fell off end of code; do an implicit return.
2853 : TRACE("@%-3zu: ImplicitReturn\n", pc);
2854 4336777 : size_t arity = code->function->sig->return_count();
2855 : DCHECK_EQ(StackHeight() - arity, frames_.back().llimit());
2856 4336777 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
2857 110328 : PAUSE_IF_BREAK_FLAG(AfterReturn);
2858 : }
2859 : #undef PAUSE_IF_BREAK_FLAG
2860 : }
2861 :
2862 5429 : state_ = WasmInterpreter::PAUSED;
2863 8465 : break_pc_ = hit_break ? pc : kInvalidPc;
2864 5429 : CommitPc(pc);
2865 : }
2866 :
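            : // The helpers below operate directly on {sp_}, the pointer to the current
            : // top of the operand stack inside the backing store owned by {stack_}.
            : // Callers are expected to have reserved space via {EnsureStackSpace}
            : // before pushing (see the DCHECK in {Push}).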
2867 : WasmValue Pop() {
2868 : DCHECK_GT(frames_.size(), 0);
2869 : DCHECK_GT(StackHeight(), frames_.back().llimit()); // can't pop into locals
2870 25961204 : return *--sp_;
2871 : }
2872 :
2873 : void PopN(int n) {
2874 : DCHECK_GE(StackHeight(), n);
2875 : DCHECK_GT(frames_.size(), 0);
2876 : // Check that we don't pop into locals.
2877 : DCHECK_GE(StackHeight() - n, frames_.back().llimit());
2878 12 : sp_ -= n;
2879 : }
2880 :
2881 : WasmValue PopArity(size_t arity) {
2882 : if (arity == 0) return WasmValue();
2883 : CHECK_EQ(1, arity);
2884 : return Pop();
2885 : }
2886 :
2887 : void Push(WasmValue val) {
2888 : DCHECK_NE(kWasmStmt, val.type());
2889 : DCHECK_LE(1, stack_limit_ - sp_);
2890 52168712 : *sp_++ = val;
2891 : }
2892 :
2893 4664254 : void Push(WasmValue* vals, size_t arity) {
2894 : DCHECK_LE(arity, stack_limit_ - sp_);
2895 4664254 : for (WasmValue *val = vals, *end = vals + arity; val != end; ++val) {
2896 : DCHECK_NE(kWasmStmt, val->type());
2897 : }
2898 4664254 : if (arity > 0) {
2899 3891149 : memcpy(sp_, vals, arity * sizeof(*sp_));
2900 : }
2901 4664254 : sp_ += arity;
2902 4664254 : }
2903 :
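            : // Grow the operand stack if fewer than {size} free slots remain. The new
            : // capacity is at least 8 slots, at least double the old capacity, and at
            : // least the next power of two that fits the requested height; the old
            : // contents are copied over and {sp_} is rebased into the new allocation.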
2904 9774952 : void EnsureStackSpace(size_t size) {
2905 19549904 : if (V8_LIKELY(static_cast<size_t>(stack_limit_ - sp_) >= size)) return;
2906 366414 : size_t old_size = stack_limit_ - stack_.get();
2907 : size_t requested_size =
2908 366414 : base::bits::RoundUpToPowerOfTwo64((sp_ - stack_.get()) + size);
2909 366414 : size_t new_size = Max(size_t{8}, Max(2 * old_size, requested_size));
2910 19627550 : std::unique_ptr<WasmValue[]> new_stack(new WasmValue[new_size]);
2911 366414 : if (old_size > 0) {
2912 1136 : memcpy(new_stack.get(), stack_.get(), old_size * sizeof(*sp_));
2913 : }
2914 732828 : sp_ = new_stack.get() + (sp_ - stack_.get());
2915 : stack_ = std::move(new_stack);
2916 366414 : stack_limit_ = stack_.get() + new_size;
2917 : }
2918 :
2919 10289592 : sp_t StackHeight() { return sp_ - stack_.get(); }
2920 :
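            : // Prints the current frame's slots when --trace-wasm-interpreter is set:
            : // "p" marks parameter slots, "l" other locals, "s" operand stack slots.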
2921 : void TraceValueStack() {
2922 : #ifdef DEBUG
2923 : if (!FLAG_trace_wasm_interpreter) return;
2924 : Frame* top = frames_.size() > 0 ? &frames_.back() : nullptr;
2925 : sp_t sp = top ? top->sp : 0;
2926 : sp_t plimit = top ? top->plimit() : 0;
2927 : sp_t llimit = top ? top->llimit() : 0;
2928 : for (size_t i = sp; i < StackHeight(); ++i) {
2929 : if (i < plimit)
2930 : PrintF(" p%zu:", i);
2931 : else if (i < llimit)
2932 : PrintF(" l%zu:", i);
2933 : else
2934 : PrintF(" s%zu:", i);
2935 : WasmValue val = GetStackValue(i);
2936 : switch (val.type()) {
2937 : case kWasmI32:
2938 : PrintF("i32:%d", val.to<int32_t>());
2939 : break;
2940 : case kWasmI64:
2941 : PrintF("i64:%" PRId64 "", val.to<int64_t>());
2942 : break;
2943 : case kWasmF32:
2944 : PrintF("f32:%f", val.to<float>());
2945 : break;
2946 : case kWasmF64:
2947 : PrintF("f64:%lf", val.to<double>());
2948 : break;
2949 : case kWasmStmt:
2950 : PrintF("void");
2951 : break;
2952 : default:
2953 : UNREACHABLE();
2954 : break;
2955 : }
2956 : }
2957 : #endif // DEBUG
2958 : }
2959 :
2960 : ExternalCallResult TryHandleException(Isolate* isolate) {
2961 : DCHECK(isolate->has_pending_exception()); // Assume exceptional return.
2962 1012 : if (HandleException(isolate) == WasmInterpreter::Thread::UNWOUND) {
2963 : return {ExternalCallResult::EXTERNAL_UNWOUND};
2964 : }
2965 : return {ExternalCallResult::EXTERNAL_CAUGHT};
2966 : }
2967 :
2968 6672 : ExternalCallResult CallExternalWasmFunction(Isolate* isolate,
2969 : Handle<Object> object_ref,
2970 6672 : const WasmCode* code,
2971 44340 : FunctionSig* sig) {
2972 6672 : int num_args = static_cast<int>(sig->parameter_count());
2973 : wasm::WasmFeatures enabled_features =
2974 6672 : wasm::WasmFeaturesFromIsolate(isolate);
2975 :
2976 13240 : if (code->kind() == WasmCode::kWasmToJsWrapper &&
2977 6568 : !IsJSCompatibleSignature(sig, enabled_features.bigint)) {
2978 20 : sp_ -= num_args; // Pop arguments before throwing.
2979 : isolate->Throw(*isolate->factory()->NewTypeError(
2980 40 : MessageTemplate::kWasmTrapTypeError));
2981 : return TryHandleException(isolate);
2982 : }
2983 :
2984 13304 : Handle<WasmDebugInfo> debug_info(instance_object_->debug_info(), isolate);
2985 : Handle<JSFunction> wasm_entry =
2986 6652 : WasmDebugInfo::GetCWasmEntry(debug_info, sig);
2987 :
2988 : TRACE(" => Calling external wasm function\n");
2989 :
2990 : // Copy the arguments to one buffer.
2991 : // TODO(clemensh): Introduce a helper for all argument buffer
2992            : // construction and destruction.
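            : // The arguments are packed back-to-back in signature order, each at its
            : // natural byte size; the same buffer later receives the return value(s).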
2993 6652 : std::vector<uint8_t> arg_buffer(num_args * 8);
2994 : size_t offset = 0;
2995 6652 : WasmValue* wasm_args = sp_ - num_args;
2996 16552 : for (int i = 0; i < num_args; ++i) {
2997 19800 : int param_size = ValueTypes::ElementSizeInBytes(sig->GetParam(i));
2998 19800 : if (arg_buffer.size() < offset + param_size) {
2999 0 : arg_buffer.resize(std::max(2 * arg_buffer.size(), offset + param_size));
3000 : }
3001 9900 : Address address = reinterpret_cast<Address>(arg_buffer.data()) + offset;
3002 9900 : switch (sig->GetParam(i)) {
3003 : case kWasmI32:
3004 5940 : WriteUnalignedValue(address, wasm_args[i].to<uint32_t>());
3005 : break;
3006 : case kWasmI64:
3007 0 : WriteUnalignedValue(address, wasm_args[i].to<uint64_t>());
3008 : break;
3009 : case kWasmF32:
3010 0 : WriteUnalignedValue(address, wasm_args[i].to<float>());
3011 : break;
3012 : case kWasmF64:
3013 3960 : WriteUnalignedValue(address, wasm_args[i].to<double>());
3014 : break;
3015 : default:
3016 0 : UNIMPLEMENTED();
3017 : }
3018 : offset += param_size;
3019 : }
3020 :
3021 : // Ensure that there is enough space in the arg_buffer to hold the return
3022 : // value(s).
3023 : size_t return_size = 0;
3024 12284 : for (ValueType t : sig->returns()) {
3025 5632 : return_size += ValueTypes::ElementSizeInBytes(t);
3026 : }
3027 13304 : if (arg_buffer.size() < return_size) {
3028 64 : arg_buffer.resize(return_size);
3029 : }
3030 :
3031 : // Wrap the arg_buffer and the code target data pointers in handles. As
3032 : // these are aligned pointers, to the GC it will look like Smis.
3033 : Handle<Object> arg_buffer_obj(
3034 6652 : Object(reinterpret_cast<Address>(arg_buffer.data())), isolate);
3035 : DCHECK(!arg_buffer_obj->IsHeapObject());
3036 : Handle<Object> code_entry_obj(Object(code->instruction_start()), isolate);
3037 : DCHECK(!code_entry_obj->IsHeapObject());
3038 :
3039 : static_assert(compiler::CWasmEntryParameters::kNumParameters == 3,
3040 : "code below needs adaption");
3041 26608 : Handle<Object> args[compiler::CWasmEntryParameters::kNumParameters];
3042 6652 : args[compiler::CWasmEntryParameters::kCodeEntry] = code_entry_obj;
3043 6652 : args[compiler::CWasmEntryParameters::kObjectRef] = object_ref;
3044 6652 : args[compiler::CWasmEntryParameters::kArgumentsBuffer] = arg_buffer_obj;
3045 :
3046 : Handle<Object> receiver = isolate->factory()->undefined_value();
3047 : trap_handler::SetThreadInWasm();
3048 : MaybeHandle<Object> maybe_retval =
3049 6652 : Execution::Call(isolate, wasm_entry, receiver, arraysize(args), args);
3050 : TRACE(" => External wasm function returned%s\n",
3051 : maybe_retval.is_null() ? " with exception" : "");
3052 :
3053 : // Pop arguments off the stack.
3054 6652 : sp_ -= num_args;
3055 :
3056 6652 : if (maybe_retval.is_null()) {
3057 : // JSEntry may throw a stack overflow before we actually get to wasm code
3058 : // or back to the interpreter, meaning the thread-in-wasm flag won't be
3059 : // cleared.
3060 992 : if (trap_handler::IsThreadInWasm()) {
3061 : trap_handler::ClearThreadInWasm();
3062 : }
3063 : return TryHandleException(isolate);
3064 : }
3065 :
3066 : trap_handler::ClearThreadInWasm();
3067 :
3068 : // Push return values.
3069 5660 : if (sig->return_count() > 0) {
3070 : // TODO(wasm): Handle multiple returns.
3071 : DCHECK_EQ(1, sig->return_count());
3072 : Address address = reinterpret_cast<Address>(arg_buffer.data());
3073 5556 : switch (sig->GetReturn()) {
3074 : case kWasmI32:
3075 9784 : Push(WasmValue(ReadUnalignedValue<uint32_t>(address)));
3076 4892 : break;
3077 : case kWasmI64:
3078 16 : Push(WasmValue(ReadUnalignedValue<uint64_t>(address)));
3079 8 : break;
3080 : case kWasmF32:
3081 16 : Push(WasmValue(ReadUnalignedValue<float>(address)));
3082 8 : break;
3083 : case kWasmF64:
3084 1296 : Push(WasmValue(ReadUnalignedValue<double>(address)));
3085 648 : break;
3086 : default:
3087 0 : UNIMPLEMENTED();
3088 : }
3089 : }
3090 5660 : return {ExternalCallResult::EXTERNAL_RETURNED};
3091 : }
3092 :
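            : // Resolve a call target address to its WasmCode object. The target may
            : // point at a jump table slot (translated back to the function index) or
            : // directly at the first instruction of a code object.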
3093 6696 : static WasmCode* GetTargetCode(WasmCodeManager* code_manager,
3094 : Address target) {
3095 6696 : NativeModule* native_module = code_manager->LookupNativeModule(target);
3096 6696 : if (native_module->is_jump_table_slot(target)) {
3097 : uint32_t func_index =
3098 128 : native_module->GetFunctionIndexFromJumpTableSlot(target);
3099 128 : return native_module->code(func_index);
3100 : }
3101 6568 : WasmCode* code = native_module->Lookup(target);
3102 : DCHECK_EQ(code->instruction_start(), target);
3103 6568 : return code;
3104 : }
3105 :
3106 13264 : ExternalCallResult CallImportedFunction(uint32_t function_index) {
3107 : DCHECK_GT(module()->num_imported_functions, function_index);
3108 : // Use a new HandleScope to avoid leaking / accumulating handles in the
3109 : // outer scope.
3110 : Isolate* isolate = instance_object_->GetIsolate();
3111 : HandleScope handle_scope(isolate);
3112 :
3113 6632 : ImportedFunctionEntry entry(instance_object_, function_index);
3114 6632 : Handle<Object> object_ref(entry.object_ref(), isolate);
3115 : WasmCode* code =
3116 13264 : GetTargetCode(isolate->wasm_engine()->code_manager(), entry.target());
3117 19896 : FunctionSig* sig = module()->functions[function_index].sig;
3118 13264 : return CallExternalWasmFunction(isolate, object_ref, code, sig);
3119 : }
3120 :
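            : // Resolve and validate an indirect call. In the testing configuration
            : // (see {call_indirect_through_module}) the target is looked up in the
            : // module's code map; otherwise it is read from the instance's indirect
            : // function table. Both paths perform a canonical signature check.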
3121 204 : ExternalCallResult CallIndirectFunction(uint32_t table_index,
3122 : uint32_t entry_index,
3123 400 : uint32_t sig_index) {
3124 204 : if (codemap()->call_indirect_through_module()) {
3125 : // Rely on the information stored in the WasmModule.
3126 : InterpreterCode* code =
3127 108 : codemap()->GetIndirectCode(table_index, entry_index);
3128 108 : if (!code) return {ExternalCallResult::INVALID_FUNC};
3129 72 : if (code->function->sig_index != sig_index) {
3130 : // If not an exact match, we have to do a canonical check.
3131 : int function_canonical_id =
3132 36 : module()->signature_ids[code->function->sig_index];
3133 24 : int expected_canonical_id = module()->signature_ids[sig_index];
3134 : DCHECK_EQ(function_canonical_id,
3135 : module()->signature_map.Find(*code->function->sig));
3136 12 : if (function_canonical_id != expected_canonical_id) {
3137 4 : return {ExternalCallResult::SIGNATURE_MISMATCH};
3138 : }
3139 : }
3140 68 : return {ExternalCallResult::INTERNAL, code};
3141 : }
3142 :
3143 : Isolate* isolate = instance_object_->GetIsolate();
3144 288 : uint32_t expected_sig_id = module()->signature_ids[sig_index];
3145 : DCHECK_EQ(expected_sig_id,
3146 : module()->signature_map.Find(*module()->signatures[sig_index]));
3147 :
3148 : // The function table is stored in the instance.
3149 : // TODO(wasm): the wasm interpreter currently supports only one table.
3150 96 : CHECK_EQ(0, table_index);
3151 : // Bounds check against table size.
3152 96 : if (entry_index >= instance_object_->indirect_function_table_size()) {
3153 8 : return {ExternalCallResult::INVALID_FUNC};
3154 : }
3155 :
3156 88 : IndirectFunctionTableEntry entry(instance_object_, entry_index);
3157 : // Signature check.
3158 88 : if (entry.sig_id() != static_cast<int32_t>(expected_sig_id)) {
3159 24 : return {ExternalCallResult::SIGNATURE_MISMATCH};
3160 : }
3161 :
3162 : HandleScope scope(isolate);
3163 128 : FunctionSig* signature = module()->signatures[sig_index];
3164 64 : Handle<Object> object_ref = handle(entry.object_ref(), isolate);
3165 24 : WasmCode* code =
3166 128 : GetTargetCode(isolate->wasm_engine()->code_manager(), entry.target());
3167 :
3168 176 : if (!object_ref->IsWasmInstanceObject() || /* call to an import */
3169 : !instance_object_.is_identical_to(object_ref) /* cross-instance */) {
3170 40 : return CallExternalWasmFunction(isolate, object_ref, code, signature);
3171 : }
3172 :
3173 : DCHECK(code->kind() == WasmCode::kInterpreterEntry ||
3174 : code->kind() == WasmCode::kFunction);
3175 24 : return {ExternalCallResult::INTERNAL, codemap()->GetCode(code->index())};
3176 : }
3177 :
3178 : inline Activation current_activation() {
3179 9364102 : return activations_.empty() ? Activation(0, 0) : activations_.back();
3180 : }
3181 : };
3182 :
3183 : class InterpretedFrameImpl {
3184 : public:
3185 : InterpretedFrameImpl(ThreadImpl* thread, int index)
3186 678444 : : thread_(thread), index_(index) {
3187 : DCHECK_LE(0, index);
3188 : }
3189 :
3190 1358428 : const WasmFunction* function() const { return frame()->code->function; }
3191 :
3192 676692 : int pc() const {
3193 : DCHECK_LE(0, frame()->pc);
3194 : DCHECK_GE(kMaxInt, frame()->pc);
3195 676692 : return static_cast<int>(frame()->pc);
3196 : }
3197 :
3198 : int GetParameterCount() const {
3199 : DCHECK_GE(kMaxInt, function()->sig->parameter_count());
3200 460 : return static_cast<int>(function()->sig->parameter_count());
3201 : }
3202 :
3203 : int GetLocalCount() const {
3204 1952 : size_t num_locals = function()->sig->parameter_count() +
3205 3904 : frame()->code->locals.type_list.size();
3206 : DCHECK_GE(kMaxInt, num_locals);
3207 1952 : return static_cast<int>(num_locals);
3208 : }
3209 :
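            : // Height of this frame's operand stack, excluding parameters and locals.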
3210 700 : int GetStackHeight() const {
3211 : bool is_top_frame =
3212 700 : static_cast<size_t>(index_) + 1 == thread_->frames_.size();
3213 : size_t stack_limit =
3214 828 : is_top_frame ? thread_->StackHeight() : thread_->frames_[index_ + 1].sp;
3215 : DCHECK_LE(frame()->sp, stack_limit);
3216 700 : size_t frame_size = stack_limit - frame()->sp;
3217 : DCHECK_LE(GetLocalCount(), frame_size);
3218 1400 : return static_cast<int>(frame_size) - GetLocalCount();
3219 : }
3220 :
3221 944 : WasmValue GetLocalValue(int index) const {
3222 : DCHECK_LE(0, index);
3223 : DCHECK_GT(GetLocalCount(), index);
3224 944 : return thread_->GetStackValue(static_cast<int>(frame()->sp) + index);
3225 : }
3226 :
3227 264 : WasmValue GetStackValue(int index) const {
3228 : DCHECK_LE(0, index);
3229 : // Index must be within the number of stack values of this frame.
3230 : DCHECK_GT(GetStackHeight(), index);
3231 528 : return thread_->GetStackValue(static_cast<int>(frame()->sp) +
3232 528 : GetLocalCount() + index);
3233 : }
3234 :
3235 : private:
3236 : ThreadImpl* thread_;
3237 : int index_;
3238 :
3239 : ThreadImpl::Frame* frame() const {
3240 : DCHECK_GT(thread_->frames_.size(), index_);
3241 1356632 : return &thread_->frames_[index_];
3242 : }
3243 : };
3244 :
3245 : namespace {
3246 :
3247 : // Converters between WasmInterpreter::Thread and WasmInterpreter::ThreadImpl.
3248 : // Thread* is the public interface, without knowledge of the object layout.
3249 : // This cast is potentially risky, but as long as we always cast it back before
3250            : // accessing any data, it is fine in practice. UBSan does not complain.
3251 : WasmInterpreter::Thread* ToThread(ThreadImpl* impl) {
3252 : return reinterpret_cast<WasmInterpreter::Thread*>(impl);
3253 : }
3254 : ThreadImpl* ToImpl(WasmInterpreter::Thread* thread) {
3255 : return reinterpret_cast<ThreadImpl*>(thread);
3256 : }
3257 :
3258 : // Same conversion for InterpretedFrame and InterpretedFrameImpl.
3259 : InterpretedFrame* ToFrame(InterpretedFrameImpl* impl) {
3260 : return reinterpret_cast<InterpretedFrame*>(impl);
3261 : }
3262 : const InterpretedFrameImpl* ToImpl(const InterpretedFrame* frame) {
3263 : return reinterpret_cast<const InterpretedFrameImpl*>(frame);
3264 : }
3265 :
3266 : } // namespace
3267 :
3268 : //============================================================================
3269 : // Implementation of the pimpl idiom for WasmInterpreter::Thread.
3270 : // Instead of placing a pointer to the ThreadImpl inside of the Thread object,
3271 : // we just reinterpret_cast them. ThreadImpls are only allocated inside this
3272 : // translation unit anyway.
3273 : //============================================================================
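            : // Each public method simply forwards to the corresponding ThreadImpl
            : // method, e.g.:
            : //   int WasmInterpreter::Thread::GetFrameCount() {
            : //     return ToImpl(this)->GetFrameCount();
            : //   }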
3274 4671825 : WasmInterpreter::State WasmInterpreter::Thread::state() {
3275 4671825 : return ToImpl(this)->state();
3276 : }
3277 4664254 : void WasmInterpreter::Thread::InitFrame(const WasmFunction* function,
3278 : WasmValue* args) {
3279 4664254 : ToImpl(this)->InitFrame(function, args);
3280 4664254 : }
3281 4669698 : WasmInterpreter::State WasmInterpreter::Thread::Run(int num_steps) {
3282 4669698 : return ToImpl(this)->Run(num_steps);
3283 : }
3284 0 : void WasmInterpreter::Thread::Pause() { return ToImpl(this)->Pause(); }
3285 9261588 : void WasmInterpreter::Thread::Reset() { return ToImpl(this)->Reset(); }
3286 : WasmInterpreter::Thread::ExceptionHandlingResult
3287 104 : WasmInterpreter::Thread::RaiseException(Isolate* isolate,
3288 : Handle<Object> exception) {
3289 104 : return ToImpl(this)->RaiseException(isolate, exception);
3290 : }
3291 2784 : pc_t WasmInterpreter::Thread::GetBreakpointPc() {
3292 2784 : return ToImpl(this)->GetBreakpointPc();
3293 : }
3294 6060 : int WasmInterpreter::Thread::GetFrameCount() {
3295 6060 : return ToImpl(this)->GetFrameCount();
3296 : }
3297 678444 : WasmInterpreter::FramePtr WasmInterpreter::Thread::GetFrame(int index) {
3298 : DCHECK_LE(0, index);
3299 : DCHECK_GT(GetFrameCount(), index);
3300 1356888 : return FramePtr(ToFrame(new InterpretedFrameImpl(ToImpl(this), index)));
3301 : }
3302 4626777 : WasmValue WasmInterpreter::Thread::GetReturnValue(int index) {
3303 4626777 : return ToImpl(this)->GetReturnValue(index);
3304 : }
3305 96 : TrapReason WasmInterpreter::Thread::GetTrapReason() {
3306 96 : return ToImpl(this)->GetTrapReason();
3307 : }
3308 4591849 : bool WasmInterpreter::Thread::PossibleNondeterminism() {
3309 4591849 : return ToImpl(this)->PossibleNondeterminism();
3310 : }
3311 4681060 : uint64_t WasmInterpreter::Thread::NumInterpretedCalls() {
3312 4681060 : return ToImpl(this)->NumInterpretedCalls();
3313 : }
3314 40 : void WasmInterpreter::Thread::AddBreakFlags(uint8_t flags) {
3315 : ToImpl(this)->AddBreakFlags(flags);
3316 40 : }
3317 0 : void WasmInterpreter::Thread::ClearBreakFlags() {
3318 : ToImpl(this)->ClearBreakFlags();
3319 0 : }
3320 24 : uint32_t WasmInterpreter::Thread::NumActivations() {
3321 24 : return ToImpl(this)->NumActivations();
3322 : }
3323 33462 : uint32_t WasmInterpreter::Thread::StartActivation() {
3324 33462 : return ToImpl(this)->StartActivation();
3325 : }
3326 33460 : void WasmInterpreter::Thread::FinishActivation(uint32_t id) {
3327 : ToImpl(this)->FinishActivation(id);
3328 33460 : }
3329 5096 : uint32_t WasmInterpreter::Thread::ActivationFrameBase(uint32_t id) {
3330 5096 : return ToImpl(this)->ActivationFrameBase(id);
3331 : }
3332 :
3333 : //============================================================================
3334 : // The implementation details of the interpreter.
3335 : //============================================================================
3336 : class WasmInterpreterInternals : public ZoneObject {
3337 : public:
3338 : // Create a copy of the module bytes for the interpreter, since the passed
3339 : // pointer might be invalidated after constructing the interpreter.
3340 : const ZoneVector<uint8_t> module_bytes_;
3341 : CodeMap codemap_;
3342 : ZoneVector<ThreadImpl> threads_;
3343 :
3344 365590 : WasmInterpreterInternals(Zone* zone, const WasmModule* module,
3345 : const ModuleWireBytes& wire_bytes,
3346 : Handle<WasmInstanceObject> instance_object)
3347 : : module_bytes_(wire_bytes.start(), wire_bytes.end(), zone),
3348 : codemap_(module, module_bytes_.data(), zone),
3349 1096770 : threads_(zone) {
3350 365590 : threads_.emplace_back(zone, &codemap_, instance_object);
3351 365590 : }
3352 : };
3353 :
3354 : namespace {
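            : // The interpreter holds its WasmInstanceObject through a weak global
            : // handle so that it does not keep the instance alive; the weak callback
            : // only needs to free the handle itself, hence the "Nop" finalizer.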
3355 340250 : void NopFinalizer(const v8::WeakCallbackInfo<void>& data) {
3356 : Address* global_handle_location =
3357 : reinterpret_cast<Address*>(data.GetParameter());
3358 340250 : GlobalHandles::Destroy(global_handle_location);
3359 340250 : }
3360 :
3361 365590 : Handle<WasmInstanceObject> MakeWeak(
3362 365590 : Isolate* isolate, Handle<WasmInstanceObject> instance_object) {
3363 : Handle<WasmInstanceObject> weak_instance =
3364 : isolate->global_handles()->Create<WasmInstanceObject>(*instance_object);
3365 : Address* global_handle_location = weak_instance.location();
3366 : GlobalHandles::MakeWeak(global_handle_location, global_handle_location,
3367 365590 : &NopFinalizer, v8::WeakCallbackType::kParameter);
3368 365590 : return weak_instance;
3369 : }
3370 : } // namespace
3371 :
3372 : //============================================================================
3373 : // Implementation of the public interface of the interpreter.
3374 : //============================================================================
3375 365590 : WasmInterpreter::WasmInterpreter(Isolate* isolate, const WasmModule* module,
3376 : const ModuleWireBytes& wire_bytes,
3377 : Handle<WasmInstanceObject> instance_object)
3378 : : zone_(isolate->allocator(), ZONE_NAME),
3379 : internals_(new (&zone_) WasmInterpreterInternals(
3380 731180 : &zone_, module, wire_bytes, MakeWeak(isolate, instance_object))) {}
3381 :
3382 731180 : WasmInterpreter::~WasmInterpreter() { internals_->~WasmInterpreterInternals(); }
3383 :
3384 0 : void WasmInterpreter::Run() { internals_->threads_[0].Run(); }
3385 :
3386 0 : void WasmInterpreter::Pause() { internals_->threads_[0].Pause(); }
3387 :
3388 3096 : bool WasmInterpreter::SetBreakpoint(const WasmFunction* function, pc_t pc,
3389 : bool enabled) {
3390 1548 : InterpreterCode* code = internals_->codemap_.GetCode(function);
3391 1548 : size_t size = static_cast<size_t>(code->end - code->start);
3392 : // Check bounds for {pc}.
3393 1548 : if (pc < code->locals.encoded_size || pc >= size) return false;
3394 : // Make a copy of the code before enabling a breakpoint.
3395 1548 : if (enabled && code->orig_start == code->start) {
3396 64 : code->start = reinterpret_cast<byte*>(zone_.New(size));
3397 64 : memcpy(code->start, code->orig_start, size);
3398 64 : code->end = code->start + size;
3399 : }
3400 1548 : bool prev = code->start[pc] == kInternalBreakpoint;
3401 1548 : if (enabled) {
3402 852 : code->start[pc] = kInternalBreakpoint;
3403 : } else {
3404 696 : code->start[pc] = code->orig_start[pc];
3405 : }
3406 1548 : return prev;
3407 : }
3408 :
3409 0 : bool WasmInterpreter::GetBreakpoint(const WasmFunction* function, pc_t pc) {
3410 0 : InterpreterCode* code = internals_->codemap_.GetCode(function);
3411 0 : size_t size = static_cast<size_t>(code->end - code->start);
3412 : // Check bounds for {pc}.
3413 0 : if (pc < code->locals.encoded_size || pc >= size) return false;
3414 : // Check if a breakpoint is present at that place in the code.
3415 0 : return code->start[pc] == kInternalBreakpoint;
3416 : }
3417 :
3418 0 : bool WasmInterpreter::SetTracing(const WasmFunction* function, bool enabled) {
3419 0 : UNIMPLEMENTED();
3420 : return false;
3421 : }
3422 :
3423 0 : int WasmInterpreter::GetThreadCount() {
3424 0 : return 1; // only one thread for now.
3425 : }
3426 :
3427 4787346 : WasmInterpreter::Thread* WasmInterpreter::GetThread(int id) {
3428 4787346 : CHECK_EQ(0, id); // only one thread for now.
3429 9574692 : return ToThread(&internals_->threads_[id]);
3430 : }
3431 :
3432 367172 : void WasmInterpreter::AddFunctionForTesting(const WasmFunction* function) {
3433 367172 : internals_->codemap_.AddFunction(function, nullptr, nullptr);
3434 367172 : }
3435 :
3436 731408 : void WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function,
3437 : const byte* start,
3438 : const byte* end) {
3439 731408 : internals_->codemap_.SetFunctionCode(function, start, end);
3440 365704 : }
3441 :
3442 364662 : void WasmInterpreter::SetCallIndirectTestMode() {
3443 364662 : internals_->codemap_.set_call_indirect_through_module(true);
3444 364662 : }
3445 :
3446 30 : ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
3447 : Zone* zone, const WasmModule* module, const byte* start, const byte* end) {
3448 : // Create some dummy structures, to avoid special-casing the implementation
3449 : // just for testing.
3450 30 : FunctionSig sig(0, 0, nullptr);
3451 30 : WasmFunction function{&sig, 0, 0, {0, 0}, false, false};
3452 : InterpreterCode code{
3453 60 : &function, BodyLocalDecls(zone), start, end, nullptr, nullptr, nullptr};
3454 :
3455 : // Now compute and return the control transfers.
3456 30 : SideTable side_table(zone, module, &code);
3457 30 : return side_table.map_;
3458 : }
3459 :
3460 : //============================================================================
3461 : // Implementation of the frame inspection interface.
3462 : //============================================================================
3463 677284 : const WasmFunction* InterpretedFrame::function() const {
3464 677284 : return ToImpl(this)->function();
3465 : }
3466 1353384 : int InterpretedFrame::pc() const { return ToImpl(this)->pc(); }
3467 460 : int InterpretedFrame::GetParameterCount() const {
3468 460 : return ToImpl(this)->GetParameterCount();
3469 : }
3470 988 : int InterpretedFrame::GetLocalCount() const {
3471 988 : return ToImpl(this)->GetLocalCount();
3472 : }
3473 700 : int InterpretedFrame::GetStackHeight() const {
3474 700 : return ToImpl(this)->GetStackHeight();
3475 : }
3476 944 : WasmValue InterpretedFrame::GetLocalValue(int index) const {
3477 944 : return ToImpl(this)->GetLocalValue(index);
3478 : }
3479 264 : WasmValue InterpretedFrame::GetStackValue(int index) const {
3480 264 : return ToImpl(this)->GetStackValue(index);
3481 : }
3482 678444 : void InterpretedFrameDeleter::operator()(InterpretedFrame* ptr) {
3483 678444 : delete ToImpl(ptr);
3484 678444 : }
3485 :
3486 : #undef TRACE
3487 : #undef LANE
3488 : #undef FOREACH_INTERNAL_OPCODE
3489 : #undef WASM_CTYPES
3490 : #undef FOREACH_SIMPLE_BINOP
3491 : #undef FOREACH_OTHER_BINOP
3492 : #undef FOREACH_I32CONV_FLOATOP
3493 : #undef FOREACH_OTHER_UNOP
3494 :
3495 : } // namespace wasm
3496 : } // namespace internal
3497 178779 : } // namespace v8