Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include <type_traits>
6 :
7 : #include "src/wasm/wasm-interpreter.h"
8 :
9 : #include "src/assembler-inl.h"
10 : #include "src/conversions.h"
11 : #include "src/identity-map.h"
12 : #include "src/objects-inl.h"
13 : #include "src/utils.h"
14 : #include "src/wasm/decoder.h"
15 : #include "src/wasm/function-body-decoder-impl.h"
16 : #include "src/wasm/function-body-decoder.h"
17 : #include "src/wasm/wasm-external-refs.h"
18 : #include "src/wasm/wasm-limits.h"
19 : #include "src/wasm/wasm-module.h"
20 : #include "src/wasm/wasm-objects.h"
21 :
22 : #include "src/zone/accounting-allocator.h"
23 : #include "src/zone/zone-containers.h"
24 :
25 : namespace v8 {
26 : namespace internal {
27 : namespace wasm {
28 :
29 : #if DEBUG
30 : #define TRACE(...) \
31 : do { \
32 : if (FLAG_trace_wasm_interpreter) PrintF(__VA_ARGS__); \
33 : } while (false)
34 : #else
35 : #define TRACE(...)
36 : #endif
37 :
38 : #define FOREACH_INTERNAL_OPCODE(V) V(Breakpoint, 0xFF)
39 :
40 : #define WASM_CTYPES(V) \
41 : V(I32, int32_t) V(I64, int64_t) V(F32, float) V(F64, double)
42 :
43 : #define FOREACH_SIMPLE_BINOP(V) \
44 : V(I32Add, uint32_t, +) \
45 : V(I32Sub, uint32_t, -) \
46 : V(I32Mul, uint32_t, *) \
47 : V(I32And, uint32_t, &) \
48 : V(I32Ior, uint32_t, |) \
49 : V(I32Xor, uint32_t, ^) \
50 : V(I32Eq, uint32_t, ==) \
51 : V(I32Ne, uint32_t, !=) \
52 : V(I32LtU, uint32_t, <) \
53 : V(I32LeU, uint32_t, <=) \
54 : V(I32GtU, uint32_t, >) \
55 : V(I32GeU, uint32_t, >=) \
56 : V(I32LtS, int32_t, <) \
57 : V(I32LeS, int32_t, <=) \
58 : V(I32GtS, int32_t, >) \
59 : V(I32GeS, int32_t, >=) \
60 : V(I64Add, uint64_t, +) \
61 : V(I64Sub, uint64_t, -) \
62 : V(I64Mul, uint64_t, *) \
63 : V(I64And, uint64_t, &) \
64 : V(I64Ior, uint64_t, |) \
65 : V(I64Xor, uint64_t, ^) \
66 : V(I64Eq, uint64_t, ==) \
67 : V(I64Ne, uint64_t, !=) \
68 : V(I64LtU, uint64_t, <) \
69 : V(I64LeU, uint64_t, <=) \
70 : V(I64GtU, uint64_t, >) \
71 : V(I64GeU, uint64_t, >=) \
72 : V(I64LtS, int64_t, <) \
73 : V(I64LeS, int64_t, <=) \
74 : V(I64GtS, int64_t, >) \
75 : V(I64GeS, int64_t, >=) \
76 : V(F32Add, float, +) \
77 : V(F32Sub, float, -) \
78 : V(F32Eq, float, ==) \
79 : V(F32Ne, float, !=) \
80 : V(F32Lt, float, <) \
81 : V(F32Le, float, <=) \
82 : V(F32Gt, float, >) \
83 : V(F32Ge, float, >=) \
84 : V(F64Add, double, +) \
85 : V(F64Sub, double, -) \
86 : V(F64Eq, double, ==) \
87 : V(F64Ne, double, !=) \
88 : V(F64Lt, double, <) \
89 : V(F64Le, double, <=) \
90 : V(F64Gt, double, >) \
91 : V(F64Ge, double, >=) \
92 : V(F32Mul, float, *) \
93 : V(F64Mul, double, *) \
94 : V(F32Div, float, /) \
95 : V(F64Div, double, /)
96 :
97 : #define FOREACH_OTHER_BINOP(V) \
98 : V(I32DivS, int32_t) \
99 : V(I32DivU, uint32_t) \
100 : V(I32RemS, int32_t) \
101 : V(I32RemU, uint32_t) \
102 : V(I32Shl, uint32_t) \
103 : V(I32ShrU, uint32_t) \
104 : V(I32ShrS, int32_t) \
105 : V(I64DivS, int64_t) \
106 : V(I64DivU, uint64_t) \
107 : V(I64RemS, int64_t) \
108 : V(I64RemU, uint64_t) \
109 : V(I64Shl, uint64_t) \
110 : V(I64ShrU, uint64_t) \
111 : V(I64ShrS, int64_t) \
112 : V(I32Ror, int32_t) \
113 : V(I32Rol, int32_t) \
114 : V(I64Ror, int64_t) \
115 : V(I64Rol, int64_t) \
116 : V(F32Min, float) \
117 : V(F32Max, float) \
118 : V(F64Min, double) \
119 : V(F64Max, double) \
120 : V(I32AsmjsDivS, int32_t) \
121 : V(I32AsmjsDivU, uint32_t) \
122 : V(I32AsmjsRemS, int32_t) \
123 : V(I32AsmjsRemU, uint32_t)
124 :
125 : #define FOREACH_OTHER_UNOP(V) \
126 : V(I32Clz, uint32_t) \
127 : V(I32Ctz, uint32_t) \
128 : V(I32Popcnt, uint32_t) \
129 : V(I32Eqz, uint32_t) \
130 : V(I64Clz, uint64_t) \
131 : V(I64Ctz, uint64_t) \
132 : V(I64Popcnt, uint64_t) \
133 : V(I64Eqz, uint64_t) \
134 : V(F32Abs, float) \
135 : V(F32Neg, float) \
136 : V(F32Ceil, float) \
137 : V(F32Floor, float) \
138 : V(F32Trunc, float) \
139 : V(F32NearestInt, float) \
140 : V(F64Abs, double) \
141 : V(F64Neg, double) \
142 : V(F64Ceil, double) \
143 : V(F64Floor, double) \
144 : V(F64Trunc, double) \
145 : V(F64NearestInt, double) \
146 : V(I32SConvertF32, float) \
147 : V(I32SConvertF64, double) \
148 : V(I32UConvertF32, float) \
149 : V(I32UConvertF64, double) \
150 : V(I32ConvertI64, int64_t) \
151 : V(I64SConvertF32, float) \
152 : V(I64SConvertF64, double) \
153 : V(I64UConvertF32, float) \
154 : V(I64UConvertF64, double) \
155 : V(I64SConvertI32, int32_t) \
156 : V(I64UConvertI32, uint32_t) \
157 : V(F32SConvertI32, int32_t) \
158 : V(F32UConvertI32, uint32_t) \
159 : V(F32SConvertI64, int64_t) \
160 : V(F32UConvertI64, uint64_t) \
161 : V(F32ConvertF64, double) \
162 : V(F32ReinterpretI32, int32_t) \
163 : V(F64SConvertI32, int32_t) \
164 : V(F64UConvertI32, uint32_t) \
165 : V(F64SConvertI64, int64_t) \
166 : V(F64UConvertI64, uint64_t) \
167 : V(F64ConvertF32, float) \
168 : V(F64ReinterpretI64, int64_t) \
169 : V(I32AsmjsSConvertF32, float) \
170 : V(I32AsmjsUConvertF32, float) \
171 : V(I32AsmjsSConvertF64, double) \
172 : V(I32AsmjsUConvertF64, double) \
173 : V(F32Sqrt, float) \
174 : V(F64Sqrt, double)
175 :
176 : namespace {
177 :
178 : inline int32_t ExecuteI32DivS(int32_t a, int32_t b, TrapReason* trap) {
179 1134 : if (b == 0) {
180 : *trap = kTrapDivByZero;
181 : return 0;
182 : }
183 987 : if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
184 : *trap = kTrapDivUnrepresentable;
185 : return 0;
186 : }
187 980 : return a / b;
188 : }
189 :
190 : inline uint32_t ExecuteI32DivU(uint32_t a, uint32_t b, TrapReason* trap) {
191 49 : if (b == 0) {
192 : *trap = kTrapDivByZero;
193 : return 0;
194 : }
195 28 : return a / b;
196 : }
197 :
198 : inline int32_t ExecuteI32RemS(int32_t a, int32_t b, TrapReason* trap) {
199 98 : if (b == 0) {
200 : *trap = kTrapRemByZero;
201 : return 0;
202 : }
203 56 : if (b == -1) return 0;
204 42 : return a % b;
205 : }
206 :
207 : inline uint32_t ExecuteI32RemU(uint32_t a, uint32_t b, TrapReason* trap) {
208 49 : if (b == 0) {
209 : *trap = kTrapRemByZero;
210 : return 0;
211 : }
212 28 : return a % b;
213 : }
214 :
215 : inline uint32_t ExecuteI32Shl(uint32_t a, uint32_t b, TrapReason* trap) {
216 20426 : return a << (b & 0x1f);
217 : }
218 :
219 : inline uint32_t ExecuteI32ShrU(uint32_t a, uint32_t b, TrapReason* trap) {
220 20426 : return a >> (b & 0x1f);
221 : }
222 :
223 : inline int32_t ExecuteI32ShrS(int32_t a, int32_t b, TrapReason* trap) {
224 20426 : return a >> (b & 0x1f);
225 : }
226 :
227 : inline int64_t ExecuteI64DivS(int64_t a, int64_t b, TrapReason* trap) {
228 42602 : if (b == 0) {
229 : *trap = kTrapDivByZero;
230 : return 0;
231 : }
232 41398 : if (b == -1 && a == std::numeric_limits<int64_t>::min()) {
233 : *trap = kTrapDivUnrepresentable;
234 : return 0;
235 : }
236 41391 : return a / b;
237 : }
238 :
239 : inline uint64_t ExecuteI64DivU(uint64_t a, uint64_t b, TrapReason* trap) {
240 41545 : if (b == 0) {
241 : *trap = kTrapDivByZero;
242 : return 0;
243 : }
244 40446 : return a / b;
245 : }
246 :
247 : inline int64_t ExecuteI64RemS(int64_t a, int64_t b, TrapReason* trap) {
248 41552 : if (b == 0) {
249 : *trap = kTrapRemByZero;
250 : return 0;
251 : }
252 40453 : if (b == -1) return 0;
253 39907 : return a % b;
254 : }
255 :
256 : inline uint64_t ExecuteI64RemU(uint64_t a, uint64_t b, TrapReason* trap) {
257 41545 : if (b == 0) {
258 : *trap = kTrapRemByZero;
259 : return 0;
260 : }
261 40446 : return a % b;
262 : }
263 :
264 : inline uint64_t ExecuteI64Shl(uint64_t a, uint64_t b, TrapReason* trap) {
265 85176 : return a << (b & 0x3f);
266 : }
267 :
268 : inline uint64_t ExecuteI64ShrU(uint64_t a, uint64_t b, TrapReason* trap) {
269 85176 : return a >> (b & 0x3f);
270 : }
271 :
272 : inline int64_t ExecuteI64ShrS(int64_t a, int64_t b, TrapReason* trap) {
273 85176 : return a >> (b & 0x3f);
274 : }
275 :
276 : inline uint32_t ExecuteI32Ror(uint32_t a, uint32_t b, TrapReason* trap) {
277 28 : uint32_t shift = (b & 0x1f);
278 28 : return (a >> shift) | (a << (32 - shift));
279 : }
280 :
281 : inline uint32_t ExecuteI32Rol(uint32_t a, uint32_t b, TrapReason* trap) {
282 28 : uint32_t shift = (b & 0x1f);
283 28 : return (a << shift) | (a >> (32 - shift));
284 : }
285 :
286 : inline uint64_t ExecuteI64Ror(uint64_t a, uint64_t b, TrapReason* trap) {
287 41531 : uint32_t shift = (b & 0x3f);
288 41531 : return (a >> shift) | (a << (64 - shift));
289 : }
290 :
291 : inline uint64_t ExecuteI64Rol(uint64_t a, uint64_t b, TrapReason* trap) {
292 41531 : uint32_t shift = (b & 0x3f);
293 41531 : return (a << shift) | (a >> (64 - shift));
294 : }
295 :
296 : inline float ExecuteF32Min(float a, float b, TrapReason* trap) {
297 92575 : return JSMin(a, b);
298 : }
299 :
300 : inline float ExecuteF32Max(float a, float b, TrapReason* trap) {
301 92575 : return JSMax(a, b);
302 : }
303 :
304 : inline float ExecuteF32CopySign(float a, float b, TrapReason* trap) {
305 92589 : return copysignf(a, b);
306 : }
307 :
308 : inline double ExecuteF64Min(double a, double b, TrapReason* trap) {
309 16807 : return JSMin(a, b);
310 : }
311 :
312 : inline double ExecuteF64Max(double a, double b, TrapReason* trap) {
313 16807 : return JSMax(a, b);
314 : }
315 :
316 : inline double ExecuteF64CopySign(double a, double b, TrapReason* trap) {
317 16821 : return copysign(a, b);
318 : }
319 :
320 : inline int32_t ExecuteI32AsmjsDivS(int32_t a, int32_t b, TrapReason* trap) {
321 3815 : if (b == 0) return 0;
322 3416 : if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
323 : return std::numeric_limits<int32_t>::min();
324 : }
325 3402 : return a / b;
326 : }
327 :
328 : inline uint32_t ExecuteI32AsmjsDivU(uint32_t a, uint32_t b, TrapReason* trap) {
329 35 : if (b == 0) return 0;
330 14 : return a / b;
331 : }
332 :
333 : inline int32_t ExecuteI32AsmjsRemS(int32_t a, int32_t b, TrapReason* trap) {
334 3815 : if (b == 0) return 0;
335 3416 : if (b == -1) return 0;
336 3031 : return a % b;
337 : }
338 :
339 : inline uint32_t ExecuteI32AsmjsRemU(uint32_t a, uint32_t b, TrapReason* trap) {
340 35 : if (b == 0) return 0;
341 14 : return a % b;
342 : }
343 :
344 : inline int32_t ExecuteI32AsmjsSConvertF32(float a, TrapReason* trap) {
345 805 : return DoubleToInt32(a);
346 : }
347 :
348 : inline uint32_t ExecuteI32AsmjsUConvertF32(float a, TrapReason* trap) {
349 805 : return DoubleToUint32(a);
350 : }
351 :
352 : inline int32_t ExecuteI32AsmjsSConvertF64(double a, TrapReason* trap) {
353 343 : return DoubleToInt32(a);
354 : }
355 :
356 : inline uint32_t ExecuteI32AsmjsUConvertF64(double a, TrapReason* trap) {
357 : return DoubleToUint32(a);
358 : }
359 :
360 : int32_t ExecuteI32Clz(uint32_t val, TrapReason* trap) {
361 462 : return base::bits::CountLeadingZeros32(val);
362 : }
363 :
364 : uint32_t ExecuteI32Ctz(uint32_t val, TrapReason* trap) {
365 : return base::bits::CountTrailingZeros32(val);
366 : }
367 :
368 : uint32_t ExecuteI32Popcnt(uint32_t val, TrapReason* trap) {
369 70 : return word32_popcnt_wrapper(&val);
370 : }
371 :
372 : inline uint32_t ExecuteI32Eqz(uint32_t val, TrapReason* trap) {
373 868 : return val == 0 ? 1 : 0;
374 : }
375 :
376 : int64_t ExecuteI64Clz(uint64_t val, TrapReason* trap) {
377 455 : return base::bits::CountLeadingZeros64(val);
378 : }
379 :
380 : inline uint64_t ExecuteI64Ctz(uint64_t val, TrapReason* trap) {
381 455 : return base::bits::CountTrailingZeros64(val);
382 : }
383 :
384 : inline int64_t ExecuteI64Popcnt(uint64_t val, TrapReason* trap) {
385 70 : return word64_popcnt_wrapper(&val);
386 : }
387 :
388 : inline int32_t ExecuteI64Eqz(uint64_t val, TrapReason* trap) {
389 553 : return val == 0 ? 1 : 0;
390 : }
391 :
392 : inline float ExecuteF32Abs(float a, TrapReason* trap) {
393 28 : return bit_cast<float>(bit_cast<uint32_t>(a) & 0x7fffffff);
394 : }
395 :
396 : inline float ExecuteF32Neg(float a, TrapReason* trap) {
397 819 : return bit_cast<float>(bit_cast<uint32_t>(a) ^ 0x80000000);
398 : }
399 :
400 805 : inline float ExecuteF32Ceil(float a, TrapReason* trap) { return ceilf(a); }
401 :
402 805 : inline float ExecuteF32Floor(float a, TrapReason* trap) { return floorf(a); }
403 :
404 805 : inline float ExecuteF32Trunc(float a, TrapReason* trap) { return truncf(a); }
405 :
406 : inline float ExecuteF32NearestInt(float a, TrapReason* trap) {
407 805 : return nearbyintf(a);
408 : }
409 :
410 : inline float ExecuteF32Sqrt(float a, TrapReason* trap) {
411 14 : float result = sqrtf(a);
412 : return result;
413 : }
414 :
415 : inline double ExecuteF64Abs(double a, TrapReason* trap) {
416 28 : return bit_cast<double>(bit_cast<uint64_t>(a) & 0x7fffffffffffffff);
417 : }
418 :
419 : inline double ExecuteF64Neg(double a, TrapReason* trap) {
420 357 : return bit_cast<double>(bit_cast<uint64_t>(a) ^ 0x8000000000000000);
421 : }
422 :
423 343 : inline double ExecuteF64Ceil(double a, TrapReason* trap) { return ceil(a); }
424 :
425 343 : inline double ExecuteF64Floor(double a, TrapReason* trap) { return floor(a); }
426 :
427 343 : inline double ExecuteF64Trunc(double a, TrapReason* trap) { return trunc(a); }
428 :
429 : inline double ExecuteF64NearestInt(double a, TrapReason* trap) {
430 343 : return nearbyint(a);
431 : }
432 :
433 14 : inline double ExecuteF64Sqrt(double a, TrapReason* trap) { return sqrt(a); }
434 :
435 : int32_t ExecuteI32SConvertF32(float a, TrapReason* trap) {
436 : // The upper bound is (INT32_MAX + 1), which is the lowest float-representable
437 : // number above INT32_MAX which cannot be represented as int32.
438 : float upper_bound = 2147483648.0f;
439 : // We use INT32_MIN as a lower bound because (INT32_MIN - 1) is not
440 : // representable as float, and no number between (INT32_MIN - 1) and INT32_MIN
441 : // is.
442 : float lower_bound = static_cast<float>(INT32_MIN);
443 924 : if (a < upper_bound && a >= lower_bound) {
444 616 : return static_cast<int32_t>(a);
445 : }
446 : *trap = kTrapFloatUnrepresentable;
447 : return 0;
448 : }
449 :
450 : int32_t ExecuteI32SConvertF64(double a, TrapReason* trap) {
451 : // The upper bound is (INT32_MAX + 1), which is the lowest double-
452 : // representable number above INT32_MAX which cannot be represented as int32.
453 : double upper_bound = 2147483648.0;
454 : // The lower bound is (INT32_MIN - 1), which is the greatest double-
455 : // representable number below INT32_MIN which cannot be represented as int32.
456 : double lower_bound = -2147483649.0;
457 462 : if (a < upper_bound && a > lower_bound) {
458 350 : return static_cast<int32_t>(a);
459 : }
460 : *trap = kTrapFloatUnrepresentable;
461 : return 0;
462 : }
463 :
464 : uint32_t ExecuteI32UConvertF32(float a, TrapReason* trap) {
465 : // The upper bound is (UINT32_MAX + 1), which is the lowest
466 : // float-representable number above UINT32_MAX which cannot be represented as
467 : // uint32.
468 : double upper_bound = 4294967296.0f;
469 : double lower_bound = -1.0f;
470 805 : if (a < upper_bound && a > lower_bound) {
471 322 : return static_cast<uint32_t>(a);
472 : }
473 : *trap = kTrapFloatUnrepresentable;
474 : return 0;
475 : }
476 :
477 : uint32_t ExecuteI32UConvertF64(double a, TrapReason* trap) {
478 : // The upper bound is (UINT32_MAX + 1), which is the lowest
479 : // double-representable number above UINT32_MAX which cannot be represented as
480 : // uint32.
481 : double upper_bound = 4294967296.0;
482 : double lower_bound = -1.0;
483 343 : if (a < upper_bound && a > lower_bound) {
484 189 : return static_cast<uint32_t>(a);
485 : }
486 : *trap = kTrapFloatUnrepresentable;
487 : return 0;
488 : }
489 :
490 : inline uint32_t ExecuteI32ConvertI64(int64_t a, TrapReason* trap) {
491 249802 : return static_cast<uint32_t>(a & 0xFFFFFFFF);
492 : }
493 :
494 : int64_t ExecuteI64SConvertF32(float a, TrapReason* trap) {
495 : int64_t output;
496 1610 : if (!float32_to_int64_wrapper(&a, &output)) {
497 : *trap = kTrapFloatUnrepresentable;
498 : }
499 1610 : return output;
500 : }
501 :
502 : int64_t ExecuteI64SConvertF64(double a, TrapReason* trap) {
503 : int64_t output;
504 1240 : if (!float64_to_int64_wrapper(&a, &output)) {
505 : *trap = kTrapFloatUnrepresentable;
506 : }
507 1240 : return output;
508 : }
509 :
510 : uint64_t ExecuteI64UConvertF32(float a, TrapReason* trap) {
511 : uint64_t output;
512 1610 : if (!float32_to_uint64_wrapper(&a, &output)) {
513 : *trap = kTrapFloatUnrepresentable;
514 : }
515 1610 : return output;
516 : }
517 :
518 : uint64_t ExecuteI64UConvertF64(double a, TrapReason* trap) {
519 : uint64_t output;
520 686 : if (!float64_to_uint64_wrapper(&a, &output)) {
521 : *trap = kTrapFloatUnrepresentable;
522 : }
523 686 : return output;
524 : }
525 :
526 : inline int64_t ExecuteI64SConvertI32(int32_t a, TrapReason* trap) {
527 406 : return static_cast<int64_t>(a);
528 : }
529 :
530 : inline int64_t ExecuteI64UConvertI32(uint32_t a, TrapReason* trap) {
531 378 : return static_cast<uint64_t>(a);
532 : }
533 :
534 : inline float ExecuteF32SConvertI32(int32_t a, TrapReason* trap) {
535 35 : return static_cast<float>(a);
536 : }
537 :
538 : inline float ExecuteF32UConvertI32(uint32_t a, TrapReason* trap) {
539 15 : return static_cast<float>(a);
540 : }
541 :
542 : inline float ExecuteF32SConvertI64(int64_t a, TrapReason* trap) {
543 : float output;
544 539 : int64_to_float32_wrapper(&a, &output);
545 539 : return output;
546 : }
547 :
548 : inline float ExecuteF32UConvertI64(uint64_t a, TrapReason* trap) {
549 : float output;
550 532 : uint64_to_float32_wrapper(&a, &output);
551 532 : return output;
552 : }
553 :
554 : inline float ExecuteF32ConvertF64(double a, TrapReason* trap) {
555 15 : return static_cast<float>(a);
556 : }
557 :
558 : inline float ExecuteF32ReinterpretI32(int32_t a, TrapReason* trap) {
559 : return bit_cast<float>(a);
560 : }
561 :
562 : inline double ExecuteF64SConvertI32(int32_t a, TrapReason* trap) {
563 1638 : return static_cast<double>(a);
564 : }
565 :
566 : inline double ExecuteF64UConvertI32(uint32_t a, TrapReason* trap) {
567 0 : return static_cast<double>(a);
568 : }
569 :
570 : inline double ExecuteF64SConvertI64(int64_t a, TrapReason* trap) {
571 : double output;
572 23676 : int64_to_float64_wrapper(&a, &output);
573 23676 : return output;
574 : }
575 :
576 : inline double ExecuteF64UConvertI64(uint64_t a, TrapReason* trap) {
577 : double output;
578 525 : uint64_to_float64_wrapper(&a, &output);
579 525 : return output;
580 : }
581 :
582 : inline double ExecuteF64ConvertF32(float a, TrapReason* trap) {
583 2445 : return static_cast<double>(a);
584 : }
585 :
586 : inline double ExecuteF64ReinterpretI64(int64_t a, TrapReason* trap) {
587 : return bit_cast<double>(a);
588 : }
589 :
590 : inline int32_t ExecuteI32ReinterpretF32(WasmVal a) {
591 : return a.to_unchecked<int32_t>();
592 : }
593 :
594 : inline int64_t ExecuteI64ReinterpretF64(WasmVal a) {
595 : return a.to_unchecked<int64_t>();
596 : }
597 :
598 95 : inline int32_t ExecuteGrowMemory(uint32_t delta_pages,
599 : MaybeHandle<WasmInstanceObject> instance_obj,
600 : WasmInstance* instance) {
601 : DCHECK_EQ(0, instance->mem_size % WasmModule::kPageSize);
602 95 : uint32_t old_pages = instance->mem_size / WasmModule::kPageSize;
603 :
604 : // If an instance is set, execute GrowMemory on the instance. This will also
605 : // update the WasmInstance struct used here.
606 95 : if (!instance_obj.is_null()) {
607 : Isolate* isolate = instance_obj.ToHandleChecked()->GetIsolate();
608 : int32_t ret = WasmInstanceObject::GrowMemory(
609 60 : isolate, instance_obj.ToHandleChecked(), delta_pages);
610 : // Some sanity checks.
611 : DCHECK_EQ(ret == -1 ? old_pages : old_pages + delta_pages,
612 : instance->mem_size / WasmModule::kPageSize);
613 : DCHECK(ret == -1 || static_cast<uint32_t>(ret) == old_pages);
614 60 : return ret;
615 : }
616 :
617 : // TODO(ahaas): Move memory allocation to wasm-module.cc for better
618 : // encapsulation.
619 56 : if (delta_pages > FLAG_wasm_max_mem_pages ||
620 21 : delta_pages > instance->module->max_mem_pages) {
621 : return -1;
622 : }
623 :
624 7 : uint32_t new_pages = old_pages + delta_pages;
625 7 : if (new_pages > FLAG_wasm_max_mem_pages ||
626 : new_pages > instance->module->max_mem_pages) {
627 : return -1;
628 : }
629 :
630 : byte* new_mem_start;
631 7 : if (instance->mem_size == 0) {
632 : // TODO(gdeepti): Fix bounds check to take into account size of memtype.
633 : new_mem_start = static_cast<byte*>(
634 0 : calloc(new_pages * WasmModule::kPageSize, sizeof(byte)));
635 0 : if (!new_mem_start) return -1;
636 : } else {
637 : DCHECK_NOT_NULL(instance->mem_start);
638 7 : if (EnableGuardRegions()) {
639 : v8::base::OS::Unprotect(instance->mem_start,
640 1 : new_pages * WasmModule::kPageSize);
641 1 : new_mem_start = instance->mem_start;
642 : } else {
643 : new_mem_start = static_cast<byte*>(
644 6 : realloc(instance->mem_start, new_pages * WasmModule::kPageSize));
645 6 : if (!new_mem_start) return -1;
646 : }
647 : // Zero initializing uninitialized memory from realloc
648 14 : memset(new_mem_start + old_pages * WasmModule::kPageSize, 0,
649 14 : delta_pages * WasmModule::kPageSize);
650 : }
651 7 : instance->mem_start = new_mem_start;
652 7 : instance->mem_size = new_pages * WasmModule::kPageSize;
653 7 : return static_cast<int32_t>(old_pages);
654 : }
655 :
// Opcodes used internally by the interpreter, outside the wasm opcode space
// (currently only kInternalBreakpoint = 0xFF, from FOREACH_INTERNAL_OPCODE).
enum InternalOpcode {
#define DECL_INTERNAL_ENUM(name, value) kInternal##name = value,
  FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_ENUM)
#undef DECL_INTERNAL_ENUM
};
661 :
// Returns a human-readable name for {val}: internal opcodes get an
// "Internal"-prefixed name, everything else falls through to the regular
// wasm opcode name table.
const char* OpcodeName(uint32_t val) {
  switch (val) {
#define DECL_INTERNAL_CASE(name, value) \
  case kInternal##name:                 \
    return "Internal" #name;
    FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_CASE)
#undef DECL_INTERNAL_CASE
  }
  return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(val));
}
672 :
// Unwrap a wasm to js wrapper, return the callable heap object.
// If the wrapper would throw a TypeError, return a null handle.
Handle<HeapObject> UnwrapWasmToJSWrapper(Isolate* isolate,
                                         Handle<Code> js_wrapper) {
  DCHECK_EQ(Code::WASM_TO_JS_FUNCTION, js_wrapper->kind());
  // Scan the wrapper's relocation info for embedded heap objects; the
  // imported callable is found as the (single) callable object among them.
  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  for (RelocIterator it(*js_wrapper, mask); !it.done(); it.next()) {
    HeapObject* obj = it.rinfo()->target_object();
    if (!obj->IsCallable()) continue;
#ifdef DEBUG
    // There should only be this one reference to a callable object.
    for (it.next(); !it.done(); it.next()) {
      HeapObject* other = it.rinfo()->target_object();
      DCHECK(!other->IsCallable());
    }
#endif
    return handle(obj, isolate);
  }
  // If we did not find a callable object, then there must be a reference to
  // the WasmThrowTypeError runtime function.
  // TODO(clemensh): Check that this is the case.
  return Handle<HeapObject>::null();
}
697 : class SideTable;
698 :
// Code and metadata needed to execute a function.
struct InterpreterCode {
  const WasmFunction* function;  // wasm function
  BodyLocalDecls locals;         // local declarations
  const byte* orig_start;        // start of original code
  const byte* orig_end;          // end of original code
  byte* start;                   // start of (maybe altered) code
  byte* end;                     // end of (maybe altered) code
  SideTable* side_table;         // precomputed side table for control flow.

  // Returns a pointer into the (possibly altered) code at offset {pc}.
  // NOTE(review): no bounds check — callers must keep pc within [0, end-start].
  const byte* at(pc_t pc) { return start + pc; }
};
711 :
712 : // A helper class to compute the control transfers for each bytecode offset.
713 : // Control transfers allow Br, BrIf, BrTable, If, Else, and End bytecodes to
714 : // be directly executed without the need to dynamically track blocks.
class SideTable : public ZoneObject {
 public:
  // Maps bytecode offsets of control-transfer instructions to their
  // precomputed {pc_diff, sp_diff, target_arity} entries.
  ControlTransferMap map_;
  // Maximum operand stack height reached anywhere in the function body.
  uint32_t max_stack_height_;

  // Walks the function body once, simulating stack heights, and fills {map_}
  // with a control-transfer entry for every branch source.
  SideTable(Zone* zone, const WasmModule* module, InterpreterCode* code)
      : map_(zone), max_stack_height_(0) {
    // Create a zone for all temporary objects.
    Zone control_transfer_zone(zone->allocator(), ZONE_NAME);

    // Represents a control flow label.
    class CLabel : public ZoneObject {
      explicit CLabel(Zone* zone, uint32_t target_stack_height, uint32_t arity)
          : target(nullptr),
            target_stack_height(target_stack_height),
            arity(arity),
            refs(zone) {}

     public:
      struct Ref {
        const byte* from_pc;        // branch source
        const uint32_t stack_height;  // stack height at the branch source
      };
      const byte* target;           // pc this label resolves to (once bound)
      uint32_t target_stack_height; // stack height at the label's target
      const uint32_t arity;         // number of values transferred on branch
      // TODO(clemensh): Fix ZoneAllocator and make this ZoneVector<const Ref>.
      ZoneVector<Ref> refs;

      static CLabel* New(Zone* zone, uint32_t stack_height, uint32_t arity) {
        return new (zone) CLabel(zone, stack_height, arity);
      }

      // Bind this label to the given PC.
      void Bind(const byte* pc) {
        DCHECK_NULL(target);
        target = pc;
      }

      // Reference this label from the given location.
      void Ref(const byte* from_pc, uint32_t stack_height) {
        // Target being bound before a reference means this is a loop.
        DCHECK_IMPLIES(target, *target == kExprLoop);
        refs.push_back({from_pc, stack_height});
      }

      // Resolve all recorded references into ControlTransferEntry records.
      void Finish(ControlTransferMap* map, const byte* start) {
        DCHECK_NOT_NULL(target);
        for (auto ref : refs) {
          size_t offset = static_cast<size_t>(ref.from_pc - start);
          auto pcdiff = static_cast<pcdiff_t>(target - ref.from_pc);
          DCHECK_GE(ref.stack_height, target_stack_height);
          spdiff_t spdiff =
              static_cast<spdiff_t>(ref.stack_height - target_stack_height);
          TRACE("control transfer @%zu: Δpc %d, stack %u->%u = -%u\n", offset,
                pcdiff, ref.stack_height, target_stack_height, spdiff);
          ControlTransferEntry& entry = (*map)[offset];
          entry.pc_diff = pcdiff;
          entry.sp_diff = spdiff;
          entry.target_arity = arity;
        }
      }
    };

    // An entry in the control stack.
    struct Control {
      const byte* pc;
      CLabel* end_label;
      CLabel* else_label;  // only set for if/else constructs

      void Finish(ControlTransferMap* map, const byte* start) {
        end_label->Finish(map, start);
        if (else_label) else_label->Finish(map, start);
      }
    };

    // Compute the ControlTransfer map.
    // This algorithm maintains a stack of control constructs similar to the
    // AST decoder. The {control_stack} allows matching {br,br_if,br_table}
    // bytecodes with their target, as well as determining whether the current
    // bytecodes are within the true or false block of an else.
    ZoneVector<Control> control_stack(&control_transfer_zone);
    uint32_t stack_height = 0;
    uint32_t func_arity =
        static_cast<uint32_t>(code->function->sig->return_count());
    // The implicit function-level block; branching to depth 0 from the top
    // level returns from the function.
    CLabel* func_label =
        CLabel::New(&control_transfer_zone, stack_height, func_arity);
    control_stack.push_back({code->orig_start, func_label, nullptr});
    for (BytecodeIterator i(code->orig_start, code->orig_end, &code->locals);
         i.has_next(); i.next()) {
      WasmOpcode opcode = i.current();
      auto stack_effect =
          StackEffect(module, code->function->sig, i.pc(), i.end());
      TRACE("@%u: control %s (sp %d - %d + %d)\n", i.pc_offset(),
            WasmOpcodes::OpcodeName(opcode), stack_height, stack_effect.first,
            stack_effect.second);
      DCHECK_GE(stack_height, stack_effect.first);
      DCHECK_GE(kMaxUInt32, static_cast<uint64_t>(stack_height) -
                                stack_effect.first + stack_effect.second);
      stack_height = stack_height - stack_effect.first + stack_effect.second;
      if (stack_height > max_stack_height_) max_stack_height_ = stack_height;
      switch (opcode) {
        case kExprBlock:
        case kExprLoop: {
          bool loop = opcode == kExprLoop;
          BlockTypeOperand<false> operand(&i, i.pc());
          TRACE("control @%u: %s, arity %d\n", i.pc_offset(),
                loop ? "Loop" : "Block", operand.arity);
          CLabel* label =
              CLabel::New(&control_transfer_zone, stack_height, operand.arity);
          control_stack.push_back({i.pc(), label, nullptr});
          // A loop branches backwards to its own start, so its label can be
          // bound immediately.
          if (loop) label->Bind(i.pc());
          break;
        }
        case kExprIf: {
          TRACE("control @%u: If\n", i.pc_offset());
          BlockTypeOperand<false> operand(&i, i.pc());
          CLabel* end_label =
              CLabel::New(&control_transfer_zone, stack_height, operand.arity);
          CLabel* else_label =
              CLabel::New(&control_transfer_zone, stack_height, 0);
          control_stack.push_back({i.pc(), end_label, else_label});
          // The if bytecode itself branches to the else label when the
          // condition is false.
          else_label->Ref(i.pc(), stack_height);
          break;
        }
        case kExprElse: {
          Control* c = &control_stack.back();
          TRACE("control @%u: Else\n", i.pc_offset());
          // Falling off the true arm jumps over the else arm to the end.
          c->end_label->Ref(i.pc(), stack_height);
          DCHECK_NOT_NULL(c->else_label);
          c->else_label->Bind(i.pc() + 1);
          c->else_label->Finish(&map_, code->orig_start);
          c->else_label = nullptr;
          DCHECK_GE(stack_height, c->end_label->target_stack_height);
          stack_height = c->end_label->target_stack_height;
          break;
        }
        case kExprEnd: {
          Control* c = &control_stack.back();
          TRACE("control @%u: End\n", i.pc_offset());
          // Only loops have bound labels.
          DCHECK_IMPLIES(c->end_label->target, *c->pc == kExprLoop);
          if (!c->end_label->target) {
            // An if without an else jumps straight to the end when false.
            if (c->else_label) c->else_label->Bind(i.pc());
            c->end_label->Bind(i.pc() + 1);
          }
          c->Finish(&map_, code->orig_start);
          DCHECK_GE(stack_height, c->end_label->target_stack_height);
          stack_height =
              c->end_label->target_stack_height + c->end_label->arity;
          control_stack.pop_back();
          break;
        }
        case kExprBr: {
          BreakDepthOperand<false> operand(&i, i.pc());
          TRACE("control @%u: Br[depth=%u]\n", i.pc_offset(), operand.depth);
          Control* c = &control_stack[control_stack.size() - operand.depth - 1];
          c->end_label->Ref(i.pc(), stack_height);
          break;
        }
        case kExprBrIf: {
          BreakDepthOperand<false> operand(&i, i.pc());
          TRACE("control @%u: BrIf[depth=%u]\n", i.pc_offset(), operand.depth);
          Control* c = &control_stack[control_stack.size() - operand.depth - 1];
          c->end_label->Ref(i.pc(), stack_height);
          break;
        }
        case kExprBrTable: {
          BranchTableOperand<false> operand(&i, i.pc());
          BranchTableIterator<false> iterator(&i, operand);
          TRACE("control @%u: BrTable[count=%u]\n", i.pc_offset(),
                operand.table_count);
          // Each table entry gets its own reference at a distinct pc offset
          // (i.pc() + j) so each entry resolves independently.
          while (iterator.has_next()) {
            uint32_t j = iterator.cur_index();
            uint32_t target = iterator.next();
            Control* c = &control_stack[control_stack.size() - target - 1];
            c->end_label->Ref(i.pc() + j, stack_height);
          }
          break;
        }
        default: {
          break;
        }
      }
    }
    DCHECK_EQ(0, control_stack.size());
    DCHECK_EQ(func_arity, stack_height);
  }

  // Looks up the precomputed transfer entry for the branch at offset {from};
  // the entry must exist (it was recorded during construction).
  ControlTransferEntry& Lookup(pc_t from) {
    auto result = map_.find(from);
    DCHECK(result != map_.end());
    return result->second;
  }
};
910 :
// Result of attempting to call a function that is external to the currently
// executing interpreter frame (an import or an indirect call target).
struct ExternalCallResult {
  enum Type {
    // The function should be executed inside this interpreter.
    INTERNAL,
    // For indirect calls: Table or function does not exist.
    INVALID_FUNC,
    // For indirect calls: Signature does not match expected signature.
    SIGNATURE_MISMATCH,
    // The function was executed and returned normally.
    EXTERNAL_RETURNED,
    // The function was executed, threw an exception, and the stack was unwound.
    EXTERNAL_UNWOUND
  };
  Type type;
  // If type is INTERNAL, this field holds the function to call internally.
  InterpreterCode* interpreter_code;

  // Implicit by design: lets callers return a bare Type for non-INTERNAL
  // results.
  ExternalCallResult(Type type) : type(type) {  // NOLINT
    DCHECK_NE(INTERNAL, type);
  }
  // INTERNAL results must carry the code to execute.
  ExternalCallResult(Type type, InterpreterCode* code)
      : type(type), interpreter_code(code) {
    DCHECK_EQ(INTERNAL, type);
  }
};
936 :
// The main storage for interpreter code. It maps {WasmFunction} to the
// metadata needed to execute each function.
class CodeMap {
  Zone* zone_;
  const WasmModule* module_;
  ZoneVector<InterpreterCode> interpreter_code_;
  // Global handle to the wasm instance.
  Handle<WasmInstanceObject> instance_;
  // Global handle to array of unwrapped imports.
  Handle<FixedArray> imported_functions_;
  // Map from WASM_TO_JS wrappers to unwrapped imports (indexes into
  // imported_functions_).
  IdentityMap<int, ZoneAllocationPolicy> unwrapped_imports_;

 public:
  // Registers every function of {module}: imported functions get a null code
  // range (their bodies are empty), others point into the module bytes at
  // their code offsets.
  CodeMap(Isolate* isolate, const WasmModule* module,
          const uint8_t* module_start, Zone* zone)
      : zone_(zone),
        module_(module),
        interpreter_code_(zone),
        unwrapped_imports_(isolate->heap(), ZoneAllocationPolicy(zone)) {
    if (module == nullptr) return;
    interpreter_code_.reserve(module->functions.size());
    for (const WasmFunction& function : module->functions) {
      if (function.imported) {
        DCHECK_EQ(function.code_start_offset, function.code_end_offset);
        AddFunction(&function, nullptr, nullptr);
      } else {
        const byte* code_start = module_start + function.code_start_offset;
        const byte* code_end = module_start + function.code_end_offset;
        AddFunction(&function, code_start, code_end);
      }
    }
  }

  ~CodeMap() {
    // Destroy the global handles.
    // Cast the location, not the handle, because the handle cast might access
    // the object behind the handle.
    GlobalHandles::Destroy(reinterpret_cast<Object**>(instance_.location()));
    GlobalHandles::Destroy(
        reinterpret_cast<Object**>(imported_functions_.location()));
  }

  const WasmModule* module() const { return module_; }
  bool has_instance() const { return !instance_.is_null(); }
  Handle<WasmInstanceObject> instance() const {
    DCHECK(has_instance());
    return instance_;
  }
  MaybeHandle<WasmInstanceObject> maybe_instance() const {
    return has_instance() ? instance_ : MaybeHandle<WasmInstanceObject>();
  }

  void SetInstanceObject(WasmInstanceObject* instance) {
    // Only set the instance once (otherwise we have to destroy the global
    // handle first).
    DCHECK(instance_.is_null());
    DCHECK_EQ(instance->module(), module_);
    instance_ = instance->GetIsolate()->global_handles()->Create(instance);
  }

  // Looks up the compiled Code object for an imported function via the
  // instance's code table. Requires an instance to have been set.
  Code* GetImportedFunction(uint32_t function_index) {
    DCHECK(!instance_.is_null());
    DCHECK_GT(module_->num_imported_functions, function_index);
    FixedArray* code_table = instance_->compiled_module()->ptr_to_code_table();
    return Code::cast(code_table->get(static_cast<int>(function_index)));
  }

  InterpreterCode* GetCode(const WasmFunction* function) {
    InterpreterCode* code = GetCode(function->func_index);
    DCHECK_EQ(function, code->function);
    return code;
  }

  // Returns the (lazily preprocessed) interpreter code for {function_index}.
  InterpreterCode* GetCode(uint32_t function_index) {
    DCHECK_LT(function_index, interpreter_code_.size());
    return Preprocess(&interpreter_code_[function_index]);
  }

  // Resolves an indirect call target; returns nullptr if the table index,
  // the entry index, or the stored function index is out of bounds.
  InterpreterCode* GetIndirectCode(uint32_t table_index, uint32_t entry_index) {
    if (table_index >= module_->function_tables.size()) return nullptr;
    const WasmIndirectFunctionTable* table =
        &module_->function_tables[table_index];
    if (entry_index >= table->values.size()) return nullptr;
    uint32_t index = table->values[entry_index];
    if (index >= interpreter_code_.size()) return nullptr;
    return GetCode(index);
  }

  // Ensures the side table (control-transfer map) exists for non-imported
  // code; it is computed at most once per function and cached on the entry.
  InterpreterCode* Preprocess(InterpreterCode* code) {
    DCHECK_EQ(code->function->imported, code->start == nullptr);
    if (!code->side_table && code->start) {
      // Compute the control targets map and the local declarations.
      code->side_table = new (zone_) SideTable(zone_, module_, code);
    }
    return code;
  }

  // Appends the metadata entry for {function}. Must be called in func_index
  // order (checked by the DCHECK below).
  void AddFunction(const WasmFunction* function, const byte* code_start,
                   const byte* code_end) {
    InterpreterCode code = {
        function, BodyLocalDecls(zone_),         code_start,
        code_end, const_cast<byte*>(code_start), const_cast<byte*>(code_end),
        nullptr};

    DCHECK_EQ(interpreter_code_.size(), function->func_index);
    interpreter_code_.push_back(code);
  }

  // Replaces the code range of an already-registered function and
  // invalidates its cached side table (recomputed by Preprocess below).
  void SetFunctionCode(const WasmFunction* function, const byte* start,
                       const byte* end) {
    DCHECK_LT(function->func_index, interpreter_code_.size());
    InterpreterCode* code = &interpreter_code_[function->func_index];
    DCHECK_EQ(function, code->function);
    code->orig_start = start;
    code->orig_end = end;
    code->start = const_cast<byte*>(start);
    code->end = const_cast<byte*>(end);
    code->side_table = nullptr;
    Preprocess(code);
  }

  // Returns a callable object if the imported function has a JS-compatible
  // signature, or a null handle otherwise.
  Handle<HeapObject> GetCallableObjectForJSImport(Isolate* isolate,
                                                  Handle<Code> code) {
    DCHECK_EQ(Code::WASM_TO_JS_FUNCTION, code->kind());
    int* unwrapped_index = unwrapped_imports_.Find(code);
    if (unwrapped_index) {
      // Cache hit: return the previously unwrapped callable.
      return handle(
          HeapObject::cast(imported_functions_->get(*unwrapped_index)),
          isolate);
    }
    Handle<HeapObject> called_obj = UnwrapWasmToJSWrapper(isolate, code);
    if (!called_obj.is_null()) {
      // Cache the unwrapped callable object.
      if (imported_functions_.is_null()) {
        // This is the first call to an imported function. Allocate the
        // FixedArray to cache unwrapped objects.
        constexpr int kInitialCacheSize = 8;
        Handle<FixedArray> new_imported_functions =
            isolate->factory()->NewFixedArray(kInitialCacheSize, TENURED);
        // First entry: Number of occupied slots.
        new_imported_functions->set(0, Smi::kZero);
        imported_functions_ =
            isolate->global_handles()->Create(*new_imported_functions);
      }
      int this_idx = Smi::cast(imported_functions_->get(0))->value() + 1;
      if (this_idx == imported_functions_->length()) {
        // Cache is full: grow the array by 50% (this_idx / 2 extra slots).
        Handle<FixedArray> new_imported_functions =
            isolate->factory()->CopyFixedArrayAndGrow(imported_functions_,
                                                      this_idx / 2, TENURED);
        // Update the existing global handle:
        *imported_functions_.location() = *new_imported_functions;
      }
      DCHECK_GT(imported_functions_->length(), this_idx);
      DCHECK(imported_functions_->get(this_idx)->IsUndefined(isolate));
      imported_functions_->set(0, Smi::FromInt(this_idx));
      imported_functions_->set(this_idx, *called_obj);
      unwrapped_imports_.Set(code, this_idx);
    }
    return called_obj;
  }
};
1102 :
1103 9210 : Handle<Object> WasmValToNumber(Factory* factory, WasmVal val,
1104 : wasm::ValueType type) {
1105 9210 : switch (type) {
1106 : case kWasmI32:
1107 9195 : return factory->NewNumberFromInt(val.to<int32_t>());
1108 : case kWasmI64:
1109 : // wasm->js and js->wasm is illegal for i64 type.
1110 0 : UNREACHABLE();
1111 : return Handle<Object>::null();
1112 : case kWasmF32:
1113 0 : return factory->NewNumber(val.to<float>());
1114 : case kWasmF64:
1115 15 : return factory->NewNumber(val.to<double>());
1116 : default:
1117 : // TODO(wasm): Implement simd.
1118 0 : UNIMPLEMENTED();
1119 : return Handle<Object>::null();
1120 : }
1121 : }
1122 :
1123 : // Convert JS value to WebAssembly, spec here:
1124 : // https://github.com/WebAssembly/design/blob/master/JS.md#towebassemblyvalue
1125 9060 : WasmVal ToWebAssemblyValue(Isolate* isolate, Handle<Object> value,
1126 : wasm::ValueType type) {
1127 9060 : switch (type) {
1128 : case kWasmI32: {
1129 : MaybeHandle<Object> maybe_i32 = Object::ToInt32(isolate, value);
1130 : // TODO(clemensh): Handle failure here (unwind).
1131 : int32_t value;
1132 9045 : CHECK(maybe_i32.ToHandleChecked()->ToInt32(&value));
1133 9045 : return WasmVal(value);
1134 : }
1135 : case kWasmI64:
1136 : // If the signature contains i64, a type error was thrown before.
1137 0 : UNREACHABLE();
1138 : case kWasmF32: {
1139 15 : MaybeHandle<Object> maybe_number = Object::ToNumber(value);
1140 : // TODO(clemensh): Handle failure here (unwind).
1141 : return WasmVal(
1142 15 : static_cast<float>(maybe_number.ToHandleChecked()->Number()));
1143 : }
1144 : case kWasmF64: {
1145 0 : MaybeHandle<Object> maybe_number = Object::ToNumber(value);
1146 : // TODO(clemensh): Handle failure here (unwind).
1147 0 : return WasmVal(maybe_number.ToHandleChecked()->Number());
1148 : }
1149 : default:
1150 : // TODO(wasm): Handle simd.
1151 0 : UNIMPLEMENTED();
1152 : return WasmVal();
1153 : }
1154 : }
1155 :
1156 : // Responsible for executing code directly.
1157 0 : class ThreadImpl {
  // Marks the boundary of one interpreter activation: the frame index (fp)
  // and value-stack height (sp) recorded when the activation started.
  struct Activation {
    uint32_t fp;
    sp_t sp;
    Activation(uint32_t fp, sp_t sp) : fp(fp), sp(sp) {}
  };
1163 :
1164 : public:
  // The thread borrows {zone}, {codemap} and {instance} from its creator;
  // it owns none of them.
  ThreadImpl(Zone* zone, CodeMap* codemap, WasmInstance* instance)
      : codemap_(codemap),
        instance_(instance),
        zone_(zone),
        frames_(zone),
        activations_(zone) {}
1171 :
1172 : //==========================================================================
1173 : // Implementation of public interface for WasmInterpreter::Thread.
1174 : //==========================================================================
1175 :
  WasmInterpreter::State state() { return state_; }

  // Pushes {args} and an initial frame for {function}, making this thread
  // ready to Run(). The current activation must have no frames yet.
  void InitFrame(const WasmFunction* function, WasmVal* args) {
    DCHECK_EQ(current_activation().fp, frames_.size());
    InterpreterCode* code = codemap()->GetCode(function);
    size_t num_params = function->sig->parameter_count();
    EnsureStackSpace(num_params);
    Push(args, num_params);
    PushFrame(code);
  }
1186 :
  // Executes up to {num_steps} instructions starting at the pc of the
  // topmost frame (-1 means unbounded). Returns the resulting thread state.
  WasmInterpreter::State Run(int num_steps = -1) {
    DCHECK(state_ == WasmInterpreter::STOPPED ||
           state_ == WasmInterpreter::PAUSED);
    DCHECK(num_steps == -1 || num_steps > 0);
    if (num_steps == -1) {
      TRACE("  => Run()\n");
    } else if (num_steps == 1) {
      TRACE("  => Step()\n");
    } else {
      TRACE("  => Run(%d)\n", num_steps);
    }
    state_ = WasmInterpreter::RUNNING;
    Execute(frames_.back().code, frames_.back().pc, num_steps);
    // If state_ is STOPPED, the current activation must be fully unwound.
    DCHECK_IMPLIES(state_ == WasmInterpreter::STOPPED,
                   current_activation().fp == frames_.size());
    return state_;
  }
1205 :
  void Pause() { UNIMPLEMENTED(); }

  // Drops all frames and stack values and returns to the STOPPED state.
  void Reset() {
    TRACE("----- RESET -----\n");
    sp_ = stack_start_;
    frames_.clear();
    state_ = WasmInterpreter::STOPPED;
    trap_reason_ = kTrapCount;
    possible_nondeterminism_ = false;
  }

  int GetFrameCount() {
    DCHECK_GE(kMaxInt, frames_.size());
    return static_cast<int>(frames_.size());
  }

  // Returns the {index}th return value of the finished activation, or a
  // dummy value (0xdeadbeef) if the thread trapped.
  WasmVal GetReturnValue(uint32_t index) {
    if (state_ == WasmInterpreter::TRAPPED) return WasmVal(0xdeadbeef);
    DCHECK_EQ(WasmInterpreter::FINISHED, state_);
    Activation act = current_activation();
    // Current activation must be finished.
    DCHECK_EQ(act.fp, frames_.size());
    return GetStackValue(act.sp + index);
  }

  WasmVal GetStackValue(sp_t index) {
    DCHECK_GT(StackHeight(), index);
    return stack_start_[index];
  }

  void SetStackValue(sp_t index, WasmVal value) {
    DCHECK_GT(StackHeight(), index);
    stack_start_[index] = value;
  }

  TrapReason GetTrapReason() { return trap_reason_; }

  pc_t GetBreakpointPc() { return break_pc_; }

  bool PossibleNondeterminism() { return possible_nondeterminism_; }

  uint64_t NumInterpretedCalls() { return num_interpreted_calls_; }

  void AddBreakFlags(uint8_t flags) { break_flags_ |= flags; }

  void ClearBreakFlags() { break_flags_ = WasmInterpreter::BreakFlag::None; }

  uint32_t NumActivations() {
    return static_cast<uint32_t>(activations_.size());
  }
1256 :
  // Opens a new activation on top of the current frames and stack and
  // returns its id (its index in activations_).
  uint32_t StartActivation() {
    TRACE("----- START ACTIVATION %zu -----\n", activations_.size());
    // If you use activations, use them consistently:
    DCHECK_IMPLIES(activations_.empty(), frames_.empty());
    DCHECK_IMPLIES(activations_.empty(), StackHeight() == 0);
    uint32_t activation_id = static_cast<uint32_t>(activations_.size());
    activations_.emplace_back(static_cast<uint32_t>(frames_.size()),
                              StackHeight());
    state_ = WasmInterpreter::STOPPED;
    return activation_id;
  }

  // Closes the topmost activation ({id} is verified in debug mode only) and
  // resets the value stack to the height recorded when it was started.
  void FinishActivation(uint32_t id) {
    TRACE("----- FINISH ACTIVATION %zu -----\n", activations_.size() - 1);
    DCHECK_LT(0, activations_.size());
    DCHECK_EQ(activations_.size() - 1, id);
    // Stack height must match the start of this activation (otherwise unwind
    // first).
    DCHECK_EQ(activations_.back().fp, frames_.size());
    DCHECK_LE(activations_.back().sp, StackHeight());
    sp_ = stack_start_ + activations_.back().sp;
    activations_.pop_back();
  }

  // Returns the index of the first frame belonging to activation {id}.
  uint32_t ActivationFrameBase(uint32_t id) {
    DCHECK_GT(activations_.size(), id);
    return activations_[id].fp;
  }
1285 :
  // Handle a thrown exception. Returns whether the exception was handled inside
  // the current activation. Unwinds the interpreted stack accordingly.
  // Currently always unwinds (wasm-level exception handlers do not exist yet).
  WasmInterpreter::Thread::ExceptionHandlingResult HandleException(
      Isolate* isolate) {
    DCHECK(isolate->has_pending_exception());
    // TODO(wasm): Add wasm exception handling (would return true).
    USE(isolate->pending_exception());
    TRACE("----- UNWIND -----\n");
    DCHECK_LT(0, activations_.size());
    Activation& act = activations_.back();
    DCHECK_LE(act.fp, frames_.size());
    frames_.resize(act.fp);
    DCHECK_LE(act.sp, StackHeight());
    sp_ = stack_start_ + act.sp;
    state_ = WasmInterpreter::STOPPED;
    return WasmInterpreter::Thread::UNWOUND;
  }
1303 :
 private:
  // Entries on the stack of functions being evaluated.
  struct Frame {
    InterpreterCode* code;
    pc_t pc;
    sp_t sp;

    // Limit of parameters.
    sp_t plimit() { return sp + code->function->sig->parameter_count(); }
    // Limit of locals.
    sp_t llimit() { return plimit() + code->locals.type_list.size(); }
  };

  // Snapshot of a control construct (pc, stack height, frame index, arity).
  struct Block {
    pc_t pc;
    sp_t sp;
    size_t fp;
    unsigned arity;
  };

  friend class InterpretedFrameImpl;

  CodeMap* codemap_;
  WasmInstance* instance_;
  Zone* zone_;
  WasmVal* stack_start_ = nullptr;  // Start of allocated stack space.
  WasmVal* stack_limit_ = nullptr;  // End of allocated stack space.
  WasmVal* sp_ = nullptr;           // Current stack pointer.
  ZoneVector<Frame> frames_;
  WasmInterpreter::State state_ = WasmInterpreter::STOPPED;
  pc_t break_pc_ = kInvalidPc;
  TrapReason trap_reason_ = kTrapCount;
  bool possible_nondeterminism_ = false;
  uint8_t break_flags_ = 0;  // a combination of WasmInterpreter::BreakFlag
  uint64_t num_interpreted_calls_ = 0;
  // Store the stack height of each activation (for unwind and frame
  // inspection).
  ZoneVector<Activation> activations_;

  CodeMap* codemap() { return codemap_; }
  WasmInstance* instance() { return instance_; }
  const WasmModule* module() { return instance_->module; }
1346 :
  // Transitions the thread into the TRAPPED state, recording the trap reason
  // and committing {pc} to the top frame for later inspection.
  void DoTrap(TrapReason trap, pc_t pc) {
    state_ = WasmInterpreter::TRAPPED;
    trap_reason_ = trap;
    CommitPc(pc);
  }
1352 :
  // Push a frame with arguments already on the stack.
  void PushFrame(InterpreterCode* code) {
    DCHECK_NOT_NULL(code);
    DCHECK_NOT_NULL(code->side_table);
    // Reserve the maximum stack space this function can use up-front, so no
    // further capacity checks are needed while executing it.
    EnsureStackSpace(code->side_table->max_stack_height_ +
                     code->locals.type_list.size());

    ++num_interpreted_calls_;
    size_t arity = code->function->sig->parameter_count();
    // The parameters will overlap the arguments already on the stack.
    DCHECK_GE(StackHeight(), arity);
    frames_.push_back({code, 0, StackHeight() - arity});
    // The frame's pc starts just past the encoded local declarations.
    frames_.back().pc = InitLocals(code);
    TRACE("  => PushFrame #%zu (#%u @%zu)\n", frames_.size() - 1,
          code->function->func_index, frames_.back().pc);
  }
1369 :
  // Pushes a zero value of the matching type for each declared local and
  // returns the byte offset of the first real opcode (past the encoded
  // local declarations).
  pc_t InitLocals(InterpreterCode* code) {
    for (auto p : code->locals.type_list) {
      WasmVal val;
      switch (p) {
#define CASE_TYPE(wasm, ctype)            \
  case kWasm##wasm:                       \
    val = WasmVal(static_cast<ctype>(0)); \
    break;
        WASM_CTYPES(CASE_TYPE)
#undef CASE_TYPE
        default:
          UNREACHABLE();
          break;
      }
      Push(val);
    }
    return code->locals.encoded_size;
  }
1388 :
  // Saves {pc} into the topmost frame (done before calls, traps, and other
  // points where the frame may be inspected).
  void CommitPc(pc_t pc) {
    DCHECK(!frames_.empty());
    frames_.back().pc = pc;
  }

  // Returns true (and clears break_pc_) when {pc} is the breakpoint we are
  // resuming from, so the original instruction gets executed once.
  bool SkipBreakpoint(InterpreterCode* code, pc_t pc) {
    if (pc == break_pc_) {
      // Skip the previously hit breakpoint when resuming.
      break_pc_ = kInvalidPc;
      return true;
    }
    return false;
  }

  // Byte delta from {pc} to its branch target, as recorded in the side table.
  int LookupTargetDelta(InterpreterCode* code, pc_t pc) {
    return static_cast<int>(code->side_table->Lookup(pc).pc_diff);
  }
1406 :
  // Executes a branch recorded in the side table for {pc}: transfers the
  // branch-arity values down the stack and returns the pc delta to the
  // target.
  int DoBreak(InterpreterCode* code, pc_t pc, size_t depth) {
    ControlTransferEntry& control_transfer_entry = code->side_table->Lookup(pc);
    DoStackTransfer(sp_ - control_transfer_entry.sp_diff,
                    control_transfer_entry.target_arity);
    return control_transfer_entry.pc_diff;
  }
1413 :
  // Given the pc of a call instruction in the caller, computes the pc of the
  // instruction following the call (where execution resumes after return).
  pc_t ReturnPc(Decoder* decoder, InterpreterCode* code, pc_t pc) {
    switch (code->orig_start[pc]) {
      case kExprCallFunction: {
        CallFunctionOperand<false> operand(decoder, code->at(pc));
        return pc + 1 + operand.length;
      }
      case kExprCallIndirect: {
        CallIndirectOperand<false> operand(decoder, code->at(pc));
        return pc + 1 + operand.length;
      }
      default:
        // Only calls can be return sites.
        UNREACHABLE();
        return 0;
    }
  }
1429 :
  // Pops the current frame, moving {arity} return values down the stack.
  // Returns false when this was the last frame of the activation (execution
  // finished); otherwise updates {code}/{pc}/{limit} to the caller's state
  // and returns true.
  bool DoReturn(Decoder* decoder, InterpreterCode** code, pc_t* pc, pc_t* limit,
                size_t arity) {
    DCHECK_GT(frames_.size(), 0);
    WasmVal* sp_dest = stack_start_ + frames_.back().sp;
    frames_.pop_back();
    if (frames_.size() == current_activation().fp) {
      // A return from the last frame terminates the execution.
      state_ = WasmInterpreter::FINISHED;
      DoStackTransfer(sp_dest, arity);
      TRACE("  => finish\n");
      return false;
    } else {
      // Return to caller frame.
      Frame* top = &frames_.back();
      *code = top->code;
      decoder->Reset((*code)->start, (*code)->end);
      *pc = ReturnPc(decoder, *code, top->pc);
      *limit = top->code->end - top->code->start;
      TRACE("  => Return to #%zu (#%u @%zu)\n", frames_.size() - 1,
            (*code)->function->func_index, *pc);
      DoStackTransfer(sp_dest, arity);
      return true;
    }
  }
1454 :
  // Returns true if the call was successful, false if the stack check failed
  // and the current activation was fully unwound.
  bool DoCall(Decoder* decoder, InterpreterCode* target, pc_t* pc,
              pc_t* limit) WARN_UNUSED_RESULT {
    // Save the resume pc in the caller, then switch to the callee.
    frames_.back().pc = *pc;
    PushFrame(target);
    if (!DoStackCheck()) return false;
    // The callee's pc was set by PushFrame to skip its local declarations.
    *pc = frames_.back().pc;
    *limit = target->end - target->start;
    decoder->Reset(target->start, target->end);
    return true;
  }
1467 :
1468 : // Copies {arity} values on the top of the stack down the stack to {dest},
1469 : // dropping the values in-between.
1470 3366389 : void DoStackTransfer(WasmVal* dest, size_t arity) {
1471 : // before: |---------------| pop_count | arity |
1472 : // ^ 0 ^ dest ^ sp_
1473 : //
1474 : // after: |---------------| arity |
1475 : // ^ 0 ^ sp_
1476 : DCHECK_LE(dest, sp_);
1477 : DCHECK_LE(dest + arity, sp_);
1478 3366389 : if (arity) memcpy(dest, sp_ - arity, arity * sizeof(*sp_));
1479 3366389 : sp_ = dest + arity;
1480 3366389 : }
1481 :
1482 : template <typename mtype>
1483 : inline bool BoundsCheck(uint32_t mem_size, uint32_t offset, uint32_t index) {
1484 : return sizeof(mtype) <= mem_size && offset <= mem_size - sizeof(mtype) &&
1485 141549 : index <= mem_size - sizeof(mtype) - offset;
1486 : }
1487 :
  // Executes a load: pops the index, bounds-checks the access, reads a
  // little-endian mtype from memory and pushes it converted to ctype.
  // Traps and returns false on out-of-bounds; {len} receives the full
  // encoded instruction length.
  template <typename ctype, typename mtype>
  bool ExecuteLoad(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len) {
    MemoryAccessOperand<false> operand(decoder, code->at(pc), sizeof(ctype));
    uint32_t index = Pop().to<uint32_t>();
    if (!BoundsCheck<mtype>(instance()->mem_size, operand.offset, index)) {
      DoTrap(kTrapMemOutOfBounds, pc);
      return false;
    }
    byte* addr = instance()->mem_start + operand.offset + index;
    WasmVal result(static_cast<ctype>(ReadLittleEndianValue<mtype>(addr)));

    Push(result);
    len = 1 + operand.length;
    return true;
  }
1503 :
  // Executes a store: pops the value and index, bounds-checks, and writes
  // the value converted to mtype as little-endian. Traps and returns false
  // on out-of-bounds; {len} receives the full encoded instruction length.
  template <typename ctype, typename mtype>
  bool ExecuteStore(Decoder* decoder, InterpreterCode* code, pc_t pc,
                    int& len) {
    MemoryAccessOperand<false> operand(decoder, code->at(pc), sizeof(ctype));
    WasmVal val = Pop();

    uint32_t index = Pop().to<uint32_t>();
    if (!BoundsCheck<mtype>(instance()->mem_size, operand.offset, index)) {
      DoTrap(kTrapMemOutOfBounds, pc);
      return false;
    }
    byte* addr = instance()->mem_start + operand.offset + index;
    WriteLittleEndianValue<mtype>(addr, static_cast<mtype>(val.to<ctype>()));
    len = 1 + operand.length;

    // Record that a NaN was stored, as a source of possible nondeterminism.
    if (std::is_same<float, ctype>::value) {
      possible_nondeterminism_ |= std::isnan(val.to<float>());
    } else if (std::is_same<double, ctype>::value) {
      possible_nondeterminism_ |= std::isnan(val.to<double>());
    }
    return true;
  }
1526 :
  // Check if our control stack (frames_) exceeds the limit. Trigger stack
  // overflow if it does, and unwinding the current frame.
  // Returns true if execution can continue, false if the current activation was
  // fully unwound.
  // Do call this function immediately *after* pushing a new frame. The pc of
  // the top frame will be reset to 0 if the stack check fails.
  bool DoStackCheck() WARN_UNUSED_RESULT {
    // Sum up the size of all dynamically growing structures.
    if (V8_LIKELY(frames_.size() <= kV8MaxWasmInterpretedStackSize)) {
      return true;
    }
    if (!codemap()->has_instance()) {
      // In test mode: Just abort.
      FATAL("wasm interpreter: stack overflow");
    }
    // The pc of the top frame is initialized to the first instruction. We reset
    // it to 0 here such that we report the same position as in compiled code.
    frames_.back().pc = 0;
    Isolate* isolate = codemap()->instance()->GetIsolate();
    HandleScope handle_scope(isolate);
    isolate->StackOverflow();
    // StackOverflow sets a pending exception; HandleException unwinds.
    return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
  }
1550 :
1551 9822326 : void Execute(InterpreterCode* code, pc_t pc, int max) {
1552 : DCHECK_NOT_NULL(code->side_table);
1553 : DCHECK(!frames_.empty());
1554 : // There must be enough space on the stack to hold the arguments, locals,
1555 : // and the value stack.
1556 : DCHECK_LE(code->function->sig->parameter_count() +
1557 : code->locals.type_list.size() +
1558 : code->side_table->max_stack_height_,
1559 : stack_limit_ - stack_start_ - frames_.back().sp);
1560 :
1561 3250317 : Decoder decoder(code->start, code->end);
1562 2789533 : pc_t limit = code->end - code->start;
1563 : bool hit_break = false;
1564 :
1565 : while (true) {
1566 : #define PAUSE_IF_BREAK_FLAG(flag) \
1567 : if (V8_UNLIKELY(break_flags_ & WasmInterpreter::BreakFlag::flag)) { \
1568 : hit_break = true; \
1569 : max = 0; \
1570 : }
1571 :
1572 : DCHECK_GT(limit, pc);
1573 : DCHECK_NOT_NULL(code->start);
1574 :
1575 : // Do first check for a breakpoint, in order to set hit_break correctly.
1576 : const char* skip = " ";
1577 17303719 : int len = 1;
1578 17303719 : byte opcode = code->start[pc];
1579 : byte orig = opcode;
1580 17303719 : if (V8_UNLIKELY(opcode == kInternalBreakpoint)) {
1581 9320 : orig = code->orig_start[pc];
1582 9320 : if (SkipBreakpoint(code, pc)) {
1583 : // skip breakpoint by switching on original code.
1584 : skip = "[skip] ";
1585 : } else {
1586 : TRACE("@%-3zu: [break] %-24s:", pc,
1587 : WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(orig)));
1588 : TraceValueStack();
1589 : TRACE("\n");
1590 : hit_break = true;
1591 8293 : break;
1592 : }
1593 : }
1594 :
1595 : // If max is 0, break. If max is positive (a limit is set), decrement it.
1596 17299059 : if (max == 0) break;
1597 17295426 : if (max > 0) --max;
1598 :
1599 : USE(skip);
1600 : TRACE("@%-3zu: %s%-24s:", pc, skip,
1601 : WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(orig)));
1602 : TraceValueStack();
1603 : TRACE("\n");
1604 :
1605 : #ifdef DEBUG
1606 : // Compute the stack effect of this opcode, and verify later that the
1607 : // stack was modified accordingly.
1608 : std::pair<uint32_t, uint32_t> stack_effect = wasm::StackEffect(
1609 : codemap_->module(), frames_.back().code->function->sig,
1610 : code->orig_start + pc, code->orig_end);
1611 : sp_t expected_new_stack_height =
1612 : StackHeight() - stack_effect.first + stack_effect.second;
1613 : #endif
1614 :
1615 17295426 : switch (orig) {
1616 : case kExprNop:
1617 : break;
1618 : case kExprBlock: {
1619 1980733 : BlockTypeOperand<false> operand(&decoder, code->at(pc));
1620 1980733 : len = 1 + operand.length;
1621 : break;
1622 : }
1623 : case kExprLoop: {
1624 48085 : BlockTypeOperand<false> operand(&decoder, code->at(pc));
1625 48085 : len = 1 + operand.length;
1626 : break;
1627 : }
1628 : case kExprIf: {
1629 29660 : BlockTypeOperand<false> operand(&decoder, code->at(pc));
1630 29660 : WasmVal cond = Pop();
1631 : bool is_true = cond.to<uint32_t>() != 0;
1632 29660 : if (is_true) {
1633 : // fall through to the true block.
1634 27988 : len = 1 + operand.length;
1635 : TRACE(" true => fallthrough\n");
1636 : } else {
1637 3344 : len = LookupTargetDelta(code, pc);
1638 : TRACE(" false => @%zu\n", pc + len);
1639 : }
1640 : break;
1641 : }
1642 : case kExprElse: {
1643 22351 : len = LookupTargetDelta(code, pc);
1644 : TRACE(" end => @%zu\n", pc + len);
1645 22351 : break;
1646 : }
1647 : case kExprSelect: {
1648 1519 : WasmVal cond = Pop();
1649 1519 : WasmVal fval = Pop();
1650 1519 : WasmVal tval = Pop();
1651 1519 : Push(cond.to<int32_t>() != 0 ? tval : fval);
1652 : break;
1653 : }
1654 : case kExprBr: {
1655 : BreakDepthOperand<false> operand(&decoder, code->at(pc));
1656 26127 : len = DoBreak(code, pc, operand.depth);
1657 : TRACE(" br => @%zu\n", pc + len);
1658 : break;
1659 : }
1660 : case kExprBrIf: {
1661 : BreakDepthOperand<false> operand(&decoder, code->at(pc));
1662 43764 : WasmVal cond = Pop();
1663 : bool is_true = cond.to<uint32_t>() != 0;
1664 43764 : if (is_true) {
1665 22379 : len = DoBreak(code, pc, operand.depth);
1666 : TRACE(" br_if => @%zu\n", pc + len);
1667 : } else {
1668 : TRACE(" false => fallthrough\n");
1669 21385 : len = 1 + operand.length;
1670 : }
1671 : break;
1672 : }
1673 : case kExprBrTable: {
1674 : BranchTableOperand<false> operand(&decoder, code->at(pc));
1675 : BranchTableIterator<false> iterator(&decoder, operand);
1676 776510 : uint32_t key = Pop().to<uint32_t>();
1677 : uint32_t depth = 0;
1678 388255 : if (key >= operand.table_count) key = operand.table_count;
1679 388255 : for (uint32_t i = 0; i <= key; i++) {
1680 : DCHECK(iterator.has_next());
1681 : depth = iterator.next();
1682 : }
1683 776510 : len = key + DoBreak(code, pc + key, static_cast<size_t>(depth));
1684 : TRACE(" br[%u] => @%zu\n", key, pc + key + len);
1685 : break;
1686 : }
1687 : case kExprReturn: {
1688 2929628 : size_t arity = code->function->sig->return_count();
1689 3170034 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
1690 14 : PAUSE_IF_BREAK_FLAG(AfterReturn);
1691 1144306 : continue;
1692 : }
1693 : case kExprUnreachable: {
1694 : return DoTrap(kTrapUnreachable, pc);
1695 : }
1696 : case kExprEnd: {
1697 : break;
1698 : }
1699 : case kExprI32Const: {
1700 571609 : ImmI32Operand<false> operand(&decoder, code->at(pc));
1701 571609 : Push(WasmVal(operand.value));
1702 571609 : len = 1 + operand.length;
1703 : break;
1704 : }
1705 : case kExprI64Const: {
1706 11585 : ImmI64Operand<false> operand(&decoder, code->at(pc));
1707 11585 : Push(WasmVal(operand.value));
1708 11585 : len = 1 + operand.length;
1709 : break;
1710 : }
1711 : case kExprF32Const: {
1712 : ImmF32Operand<false> operand(&decoder, code->at(pc));
1713 : Push(WasmVal(operand.value));
1714 343 : len = 1 + operand.length;
1715 : break;
1716 : }
1717 : case kExprF64Const: {
1718 : ImmF64Operand<false> operand(&decoder, code->at(pc));
1719 : Push(WasmVal(operand.value));
1720 1960 : len = 1 + operand.length;
1721 : break;
1722 : }
1723 : case kExprGetLocal: {
1724 : LocalIndexOperand<false> operand(&decoder, code->at(pc));
1725 5859788 : Push(GetStackValue(frames_.back().sp + operand.index));
1726 5859788 : len = 1 + operand.length;
1727 : break;
1728 : }
1729 : case kExprSetLocal: {
1730 : LocalIndexOperand<false> operand(&decoder, code->at(pc));
1731 12399 : WasmVal val = Pop();
1732 12399 : SetStackValue(frames_.back().sp + operand.index, val);
1733 12399 : len = 1 + operand.length;
1734 : break;
1735 : }
1736 : case kExprTeeLocal: {
1737 : LocalIndexOperand<false> operand(&decoder, code->at(pc));
1738 4158 : WasmVal val = Pop();
1739 4158 : SetStackValue(frames_.back().sp + operand.index, val);
1740 : Push(val);
1741 4158 : len = 1 + operand.length;
1742 : break;
1743 : }
1744 : case kExprDrop: {
1745 : Pop();
1746 : break;
1747 : }
1748 : case kExprCallFunction: {
1749 : CallFunctionOperand<false> operand(&decoder, code->at(pc));
1750 : InterpreterCode* target = codemap()->GetCode(operand.index);
1751 1153450 : if (target->function->imported) {
1752 9300 : CommitPc(pc);
1753 : ExternalCallResult result =
1754 9300 : CallImportedFunction(target->function->func_index);
1755 9300 : switch (result.type) {
1756 : case ExternalCallResult::INTERNAL:
1757 : // The import is a function of this instance. Call it directly.
1758 0 : target = result.interpreter_code;
1759 : DCHECK(!target->function->imported);
1760 0 : break;
1761 : case ExternalCallResult::INVALID_FUNC:
1762 : case ExternalCallResult::SIGNATURE_MISMATCH:
1763 : // Direct calls are checked statically.
1764 0 : UNREACHABLE();
1765 : case ExternalCallResult::EXTERNAL_RETURNED:
1766 9225 : PAUSE_IF_BREAK_FLAG(AfterCall);
1767 9225 : len = 1 + operand.length;
1768 9225 : break;
1769 : case ExternalCallResult::EXTERNAL_UNWOUND:
1770 75 : return;
1771 : }
1772 9225 : if (result.type != ExternalCallResult::INTERNAL) break;
1773 : }
1774 : // Execute an internal call.
1775 1144150 : if (!DoCall(&decoder, target, &pc, &limit)) return;
1776 1144135 : code = target;
1777 1144135 : PAUSE_IF_BREAK_FLAG(AfterCall);
1778 1144135 : continue; // don't bump pc
1779 : } break;
1780 : case kExprCallIndirect: {
1781 302 : CallIndirectOperand<false> operand(&decoder, code->at(pc));
1782 604 : uint32_t entry_index = Pop().to<uint32_t>();
1783 : // Assume only one table for now.
1784 : DCHECK_LE(module()->function_tables.size(), 1u);
1785 : ExternalCallResult result =
1786 302 : CallIndirectFunction(0, entry_index, operand.index);
1787 302 : switch (result.type) {
1788 : case ExternalCallResult::INTERNAL:
1789 : // The import is a function of this instance. Call it directly.
1790 157 : if (!DoCall(&decoder, result.interpreter_code, &pc, &limit))
1791 130 : return;
1792 157 : code = result.interpreter_code;
1793 157 : PAUSE_IF_BREAK_FLAG(AfterCall);
1794 157 : continue; // don't bump pc
1795 : case ExternalCallResult::INVALID_FUNC:
1796 78 : return DoTrap(kTrapFuncInvalid, pc);
1797 : case ExternalCallResult::SIGNATURE_MISMATCH:
1798 37 : return DoTrap(kTrapFuncSigMismatch, pc);
1799 : case ExternalCallResult::EXTERNAL_RETURNED:
1800 15 : PAUSE_IF_BREAK_FLAG(AfterCall);
1801 15 : len = 1 + operand.length;
1802 15 : break;
1803 : case ExternalCallResult::EXTERNAL_UNWOUND:
1804 : return;
1805 : }
1806 15 : } break;
1807 : case kExprGetGlobal: {
1808 : GlobalIndexOperand<false> operand(&decoder, code->at(pc));
1809 800 : const WasmGlobal* global = &module()->globals[operand.index];
1810 400 : byte* ptr = instance()->globals_start + global->offset;
1811 : WasmVal val;
1812 400 : switch (global->type) {
1813 : #define CASE_TYPE(wasm, ctype) \
1814 : case kWasm##wasm: \
1815 : val = WasmVal(*reinterpret_cast<ctype*>(ptr)); \
1816 : break;
1817 226 : WASM_CTYPES(CASE_TYPE)
1818 : #undef CASE_TYPE
1819 : default:
1820 0 : UNREACHABLE();
1821 : }
1822 : Push(val);
1823 400 : len = 1 + operand.length;
1824 : break;
1825 : }
1826 : case kExprSetGlobal: {
1827 : GlobalIndexOperand<false> operand(&decoder, code->at(pc));
1828 568 : const WasmGlobal* global = &module()->globals[operand.index];
1829 284 : byte* ptr = instance()->globals_start + global->offset;
1830 284 : WasmVal val = Pop();
1831 284 : switch (global->type) {
1832 : #define CASE_TYPE(wasm, ctype) \
1833 : case kWasm##wasm: \
1834 : *reinterpret_cast<ctype*>(ptr) = val.to<ctype>(); \
1835 : break;
1836 284 : WASM_CTYPES(CASE_TYPE)
1837 : #undef CASE_TYPE
1838 : default:
1839 0 : UNREACHABLE();
1840 : }
1841 284 : len = 1 + operand.length;
1842 : break;
1843 : }
1844 :
1845 : #define LOAD_CASE(name, ctype, mtype) \
1846 : case kExpr##name: { \
1847 : if (!ExecuteLoad<ctype, mtype>(&decoder, code, pc, len)) return; \
1848 : break; \
1849 : }
1850 :
1851 454 : LOAD_CASE(I32LoadMem8S, int32_t, int8_t);
1852 454 : LOAD_CASE(I32LoadMem8U, int32_t, uint8_t);
1853 398 : LOAD_CASE(I32LoadMem16S, int32_t, int16_t);
1854 398 : LOAD_CASE(I32LoadMem16U, int32_t, uint16_t);
1855 168 : LOAD_CASE(I64LoadMem8S, int64_t, int8_t);
1856 0 : LOAD_CASE(I64LoadMem8U, int64_t, uint8_t);
1857 168 : LOAD_CASE(I64LoadMem16S, int64_t, int16_t);
1858 0 : LOAD_CASE(I64LoadMem16U, int64_t, uint16_t);
1859 168 : LOAD_CASE(I64LoadMem32S, int64_t, int32_t);
1860 0 : LOAD_CASE(I64LoadMem32U, int64_t, uint32_t);
1861 30834 : LOAD_CASE(I32LoadMem, int32_t, int32_t);
1862 10865 : LOAD_CASE(I64LoadMem, int64_t, int64_t);
1863 16442 : LOAD_CASE(F32LoadMem, float, float);
1864 52310 : LOAD_CASE(F64LoadMem, double, double);
1865 : #undef LOAD_CASE
1866 :
1867 : #define STORE_CASE(name, ctype, mtype) \
1868 : case kExpr##name: { \
1869 : if (!ExecuteStore<ctype, mtype>(&decoder, code, pc, len)) return; \
1870 : break; \
1871 : }
1872 :
1873 484 : STORE_CASE(I32StoreMem8, int32_t, int8_t);
1874 456 : STORE_CASE(I32StoreMem16, int32_t, int16_t);
1875 0 : STORE_CASE(I64StoreMem8, int64_t, int8_t);
1876 0 : STORE_CASE(I64StoreMem16, int64_t, int16_t);
1877 0 : STORE_CASE(I64StoreMem32, int64_t, int32_t);
1878 3270 : STORE_CASE(I32StoreMem, int32_t, int32_t);
1879 1376 : STORE_CASE(I64StoreMem, int64_t, int64_t);
1880 2034 : STORE_CASE(F32StoreMem, float, float);
1881 19051 : STORE_CASE(F64StoreMem, double, double);
1882 : #undef STORE_CASE
1883 :
1884 : #define ASMJS_LOAD_CASE(name, ctype, mtype, defval) \
1885 : case kExpr##name: { \
1886 : uint32_t index = Pop().to<uint32_t>(); \
1887 : ctype result; \
1888 : if (!BoundsCheck<mtype>(instance()->mem_size, 0, index)) { \
1889 : result = defval; \
1890 : } else { \
1891 : byte* addr = instance()->mem_start + index; \
1892 : /* TODO(titzer): alignment for asmjs load mem? */ \
1893 : result = static_cast<ctype>(*reinterpret_cast<mtype*>(addr)); \
1894 : } \
1895 : Push(WasmVal(result)); \
1896 : break; \
1897 : }
1898 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8S, int32_t, int8_t, 0);
1899 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8U, int32_t, uint8_t, 0);
1900 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16S, int32_t, int16_t, 0);
1901 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16U, int32_t, uint16_t, 0);
1902 875 : ASMJS_LOAD_CASE(I32AsmjsLoadMem, int32_t, int32_t, 0);
1903 875 : ASMJS_LOAD_CASE(F32AsmjsLoadMem, float, float,
1904 : std::numeric_limits<float>::quiet_NaN());
1905 1190 : ASMJS_LOAD_CASE(F64AsmjsLoadMem, double, double,
1906 : std::numeric_limits<double>::quiet_NaN());
1907 : #undef ASMJS_LOAD_CASE
1908 :
1909 : #define ASMJS_STORE_CASE(name, ctype, mtype) \
1910 : case kExpr##name: { \
1911 : WasmVal val = Pop(); \
1912 : uint32_t index = Pop().to<uint32_t>(); \
1913 : if (BoundsCheck<mtype>(instance()->mem_size, 0, index)) { \
1914 : byte* addr = instance()->mem_start + index; \
1915 : /* TODO(titzer): alignment for asmjs store mem? */ \
1916 : *(reinterpret_cast<mtype*>(addr)) = static_cast<mtype>(val.to<ctype>()); \
1917 : } \
1918 : Push(val); \
1919 : break; \
1920 : }
1921 :
1922 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem8, int32_t, int8_t);
1923 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem16, int32_t, int16_t);
1924 8162 : ASMJS_STORE_CASE(I32AsmjsStoreMem, int32_t, int32_t);
1925 0 : ASMJS_STORE_CASE(F32AsmjsStoreMem, float, float);
1926 0 : ASMJS_STORE_CASE(F64AsmjsStoreMem, double, double);
1927 : #undef ASMJS_STORE_CASE
1928 : case kExprGrowMemory: {
1929 : MemoryIndexOperand<false> operand(&decoder, code->at(pc));
1930 190 : uint32_t delta_pages = Pop().to<uint32_t>();
1931 : Push(WasmVal(ExecuteGrowMemory(
1932 285 : delta_pages, codemap_->maybe_instance(), instance())));
1933 95 : len = 1 + operand.length;
1934 : break;
1935 : }
1936 : case kExprMemorySize: {
1937 : MemoryIndexOperand<false> operand(&decoder, code->at(pc));
1938 : Push(WasmVal(static_cast<uint32_t>(instance()->mem_size /
1939 0 : WasmModule::kPageSize)));
1940 0 : len = 1 + operand.length;
1941 : break;
1942 : }
1943 : // We need to treat kExprI32ReinterpretF32 and kExprI64ReinterpretF64
1944 : // specially to guarantee that the quiet bit of a NaN is preserved on
1945 : // ia32 by the reinterpret casts.
1946 : case kExprI32ReinterpretF32: {
1947 399 : WasmVal val = Pop();
1948 : Push(WasmVal(ExecuteI32ReinterpretF32(val)));
1949 399 : possible_nondeterminism_ |= std::isnan(val.to<float>());
1950 : break;
1951 : }
1952 : case kExprI64ReinterpretF64: {
1953 399 : WasmVal val = Pop();
1954 : Push(WasmVal(ExecuteI64ReinterpretF64(val)));
1955 399 : possible_nondeterminism_ |= std::isnan(val.to<double>());
1956 : break;
1957 : }
1958 : #define EXECUTE_SIMPLE_BINOP(name, ctype, op) \
1959 : case kExpr##name: { \
1960 : WasmVal rval = Pop(); \
1961 : WasmVal lval = Pop(); \
1962 : WasmVal result(lval.to<ctype>() op rval.to<ctype>()); \
1963 : Push(result); \
1964 : break; \
1965 : }
1966 5471214 : FOREACH_SIMPLE_BINOP(EXECUTE_SIMPLE_BINOP)
1967 : #undef EXECUTE_SIMPLE_BINOP
1968 :
1969 : #define EXECUTE_OTHER_BINOP(name, ctype) \
1970 : case kExpr##name: { \
1971 : TrapReason trap = kTrapCount; \
1972 : volatile ctype rval = Pop().to<ctype>(); \
1973 : volatile ctype lval = Pop().to<ctype>(); \
1974 : WasmVal result(Execute##name(lval, rval, &trap)); \
1975 : if (trap != kTrapCount) return DoTrap(trap, pc); \
1976 : Push(result); \
1977 : break; \
1978 : }
1979 3348422 : FOREACH_OTHER_BINOP(EXECUTE_OTHER_BINOP)
1980 : #undef EXECUTE_OTHER_BINOP
1981 :
1982 : case kExprF32CopySign: {
1983 : // Handle kExprF32CopySign separately because it may introduce
1984 : // observable non-determinism.
1985 : TrapReason trap = kTrapCount;
1986 185178 : volatile float rval = Pop().to<float>();
1987 185178 : volatile float lval = Pop().to<float>();
1988 92589 : WasmVal result(ExecuteF32CopySign(lval, rval, &trap));
1989 : Push(result);
1990 185178 : possible_nondeterminism_ |= std::isnan(rval);
1991 : break;
1992 : }
1993 : case kExprF64CopySign: {
        // Handle kExprF64CopySign separately because it may introduce
1995 : // observable non-determinism.
1996 : TrapReason trap = kTrapCount;
1997 33642 : volatile double rval = Pop().to<double>();
1998 33642 : volatile double lval = Pop().to<double>();
1999 16821 : WasmVal result(ExecuteF64CopySign(lval, rval, &trap));
2000 : Push(result);
2001 33642 : possible_nondeterminism_ |= std::isnan(rval);
2002 : break;
2003 : }
2004 : #define EXECUTE_OTHER_UNOP(name, ctype) \
2005 : case kExpr##name: { \
2006 : TrapReason trap = kTrapCount; \
2007 : volatile ctype val = Pop().to<ctype>(); \
2008 : WasmVal result(Execute##name(val, &trap)); \
2009 : if (trap != kTrapCount) return DoTrap(trap, pc); \
2010 : Push(result); \
2011 : break; \
2012 : }
2013 607650 : FOREACH_OTHER_UNOP(EXECUTE_OTHER_UNOP)
2014 : #undef EXECUTE_OTHER_UNOP
2015 :
2016 : default:
2017 : V8_Fatal(__FILE__, __LINE__, "Unknown or unimplemented opcode #%d:%s",
2018 0 : code->start[pc], OpcodeName(code->start[pc]));
2019 : UNREACHABLE();
2020 : }
2021 :
2022 : #ifdef DEBUG
2023 : if (!WasmOpcodes::IsControlOpcode(static_cast<WasmOpcode>(opcode))) {
2024 : DCHECK_EQ(expected_new_stack_height, StackHeight());
2025 : }
2026 : #endif
2027 :
2028 15749536 : pc += len;
2029 15749536 : if (pc == limit) {
2030 : // Fell off end of code; do an implicit return.
2031 : TRACE("@%-3zu: ImplicitReturn\n", pc);
2032 2540834 : if (!DoReturn(&decoder, &code, &pc, &limit,
2033 5081668 : code->function->sig->return_count()))
2034 : return;
2035 161178 : PAUSE_IF_BREAK_FLAG(AfterReturn);
2036 : }
2037 : }
2038 :
2039 8293 : state_ = WasmInterpreter::PAUSED;
2040 12984 : break_pc_ = hit_break ? pc : kInvalidPc;
2041 8293 : CommitPc(pc);
2042 : }
2043 :
2044 : WasmVal Pop() {
2045 : DCHECK_GT(frames_.size(), 0);
2046 : DCHECK_GT(StackHeight(), frames_.back().llimit()); // can't pop into locals
2047 6432326 : return *--sp_;
2048 : }
2049 :
  // Discards the top {n} values from the operand stack.
  void PopN(int n) {
    DCHECK_GE(StackHeight(), n);
    DCHECK_GT(frames_.size(), 0);
    // Check that we don't pop into locals.
    DCHECK_GE(StackHeight() - n, frames_.back().llimit());
    sp_ -= n;
  }
2057 :
2058 : WasmVal PopArity(size_t arity) {
2059 : if (arity == 0) return WasmVal();
2060 : CHECK_EQ(1, arity);
2061 : return Pop();
2062 : }
2063 :
  // Pushes one value; the caller must have reserved space beforehand
  // (see EnsureStackSpace), hence only a DCHECK on the remaining capacity.
  void Push(WasmVal val) {
    DCHECK_NE(kWasmStmt, val.type);
    DCHECK_LE(1, stack_limit_ - sp_);
    *sp_++ = val;
  }
2069 :
  // Bulk-pushes {arity} values; space must already be reserved.
  void Push(WasmVal* vals, size_t arity) {
    DCHECK_LE(arity, stack_limit_ - sp_);
    // Debug-only: verify that no void values are being pushed.
    for (WasmVal *val = vals, *end = vals + arity; val != end; ++val) {
      DCHECK_NE(kWasmStmt, val->type);
    }
    memcpy(sp_, vals, arity * sizeof(*sp_));
    sp_ += arity;
  }
2078 :
2079 6706815 : void EnsureStackSpace(size_t size) {
2080 13413630 : if (V8_LIKELY(static_cast<size_t>(stack_limit_ - sp_) >= size)) return;
2081 20123 : size_t old_size = stack_limit_ - stack_start_;
2082 20123 : size_t new_size = Max(size_t{8}, 2 * old_size);
2083 41128 : while (new_size < (sp_ - stack_start_) + size) {
2084 : DCHECK_GE(std::numeric_limits<decltype(new_size)>::max() / 4, new_size);
2085 882 : new_size *= 2;
2086 : }
2087 20123 : WasmVal* new_stack = zone_->NewArray<WasmVal>(new_size);
2088 20123 : memcpy(new_stack, stack_start_, old_size * sizeof(*sp_));
2089 20123 : sp_ = new_stack + (sp_ - stack_start_);
2090 20123 : stack_start_ = new_stack;
2091 20123 : stack_limit_ = new_stack + new_size;
2092 : }
2093 :
  // Total number of values on the stack, including all frames' params/locals.
  sp_t StackHeight() { return sp_ - stack_start_; }
2095 :
  // Tracing hook for --trace-wasm-interpreter; printing the full stack
  // contents is not implemented yet (hence the UNIMPLEMENTED()).
  void TraceStack(const char* phase, pc_t pc) {
    if (FLAG_trace_wasm_interpreter) {
      PrintF("%s @%zu", phase, pc);
      UNIMPLEMENTED();
      PrintF("\n");
    }
  }
2103 :
  // Debug-only: prints every stack slot of the top frame, tagging each as
  // parameter (p), local (l) or operand-stack value (s) based on the frame's
  // plimit/llimit boundaries.
  void TraceValueStack() {
#ifdef DEBUG
    if (!FLAG_trace_wasm_interpreter) return;
    Frame* top = frames_.size() > 0 ? &frames_.back() : nullptr;
    sp_t sp = top ? top->sp : 0;
    sp_t plimit = top ? top->plimit() : 0;
    sp_t llimit = top ? top->llimit() : 0;
    for (size_t i = sp; i < StackHeight(); ++i) {
      if (i < plimit)
        PrintF(" p%zu:", i);
      else if (i < llimit)
        PrintF(" l%zu:", i);
      else
        PrintF(" s%zu:", i);
      WasmVal val = GetStackValue(i);
      switch (val.type) {
        case kWasmI32:
          PrintF("i32:%d", val.to<int32_t>());
          break;
        case kWasmI64:
          PrintF("i64:%" PRId64 "", val.to<int64_t>());
          break;
        case kWasmF32:
          PrintF("f32:%f", val.to<float>());
          break;
        case kWasmF64:
          PrintF("f64:%lf", val.to<double>());
          break;
        case kWasmStmt:
          PrintF("void");
          break;
        default:
          UNREACHABLE();
          break;
      }
    }
#endif  // DEBUG
  }
2142 :
2143 : ExternalCallResult TryHandleException(Isolate* isolate) {
2144 90 : if (HandleException(isolate) == WasmInterpreter::Thread::UNWOUND) {
2145 : return {ExternalCallResult::EXTERNAL_UNWOUND};
2146 : }
2147 : return {ExternalCallResult::EXTERNAL_RETURNED};
2148 : }
2149 :
  // Calls a code object on behalf of the interpreter. Compiled wasm functions
  // of this instance are redirected back into the interpreter (INTERNAL);
  // everything else is invoked as a JS callable. Arguments are consumed from
  // the interpreter value stack; a single return value (if any) is pushed.
  ExternalCallResult CallCodeObject(Isolate* isolate, Handle<Code> code,
                                    FunctionSig* signature) {
    DCHECK(AllowHandleAllocation::IsAllowed());
    DCHECK(AllowHeapAllocation::IsAllowed());

    if (code->kind() == Code::WASM_FUNCTION) {
      // Recover the target instance and function index from the code
      // object's deoptimization data.
      FixedArray* deopt_data = code->deoptimization_data();
      DCHECK_EQ(2, deopt_data->length());
      WasmInstanceObject* target_instance =
          WasmInstanceObject::cast(WeakCell::cast(deopt_data->get(0))->value());
      if (target_instance != *codemap()->instance()) {
        // TODO(wasm): Implement calling functions of other instances/modules.
        UNIMPLEMENTED();
      }
      int target_func_idx = Smi::cast(deopt_data->get(1))->value();
      DCHECK_LE(0, target_func_idx);
      return {ExternalCallResult::INTERNAL,
              codemap()->GetCode(target_func_idx)};
    }

    Handle<HeapObject> target =
        codemap()->GetCallableObjectForJSImport(isolate, code);

    if (target.is_null()) {
      // Import is not callable: throw a wasm type error and try to handle it.
      isolate->Throw(*isolate->factory()->NewTypeError(
          MessageTemplate::kWasmTrapTypeError));
      return TryHandleException(isolate);
    }

#if DEBUG
    std::ostringstream oss;
    target->HeapObjectShortPrint(oss);
    TRACE(" => Calling imported function %s\n", oss.str().c_str());
#endif

    int num_args = static_cast<int>(signature->parameter_count());

    // Get all arguments as JS values.
    std::vector<Handle<Object>> args;
    args.reserve(num_args);
    WasmVal* wasm_args = sp_ - num_args;
    for (int i = 0; i < num_args; ++i) {
      args.push_back(WasmValToNumber(isolate->factory(), wasm_args[i],
                                     signature->GetParam(i)));
    }

    // The receiver is the global proxy if in sloppy mode (default), undefined
    // if in strict mode.
    Handle<Object> receiver = isolate->global_proxy();
    if (target->IsJSFunction() &&
        is_strict(JSFunction::cast(*target)->shared()->language_mode())) {
      receiver = isolate->factory()->undefined_value();
    }

    MaybeHandle<Object> maybe_retval =
        Execution::Call(isolate, target, receiver, num_args, args.data());
    // A null result means a pending exception; delegate to the handler.
    if (maybe_retval.is_null()) return TryHandleException(isolate);

    Handle<Object> retval = maybe_retval.ToHandleChecked();
    // Pop arguments off the stack.
    sp_ -= num_args;
    if (signature->return_count() > 0) {
      // TODO(wasm): Handle multiple returns.
      DCHECK_EQ(1, signature->return_count());
      Push(ToWebAssemblyValue(isolate, retval, signature->GetReturn()));
    }
    return {ExternalCallResult::EXTERNAL_RETURNED};
  }
2218 :
  // Looks up the code object of an imported function and dispatches the call
  // with the function's declared signature.
  ExternalCallResult CallImportedFunction(uint32_t function_index) {
    // Use a new HandleScope to avoid leaking / accumulating handles in the
    // outer scope.
    Isolate* isolate = codemap()->instance()->GetIsolate();
    HandleScope handle_scope(isolate);

    Handle<Code> target(codemap()->GetImportedFunction(function_index),
                        isolate);
    return CallCodeObject(isolate, target,
                          codemap()->module()->functions[function_index].sig);
  }
2230 :
2231 302 : ExternalCallResult CallIndirectFunction(uint32_t table_index,
2232 : uint32_t entry_index,
2233 539 : uint32_t sig_index) {
2234 302 : if (!codemap()->has_instance()) {
2235 : // No instance. Rely on the information stored in the WasmModule.
2236 : // TODO(wasm): This is only needed for testing. Refactor testing to use
2237 : // the same paths as production.
2238 : InterpreterCode* code =
2239 182 : codemap()->GetIndirectCode(table_index, entry_index);
2240 182 : if (!code) return {ExternalCallResult::INVALID_FUNC};
2241 119 : if (code->function->sig_index != sig_index) {
2242 : // If not an exact match, we have to do a canonical check.
2243 : // TODO(titzer): make this faster with some kind of caching?
2244 : const WasmIndirectFunctionTable* table =
2245 42 : &module()->function_tables[table_index];
2246 21 : int function_key = table->map.Find(code->function->sig);
2247 42 : if (function_key < 0 ||
2248 : (function_key !=
2249 63 : table->map.Find(module()->signatures[sig_index]))) {
2250 7 : return {ExternalCallResult::SIGNATURE_MISMATCH};
2251 : }
2252 : }
2253 112 : return {ExternalCallResult::INTERNAL, code};
2254 : }
2255 :
2256 : WasmCompiledModule* compiled_module =
2257 120 : codemap()->instance()->compiled_module();
2258 : Isolate* isolate = compiled_module->GetIsolate();
2259 :
2260 : Code* target;
2261 : {
2262 : DisallowHeapAllocation no_gc;
2263 : // Get function to be called directly from the live instance to see latest
2264 : // changes to the tables.
2265 :
2266 : // Canonicalize signature index.
2267 : // TODO(titzer): make this faster with some kind of caching?
2268 : const WasmIndirectFunctionTable* table =
2269 240 : &module()->function_tables[table_index];
2270 240 : FunctionSig* sig = module()->signatures[sig_index];
2271 120 : uint32_t canonical_sig_index = table->map.Find(sig);
2272 :
2273 : // Check signature.
2274 : FixedArray* sig_tables = compiled_module->ptr_to_signature_tables();
2275 120 : if (table_index >= static_cast<uint32_t>(sig_tables->length())) {
2276 0 : return {ExternalCallResult::INVALID_FUNC};
2277 : }
2278 : FixedArray* sig_table =
2279 120 : FixedArray::cast(sig_tables->get(static_cast<int>(table_index)));
2280 120 : if (entry_index >= static_cast<uint32_t>(sig_table->length())) {
2281 15 : return {ExternalCallResult::INVALID_FUNC};
2282 : }
2283 : int found_sig =
2284 105 : Smi::cast(sig_table->get(static_cast<int>(entry_index)))->value();
2285 105 : if (static_cast<uint32_t>(found_sig) != canonical_sig_index) {
2286 30 : return {ExternalCallResult::SIGNATURE_MISMATCH};
2287 : }
2288 :
2289 : // Get code object.
2290 : FixedArray* fun_tables = compiled_module->ptr_to_function_tables();
2291 : DCHECK_EQ(sig_tables->length(), fun_tables->length());
2292 : FixedArray* fun_table =
2293 : FixedArray::cast(fun_tables->get(static_cast<int>(table_index)));
2294 : DCHECK_EQ(sig_table->length(), fun_table->length());
2295 : target = Code::cast(fun_table->get(static_cast<int>(entry_index)));
2296 : }
2297 :
2298 : // Call the code object. Use a new HandleScope to avoid leaking /
2299 : // accumulating handles in the outer scope.
2300 : HandleScope handle_scope(isolate);
2301 : FunctionSig* signature =
2302 150 : &codemap()->module()->signatures[table_index][sig_index];
2303 75 : return CallCodeObject(isolate, handle(target, isolate), signature);
2304 : }
2305 :
2306 : inline Activation current_activation() {
2307 5697792 : return activations_.empty() ? Activation(0, 0) : activations_.back();
2308 : }
2309 : };
2310 :
// Implementation behind the public InterpretedFrame interface: a lightweight
// (thread, frame-index) pair that reads frame data out of the ThreadImpl.
class InterpretedFrameImpl {
 public:
  InterpretedFrameImpl(ThreadImpl* thread, int index)
      : thread_(thread), index_(index) {
    DCHECK_LE(0, index);
  }

  // The wasm function executing in this frame.
  const WasmFunction* function() const { return frame()->code->function; }

  // Current pc within the function body.
  int pc() const {
    DCHECK_LE(0, frame()->pc);
    DCHECK_GE(kMaxInt, frame()->pc);
    return static_cast<int>(frame()->pc);
  }

  int GetParameterCount() const {
    DCHECK_GE(kMaxInt, function()->sig->parameter_count());
    return static_cast<int>(function()->sig->parameter_count());
  }

  // Locals include the function parameters.
  int GetLocalCount() const {
    size_t num_locals = function()->sig->parameter_count() +
                        frame()->code->locals.type_list.size();
    DCHECK_GE(kMaxInt, num_locals);
    return static_cast<int>(num_locals);
  }

  // Height of this frame's operand stack, excluding params/locals. For
  // non-top frames, the next frame's sp bounds this frame's values.
  int GetStackHeight() const {
    bool is_top_frame =
        static_cast<size_t>(index_) + 1 == thread_->frames_.size();
    size_t stack_limit =
        is_top_frame ? thread_->StackHeight() : thread_->frames_[index_ + 1].sp;
    DCHECK_LE(frame()->sp, stack_limit);
    size_t frame_size = stack_limit - frame()->sp;
    DCHECK_LE(GetLocalCount(), frame_size);
    return static_cast<int>(frame_size) - GetLocalCount();
  }

  WasmVal GetLocalValue(int index) const {
    DCHECK_LE(0, index);
    DCHECK_GT(GetLocalCount(), index);
    return thread_->GetStackValue(static_cast<int>(frame()->sp) + index);
  }

  WasmVal GetStackValue(int index) const {
    DCHECK_LE(0, index);
    // Index must be within the number of stack values of this frame.
    DCHECK_GT(GetStackHeight(), index);
    // Stack values live above this frame's params and locals.
    return thread_->GetStackValue(static_cast<int>(frame()->sp) +
                                  GetLocalCount() + index);
  }

 private:
  ThreadImpl* thread_;
  int index_;

  ThreadImpl::Frame* frame() const {
    DCHECK_GT(thread_->frames_.size(), index_);
    return &thread_->frames_[index_];
  }
};
2372 :
// Converters between WasmInterpreter::Thread and WasmInterpreter::ThreadImpl.
// Thread* is the public interface, without knowledge of the object layout.
// This cast is potentially risky, but as long as we always cast it back before
// accessing any data, it should be fine. UBSan is not complaining.
// (These casts implement the pimpl idiom without a stored pointer.)
WasmInterpreter::Thread* ToThread(ThreadImpl* impl) {
  return reinterpret_cast<WasmInterpreter::Thread*>(impl);
}
ThreadImpl* ToImpl(WasmInterpreter::Thread* thread) {
  return reinterpret_cast<ThreadImpl*>(thread);
}

// Same conversion for InterpretedFrame and InterpretedFrameImpl.
InterpretedFrame* ToFrame(InterpretedFrameImpl* impl) {
  return reinterpret_cast<InterpretedFrame*>(impl);
}
const InterpretedFrameImpl* ToImpl(const InterpretedFrame* frame) {
  return reinterpret_cast<const InterpretedFrameImpl*>(frame);
}
2391 :
2392 : } // namespace
2393 :
2394 : //============================================================================
2395 : // Implementation of the pimpl idiom for WasmInterpreter::Thread.
2396 : // Instead of placing a pointer to the ThreadImpl inside of the Thread object,
2397 : // we just reinterpret_cast them. ThreadImpls are only allocated inside this
2398 : // translation unit anyway.
2399 : //============================================================================
// Every Thread method simply forwards to the equivalent ThreadImpl method
// (see the pimpl comment above the ToThread/ToImpl converters).
WasmInterpreter::State WasmInterpreter::Thread::state() {
  return ToImpl(this)->state();
}
void WasmInterpreter::Thread::InitFrame(const WasmFunction* function,
                                        WasmVal* args) {
  ToImpl(this)->InitFrame(function, args);
}
WasmInterpreter::State WasmInterpreter::Thread::Run(int num_steps) {
  return ToImpl(this)->Run(num_steps);
}
void WasmInterpreter::Thread::Pause() { return ToImpl(this)->Pause(); }
void WasmInterpreter::Thread::Reset() { return ToImpl(this)->Reset(); }
WasmInterpreter::Thread::ExceptionHandlingResult
WasmInterpreter::Thread::HandleException(Isolate* isolate) {
  return ToImpl(this)->HandleException(isolate);
}
pc_t WasmInterpreter::Thread::GetBreakpointPc() {
  return ToImpl(this)->GetBreakpointPc();
}
int WasmInterpreter::Thread::GetFrameCount() {
  return ToImpl(this)->GetFrameCount();
}
std::unique_ptr<InterpretedFrame> WasmInterpreter::Thread::GetFrame(int index) {
  DCHECK_LE(0, index);
  DCHECK_GT(GetFrameCount(), index);
  return std::unique_ptr<InterpretedFrame>(
      ToFrame(new InterpretedFrameImpl(ToImpl(this), index)));
}
WasmVal WasmInterpreter::Thread::GetReturnValue(int index) {
  return ToImpl(this)->GetReturnValue(index);
}
TrapReason WasmInterpreter::Thread::GetTrapReason() {
  return ToImpl(this)->GetTrapReason();
}
bool WasmInterpreter::Thread::PossibleNondeterminism() {
  return ToImpl(this)->PossibleNondeterminism();
}
uint64_t WasmInterpreter::Thread::NumInterpretedCalls() {
  return ToImpl(this)->NumInterpretedCalls();
}
void WasmInterpreter::Thread::AddBreakFlags(uint8_t flags) {
  ToImpl(this)->AddBreakFlags(flags);
}
void WasmInterpreter::Thread::ClearBreakFlags() {
  ToImpl(this)->ClearBreakFlags();
}
uint32_t WasmInterpreter::Thread::NumActivations() {
  return ToImpl(this)->NumActivations();
}
uint32_t WasmInterpreter::Thread::StartActivation() {
  return ToImpl(this)->StartActivation();
}
void WasmInterpreter::Thread::FinishActivation(uint32_t id) {
  ToImpl(this)->FinishActivation(id);
}
uint32_t WasmInterpreter::Thread::ActivationFrameBase(uint32_t id) {
  return ToImpl(this)->ActivationFrameBase(id);
}
2458 :
2459 : //============================================================================
2460 : // The implementation details of the interpreter.
2461 : //============================================================================
// Container for the interpreter state that lives in the interpreter's zone:
// the instance, a private copy of the module wire bytes, the code map, and
// the (currently single) interpreter thread.
class WasmInterpreterInternals : public ZoneObject {
 public:
  WasmInstance* instance_;
  // Create a copy of the module bytes for the interpreter, since the passed
  // pointer might be invalidated after constructing the interpreter.
  const ZoneVector<uint8_t> module_bytes_;
  CodeMap codemap_;
  ZoneVector<ThreadImpl> threads_;

  WasmInterpreterInternals(Isolate* isolate, Zone* zone,
                           const ModuleBytesEnv& env)
      : instance_(env.module_env.instance),
        module_bytes_(env.wire_bytes.start(), env.wire_bytes.end(), zone),
        codemap_(
            isolate,
            env.module_env.instance ? env.module_env.instance->module : nullptr,
            module_bytes_.data(), zone),
        threads_(zone) {
    threads_.emplace_back(zone, &codemap_, env.module_env.instance);
  }
};
2483 :
2484 : //============================================================================
2485 : // Implementation of the public interface of the interpreter.
2486 : //============================================================================
WasmInterpreter::WasmInterpreter(Isolate* isolate, const ModuleBytesEnv& env)
    : zone_(isolate->allocator(), ZONE_NAME),
      internals_(new (&zone_) WasmInterpreterInternals(isolate, &zone_, env)) {}

// The internals are zone-allocated, so only the destructor needs to run
// explicitly; the zone reclaims the memory.
WasmInterpreter::~WasmInterpreter() { internals_->~WasmInterpreterInternals(); }

// Run/Pause operate on the single interpreter thread.
void WasmInterpreter::Run() { internals_->threads_[0].Run(); }

void WasmInterpreter::Pause() { internals_->threads_[0].Pause(); }
2496 :
2497 4790 : bool WasmInterpreter::SetBreakpoint(const WasmFunction* function, pc_t pc,
2498 : bool enabled) {
2499 2395 : InterpreterCode* code = internals_->codemap_.GetCode(function);
2500 2395 : size_t size = static_cast<size_t>(code->end - code->start);
2501 : // Check bounds for {pc}.
2502 2395 : if (pc < code->locals.encoded_size || pc >= size) return false;
2503 : // Make a copy of the code before enabling a breakpoint.
2504 2395 : if (enabled && code->orig_start == code->start) {
2505 66 : code->start = reinterpret_cast<byte*>(zone_.New(size));
2506 66 : memcpy(code->start, code->orig_start, size);
2507 66 : code->end = code->start + size;
2508 : }
2509 2395 : bool prev = code->start[pc] == kInternalBreakpoint;
2510 2395 : if (enabled) {
2511 1261 : code->start[pc] = kInternalBreakpoint;
2512 : } else {
2513 1134 : code->start[pc] = code->orig_start[pc];
2514 : }
2515 2395 : return prev;
2516 : }
2517 :
2518 0 : bool WasmInterpreter::GetBreakpoint(const WasmFunction* function, pc_t pc) {
2519 0 : InterpreterCode* code = internals_->codemap_.GetCode(function);
2520 0 : size_t size = static_cast<size_t>(code->end - code->start);
2521 : // Check bounds for {pc}.
2522 0 : if (pc < code->locals.encoded_size || pc >= size) return false;
2523 : // Check if a breakpoint is present at that place in the code.
2524 0 : return code->start[pc] == kInternalBreakpoint;
2525 : }
2526 :
// Per-function tracing is not implemented yet.
bool WasmInterpreter::SetTracing(const WasmFunction* function, bool enabled) {
  UNIMPLEMENTED();
  return false;
}

void WasmInterpreter::SetInstanceObject(WasmInstanceObject* instance) {
  internals_->codemap_.SetInstanceObject(instance);
}

int WasmInterpreter::GetThreadCount() {
  return 1;  // only one thread for now.
}

WasmInterpreter::Thread* WasmInterpreter::GetThread(int id) {
  CHECK_EQ(0, id);  // only one thread for now.
  return ToThread(&internals_->threads_[id]);
}
2544 :
// Returns the current size of the instance's linear memory
// (presumably in bytes — see the instance's mem_size field).
size_t WasmInterpreter::GetMemorySize() {
  return internals_->instance_->mem_size;
}
2548 :
// Reading linear memory through this interface is not implemented yet;
// aborts via UNIMPLEMENTED(). The return only satisfies the compiler.
WasmVal WasmInterpreter::ReadMemory(size_t offset) {
  UNIMPLEMENTED();
  return WasmVal();
}
2553 :
// Writing linear memory through this interface is not implemented yet;
// aborts via UNIMPLEMENTED().
void WasmInterpreter::WriteMemory(size_t offset, WasmVal val) {
  UNIMPLEMENTED();
}
2557 :
// Test-only: registers {function} in the code map without any code attached
// (both code pointers nullptr); code can be supplied later via
// SetFunctionCodeForTesting().
void WasmInterpreter::AddFunctionForTesting(const WasmFunction* function) {
  internals_->codemap_.AddFunction(function, nullptr, nullptr);
}
2561 :
// Test-only: attaches the byte range [start, end) as the code of
// {function} in the code map.
void WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function,
                                                const byte* start,
                                                const byte* end) {
  internals_->codemap_.SetFunctionCode(function, start, end);
}
2567 :
2568 29 : ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
2569 : Zone* zone, const WasmModule* module, const byte* start, const byte* end) {
2570 : // Create some dummy structures, to avoid special-casing the implementation
2571 : // just for testing.
2572 : FunctionSig sig(0, 0, nullptr);
2573 29 : WasmFunction function{&sig, 0, 0, 0, 0, 0, 0, false, false};
2574 : InterpreterCode code{
2575 58 : &function, BodyLocalDecls(zone), start, end, nullptr, nullptr, nullptr};
2576 :
2577 : // Now compute and return the control transfers.
2578 29 : SideTable side_table(zone, module, &code);
2579 29 : return side_table.map_;
2580 : }
2581 :
2582 : //============================================================================
2583 : // Implementation of the frame inspection interface.
2584 : //============================================================================
2585 1970385 : const WasmFunction* InterpretedFrame::function() const {
2586 1970385 : return ToImpl(this)->function();
2587 : }
2588 3940770 : int InterpretedFrame::pc() const { return ToImpl(this)->pc(); }
// Forwards to the hidden frame implementation.
int InterpretedFrame::GetParameterCount() const {
  return ToImpl(this)->GetParameterCount();
}
// Forwards to the hidden frame implementation.
int InterpretedFrame::GetLocalCount() const {
  return ToImpl(this)->GetLocalCount();
}
// Forwards to the hidden frame implementation.
int InterpretedFrame::GetStackHeight() const {
  return ToImpl(this)->GetStackHeight();
}
2598 542 : WasmVal InterpretedFrame::GetLocalValue(int index) const {
2599 542 : return ToImpl(this)->GetLocalValue(index);
2600 : }
// Forwards to the hidden frame implementation.
WasmVal InterpretedFrame::GetStackValue(int index) const {
  return ToImpl(this)->GetStackValue(index);
}
2604 :
2605 : } // namespace wasm
2606 : } // namespace internal
2607 : } // namespace v8
|