Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include <type_traits>
6 :
7 : #include "src/wasm/wasm-interpreter.h"
8 :
9 : #include "src/assembler-inl.h"
10 : #include "src/conversions.h"
11 : #include "src/identity-map.h"
12 : #include "src/objects-inl.h"
13 : #include "src/utils.h"
14 : #include "src/wasm/decoder.h"
15 : #include "src/wasm/function-body-decoder-impl.h"
16 : #include "src/wasm/function-body-decoder.h"
17 : #include "src/wasm/wasm-external-refs.h"
18 : #include "src/wasm/wasm-limits.h"
19 : #include "src/wasm/wasm-module.h"
20 : #include "src/wasm/wasm-objects.h"
21 :
22 : #include "src/zone/accounting-allocator.h"
23 : #include "src/zone/zone-containers.h"
24 :
25 : namespace v8 {
26 : namespace internal {
27 : namespace wasm {
28 :
29 : #if DEBUG
30 : #define TRACE(...) \
31 : do { \
32 : if (FLAG_trace_wasm_interpreter) PrintF(__VA_ARGS__); \
33 : } while (false)
34 : #else
35 : #define TRACE(...)
36 : #endif
37 :
38 : #define FOREACH_INTERNAL_OPCODE(V) V(Breakpoint, 0xFF)
39 :
40 : #define WASM_CTYPES(V) \
41 : V(I32, int32_t) V(I64, int64_t) V(F32, float) V(F64, double)
42 :
43 : #define FOREACH_SIMPLE_BINOP(V) \
44 : V(I32Add, uint32_t, +) \
45 : V(I32Sub, uint32_t, -) \
46 : V(I32Mul, uint32_t, *) \
47 : V(I32And, uint32_t, &) \
48 : V(I32Ior, uint32_t, |) \
49 : V(I32Xor, uint32_t, ^) \
50 : V(I32Eq, uint32_t, ==) \
51 : V(I32Ne, uint32_t, !=) \
52 : V(I32LtU, uint32_t, <) \
53 : V(I32LeU, uint32_t, <=) \
54 : V(I32GtU, uint32_t, >) \
55 : V(I32GeU, uint32_t, >=) \
56 : V(I32LtS, int32_t, <) \
57 : V(I32LeS, int32_t, <=) \
58 : V(I32GtS, int32_t, >) \
59 : V(I32GeS, int32_t, >=) \
60 : V(I64Add, uint64_t, +) \
61 : V(I64Sub, uint64_t, -) \
62 : V(I64Mul, uint64_t, *) \
63 : V(I64And, uint64_t, &) \
64 : V(I64Ior, uint64_t, |) \
65 : V(I64Xor, uint64_t, ^) \
66 : V(I64Eq, uint64_t, ==) \
67 : V(I64Ne, uint64_t, !=) \
68 : V(I64LtU, uint64_t, <) \
69 : V(I64LeU, uint64_t, <=) \
70 : V(I64GtU, uint64_t, >) \
71 : V(I64GeU, uint64_t, >=) \
72 : V(I64LtS, int64_t, <) \
73 : V(I64LeS, int64_t, <=) \
74 : V(I64GtS, int64_t, >) \
75 : V(I64GeS, int64_t, >=) \
76 : V(F32Add, float, +) \
77 : V(F32Sub, float, -) \
78 : V(F32Eq, float, ==) \
79 : V(F32Ne, float, !=) \
80 : V(F32Lt, float, <) \
81 : V(F32Le, float, <=) \
82 : V(F32Gt, float, >) \
83 : V(F32Ge, float, >=) \
84 : V(F64Add, double, +) \
85 : V(F64Sub, double, -) \
86 : V(F64Eq, double, ==) \
87 : V(F64Ne, double, !=) \
88 : V(F64Lt, double, <) \
89 : V(F64Le, double, <=) \
90 : V(F64Gt, double, >) \
91 : V(F64Ge, double, >=) \
92 : V(F32Mul, float, *) \
93 : V(F64Mul, double, *) \
94 : V(F32Div, float, /) \
95 : V(F64Div, double, /)
96 :
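// The FOREACH_SIMPLE_BINOP list above is an X-macro: each V(name, ctype, op)
// row names a wasm opcode, the C type to evaluate in, and the C operator to
// apply. A minimal standalone sketch (illustration only, not part of this
// file, and not the actual expansion used here) of how such a list becomes
// one handler per row; SKETCH_BINOPS and the Sketch* names are hypothetical.
#include <cstdint>

#define SKETCH_BINOPS(V) V(I32Add, uint32_t, +) V(I32Mul, uint32_t, *)

#define DEFINE_HANDLER(name, ctype, op) \
  constexpr ctype Sketch##name(ctype a, ctype b) { return a op b; }
SKETCH_BINOPS(DEFINE_HANDLER)
#undef DEFINE_HANDLER
#undef SKETCH_BINOPS

static_assert(SketchI32Add(2, 3) == 5, "one function generated per V() row");
static_assert(SketchI32Mul(2, 3) == 6, "one function generated per V() row");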
97 : #define FOREACH_OTHER_BINOP(V) \
98 : V(I32DivS, int32_t) \
99 : V(I32DivU, uint32_t) \
100 : V(I32RemS, int32_t) \
101 : V(I32RemU, uint32_t) \
102 : V(I32Shl, uint32_t) \
103 : V(I32ShrU, uint32_t) \
104 : V(I32ShrS, int32_t) \
105 : V(I64DivS, int64_t) \
106 : V(I64DivU, uint64_t) \
107 : V(I64RemS, int64_t) \
108 : V(I64RemU, uint64_t) \
109 : V(I64Shl, uint64_t) \
110 : V(I64ShrU, uint64_t) \
111 : V(I64ShrS, int64_t) \
112 : V(I32Ror, int32_t) \
113 : V(I32Rol, int32_t) \
114 : V(I64Ror, int64_t) \
115 : V(I64Rol, int64_t) \
116 : V(F32Min, float) \
117 : V(F32Max, float) \
118 : V(F64Min, double) \
119 : V(F64Max, double) \
120 : V(I32AsmjsDivS, int32_t) \
121 : V(I32AsmjsDivU, uint32_t) \
122 : V(I32AsmjsRemS, int32_t) \
123 : V(I32AsmjsRemU, uint32_t)
124 :
125 : #define FOREACH_OTHER_UNOP(V) \
126 : V(I32Clz, uint32_t) \
127 : V(I32Ctz, uint32_t) \
128 : V(I32Popcnt, uint32_t) \
129 : V(I32Eqz, uint32_t) \
130 : V(I64Clz, uint64_t) \
131 : V(I64Ctz, uint64_t) \
132 : V(I64Popcnt, uint64_t) \
133 : V(I64Eqz, uint64_t) \
134 : V(F32Abs, float) \
135 : V(F32Neg, float) \
136 : V(F32Ceil, float) \
137 : V(F32Floor, float) \
138 : V(F32Trunc, float) \
139 : V(F32NearestInt, float) \
140 : V(F64Abs, double) \
141 : V(F64Neg, double) \
142 : V(F64Ceil, double) \
143 : V(F64Floor, double) \
144 : V(F64Trunc, double) \
145 : V(F64NearestInt, double) \
146 : V(I32SConvertF32, float) \
147 : V(I32SConvertF64, double) \
148 : V(I32UConvertF32, float) \
149 : V(I32UConvertF64, double) \
150 : V(I32ConvertI64, int64_t) \
151 : V(I64SConvertF32, float) \
152 : V(I64SConvertF64, double) \
153 : V(I64UConvertF32, float) \
154 : V(I64UConvertF64, double) \
155 : V(I64SConvertI32, int32_t) \
156 : V(I64UConvertI32, uint32_t) \
157 : V(F32SConvertI32, int32_t) \
158 : V(F32UConvertI32, uint32_t) \
159 : V(F32SConvertI64, int64_t) \
160 : V(F32UConvertI64, uint64_t) \
161 : V(F32ConvertF64, double) \
162 : V(F32ReinterpretI32, int32_t) \
163 : V(F64SConvertI32, int32_t) \
164 : V(F64UConvertI32, uint32_t) \
165 : V(F64SConvertI64, int64_t) \
166 : V(F64UConvertI64, uint64_t) \
167 : V(F64ConvertF32, float) \
168 : V(F64ReinterpretI64, int64_t) \
169 : V(I32AsmjsSConvertF32, float) \
170 : V(I32AsmjsUConvertF32, float) \
171 : V(I32AsmjsSConvertF64, double) \
172 : V(I32AsmjsUConvertF64, double) \
173 : V(F32Sqrt, float) \
174 : V(F64Sqrt, double)
175 :
176 : namespace {
177 :
178 : inline int32_t ExecuteI32DivS(int32_t a, int32_t b, TrapReason* trap) {
179 1134 : if (b == 0) {
180 : *trap = kTrapDivByZero;
181 : return 0;
182 : }
183 987 : if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
184 : *trap = kTrapDivUnrepresentable;
185 : return 0;
186 : }
187 980 : return a / b;
188 : }
189 :
190 : inline uint32_t ExecuteI32DivU(uint32_t a, uint32_t b, TrapReason* trap) {
191 49 : if (b == 0) {
192 : *trap = kTrapDivByZero;
193 : return 0;
194 : }
195 28 : return a / b;
196 : }
197 :
198 : inline int32_t ExecuteI32RemS(int32_t a, int32_t b, TrapReason* trap) {
199 98 : if (b == 0) {
200 : *trap = kTrapRemByZero;
201 : return 0;
202 : }
203 56 : if (b == -1) return 0;
204 42 : return a % b;
205 : }
206 :
207 : inline uint32_t ExecuteI32RemU(uint32_t a, uint32_t b, TrapReason* trap) {
208 49 : if (b == 0) {
209 : *trap = kTrapRemByZero;
210 : return 0;
211 : }
212 28 : return a % b;
213 : }
214 :
215 : inline uint32_t ExecuteI32Shl(uint32_t a, uint32_t b, TrapReason* trap) {
216 20426 : return a << (b & 0x1f);
217 : }
218 :
219 : inline uint32_t ExecuteI32ShrU(uint32_t a, uint32_t b, TrapReason* trap) {
220 20426 : return a >> (b & 0x1f);
221 : }
222 :
223 : inline int32_t ExecuteI32ShrS(int32_t a, int32_t b, TrapReason* trap) {
224 20426 : return a >> (b & 0x1f);
225 : }
226 :
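// Standalone illustration (not part of this file): the '& 0x1f' masks above
// implement wasm's rule that i32 shift counts are taken modulo 32, and they
// also keep the C++ shift well defined for counts >= 32.
static_assert((1u << (33u & 0x1f)) == 2u,
              "an i32.shl count of 33 behaves like a shift by 33 % 32 == 1");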
227 : inline int64_t ExecuteI64DivS(int64_t a, int64_t b, TrapReason* trap) {
228 42602 : if (b == 0) {
229 : *trap = kTrapDivByZero;
230 : return 0;
231 : }
232 41398 : if (b == -1 && a == std::numeric_limits<int64_t>::min()) {
233 : *trap = kTrapDivUnrepresentable;
234 : return 0;
235 : }
236 41391 : return a / b;
237 : }
238 :
239 : inline uint64_t ExecuteI64DivU(uint64_t a, uint64_t b, TrapReason* trap) {
240 41545 : if (b == 0) {
241 : *trap = kTrapDivByZero;
242 : return 0;
243 : }
244 40446 : return a / b;
245 : }
246 :
247 : inline int64_t ExecuteI64RemS(int64_t a, int64_t b, TrapReason* trap) {
248 41552 : if (b == 0) {
249 : *trap = kTrapRemByZero;
250 : return 0;
251 : }
252 40453 : if (b == -1) return 0;
253 39907 : return a % b;
254 : }
255 :
256 : inline uint64_t ExecuteI64RemU(uint64_t a, uint64_t b, TrapReason* trap) {
257 41545 : if (b == 0) {
258 : *trap = kTrapRemByZero;
259 : return 0;
260 : }
261 40446 : return a % b;
262 : }
263 :
264 : inline uint64_t ExecuteI64Shl(uint64_t a, uint64_t b, TrapReason* trap) {
265 85176 : return a << (b & 0x3f);
266 : }
267 :
268 : inline uint64_t ExecuteI64ShrU(uint64_t a, uint64_t b, TrapReason* trap) {
269 85176 : return a >> (b & 0x3f);
270 : }
271 :
272 : inline int64_t ExecuteI64ShrS(int64_t a, int64_t b, TrapReason* trap) {
273 85176 : return a >> (b & 0x3f);
274 : }
275 :
276 : inline uint32_t ExecuteI32Ror(uint32_t a, uint32_t b, TrapReason* trap) {
277 28 : uint32_t shift = (b & 0x1f);
278 28 : return (a >> shift) | (a << ((32 - shift) & 0x1f));  // mask avoids UB when shift == 0
279 : }
280 :
281 : inline uint32_t ExecuteI32Rol(uint32_t a, uint32_t b, TrapReason* trap) {
282 28 : uint32_t shift = (b & 0x1f);
283 28 : return (a << shift) | (a >> ((32 - shift) & 0x1f));  // mask avoids UB when shift == 0
284 : }
285 :
286 : inline uint64_t ExecuteI64Ror(uint64_t a, uint64_t b, TrapReason* trap) {
287 41531 : uint32_t shift = (b & 0x3f);
288 41531 : return (a >> shift) | (a << ((64 - shift) & 0x3f));  // mask avoids UB when shift == 0
289 : }
290 :
291 : inline uint64_t ExecuteI64Rol(uint64_t a, uint64_t b, TrapReason* trap) {
292 41531 : uint32_t shift = (b & 0x3f);
293 41531 : return (a << shift) | (a >> ((64 - shift) & 0x3f));  // mask avoids UB when shift == 0
294 : }
295 :
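// Standalone illustration (not part of this file) of the rotate helpers
// above: i32.rotr moves the low bits around to the top, with the count taken
// modulo 32. Rotr32 is a hypothetical constexpr reformulation used only so
// the result can be checked at compile time.
#include <cstdint>

constexpr uint32_t Rotr32(uint32_t a, uint32_t b) {
  return (a >> (b & 0x1f)) | (a << ((32 - (b & 0x1f)) & 0x1f));
}

static_assert(Rotr32(0x12345678u, 8) == 0x78123456u,
              "the low byte rotates around to the top");
static_assert(Rotr32(0x12345678u, 32) == 0x12345678u,
              "the rotate count is taken mod 32");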
296 : inline float ExecuteF32Min(float a, float b, TrapReason* trap) {
297 92575 : return JSMin(a, b);
298 : }
299 :
300 : inline float ExecuteF32Max(float a, float b, TrapReason* trap) {
301 92575 : return JSMax(a, b);
302 : }
303 :
304 : inline float ExecuteF32CopySign(float a, float b, TrapReason* trap) {
305 92589 : return copysignf(a, b);
306 : }
307 :
308 : inline double ExecuteF64Min(double a, double b, TrapReason* trap) {
309 16807 : return JSMin(a, b);
310 : }
311 :
312 : inline double ExecuteF64Max(double a, double b, TrapReason* trap) {
313 16807 : return JSMax(a, b);
314 : }
315 :
316 : inline double ExecuteF64CopySign(double a, double b, TrapReason* trap) {
317 16821 : return copysign(a, b);
318 : }
319 :
320 : inline int32_t ExecuteI32AsmjsDivS(int32_t a, int32_t b, TrapReason* trap) {
321 3815 : if (b == 0) return 0;
322 3416 : if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
323 : return std::numeric_limits<int32_t>::min();
324 : }
325 3402 : return a / b;
326 : }
327 :
328 : inline uint32_t ExecuteI32AsmjsDivU(uint32_t a, uint32_t b, TrapReason* trap) {
329 35 : if (b == 0) return 0;
330 14 : return a / b;
331 : }
332 :
333 : inline int32_t ExecuteI32AsmjsRemS(int32_t a, int32_t b, TrapReason* trap) {
334 3815 : if (b == 0) return 0;
335 3416 : if (b == -1) return 0;
336 3031 : return a % b;
337 : }
338 :
339 : inline uint32_t ExecuteI32AsmjsRemU(uint32_t a, uint32_t b, TrapReason* trap) {
340 35 : if (b == 0) return 0;
341 14 : return a % b;
342 : }
343 :
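// Note: unlike the trapping wasm variants above, the I32Asmjs* helpers below
// follow asm.js semantics and never set *trap: division or remainder by zero
// yields 0, and INT32_MIN / -1 yields INT32_MIN (the wrapped result) instead
// of trapping with kTrapDivUnrepresentable.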
344 : inline int32_t ExecuteI32AsmjsSConvertF32(float a, TrapReason* trap) {
345 805 : return DoubleToInt32(a);
346 : }
347 :
348 : inline uint32_t ExecuteI32AsmjsUConvertF32(float a, TrapReason* trap) {
349 805 : return DoubleToUint32(a);
350 : }
351 :
352 : inline int32_t ExecuteI32AsmjsSConvertF64(double a, TrapReason* trap) {
353 343 : return DoubleToInt32(a);
354 : }
355 :
356 : inline uint32_t ExecuteI32AsmjsUConvertF64(double a, TrapReason* trap) {
357 : return DoubleToUint32(a);
358 : }
359 :
360 : int32_t ExecuteI32Clz(uint32_t val, TrapReason* trap) {
361 462 : return base::bits::CountLeadingZeros32(val);
362 : }
363 :
364 : uint32_t ExecuteI32Ctz(uint32_t val, TrapReason* trap) {
365 : return base::bits::CountTrailingZeros32(val);
366 : }
367 :
368 : uint32_t ExecuteI32Popcnt(uint32_t val, TrapReason* trap) {
369 70 : return word32_popcnt_wrapper(&val);
370 : }
371 :
372 : inline uint32_t ExecuteI32Eqz(uint32_t val, TrapReason* trap) {
373 868 : return val == 0 ? 1 : 0;
374 : }
375 :
376 : int64_t ExecuteI64Clz(uint64_t val, TrapReason* trap) {
377 455 : return base::bits::CountLeadingZeros64(val);
378 : }
379 :
380 : inline uint64_t ExecuteI64Ctz(uint64_t val, TrapReason* trap) {
381 455 : return base::bits::CountTrailingZeros64(val);
382 : }
383 :
384 : inline int64_t ExecuteI64Popcnt(uint64_t val, TrapReason* trap) {
385 70 : return word64_popcnt_wrapper(&val);
386 : }
387 :
388 : inline int32_t ExecuteI64Eqz(uint64_t val, TrapReason* trap) {
389 553 : return val == 0 ? 1 : 0;
390 : }
391 :
392 : inline float ExecuteF32Abs(float a, TrapReason* trap) {
393 28 : return bit_cast<float>(bit_cast<uint32_t>(a) & 0x7fffffff);
394 : }
395 :
396 : inline float ExecuteF32Neg(float a, TrapReason* trap) {
397 819 : return bit_cast<float>(bit_cast<uint32_t>(a) ^ 0x80000000);
398 : }
399 :
400 805 : inline float ExecuteF32Ceil(float a, TrapReason* trap) { return ceilf(a); }
401 :
402 805 : inline float ExecuteF32Floor(float a, TrapReason* trap) { return floorf(a); }
403 :
404 805 : inline float ExecuteF32Trunc(float a, TrapReason* trap) { return truncf(a); }
405 :
406 : inline float ExecuteF32NearestInt(float a, TrapReason* trap) {
407 805 : return nearbyintf(a);
408 : }
409 :
410 : inline float ExecuteF32Sqrt(float a, TrapReason* trap) {
411 14 : float result = sqrtf(a);
412 : return result;
413 : }
414 :
415 : inline double ExecuteF64Abs(double a, TrapReason* trap) {
416 28 : return bit_cast<double>(bit_cast<uint64_t>(a) & 0x7fffffffffffffff);
417 : }
418 :
419 : inline double ExecuteF64Neg(double a, TrapReason* trap) {
420 357 : return bit_cast<double>(bit_cast<uint64_t>(a) ^ 0x8000000000000000);
421 : }
422 :
423 343 : inline double ExecuteF64Ceil(double a, TrapReason* trap) { return ceil(a); }
424 :
425 343 : inline double ExecuteF64Floor(double a, TrapReason* trap) { return floor(a); }
426 :
427 343 : inline double ExecuteF64Trunc(double a, TrapReason* trap) { return trunc(a); }
428 :
429 : inline double ExecuteF64NearestInt(double a, TrapReason* trap) {
430 343 : return nearbyint(a);
431 : }
432 :
433 14 : inline double ExecuteF64Sqrt(double a, TrapReason* trap) { return sqrt(a); }
434 :
435 : int32_t ExecuteI32SConvertF32(float a, TrapReason* trap) {
436 : // The upper bound is (INT32_MAX + 1), which is the lowest float-representable
437 : // number above INT32_MAX which cannot be represented as int32.
438 : float upper_bound = 2147483648.0f;
439 : // We use INT32_MIN as a lower bound because (INT32_MIN - 1) is not
440 : // representable as float, and no number between (INT32_MIN - 1) and INT32_MIN
441 : // is.
442 : float lower_bound = static_cast<float>(INT32_MIN);
443 924 : if (a < upper_bound && a >= lower_bound) {
444 616 : return static_cast<int32_t>(a);
445 : }
446 : *trap = kTrapFloatUnrepresentable;
447 : return 0;
448 : }
449 :
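// Standalone illustration (not part of this file) of why the bounds above are
// asymmetric: -2^31 (INT32_MIN) is exactly representable as a float, so the
// lower check can use '>=', while INT32_MAX rounds up to 2147483648.0f (2^31)
// when converted to float, so the upper check must be a strict '<' against
// that value.
#include <cstdint>
#include <limits>

static_assert(static_cast<float>(std::numeric_limits<int32_t>::min()) ==
                  -2147483648.0f,
              "INT32_MIN (-2^31) is exactly representable as a float");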
450 : int32_t ExecuteI32SConvertF64(double a, TrapReason* trap) {
451 : // The upper bound is (INT32_MAX + 1), which is the lowest double-
452 : // representable number above INT32_MAX which cannot be represented as int32.
453 : double upper_bound = 2147483648.0;
454 : // The lower bound is (INT32_MIN - 1), which is the greatest double-
455 : // representable number below INT32_MIN which cannot be represented as int32.
456 : double lower_bound = -2147483649.0;
457 462 : if (a < upper_bound && a > lower_bound) {
458 350 : return static_cast<int32_t>(a);
459 : }
460 : *trap = kTrapFloatUnrepresentable;
461 : return 0;
462 : }
463 :
464 : uint32_t ExecuteI32UConvertF32(float a, TrapReason* trap) {
465 : // The upper bound is (UINT32_MAX + 1), which is the lowest
466 : // float-representable number above UINT32_MAX which cannot be represented as
467 : // uint32.
468 : double upper_bound = 4294967296.0;
469 : double lower_bound = -1.0;
470 805 : if (a < upper_bound && a > lower_bound) {
471 322 : return static_cast<uint32_t>(a);
472 : }
473 : *trap = kTrapFloatUnrepresentable;
474 : return 0;
475 : }
476 :
477 : uint32_t ExecuteI32UConvertF64(double a, TrapReason* trap) {
478 : // The upper bound is (UINT32_MAX + 1), which is the lowest
479 : // double-representable number above UINT32_MAX which cannot be represented as
480 : // uint32.
481 : double upper_bound = 4294967296.0;
482 : double lower_bound = -1.0;
483 343 : if (a < upper_bound && a > lower_bound) {
484 189 : return static_cast<uint32_t>(a);
485 : }
486 : *trap = kTrapFloatUnrepresentable;
487 : return 0;
488 : }
489 :
490 : inline uint32_t ExecuteI32ConvertI64(int64_t a, TrapReason* trap) {
491 249802 : return static_cast<uint32_t>(a & 0xFFFFFFFF);
492 : }
493 :
494 : int64_t ExecuteI64SConvertF32(float a, TrapReason* trap) {
495 : int64_t output;
496 1610 : if (!float32_to_int64_wrapper(&a, &output)) {
497 : *trap = kTrapFloatUnrepresentable;
498 : }
499 1610 : return output;
500 : }
501 :
502 : int64_t ExecuteI64SConvertF64(double a, TrapReason* trap) {
503 : int64_t output;
504 1240 : if (!float64_to_int64_wrapper(&a, &output)) {
505 : *trap = kTrapFloatUnrepresentable;
506 : }
507 1240 : return output;
508 : }
509 :
510 : uint64_t ExecuteI64UConvertF32(float a, TrapReason* trap) {
511 : uint64_t output;
512 1610 : if (!float32_to_uint64_wrapper(&a, &output)) {
513 : *trap = kTrapFloatUnrepresentable;
514 : }
515 1610 : return output;
516 : }
517 :
518 : uint64_t ExecuteI64UConvertF64(double a, TrapReason* trap) {
519 : uint64_t output;
520 686 : if (!float64_to_uint64_wrapper(&a, &output)) {
521 : *trap = kTrapFloatUnrepresentable;
522 : }
523 686 : return output;
524 : }
525 :
526 : inline int64_t ExecuteI64SConvertI32(int32_t a, TrapReason* trap) {
527 406 : return static_cast<int64_t>(a);
528 : }
529 :
530 : inline int64_t ExecuteI64UConvertI32(uint32_t a, TrapReason* trap) {
531 378 : return static_cast<uint64_t>(a);
532 : }
533 :
534 : inline float ExecuteF32SConvertI32(int32_t a, TrapReason* trap) {
535 35 : return static_cast<float>(a);
536 : }
537 :
538 : inline float ExecuteF32UConvertI32(uint32_t a, TrapReason* trap) {
539 15 : return static_cast<float>(a);
540 : }
541 :
542 : inline float ExecuteF32SConvertI64(int64_t a, TrapReason* trap) {
543 : float output;
544 539 : int64_to_float32_wrapper(&a, &output);
545 539 : return output;
546 : }
547 :
548 : inline float ExecuteF32UConvertI64(uint64_t a, TrapReason* trap) {
549 : float output;
550 532 : uint64_to_float32_wrapper(&a, &output);
551 532 : return output;
552 : }
553 :
554 : inline float ExecuteF32ConvertF64(double a, TrapReason* trap) {
555 15 : return static_cast<float>(a);
556 : }
557 :
558 : inline float ExecuteF32ReinterpretI32(int32_t a, TrapReason* trap) {
559 : return bit_cast<float>(a);
560 : }
561 :
562 : inline double ExecuteF64SConvertI32(int32_t a, TrapReason* trap) {
563 1638 : return static_cast<double>(a);
564 : }
565 :
566 : inline double ExecuteF64UConvertI32(uint32_t a, TrapReason* trap) {
567 0 : return static_cast<double>(a);
568 : }
569 :
570 : inline double ExecuteF64SConvertI64(int64_t a, TrapReason* trap) {
571 : double output;
572 23676 : int64_to_float64_wrapper(&a, &output);
573 23676 : return output;
574 : }
575 :
576 : inline double ExecuteF64UConvertI64(uint64_t a, TrapReason* trap) {
577 : double output;
578 525 : uint64_to_float64_wrapper(&a, &output);
579 525 : return output;
580 : }
581 :
582 : inline double ExecuteF64ConvertF32(float a, TrapReason* trap) {
583 2445 : return static_cast<double>(a);
584 : }
585 :
586 : inline double ExecuteF64ReinterpretI64(int64_t a, TrapReason* trap) {
587 : return bit_cast<double>(a);
588 : }
589 :
590 : inline int32_t ExecuteI32ReinterpretF32(WasmVal a) {
591 : return a.to_unchecked<int32_t>();
592 : }
593 :
594 : inline int64_t ExecuteI64ReinterpretF64(WasmVal a) {
595 : return a.to_unchecked<int64_t>();
596 : }
597 :
598 95 : inline int32_t ExecuteGrowMemory(uint32_t delta_pages,
599 : MaybeHandle<WasmInstanceObject> instance_obj,
600 : WasmInstance* instance) {
601 : DCHECK_EQ(0, instance->mem_size % WasmModule::kPageSize);
602 95 : uint32_t old_pages = instance->mem_size / WasmModule::kPageSize;
603 :
604 : // If an instance is set, execute GrowMemory on the instance. This will also
605 : // update the WasmInstance struct used here.
606 95 : if (!instance_obj.is_null()) {
607 : Isolate* isolate = instance_obj.ToHandleChecked()->GetIsolate();
608 : int32_t ret = WasmInstanceObject::GrowMemory(
609 60 : isolate, instance_obj.ToHandleChecked(), delta_pages);
610 : // Some sanity checks.
611 : DCHECK_EQ(ret == -1 ? old_pages : old_pages + delta_pages,
612 : instance->mem_size / WasmModule::kPageSize);
613 : DCHECK(ret == -1 || static_cast<uint32_t>(ret) == old_pages);
614 60 : return ret;
615 : }
616 :
617 : // TODO(ahaas): Move memory allocation to wasm-module.cc for better
618 : // encapsulation.
619 56 : if (delta_pages > FLAG_wasm_max_mem_pages ||
620 21 : delta_pages > instance->module->max_mem_pages) {
621 : return -1;
622 : }
623 :
624 7 : uint32_t new_pages = old_pages + delta_pages;
625 7 : if (new_pages > FLAG_wasm_max_mem_pages ||
626 : new_pages > instance->module->max_mem_pages) {
627 : return -1;
628 : }
629 :
630 : byte* new_mem_start;
631 7 : if (instance->mem_size == 0) {
632 : // TODO(gdeepti): Fix bounds check to take into account size of memtype.
633 : new_mem_start = static_cast<byte*>(
634 0 : calloc(new_pages * WasmModule::kPageSize, sizeof(byte)));
635 0 : if (!new_mem_start) return -1;
636 : } else {
637 : DCHECK_NOT_NULL(instance->mem_start);
638 7 : if (EnableGuardRegions()) {
639 : v8::base::OS::Unprotect(instance->mem_start,
640 1 : new_pages * WasmModule::kPageSize);
641 1 : new_mem_start = instance->mem_start;
642 : } else {
643 : new_mem_start = static_cast<byte*>(
644 6 : realloc(instance->mem_start, new_pages * WasmModule::kPageSize));
645 6 : if (!new_mem_start) return -1;
646 : }
647 : // Zero initializing uninitialized memory from realloc
648 14 : memset(new_mem_start + old_pages * WasmModule::kPageSize, 0,
649 14 : delta_pages * WasmModule::kPageSize);
650 : }
651 7 : instance->mem_start = new_mem_start;
652 7 : instance->mem_size = new_pages * WasmModule::kPageSize;
653 7 : return static_cast<int32_t>(old_pages);
654 : }
655 :
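// A minimal standalone model (illustration only, not V8 code) of the
// grow_memory contract implemented above: memory is sized in 64 KiB pages,
// the return value is the previous page count, and -1 signals failure.
// SketchGrowMemory, kSketchPageSize and 'max_pages' are hypothetical names;
// 'max_pages' stands in for both FLAG_wasm_max_mem_pages and the module's
// max_mem_pages limit, and the actual allocation is elided.
#include <cstdint>

constexpr uint32_t kSketchPageSize = 0x10000;  // 64 KiB, as WasmModule::kPageSize

int32_t SketchGrowMemory(uint32_t* mem_size_bytes, uint32_t delta_pages,
                         uint32_t max_pages) {
  uint32_t old_pages = *mem_size_bytes / kSketchPageSize;
  uint32_t new_pages = old_pages + delta_pages;
  if (delta_pages > max_pages || new_pages > max_pages) return -1;
  *mem_size_bytes = new_pages * kSketchPageSize;
  return static_cast<int32_t>(old_pages);  // grow_memory returns the old size
}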
656 : enum InternalOpcode {
657 : #define DECL_INTERNAL_ENUM(name, value) kInternal##name = value,
658 : FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_ENUM)
659 : #undef DECL_INTERNAL_ENUM
660 : };
661 :
662 : const char* OpcodeName(uint32_t val) {
663 0 : switch (val) {
664 : #define DECL_INTERNAL_CASE(name, value) \
665 : case kInternal##name: \
666 : return "Internal" #name;
667 : FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_CASE)
668 : #undef DECL_INTERNAL_CASE
669 : }
670 0 : return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(val));
671 : }
672 :
673 : // Unwrap a wasm-to-js wrapper and return the callable heap object.
674 : // If the wrapper would throw a TypeError, return a null handle.
675 1050 : Handle<HeapObject> UnwrapWasmToJSWrapper(Isolate* isolate,
676 : Handle<Code> js_wrapper) {
677 : DCHECK_EQ(Code::WASM_TO_JS_FUNCTION, js_wrapper->kind());
678 : int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
679 3059 : for (RelocIterator it(*js_wrapper, mask); !it.done(); it.next()) {
680 3029 : HeapObject* obj = it.rinfo()->target_object();
681 3029 : if (!obj->IsCallable()) continue;
682 : #ifdef DEBUG
683 : // There should only be this one reference to a callable object.
684 : for (it.next(); !it.done(); it.next()) {
685 : HeapObject* other = it.rinfo()->target_object();
686 : DCHECK(!other->IsCallable());
687 : }
688 : #endif
689 1020 : return handle(obj, isolate);
690 : }
691 : // If we did not find a callable object, then there must be a reference to
692 : // the WasmThrowTypeError runtime function.
693 : // TODO(clemensh): Check that this is the case.
694 : return Handle<HeapObject>::null();
695 : }
696 :
697 : class ControlTransfers;
698 :
699 : // Code and metadata needed to execute a function.
700 48386 : struct InterpreterCode {
701 : const WasmFunction* function; // wasm function
702 : BodyLocalDecls locals; // local declarations
703 : const byte* orig_start; // start of original code
704 : const byte* orig_end; // end of original code
705 : byte* start; // start of (maybe altered) code
706 : byte* end; // end of (maybe altered) code
707 : ControlTransfers* targets; // helper for control flow.
708 :
709 300522 : const byte* at(pc_t pc) { return start + pc; }
710 : };
711 :
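// Sketch (illustration only, not part of this file) of why 'start' below can
// differ from 'orig_start': a breakpoint is set by overwriting a byte in a
// private copy of the body with kInternalBreakpoint (0xFF), while the
// original bytes stay reachable through orig_start so the replaced opcode can
// still be executed when stepping over the breakpoint. PatchInBreakpoint is a
// hypothetical helper.
#include <cstddef>
#include <cstdint>
#include <vector>

std::vector<uint8_t> PatchInBreakpoint(const std::vector<uint8_t>& orig_code,
                                       std::size_t pc) {
  std::vector<uint8_t> patched = orig_code;  // what 'start' would point into
  patched[pc] = 0xFF;                        // kInternalBreakpoint
  return patched;                            // 'orig_start' keeps the opcode
}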
712 : // A helper class to compute the control transfers for each bytecode offset.
713 : // Control transfers allow Br, BrIf, BrTable, If, Else, and End bytecodes to
714 : // be directly executed without the need to dynamically track blocks.
715 : class ControlTransfers : public ZoneObject {
716 : public:
717 : ControlTransferMap map_;
718 :
719 42628 : ControlTransfers(Zone* zone, const WasmModule* module, InterpreterCode* code)
720 : : map_(zone) {
721 : // Create a zone for all temporary objects.
722 21314 : Zone control_transfer_zone(zone->allocator(), ZONE_NAME);
723 :
724 : // Represents a control flow label.
725 : class CLabel : public ZoneObject {
726 : explicit CLabel(Zone* zone, uint32_t target_stack_height, uint32_t arity)
727 : : target(nullptr),
728 : target_stack_height(target_stack_height),
729 : arity(arity),
730 59582 : refs(zone) {}
731 :
732 : public:
733 : struct Ref {
734 : const byte* from_pc;
735 : const uint32_t stack_height;
736 : };
737 : const byte* target;
738 : uint32_t target_stack_height;
739 : const uint32_t arity;
740 : // TODO(clemensh): Fix ZoneAllocator and make this ZoneVector<const Ref>.
741 : ZoneVector<Ref> refs;
742 :
743 59582 : static CLabel* New(Zone* zone, uint32_t stack_height, uint32_t arity) {
744 59582 : return new (zone) CLabel(zone, stack_height, arity);
745 : }
746 :
747 : // Bind this label to the given PC.
748 : void Bind(const byte* pc) {
749 : DCHECK_NULL(target);
750 59582 : target = pc;
751 : }
752 :
753 : // Reference this label from the given location.
754 : void Ref(const byte* from_pc, uint32_t stack_height) {
755 : // Target being bound before a reference means this is a loop.
756 : DCHECK_IMPLIES(target, *target == kExprLoop);
757 61282 : refs.push_back({from_pc, stack_height});
758 : }
759 :
760 59582 : void Finish(ControlTransferMap* map, const byte* start) {
761 : DCHECK_NOT_NULL(target);
762 149805 : for (auto ref : refs) {
763 30641 : size_t offset = static_cast<size_t>(ref.from_pc - start);
764 30641 : auto pcdiff = static_cast<pcdiff_t>(target - ref.from_pc);
765 : DCHECK_GE(ref.stack_height, target_stack_height);
766 : spdiff_t spdiff =
767 30641 : static_cast<spdiff_t>(ref.stack_height - target_stack_height);
768 : TRACE("control transfer @%zu: Δpc %d, stack %u->%u = -%u\n", offset,
769 : pcdiff, ref.stack_height, target_stack_height, spdiff);
770 30641 : ControlTransferEntry& entry = (*map)[offset];
771 30641 : entry.pc_diff = pcdiff;
772 30641 : entry.sp_diff = spdiff;
773 30641 : entry.target_arity = arity;
774 : }
775 59582 : }
776 : };
777 :
778 : // An entry in the control stack.
779 : struct Control {
780 : const byte* pc;
781 : CLabel* end_label;
782 : CLabel* else_label;
783 :
784 59190 : void Finish(ControlTransferMap* map, const byte* start) {
785 59190 : end_label->Finish(map, start);
786 59190 : if (else_label) else_label->Finish(map, start);
787 59190 : }
788 : };
789 :
790 : // Compute the ControlTransfer map.
791 : // This algorithm maintains a stack of control constructs similar to the
792 : // AST decoder. The {control_stack} allows matching {br,br_if,br_table}
793 : // bytecodes with their target, as well as determining whether the current
794 : // bytecodes are within the true or false block of an else.
795 : ZoneVector<Control> control_stack(&control_transfer_zone);
796 : uint32_t stack_height = 0;
797 : uint32_t func_arity =
798 21314 : static_cast<uint32_t>(code->function->sig->return_count());
799 : CLabel* func_label =
800 21314 : CLabel::New(&control_transfer_zone, stack_height, func_arity);
801 42628 : control_stack.push_back({code->orig_start, func_label, nullptr});
802 279229 : for (BytecodeIterator i(code->orig_start, code->orig_end, &code->locals);
803 236601 : i.has_next(); i.next()) {
804 : WasmOpcode opcode = i.current();
805 : auto stack_effect =
806 236601 : StackEffect(module, code->function->sig, i.pc(), i.end());
807 : TRACE("@%u: control %s (sp %d - %d + %d)\n", i.pc_offset(),
808 : WasmOpcodes::OpcodeName(opcode), stack_height, stack_effect.first,
809 : stack_effect.second);
810 : DCHECK_GE(stack_height, stack_effect.first);
811 236601 : stack_height = stack_height - stack_effect.first + stack_effect.second;
812 236601 : switch (opcode) {
813 : case kExprBlock:
814 : case kExprLoop: {
815 : bool loop = opcode == kExprLoop;
816 37484 : BlockTypeOperand<false> operand(&i, i.pc());
817 : TRACE("control @%u: %s, arity %d\n", i.pc_offset(),
818 : loop ? "Loop" : "Block", operand.arity);
819 : CLabel* label =
820 37484 : CLabel::New(&control_transfer_zone, stack_height, operand.arity);
821 74968 : control_stack.push_back({i.pc(), label, nullptr});
822 37484 : if (loop) label->Bind(i.pc());
823 : break;
824 : }
825 : case kExprIf: {
826 : TRACE("control @%u: If\n", i.pc_offset());
827 392 : BlockTypeOperand<false> operand(&i, i.pc());
828 : CLabel* end_label =
829 392 : CLabel::New(&control_transfer_zone, stack_height, operand.arity);
830 : CLabel* else_label =
831 392 : CLabel::New(&control_transfer_zone, stack_height, 0);
832 784 : control_stack.push_back({i.pc(), end_label, else_label});
833 392 : else_label->Ref(i.pc(), stack_height);
834 : break;
835 : }
836 : case kExprElse: {
837 : Control* c = &control_stack.back();
838 : TRACE("control @%u: Else\n", i.pc_offset());
839 166 : c->end_label->Ref(i.pc(), stack_height);
840 : DCHECK_NOT_NULL(c->else_label);
841 166 : c->else_label->Bind(i.pc() + 1);
842 166 : c->else_label->Finish(&map_, code->orig_start);
843 166 : c->else_label = nullptr;
844 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
845 166 : stack_height = c->end_label->target_stack_height;
846 166 : break;
847 : }
848 : case kExprEnd: {
849 59190 : Control* c = &control_stack.back();
850 : TRACE("control @%u: End\n", i.pc_offset());
851 : // Only loops have bound labels.
852 : DCHECK_IMPLIES(c->end_label->target, *c->pc == kExprLoop);
853 59190 : if (!c->end_label->target) {
854 58984 : if (c->else_label) c->else_label->Bind(i.pc());
855 58984 : c->end_label->Bind(i.pc() + 1);
856 : }
857 118380 : c->Finish(&map_, code->orig_start);
858 : DCHECK_GE(stack_height, c->end_label->target_stack_height);
859 : stack_height =
860 59190 : c->end_label->target_stack_height + c->end_label->arity;
861 : control_stack.pop_back();
862 : break;
863 : }
864 : case kExprBr: {
865 647 : BreakDepthOperand<false> operand(&i, i.pc());
866 : TRACE("control @%u: Br[depth=%u]\n", i.pc_offset(), operand.depth);
867 1294 : Control* c = &control_stack[control_stack.size() - operand.depth - 1];
868 647 : c->end_label->Ref(i.pc(), stack_height);
869 : break;
870 : }
871 : case kExprBrIf: {
872 147 : BreakDepthOperand<false> operand(&i, i.pc());
873 : TRACE("control @%u: BrIf[depth=%u]\n", i.pc_offset(), operand.depth);
874 294 : Control* c = &control_stack[control_stack.size() - operand.depth - 1];
875 147 : c->end_label->Ref(i.pc(), stack_height);
876 : break;
877 : }
878 : case kExprBrTable: {
879 7355 : BranchTableOperand<false> operand(&i, i.pc());
880 : BranchTableIterator<false> iterator(&i, operand);
881 : TRACE("control @%u: BrTable[count=%u]\n", i.pc_offset(),
882 : operand.table_count);
883 36644 : while (iterator.has_next()) {
884 : uint32_t j = iterator.cur_index();
885 : uint32_t target = iterator.next();
886 58578 : Control* c = &control_stack[control_stack.size() - target - 1];
887 29289 : c->end_label->Ref(i.pc() + j, stack_height);
888 : }
889 : break;
890 : }
891 : default: {
892 : break;
893 : }
894 : }
895 : }
896 : DCHECK_EQ(0, control_stack.size());
897 21314 : DCHECK_EQ(func_arity, stack_height);
898 21314 : }
899 :
900 : ControlTransferEntry& Lookup(pc_t from) {
901 : auto result = map_.find(from);
902 : DCHECK(result != map_.end());
903 : return result->second;
904 : }
905 : };
906 :
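// Worked example (illustration only) of how an entry in the resulting
// ControlTransferMap is consumed by DoBreak()/DoStackTransfer() below: for
// {pc_diff, sp_diff, target_arity}, the interpreter advances the pc by
// pc_diff, keeps the top target_arity values (the branch's results), and
// drops the (sp_diff - target_arity) values underneath them. For instance,
// with a value stack [a b c r], an entry {pc_diff = +7, sp_diff = 2,
// target_arity = 1} leaves the stack as [a b r] and jumps 7 bytes forward,
// to just past the matching 'end'.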
907 : struct ExternalCallResult {
908 : enum Type {
909 : // The function should be executed inside this interpreter.
910 : INTERNAL,
911 : // For indirect calls: Table or function does not exist.
912 : INVALID_FUNC,
913 : // For indirect calls: Signature does not match expected signature.
914 : SIGNATURE_MISMATCH,
915 : // The function was executed and returned normally.
916 : EXTERNAL_RETURNED,
917 : // The function was executed, threw an exception, and the stack was unwound.
918 : EXTERNAL_UNWOUND
919 : };
920 : Type type;
921 : // If type is INTERNAL, this field holds the function to call internally.
922 : InterpreterCode* interpreter_code;
923 :
924 : ExternalCallResult(Type type) : type(type) { // NOLINT
925 : DCHECK_NE(INTERNAL, type);
926 : }
927 : ExternalCallResult(Type type, InterpreterCode* code)
928 : : type(type), interpreter_code(code) {
929 : DCHECK_EQ(INTERNAL, type);
930 : }
931 : };
932 :
933 : // The main storage for interpreter code. It maps {WasmFunction} to the
934 : // metadata needed to execute each function.
935 : class CodeMap {
936 : Zone* zone_;
937 : const WasmModule* module_;
938 : ZoneVector<InterpreterCode> interpreter_code_;
939 : // Global handle to the wasm instance.
940 : Handle<WasmInstanceObject> instance_;
941 : // Global handle to array of unwrapped imports.
942 : Handle<FixedArray> imported_functions_;
943 : // Map from WASM_TO_JS wrappers to unwrapped imports (indexes into
944 : // imported_functions_).
945 : IdentityMap<int, ZoneAllocationPolicy> unwrapped_imports_;
946 :
947 : public:
948 19314 : CodeMap(Isolate* isolate, const WasmModule* module,
949 : const uint8_t* module_start, Zone* zone)
950 : : zone_(zone),
951 : module_(module),
952 : interpreter_code_(zone),
953 38628 : unwrapped_imports_(isolate->heap(), ZoneAllocationPolicy(zone)) {
954 38628 : if (module == nullptr) return;
955 38628 : interpreter_code_.reserve(module->functions.size());
956 41133 : for (const WasmFunction& function : module->functions) {
957 2505 : if (function.imported) {
958 : DCHECK_EQ(function.code_start_offset, function.code_end_offset);
959 1035 : AddFunction(&function, nullptr, nullptr);
960 : } else {
961 1470 : const byte* code_start = module_start + function.code_start_offset;
962 1470 : const byte* code_end = module_start + function.code_end_offset;
963 1470 : AddFunction(&function, code_start, code_end);
964 : }
965 : }
966 : }
967 :
968 19296 : ~CodeMap() {
969 : // Destroy the global handles.
970 : // Cast the location, not the handle, because the handle cast might access
971 : // the object behind the handle.
972 19296 : GlobalHandles::Destroy(reinterpret_cast<Object**>(instance_.location()));
973 : GlobalHandles::Destroy(
974 19296 : reinterpret_cast<Object**>(imported_functions_.location()));
975 19296 : }
976 :
977 : const WasmModule* module() const { return module_; }
978 : bool has_instance() const { return !instance_.is_null(); }
979 : Handle<WasmInstanceObject> instance() const {
980 : DCHECK(has_instance());
981 : return instance_;
982 : }
983 : MaybeHandle<WasmInstanceObject> maybe_instance() const {
984 95 : return has_instance() ? instance_ : MaybeHandle<WasmInstanceObject>();
985 : }
986 :
987 1153 : void SetInstanceObject(WasmInstanceObject* instance) {
988 : // Only set the instance once (otherwise we have to destroy the global
989 : // handle first).
990 : DCHECK(instance_.is_null());
991 : DCHECK_EQ(instance->module(), module_);
992 1153 : instance_ = instance->GetIsolate()->global_handles()->Create(instance);
993 1153 : }
994 :
995 9300 : Code* GetImportedFunction(uint32_t function_index) {
996 : DCHECK(!instance_.is_null());
997 : DCHECK_GT(module_->num_imported_functions, function_index);
998 9300 : FixedArray* code_table = instance_->compiled_module()->ptr_to_code_table();
999 18600 : return Code::cast(code_table->get(static_cast<int>(function_index)));
1000 : }
1001 :
1002 : InterpreterCode* GetCode(const WasmFunction* function) {
1003 : InterpreterCode* code = GetCode(function->func_index);
1004 : DCHECK_EQ(function, code->function);
1005 : return code;
1006 : }
1007 :
1008 : InterpreterCode* GetCode(uint32_t function_index) {
1009 : DCHECK_LT(function_index, interpreter_code_.size());
1010 7874407 : return Preprocess(&interpreter_code_[function_index]);
1011 : }
1012 :
1013 182 : InterpreterCode* GetIndirectCode(uint32_t table_index, uint32_t entry_index) {
1014 364 : if (table_index >= module_->function_tables.size()) return nullptr;
1015 : const WasmIndirectFunctionTable* table =
1016 : &module_->function_tables[table_index];
1017 364 : if (entry_index >= table->values.size()) return nullptr;
1018 119 : uint32_t index = table->values[entry_index];
1019 238 : if (index >= interpreter_code_.size()) return nullptr;
1020 119 : return GetCode(index);
1021 : }
1022 :
1023 3957180 : InterpreterCode* Preprocess(InterpreterCode* code) {
1024 : DCHECK_EQ(code->function->imported, code->start == nullptr);
1025 3957180 : if (code->targets == nullptr && code->start != nullptr) {
1026 : // Compute the control targets map and the local declarations.
1027 42570 : code->targets = new (zone_) ControlTransfers(zone_, module_, code);
1028 : }
1029 3957180 : return code;
1030 : }
1031 :
1032 22422 : void AddFunction(const WasmFunction* function, const byte* code_start,
1033 : const byte* code_end) {
1034 : InterpreterCode code = {
1035 : function, BodyLocalDecls(zone_), code_start,
1036 : code_end, const_cast<byte*>(code_start), const_cast<byte*>(code_end),
1037 44844 : nullptr};
1038 :
1039 : DCHECK_EQ(interpreter_code_.size(), function->func_index);
1040 22422 : interpreter_code_.push_back(code);
1041 22422 : }
1042 :
1043 19917 : void SetFunctionCode(const WasmFunction* function, const byte* start,
1044 : const byte* end) {
1045 : DCHECK_LT(function->func_index, interpreter_code_.size());
1046 19917 : InterpreterCode* code = &interpreter_code_[function->func_index];
1047 : DCHECK_EQ(function, code->function);
1048 19917 : code->targets = nullptr;
1049 19917 : code->orig_start = start;
1050 19917 : code->orig_end = end;
1051 19917 : code->start = const_cast<byte*>(start);
1052 19917 : code->end = const_cast<byte*>(end);
1053 19917 : Preprocess(code);
1054 19917 : }
1055 :
1056 : // Returns a callable object if the imported function has a JS-compatible
1057 : // signature, or a null handle otherwise.
1058 10320 : Handle<HeapObject> GetCallableObjectForJSImport(Isolate* isolate,
1059 : Handle<Code> code) {
1060 : DCHECK_EQ(Code::WASM_TO_JS_FUNCTION, code->kind());
1061 : int* unwrapped_index = unwrapped_imports_.Find(code);
1062 9330 : if (unwrapped_index) {
1063 : return handle(
1064 : HeapObject::cast(imported_functions_->get(*unwrapped_index)),
1065 8280 : isolate);
1066 : }
1067 1050 : Handle<HeapObject> called_obj = UnwrapWasmToJSWrapper(isolate, code);
1068 1050 : if (!called_obj.is_null()) {
1069 : // Cache the unwrapped callable object.
1070 1020 : if (imported_functions_.is_null()) {
1071 : // This is the first call to an imported function. Allocate the
1072 : // FixedArray to cache unwrapped objects.
1073 : constexpr int kInitialCacheSize = 8;
1074 : Handle<FixedArray> new_imported_functions =
1075 990 : isolate->factory()->NewFixedArray(kInitialCacheSize, TENURED);
1076 : // First entry: Number of occupied slots.
1077 : new_imported_functions->set(0, Smi::kZero);
1078 : imported_functions_ =
1079 990 : isolate->global_handles()->Create(*new_imported_functions);
1080 : }
1081 1020 : int this_idx = Smi::cast(imported_functions_->get(0))->value() + 1;
1082 1020 : if (this_idx == imported_functions_->length()) {
1083 : Handle<FixedArray> new_imported_functions =
1084 : isolate->factory()->CopyFixedArrayAndGrow(imported_functions_,
1085 0 : this_idx / 2, TENURED);
1086 : // Update the existing global handle:
1087 0 : *imported_functions_.location() = *new_imported_functions;
1088 : }
1089 : DCHECK_GT(imported_functions_->length(), this_idx);
1090 : DCHECK(imported_functions_->get(this_idx)->IsUndefined(isolate));
1091 : imported_functions_->set(0, Smi::FromInt(this_idx));
1092 1020 : imported_functions_->set(this_idx, *called_obj);
1093 : unwrapped_imports_.Set(code, this_idx);
1094 : }
1095 1050 : return called_obj;
1096 : }
1097 : };
1098 :
1099 9210 : Handle<Object> WasmValToNumber(Factory* factory, WasmVal val,
1100 : wasm::ValueType type) {
1101 9210 : switch (type) {
1102 : case kWasmI32:
1103 9195 : return factory->NewNumberFromInt(val.to<int32_t>());
1104 : case kWasmI64:
1105 : // wasm->js and js->wasm is illegal for i64 type.
1106 0 : UNREACHABLE();
1107 : return Handle<Object>::null();
1108 : case kWasmF32:
1109 0 : return factory->NewNumber(val.to<float>());
1110 : case kWasmF64:
1111 15 : return factory->NewNumber(val.to<double>());
1112 : default:
1113 : // TODO(wasm): Implement simd.
1114 0 : UNIMPLEMENTED();
1115 : return Handle<Object>::null();
1116 : }
1117 : }
1118 :
1119 : // Convert JS value to WebAssembly, spec here:
1120 : // https://github.com/WebAssembly/design/blob/master/JS.md#towebassemblyvalue
1121 9060 : WasmVal ToWebAssemblyValue(Isolate* isolate, Handle<Object> value,
1122 : wasm::ValueType type) {
1123 9060 : switch (type) {
1124 : case kWasmI32: {
1125 : MaybeHandle<Object> maybe_i32 = Object::ToInt32(isolate, value);
1126 : // TODO(clemensh): Handle failure here (unwind).
1127 : int32_t value;
1128 9045 : CHECK(maybe_i32.ToHandleChecked()->ToInt32(&value));
1129 9045 : return WasmVal(value);
1130 : }
1131 : case kWasmI64:
1132 : // If the signature contains i64, a type error was thrown before.
1133 0 : UNREACHABLE();
1134 : case kWasmF32: {
1135 15 : MaybeHandle<Object> maybe_number = Object::ToNumber(value);
1136 : // TODO(clemensh): Handle failure here (unwind).
1137 : return WasmVal(
1138 15 : static_cast<float>(maybe_number.ToHandleChecked()->Number()));
1139 : }
1140 : case kWasmF64: {
1141 0 : MaybeHandle<Object> maybe_number = Object::ToNumber(value);
1142 : // TODO(clemensh): Handle failure here (unwind).
1143 0 : return WasmVal(maybe_number.ToHandleChecked()->Number());
1144 : }
1145 : default:
1146 : // TODO(wasm): Handle simd.
1147 0 : UNIMPLEMENTED();
1148 : return WasmVal();
1149 : }
1150 : }
1151 :
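// Note (illustration only): for an i32 parameter, the ECMAScript ToInt32
// coercion used above truncates and wraps modulo 2^32, so a JS argument of
// 3.7 arrives as 3 and 4294967301 (2^32 + 5) arrives as 5; f32/f64 parameters
// go through ToNumber, and i64 parameters are rejected with a TypeError
// before this function is ever reached.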
1152 : // Responsible for executing code directly.
1153 0 : class ThreadImpl {
1154 : struct Activation {
1155 : uint32_t fp;
1156 : uint32_t sp;
1157 51439 : Activation(uint32_t fp, uint32_t sp) : fp(fp), sp(sp) {}
1158 : };
1159 :
1160 : public:
1161 : ThreadImpl(Zone* zone, CodeMap* codemap, WasmInstance* instance)
1162 : : codemap_(codemap),
1163 : instance_(instance),
1164 : stack_(zone),
1165 : frames_(zone),
1166 38628 : activations_(zone) {}
1167 :
1168 : //==========================================================================
1169 : // Implementation of public interface for WasmInterpreter::Thread.
1170 : //==========================================================================
1171 :
1172 : WasmInterpreter::State state() { return state_; }
1173 :
1174 2781254 : void InitFrame(const WasmFunction* function, WasmVal* args) {
1175 : DCHECK_EQ(current_activation().fp, frames_.size());
1176 : InterpreterCode* code = codemap()->GetCode(function);
1177 8306891 : for (size_t i = 0; i < function->sig->parameter_count(); ++i) {
1178 5525637 : stack_.push_back(args[i]);
1179 : }
1180 2781254 : PushFrame(code);
1181 2781254 : }
1182 :
1183 0 : WasmInterpreter::State Run(int num_steps = -1) {
1184 : DCHECK(state_ == WasmInterpreter::STOPPED ||
1185 : state_ == WasmInterpreter::PAUSED);
1186 : DCHECK(num_steps == -1 || num_steps > 0);
1187 : if (num_steps == -1) {
1188 : TRACE(" => Run()\n");
1189 : } else if (num_steps == 1) {
1190 : TRACE(" => Step()\n");
1191 : } else {
1192 : TRACE(" => Run(%d)\n", num_steps);
1193 : }
1194 2789533 : state_ = WasmInterpreter::RUNNING;
1195 2789533 : Execute(frames_.back().code, frames_.back().pc, num_steps);
1196 : // If state_ is STOPPED, the current activation must be fully unwound.
1197 : DCHECK_IMPLIES(state_ == WasmInterpreter::STOPPED,
1198 : current_activation().fp == frames_.size());
1199 2789533 : return state_;
1200 : }
1201 :
1202 0 : void Pause() { UNIMPLEMENTED(); }
1203 :
1204 : void Reset() {
1205 : TRACE("----- RESET -----\n");
1206 : stack_.clear();
1207 : frames_.clear();
1208 2729815 : state_ = WasmInterpreter::STOPPED;
1209 2729815 : trap_reason_ = kTrapCount;
1210 2729815 : possible_nondeterminism_ = false;
1211 : }
1212 :
1213 : int GetFrameCount() {
1214 : DCHECK_GE(kMaxInt, frames_.size());
1215 8884 : return static_cast<int>(frames_.size());
1216 : }
1217 :
1218 : WasmVal GetReturnValue(uint32_t index) {
1219 2768164 : if (state_ == WasmInterpreter::TRAPPED) return WasmVal(0xdeadbeef);
1220 : DCHECK_EQ(WasmInterpreter::FINISHED, state_);
1221 : Activation act = current_activation();
1222 : // Current activation must be finished.
1223 : DCHECK_EQ(act.fp, frames_.size());
1224 : DCHECK_GT(stack_.size(), act.sp + index);
1225 5536328 : return stack_[act.sp + index];
1226 : }
1227 :
1228 : WasmVal GetStackValue(uint32_t index) {
1229 : DCHECK_GT(stack_.size(), index);
1230 1316 : return stack_[index];
1231 : }
1232 :
1233 : TrapReason GetTrapReason() { return trap_reason_; }
1234 :
1235 : pc_t GetBreakpointPc() { return break_pc_; }
1236 :
1237 : bool PossibleNondeterminism() { return possible_nondeterminism_; }
1238 :
1239 : uint64_t NumInterpretedCalls() { return num_interpreted_calls_; }
1240 :
1241 31 : void AddBreakFlags(uint8_t flags) { break_flags_ |= flags; }
1242 :
1243 0 : void ClearBreakFlags() { break_flags_ = WasmInterpreter::BreakFlag::None; }
1244 :
1245 : uint32_t NumActivations() {
1246 84 : return static_cast<uint32_t>(activations_.size());
1247 : }
1248 :
1249 51439 : uint32_t StartActivation() {
1250 : TRACE("----- START ACTIVATION %zu -----\n", activations_.size());
1251 : // If you use activations, use them consistently:
1252 : DCHECK_IMPLIES(activations_.empty(), frames_.empty());
1253 : DCHECK_IMPLIES(activations_.empty(), stack_.empty());
1254 102878 : uint32_t activation_id = static_cast<uint32_t>(activations_.size());
1255 51439 : activations_.emplace_back(static_cast<uint32_t>(frames_.size()),
1256 154317 : static_cast<uint32_t>(stack_.size()));
1257 51439 : state_ = WasmInterpreter::STOPPED;
1258 51439 : return activation_id;
1259 : }
1260 :
1261 : void FinishActivation(uint32_t id) {
1262 : TRACE("----- FINISH ACTIVATION %zu -----\n", activations_.size() - 1);
1263 : DCHECK_LT(0, activations_.size());
1264 : DCHECK_EQ(activations_.size() - 1, id);
1265 : // Stack height must match the start of this activation (otherwise unwind
1266 : // first).
1267 : DCHECK_EQ(activations_.back().fp, frames_.size());
1268 : DCHECK_LE(activations_.back().sp, stack_.size());
1269 51439 : stack_.resize(activations_.back().sp);
1270 : activations_.pop_back();
1271 : }
1272 :
1273 : uint32_t ActivationFrameBase(uint32_t id) {
1274 : DCHECK_GT(activations_.size(), id);
1275 7980 : return activations_[id].fp;
1276 : }
1277 :
1278 : // Handle a thrown exception. Returns whether the exception was handled inside
1279 : // the current activation. Unwinds the interpreted stack accordingly.
1280 239 : WasmInterpreter::Thread::ExceptionHandlingResult HandleException(
1281 : Isolate* isolate) {
1282 : DCHECK(isolate->has_pending_exception());
1283 : // TODO(wasm): Add wasm exception handling (would return true).
1284 : // TODO(wasm): Add wasm exception handling (would return HANDLED).
1285 : TRACE("----- UNWIND -----\n");
1286 : DCHECK_LT(0, activations_.size());
1287 : Activation& act = activations_.back();
1288 : DCHECK_LE(act.fp, frames_.size());
1289 239 : frames_.resize(act.fp);
1290 : DCHECK_LE(act.sp, stack_.size());
1291 239 : stack_.resize(act.sp);
1292 239 : state_ = WasmInterpreter::STOPPED;
1293 239 : return WasmInterpreter::Thread::UNWOUND;
1294 : }
1295 :
1296 : private:
1297 : // Entries on the stack of functions being evaluated.
1298 : struct Frame {
1299 : InterpreterCode* code;
1300 : pc_t pc;
1301 : sp_t sp;
1302 :
1303 : // Limit of parameters.
1304 : sp_t plimit() { return sp + code->function->sig->parameter_count(); }
1305 : // Limit of locals.
1306 : sp_t llimit() { return plimit() + code->locals.type_list.size(); }
1307 : };
1308 :
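// Example (illustration only): for a frame with sp = 10 whose function takes
// 2 parameters and declares 3 locals, plimit() == 12 and llimit() == 15;
// stack slots [10, 12) hold the parameters and [12, 15) the locals.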
1309 : struct Block {
1310 : pc_t pc;
1311 : sp_t sp;
1312 : size_t fp;
1313 : unsigned arity;
1314 : };
1315 :
1316 : friend class InterpretedFrameImpl;
1317 :
1318 : CodeMap* codemap_;
1319 : WasmInstance* instance_;
1320 : ZoneVector<WasmVal> stack_;
1321 : ZoneVector<Frame> frames_;
1322 : WasmInterpreter::State state_ = WasmInterpreter::STOPPED;
1323 : pc_t break_pc_ = kInvalidPc;
1324 : TrapReason trap_reason_ = kTrapCount;
1325 : bool possible_nondeterminism_ = false;
1326 : uint8_t break_flags_ = 0; // a combination of WasmInterpreter::BreakFlag
1327 : uint64_t num_interpreted_calls_ = 0;
1328 : // Store the stack height of each activation (for unwind and frame
1329 : // inspection).
1330 : ZoneVector<Activation> activations_;
1331 :
1332 : CodeMap* codemap() { return codemap_; }
1333 : WasmInstance* instance() { return instance_; }
1334 846 : const WasmModule* module() { return instance_->module; }
1335 :
1336 : void DoTrap(TrapReason trap, pc_t pc) {
1337 12699 : state_ = WasmInterpreter::TRAPPED;
1338 12699 : trap_reason_ = trap;
1339 : CommitPc(pc);
1340 : }
1341 :
1342 : // Push a frame with arguments already on the stack.
1343 3925561 : void PushFrame(InterpreterCode* code) {
1344 : DCHECK_NOT_NULL(code);
1345 3925561 : ++num_interpreted_calls_;
1346 3925561 : size_t arity = code->function->sig->parameter_count();
1347 : // The parameters will overlap the arguments already on the stack.
1348 : DCHECK_GE(stack_.size(), arity);
1349 11776683 : frames_.push_back({code, 0, stack_.size() - arity});
1350 3925561 : frames_.back().pc = InitLocals(code);
1351 : TRACE(" => PushFrame #%zu (#%u @%zu)\n", frames_.size() - 1,
1352 : code->function->func_index, frames_.back().pc);
1353 3925561 : }
1354 :
1355 3925561 : pc_t InitLocals(InterpreterCode* code) {
1356 7858884 : for (auto p : code->locals.type_list) {
1357 : WasmVal val;
1358 7762 : switch (p) {
1359 : #define CASE_TYPE(wasm, ctype) \
1360 : case kWasm##wasm: \
1361 : val = WasmVal(static_cast<ctype>(0)); \
1362 : break;
1363 3926 : WASM_CTYPES(CASE_TYPE)
1364 : #undef CASE_TYPE
1365 : default:
1366 0 : UNREACHABLE();
1367 : break;
1368 : }
1369 7762 : stack_.push_back(val);
1370 : }
1371 3925561 : return code->locals.encoded_size;
1372 : }
1373 :
1374 : void CommitPc(pc_t pc) {
1375 : DCHECK(!frames_.empty());
1376 30292 : frames_.back().pc = pc;
1377 : }
1378 :
1379 : bool SkipBreakpoint(InterpreterCode* code, pc_t pc) {
1380 9320 : if (pc == break_pc_) {
1381 : // Skip the previously hit breakpoint when resuming.
1382 4660 : break_pc_ = kInvalidPc;
1383 : return true;
1384 : }
1385 : return false;
1386 : }
1387 :
1388 : int LookupTargetDelta(InterpreterCode* code, pc_t pc) {
1389 24023 : return static_cast<int>(code->targets->Lookup(pc).pc_diff);
1390 : }
1391 :
1392 436761 : int DoBreak(InterpreterCode* code, pc_t pc, size_t depth) {
1393 436761 : ControlTransferEntry& control_transfer_entry = code->targets->Lookup(pc);
1394 436761 : DoStackTransfer(stack_.size() - control_transfer_entry.sp_diff,
1395 873522 : control_transfer_entry.target_arity);
1396 436761 : return control_transfer_entry.pc_diff;
1397 : }
1398 :
1399 322384 : pc_t ReturnPc(Decoder* decoder, InterpreterCode* code, pc_t pc) {
1400 161192 : switch (code->orig_start[pc]) {
1401 : case kExprCallFunction: {
1402 : CallFunctionOperand<false> operand(decoder, code->at(pc));
1403 161065 : return pc + 1 + operand.length;
1404 : }
1405 : case kExprCallIndirect: {
1406 127 : CallIndirectOperand<false> operand(decoder, code->at(pc));
1407 127 : return pc + 1 + operand.length;
1408 : }
1409 : default:
1410 0 : UNREACHABLE();
1411 : return 0;
1412 : }
1413 : }
1414 :
1415 2929628 : bool DoReturn(Decoder* decoder, InterpreterCode** code, pc_t* pc, pc_t* limit,
1416 : size_t arity) {
1417 : DCHECK_GT(frames_.size(), 0);
1418 5859256 : sp_t dest = frames_.back().sp;
1419 : frames_.pop_back();
1420 2929628 : if (frames_.size() == current_activation().fp) {
1421 : // A return from the last frame terminates the execution.
1422 2768436 : state_ = WasmInterpreter::FINISHED;
1423 2768436 : DoStackTransfer(dest, arity);
1424 : TRACE(" => finish\n");
1425 2768436 : return false;
1426 : } else {
1427 : // Return to caller frame.
1428 : Frame* top = &frames_.back();
1429 161192 : *code = top->code;
1430 161192 : decoder->Reset((*code)->start, (*code)->end);
1431 161192 : *pc = ReturnPc(decoder, *code, top->pc);
1432 161192 : *limit = top->code->end - top->code->start;
1433 : TRACE(" => Return to #%zu (#%u @%zu)\n", frames_.size() - 1,
1434 : (*code)->function->func_index, *pc);
1435 161192 : DoStackTransfer(dest, arity);
1436 161192 : return true;
1437 : }
1438 : }
1439 :
1440 : // Returns true if the call was successful, false if the stack check failed
1441 : // and the current activation was fully unwound.
1442 1144307 : bool DoCall(Decoder* decoder, InterpreterCode* target, pc_t* pc,
1443 : pc_t* limit) WARN_UNUSED_RESULT {
1444 1144307 : frames_.back().pc = *pc;
1445 1144307 : PushFrame(target);
1446 1144307 : if (!DoStackCheck()) return false;
1447 1144292 : *pc = frames_.back().pc;
1448 1144292 : *limit = target->end - target->start;
1449 : decoder->Reset(target->start, target->end);
1450 1144292 : return true;
1451 : }
1452 :
1453 : // Copies {arity} values on the top of the stack down the stack to {dest},
1454 : // dropping the values in-between.
1455 3366389 : void DoStackTransfer(sp_t dest, size_t arity) {
1456 : // before: |---------------| pop_count | arity |
1457 : // ^ 0 ^ dest ^ stack_.size()
1458 : //
1459 : // after: |---------------| arity |
1460 : // ^ 0 ^ stack_.size()
1461 : DCHECK_LE(dest, stack_.size());
1462 : DCHECK_LE(dest + arity, stack_.size());
1463 6732778 : size_t pop_count = stack_.size() - dest - arity;
1464 6381472 : for (size_t i = 0; i < arity; i++) {
1465 9045249 : stack_[dest + i] = stack_[dest + pop_count + i];
1466 : }
1467 3366389 : stack_.resize(stack_.size() - pop_count);
1468 3366389 : }
1469 :
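// Worked example (illustration only): with stack_ = [a b c d e], dest = 1 and
// arity = 2, pop_count is 5 - 1 - 2 = 2; d and e are copied down over b and c
// and the stack is resized to [a d e].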
1470 : template <typename mtype>
1471 : inline bool BoundsCheck(uint32_t mem_size, uint32_t offset, uint32_t index) {
1472 : return sizeof(mtype) <= mem_size && offset <= mem_size - sizeof(mtype) &&
1473 141549 : index <= mem_size - sizeof(mtype) - offset;
1474 : }
1475 :
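// Note (illustration only): the three staged comparisons above are ordered so
// that no subtraction can underflow and no sum can wrap. A naive uint32_t
// check of the form 'offset + index + sizeof(mtype) <= mem_size' can
// overflow and falsely pass, e.g. mem_size = 0x10000, offset = 0xFFFFFFFF,
// index = 2, sizeof(mtype) = 4 wraps the sum around to 5; the staged form
// rejects this access at 'offset <= mem_size - sizeof(mtype)'.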
1476 : template <typename ctype, typename mtype>
1477 225318 : bool ExecuteLoad(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len) {
1478 112659 : MemoryAccessOperand<false> operand(decoder, code->at(pc), sizeof(ctype));
1479 112659 : uint32_t index = Pop().to<uint32_t>();
1480 337977 : if (!BoundsCheck<mtype>(instance()->mem_size, operand.offset, index)) {
1481 : DoTrap(kTrapMemOutOfBounds, pc);
1482 3007 : return false;
1483 : }
1484 109652 : byte* addr = instance()->mem_start + operand.offset + index;
1485 1776 : WasmVal result(static_cast<ctype>(ReadLittleEndianValue<mtype>(addr)));
1486 :
1487 : Push(pc, result);
1488 109652 : len = 1 + operand.length;
1489 109652 : return true;
1490 : }
1491 :
1492 : template <typename ctype, typename mtype>
1493 26671 : bool ExecuteStore(Decoder* decoder, InterpreterCode* code, pc_t pc,
1494 26671 : int& len) {
1495 26671 : MemoryAccessOperand<false> operand(decoder, code->at(pc), sizeof(ctype));
1496 26671 : WasmVal val = Pop();
1497 :
1498 26671 : uint32_t index = Pop().to<uint32_t>();
1499 80013 : if (!BoundsCheck<mtype>(instance()->mem_size, operand.offset, index)) {
1500 : DoTrap(kTrapMemOutOfBounds, pc);
1501 1944 : return false;
1502 : }
1503 24727 : byte* addr = instance()->mem_start + operand.offset + index;
1504 76 : WriteLittleEndianValue<mtype>(addr, static_cast<mtype>(val.to<ctype>()));
1505 24727 : len = 1 + operand.length;
1506 :
1507 : if (std::is_same<float, ctype>::value) {
1508 1818 : possible_nondeterminism_ |= std::isnan(val.to<float>());
1509 : } else if (std::is_same<double, ctype>::value) {
1510 18835 : possible_nondeterminism_ |= std::isnan(val.to<double>());
1511 : }
1512 24727 : return true;
1513 : }
1514 :
1515 : // Check if our control stack (frames_) exceeds the limit. Trigger stack
1516 : // overflow if it does, and unwind the current activation.
1517 : // Returns true if execution can continue, false if the current activation was
1518 : // fully unwound.
1519 : // Do call this function immediately *after* pushing a new frame. The pc of
1520 : // the top frame will be reset to 0 if the stack check fails.
1521 1144337 : bool DoStackCheck() WARN_UNUSED_RESULT {
1522 : // Sum up the size of all dynamically growing structures.
1523 2288614 : if (V8_LIKELY(frames_.size() <= kV8MaxWasmInterpretedStackSize)) {
1524 : return true;
1525 : }
1526 15 : if (!codemap()->has_instance()) {
1527 : // In test mode: Just abort.
1528 0 : FATAL("wasm interpreter: stack overflow");
1529 : }
1530 : // The pc of the top frame is initialized to the first instruction. We reset
1531 : // it to 0 here such that we report the same position as in compiled code.
1532 15 : frames_.back().pc = 0;
1533 : Isolate* isolate = codemap()->instance()->GetIsolate();
1534 : HandleScope handle_scope(isolate);
1535 15 : isolate->StackOverflow();
1536 15 : return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
1537 : }
1538 :
1539 3945981 : void Execute(InterpreterCode* code, pc_t pc, int max) {
1540 3250317 : Decoder decoder(code->start, code->end);
1541 2789533 : pc_t limit = code->end - code->start;
1542 : bool hit_break = false;
1543 :
1544 : while (true) {
1545 : #define PAUSE_IF_BREAK_FLAG(flag) \
1546 : if (V8_UNLIKELY(break_flags_ & WasmInterpreter::BreakFlag::flag)) { \
1547 : hit_break = true; \
1548 : max = 0; \
1549 : }
1550 :
1551 : DCHECK_GT(limit, pc);
1552 : DCHECK_NOT_NULL(code->start);
1553 :
1554 :       // Check for a breakpoint first, so that hit_break is set correctly.
1555 : const char* skip = " ";
1556 17303719 : int len = 1;
1557 17303719 : byte opcode = code->start[pc];
1558 : byte orig = opcode;
1559 17303719 : if (V8_UNLIKELY(opcode == kInternalBreakpoint)) {
1560 9320 : orig = code->orig_start[pc];
1561 9320 : if (SkipBreakpoint(code, pc)) {
1562 : // skip breakpoint by switching on original code.
1563 :           // Skip the breakpoint by dispatching on the original code.
1564 : } else {
1565 : TRACE("@%-3zu: [break] %-24s:", pc,
1566 : WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(orig)));
1567 : TraceValueStack();
1568 : TRACE("\n");
1569 : hit_break = true;
1570 8293 : break;
1571 : }
1572 : }
1573 :
1574 : // If max is 0, break. If max is positive (a limit is set), decrement it.
1575 17299059 : if (max == 0) break;
1576 17295426 : if (max > 0) --max;
1577 :
1578 : USE(skip);
1579 : TRACE("@%-3zu: %s%-24s:", pc, skip,
1580 : WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(orig)));
1581 : TraceValueStack();
1582 : TRACE("\n");
1583 :
1584 : #ifdef DEBUG
1585 : // Compute the stack effect of this opcode, and verify later that the
1586 : // stack was modified accordingly.
1587 : std::pair<uint32_t, uint32_t> stack_effect = wasm::StackEffect(
1588 : codemap_->module(), frames_.back().code->function->sig,
1589 : code->orig_start + pc, code->orig_end);
1590 : uint32_t expected_new_stack_height =
1591 : static_cast<uint32_t>(stack_.size()) - stack_effect.first +
1592 : stack_effect.second;
1593 : #endif
1594 :
1595 17295426 : switch (orig) {
1596 : case kExprNop:
1597 : break;
1598 : case kExprBlock: {
1599 1980733 : BlockTypeOperand<false> operand(&decoder, code->at(pc));
1600 1980733 : len = 1 + operand.length;
1601 : break;
1602 : }
1603 : case kExprLoop: {
1604 48085 : BlockTypeOperand<false> operand(&decoder, code->at(pc));
1605 48085 : len = 1 + operand.length;
1606 : break;
1607 : }
1608 : case kExprIf: {
1609 29660 : BlockTypeOperand<false> operand(&decoder, code->at(pc));
1610 29660 : WasmVal cond = Pop();
1611 : bool is_true = cond.to<uint32_t>() != 0;
1612 29660 : if (is_true) {
1613 : // fall through to the true block.
1614 27988 : len = 1 + operand.length;
1615 : TRACE(" true => fallthrough\n");
1616 : } else {
1617 3344 : len = LookupTargetDelta(code, pc);
1618 : TRACE(" false => @%zu\n", pc + len);
1619 : }
1620 : break;
1621 : }
1622 : case kExprElse: {
1623 22351 : len = LookupTargetDelta(code, pc);
1624 : TRACE(" end => @%zu\n", pc + len);
1625 22351 : break;
1626 : }
1627 : case kExprSelect: {
1628 1519 : WasmVal cond = Pop();
1629 1519 : WasmVal fval = Pop();
1630 1519 : WasmVal tval = Pop();
1631 1519 : Push(pc, cond.to<int32_t>() != 0 ? tval : fval);
1632 : break;
1633 : }
1634 : case kExprBr: {
1635 : BreakDepthOperand<false> operand(&decoder, code->at(pc));
1636 26127 : len = DoBreak(code, pc, operand.depth);
1637 : TRACE(" br => @%zu\n", pc + len);
1638 : break;
1639 : }
1640 : case kExprBrIf: {
1641 : BreakDepthOperand<false> operand(&decoder, code->at(pc));
1642 43764 : WasmVal cond = Pop();
1643 : bool is_true = cond.to<uint32_t>() != 0;
1644 43764 : if (is_true) {
1645 22379 : len = DoBreak(code, pc, operand.depth);
1646 : TRACE(" br_if => @%zu\n", pc + len);
1647 : } else {
1648 : TRACE(" false => fallthrough\n");
1649 21385 : len = 1 + operand.length;
1650 : }
1651 : break;
1652 : }
1653 : case kExprBrTable: {
1654 : BranchTableOperand<false> operand(&decoder, code->at(pc));
1655 : BranchTableIterator<false> iterator(&decoder, operand);
1656 388255 : uint32_t key = Pop().to<uint32_t>();
1657 : uint32_t depth = 0;
1658 388255 : if (key >= operand.table_count) key = operand.table_count;
1659 388255 : for (uint32_t i = 0; i <= key; i++) {
1660 : DCHECK(iterator.has_next());
1661 : depth = iterator.next();
1662 : }
1663 776510 : len = key + DoBreak(code, pc + key, static_cast<size_t>(depth));
1664 : TRACE(" br[%u] => @%zu\n", key, pc + key + len);
1665 : break;
1666 : }
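For br_table, an out-of-range key is clamped to the default entry (key = table_count) and the branch-table iterator is then advanced key + 1 times to obtain the break depth. A standalone sketch of the selection rule using a plain array instead of the decoder-backed iterator (illustrative only):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // br_table target selection: entries 0..table_count-1 are the labelled
    // targets and entry table_count is the default; an out-of-range key picks
    // the default, mirroring the clamping in the kExprBrTable case above.
    uint32_t BrTableDepth(const std::vector<uint32_t>& depths, uint32_t key) {
      uint32_t table_count = static_cast<uint32_t>(depths.size()) - 1;
      if (key >= table_count) key = table_count;
      return depths[key];
    }

    int main() {
      std::vector<uint32_t> depths = {0, 1, 2};  // two cases plus the default depth
      std::printf("%u %u %u\n", BrTableDepth(depths, 0), BrTableDepth(depths, 1),
                  BrTableDepth(depths, 99));  // prints 0 1 2
    }
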
1667 : case kExprReturn: {
1668 2929628 : size_t arity = code->function->sig->return_count();
1669 3170034 : if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
1670 14 : PAUSE_IF_BREAK_FLAG(AfterReturn);
1671 1144306 : continue;
1672 : }
1673 : case kExprUnreachable: {
1674 : return DoTrap(kTrapUnreachable, pc);
1675 : }
1676 : case kExprEnd: {
1677 : break;
1678 : }
1679 : case kExprI32Const: {
1680 571609 : ImmI32Operand<false> operand(&decoder, code->at(pc));
1681 571609 : Push(pc, WasmVal(operand.value));
1682 571609 : len = 1 + operand.length;
1683 : break;
1684 : }
1685 : case kExprI64Const: {
1686 11585 : ImmI64Operand<false> operand(&decoder, code->at(pc));
1687 11585 : Push(pc, WasmVal(operand.value));
1688 11585 : len = 1 + operand.length;
1689 : break;
1690 : }
1691 : case kExprF32Const: {
1692 : ImmF32Operand<false> operand(&decoder, code->at(pc));
1693 : Push(pc, WasmVal(operand.value));
1694 343 : len = 1 + operand.length;
1695 : break;
1696 : }
1697 : case kExprF64Const: {
1698 : ImmF64Operand<false> operand(&decoder, code->at(pc));
1699 : Push(pc, WasmVal(operand.value));
1700 1960 : len = 1 + operand.length;
1701 : break;
1702 : }
1703 : case kExprGetLocal: {
1704 : LocalIndexOperand<false> operand(&decoder, code->at(pc));
1705 5876345 : Push(pc, stack_[frames_.back().sp + operand.index]);
1706 5859788 : len = 1 + operand.length;
1707 : break;
1708 : }
1709 : case kExprSetLocal: {
1710 : LocalIndexOperand<false> operand(&decoder, code->at(pc));
1711 12399 : WasmVal val = Pop();
1712 24798 : stack_[frames_.back().sp + operand.index] = val;
1713 12399 : len = 1 + operand.length;
1714 : break;
1715 : }
1716 : case kExprTeeLocal: {
1717 : LocalIndexOperand<false> operand(&decoder, code->at(pc));
1718 4158 : WasmVal val = Pop();
1719 8316 : stack_[frames_.back().sp + operand.index] = val;
1720 : Push(pc, val);
1721 4158 : len = 1 + operand.length;
1722 : break;
1723 : }
1724 : case kExprDrop: {
1725 : Pop();
1726 : break;
1727 : }
1728 : case kExprCallFunction: {
1729 : CallFunctionOperand<false> operand(&decoder, code->at(pc));
1730 : InterpreterCode* target = codemap()->GetCode(operand.index);
1731 1153450 : if (target->function->imported) {
1732 9300 : CommitPc(pc);
1733 : ExternalCallResult result =
1734 9300 : CallImportedFunction(target->function->func_index);
1735 9300 : switch (result.type) {
1736 : case ExternalCallResult::INTERNAL:
1737 : // The import is a function of this instance. Call it directly.
1738 0 : target = result.interpreter_code;
1739 : DCHECK(!target->function->imported);
1740 0 : break;
1741 : case ExternalCallResult::INVALID_FUNC:
1742 : case ExternalCallResult::SIGNATURE_MISMATCH:
1743 : // Direct calls are checked statically.
1744 0 : UNREACHABLE();
1745 : case ExternalCallResult::EXTERNAL_RETURNED:
1746 9225 : PAUSE_IF_BREAK_FLAG(AfterCall);
1747 9225 : len = 1 + operand.length;
1748 9225 : break;
1749 : case ExternalCallResult::EXTERNAL_UNWOUND:
1750 75 : return;
1751 : }
1752 9225 : if (result.type != ExternalCallResult::INTERNAL) break;
1753 : }
1754 : // Execute an internal call.
1755 1144150 : if (!DoCall(&decoder, target, &pc, &limit)) return;
1756 1144135 : code = target;
1757 1144135 : PAUSE_IF_BREAK_FLAG(AfterCall);
1758 : continue; // don't bump pc
1759 : } break;
1760 : case kExprCallIndirect: {
1761 302 : CallIndirectOperand<false> operand(&decoder, code->at(pc));
1762 302 : uint32_t entry_index = Pop().to<uint32_t>();
1763 : // Assume only one table for now.
1764 : DCHECK_LE(module()->function_tables.size(), 1u);
1765 : ExternalCallResult result =
1766 302 : CallIndirectFunction(0, entry_index, operand.index);
1767 302 : switch (result.type) {
1768 : case ExternalCallResult::INTERNAL:
1769 :             // The target is a function of this instance. Call it directly.
1770 157 : if (!DoCall(&decoder, result.interpreter_code, &pc, &limit))
1771 130 : return;
1772 157 : code = result.interpreter_code;
1773 157 : PAUSE_IF_BREAK_FLAG(AfterCall);
1774 157 : continue; // don't bump pc
1775 : case ExternalCallResult::INVALID_FUNC:
1776 78 : return DoTrap(kTrapFuncInvalid, pc);
1777 : case ExternalCallResult::SIGNATURE_MISMATCH:
1778 37 : return DoTrap(kTrapFuncSigMismatch, pc);
1779 : case ExternalCallResult::EXTERNAL_RETURNED:
1780 15 : PAUSE_IF_BREAK_FLAG(AfterCall);
1781 15 : len = 1 + operand.length;
1782 15 : break;
1783 : case ExternalCallResult::EXTERNAL_UNWOUND:
1784 : return;
1785 : }
1786 15 : } break;
1787 : case kExprGetGlobal: {
1788 : GlobalIndexOperand<false> operand(&decoder, code->at(pc));
1789 800 : const WasmGlobal* global = &module()->globals[operand.index];
1790 400 : byte* ptr = instance()->globals_start + global->offset;
1791 : WasmVal val;
1792 400 : switch (global->type) {
1793 : #define CASE_TYPE(wasm, ctype) \
1794 : case kWasm##wasm: \
1795 : val = WasmVal(*reinterpret_cast<ctype*>(ptr)); \
1796 : break;
1797 226 : WASM_CTYPES(CASE_TYPE)
1798 : #undef CASE_TYPE
1799 : default:
1800 0 : UNREACHABLE();
1801 : }
1802 : Push(pc, val);
1803 400 : len = 1 + operand.length;
1804 : break;
1805 : }
1806 : case kExprSetGlobal: {
1807 : GlobalIndexOperand<false> operand(&decoder, code->at(pc));
1808 568 : const WasmGlobal* global = &module()->globals[operand.index];
1809 284 : byte* ptr = instance()->globals_start + global->offset;
1810 284 : WasmVal val = Pop();
1811 284 : switch (global->type) {
1812 : #define CASE_TYPE(wasm, ctype) \
1813 : case kWasm##wasm: \
1814 : *reinterpret_cast<ctype*>(ptr) = val.to<ctype>(); \
1815 : break;
1816 284 : WASM_CTYPES(CASE_TYPE)
1817 : #undef CASE_TYPE
1818 : default:
1819 0 : UNREACHABLE();
1820 : }
1821 284 : len = 1 + operand.length;
1822 : break;
1823 : }
1824 :
1825 : #define LOAD_CASE(name, ctype, mtype) \
1826 : case kExpr##name: { \
1827 : if (!ExecuteLoad<ctype, mtype>(&decoder, code, pc, len)) return; \
1828 : break; \
1829 : }
1830 :
1831 454 : LOAD_CASE(I32LoadMem8S, int32_t, int8_t);
1832 454 : LOAD_CASE(I32LoadMem8U, int32_t, uint8_t);
1833 398 : LOAD_CASE(I32LoadMem16S, int32_t, int16_t);
1834 398 : LOAD_CASE(I32LoadMem16U, int32_t, uint16_t);
1835 168 : LOAD_CASE(I64LoadMem8S, int64_t, int8_t);
1836 0 : LOAD_CASE(I64LoadMem8U, int64_t, uint8_t);
1837 168 : LOAD_CASE(I64LoadMem16S, int64_t, int16_t);
1838 0 : LOAD_CASE(I64LoadMem16U, int64_t, uint16_t);
1839 168 : LOAD_CASE(I64LoadMem32S, int64_t, int32_t);
1840 0 : LOAD_CASE(I64LoadMem32U, int64_t, uint32_t);
1841 30834 : LOAD_CASE(I32LoadMem, int32_t, int32_t);
1842 10865 : LOAD_CASE(I64LoadMem, int64_t, int64_t);
1843 16442 : LOAD_CASE(F32LoadMem, float, float);
1844 52310 : LOAD_CASE(F64LoadMem, double, double);
1845 : #undef LOAD_CASE
1846 :
1847 : #define STORE_CASE(name, ctype, mtype) \
1848 : case kExpr##name: { \
1849 : if (!ExecuteStore<ctype, mtype>(&decoder, code, pc, len)) return; \
1850 : break; \
1851 : }
1852 :
1853 484 : STORE_CASE(I32StoreMem8, int32_t, int8_t);
1854 456 : STORE_CASE(I32StoreMem16, int32_t, int16_t);
1855 0 : STORE_CASE(I64StoreMem8, int64_t, int8_t);
1856 0 : STORE_CASE(I64StoreMem16, int64_t, int16_t);
1857 0 : STORE_CASE(I64StoreMem32, int64_t, int32_t);
1858 3270 : STORE_CASE(I32StoreMem, int32_t, int32_t);
1859 1376 : STORE_CASE(I64StoreMem, int64_t, int64_t);
1860 2034 : STORE_CASE(F32StoreMem, float, float);
1861 19051 : STORE_CASE(F64StoreMem, double, double);
1862 : #undef STORE_CASE
1863 :
1864 : #define ASMJS_LOAD_CASE(name, ctype, mtype, defval) \
1865 : case kExpr##name: { \
1866 : uint32_t index = Pop().to<uint32_t>(); \
1867 : ctype result; \
1868 : if (!BoundsCheck<mtype>(instance()->mem_size, 0, index)) { \
1869 : result = defval; \
1870 : } else { \
1871 : byte* addr = instance()->mem_start + index; \
1872 : /* TODO(titzer): alignment for asmjs load mem? */ \
1873 : result = static_cast<ctype>(*reinterpret_cast<mtype*>(addr)); \
1874 : } \
1875 : Push(pc, WasmVal(result)); \
1876 : break; \
1877 : }
1878 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8S, int32_t, int8_t, 0);
1879 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem8U, int32_t, uint8_t, 0);
1880 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16S, int32_t, int16_t, 0);
1881 0 : ASMJS_LOAD_CASE(I32AsmjsLoadMem16U, int32_t, uint16_t, 0);
1882 700 : ASMJS_LOAD_CASE(I32AsmjsLoadMem, int32_t, int32_t, 0);
1883 700 : ASMJS_LOAD_CASE(F32AsmjsLoadMem, float, float,
1884 : std::numeric_limits<float>::quiet_NaN());
1885 952 : ASMJS_LOAD_CASE(F64AsmjsLoadMem, double, double,
1886 : std::numeric_limits<double>::quiet_NaN());
1887 : #undef ASMJS_LOAD_CASE
1888 :
1889 : #define ASMJS_STORE_CASE(name, ctype, mtype) \
1890 : case kExpr##name: { \
1891 : WasmVal val = Pop(); \
1892 : uint32_t index = Pop().to<uint32_t>(); \
1893 : if (BoundsCheck<mtype>(instance()->mem_size, 0, index)) { \
1894 : byte* addr = instance()->mem_start + index; \
1895 : /* TODO(titzer): alignment for asmjs store mem? */ \
1896 : *(reinterpret_cast<mtype*>(addr)) = static_cast<mtype>(val.to<ctype>()); \
1897 : } \
1898 : Push(pc, val); \
1899 : break; \
1900 : }
1901 :
1902 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem8, int32_t, int8_t);
1903 0 : ASMJS_STORE_CASE(I32AsmjsStoreMem16, int32_t, int16_t);
1904 6531 : ASMJS_STORE_CASE(I32AsmjsStoreMem, int32_t, int32_t);
1905 0 : ASMJS_STORE_CASE(F32AsmjsStoreMem, float, float);
1906 0 : ASMJS_STORE_CASE(F64AsmjsStoreMem, double, double);
1907 : #undef ASMJS_STORE_CASE
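Unlike the wasm loads and stores above, which trap on an out-of-bounds access, these asm.js variants return a default value (0 or NaN) for an out-of-bounds load and silently drop an out-of-bounds store. A standalone sketch of the load behaviour (helper name and layout are illustrative):

    #include <cmath>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <limits>

    // asm.js-style load: no trap, just the default value when out of bounds.
    template <typename ctype, typename mtype>
    ctype AsmjsLoad(const unsigned char* mem, uint32_t mem_size, uint32_t index,
                    ctype defval) {
      if (sizeof(mtype) > mem_size || index > mem_size - sizeof(mtype)) return defval;
      mtype raw;
      std::memcpy(&raw, mem + index, sizeof(raw));
      return static_cast<ctype>(raw);
    }

    int main() {
      unsigned char mem[4] = {1, 0, 0, 0};
      std::printf("%d\n", AsmjsLoad<int32_t, int32_t>(mem, 4, 0, 0));    // 1 (little-endian host)
      std::printf("%d\n", AsmjsLoad<int32_t, int32_t>(mem, 4, 100, 0));  // 0: out of bounds
      std::printf("%d\n", (int)std::isnan(AsmjsLoad<double, double>(
                              mem, 4, 0, std::numeric_limits<double>::quiet_NaN())));  // 1
    }
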
1908 : case kExprGrowMemory: {
1909 : MemoryIndexOperand<false> operand(&decoder, code->at(pc));
1910 95 : uint32_t delta_pages = Pop().to<uint32_t>();
1911 : Push(pc, WasmVal(ExecuteGrowMemory(
1912 285 : delta_pages, codemap_->maybe_instance(), instance())));
1913 95 : len = 1 + operand.length;
1914 : break;
1915 : }
1916 : case kExprMemorySize: {
1917 : MemoryIndexOperand<false> operand(&decoder, code->at(pc));
1918 : Push(pc, WasmVal(static_cast<uint32_t>(instance()->mem_size /
1919 0 : WasmModule::kPageSize)));
1920 0 : len = 1 + operand.length;
1921 : break;
1922 : }
1923 : // We need to treat kExprI32ReinterpretF32 and kExprI64ReinterpretF64
1924 : // specially to guarantee that the quiet bit of a NaN is preserved on
1925 : // ia32 by the reinterpret casts.
1926 : case kExprI32ReinterpretF32: {
1927 399 : WasmVal val = Pop();
1928 : WasmVal result(ExecuteI32ReinterpretF32(val));
1929 : Push(pc, result);
1930 399 : possible_nondeterminism_ |= std::isnan(val.to<float>());
1931 : break;
1932 : }
1933 : case kExprI64ReinterpretF64: {
1934 399 : WasmVal val = Pop();
1935 : WasmVal result(ExecuteI64ReinterpretF64(val));
1936 : Push(pc, result);
1937 399 : possible_nondeterminism_ |= std::isnan(val.to<double>());
1938 : break;
1939 : }
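The reason for the special handling: a bit-preserving reinterpretation must not route the value through the floating-point unit, since on ia32 a value passed through the x87 FPU can have a signalling NaN quietened, changing the observed bits. The usual bit-preserving idiom is a plain byte copy, sketched below (standalone, illustrative):

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Bit-preserving reinterpretation of a float as an i32: copy the raw bytes
    // instead of converting the value, so NaN payload and quiet bits survive.
    uint32_t ReinterpretF32AsI32(float f) {
      static_assert(sizeof(uint32_t) == sizeof(float), "float must be 32 bits here");
      uint32_t bits;
      std::memcpy(&bits, &f, sizeof(bits));
      return bits;
    }

    int main() {
      std::printf("0x%08X\n", ReinterpretF32AsI32(1.0f));  // prints 0x3F800000
    }
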
1940 : #define EXECUTE_SIMPLE_BINOP(name, ctype, op) \
1941 : case kExpr##name: { \
1942 : WasmVal rval = Pop(); \
1943 : WasmVal lval = Pop(); \
1944 : WasmVal result(lval.to<ctype>() op rval.to<ctype>()); \
1945 : Push(pc, result); \
1946 : break; \
1947 : }
1948 5471214 : FOREACH_SIMPLE_BINOP(EXECUTE_SIMPLE_BINOP)
1949 : #undef EXECUTE_SIMPLE_BINOP
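Each entry in the simple-binop list expands through EXECUTE_SIMPLE_BINOP(name, ctype, op) into a case that pops the right operand first, then the left, applies op, and pushes the result. A standalone model of that evaluation order on a plain vector stack, shown for an i32.sub-style entry (Pop/Push below are stand-ins, not the interpreter's):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    static std::vector<uint32_t> stack;

    uint32_t Pop() { uint32_t v = stack.back(); stack.pop_back(); return v; }
    void Push(uint32_t v) { stack.push_back(v); }

    // What one EXECUTE_SIMPLE_BINOP expansion does for a subtraction entry:
    // right operand popped first, wrapping uint32_t arithmetic as in wasm.
    void ExecuteI32Sub() {
      uint32_t rval = Pop();
      uint32_t lval = Pop();
      Push(lval - rval);
    }

    int main() {
      Push(10); Push(3);           // i32.const 10 ; i32.const 3
      ExecuteI32Sub();             // i32.sub
      std::printf("%u\n", Pop());  // prints 7
    }
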
1950 :
1951 : #define EXECUTE_OTHER_BINOP(name, ctype) \
1952 : case kExpr##name: { \
1953 : TrapReason trap = kTrapCount; \
1954 : volatile ctype rval = Pop().to<ctype>(); \
1955 : volatile ctype lval = Pop().to<ctype>(); \
1956 : WasmVal result(Execute##name(lval, rval, &trap)); \
1957 : if (trap != kTrapCount) return DoTrap(trap, pc); \
1958 : Push(pc, result); \
1959 : break; \
1960 : }
1961 3348422 : FOREACH_OTHER_BINOP(EXECUTE_OTHER_BINOP)
1962 : #undef EXECUTE_OTHER_BINOP
1963 :
1964 : case kExprF32CopySign: {
1965 : // Handle kExprF32CopySign separately because it may introduce
1966 : // observable non-determinism.
1967 : TrapReason trap = kTrapCount;
1968 185178 : volatile float rval = Pop().to<float>();
1969 185178 : volatile float lval = Pop().to<float>();
1970 92589 : WasmVal result(ExecuteF32CopySign(lval, rval, &trap));
1971 : Push(pc, result);
1972 185178 : possible_nondeterminism_ |= std::isnan(rval);
1973 : break;
1974 : }
1975 : case kExprF64CopySign: {
1976 :         // Handle kExprF64CopySign separately because it may introduce
1977 : // observable non-determinism.
1978 : TrapReason trap = kTrapCount;
1979 33642 : volatile double rval = Pop().to<double>();
1980 33642 : volatile double lval = Pop().to<double>();
1981 16821 : WasmVal result(ExecuteF64CopySign(lval, rval, &trap));
1982 : Push(pc, result);
1983 33642 : possible_nondeterminism_ |= std::isnan(rval);
1984 : break;
1985 : }
1986 : #define EXECUTE_OTHER_UNOP(name, ctype) \
1987 : case kExpr##name: { \
1988 : TrapReason trap = kTrapCount; \
1989 : volatile ctype val = Pop().to<ctype>(); \
1990 : WasmVal result(Execute##name(val, &trap)); \
1991 : if (trap != kTrapCount) return DoTrap(trap, pc); \
1992 : Push(pc, result); \
1993 : break; \
1994 : }
1995 607650 : FOREACH_OTHER_UNOP(EXECUTE_OTHER_UNOP)
1996 : #undef EXECUTE_OTHER_UNOP
1997 :
1998 : default:
1999 : V8_Fatal(__FILE__, __LINE__, "Unknown or unimplemented opcode #%d:%s",
2000 0 : code->start[pc], OpcodeName(code->start[pc]));
2001 : UNREACHABLE();
2002 : }
2003 :
2004 : #ifdef DEBUG
2005 : if (!WasmOpcodes::IsControlOpcode(static_cast<WasmOpcode>(opcode))) {
2006 : DCHECK_EQ(expected_new_stack_height, stack_.size());
2007 : }
2008 : #endif
2009 :
2010 15749536 : pc += len;
2011 15749536 : if (pc == limit) {
2012 : // Fell off end of code; do an implicit return.
2013 :         // Fell off the end of the code; do an implicit return.
2014 2540834 : if (!DoReturn(&decoder, &code, &pc, &limit,
2015 5081668 : code->function->sig->return_count()))
2016 : return;
2017 161178 : PAUSE_IF_BREAK_FLAG(AfterReturn);
2018 : }
2019 : }
2020 :
2021 8293 : state_ = WasmInterpreter::PAUSED;
2022 12984 : break_pc_ = hit_break ? pc : kInvalidPc;
2023 8293 : CommitPc(pc);
2024 : }
2025 :
2026 : WasmVal Pop() {
2027 : DCHECK_GT(stack_.size(), 0);
2028 : DCHECK_GT(frames_.size(), 0);
2029 : DCHECK_GT(stack_.size(), frames_.back().llimit()); // can't pop into locals
2030 6410328 : WasmVal val = stack_.back();
2031 : stack_.pop_back();
2032 6410328 : return val;
2033 : }
2034 :
2035 : void PopN(int n) {
2036 : DCHECK_GE(stack_.size(), n);
2037 : DCHECK_GT(frames_.size(), 0);
2038 : size_t nsize = stack_.size() - n;
2039 : DCHECK_GE(nsize, frames_.back().llimit()); // can't pop into locals
2040 : stack_.resize(nsize);
2041 : }
2042 :
2043 : WasmVal PopArity(size_t arity) {
2044 : if (arity == 0) return WasmVal();
2045 : CHECK_EQ(1, arity);
2046 : return Pop();
2047 : }
2048 :
2049 : void Push(pc_t pc, WasmVal val) {
2050 : // TODO(titzer): store PC as well?
2051 : DCHECK_NE(kWasmStmt, val.type);
2052 9584640 : stack_.push_back(val);
2053 : }
2054 :
2055 : void TraceStack(const char* phase, pc_t pc) {
2056 : if (FLAG_trace_wasm_interpreter) {
2057 : PrintF("%s @%zu", phase, pc);
2058 : UNIMPLEMENTED();
2059 : PrintF("\n");
2060 : }
2061 : }
2062 :
2063 : void TraceValueStack() {
2064 : #ifdef DEBUG
2065 : Frame* top = frames_.size() > 0 ? &frames_.back() : nullptr;
2066 : sp_t sp = top ? top->sp : 0;
2067 : sp_t plimit = top ? top->plimit() : 0;
2068 : sp_t llimit = top ? top->llimit() : 0;
2069 : if (FLAG_trace_wasm_interpreter) {
2070 : for (size_t i = sp; i < stack_.size(); ++i) {
2071 : if (i < plimit)
2072 : PrintF(" p%zu:", i);
2073 : else if (i < llimit)
2074 : PrintF(" l%zu:", i);
2075 : else
2076 : PrintF(" s%zu:", i);
2077 : WasmVal val = stack_[i];
2078 : switch (val.type) {
2079 : case kWasmI32:
2080 : PrintF("i32:%d", val.to<int32_t>());
2081 : break;
2082 : case kWasmI64:
2083 : PrintF("i64:%" PRId64 "", val.to<int64_t>());
2084 : break;
2085 : case kWasmF32:
2086 : PrintF("f32:%f", val.to<float>());
2087 : break;
2088 : case kWasmF64:
2089 : PrintF("f64:%lf", val.to<double>());
2090 : break;
2091 : case kWasmStmt:
2092 : PrintF("void");
2093 : break;
2094 : default:
2095 : UNREACHABLE();
2096 : break;
2097 : }
2098 : }
2099 : }
2100 : #endif // DEBUG
2101 : }
2102 :
2103 : ExternalCallResult TryHandleException(Isolate* isolate) {
2104 90 : if (HandleException(isolate) == WasmInterpreter::Thread::UNWOUND) {
2105 : return {ExternalCallResult::EXTERNAL_UNWOUND};
2106 : }
2107 : return {ExternalCallResult::EXTERNAL_RETURNED};
2108 : }
2109 :
2110 9375 : ExternalCallResult CallCodeObject(Isolate* isolate, Handle<Code> code,
2111 46230 : FunctionSig* signature) {
2112 : DCHECK(AllowHandleAllocation::IsAllowed());
2113 : DCHECK(AllowHeapAllocation::IsAllowed());
2114 :
2115 9375 : if (code->kind() == Code::WASM_FUNCTION) {
2116 : FixedArray* deopt_data = code->deoptimization_data();
2117 : DCHECK_EQ(2, deopt_data->length());
2118 : WasmInstanceObject* target_instance =
2119 45 : WasmInstanceObject::cast(WeakCell::cast(deopt_data->get(0))->value());
2120 45 : if (target_instance != *codemap()->instance()) {
2121 : // TODO(wasm): Implement calling functions of other instances/modules.
2122 0 : UNIMPLEMENTED();
2123 : }
2124 : int target_func_idx = Smi::cast(deopt_data->get(1))->value();
2125 : DCHECK_LE(0, target_func_idx);
2126 : return {ExternalCallResult::INTERNAL,
2127 90 : codemap()->GetCode(target_func_idx)};
2128 : }
2129 :
2130 : Handle<HeapObject> target =
2131 9330 : codemap()->GetCallableObjectForJSImport(isolate, code);
2132 :
2133 9330 : if (target.is_null()) {
2134 : isolate->Throw(*isolate->factory()->NewTypeError(
2135 60 : MessageTemplate::kWasmTrapTypeError));
2136 : return TryHandleException(isolate);
2137 : }
2138 :
2139 : #if DEBUG
2140 : std::ostringstream oss;
2141 : target->HeapObjectShortPrint(oss);
2142 : TRACE(" => Calling imported function %s\n", oss.str().c_str());
2143 : #endif
2144 :
2145 9300 : int num_args = static_cast<int>(signature->parameter_count());
2146 :
2147 : // Get all arguments as JS values.
2148 : std::vector<Handle<Object>> args;
2149 9300 : args.reserve(num_args);
2150 27840 : WasmVal* wasm_args = stack_.data() + (stack_.size() - num_args);
2151 18510 : for (int i = 0; i < num_args; ++i) {
2152 : args.push_back(WasmValToNumber(isolate->factory(), wasm_args[i],
2153 27630 : signature->GetParam(i)));
2154 : }
2155 :
2156 : // The receiver is the global proxy if in sloppy mode (default), undefined
2157 : // if in strict mode.
2158 9300 : Handle<Object> receiver = isolate->global_proxy();
2159 18600 : if (target->IsJSFunction() &&
2160 : is_strict(JSFunction::cast(*target)->shared()->language_mode())) {
2161 0 : receiver = isolate->factory()->undefined_value();
2162 : }
2163 :
2164 : MaybeHandle<Object> maybe_retval =
2165 9300 : Execution::Call(isolate, target, receiver, num_args, args.data());
2166 9300 : if (maybe_retval.is_null()) return TryHandleException(isolate);
2167 :
2168 9240 : Handle<Object> retval = maybe_retval.ToHandleChecked();
2169 : // Pop arguments off the stack.
2170 9240 : stack_.resize(stack_.size() - num_args);
2171 9240 : if (signature->return_count() > 0) {
2172 : // TODO(wasm): Handle multiple returns.
2173 : DCHECK_EQ(1, signature->return_count());
2174 : stack_.push_back(
2175 18120 : ToWebAssemblyValue(isolate, retval, signature->GetReturn()));
2176 : }
2177 9240 : return {ExternalCallResult::EXTERNAL_RETURNED};
2178 : }
2179 :
2180 27900 : ExternalCallResult CallImportedFunction(uint32_t function_index) {
2181 : // Use a new HandleScope to avoid leaking / accumulating handles in the
2182 : // outer scope.
2183 : Isolate* isolate = codemap()->instance()->GetIsolate();
2184 : HandleScope handle_scope(isolate);
2185 :
2186 : Handle<Code> target(codemap()->GetImportedFunction(function_index),
2187 9300 : isolate);
2188 : return CallCodeObject(isolate, target,
2189 37200 : codemap()->module()->functions[function_index].sig);
2190 : }
2191 :
2192 302 : ExternalCallResult CallIndirectFunction(uint32_t table_index,
2193 : uint32_t entry_index,
2194 539 : uint32_t sig_index) {
2195 302 : if (!codemap()->has_instance()) {
2196 : // No instance. Rely on the information stored in the WasmModule.
2197 : // TODO(wasm): This is only needed for testing. Refactor testing to use
2198 : // the same paths as production.
2199 : InterpreterCode* code =
2200 182 : codemap()->GetIndirectCode(table_index, entry_index);
2201 182 : if (!code) return {ExternalCallResult::INVALID_FUNC};
2202 119 : if (code->function->sig_index != sig_index) {
2203 : // If not an exact match, we have to do a canonical check.
2204 : // TODO(titzer): make this faster with some kind of caching?
2205 : const WasmIndirectFunctionTable* table =
2206 42 : &module()->function_tables[table_index];
2207 21 : int function_key = table->map.Find(code->function->sig);
2208 42 : if (function_key < 0 ||
2209 : (function_key !=
2210 63 : table->map.Find(module()->signatures[sig_index]))) {
2211 7 : return {ExternalCallResult::SIGNATURE_MISMATCH};
2212 : }
2213 : }
2214 112 : return {ExternalCallResult::INTERNAL, code};
2215 : }
2216 :
2217 : WasmCompiledModule* compiled_module =
2218 120 : codemap()->instance()->compiled_module();
2219 : Isolate* isolate = compiled_module->GetIsolate();
2220 :
2221 : Code* target;
2222 : {
2223 : DisallowHeapAllocation no_gc;
2224 :       // Get the function to call directly from the live instance, to pick up
2225 :       // the latest changes to the tables.
2226 :
2227 : // Canonicalize signature index.
2228 : // TODO(titzer): make this faster with some kind of caching?
2229 : const WasmIndirectFunctionTable* table =
2230 240 : &module()->function_tables[table_index];
2231 240 : FunctionSig* sig = module()->signatures[sig_index];
2232 120 : uint32_t canonical_sig_index = table->map.Find(sig);
2233 :
2234 : // Check signature.
2235 : FixedArray* sig_tables = compiled_module->ptr_to_signature_tables();
2236 120 : if (table_index >= static_cast<uint32_t>(sig_tables->length())) {
2237 0 : return {ExternalCallResult::INVALID_FUNC};
2238 : }
2239 : FixedArray* sig_table =
2240 120 : FixedArray::cast(sig_tables->get(static_cast<int>(table_index)));
2241 120 : if (entry_index >= static_cast<uint32_t>(sig_table->length())) {
2242 15 : return {ExternalCallResult::INVALID_FUNC};
2243 : }
2244 : int found_sig =
2245 105 : Smi::cast(sig_table->get(static_cast<int>(entry_index)))->value();
2246 105 : if (static_cast<uint32_t>(found_sig) != canonical_sig_index) {
2247 30 : return {ExternalCallResult::SIGNATURE_MISMATCH};
2248 : }
2249 :
2250 : // Get code object.
2251 : FixedArray* fun_tables = compiled_module->ptr_to_function_tables();
2252 : DCHECK_EQ(sig_tables->length(), fun_tables->length());
2253 : FixedArray* fun_table =
2254 : FixedArray::cast(fun_tables->get(static_cast<int>(table_index)));
2255 : DCHECK_EQ(sig_table->length(), fun_table->length());
2256 : target = Code::cast(fun_table->get(static_cast<int>(entry_index)));
2257 : }
2258 :
2259 : // Call the code object. Use a new HandleScope to avoid leaking /
2260 : // accumulating handles in the outer scope.
2261 : HandleScope handle_scope(isolate);
2262 : FunctionSig* signature =
2263 150 : &codemap()->module()->signatures[table_index][sig_index];
2264 75 : return CallCodeObject(isolate, handle(target, isolate), signature);
2265 : }
2266 :
2267 : inline Activation current_activation() {
2268 5697792 : return activations_.empty() ? Activation(0, 0) : activations_.back();
2269 : }
2270 : };
2271 :
2272 : class InterpretedFrameImpl {
2273 : public:
2274 : InterpretedFrameImpl(ThreadImpl* thread, int index)
2275 1971014 : : thread_(thread), index_(index) {
2276 : DCHECK_LE(0, index);
2277 : }
2278 :
2279 3943400 : const WasmFunction* function() const { return frame()->code->function; }
2280 :
2281 1970385 : int pc() const {
2282 : DCHECK_LE(0, frame()->pc);
2283 : DCHECK_GE(kMaxInt, frame()->pc);
2284 1970385 : return static_cast<int>(frame()->pc);
2285 : }
2286 :
2287 : int GetParameterCount() const {
2288 : DCHECK_GE(kMaxInt, function()->sig->parameter_count());
2289 276 : return static_cast<int>(function()->sig->parameter_count());
2290 : }
2291 :
2292 : int GetLocalCount() const {
2293 1340 : size_t num_locals = function()->sig->parameter_count() +
2294 2680 : frame()->code->locals.type_list.size();
2295 : DCHECK_GE(kMaxInt, num_locals);
2296 1340 : return static_cast<int>(num_locals);
2297 : }
2298 :
2299 486 : int GetStackHeight() const {
2300 : bool is_top_frame =
2301 486 : static_cast<size_t>(index_) + 1 == thread_->frames_.size();
2302 : size_t stack_limit =
2303 606 : is_top_frame ? thread_->stack_.size() : thread_->frames_[index_ + 1].sp;
2304 : DCHECK_LE(frame()->sp, stack_limit);
2305 486 : size_t frame_size = stack_limit - frame()->sp;
2306 : DCHECK_LE(GetLocalCount(), frame_size);
2307 972 : return static_cast<int>(frame_size) - GetLocalCount();
2308 : }
2309 :
2310 542 : WasmVal GetLocalValue(int index) const {
2311 : DCHECK_LE(0, index);
2312 : DCHECK_GT(GetLocalCount(), index);
2313 1084 : return thread_->GetStackValue(static_cast<int>(frame()->sp) + index);
2314 : }
2315 :
2316 116 : WasmVal GetStackValue(int index) const {
2317 : DCHECK_LE(0, index);
2318 : // Index must be within the number of stack values of this frame.
2319 : DCHECK_GT(GetStackHeight(), index);
2320 232 : return thread_->GetStackValue(static_cast<int>(frame()->sp) +
2321 232 : GetLocalCount() + index);
2322 : }
2323 :
2324 : private:
2325 : ThreadImpl* thread_;
2326 : int index_;
2327 :
2328 : ThreadImpl::Frame* frame() const {
2329 : DCHECK_GT(thread_->frames_.size(), index_);
2330 3942442 : return &thread_->frames_[index_];
2331 : }
2332 : };
2333 :
2334 : // Converters between WasmInterpreter::Thread and WasmInterpreter::ThreadImpl.
2335 : // Thread* is the public interface, without knowledge of the object layout.
2336 : // This cast is potentially risky, but as long as we always cast it back before
2337 : // accessing any data, it should be fine. UBSan does not complain.
2338 : WasmInterpreter::Thread* ToThread(ThreadImpl* impl) {
2339 : return reinterpret_cast<WasmInterpreter::Thread*>(impl);
2340 : }
2341 : ThreadImpl* ToImpl(WasmInterpreter::Thread* thread) {
2342 : return reinterpret_cast<ThreadImpl*>(thread);
2343 : }
2344 :
2345 : // Same conversion for InterpretedFrame and InterpretedFrameImpl.
2346 : InterpretedFrame* ToFrame(InterpretedFrameImpl* impl) {
2347 : return reinterpret_cast<InterpretedFrame*>(impl);
2348 : }
2349 : const InterpretedFrameImpl* ToImpl(const InterpretedFrame* frame) {
2350 : return reinterpret_cast<const InterpretedFrameImpl*>(frame);
2351 : }
2352 :
2353 : } // namespace
2354 :
2355 : //============================================================================
2356 : // Implementation of the pimpl idiom for WasmInterpreter::Thread.
2357 : // Instead of placing a pointer to the ThreadImpl inside the Thread object,
2358 : // we just reinterpret_cast between the two. ThreadImpls are only allocated inside this
2359 : // translation unit anyway.
2360 : //============================================================================
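A minimal standalone sketch of this "reinterpret_cast instead of a pointer member" flavour of pimpl, with generic names rather than the V8 classes: the public type is only ever a handle, and every access casts back to the implementation type first.

    #include <cstdio>

    class PublicThing;  // public interface type: never defined with data members here

    class PublicThingImpl {  // the real implementation, local to this translation unit
     public:
      int value() const { return 42; }
    };

    // The same converter pattern as ToThread/ToImpl above.
    PublicThing* ToPublic(PublicThingImpl* impl) {
      return reinterpret_cast<PublicThing*>(impl);
    }
    PublicThingImpl* ToImpl(PublicThing* pub) {
      return reinterpret_cast<PublicThingImpl*>(pub);
    }

    int main() {
      PublicThingImpl impl;
      PublicThing* handle = ToPublic(&impl);         // hand out the opaque handle
      std::printf("%d\n", ToImpl(handle)->value());  // cast back before any access
    }
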
2361 25053 : WasmInterpreter::State WasmInterpreter::Thread::state() {
2362 25053 : return ToImpl(this)->state();
2363 : }
2364 2781254 : void WasmInterpreter::Thread::InitFrame(const WasmFunction* function,
2365 : WasmVal* args) {
2366 2781254 : ToImpl(this)->InitFrame(function, args);
2367 2781254 : }
2368 2789533 : WasmInterpreter::State WasmInterpreter::Thread::Run(int num_steps) {
2369 2789533 : return ToImpl(this)->Run(num_steps);
2370 : }
2371 0 : void WasmInterpreter::Thread::Pause() { return ToImpl(this)->Pause(); }
2372 5459630 : void WasmInterpreter::Thread::Reset() { return ToImpl(this)->Reset(); }
2373 : WasmInterpreter::Thread::ExceptionHandlingResult
2374 134 : WasmInterpreter::Thread::HandleException(Isolate* isolate) {
2375 134 : return ToImpl(this)->HandleException(isolate);
2376 : }
2377 4536 : pc_t WasmInterpreter::Thread::GetBreakpointPc() {
2378 4536 : return ToImpl(this)->GetBreakpointPc();
2379 : }
2380 4442 : int WasmInterpreter::Thread::GetFrameCount() {
2381 4442 : return ToImpl(this)->GetFrameCount();
2382 : }
2383 1971014 : std::unique_ptr<InterpretedFrame> WasmInterpreter::Thread::GetFrame(int index) {
2384 : DCHECK_LE(0, index);
2385 : DCHECK_GT(GetFrameCount(), index);
2386 : return std::unique_ptr<InterpretedFrame>(
2387 3942028 : ToFrame(new InterpretedFrameImpl(ToImpl(this), index)));
2388 : }
2389 2768164 : WasmVal WasmInterpreter::Thread::GetReturnValue(int index) {
2390 5536328 : return ToImpl(this)->GetReturnValue(index);
2391 : }
2392 120 : TrapReason WasmInterpreter::Thread::GetTrapReason() {
2393 120 : return ToImpl(this)->GetTrapReason();
2394 : }
2395 2712700 : bool WasmInterpreter::Thread::PossibleNondeterminism() {
2396 2712700 : return ToImpl(this)->PossibleNondeterminism();
2397 : }
2398 83708 : uint64_t WasmInterpreter::Thread::NumInterpretedCalls() {
2399 83708 : return ToImpl(this)->NumInterpretedCalls();
2400 : }
2401 31 : void WasmInterpreter::Thread::AddBreakFlags(uint8_t flags) {
2402 : ToImpl(this)->AddBreakFlags(flags);
2403 31 : }
2404 0 : void WasmInterpreter::Thread::ClearBreakFlags() {
2405 : ToImpl(this)->ClearBreakFlags();
2406 0 : }
2407 42 : uint32_t WasmInterpreter::Thread::NumActivations() {
2408 42 : return ToImpl(this)->NumActivations();
2409 : }
2410 51439 : uint32_t WasmInterpreter::Thread::StartActivation() {
2411 51439 : return ToImpl(this)->StartActivation();
2412 : }
2413 51439 : void WasmInterpreter::Thread::FinishActivation(uint32_t id) {
2414 : ToImpl(this)->FinishActivation(id);
2415 51439 : }
2416 3990 : uint32_t WasmInterpreter::Thread::ActivationFrameBase(uint32_t id) {
2417 3990 : return ToImpl(this)->ActivationFrameBase(id);
2418 : }
2419 :
2420 : //============================================================================
2421 : // The implementation details of the interpreter.
2422 : //============================================================================
2423 19296 : class WasmInterpreterInternals : public ZoneObject {
2424 : public:
2425 : WasmInstance* instance_;
2426 : // Create a copy of the module bytes for the interpreter, since the passed
2427 : // pointer might be invalidated after constructing the interpreter.
2428 : const ZoneVector<uint8_t> module_bytes_;
2429 : CodeMap codemap_;
2430 : ZoneVector<ThreadImpl> threads_;
2431 :
2432 19314 : WasmInterpreterInternals(Isolate* isolate, Zone* zone,
2433 : const ModuleBytesEnv& env)
2434 : : instance_(env.module_env.instance),
2435 : module_bytes_(env.wire_bytes.start(), env.wire_bytes.end(), zone),
2436 : codemap_(
2437 : isolate,
2438 : env.module_env.instance ? env.module_env.instance->module : nullptr,
2439 : module_bytes_.data(), zone),
2440 57942 : threads_(zone) {
2441 19314 : threads_.emplace_back(zone, &codemap_, env.module_env.instance);
2442 19314 : }
2443 : };
2444 :
2445 : //============================================================================
2446 : // Implementation of the public interface of the interpreter.
2447 : //============================================================================
2448 19314 : WasmInterpreter::WasmInterpreter(Isolate* isolate, const ModuleBytesEnv& env)
2449 : : zone_(isolate->allocator(), ZONE_NAME),
2450 38628 : internals_(new (&zone_) WasmInterpreterInternals(isolate, &zone_, env)) {}
2451 :
2452 38592 : WasmInterpreter::~WasmInterpreter() { internals_->~WasmInterpreterInternals(); }
2453 :
2454 0 : void WasmInterpreter::Run() { internals_->threads_[0].Run(); }
2455 :
2456 0 : void WasmInterpreter::Pause() { internals_->threads_[0].Pause(); }
2457 :
2458 4790 : bool WasmInterpreter::SetBreakpoint(const WasmFunction* function, pc_t pc,
2459 : bool enabled) {
2460 2395 : InterpreterCode* code = internals_->codemap_.GetCode(function);
2461 2395 : size_t size = static_cast<size_t>(code->end - code->start);
2462 : // Check bounds for {pc}.
2463 2395 : if (pc < code->locals.encoded_size || pc >= size) return false;
2464 : // Make a copy of the code before enabling a breakpoint.
2465 2395 : if (enabled && code->orig_start == code->start) {
2466 66 : code->start = reinterpret_cast<byte*>(zone_.New(size));
2467 66 : memcpy(code->start, code->orig_start, size);
2468 66 : code->end = code->start + size;
2469 : }
2470 2395 : bool prev = code->start[pc] == kInternalBreakpoint;
2471 2395 : if (enabled) {
2472 1261 : code->start[pc] = kInternalBreakpoint;
2473 : } else {
2474 1134 : code->start[pc] = code->orig_start[pc];
2475 : }
2476 2395 : return prev;
2477 : }
2478 :
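SetBreakpoint copies the function body the first time a breakpoint is enabled, patches the requested pc with the internal breakpoint opcode, and returns whether a breakpoint was already set there. A toy model of that copy-on-first-breakpoint technique on a plain byte buffer (names and the opcode constant are stand-ins, and the locals-size check is omitted):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    const uint8_t kBreakpointOpcode = 0xFF;  // stand-in for kInternalBreakpoint

    struct ToyCode {
      std::vector<uint8_t> orig;
      std::vector<uint8_t> patched;  // lazily created copy, patched in place
    };

    bool ToySetBreakpoint(ToyCode* code, std::size_t pc, bool enabled) {
      if (pc >= code->orig.size()) return false;
      if (enabled && code->patched.empty()) code->patched = code->orig;  // first copy
      if (code->patched.empty()) return false;  // nothing was ever patched
      bool prev = code->patched[pc] == kBreakpointOpcode;
      code->patched[pc] = enabled ? kBreakpointOpcode : code->orig[pc];
      return prev;  // like SetBreakpoint: report the previous state at pc
    }

    int main() {
      ToyCode code{{0x01, 0x0B}, {}};
      std::printf("%d\n", ToySetBreakpoint(&code, 0, true));   // 0: none before
      std::printf("%d\n", ToySetBreakpoint(&code, 0, true));   // 1: already set
      std::printf("%d\n", ToySetBreakpoint(&code, 0, false));  // 1: was set, now cleared
    }
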
2479 0 : bool WasmInterpreter::GetBreakpoint(const WasmFunction* function, pc_t pc) {
2480 0 : InterpreterCode* code = internals_->codemap_.GetCode(function);
2481 0 : size_t size = static_cast<size_t>(code->end - code->start);
2482 : // Check bounds for {pc}.
2483 0 : if (pc < code->locals.encoded_size || pc >= size) return false;
2484 : // Check if a breakpoint is present at that place in the code.
2485 0 : return code->start[pc] == kInternalBreakpoint;
2486 : }
2487 :
2488 0 : bool WasmInterpreter::SetTracing(const WasmFunction* function, bool enabled) {
2489 0 : UNIMPLEMENTED();
2490 : return false;
2491 : }
2492 :
2493 1153 : void WasmInterpreter::SetInstanceObject(WasmInstanceObject* instance) {
2494 1153 : internals_->codemap_.SetInstanceObject(instance);
2495 1153 : }
2496 :
2497 0 : int WasmInterpreter::GetThreadCount() {
2498 0 : return 1; // only one thread for now.
2499 : }
2500 :
2501 2967744 : WasmInterpreter::Thread* WasmInterpreter::GetThread(int id) {
2502 2967744 : CHECK_EQ(0, id); // only one thread for now.
2503 5935488 : return ToThread(&internals_->threads_[id]);
2504 : }
2505 :
2506 0 : size_t WasmInterpreter::GetMemorySize() {
2507 0 : return internals_->instance_->mem_size;
2508 : }
2509 :
2510 0 : WasmVal WasmInterpreter::ReadMemory(size_t offset) {
2511 0 : UNIMPLEMENTED();
2512 : return WasmVal();
2513 : }
2514 :
2515 0 : void WasmInterpreter::WriteMemory(size_t offset, WasmVal val) {
2516 0 : UNIMPLEMENTED();
2517 : }
2518 :
2519 19917 : void WasmInterpreter::AddFunctionForTesting(const WasmFunction* function) {
2520 19917 : internals_->codemap_.AddFunction(function, nullptr, nullptr);
2521 19917 : }
2522 :
2523 39834 : void WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function,
2524 : const byte* start,
2525 : const byte* end) {
2526 39834 : internals_->codemap_.SetFunctionCode(function, start, end);
2527 19917 : }
2528 :
2529 29 : ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
2530 : Zone* zone, const WasmModule* module, const byte* start, const byte* end) {
2531 : // Create some dummy structures, to avoid special-casing the implementation
2532 : // just for testing.
2533 : FunctionSig sig(0, 0, nullptr);
2534 29 : WasmFunction function{&sig, 0, 0, 0, 0, 0, 0, false, false};
2535 : InterpreterCode code{
2536 58 : &function, BodyLocalDecls(zone), start, end, nullptr, nullptr, nullptr};
2537 :
2538 : // Now compute and return the control transfers.
2539 29 : ControlTransfers targets(zone, module, &code);
2540 29 : return targets.map_;
2541 : }
2542 :
2543 : //============================================================================
2544 : // Implementation of the frame inspection interface.
2545 : //============================================================================
2546 1970385 : const WasmFunction* InterpretedFrame::function() const {
2547 1970385 : return ToImpl(this)->function();
2548 : }
2549 3940770 : int InterpretedFrame::pc() const { return ToImpl(this)->pc(); }
2550 276 : int InterpretedFrame::GetParameterCount() const {
2551 276 : return ToImpl(this)->GetParameterCount();
2552 : }
2553 738 : int InterpretedFrame::GetLocalCount() const {
2554 738 : return ToImpl(this)->GetLocalCount();
2555 : }
2556 486 : int InterpretedFrame::GetStackHeight() const {
2557 486 : return ToImpl(this)->GetStackHeight();
2558 : }
2559 542 : WasmVal InterpretedFrame::GetLocalValue(int index) const {
2560 542 : return ToImpl(this)->GetLocalValue(index);
2561 : }
2562 116 : WasmVal InterpretedFrame::GetStackValue(int index) const {
2563 116 : return ToImpl(this)->GetStackValue(index);
2564 : }
2565 :
2566 : } // namespace wasm
2567 : } // namespace internal
2568 : } // namespace v8