Line data Source code
1 : // Copyright 2015 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/interpreter/bytecode-generator.h"
6 :
7 : #include "src/api-inl.h"
8 : #include "src/ast/ast-source-ranges.h"
9 : #include "src/ast/scopes.h"
10 : #include "src/builtins/builtins-constructor.h"
11 : #include "src/compiler.h"
12 : #include "src/interpreter/bytecode-flags.h"
13 : #include "src/interpreter/bytecode-jump-table.h"
14 : #include "src/interpreter/bytecode-label.h"
15 : #include "src/interpreter/bytecode-register-allocator.h"
16 : #include "src/interpreter/control-flow-builders.h"
17 : #include "src/objects-inl.h"
18 : #include "src/objects/debug-objects.h"
19 : #include "src/objects/literal-objects-inl.h"
20 : #include "src/objects/smi.h"
21 : #include "src/objects/template-objects-inl.h"
22 : #include "src/parsing/parse-info.h"
23 : #include "src/parsing/token.h"
24 : #include "src/unoptimized-compilation-info.h"
25 :
26 : namespace v8 {
27 : namespace internal {
28 : namespace interpreter {
29 :
30 : // Scoped class tracking context objects created by the visitor. Represents
31 : // mutations of the context chain within the function body, allowing pushing and
32 : // popping of the current {context_register} during visitation.
33 : class BytecodeGenerator::ContextScope {
34 : public:
35 2447509 : ContextScope(BytecodeGenerator* generator, Scope* scope)
36 : : generator_(generator),
37 : scope_(scope),
38 : outer_(generator_->execution_context()),
39 : register_(Register::current_context()),
40 2447509 : depth_(0) {
41 : DCHECK(scope->NeedsContext() || outer_ == nullptr);
42 2447516 : if (outer_) {
43 328866 : depth_ = outer_->depth_ + 1;
44 :
45 : // Push the outer context into a new context register.
46 : Register outer_context_reg =
47 657732 : generator_->register_allocator()->NewRegister();
48 328866 : outer_->set_register(outer_context_reg);
49 657732 : generator_->builder()->PushContext(outer_context_reg);
50 : }
51 2447517 : generator_->set_execution_context(this);
52 2447517 : }
53 :
54 4894968 : ~ContextScope() {
55 2447483 : if (outer_) {
56 : DCHECK_EQ(register_.index(), Register::current_context().index());
57 657734 : generator_->builder()->PopContext(outer_->reg());
58 328869 : outer_->set_register(register_);
59 : }
60 2447485 : generator_->set_execution_context(outer_);
61 2447485 : }
62 :
63 : // Returns the depth of the given |scope| for the current execution context.
64 : int ContextChainDepth(Scope* scope) {
65 2844076 : return scope_->ContextChainLength(scope);
66 : }
67 :
68 : // Returns the execution context at |depth| in the current context chain if it
69 : // is a function local execution context, otherwise returns nullptr.
70 : ContextScope* Previous(int depth) {
71 2804397 : if (depth > depth_) {
72 : return nullptr;
73 : }
74 :
75 : ContextScope* previous = this;
76 2758944 : for (int i = depth; i > 0; --i) {
77 23101 : previous = previous->outer_;
78 : }
79 : return previous;
80 : }
81 :
82 : Register reg() const { return register_; }
83 :
84 : private:
85 : const BytecodeArrayBuilder* builder() const { return generator_->builder(); }
86 :
87 657735 : void set_register(Register reg) { register_ = reg; }
88 :
89 : BytecodeGenerator* generator_;
90 : Scope* scope_;
91 : ContextScope* outer_;
92 : Register register_;
93 : int depth_;
94 : };
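// A rough illustration of the push/pop protocol above: when VisitBlock (further
// down in this file) encounters a block whose scope needs its own context, it
// builds the new context and opens a ContextScope. The constructor parks the
// outer context in a freshly allocated register and emits PushContext; the
// destructor emits PopContext back to that register once the block has been
// visited.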
95 :
96 : // Scoped class for tracking control statements entered by the
      97             :  // visitor. The pattern derives from AstGraphBuilder::ControlScope.
98 : class BytecodeGenerator::ControlScope {
99 : public:
100 : explicit ControlScope(BytecodeGenerator* generator)
101 : : generator_(generator),
102 : outer_(generator->execution_control()),
103 7800038 : context_(generator->execution_context()) {
104 : generator_->set_execution_control(this);
105 : }
106 15600236 : virtual ~ControlScope() { generator_->set_execution_control(outer()); }
107 :
108 : void Break(Statement* stmt) {
109 46092 : PerformCommand(CMD_BREAK, stmt, kNoSourcePosition);
110 : }
111 : void Continue(Statement* stmt) {
112 3642 : PerformCommand(CMD_CONTINUE, stmt, kNoSourcePosition);
113 : }
114 : void ReturnAccumulator(int source_position = kNoSourcePosition) {
115 2080033 : PerformCommand(CMD_RETURN, nullptr, source_position);
116 : }
117 : void AsyncReturnAccumulator(int source_position = kNoSourcePosition) {
118 8201 : PerformCommand(CMD_ASYNC_RETURN, nullptr, source_position);
119 : }
120 :
121 : class DeferredCommands;
122 :
123 : protected:
124 : enum Command {
125 : CMD_BREAK,
126 : CMD_CONTINUE,
127 : CMD_RETURN,
128 : CMD_ASYNC_RETURN,
129 : CMD_RETHROW
130 : };
131 : static constexpr bool CommandUsesAccumulator(Command command) {
132 : return command != CMD_BREAK && command != CMD_CONTINUE;
133 : }
134 :
135 : void PerformCommand(Command command, Statement* statement,
136 : int source_position);
137 : virtual bool Execute(Command command, Statement* statement,
138 : int source_position) = 0;
139 :
140 : // Helper to pop the context chain to a depth expected by this control scope.
141 : // Note that it is the responsibility of each individual {Execute} method to
142 : // trigger this when commands are handled and control-flow continues locally.
143 : void PopContextToExpectedDepth();
144 :
145 : BytecodeGenerator* generator() const { return generator_; }
146 : ControlScope* outer() const { return outer_; }
147 : ContextScope* context() const { return context_; }
148 :
149 : private:
150 : BytecodeGenerator* generator_;
151 : ControlScope* outer_;
152 : ContextScope* context_;
153 :
154 : DISALLOW_COPY_AND_ASSIGN(ControlScope);
155 : };
156 :
157 : // Helper class for a try-finally control scope. It can record intercepted
158 : // control-flow commands that cause entry into a finally-block, and re-apply
     159             :  // them after leaving that block again. Special tokens are used to identify
160 : // paths going through the finally-block to dispatch after leaving the block.
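// A sketch of the token mechanism, using the constants defined below: for
//
//   try {
//     if (c) return x;      // records a return token, parks x in the result
//                           // register
//     ...                   // normal exit records kFallthroughToken (-1)
//   } finally { ... }       // an exception on entry records kRethrowToken (0)
//
// each path into the finally-block first stores its token in the token
// register; after the finally-block, ApplyDeferredCommands dispatches on that
// token and either falls through, re-throws, or re-applies the recorded
// command (here, returning x from the result register).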
161 41652 : class BytecodeGenerator::ControlScope::DeferredCommands final {
162 : public:
163 : // Fixed value tokens for paths we know we need.
164 : // Fallthrough is set to -1 to make it the fallthrough case of the jump table,
165 : // where the remaining cases start at 0.
166 : static const int kFallthroughToken = -1;
167 : // TODO(leszeks): Rethrow being 0 makes it use up a valuable LdaZero, which
168 : // means that other commands (such as break or return) have to use LdaSmi.
169 : // This can very slightly bloat bytecode, so perhaps token values should all
170 : // be shifted down by 1.
171 : static const int kRethrowToken = 0;
172 :
173 : DeferredCommands(BytecodeGenerator* generator, Register token_register,
174 : Register result_register)
175 : : generator_(generator),
176 : deferred_(generator->zone()),
177 : token_register_(token_register),
178 : result_register_(result_register),
179 : return_token_(-1),
180 83304 : async_return_token_(-1) {
181 : // There's always a rethrow path.
182 : // TODO(leszeks): We could decouple deferred_ index and token to allow us
183 : // to still push this lazily.
184 : STATIC_ASSERT(kRethrowToken == 0);
185 83302 : deferred_.push_back({CMD_RETHROW, nullptr, kRethrowToken});
186 : }
187 :
188 : // One recorded control-flow command.
189 : struct Entry {
190 : Command command; // The command type being applied on this path.
191 : Statement* statement; // The target statement for the command or {nullptr}.
192 : int token; // A token identifying this particular path.
193 : };
194 :
195 : // Records a control-flow command while entering the finally-block. This also
196 : // generates a new dispatch token that identifies one particular path. This
197 : // expects the result to be in the accumulator.
198 52604 : void RecordCommand(Command command, Statement* statement) {
199 52604 : int token = GetTokenForCommand(command, statement);
200 :
201 : DCHECK_LT(token, deferred_.size());
202 : DCHECK_EQ(deferred_[token].command, command);
203 : DCHECK_EQ(deferred_[token].statement, statement);
204 : DCHECK_EQ(deferred_[token].token, token);
205 :
206 52604 : if (CommandUsesAccumulator(command)) {
207 52352 : builder()->StoreAccumulatorInRegister(result_register_);
208 : }
209 52605 : builder()->LoadLiteral(Smi::FromInt(token));
210 52608 : builder()->StoreAccumulatorInRegister(token_register_);
211 52607 : if (!CommandUsesAccumulator(command)) {
212 : // If we're not saving the accumulator in the result register, shove a
213 : // harmless value there instead so that it is still considered "killed" in
214 : // the liveness analysis. Normally we would LdaUndefined first, but the
215 : // Smi token value is just as good, and by reusing it we save a bytecode.
216 253 : builder()->StoreAccumulatorInRegister(result_register_);
217 : }
218 52607 : }
219 :
220 : // Records the dispatch token to be used to identify the re-throw path when
221 : // the finally-block has been entered through the exception handler. This
222 : // expects the exception to be in the accumulator.
223 : void RecordHandlerReThrowPath() {
224 : // The accumulator contains the exception object.
225 41653 : RecordCommand(CMD_RETHROW, nullptr);
226 : }
227 :
228 : // Records the dispatch token to be used to identify the implicit fall-through
229 : // path at the end of a try-block into the corresponding finally-block.
230 41644 : void RecordFallThroughPath() {
231 41644 : builder()->LoadLiteral(Smi::FromInt(kFallthroughToken));
232 41651 : builder()->StoreAccumulatorInRegister(token_register_);
233 : // Since we're not saving the accumulator in the result register, shove a
234 : // harmless value there instead so that it is still considered "killed" in
235 : // the liveness analysis. Normally we would LdaUndefined first, but the Smi
236 : // token value is just as good, and by reusing it we save a bytecode.
237 41651 : builder()->StoreAccumulatorInRegister(result_register_);
238 41653 : }
239 :
     240             :  // Applies all recorded control-flow commands again after the finally-block.
241 : // This generates a dynamic dispatch on the token from the entry point.
242 41641 : void ApplyDeferredCommands() {
243 41641 : if (deferred_.size() == 0) return;
244 :
245 : BytecodeLabel fall_through;
246 :
247 41641 : if (deferred_.size() == 1) {
248 : // For a single entry, just jump to the fallthrough if we don't match the
249 : // entry token.
250 : const Entry& entry = deferred_[0];
251 :
252 : builder()
253 78688 : ->LoadLiteral(Smi::FromInt(entry.token))
254 39356 : .CompareReference(token_register_)
255 39354 : .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &fall_through);
256 :
257 39353 : if (CommandUsesAccumulator(entry.command)) {
258 39355 : builder()->LoadAccumulatorWithRegister(result_register_);
259 : }
260 39352 : execution_control()->PerformCommand(entry.command, entry.statement,
261 39352 : kNoSourcePosition);
262 : } else {
263 : // For multiple entries, build a jump table and switch on the token,
264 : // jumping to the fallthrough if none of them match.
265 :
266 : BytecodeJumpTable* jump_table =
267 4594 : builder()->AllocateJumpTable(static_cast<int>(deferred_.size()), 0);
268 : builder()
269 2297 : ->LoadAccumulatorWithRegister(token_register_)
270 2297 : .SwitchOnSmiNoFeedback(jump_table)
271 2297 : .Jump(&fall_through);
272 7891 : for (const Entry& entry : deferred_) {
273 11188 : builder()->Bind(jump_table, entry.token);
274 :
275 5594 : if (CommandUsesAccumulator(entry.command)) {
276 5341 : builder()->LoadAccumulatorWithRegister(result_register_);
277 : }
278 5594 : execution_control()->PerformCommand(entry.command, entry.statement,
279 5594 : kNoSourcePosition);
280 : }
281 : }
282 :
283 41653 : builder()->Bind(&fall_through);
284 : }
285 :
286 : BytecodeArrayBuilder* builder() { return generator_->builder(); }
287 : ControlScope* execution_control() { return generator_->execution_control(); }
288 :
289 : private:
290 52600 : int GetTokenForCommand(Command command, Statement* statement) {
291 52600 : switch (command) {
292 : case CMD_RETURN:
293 1969 : return GetReturnToken();
294 : case CMD_ASYNC_RETURN:
295 2333 : return GetAsyncReturnToken();
296 : case CMD_RETHROW:
297 : return kRethrowToken;
298 : default:
299 : // TODO(leszeks): We could also search for entries with the same
300 : // command and statement.
301 253 : return GetNewTokenForCommand(command, statement);
302 : }
303 : }
304 :
305 1969 : int GetReturnToken() {
306 1969 : if (return_token_ == -1) {
307 1820 : return_token_ = GetNewTokenForCommand(CMD_RETURN, nullptr);
308 : }
309 1969 : return return_token_;
310 : }
311 :
312 2333 : int GetAsyncReturnToken() {
313 2333 : if (async_return_token_ == -1) {
314 1224 : async_return_token_ = GetNewTokenForCommand(CMD_ASYNC_RETURN, nullptr);
315 : }
316 2333 : return async_return_token_;
317 : }
318 :
319 : int GetNewTokenForCommand(Command command, Statement* statement) {
320 3297 : int token = static_cast<int>(deferred_.size());
321 6594 : deferred_.push_back({command, statement, token});
322 : return token;
323 : }
324 :
325 : BytecodeGenerator* generator_;
326 : ZoneVector<Entry> deferred_;
327 : Register token_register_;
328 : Register result_register_;
329 :
330 : // Tokens for commands that don't need a statement.
331 : int return_token_;
332 : int async_return_token_;
333 : };
334 :
335 : // Scoped class for dealing with control flow reaching the function level.
336 4237264 : class BytecodeGenerator::ControlScopeForTopLevel final
337 : : public BytecodeGenerator::ControlScope {
338 : public:
339 : explicit ControlScopeForTopLevel(BytecodeGenerator* generator)
340 2118653 : : ControlScope(generator) {}
341 :
342 : protected:
343 2120537 : bool Execute(Command command, Statement* statement,
344 : int source_position) override {
345 2120537 : switch (command) {
346 : case CMD_BREAK: // We should never see break/continue in top-level.
347 : case CMD_CONTINUE:
348 0 : UNREACHABLE();
349 : case CMD_RETURN:
350 : // No need to pop contexts, execution leaves the method body.
351 2079852 : generator()->BuildReturn(source_position);
352 2079887 : return true;
353 : case CMD_ASYNC_RETURN:
354 : // No need to pop contexts, execution leaves the method body.
355 7092 : generator()->BuildAsyncReturn(source_position);
356 7092 : return true;
357 : case CMD_RETHROW:
358 : // No need to pop contexts, execution leaves the method body.
359 : generator()->BuildReThrow();
360 33638 : return true;
361 : }
362 : return false;
363 : }
364 : };
365 :
366 : // Scoped class for enabling break inside blocks and switch blocks.
367 10532302 : class BytecodeGenerator::ControlScopeForBreakable final
368 : : public BytecodeGenerator::ControlScope {
369 : public:
370 : ControlScopeForBreakable(BytecodeGenerator* generator,
371 : BreakableStatement* statement,
372 : BreakableControlFlowBuilder* control_builder)
373 : : ControlScope(generator),
374 : statement_(statement),
375 5266055 : control_builder_(control_builder) {}
376 :
377 : protected:
378 414981 : bool Execute(Command command, Statement* statement,
379 : int source_position) override {
380 414981 : if (statement != statement_) return false;
381 26927 : switch (command) {
382 : case CMD_BREAK:
383 : PopContextToExpectedDepth();
384 26927 : control_builder_->Break();
385 26927 : return true;
386 : case CMD_CONTINUE:
387 : case CMD_RETURN:
388 : case CMD_ASYNC_RETURN:
389 : case CMD_RETHROW:
390 : break;
391 : }
392 : return false;
393 : }
394 :
395 : private:
396 : Statement* statement_;
397 : BreakableControlFlowBuilder* control_builder_;
398 : };
399 :
400 : // Scoped class for enabling 'break' and 'continue' in iteration
401 : // constructs, e.g. do...while, while..., for...
402 : class BytecodeGenerator::ControlScopeForIteration final
403 : : public BytecodeGenerator::ControlScope {
404 : public:
405 : ControlScopeForIteration(BytecodeGenerator* generator,
406 : IterationStatement* statement,
407 : LoopBuilder* loop_builder)
408 : : ControlScope(generator),
409 : statement_(statement),
410 260548 : loop_builder_(loop_builder) {
411 260548 : generator->loop_depth_++;
412 : }
413 521104 : ~ControlScopeForIteration() override { generator()->loop_depth_--; }
414 :
415 : protected:
416 68560 : bool Execute(Command command, Statement* statement,
417 : int source_position) override {
418 68560 : if (statement != statement_) return false;
419 22807 : switch (command) {
420 : case CMD_BREAK:
421 : PopContextToExpectedDepth();
422 19165 : loop_builder_->Break();
423 19165 : return true;
424 : case CMD_CONTINUE:
425 : PopContextToExpectedDepth();
426 3642 : loop_builder_->Continue();
427 3643 : return true;
428 : case CMD_RETURN:
429 : case CMD_ASYNC_RETURN:
430 : case CMD_RETHROW:
431 : break;
432 : }
433 : return false;
434 : }
435 :
436 : private:
437 : Statement* statement_;
438 : LoopBuilder* loop_builder_;
439 : };
440 :
441 : // Scoped class for enabling 'throw' in try-catch constructs.
442 226262 : class BytecodeGenerator::ControlScopeForTryCatch final
443 : : public BytecodeGenerator::ControlScope {
444 : public:
445 : ControlScopeForTryCatch(BytecodeGenerator* generator,
446 : TryCatchBuilder* try_catch_builder)
447 113131 : : ControlScope(generator) {}
448 :
449 : protected:
450 14051 : bool Execute(Command command, Statement* statement,
451 : int source_position) override {
452 14051 : switch (command) {
453 : case CMD_BREAK:
454 : case CMD_CONTINUE:
455 : case CMD_RETURN:
456 : case CMD_ASYNC_RETURN:
457 : break;
458 : case CMD_RETHROW:
459 : // No need to pop contexts, execution re-enters the method body via the
460 : // stack unwinding mechanism which itself restores contexts correctly.
461 : generator()->BuildReThrow();
462 1614 : return true;
463 : }
464 : return false;
465 : }
466 : };
467 :
468 : // Scoped class for enabling control flow through try-finally constructs.
469 83304 : class BytecodeGenerator::ControlScopeForTryFinally final
470 : : public BytecodeGenerator::ControlScope {
471 : public:
472 : ControlScopeForTryFinally(BytecodeGenerator* generator,
473 : TryFinallyBuilder* try_finally_builder,
474 : DeferredCommands* commands)
475 : : ControlScope(generator),
476 : try_finally_builder_(try_finally_builder),
477 41651 : commands_(commands) {}
478 :
479 : protected:
480 10951 : bool Execute(Command command, Statement* statement,
481 : int source_position) override {
482 10951 : switch (command) {
483 : case CMD_BREAK:
484 : case CMD_CONTINUE:
485 : case CMD_RETURN:
486 : case CMD_ASYNC_RETURN:
487 : case CMD_RETHROW:
488 : PopContextToExpectedDepth();
     489             :  // We don't record source_position here since we don't generate the return
     490             :  // bytecode right here; it will be generated later as part of the finally
     491             :  // block. Each return bytecode generated in the finally block will get its
     492             :  // own return source position from the corresponding return statement, or
     493             :  // from the end of the function if no return statement is present.
494 10952 : commands_->RecordCommand(command, statement);
495 10954 : try_finally_builder_->LeaveTry();
496 10956 : return true;
497 : }
498 : return false;
499 : }
500 :
501 : private:
502 : TryFinallyBuilder* try_finally_builder_;
503 : DeferredCommands* commands_;
504 : };
505 :
     506             : // Allocate and fetch the coverage indices tracking n-ary logical expressions.
507 21184 : class BytecodeGenerator::NaryCodeCoverageSlots {
508 : public:
509 21184 : NaryCodeCoverageSlots(BytecodeGenerator* generator, NaryOperation* expr)
510 21184 : : generator_(generator) {
511 21184 : if (generator_->block_coverage_builder_ == nullptr) return;
512 592 : for (size_t i = 0; i < expr->subsequent_length(); i++) {
513 488 : coverage_slots_.push_back(
514 244 : generator_->AllocateNaryBlockCoverageSlotIfEnabled(expr, i));
515 : }
516 : }
517 :
518 : int GetSlotFor(size_t subsequent_expr_index) const {
519 80483 : if (generator_->block_coverage_builder_ == nullptr) {
520 : return BlockCoverageBuilder::kNoCoverageArraySlot;
521 : }
522 : DCHECK(coverage_slots_.size() > subsequent_expr_index);
523 220 : return coverage_slots_[subsequent_expr_index];
524 : }
525 :
526 : private:
527 : BytecodeGenerator* generator_;
528 : std::vector<int> coverage_slots_;
529 : };
530 :
531 2182861 : void BytecodeGenerator::ControlScope::PerformCommand(Command command,
532 : Statement* statement,
533 : int source_position) {
534 : ControlScope* current = this;
535 : do {
536 2629107 : if (current->Execute(command, statement, source_position)) {
537 2182921 : return;
538 : }
539 : current = current->outer();
540 446246 : } while (current != nullptr);
541 0 : UNREACHABLE();
542 : }
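// For example, a break inside a try-finally inside a loop is performed twice:
// the first walk stops at ControlScopeForTryFinally, which records it as a
// deferred command and leaves the try-block; once the finally-block has run,
// ApplyDeferredCommands performs the break again, and this time the enclosing
// ControlScopeForIteration pops contexts to the loop's depth and emits the
// actual jump out of the loop.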
543 :
544 0 : void BytecodeGenerator::ControlScope::PopContextToExpectedDepth() {
545 : // Pop context to the expected depth. Note that this can in fact pop multiple
546 : // contexts at once because the {PopContext} bytecode takes a saved register.
547 60686 : if (generator()->execution_context() != context()) {
548 9410 : generator()->builder()->PopContext(context()->reg());
549 : }
550 0 : }
551 :
552 : class BytecodeGenerator::RegisterAllocationScope final {
553 : public:
554 : explicit RegisterAllocationScope(BytecodeGenerator* generator)
555 : : generator_(generator),
556 : outer_next_register_index_(
557 44829113 : generator->register_allocator()->next_register_index()) {}
558 :
559 44825580 : ~RegisterAllocationScope() {
560 44826011 : generator_->register_allocator()->ReleaseRegisters(
561 : outer_next_register_index_);
562 : }
563 :
564 : BytecodeGenerator* generator() const { return generator_; }
565 :
566 : private:
567 : BytecodeGenerator* generator_;
568 : int outer_next_register_index_;
569 :
570 : DISALLOW_COPY_AND_ASSIGN(RegisterAllocationScope);
571 : };
572 :
573 : class BytecodeGenerator::AccumulatorPreservingScope final {
574 : public:
575 2434580 : explicit AccumulatorPreservingScope(BytecodeGenerator* generator,
576 : AccumulatorPreservingMode mode)
577 2434580 : : generator_(generator) {
578 2434580 : if (mode == AccumulatorPreservingMode::kPreserve) {
579 : saved_accumulator_register_ =
580 144 : generator_->register_allocator()->NewRegister();
581 144 : generator_->builder()->StoreAccumulatorInRegister(
582 144 : saved_accumulator_register_);
583 : }
584 2434580 : }
585 :
586 2434579 : ~AccumulatorPreservingScope() {
587 2434569 : if (saved_accumulator_register_.is_valid()) {
588 144 : generator_->builder()->LoadAccumulatorWithRegister(
589 144 : saved_accumulator_register_);
590 : }
591 : }
592 :
593 : private:
594 : BytecodeGenerator* generator_;
595 : Register saved_accumulator_register_;
596 :
597 : DISALLOW_COPY_AND_ASSIGN(AccumulatorPreservingScope);
598 : };
599 :
600 : // Scoped base class for determining how the result of an expression will be
601 : // used.
602 : class BytecodeGenerator::ExpressionResultScope {
603 : public:
604 : ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
605 : : outer_(generator->execution_result()),
606 : allocator_(generator),
607 : kind_(kind),
608 89658226 : type_hint_(TypeHint::kAny) {
609 : generator->set_execution_result(this);
610 : }
611 :
612 89651591 : ~ExpressionResultScope() {
613 44826011 : allocator_.generator()->set_execution_result(outer_);
614 44825580 : }
615 :
616 : bool IsEffect() const { return kind_ == Expression::kEffect; }
617 : bool IsValue() const { return kind_ == Expression::kValue; }
618 : bool IsTest() const { return kind_ == Expression::kTest; }
619 :
620 : TestResultScope* AsTest() {
621 : DCHECK(IsTest());
622 : return reinterpret_cast<TestResultScope*>(this);
623 : }
624 :
625 : // Specify expression always returns a Boolean result value.
626 : void SetResultIsBoolean() {
627 : DCHECK_EQ(type_hint_, TypeHint::kAny);
628 1250802 : type_hint_ = TypeHint::kBoolean;
629 : }
630 :
631 : void SetResultIsString() {
632 : DCHECK_EQ(type_hint_, TypeHint::kAny);
633 1761146 : type_hint_ = TypeHint::kString;
634 : }
635 :
636 : TypeHint type_hint() const { return type_hint_; }
637 :
638 : private:
639 : ExpressionResultScope* outer_;
640 : RegisterAllocationScope allocator_;
641 : Expression::Context kind_;
642 : TypeHint type_hint_;
643 :
644 : DISALLOW_COPY_AND_ASSIGN(ExpressionResultScope);
645 : };
646 :
     647             : // Scoped class used when the current expression is not expected to
     648             : // produce a result.
649 12905159 : class BytecodeGenerator::EffectResultScope final
650 : : public ExpressionResultScope {
651 : public:
652 : explicit EffectResultScope(BytecodeGenerator* generator)
653 : : ExpressionResultScope(generator, Expression::kEffect) {}
654 : };
655 :
     656             : // Scoped class used when the result of the expression being evaluated
     657             : // should go into the interpreter's accumulator.
658 30882446 : class BytecodeGenerator::ValueResultScope final : public ExpressionResultScope {
659 : public:
660 : explicit ValueResultScope(BytecodeGenerator* generator)
661 : : ExpressionResultScope(generator, Expression::kValue) {}
662 : };
663 :
     664             : // Scoped class used when the result of the expression being evaluated is
     665             : // only tested, with jumps to two branches.
666 1041379 : class BytecodeGenerator::TestResultScope final : public ExpressionResultScope {
667 : public:
668 : TestResultScope(BytecodeGenerator* generator, BytecodeLabels* then_labels,
669 : BytecodeLabels* else_labels, TestFallthrough fallthrough)
670 : : ExpressionResultScope(generator, Expression::kTest),
671 : result_consumed_by_test_(false),
672 : fallthrough_(fallthrough),
673 : then_labels_(then_labels),
674 1041370 : else_labels_(else_labels) {}
675 :
     676             :  // Used when code special-cases TestResultScope and consumes any
677 : // possible value by testing and jumping to a then/else label.
678 354302 : void SetResultConsumedByTest() { result_consumed_by_test_ = true; }
679 : bool result_consumed_by_test() { return result_consumed_by_test_; }
680 :
681 : // Inverts the control flow of the operation, swapping the then and else
682 : // labels and the fallthrough.
683 : void InvertControlFlow() {
684 : std::swap(then_labels_, else_labels_);
685 246713 : fallthrough_ = inverted_fallthrough();
686 : }
687 :
688 44316 : BytecodeLabel* NewThenLabel() { return then_labels_->New(); }
689 14466 : BytecodeLabel* NewElseLabel() { return else_labels_->New(); }
690 :
691 : BytecodeLabels* then_labels() const { return then_labels_; }
692 : BytecodeLabels* else_labels() const { return else_labels_; }
693 :
694 : void set_then_labels(BytecodeLabels* then_labels) {
695 : then_labels_ = then_labels;
696 : }
697 : void set_else_labels(BytecodeLabels* else_labels) {
698 : else_labels_ = else_labels;
699 : }
700 :
701 : TestFallthrough fallthrough() const { return fallthrough_; }
702 : TestFallthrough inverted_fallthrough() const {
703 246713 : switch (fallthrough_) {
704 : case TestFallthrough::kThen:
705 : return TestFallthrough::kElse;
706 : case TestFallthrough::kElse:
707 : return TestFallthrough::kThen;
708 : default:
709 : return TestFallthrough::kNone;
710 : }
711 : }
712 : void set_fallthrough(TestFallthrough fallthrough) {
713 : fallthrough_ = fallthrough;
714 : }
715 :
716 : private:
717 : bool result_consumed_by_test_;
718 : TestFallthrough fallthrough_;
719 : BytecodeLabels* then_labels_;
720 : BytecodeLabels* else_labels_;
721 :
722 : DISALLOW_COPY_AND_ASSIGN(TestResultScope);
723 : };
724 :
725 : // Used to build a list of global declaration initial value pairs.
726 : class BytecodeGenerator::GlobalDeclarationsBuilder final : public ZoneObject {
727 : public:
728 : explicit GlobalDeclarationsBuilder(Zone* zone)
729 : : declarations_(0, zone),
730 : constant_pool_entry_(0),
731 2236744 : has_constant_pool_entry_(false) {}
732 :
733 : void AddFunctionDeclaration(const AstRawString* name, FeedbackSlot slot,
734 : int feedback_cell_index, FunctionLiteral* func) {
735 : DCHECK(!slot.IsInvalid());
736 493248 : declarations_.push_back(Declaration(name, slot, feedback_cell_index, func));
737 : }
738 :
739 : void AddUndefinedDeclaration(const AstRawString* name, FeedbackSlot slot) {
740 : DCHECK(!slot.IsInvalid());
741 2573210 : declarations_.push_back(Declaration(name, slot));
742 : }
743 :
744 107406 : Handle<FixedArray> AllocateDeclarations(UnoptimizedCompilationInfo* info,
745 : Handle<Script> script,
746 : Isolate* isolate) {
747 : DCHECK(has_constant_pool_entry_);
748 : int array_index = 0;
749 : Handle<FixedArray> data = isolate->factory()->NewFixedArray(
750 107406 : static_cast<int>(declarations_.size() * 4), AllocationType::kOld);
751 1416734 : for (const Declaration& declaration : declarations_) {
752 1309328 : FunctionLiteral* func = declaration.func;
753 : Handle<Object> initial_value;
754 1309328 : if (func == nullptr) {
755 : initial_value = isolate->factory()->undefined_value();
756 : } else {
757 213297 : initial_value = Compiler::GetSharedFunctionInfo(func, script, isolate);
758 : }
759 :
760 : // Return a null handle if any initial values can't be created. Caller
761 : // will set stack overflow.
762 1309328 : if (initial_value.is_null()) return Handle<FixedArray>();
763 :
764 3927984 : data->set(array_index++, *declaration.name->string());
765 1309328 : data->set(array_index++, Smi::FromInt(declaration.slot.ToInt()));
766 : Object undefined_or_literal_slot;
767 1309328 : if (declaration.feedback_cell_index_for_function == -1) {
768 1096031 : undefined_or_literal_slot = ReadOnlyRoots(isolate).undefined_value();
769 : } else {
770 : undefined_or_literal_slot =
771 213297 : Smi::FromInt(declaration.feedback_cell_index_for_function);
772 : }
773 1309328 : data->set(array_index++, undefined_or_literal_slot);
774 2618656 : data->set(array_index++, *initial_value);
775 : }
776 107406 : return data;
777 : }
778 :
779 : size_t constant_pool_entry() {
780 : DCHECK(has_constant_pool_entry_);
781 : return constant_pool_entry_;
782 : }
783 :
784 : void set_constant_pool_entry(size_t constant_pool_entry) {
785 : DCHECK(!empty());
786 : DCHECK(!has_constant_pool_entry_);
787 118077 : constant_pool_entry_ = constant_pool_entry;
788 118077 : has_constant_pool_entry_ = true;
789 : }
790 :
791 : bool empty() { return declarations_.empty(); }
792 :
793 : private:
794 : struct Declaration {
795 2236731 : Declaration() : slot(FeedbackSlot::Invalid()), func(nullptr) {}
796 : Declaration(const AstRawString* name, FeedbackSlot slot,
797 : int feedback_cell_index, FunctionLiteral* func)
798 : : name(name),
799 : slot(slot),
800 : feedback_cell_index_for_function(feedback_cell_index),
801 246624 : func(func) {}
802 : Declaration(const AstRawString* name, FeedbackSlot slot)
803 : : name(name),
804 : slot(slot),
805 : feedback_cell_index_for_function(-1),
806 1286560 : func(nullptr) {}
807 :
808 : const AstRawString* name;
809 : FeedbackSlot slot;
810 : // Only valid for function declarations. Specifies the index into the
811 : // closure_feedback_cell array used when creating closures of this
812 : // function.
813 : int feedback_cell_index_for_function;
814 : FunctionLiteral* func;
815 : };
816 : ZoneVector<Declaration> declarations_;
817 : size_t constant_pool_entry_;
818 : bool has_constant_pool_entry_;
819 : };
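// For reference, the FixedArray built by AllocateDeclarations above is flat,
// with four slots per declaration:
//
//   [ name, feedback_slot, feedback_cell_index_or_undefined, initial_value, ... ]
//
// where initial_value is undefined for plain variable declarations and the
// SharedFunctionInfo for function declarations.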
820 :
821 : class BytecodeGenerator::CurrentScope final {
822 : public:
823 : CurrentScope(BytecodeGenerator* generator, Scope* scope)
824 : : generator_(generator), outer_scope_(generator->current_scope()) {
825 5376147 : if (scope != nullptr) {
826 : DCHECK_EQ(outer_scope_, scope->outer_scope());
827 : generator_->set_current_scope(scope);
828 : }
829 : }
830 : ~CurrentScope() {
831 5376307 : if (outer_scope_ != generator_->current_scope()) {
832 : generator_->set_current_scope(outer_scope_);
833 : }
834 : }
835 :
836 : private:
837 : BytecodeGenerator* generator_;
838 : Scope* outer_scope_;
839 : };
840 :
841 : class BytecodeGenerator::FeedbackSlotCache : public ZoneObject {
842 : public:
843 : enum class SlotKind {
844 : kStoreGlobalSloppy,
845 : kStoreGlobalStrict,
846 : kStoreNamedStrict,
847 : kStoreNamedSloppy,
848 : kLoadProperty,
849 : kLoadGlobalNotInsideTypeof,
850 : kLoadGlobalInsideTypeof,
851 : kClosureFeedbackCell
852 : };
853 :
854 : explicit FeedbackSlotCache(Zone* zone) : map_(zone) {}
855 :
856 : void Put(SlotKind slot_kind, Variable* variable, int slot_index) {
857 : PutImpl(slot_kind, 0, variable, slot_index);
858 : }
859 : void Put(SlotKind slot_kind, AstNode* node, int slot_index) {
860 : PutImpl(slot_kind, 0, node, slot_index);
861 : }
862 : void Put(SlotKind slot_kind, int variable_index, const AstRawString* name,
863 : int slot_index) {
864 : PutImpl(slot_kind, variable_index, name, slot_index);
865 : }
866 :
867 : int Get(SlotKind slot_kind, Variable* variable) const {
868 : return GetImpl(slot_kind, 0, variable);
869 : }
870 : int Get(SlotKind slot_kind, AstNode* node) const {
871 : return GetImpl(slot_kind, 0, node);
872 : }
873 : int Get(SlotKind slot_kind, int variable_index,
874 : const AstRawString* name) const {
875 : return GetImpl(slot_kind, variable_index, name);
876 : }
877 :
878 : private:
879 : using Key = std::tuple<SlotKind, int, const void*>;
880 :
881 : void PutImpl(SlotKind slot_kind, int index, const void* node,
882 : int slot_index) {
883 : Key key = std::make_tuple(slot_kind, index, node);
884 : auto entry = std::make_pair(key, slot_index);
885 : map_.insert(entry);
886 : }
887 :
888 : int GetImpl(SlotKind slot_kind, int index, const void* node) const {
889 : Key key = std::make_tuple(slot_kind, index, node);
890 : auto iter = map_.find(key);
891 12946501 : if (iter != map_.end()) {
892 5166016 : return iter->second;
893 : }
894 : return -1;
895 : }
896 :
897 : ZoneMap<Key, int> map_;
898 : };
899 :
900 : class BytecodeGenerator::IteratorRecord final {
901 : public:
902 : IteratorRecord(Register object_register, Register next_register,
903 : IteratorType type = IteratorType::kNormal)
904 : : type_(type), object_(object_register), next_(next_register) {
905 : DCHECK(object_.is_valid() && next_.is_valid());
906 : }
907 :
908 : inline IteratorType type() const { return type_; }
909 : inline Register object() const { return object_; }
910 : inline Register next() const { return next_; }
911 :
912 : private:
913 : IteratorType type_;
914 : Register object_;
915 : Register next_;
916 : };
917 :
918 : #ifdef DEBUG
919 :
920 : static bool IsInEagerLiterals(
921 : FunctionLiteral* literal,
922 : const std::vector<FunctionLiteral*>& eager_literals) {
923 : for (FunctionLiteral* eager_literal : eager_literals) {
924 : if (literal == eager_literal) return true;
925 : }
926 : return false;
927 : }
928 :
929 : #endif // DEBUG
930 :
931 2118658 : BytecodeGenerator::BytecodeGenerator(
932 : UnoptimizedCompilationInfo* info,
933 : const AstStringConstants* ast_string_constants,
934 : std::vector<FunctionLiteral*>* eager_inner_literals)
935 : : zone_(info->zone()),
936 : builder_(zone(), info->num_parameters_including_this(),
937 2118664 : info->scope()->num_stack_slots(), info->feedback_vector_spec(),
938 : info->SourcePositionRecordingMode()),
939 : info_(info),
940 : ast_string_constants_(ast_string_constants),
941 2118673 : closure_scope_(info->scope()),
942 2118675 : current_scope_(info->scope()),
943 : eager_inner_literals_(eager_inner_literals),
944 : feedback_slot_cache_(new (zone()) FeedbackSlotCache(zone())),
945 : globals_builder_(new (zone()) GlobalDeclarationsBuilder(zone())),
946 : block_coverage_builder_(nullptr),
947 : global_declarations_(0, zone()),
948 : function_literals_(0, zone()),
949 : native_function_literals_(0, zone()),
950 : object_literals_(0, zone()),
951 : array_literals_(0, zone()),
952 : class_literals_(0, zone()),
953 : template_objects_(0, zone()),
954 : execution_control_(nullptr),
955 : execution_context_(nullptr),
956 : execution_result_(nullptr),
957 : incoming_new_target_or_generator_(),
958 : dummy_feedback_slot_(feedback_spec(), FeedbackSlotKind::kCompareOp),
959 : generator_jump_table_(nullptr),
960 : suspend_count_(0),
961 : loop_depth_(0),
962 16949327 : catch_prediction_(HandlerTable::UNCAUGHT) {
963 : DCHECK_EQ(closure_scope(), closure_scope()->GetClosureScope());
964 2118666 : if (info->has_source_range_map()) {
965 : block_coverage_builder_ = new (zone())
966 892 : BlockCoverageBuilder(zone(), builder(), info->source_range_map());
967 : }
968 2118666 : }
969 :
970 2094392 : Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
971 : Isolate* isolate, Handle<Script> script) {
972 : DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
973 : #ifdef DEBUG
974 : // Unoptimized compilation should be context-independent. Verify that we don't
975 : // access the native context by nulling it out during finalization.
976 : SaveAndSwitchContext save(isolate, Context());
977 : #endif
978 :
979 2094392 : AllocateDeferredConstants(isolate, script);
980 :
981 2094379 : if (block_coverage_builder_) {
982 892 : info()->set_coverage_info(
983 : isolate->factory()->NewCoverageInfo(block_coverage_builder_->slots()));
984 892 : if (FLAG_trace_block_coverage) {
985 0 : info()->coverage_info()->Print(info()->literal()->GetDebugName());
986 : }
987 : }
988 :
989 2094379 : if (HasStackOverflow()) return Handle<BytecodeArray>();
990 2094379 : Handle<BytecodeArray> bytecode_array = builder()->ToBytecodeArray(isolate);
991 :
992 2094403 : if (incoming_new_target_or_generator_.is_valid()) {
993 : bytecode_array->set_incoming_new_target_or_generator_register(
994 : incoming_new_target_or_generator_);
995 : }
996 :
997 2094403 : return bytecode_array;
998 : }
999 :
1000 2094379 : void BytecodeGenerator::AllocateDeferredConstants(Isolate* isolate,
1001 : Handle<Script> script) {
1002 : // Build global declaration pair arrays.
1003 2201785 : for (GlobalDeclarationsBuilder* globals_builder : global_declarations_) {
1004 : Handle<FixedArray> declarations =
1005 107406 : globals_builder->AllocateDeclarations(info(), script, isolate);
1006 107406 : if (declarations.is_null()) return SetStackOverflow();
1007 107406 : builder()->SetDeferredConstantPoolEntry(
1008 107406 : globals_builder->constant_pool_entry(), declarations);
1009 : }
1010 :
1011 : // Find or build shared function infos.
1012 4509529 : for (std::pair<FunctionLiteral*, size_t> literal : function_literals_) {
1013 : FunctionLiteral* expr = literal.first;
1014 : Handle<SharedFunctionInfo> shared_info =
1015 2415145 : Compiler::GetSharedFunctionInfo(expr, script, isolate);
1016 2415151 : if (shared_info.is_null()) return SetStackOverflow();
1017 2415151 : builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
1018 : }
1019 :
1020 : // Find or build shared function infos for the native function templates.
1021 2096228 : for (std::pair<NativeFunctionLiteral*, size_t> literal :
1022 1844 : native_function_literals_) {
1023 : NativeFunctionLiteral* expr = literal.first;
1024 : v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
1025 :
1026 : // Compute the function template for the native function.
1027 : v8::Local<v8::FunctionTemplate> info =
1028 : expr->extension()->GetNativeFunctionTemplate(
1029 3688 : v8_isolate, Utils::ToLocal(expr->name()));
1030 : DCHECK(!info.IsEmpty());
1031 :
1032 : Handle<SharedFunctionInfo> shared_info =
1033 : FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
1034 1844 : isolate, Utils::OpenHandle(*info), expr->name());
1035 : DCHECK(!shared_info.is_null());
1036 1844 : builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
1037 : }
1038 :
1039 : // Build object literal constant properties
1040 2283122 : for (std::pair<ObjectLiteral*, size_t> literal : object_literals_) {
1041 : ObjectLiteral* object_literal = literal.first;
1042 188732 : if (object_literal->properties_count() > 0) {
1043 : // If constant properties is an empty fixed array, we've already added it
1044 : // to the constant pool when visiting the object literal.
1045 : Handle<ObjectBoilerplateDescription> constant_properties =
1046 : object_literal->GetOrBuildBoilerplateDescription(isolate);
1047 :
1048 188737 : builder()->SetDeferredConstantPoolEntry(literal.second,
1049 188740 : constant_properties);
1050 : }
1051 : }
1052 :
1053 : // Build array literal constant elements
1054 2250668 : for (std::pair<ArrayLiteral*, size_t> literal : array_literals_) {
1055 : ArrayLiteral* array_literal = literal.first;
1056 : Handle<ArrayBoilerplateDescription> constant_elements =
1057 : array_literal->GetOrBuildBoilerplateDescription(isolate);
1058 156276 : builder()->SetDeferredConstantPoolEntry(literal.second, constant_elements);
1059 : }
1060 :
1061 : // Build class literal boilerplates.
1062 2134780 : for (std::pair<ClassLiteral*, size_t> literal : class_literals_) {
1063 : ClassLiteral* class_literal = literal.first;
1064 : Handle<ClassBoilerplate> class_boilerplate =
1065 40389 : ClassBoilerplate::BuildClassBoilerplate(isolate, class_literal);
1066 40389 : builder()->SetDeferredConstantPoolEntry(literal.second, class_boilerplate);
1067 : }
1068 :
1069 : // Build template literals.
1070 2096140 : for (std::pair<GetTemplateObject*, size_t> literal : template_objects_) {
1071 : GetTemplateObject* get_template_object = literal.first;
1072 : Handle<TemplateObjectDescription> description =
1073 1749 : get_template_object->GetOrBuildDescription(isolate);
1074 1749 : builder()->SetDeferredConstantPoolEntry(literal.second, description);
1075 : }
1076 : }
1077 :
1078 2118647 : void BytecodeGenerator::GenerateBytecode(uintptr_t stack_limit) {
1079 : DisallowHeapAllocation no_allocation;
1080 : DisallowHandleAllocation no_handles;
1081 : DisallowHandleDereference no_deref;
1082 :
1083 : InitializeAstVisitor(stack_limit);
1084 :
1085 : // Initialize the incoming context.
1086 4237279 : ContextScope incoming_context(this, closure_scope());
1087 :
1088 : // Initialize control scope.
1089 : ControlScopeForTopLevel control(this);
1090 :
1091 : RegisterAllocationScope register_scope(this);
1092 :
1093 2118653 : AllocateTopLevelRegisters();
1094 :
1095 2118661 : if (info()->literal()->CanSuspend()) {
1096 9416 : BuildGeneratorPrologue();
1097 : }
1098 :
1099 2118655 : if (closure_scope()->NeedsContext()) {
1100 : // Push a new inner context scope for the function.
1101 191328 : BuildNewLocalActivationContext();
1102 382655 : ContextScope local_function_context(this, closure_scope());
1103 191329 : BuildLocalActivationContextInitialization();
1104 191329 : GenerateBytecodeBody();
1105 : } else {
1106 1927327 : GenerateBytecodeBody();
1107 : }
1108 :
1109 : // Check that we are not falling off the end.
1110 : DCHECK(builder()->RemainderOfBlockIsDead());
1111 2118633 : }
1112 :
1113 2118646 : void BytecodeGenerator::GenerateBytecodeBody() {
1114 : // Build the arguments object if it is used.
1115 2118646 : VisitArgumentsObject(closure_scope()->arguments());
1116 :
1117 : // Build rest arguments array if it is used.
1118 : Variable* rest_parameter = closure_scope()->rest_parameter();
1119 2118651 : VisitRestArgumentsArray(rest_parameter);
1120 :
1121 : // Build assignment to the function name or {.this_function}
1122 : // variables if used.
1123 2118667 : VisitThisFunctionVariable(closure_scope()->function_var());
1124 2118668 : VisitThisFunctionVariable(closure_scope()->this_function_var());
1125 :
1126 : // Build assignment to {new.target} variable if it is used.
1127 2118665 : VisitNewTargetVariable(closure_scope()->new_target_var());
1128 :
1129 : // Create a generator object if necessary and initialize the
1130 : // {.generator_object} variable.
1131 4237299 : if (IsResumableFunction(info()->literal()->kind())) {
1132 11091 : BuildGeneratorObjectVariableInitialization();
1133 : }
1134 :
1135 : // Emit tracing call if requested to do so.
1136 2118654 : if (FLAG_trace) builder()->CallRuntime(Runtime::kTraceEnter);
1137 :
1138 : // Emit type profile call.
1139 2118654 : if (info()->collect_type_profile()) {
1140 68 : feedback_spec()->AddTypeProfileSlot();
1141 : int num_parameters = closure_scope()->num_parameters();
1142 260 : for (int i = 0; i < num_parameters; i++) {
1143 96 : Register parameter(builder()->Parameter(i));
1144 96 : builder()->LoadAccumulatorWithRegister(parameter).CollectTypeProfile(
1145 96 : closure_scope()->parameter(i)->initializer_position());
1146 : }
1147 : }
1148 :
1149 : // Visit declarations within the function scope.
1150 2118654 : VisitDeclarations(closure_scope()->declarations());
1151 :
1152 : // Emit initializing assignments for module namespace imports (if any).
1153 2118643 : VisitModuleNamespaceImports();
1154 :
1155 : // Perform a stack-check before the body.
1156 4237258 : builder()->StackCheck(info()->literal()->start_position());
1157 :
1158 : // The derived constructor case is handled in VisitCallSuper.
1159 2136484 : if (IsBaseConstructor(function_kind()) &&
1160 : info()->literal()->requires_instance_members_initializer()) {
1161 585 : BuildInstanceMemberInitialization(Register::function_closure(),
1162 585 : builder()->Receiver());
1163 : }
1164 :
1165 : // Visit statements in the function body.
1166 2118662 : VisitStatements(info()->literal()->body());
1167 :
1168 : // Emit an implicit return instruction in case control flow can fall off the
1169 : // end of the function without an explicit return being present on all paths.
1170 2118606 : if (!builder()->RemainderOfBlockIsDead()) {
1171 377799 : builder()->LoadUndefined();
1172 377801 : BuildReturn();
1173 : }
1174 2118608 : }
1175 :
1176 2118645 : void BytecodeGenerator::AllocateTopLevelRegisters() {
1177 4237305 : if (IsResumableFunction(info()->literal()->kind())) {
1178 : // Either directly use generator_object_var or allocate a new register for
1179 : // the incoming generator object.
1180 : Variable* generator_object_var = closure_scope()->generator_object_var();
1181 11091 : if (generator_object_var->location() == VariableLocation::LOCAL) {
1182 : incoming_new_target_or_generator_ =
1183 11091 : GetRegisterForLocalVariable(generator_object_var);
1184 : } else {
1185 0 : incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1186 : }
1187 2107569 : } else if (closure_scope()->new_target_var()) {
1188 : // Either directly use new_target_var or allocate a new register for
1189 : // the incoming new target object.
1190 : Variable* new_target_var = closure_scope()->new_target_var();
1191 99317 : if (new_target_var->location() == VariableLocation::LOCAL) {
1192 : incoming_new_target_or_generator_ =
1193 4341 : GetRegisterForLocalVariable(new_target_var);
1194 : } else {
1195 94976 : incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1196 : }
1197 : }
1198 2118660 : }
1199 :
1200 9416 : void BytecodeGenerator::BuildGeneratorPrologue() {
1201 : DCHECK_GT(info()->literal()->suspend_count(), 0);
1202 : DCHECK(generator_object().is_valid());
1203 : generator_jump_table_ =
1204 9416 : builder()->AllocateJumpTable(info()->literal()->suspend_count(), 0);
1205 :
1206 : // If the generator is not undefined, this is a resume, so perform state
1207 : // dispatch.
1208 9416 : builder()->SwitchOnGeneratorState(generator_object(), generator_jump_table_);
1209 :
1210 : // Otherwise, fall-through to the ordinary function prologue, after which we
1211 : // will run into the generator object creation and other extra code inserted
1212 : // by the parser.
1213 9416 : }
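// In other words, the prologue for a resumable function starts with a
// SwitchOnGeneratorState over the jump table allocated above: on the first
// call the generator object is still undefined, so execution falls through to
// the regular prologue; on a resume it jumps straight to the entry bound for
// the corresponding suspend point.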
1214 :
1215 5255335 : void BytecodeGenerator::VisitBlock(Block* stmt) {
1216 : // Visit declarations and statements.
1217 : CurrentScope current_scope(this, stmt->scope());
1218 5255335 : if (stmt->scope() != nullptr && stmt->scope()->NeedsContext()) {
1219 24460 : BuildNewLocalBlockContext(stmt->scope());
1220 48920 : ContextScope scope(this, stmt->scope());
1221 24459 : VisitBlockDeclarationsAndStatements(stmt);
1222 : } else {
1223 5230875 : VisitBlockDeclarationsAndStatements(stmt);
1224 : }
1225 5255493 : }
1226 :
1227 5255390 : void BytecodeGenerator::VisitBlockDeclarationsAndStatements(Block* stmt) {
1228 5255390 : BlockBuilder block_builder(builder(), block_coverage_builder_, stmt);
1229 : ControlScopeForBreakable execution_control(this, stmt, &block_builder);
1230 5255390 : if (stmt->scope() != nullptr) {
1231 191023 : VisitDeclarations(stmt->scope()->declarations());
1232 : }
1233 5255403 : VisitStatements(stmt->statements());
1234 5255473 : }
1235 :
1236 6893511 : void BytecodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
1237 : Variable* variable = decl->var();
1238 : // Unused variables don't need to be visited.
1239 6893511 : if (!variable->is_used()) return;
1240 :
1241 6474185 : switch (variable->location()) {
1242 : case VariableLocation::UNALLOCATED: {
1243 : DCHECK(!variable->binding_needs_init());
1244 : FeedbackSlot slot =
1245 1286598 : GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
1246 : globals_builder()->AddUndefinedDeclaration(variable->raw_name(), slot);
1247 : break;
1248 : }
1249 : case VariableLocation::LOCAL:
1250 1050228 : if (variable->binding_needs_init()) {
1251 3274 : Register destination(builder()->Local(variable->index()));
1252 3274 : builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1253 : }
1254 : break;
1255 : case VariableLocation::PARAMETER:
1256 2301594 : if (variable->binding_needs_init()) {
1257 0 : Register destination(builder()->Parameter(variable->index()));
1258 0 : builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1259 : }
1260 : break;
1261 : case VariableLocation::CONTEXT:
1262 1536110 : if (variable->binding_needs_init()) {
1263 : DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1264 755519 : builder()->LoadTheHole().StoreContextSlot(execution_context()->reg(),
1265 755549 : variable->index(), 0);
1266 : }
1267 : break;
1268 : case VariableLocation::LOOKUP: {
1269 : DCHECK_EQ(VariableMode::kDynamic, variable->mode());
1270 : DCHECK(!variable->binding_needs_init());
1271 :
1272 281927 : Register name = register_allocator()->NewRegister();
1273 :
1274 : builder()
1275 281927 : ->LoadLiteral(variable->raw_name())
1276 281927 : .StoreAccumulatorInRegister(name)
1277 281927 : .CallRuntime(Runtime::kDeclareEvalVar, name);
1278 : break;
1279 : }
1280 : case VariableLocation::MODULE:
1281 35057 : if (variable->IsExport() && variable->binding_needs_init()) {
1282 17068 : builder()->LoadTheHole();
1283 17068 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1284 : }
1285 : // Nothing to do for imports.
1286 : break;
1287 : }
1288 : }
1289 :
1290 563040 : void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
1291 : Variable* variable = decl->var();
1292 : DCHECK(variable->mode() == VariableMode::kLet ||
1293 : variable->mode() == VariableMode::kVar ||
1294 : variable->mode() == VariableMode::kDynamic);
1295 : // Unused variables don't need to be visited.
1296 563040 : if (!variable->is_used()) return;
1297 :
1298 557486 : switch (variable->location()) {
1299 : case VariableLocation::UNALLOCATED: {
1300 : FeedbackSlot slot =
1301 246618 : GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
1302 246624 : int literal_index = GetCachedCreateClosureSlot(decl->fun());
1303 : globals_builder()->AddFunctionDeclaration(variable->raw_name(), slot,
1304 : literal_index, decl->fun());
1305 246624 : AddToEagerLiteralsIfEager(decl->fun());
1306 : break;
1307 : }
1308 : case VariableLocation::PARAMETER:
1309 : case VariableLocation::LOCAL: {
1310 26151 : VisitFunctionLiteral(decl->fun());
1311 26151 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1312 26151 : break;
1313 : }
1314 : case VariableLocation::CONTEXT: {
1315 : DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1316 277146 : VisitFunctionLiteral(decl->fun());
1317 : builder()->StoreContextSlot(execution_context()->reg(), variable->index(),
1318 277146 : 0);
1319 277145 : break;
1320 : }
1321 : case VariableLocation::LOOKUP: {
1322 7196 : RegisterList args = register_allocator()->NewRegisterList(2);
1323 : builder()
1324 7196 : ->LoadLiteral(variable->raw_name())
1325 7196 : .StoreAccumulatorInRegister(args[0]);
1326 7196 : VisitFunctionLiteral(decl->fun());
1327 7196 : builder()->StoreAccumulatorInRegister(args[1]).CallRuntime(
1328 7196 : Runtime::kDeclareEvalFunction, args);
1329 : break;
1330 : }
1331 : case VariableLocation::MODULE:
1332 : DCHECK_EQ(variable->mode(), VariableMode::kLet);
1333 : DCHECK(variable->IsExport());
1334 370 : VisitForAccumulatorValue(decl->fun());
1335 370 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1336 370 : break;
1337 : }
1338 : DCHECK_IMPLIES(decl->fun()->ShouldEagerCompile(),
1339 : IsInEagerLiterals(decl->fun(), *eager_inner_literals_));
1340 : }
1341 :
1342 2118627 : void BytecodeGenerator::VisitModuleNamespaceImports() {
1343 4235997 : if (!closure_scope()->is_module_scope()) return;
1344 :
1345 : RegisterAllocationScope register_scope(this);
1346 1257 : Register module_request = register_allocator()->NewRegister();
1347 :
1348 1257 : ModuleDescriptor* descriptor = closure_scope()->AsModuleScope()->module();
1349 1397 : for (auto entry : descriptor->namespace_imports()) {
1350 : builder()
1351 280 : ->LoadLiteral(Smi::FromInt(entry->module_request))
1352 140 : .StoreAccumulatorInRegister(module_request)
1353 140 : .CallRuntime(Runtime::kGetModuleNamespace, module_request);
1354 140 : Variable* var = closure_scope()->LookupInModule(entry->local_name);
1355 140 : BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kElided);
1356 : }
1357 : }
1358 :
1359 2352764 : void BytecodeGenerator::VisitDeclarations(Declaration::List* declarations) {
1360 : RegisterAllocationScope register_scope(this);
1361 : DCHECK(globals_builder()->empty());
1362 9809338 : for (Declaration* decl : *declarations) {
1363 : RegisterAllocationScope register_scope(this);
1364 7456555 : Visit(decl);
1365 : }
1366 2352783 : if (globals_builder()->empty()) return;
1367 :
1368 118078 : globals_builder()->set_constant_pool_entry(
1369 : builder()->AllocateDeferredConstantPoolEntry());
1370 : int encoded_flags = DeclareGlobalsEvalFlag::encode(info()->is_eval());
1371 :
1372 : // Emit code to declare globals.
1373 118077 : RegisterList args = register_allocator()->NewRegisterList(3);
1374 : builder()
1375 118072 : ->LoadConstantPoolEntry(globals_builder()->constant_pool_entry())
1376 118076 : .StoreAccumulatorInRegister(args[0])
1377 118081 : .LoadLiteral(Smi::FromInt(encoded_flags))
1378 118068 : .StoreAccumulatorInRegister(args[1])
1379 236149 : .MoveRegister(Register::function_closure(), args[2])
1380 118068 : .CallRuntime(Runtime::kDeclareGlobals, args);
1381 :
1382 : // Push and reset globals builder.
1383 236158 : global_declarations_.push_back(globals_builder());
1384 118080 : globals_builder_ = new (zone()) GlobalDeclarationsBuilder(zone());
1385 : }
1386 :
1387 7454822 : void BytecodeGenerator::VisitStatements(
1388 : const ZonePtrList<Statement>* statements) {
1389 38103054 : for (int i = 0; i < statements->length(); i++) {
1390 : // Allocate an outer register allocations scope for the statement.
1391 : RegisterAllocationScope allocation_scope(this);
1392 17284298 : Statement* stmt = statements->at(i);
1393 17284298 : Visit(stmt);
1394 17284108 : if (builder()->RemainderOfBlockIsDead()) break;
1395 : }
1396 7454569 : }
1397 :
1398 10212343 : void BytecodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
1399 : builder()->SetStatementPosition(stmt);
1400 10212343 : VisitForEffect(stmt->expression());
1401 10212207 : }
1402 :
1403 0 : void BytecodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {}
1404 :
1405 582467 : void BytecodeGenerator::VisitIfStatement(IfStatement* stmt) {
1406 : ConditionalControlFlowBuilder conditional_builder(
1407 1747412 : builder(), block_coverage_builder_, stmt);
1408 : builder()->SetStatementPosition(stmt);
1409 :
1410 582466 : if (stmt->condition()->ToBooleanIsTrue()) {
1411 : // Generate then block unconditionally as always true.
1412 429 : conditional_builder.Then();
1413 429 : Visit(stmt->then_statement());
1414 582043 : } else if (stmt->condition()->ToBooleanIsFalse()) {
1415 : // Generate else block unconditionally if it exists.
1416 8302 : if (stmt->HasElseStatement()) {
1417 7044 : conditional_builder.Else();
1418 7044 : Visit(stmt->else_statement());
1419 : }
1420 : } else {
    1421             :     // TODO(oth): If the then statement is a BreakStatement or
    1422             :     // ContinueStatement we can reduce the number of generated
    1423             :     // jump/jump_ifs here. See BasicLoops test.
1424 : VisitForTest(stmt->condition(), conditional_builder.then_labels(),
1425 573743 : conditional_builder.else_labels(), TestFallthrough::kThen);
1426 :
1427 573749 : conditional_builder.Then();
1428 573747 : Visit(stmt->then_statement());
1429 :
1430 573749 : if (stmt->HasElseStatement()) {
1431 43131 : conditional_builder.JumpToEnd();
1432 43130 : conditional_builder.Else();
1433 43131 : Visit(stmt->else_statement());
1434 : }
1435 : }
1436 582481 : }
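// The general (non-constant-condition) case above therefore emits roughly:
//
//   <evaluate condition as a test, branching to then/else labels>
//   then: <then statement>
//         Jump end            // only if an else statement exists
//   else: <else statement>
//   end:
//
// with the labels owned by the ConditionalControlFlowBuilder.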
1437 :
1438 0 : void BytecodeGenerator::VisitSloppyBlockFunctionStatement(
1439 : SloppyBlockFunctionStatement* stmt) {
1440 3116 : Visit(stmt->statement());
1441 0 : }
1442 :
1443 3642 : void BytecodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
1444 : AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1445 : builder()->SetStatementPosition(stmt);
1446 : execution_control()->Continue(stmt->target());
1447 3643 : }
1448 :
1449 46092 : void BytecodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
1450 : AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1451 : builder()->SetStatementPosition(stmt);
1452 : execution_control()->Break(stmt->target());
1453 46092 : }
1454 :
1455 2079477 : void BytecodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
1456 : AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1457 : builder()->SetStatementPosition(stmt);
1458 2079510 : VisitForAccumulatorValue(stmt->expression());
1459 2079538 : if (stmt->is_async_return()) {
1460 : execution_control()->AsyncReturnAccumulator(stmt->end_position());
1461 : } else {
1462 : execution_control()->ReturnAccumulator(stmt->end_position());
1463 : }
1464 2079543 : }
1465 :
1466 2962 : void BytecodeGenerator::VisitWithStatement(WithStatement* stmt) {
1467 : builder()->SetStatementPosition(stmt);
1468 2962 : VisitForAccumulatorValue(stmt->expression());
1469 2963 : BuildNewLocalWithContext(stmt->scope());
1470 2963 : VisitInScope(stmt->statement(), stmt->scope());
1471 2963 : }
1472 :
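 : // For illustration, a rough sketch of how a switch statement is lowered by
 : // the two passes below ('x', 'a', 'b' and the labels are illustrative only):
 : //
 : //   switch (x) { case a: A; default: D; case b: B; }
 : //
 : // becomes, roughly:
 : //
 : //   tag = x
 : //   if (a === tag) goto La
 : //   if (b === tag) goto Lb
 : //   goto Ldefault
 : //   La: A            // falls through to D, then B, absent break statements
 : //   Ldefault: D
 : //   Lb: B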
1473 10665 : void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1474 : // We need this scope because we visit for register values. We have to
1475 : // maintain an execution result scope where registers can be allocated.
1476 : ZonePtrList<CaseClause>* clauses = stmt->cases();
1477 : SwitchBuilder switch_builder(builder(), block_coverage_builder_, stmt,
1478 21330 : clauses->length());
1479 : ControlScopeForBreakable scope(this, stmt, &switch_builder);
1480 : int default_index = -1;
1481 :
1482 : builder()->SetStatementPosition(stmt);
1483 :
1484 : // Keep the switch value in a register until a case matches.
1485 10665 : Register tag = VisitForRegisterValue(stmt->tag());
1486 : FeedbackSlot slot = clauses->length() > 0
1487 : ? feedback_spec()->AddCompareICSlot()
1488 10665 : : FeedbackSlot::Invalid();
1489 :
1490 : // Iterate over all cases and create nodes for label comparison.
1491 171831 : for (int i = 0; i < clauses->length(); i++) {
1492 80583 : CaseClause* clause = clauses->at(i);
1493 :
1494 : // The default clause is not a test; just remember its index.
1495 80583 : if (clause->is_default()) {
1496 : default_index = i;
1497 : continue;
1498 : }
1499 :
1500 : // Perform label comparison as if via '===' with tag.
1501 73100 : VisitForAccumulatorValue(clause->label());
1502 : builder()->CompareOperation(Token::Value::EQ_STRICT, tag,
1503 73100 : feedback_index(slot));
1504 73100 : switch_builder.Case(ToBooleanMode::kAlreadyBoolean, i);
1505 : }
1506 :
1507 10665 : if (default_index >= 0) {
1508 : // Emit default jump if there is a default case.
1509 7483 : switch_builder.DefaultAt(default_index);
1510 : } else {
1511 : // Otherwise, if we reach this point none of the cases matched, so jump
1512 : // to the end.
1513 : switch_builder.Break();
1514 : }
1515 :
1516 : // Iterate over all cases and create the case bodies.
1517 171831 : for (int i = 0; i < clauses->length(); i++) {
1518 80583 : CaseClause* clause = clauses->at(i);
1519 80583 : switch_builder.SetCaseTarget(i, clause);
1520 80583 : VisitStatements(clause->statements());
1521 : }
1522 10665 : }
1523 :
1524 : template <typename TryBodyFunc, typename CatchBodyFunc>
1525 113128 : void BytecodeGenerator::BuildTryCatch(
1526 : TryBodyFunc try_body_func, CatchBodyFunc catch_body_func,
1527 : HandlerTable::CatchPrediction catch_prediction,
1528 : TryCatchStatement* stmt_for_coverage) {
1529 : TryCatchBuilder try_control_builder(
1530 : builder(),
1531 : stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
1532 226257 : stmt_for_coverage, catch_prediction);
1533 :
1534 : // Preserve the context in a dedicated register, so that it can be restored
1535 : // when the handler is entered by the stack-unwinding machinery.
1536 : // TODO(mstarzinger): Be smarter about register allocation.
1537 113135 : Register context = register_allocator()->NewRegister();
1538 113132 : builder()->MoveRegister(Register::current_context(), context);
1539 :
1540 : // Evaluate the try-block inside a control scope. This simulates a handler
1541 : // that is intercepting 'throw' control commands.
1542 113134 : try_control_builder.BeginTry(context);
1543 : {
1544 : ControlScopeForTryCatch scope(this, &try_control_builder);
1545 38295 : try_body_func();
1546 : }
1547 113131 : try_control_builder.EndTry();
1548 :
1549 113130 : catch_body_func(context);
1550 :
1551 113131 : try_control_builder.EndCatch();
1552 113120 : }
1553 :
1554 : template <typename TryBodyFunc, typename FinallyBodyFunc>
1555 41649 : void BytecodeGenerator::BuildTryFinally(
1556 : TryBodyFunc try_body_func, FinallyBodyFunc finally_body_func,
1557 : HandlerTable::CatchPrediction catch_prediction,
1558 : TryFinallyStatement* stmt_for_coverage) {
1559 : // We can't know whether the finally block will override ("catch") an
1560 : // exception thrown in the try block, so we just adopt the outer prediction.
1561 : TryFinallyBuilder try_control_builder(
1562 : builder(),
1563 : stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
1564 83301 : stmt_for_coverage, catch_prediction);
1565 :
1566 : // We keep a record of all paths that enter the finally-block to be able to
1567 : // dispatch to the correct continuation point after the statements in the
1568 : // finally-block have been evaluated.
1569 : //
1570 : // The try-finally construct can enter the finally-block in three ways:
1571 : // 1. By exiting the try-block normally, falling through at the end.
1572 : // 2. By exiting the try-block with a function-local control flow transfer
1573 : // (i.e. through break/continue/return statements).
1574 : // 3. By exiting the try-block with a thrown exception.
1575 : //
1576 : // The result register semantics depend on how the block was entered:
1577 : // - ReturnStatement: It represents the return value being returned.
1578 : // - ThrowStatement: It represents the exception being thrown.
1579 : // - BreakStatement/ContinueStatement: Undefined and not used.
1580 : // - Falling through into finally-block: Undefined and not used.
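 : // A rough JS sketch of the three entry paths (illustrative only):
 : //
 : //   try {
 : //     if (a) return r;   // (2) function-local control flow transfer
 : //     if (b) throw e;    // (3) thrown exception
 : //   } finally {          // (1) normal fall-through otherwise
 : //     ...
 : //   }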
1581 41647 : Register token = register_allocator()->NewRegister();
1582 41651 : Register result = register_allocator()->NewRegister();
1583 : ControlScope::DeferredCommands commands(this, token, result);
1584 :
1585 : // Preserve the context in a dedicated register, so that it can be restored
1586 : // when the handler is entered by the stack-unwinding machinery.
1587 : // TODO(mstarzinger): Be smarter about register allocation.
1588 41650 : Register context = register_allocator()->NewRegister();
1589 41650 : builder()->MoveRegister(Register::current_context(), context);
1590 :
1591 : // Evaluate the try-block inside a control scope. This simulates a handler
1592 : // that is intercepting all control commands.
1593 41648 : try_control_builder.BeginTry(context);
1594 : {
1595 : ControlScopeForTryFinally scope(this, &try_control_builder, &commands);
1596 38295 : try_body_func();
1597 : }
1598 41652 : try_control_builder.EndTry();
1599 :
1600 : // Record fall-through and exception cases.
1601 41646 : commands.RecordFallThroughPath();
1602 41653 : try_control_builder.LeaveTry();
1603 41651 : try_control_builder.BeginHandler();
1604 : commands.RecordHandlerReThrowPath();
1605 :
1606 : // Pending message object is saved on entry.
1607 41653 : try_control_builder.BeginFinally();
1608 41651 : Register message = context; // Reuse register.
1609 :
1610 : // Clear message object as we enter the finally block.
1611 41651 : builder()->LoadTheHole().SetPendingMessage().StoreAccumulatorInRegister(
1612 : message);
1613 :
1614 : // Evaluate the finally-block.
1615 : finally_body_func(token);
1616 41651 : try_control_builder.EndFinally();
1617 :
1618 : // Pending message object is restored on exit.
1619 41651 : builder()->LoadAccumulatorWithRegister(message).SetPendingMessage();
1620 :
1621 : // Dynamic dispatch after the finally-block.
1622 41653 : commands.ApplyDeferredCommands();
1623 41650 : }
1624 :
1625 260544 : void BytecodeGenerator::VisitIterationBody(IterationStatement* stmt,
1626 : LoopBuilder* loop_builder) {
1627 260544 : loop_builder->LoopBody();
1628 : ControlScopeForIteration execution_control(this, stmt, loop_builder);
1629 260548 : builder()->StackCheck(stmt->position());
1630 260554 : Visit(stmt->body());
1631 260549 : loop_builder->BindContinueTarget();
1632 260552 : }
1633 :
1634 1200 : void BytecodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
1635 3600 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1636 1200 : if (stmt->cond()->ToBooleanIsFalse()) {
1637 347 : VisitIterationBody(stmt, &loop_builder);
1638 853 : } else if (stmt->cond()->ToBooleanIsTrue()) {
1639 220 : loop_builder.LoopHeader();
1640 220 : VisitIterationBody(stmt, &loop_builder);
1641 220 : loop_builder.JumpToHeader(loop_depth_);
1642 : } else {
1643 633 : loop_builder.LoopHeader();
1644 633 : VisitIterationBody(stmt, &loop_builder);
1645 : builder()->SetExpressionAsStatementPosition(stmt->cond());
1646 : BytecodeLabels loop_backbranch(zone());
1647 : VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
1648 633 : TestFallthrough::kThen);
1649 633 : loop_backbranch.Bind(builder());
1650 633 : loop_builder.JumpToHeader(loop_depth_);
1651 : }
1652 1200 : }
1653 :
1654 13713 : void BytecodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
1655 41028 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1656 :
1657 13713 : if (stmt->cond()->ToBooleanIsFalse()) {
1658 : // If the condition is false there is no need to generate the loop.
1659 111 : return;
1660 : }
1661 :
1662 13602 : loop_builder.LoopHeader();
1663 13603 : if (!stmt->cond()->ToBooleanIsTrue()) {
1664 : builder()->SetExpressionAsStatementPosition(stmt->cond());
1665 : BytecodeLabels loop_body(zone());
1666 : VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
1667 12582 : TestFallthrough::kThen);
1668 12583 : loop_body.Bind(builder());
1669 : }
1670 13604 : VisitIterationBody(stmt, &loop_builder);
1671 13603 : loop_builder.JumpToHeader(loop_depth_);
1672 : }
1673 :
1674 220920 : void BytecodeGenerator::VisitForStatement(ForStatement* stmt) {
1675 647290 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1676 :
1677 220920 : if (stmt->init() != nullptr) {
1678 160638 : Visit(stmt->init());
1679 : }
1680 220922 : if (stmt->cond() && stmt->cond()->ToBooleanIsFalse()) {
1681 : // If the condition is known to be false there is no need to generate the
1682 : // body, next or condition blocks; the init block was already generated above.
1683 15471 : return;
1684 : }
1685 :
1686 205452 : loop_builder.LoopHeader();
1687 205450 : if (stmt->cond() && !stmt->cond()->ToBooleanIsTrue()) {
1688 : builder()->SetExpressionAsStatementPosition(stmt->cond());
1689 : BytecodeLabels loop_body(zone());
1690 : VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
1691 188560 : TestFallthrough::kThen);
1692 188559 : loop_body.Bind(builder());
1693 : }
1694 205450 : VisitIterationBody(stmt, &loop_builder);
1695 205452 : if (stmt->next() != nullptr) {
1696 : builder()->SetStatementPosition(stmt->next());
1697 180295 : Visit(stmt->next());
1698 : }
1699 205454 : loop_builder.JumpToHeader(loop_depth_);
1700 : }
1701 :
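 : // Desugar a for-in statement using the ForIn bytecodes. Roughly, and only as
 : // an illustrative sketch of the code below:
 : //
 : //   for (EACH in SUBJECT) BODY
 : //
 : // becomes
 : //
 : //   if (SUBJECT is undefined or null) skip the whole loop
 : //   receiver = ToObject(SUBJECT)
 : //   triple = ForInPrepare(ForInEnumerate(receiver))
 : //   index = 0
 : //   while (ForInContinue(index, cache_length)) {
 : //     key = ForInNext(receiver, index, triple)
 : //     if (key !== undefined) { EACH = key; BODY }
 : //     index = ForInStep(index)
 : //   }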
1702 4804 : void BytecodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1703 9582 : if (stmt->subject()->IsNullLiteral() ||
1704 4778 : stmt->subject()->IsUndefinedLiteral()) {
1705 : // ForIn generates lots of code; skip it if it wouldn't produce any effects.
1706 51 : return;
1707 : }
1708 :
1709 : BytecodeLabel subject_null_label, subject_undefined_label;
1710 : FeedbackSlot slot = feedback_spec()->AddForInSlot();
1711 :
1712 : // Prepare the state for executing ForIn.
1713 : builder()->SetExpressionAsStatementPosition(stmt->subject());
1714 4753 : VisitForAccumulatorValue(stmt->subject());
1715 4753 : builder()->JumpIfUndefined(&subject_undefined_label);
1716 4753 : builder()->JumpIfNull(&subject_null_label);
1717 4753 : Register receiver = register_allocator()->NewRegister();
1718 4753 : builder()->ToObject(receiver);
1719 :
1720 : // Used as kRegTriple and kRegPair in ForInPrepare and ForInNext.
1721 4753 : RegisterList triple = register_allocator()->NewRegisterList(3);
1722 4753 : Register cache_length = triple[2];
1723 4753 : builder()->ForInEnumerate(receiver);
1724 4753 : builder()->ForInPrepare(triple, feedback_index(slot));
1725 :
1726 : // Set up loop counter
1727 4753 : Register index = register_allocator()->NewRegister();
1728 4753 : builder()->LoadLiteral(Smi::zero());
1729 4753 : builder()->StoreAccumulatorInRegister(index);
1730 :
1731 : // The loop
1732 : {
1733 9506 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1734 4753 : loop_builder.LoopHeader();
1735 : builder()->SetExpressionAsStatementPosition(stmt->each());
1736 4753 : builder()->ForInContinue(index, cache_length);
1737 : loop_builder.BreakIfFalse(ToBooleanMode::kAlreadyBoolean);
1738 : builder()->ForInNext(receiver, index, triple.Truncate(2),
1739 4753 : feedback_index(slot));
1740 : loop_builder.ContinueIfUndefined();
1741 :
1742 : // Assign accumulator value to the 'each' target.
1743 : {
1744 : EffectResultScope scope(this);
1745 : // Make sure to preserve the accumulator across the PrepareAssignmentLhs
1746 : // call.
1747 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(
1748 4753 : stmt->each(), AccumulatorPreservingMode::kPreserve);
1749 : builder()->SetExpressionPosition(stmt->each());
1750 4753 : BuildAssignment(lhs_data, Token::ASSIGN, LookupHoistingMode::kNormal);
1751 : }
1752 :
1753 4753 : VisitIterationBody(stmt, &loop_builder);
1754 4753 : builder()->ForInStep(index);
1755 4753 : builder()->StoreAccumulatorInRegister(index);
1756 4753 : loop_builder.JumpToHeader(loop_depth_);
1757 : }
1758 4753 : builder()->Bind(&subject_null_label);
1759 4753 : builder()->Bind(&subject_undefined_label);
1760 : }
1761 :
1762 : // Desugar a for-of statement into an application of the iteration protocol.
1763 : //
1764 : // for (EACH of SUBJECT) BODY
1765 : //
1766 : // becomes
1767 : //
1768 : // iterator = %GetIterator(SUBJECT)
1769 : // try {
1770 : //
1771 : // loop {
1772 : // // Make sure we are considered 'done' if .next(), .done or .value fail.
1773 : // done = true
1774 : // value = iterator.next()
1775 : // if (value.done) break;
1776 : // value = value.value
1777 : // done = false
1778 : //
1779 : // EACH = value
1780 : // BODY
1781 : // }
1782 : // done = true
1783 : //
1784 : // } catch(e) {
1785 : // iteration_continuation = RETHROW
1786 : // } finally {
1787 : // %FinalizeIteration(iterator, done, iteration_continuation)
1788 : // }
1789 35542 : void BytecodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1790 : EffectResultScope effect_scope(this);
1791 :
1792 35542 : builder()->SetExpressionAsStatementPosition(stmt->subject());
1793 35542 : VisitForAccumulatorValue(stmt->subject());
1794 :
1795 : // Store the iterator in a dedicated register so that it can be closed on
1796 : // exit, and the 'done' value in a dedicated register so that it can be
1797 : // changed and accessed independently of the iteration result.
1798 35547 : IteratorRecord iterator = BuildGetIteratorRecord(stmt->type());
1799 35548 : Register done = register_allocator()->NewRegister();
1800 35547 : builder()->LoadFalse();
1801 35545 : builder()->StoreAccumulatorInRegister(done);
1802 :
1803 : BuildTryFinally(
1804 : // Try block.
1805 35542 : [&]() {
1806 568740 : Register next_result = register_allocator()->NewRegister();
1807 :
1808 248829 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1809 35542 : loop_builder.LoopHeader();
1810 :
1811 106639 : builder()->LoadTrue().StoreAccumulatorInRegister(done);
1812 :
1813 : // Call the iterator's .next() method. Break from the loop if the `done`
1814 : // property is truthy, otherwise load the value from the iterator result
1815 : // and assign it to the 'each' target below.
1816 35549 : builder()->SetExpressionAsStatementPosition(stmt->each());
1817 71098 : BuildIteratorNext(iterator, next_result);
1818 : builder()->LoadNamedProperty(
1819 : next_result, ast_string_constants()->done_string(),
1820 35547 : feedback_index(feedback_spec()->AddLoadICSlot()));
1821 : loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
1822 :
1823 : builder()
1824 : // value = value.value
1825 : ->LoadNamedProperty(
1826 : next_result, ast_string_constants()->value_string(),
1827 35549 : feedback_index(feedback_spec()->AddLoadICSlot()));
1828 : // Set done = false before the assignment to 'each' happens, so that
1829 : // done is false if the assignment throws.
1830 : builder()
1831 35549 : ->StoreAccumulatorInRegister(next_result)
1832 35548 : .LoadFalse()
1833 35549 : .StoreAccumulatorInRegister(done);
1834 :
1835 : // Assign to the 'each' target.
1836 71094 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(stmt->each());
1837 35542 : builder()->LoadAccumulatorWithRegister(next_result);
1838 35547 : BuildAssignment(lhs_data, Token::ASSIGN, LookupHoistingMode::kNormal);
1839 :
1840 71090 : VisitIterationBody(stmt, &loop_builder);
1841 :
1842 35546 : loop_builder.JumpToHeader(loop_depth_);
1843 35549 : },
1844 : // Finally block.
1845 : [&](Register iteration_continuation_token) {
1846 : // Finish the iteration in the finally block.
1847 35549 : BuildFinalizeIteration(iterator, done, iteration_continuation_token);
1848 : },
1849 35546 : HandlerTable::UNCAUGHT);
1850 35547 : }
1851 :
1852 74835 : void BytecodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
1853 : // Update catch prediction tracking. The updated catch_prediction value lasts
1854 : // until the end of the try_block in the AST node, and does not apply to the
1855 : // catch_block.
1856 74835 : HandlerTable::CatchPrediction outer_catch_prediction = catch_prediction();
1857 74835 : set_catch_prediction(stmt->GetCatchPrediction(outer_catch_prediction));
1858 :
1859 : BuildTryCatch(
1860 : // Try body.
1861 : [&]() {
1862 74836 : Visit(stmt->try_block());
1863 74836 : set_catch_prediction(outer_catch_prediction);
1864 : },
1865 : // Catch body.
1866 74834 : [&](Register context) {
1867 224505 : if (stmt->scope()) {
1868 : // Create a catch scope that binds the exception.
1869 299251 : BuildNewLocalCatchContext(stmt->scope());
1870 74745 : builder()->StoreAccumulatorInRegister(context);
1871 : }
1872 :
1873 : // If requested, clear message object as we enter the catch block.
1874 224502 : if (stmt->ShouldClearPendingException(outer_catch_prediction)) {
1875 74834 : builder()->LoadTheHole().SetPendingMessage();
1876 : }
1877 :
1878 : // Load the catch context into the accumulator.
1879 74836 : builder()->LoadAccumulatorWithRegister(context);
1880 :
1881 : // Evaluate the catch-block.
1882 74837 : if (stmt->scope()) {
1883 74747 : VisitInScope(stmt->catch_block(), stmt->scope());
1884 : } else {
1885 90 : VisitBlock(stmt->catch_block());
1886 : }
1887 74836 : },
1888 74835 : catch_prediction(), stmt);
1889 74830 : }
1890 :
1891 3356 : void BytecodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
1892 : BuildTryFinally(
1893 : // Try block.
1894 3356 : [&]() { Visit(stmt->try_block()); },
1895 : // Finally block.
1896 3356 : [&](Register body_continuation_token) { Visit(stmt->finally_block()); },
1897 3356 : catch_prediction(), stmt);
1898 3356 : }
1899 :
1900 0 : void BytecodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
1901 : builder()->SetStatementPosition(stmt);
1902 6556 : builder()->Debugger();
1903 0 : }
1904 :
1905 2708468 : void BytecodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
1906 : DCHECK(expr->scope()->outer_scope() == current_scope());
1907 8125404 : uint8_t flags = CreateClosureFlags::Encode(
1908 : expr->pretenure(), closure_scope()->is_function_scope(),
1909 2708468 : info()->might_always_opt());
1910 2708471 : size_t entry = builder()->AllocateDeferredConstantPoolEntry();
1911 2708473 : builder()->CreateClosure(entry, GetCachedCreateClosureSlot(expr), flags);
1912 5417079 : function_literals_.push_back(std::make_pair(expr, entry));
1913 2708540 : AddToEagerLiteralsIfEager(expr);
1914 2708543 : }
1915 :
1916 2955163 : void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) {
1917 2955163 : if (eager_inner_literals_ && literal->ShouldEagerCompile()) {
1918 : DCHECK(!IsInEagerLiterals(literal, *eager_inner_literals_));
1919 524335 : eager_inner_literals_->push_back(literal);
1920 : }
1921 2955169 : }
1922 :
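 : // One-shot bytecode is only considered for code that is likely to run once:
 : // top-level code and one-shot IIFEs, e.g. (illustrative example)
 : //
 : //   (function() { ... })();
 : //
 : // and never for code nested inside a loop.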
1923 : bool BytecodeGenerator::ShouldOptimizeAsOneShot() const {
1924 10609092 : if (!FLAG_enable_one_shot_optimization) return false;
1925 :
1926 10592401 : if (loop_depth_ > 0) return false;
1927 :
1928 18469844 : return info()->literal()->is_toplevel() ||
1929 : info()->literal()->is_oneshot_iife();
1930 : }
1931 :
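 : // For illustration, a rough sketch (names are illustrative only): for
 : //
 : //   class C extends S { constructor() {...} m() {...} [k]() {...} }
 : //
 : // the class boilerplate, the constructor, S, and the method values (plus
 : // computed keys such as k, evaluated in source order) are gathered into
 : // |args| for a single Runtime::kDefineClass call; the resulting constructor
 : // is then assigned to the class variable C.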
1932 43104 : void BytecodeGenerator::BuildClassLiteral(ClassLiteral* expr, Register name) {
1933 : size_t class_boilerplate_entry =
1934 43104 : builder()->AllocateDeferredConstantPoolEntry();
1935 86207 : class_literals_.push_back(std::make_pair(expr, class_boilerplate_entry));
1936 :
1937 43104 : VisitDeclarations(expr->scope()->declarations());
1938 43104 : Register class_constructor = register_allocator()->NewRegister();
1939 :
1940 : {
1941 : RegisterAllocationScope register_scope(this);
1942 43103 : RegisterList args = register_allocator()->NewGrowableRegisterList();
1943 :
1944 43103 : Register class_boilerplate = register_allocator()->GrowRegisterList(&args);
1945 : Register class_constructor_in_args =
1946 43102 : register_allocator()->GrowRegisterList(&args);
1947 43103 : Register super_class = register_allocator()->GrowRegisterList(&args);
1948 : DCHECK_EQ(ClassBoilerplate::kFirstDynamicArgumentIndex,
1949 : args.register_count());
1950 :
1951 : VisitForAccumulatorValueOrTheHole(expr->extends());
1952 43103 : builder()->StoreAccumulatorInRegister(super_class);
1953 :
1954 43104 : VisitFunctionLiteral(expr->constructor());
1955 : builder()
1956 43104 : ->StoreAccumulatorInRegister(class_constructor)
1957 43104 : .MoveRegister(class_constructor, class_constructor_in_args)
1958 43104 : .LoadConstantPoolEntry(class_boilerplate_entry)
1959 43104 : .StoreAccumulatorInRegister(class_boilerplate);
1960 :
1961 : // Create computed names and method values nodes to store into the literal.
1962 691105 : for (int i = 0; i < expr->properties()->length(); i++) {
1963 324001 : ClassLiteral::Property* property = expr->properties()->at(i);
1964 324001 : if (property->is_computed_name()) {
1965 6075 : Register key = register_allocator()->GrowRegisterList(&args);
1966 :
1967 : builder()->SetExpressionAsStatementPosition(property->key());
1968 6075 : BuildLoadPropertyKey(property, key);
1969 6075 : if (property->is_static()) {
1970 : // The static 'prototype' property is read-only. The non-computed
1971 : // property name case is handled in the parser. Since this is the only
1972 : // case where we need to check for an own read-only property, we
1973 : // special-case it here so we do not need to do the check for every property.
1974 :
1975 : FeedbackSlot slot = GetDummyCompareICSlot();
1976 : BytecodeLabel done;
1977 : builder()
1978 1710 : ->LoadLiteral(ast_string_constants()->prototype_string())
1979 : .CompareOperation(Token::Value::EQ_STRICT, key,
1980 1710 : feedback_index(slot))
1981 1710 : .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &done)
1982 1710 : .CallRuntime(Runtime::kThrowStaticPrototypeError)
1983 1710 : .Bind(&done);
1984 : }
1985 :
1986 6075 : if (property->kind() == ClassLiteral::Property::FIELD) {
1987 : DCHECK(!property->is_private());
1988 : // Initialize field's name variable with the computed name.
1989 : DCHECK_NOT_NULL(property->computed_name_var());
1990 407 : builder()->LoadAccumulatorWithRegister(key);
1991 : BuildVariableAssignment(property->computed_name_var(), Token::INIT,
1992 407 : HoleCheckMode::kElided);
1993 : }
1994 : }
1995 :
1996 324001 : if (property->kind() == ClassLiteral::Property::FIELD) {
1997 1105 : if (property->is_private()) {
1998 : RegisterAllocationScope private_name_register_scope(this);
1999 698 : Register private_name = register_allocator()->NewRegister();
2000 : VisitForRegisterValue(property->key(), private_name);
2001 : builder()
2002 1396 : ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
2003 698 : .StoreAccumulatorInRegister(private_name)
2004 698 : .CallRuntime(Runtime::kCreatePrivateNameSymbol, private_name);
2005 : DCHECK_NOT_NULL(property->private_name_var());
2006 : BuildVariableAssignment(property->private_name_var(), Token::INIT,
2007 698 : HoleCheckMode::kElided);
2008 : }
2009 : // We don't compute the field's value here; that is done in the
2010 : // initializer function.
2011 1105 : continue;
2012 : }
2013 :
2014 322896 : Register value = register_allocator()->GrowRegisterList(&args);
2015 : VisitForRegisterValue(property->value(), value);
2016 : }
2017 :
2018 43104 : builder()->CallRuntime(Runtime::kDefineClass, args);
2019 : }
2020 43104 : Register prototype = register_allocator()->NewRegister();
2021 43104 : builder()->StoreAccumulatorInRegister(prototype);
2022 :
2023 : // Assign to class variable.
2024 43104 : if (expr->class_variable() != nullptr) {
2025 : DCHECK(expr->class_variable()->IsStackLocal() ||
2026 : expr->class_variable()->IsContextSlot());
2027 38766 : builder()->LoadAccumulatorWithRegister(class_constructor);
2028 : BuildVariableAssignment(expr->class_variable(), Token::INIT,
2029 38766 : HoleCheckMode::kElided);
2030 : }
2031 :
2032 43104 : if (expr->instance_members_initializer_function() != nullptr) {
2033 : Register initializer =
2034 1032 : VisitForRegisterValue(expr->instance_members_initializer_function());
2035 :
2036 1031 : if (FunctionLiteral::NeedsHomeObject(
2037 : expr->instance_members_initializer_function())) {
2038 : FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2039 24 : builder()->LoadAccumulatorWithRegister(prototype).StoreHomeObjectProperty(
2040 24 : initializer, feedback_index(slot), language_mode());
2041 : }
2042 :
2043 : FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2044 : builder()
2045 1031 : ->LoadAccumulatorWithRegister(initializer)
2046 1031 : .StoreClassFieldsInitializer(class_constructor, feedback_index(slot))
2047 1031 : .LoadAccumulatorWithRegister(class_constructor);
2048 : }
2049 :
2050 43104 : if (expr->static_fields_initializer() != nullptr) {
2051 : // TODO(gsathya): This can be optimized away to be a part of the
2052 : // class boilerplate in the future. The name argument could be
2053 : // passed to the DefineClass runtime function, which would then
2054 : // set it there.
2055 523 : if (name.is_valid()) {
2056 6 : Register key = register_allocator()->NewRegister();
2057 : builder()
2058 6 : ->LoadLiteral(ast_string_constants()->name_string())
2059 6 : .StoreAccumulatorInRegister(key);
2060 :
2061 : DataPropertyInLiteralFlags data_property_flags =
2062 : DataPropertyInLiteralFlag::kNoFlags;
2063 : FeedbackSlot slot =
2064 : feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
2065 6 : builder()->LoadAccumulatorWithRegister(name).StoreDataPropertyInLiteral(
2066 6 : class_constructor, key, data_property_flags, feedback_index(slot));
2067 : }
2068 :
2069 523 : RegisterList args = register_allocator()->NewRegisterList(1);
2070 : Register initializer =
2071 523 : VisitForRegisterValue(expr->static_fields_initializer());
2072 :
2073 523 : if (FunctionLiteral::NeedsHomeObject(expr->static_fields_initializer())) {
2074 : FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2075 : builder()
2076 23 : ->LoadAccumulatorWithRegister(class_constructor)
2077 : .StoreHomeObjectProperty(initializer, feedback_index(slot),
2078 23 : language_mode());
2079 : }
2080 :
2081 : builder()
2082 523 : ->MoveRegister(class_constructor, args[0])
2083 : .CallProperty(initializer, args,
2084 523 : feedback_index(feedback_spec()->AddCallICSlot()));
2085 : }
2086 43104 : builder()->LoadAccumulatorWithRegister(class_constructor);
2087 43104 : }
2088 :
2089 43098 : void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
2090 43098 : VisitClassLiteral(expr, Register::invalid_value());
2091 43098 : }
2092 :
2093 43104 : void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr, Register name) {
2094 : CurrentScope current_scope(this, expr->scope());
2095 : DCHECK_NOT_NULL(expr->scope());
2096 43104 : if (expr->scope()->NeedsContext()) {
2097 35370 : BuildNewLocalBlockContext(expr->scope());
2098 70740 : ContextScope scope(this, expr->scope());
2099 35370 : BuildClassLiteral(expr, name);
2100 : } else {
2101 7734 : BuildClassLiteral(expr, name);
2102 : }
2103 43104 : }
2104 :
2105 1555 : void BytecodeGenerator::VisitInitializeClassMembersStatement(
2106 : InitializeClassMembersStatement* stmt) {
2107 1555 : RegisterList args = register_allocator()->NewRegisterList(3);
2108 3110 : Register constructor = args[0], key = args[1], value = args[2];
2109 1555 : builder()->MoveRegister(builder()->Receiver(), constructor);
2110 :
2111 6640 : for (int i = 0; i < stmt->fields()->length(); i++) {
2112 2543 : ClassLiteral::Property* property = stmt->fields()->at(i);
2113 :
2114 2543 : if (property->is_computed_name()) {
2115 : DCHECK_EQ(property->kind(), ClassLiteral::Property::FIELD);
2116 : DCHECK(!property->is_private());
2117 : Variable* var = property->computed_name_var();
2118 : DCHECK_NOT_NULL(var);
2119 : // The computed name is already evaluated and stored in a
2120 : // variable at class definition time.
2121 407 : BuildVariableLoad(var, HoleCheckMode::kElided);
2122 407 : builder()->StoreAccumulatorInRegister(key);
2123 2136 : } else if (property->kind() == ClassLiteral::Property::FIELD &&
2124 : property->is_private()) {
2125 : Variable* private_name_var = property->private_name_var();
2126 : DCHECK_NOT_NULL(private_name_var);
2127 698 : BuildVariableLoad(private_name_var, HoleCheckMode::kElided);
2128 698 : builder()->StoreAccumulatorInRegister(key);
2129 : } else {
2130 1438 : BuildLoadPropertyKey(property, key);
2131 : }
2132 :
2133 : builder()->SetExpressionAsStatementPosition(property->value());
2134 : VisitForRegisterValue(property->value(), value);
2135 2543 : VisitSetHomeObject(value, constructor, property);
2136 :
2137 : Runtime::FunctionId function_id =
2138 2543 : property->kind() == ClassLiteral::Property::FIELD &&
2139 : !property->is_private()
2140 : ? Runtime::kCreateDataProperty
2141 2543 : : Runtime::kAddPrivateField;
2142 2543 : builder()->CallRuntime(function_id, args);
2143 : }
2144 1555 : }
2145 :
2146 941 : void BytecodeGenerator::BuildInstanceMemberInitialization(Register constructor,
2147 : Register instance) {
2148 941 : RegisterList args = register_allocator()->NewRegisterList(1);
2149 941 : Register initializer = register_allocator()->NewRegister();
2150 :
2151 : FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
2152 : BytecodeLabel done;
2153 :
2154 : builder()
2155 941 : ->LoadClassFieldsInitializer(constructor, feedback_index(slot))
2156 : // TODO(gsathya): This jump can be elided for the base
2157 : // constructor and derived constructor. This is only required
2158 : // when called from an arrow function.
2159 941 : .JumpIfUndefined(&done)
2160 941 : .StoreAccumulatorInRegister(initializer)
2161 941 : .MoveRegister(instance, args[0])
2162 : .CallProperty(initializer, args,
2163 941 : feedback_index(feedback_spec()->AddCallICSlot()))
2164 941 : .Bind(&done);
2165 941 : }
2166 :
2167 1844 : void BytecodeGenerator::VisitNativeFunctionLiteral(
2168 : NativeFunctionLiteral* expr) {
2169 1844 : size_t entry = builder()->AllocateDeferredConstantPoolEntry();
2170 : int index = feedback_spec()->AddFeedbackCellForCreateClosure();
2171 1844 : uint8_t flags = CreateClosureFlags::Encode(false, false, false);
2172 1844 : builder()->CreateClosure(entry, index, flags);
2173 3688 : native_function_literals_.push_back(std::make_pair(expr, entry));
2174 1844 : }
2175 :
2176 0 : void BytecodeGenerator::VisitDoExpression(DoExpression* expr) {
2177 0 : VisitBlock(expr->block());
2178 0 : VisitVariableProxy(expr->result());
2179 0 : }
2180 :
2181 29455 : void BytecodeGenerator::VisitConditional(Conditional* expr) {
2182 : ConditionalControlFlowBuilder conditional_builder(
2183 88368 : builder(), block_coverage_builder_, expr);
2184 :
2185 29456 : if (expr->condition()->ToBooleanIsTrue()) {
2186 : // The condition is always true; generate the then expression unconditionally.
2187 235 : conditional_builder.Then();
2188 235 : VisitForAccumulatorValue(expr->then_expression());
2189 29221 : } else if (expr->condition()->ToBooleanIsFalse()) {
2190 : // The condition is always false; generate the else expression unconditionally.
2191 112 : conditional_builder.Else();
2192 112 : VisitForAccumulatorValue(expr->else_expression());
2193 : } else {
2194 : VisitForTest(expr->condition(), conditional_builder.then_labels(),
2195 29109 : conditional_builder.else_labels(), TestFallthrough::kThen);
2196 :
2197 29110 : conditional_builder.Then();
2198 29109 : VisitForAccumulatorValue(expr->then_expression());
2199 29109 : conditional_builder.JumpToEnd();
2200 :
2201 29110 : conditional_builder.Else();
2202 29109 : VisitForAccumulatorValue(expr->else_expression());
2203 : }
2204 29457 : }
2205 :
2206 10291214 : void BytecodeGenerator::VisitLiteral(Literal* expr) {
2207 10291214 : if (execution_result()->IsEffect()) return;
2208 9163950 : switch (expr->type()) {
2209 : case Literal::kSmi:
2210 6754820 : builder()->LoadLiteral(expr->AsSmiLiteral());
2211 6754822 : break;
2212 : case Literal::kHeapNumber:
2213 542901 : builder()->LoadLiteral(expr->AsNumber());
2214 271448 : break;
2215 : case Literal::kUndefined:
2216 113227 : builder()->LoadUndefined();
2217 113231 : break;
2218 : case Literal::kBoolean:
2219 653201 : builder()->LoadBoolean(expr->ToBooleanIsTrue());
2220 : execution_result()->SetResultIsBoolean();
2221 : break;
2222 : case Literal::kNull:
2223 22732 : builder()->LoadNull();
2224 22731 : break;
2225 : case Literal::kTheHole:
2226 0 : builder()->LoadTheHole();
2227 0 : break;
2228 : case Literal::kString:
2229 1664114 : builder()->LoadLiteral(expr->AsRawString());
2230 : execution_result()->SetResultIsString();
2231 : break;
2232 : case Literal::kSymbol:
2233 1991 : builder()->LoadLiteral(expr->AsSymbol());
2234 1991 : break;
2235 : case Literal::kBigInt:
2236 9038 : builder()->LoadLiteral(expr->AsBigInt());
2237 9038 : break;
2238 : }
2239 : }
2240 :
2241 45108 : void BytecodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
2242 : // Materialize a regular expression literal.
2243 : builder()->CreateRegExpLiteral(
2244 : expr->raw_pattern(), feedback_index(feedback_spec()->AddLiteralSlot()),
2245 45108 : expr->flags());
2246 45108 : }
2247 :
2248 203117 : void BytecodeGenerator::BuildCreateObjectLiteral(Register literal,
2249 : uint8_t flags, size_t entry) {
2250 203117 : if (ShouldOptimizeAsOneShot()) {
2251 115063 : RegisterList args = register_allocator()->NewRegisterList(2);
2252 : builder()
2253 115077 : ->LoadConstantPoolEntry(entry)
2254 115085 : .StoreAccumulatorInRegister(args[0])
2255 115083 : .LoadLiteral(Smi::FromInt(flags))
2256 115083 : .StoreAccumulatorInRegister(args[1])
2257 115083 : .CallRuntime(Runtime::kCreateObjectLiteralWithoutAllocationSite, args)
2258 115080 : .StoreAccumulatorInRegister(literal);
2259 :
2260 : } else {
2261 : // TODO(cbruni): Directly generate runtime call for literals we cannot
2262 : // optimize once the CreateShallowObjectLiteral stub is in sync with the TF
2263 : // optimizations.
2264 : int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2265 : builder()
2266 176108 : ->CreateObjectLiteral(entry, literal_index, flags)
2267 88058 : .StoreAccumulatorInRegister(literal);
2268 : }
2269 203140 : }
2270 :
2271 231619 : void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
2272 231619 : expr->InitDepthAndFlags();
2273 :
2274 : // Fast path for the empty object literal which doesn't need an
2275 : // AllocationSite.
2276 231642 : if (expr->IsEmptyObjectLiteral()) {
2277 : DCHECK(expr->IsFastCloningSupported());
2278 28232 : builder()->CreateEmptyObjectLiteral();
2279 28233 : return;
2280 : }
2281 :
2282 : // Deep-copy the literal boilerplate.
2283 203412 : uint8_t flags = CreateObjectLiteralFlags::Encode(
2284 406807 : expr->ComputeFlags(), expr->IsFastCloningSupported());
2285 :
2286 203397 : Register literal = register_allocator()->NewRegister();
2287 :
2288 : // Create literal object.
2289 : int property_index = 0;
2290 : bool clone_object_spread =
2291 203392 : expr->properties()->first()->kind() == ObjectLiteral::Property::SPREAD;
2292 203392 : if (clone_object_spread) {
2293 : // Avoid the slow path for spreads in the following common cases:
2294 : // 1) `let obj = { ...source }`
2295 : // 2) `let obj = { ...source, override: 1 }`
2296 : // 3) `let obj = { ...source, ...overrides }`
2297 : RegisterAllocationScope register_scope(this);
2298 : Expression* property = expr->properties()->first()->value();
2299 284 : Register from_value = VisitForRegisterValue(property);
2300 :
2301 : BytecodeLabels clone_object(zone());
2302 568 : builder()->JumpIfUndefined(clone_object.New());
2303 284 : builder()->JumpIfNull(clone_object.New());
2304 284 : builder()->ToObject(from_value);
2305 :
2306 284 : clone_object.Bind(builder());
2307 : int clone_index = feedback_index(feedback_spec()->AddCloneObjectSlot());
2308 284 : builder()->CloneObject(from_value, flags, clone_index);
2309 284 : builder()->StoreAccumulatorInRegister(literal);
2310 : property_index++;
2311 : } else {
2312 : size_t entry;
2313 : // If constant properties is an empty fixed array, use a cached empty fixed
2314 : // If the constant properties form an empty fixed array, use a cached empty
2315 : // fixed array to ensure it's only added to the constant pool once.
2316 2248 : entry = builder()->EmptyObjectBoilerplateDescriptionConstantPoolEntry();
2317 : } else {
2318 200860 : entry = builder()->AllocateDeferredConstantPoolEntry();
2319 401751 : object_literals_.push_back(std::make_pair(expr, entry));
2320 : }
2321 203123 : BuildCreateObjectLiteral(literal, flags, entry);
2322 : }
2323 :
2324 : // Store computed values into the literal.
2325 : AccessorTable accessor_table(zone());
2326 4246355 : for (; property_index < expr->properties()->length(); property_index++) {
2327 2023422 : ObjectLiteral::Property* property = expr->properties()->at(property_index);
2328 2023422 : if (property->is_computed_name()) break;
2329 2021487 : if (!clone_object_spread && property->IsCompileTimeValue()) continue;
2330 :
2331 : RegisterAllocationScope inner_register_scope(this);
2332 255194 : Literal* key = property->key()->AsLiteral();
2333 255194 : switch (property->kind()) {
2334 : case ObjectLiteral::Property::SPREAD:
2335 0 : UNREACHABLE();
2336 : case ObjectLiteral::Property::CONSTANT:
2337 : case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2338 : DCHECK(clone_object_spread || !property->value()->IsCompileTimeValue());
2339 : V8_FALLTHROUGH;
2340 : case ObjectLiteral::Property::COMPUTED: {
2341 : // It is safe to use [[Put]] here because the boilerplate already
2342 : // contains computed properties with an uninitialized value.
2343 245099 : if (key->IsStringLiteral()) {
2344 : DCHECK(key->IsPropertyName());
2345 244542 : if (property->emit_store()) {
2346 : builder()->SetExpressionPosition(property->value());
2347 244388 : VisitForAccumulatorValue(property->value());
2348 : FeedbackSlot slot = feedback_spec()->AddStoreOwnICSlot();
2349 244398 : if (FunctionLiteral::NeedsHomeObject(property->value())) {
2350 : RegisterAllocationScope register_scope(this);
2351 546 : Register value = register_allocator()->NewRegister();
2352 546 : builder()->StoreAccumulatorInRegister(value);
2353 : builder()->StoreNamedOwnProperty(
2354 546 : literal, key->AsRawPropertyName(), feedback_index(slot));
2355 546 : VisitSetHomeObject(value, literal, property);
2356 : } else {
2357 : builder()->StoreNamedOwnProperty(
2358 243844 : literal, key->AsRawPropertyName(), feedback_index(slot));
2359 : }
2360 : } else {
2361 : builder()->SetExpressionPosition(property->value());
2362 154 : VisitForEffect(property->value());
2363 : }
2364 : } else {
2365 554 : RegisterList args = register_allocator()->NewRegisterList(3);
2366 :
2367 554 : builder()->MoveRegister(literal, args[0]);
2368 : builder()->SetExpressionPosition(property->key());
2369 : VisitForRegisterValue(property->key(), args[1]);
2370 : builder()->SetExpressionPosition(property->value());
2371 : VisitForRegisterValue(property->value(), args[2]);
2372 554 : if (property->emit_store()) {
2373 522 : builder()->CallRuntime(Runtime::kSetKeyedProperty, args);
2374 522 : Register value = args[2];
2375 522 : VisitSetHomeObject(value, literal, property);
2376 : }
2377 : }
2378 : break;
2379 : }
2380 : case ObjectLiteral::Property::PROTOTYPE: {
2381 : // __proto__:null is handled by CreateObjectLiteral.
2382 4106 : if (property->IsNullPrototype()) break;
2383 : DCHECK(property->emit_store());
2384 : DCHECK(!property->NeedsSetFunctionName());
2385 1276 : RegisterList args = register_allocator()->NewRegisterList(2);
2386 1276 : builder()->MoveRegister(literal, args[0]);
2387 : builder()->SetExpressionPosition(property->value());
2388 : VisitForRegisterValue(property->value(), args[1]);
2389 1276 : builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
2390 1276 : break;
2391 : }
2392 : case ObjectLiteral::Property::GETTER:
2393 3971 : if (property->emit_store()) {
2394 3868 : accessor_table.lookup(key)->second->getter = property;
2395 : }
2396 : break;
2397 : case ObjectLiteral::Property::SETTER:
2398 2012 : if (property->emit_store()) {
2399 1913 : accessor_table.lookup(key)->second->setter = property;
2400 : }
2401 : break;
2402 : }
2403 : }
2404 :
2405 : // Define accessors, using only a single call to the runtime for each pair of
2406 : // corresponding getters and setters.
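 : // For example (illustrative), { get x() {}, set x(v) {} } results in a single
 : // Runtime::kDefineAccessorPropertyUnchecked call receiving both closures.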
2407 208449 : for (AccessorTable::Iterator it = accessor_table.begin();
2408 : it != accessor_table.end(); ++it) {
2409 : RegisterAllocationScope inner_register_scope(this);
2410 5060 : RegisterList args = register_allocator()->NewRegisterList(5);
2411 5059 : builder()->MoveRegister(literal, args[0]);
2412 5059 : VisitForRegisterValue(it->first, args[1]);
2413 5060 : VisitObjectLiteralAccessor(literal, it->second->getter, args[2]);
2414 5059 : VisitObjectLiteralAccessor(literal, it->second->setter, args[3]);
2415 : builder()
2416 5059 : ->LoadLiteral(Smi::FromInt(NONE))
2417 5060 : .StoreAccumulatorInRegister(args[4])
2418 5059 : .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, args);
2419 : }
2420 :
2421 : // Object literals have two parts. The "static" part on the left contains no
2422 : // computed property names, and so we can compute its map ahead of time; see
2423 : // Runtime_CreateObjectLiteralBoilerplate. The second "dynamic" part starts
2424 : // with the first computed property name and continues with all properties to
2425 : // its right. All the code from above initializes the static component of the
2426 : // object literal, and arranges for the map of the result to reflect the
2427 : // static order in which the keys appear. For the dynamic properties, we
2428 : // compile them into a series of "SetOwnProperty" runtime calls. This will
2429 : // preserve insertion order.
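 : // For illustration (names are illustrative only): in
 : //
 : //   ({ a: 1, b: 2, [k]: 3, c: 4 })
 : //
 : // 'a' and 'b' form the static part whose map is computed ahead of time,
 : // while [k] and 'c' form the dynamic part handled by the loop below.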
2430 209285 : for (; property_index < expr->properties()->length(); property_index++) {
2431 2947 : ObjectLiteral::Property* property = expr->properties()->at(property_index);
2432 : RegisterAllocationScope inner_register_scope(this);
2433 :
2434 2947 : if (property->IsPrototype()) {
2435 : // __proto__:null is handled by CreateObjectLiteral.
2436 35 : if (property->IsNullPrototype()) continue;
2437 : DCHECK(property->emit_store());
2438 : DCHECK(!property->NeedsSetFunctionName());
2439 30 : RegisterList args = register_allocator()->NewRegisterList(2);
2440 30 : builder()->MoveRegister(literal, args[0]);
2441 : builder()->SetExpressionPosition(property->value());
2442 : VisitForRegisterValue(property->value(), args[1]);
2443 30 : builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
2444 30 : continue;
2445 : }
2446 :
2447 2912 : switch (property->kind()) {
2448 : case ObjectLiteral::Property::CONSTANT:
2449 : case ObjectLiteral::Property::COMPUTED:
2450 : case ObjectLiteral::Property::MATERIALIZED_LITERAL: {
2451 2392 : Register key = register_allocator()->NewRegister();
2452 2392 : BuildLoadPropertyKey(property, key);
2453 : builder()->SetExpressionPosition(property->value());
2454 : Register value;
2455 :
2456 : // Static class fields require the name property to be set on
2457 : // the class, meaning we can't wait until the
2458 : // StoreDataPropertyInLiteral call later to set the name.
2459 2405 : if (property->value()->IsClassLiteral() &&
2460 12 : property->value()->AsClassLiteral()->static_fields_initializer() !=
2461 : nullptr) {
2462 6 : value = register_allocator()->NewRegister();
2463 12 : VisitClassLiteral(property->value()->AsClassLiteral(), key);
2464 6 : builder()->StoreAccumulatorInRegister(value);
2465 : } else {
2466 2387 : value = VisitForRegisterValue(property->value());
2467 : }
2468 2393 : VisitSetHomeObject(value, literal, property);
2469 :
2470 : DataPropertyInLiteralFlags data_property_flags =
2471 : DataPropertyInLiteralFlag::kNoFlags;
2472 2393 : if (property->NeedsSetFunctionName()) {
2473 : data_property_flags |= DataPropertyInLiteralFlag::kSetFunctionName;
2474 : }
2475 :
2476 : FeedbackSlot slot =
2477 : feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
2478 : builder()
2479 2393 : ->LoadAccumulatorWithRegister(value)
2480 : .StoreDataPropertyInLiteral(literal, key, data_property_flags,
2481 2393 : feedback_index(slot));
2482 : break;
2483 : }
2484 : case ObjectLiteral::Property::GETTER:
2485 : case ObjectLiteral::Property::SETTER: {
2486 424 : RegisterList args = register_allocator()->NewRegisterList(4);
2487 424 : builder()->MoveRegister(literal, args[0]);
2488 424 : BuildLoadPropertyKey(property, args[1]);
2489 : builder()->SetExpressionPosition(property->value());
2490 : VisitForRegisterValue(property->value(), args[2]);
2491 424 : VisitSetHomeObject(args[2], literal, property);
2492 : builder()
2493 424 : ->LoadLiteral(Smi::FromInt(NONE))
2494 424 : .StoreAccumulatorInRegister(args[3]);
2495 : Runtime::FunctionId function_id =
2496 : property->kind() == ObjectLiteral::Property::GETTER
2497 : ? Runtime::kDefineGetterPropertyUnchecked
2498 424 : : Runtime::kDefineSetterPropertyUnchecked;
2499 424 : builder()->CallRuntime(function_id, args);
2500 : break;
2501 : }
2502 : case ObjectLiteral::Property::SPREAD: {
2503 96 : RegisterList args = register_allocator()->NewRegisterList(2);
2504 96 : builder()->MoveRegister(literal, args[0]);
2505 : builder()->SetExpressionPosition(property->value());
2506 : VisitForRegisterValue(property->value(), args[1]);
2507 96 : builder()->CallRuntime(Runtime::kCopyDataProperties, args);
2508 : break;
2509 : }
2510 : case ObjectLiteral::Property::PROTOTYPE:
2511 0 : UNREACHABLE(); // Handled specially above.
2512 : break;
2513 : }
2514 : }
2515 :
2516 203390 : builder()->LoadAccumulatorWithRegister(literal);
2517 : }
2518 :
2519 : // Fill an array with values from an iterator, starting at a given index. It is
2520 : // guaranteed that the loop only terminates when the iterator is exhausted, or
2521 : // when one of iterator.next(), value.done, or value.value fails.
2522 : //
2523 : // In pseudocode:
2524 : //
2525 : // loop {
2526 : // value = iterator.next()
2527 : // if (value.done) break;
2528 : // value = value.value
2529 : // array[index++] = value
2530 : // }
2531 2071 : void BytecodeGenerator::BuildFillArrayWithIterator(
2532 : IteratorRecord iterator, Register array, Register index, Register value,
2533 : FeedbackSlot next_value_slot, FeedbackSlot next_done_slot,
2534 : FeedbackSlot index_slot, FeedbackSlot element_slot) {
2535 : DCHECK(array.is_valid());
2536 : DCHECK(index.is_valid());
2537 : DCHECK(value.is_valid());
2538 :
2539 2072 : LoopBuilder loop_builder(builder(), nullptr, nullptr);
2540 2071 : loop_builder.LoopHeader();
2541 :
2542 : // Call the iterator's .next() method. Break from the loop if the `done`
2543 : // property is truthy, otherwise load the value from the iterator result and
2544 : // store it into the array.
2545 2072 : BuildIteratorNext(iterator, value);
2546 : builder()->LoadNamedProperty(
2547 : value, ast_string_constants()->done_string(),
2548 2072 : feedback_index(feedback_spec()->AddLoadICSlot()));
2549 : loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
2550 :
2551 2070 : loop_builder.LoopBody();
2552 : builder()
2553 : // value = value.value
2554 : ->LoadNamedProperty(value, ast_string_constants()->value_string(),
2555 2070 : feedback_index(next_value_slot))
2556 : // array[index] = value
2557 2072 : .StoreInArrayLiteral(array, index, feedback_index(element_slot))
2558 : // index++
2559 2072 : .LoadAccumulatorWithRegister(index)
2560 2072 : .UnaryOperation(Token::INC, feedback_index(index_slot))
2561 2071 : .StoreAccumulatorInRegister(index);
2562 2072 : loop_builder.BindContinueTarget();
2563 2072 : loop_builder.JumpToHeader(loop_depth_);
2564 2072 : }
2565 :
2566 385208 : void BytecodeGenerator::BuildCreateArrayLiteral(
2567 : const ZonePtrList<Expression>* elements, ArrayLiteral* expr) {
2568 : RegisterAllocationScope register_scope(this);
2569 385208 : Register index = register_allocator()->NewRegister();
2570 385215 : Register array = register_allocator()->NewRegister();
2571 : SharedFeedbackSlot element_slot(feedback_spec(),
2572 : FeedbackSlotKind::kStoreInArrayLiteral);
2573 : ZonePtrList<Expression>::iterator current = elements->begin();
2574 : ZonePtrList<Expression>::iterator end = elements->end();
2575 : bool is_empty = elements->is_empty();
2576 :
2577 570394 : if (!is_empty && (*current)->IsSpread()) {
2578 : // If we have a leading spread, use CreateArrayFromIterable to create
2579 : // an array from it and then add the remaining components to that array.
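 : // For example (illustrative), [...xs, y] first builds an array from xs via
 : // CreateArrayFromIterable, then loads its 'length' as the next store index
 : // so that y can be appended by the element loop further below.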
2580 2068 : VisitForAccumulatorValue(*current);
2581 2071 : builder()->CreateArrayFromIterable().StoreAccumulatorInRegister(array);
2582 :
2583 2071 : if (++current != end) {
2584 : // If there are remaining elements, prepare the index register that is
2585 : // used for adding those elements. The next index is the length of the
2586 : // newly created array.
2587 : auto length = ast_string_constants()->length_string();
2588 : int length_load_slot = feedback_index(feedback_spec()->AddLoadICSlot());
2589 : builder()
2590 568 : ->LoadNamedProperty(array, length, length_load_slot)
2591 568 : .StoreAccumulatorInRegister(index);
2592 : }
2593 383141 : } else if (expr != nullptr) {
2594 : // There are some elements before the first (if any) spread, and we can
2595 : // use a boilerplate when creating the initial array from those elements.
2596 :
2597 : // First, allocate a constant pool entry for the boilerplate that will
2598 : // be created during finalization, and will contain all the constant
2599 : // elements before the first spread. This also handles the empty array case
2600 : // and the one-shot optimization.
2601 383120 : uint8_t flags = CreateArrayLiteralFlags::Encode(
2602 766241 : expr->IsFastCloningSupported(), expr->ComputeFlags());
2603 : bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
2604 : size_t entry;
2605 383122 : if (is_empty && optimize_as_one_shot) {
2606 60647 : entry = builder()->EmptyArrayBoilerplateDescriptionConstantPoolEntry();
2607 322475 : } else if (!is_empty) {
2608 183093 : entry = builder()->AllocateDeferredConstantPoolEntry();
2609 366191 : array_literals_.push_back(std::make_pair(expr, entry));
2610 : }
2611 :
2612 383116 : if (optimize_as_one_shot) {
2613 213234 : RegisterList args = register_allocator()->NewRegisterList(2);
2614 : builder()
2615 213235 : ->LoadConstantPoolEntry(entry)
2616 213240 : .StoreAccumulatorInRegister(args[0])
2617 213239 : .LoadLiteral(Smi::FromInt(flags))
2618 213238 : .StoreAccumulatorInRegister(args[1])
2619 213239 : .CallRuntime(Runtime::kCreateArrayLiteralWithoutAllocationSite, args);
2620 169882 : } else if (is_empty) {
2621 : // Empty array literal fast-path.
2622 : int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2623 : DCHECK(expr->IsFastCloningSupported());
2624 139383 : builder()->CreateEmptyArrayLiteral(literal_index);
2625 : } else {
2626 : // Create array literal from boilerplate.
2627 : int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2628 61000 : builder()->CreateArrayLiteral(entry, literal_index, flags);
2629 : }
2630 383123 : builder()->StoreAccumulatorInRegister(array);
2631 :
2632 : // Insert the missing non-constant elements, up until the first spread
2633 : // index, into the initial array (the remaining elements will be inserted
2634 : // below).
2635 : DCHECK_EQ(current, elements->begin());
2636 : ZonePtrList<Expression>::iterator first_spread_or_end =
2637 431 : expr->first_spread_index() >= 0 ? current + expr->first_spread_index()
2638 383558 : : end;
2639 : int array_index = 0;
2640 13011389 : for (; current != first_spread_or_end; ++current, array_index++) {
2641 6314134 : Expression* subexpr = *current;
2642 : DCHECK(!subexpr->IsSpread());
2643 : // Skip the constants.
2644 6314134 : if (subexpr->IsCompileTimeValue()) continue;
2645 :
2646 : builder()
2647 258844 : ->LoadLiteral(Smi::FromInt(array_index))
2648 258847 : .StoreAccumulatorInRegister(index);
2649 258837 : VisitForAccumulatorValue(subexpr);
2650 : builder()->StoreInArrayLiteral(array, index,
2651 258840 : feedback_index(element_slot.Get()));
2652 : }
2653 :
2654 383124 : if (current != end) {
2655 : // If there are remaining elements, prepare the index register
2656 : // to store the next element, which comes from the first spread.
2657 431 : builder()->LoadLiteral(array_index).StoreAccumulatorInRegister(index);
2658 : }
2659 : } else {
2660 : // In other cases, we prepare an empty array to be filled in below.
2661 : DCHECK(!elements->is_empty());
2662 : int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2663 : builder()
2664 20 : ->CreateEmptyArrayLiteral(literal_index)
2665 20 : .StoreAccumulatorInRegister(array);
2666 : // Prepare the index for the first element.
2667 20 : builder()->LoadLiteral(Smi::FromInt(0)).StoreAccumulatorInRegister(index);
2668 : }
2669 :
2670 : // Now build insertions for the remaining elements from current to end.
2671 : SharedFeedbackSlot index_slot(feedback_spec(), FeedbackSlotKind::kBinaryOp);
2672 : SharedFeedbackSlot length_slot(
2673 : feedback_spec(), feedback_spec()->GetStoreICSlot(LanguageMode::kStrict));
2674 391387 : for (; current != end; ++current) {
2675 3087 : Expression* subexpr = *current;
2676 3087 : if (subexpr->IsSpread()) {
2677 : RegisterAllocationScope scope(this);
2678 1489 : builder()->SetExpressionAsStatementPosition(
2679 : subexpr->AsSpread()->expression());
2680 1489 : VisitForAccumulatorValue(subexpr->AsSpread()->expression());
2681 1489 : IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
2682 :
2683 1489 : Register value = register_allocator()->NewRegister();
2684 1489 : FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
2685 1489 : FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
2686 1490 : FeedbackSlot real_index_slot = index_slot.Get();
2687 1490 : FeedbackSlot real_element_slot = element_slot.Get();
2688 : BuildFillArrayWithIterator(iterator, array, index, value,
2689 : next_value_load_slot, next_done_load_slot,
2690 1490 : real_index_slot, real_element_slot);
2691 1598 : } else if (!subexpr->IsTheHoleLiteral()) {
2692 : // literal[index++] = subexpr
2693 1542 : VisitForAccumulatorValue(subexpr);
2694 : builder()
2695 : ->StoreInArrayLiteral(array, index,
2696 1542 : feedback_index(element_slot.Get()))
2697 1542 : .LoadAccumulatorWithRegister(index);
2698 : // Only increase the index if this is not the last element.
2699 1542 : if (current + 1 != end) {
2700 : builder()
2701 981 : ->UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
2702 981 : .StoreAccumulatorInRegister(index);
2703 : }
2704 : } else {
2705 : // literal.length = ++index
2706 : // length_slot is only used when there are holes.
2707 : auto length = ast_string_constants()->length_string();
2708 : builder()
2709 56 : ->LoadAccumulatorWithRegister(index)
2710 56 : .UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
2711 56 : .StoreAccumulatorInRegister(index)
2712 : .StoreNamedProperty(array, length, feedback_index(length_slot.Get()),
2713 56 : LanguageMode::kStrict);
2714 : }
2715 : }
2716 :
2717 385213 : builder()->LoadAccumulatorWithRegister(array);
2718 385217 : }
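// For illustration, a hypothetical array literal whose elements exercise the
// paths above: compile-time constants are assumed to come from the boilerplate
// created earlier, non-constant elements before the first spread are stored by
// index, and the spread plus any trailing elements are appended via the
// iterator protocol:
//
//   function f(x, rest) {
//     return [1, x, ...rest, x + 1];
//   }
//   f(5, [7, 8]);   // => [1, 5, 7, 8, 6]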
2719 :
2720 385178 : void BytecodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
2721 385178 : expr->InitDepthAndFlags();
2722 385184 : BuildCreateArrayLiteral(expr->values(), expr);
2723 385183 : }
2724 :
2725 0 : void BytecodeGenerator::VisitStoreInArrayLiteral(StoreInArrayLiteral* expr) {
2726 : builder()->SetExpressionAsStatementPosition(expr);
2727 : RegisterAllocationScope register_scope(this);
2728 0 : Register array = register_allocator()->NewRegister();
2729 0 : Register index = register_allocator()->NewRegister();
2730 : VisitForRegisterValue(expr->array(), array);
2731 : VisitForRegisterValue(expr->index(), index);
2732 0 : VisitForAccumulatorValue(expr->value());
2733 : builder()->StoreInArrayLiteral(
2734 : array, index,
2735 0 : feedback_index(feedback_spec()->AddStoreInArrayLiteralICSlot()));
2736 0 : }
2737 :
2738 6875357 : void BytecodeGenerator::VisitVariableProxy(VariableProxy* proxy) {
2739 : builder()->SetExpressionPosition(proxy);
2740 6875357 : BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
2741 6875463 : }
2742 :
2743 13633516 : void BytecodeGenerator::BuildVariableLoad(Variable* variable,
2744 : HoleCheckMode hole_check_mode,
2745 : TypeofMode typeof_mode) {
2746 13633516 : switch (variable->location()) {
2747 : case VariableLocation::LOCAL: {
2748 2470059 : Register source(builder()->Local(variable->index()));
2749 : // We need to load the variable into the accumulator, even when in a
2750 : // VisitForRegisterScope, in order to avoid register aliasing if
2751 : // subsequent expressions assign to the same variable.
2752 2470066 : builder()->LoadAccumulatorWithRegister(source);
2753 2470089 : if (hole_check_mode == HoleCheckMode::kRequired) {
2754 2637 : BuildThrowIfHole(variable);
2755 : }
2756 : break;
2757 : }
2758 : case VariableLocation::PARAMETER: {
2759 : Register source;
2760 3828834 : if (variable->IsReceiver()) {
2761 2415057 : source = builder()->Receiver();
2762 : } else {
2763 1413777 : source = builder()->Parameter(variable->index());
2764 : }
2765 : // We need to load the variable into the accumulator, even when in a
2766 : // VisitForRegisterScope, in order to avoid register aliasing if
2767 : // subsequent expressions assign to the same variable.
2768 3828839 : builder()->LoadAccumulatorWithRegister(source);
2769 3828847 : if (hole_check_mode == HoleCheckMode::kRequired) {
2770 2171667 : BuildThrowIfHole(variable);
2771 : }
2772 : break;
2773 : }
2774 : case VariableLocation::UNALLOCATED: {
2775 : // The global identifier "undefined" is immutable. Everything
2776 : // else could be reassigned. For performance, we do a pointer comparison
2777 : // rather than checking if the raw_name is really "undefined".
2778 5900937 : if (variable->raw_name() == ast_string_constants()->undefined_string()) {
2779 80704 : builder()->LoadUndefined();
2780 : } else {
2781 5820233 : FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
2782 : builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
2783 5820377 : typeof_mode);
2784 : }
2785 : break;
2786 : }
2787 : case VariableLocation::CONTEXT: {
2788 : int depth = execution_context()->ContextChainDepth(variable->scope());
2789 : ContextScope* context = execution_context()->Previous(depth);
2790 : Register context_reg;
2791 1047417 : if (context) {
2792 967844 : context_reg = context->reg();
2793 : depth = 0;
2794 : } else {
2795 79573 : context_reg = execution_context()->reg();
2796 : }
2797 :
2798 : BytecodeArrayBuilder::ContextSlotMutability immutable =
2799 : (variable->maybe_assigned() == kNotAssigned)
2800 : ? BytecodeArrayBuilder::kImmutableSlot
2801 1047417 : : BytecodeArrayBuilder::kMutableSlot;
2802 :
2803 : builder()->LoadContextSlot(context_reg, variable->index(), depth,
2804 1047417 : immutable);
2805 1047427 : if (hole_check_mode == HoleCheckMode::kRequired) {
2806 268488 : BuildThrowIfHole(variable);
2807 : }
2808 : break;
2809 : }
2810 : case VariableLocation::LOOKUP: {
2811 385034 : switch (variable->mode()) {
2812 : case VariableMode::kDynamicLocal: {
2813 : Variable* local_variable = variable->local_if_not_shadowed();
2814 : int depth =
2815 : execution_context()->ContextChainDepth(local_variable->scope());
2816 : builder()->LoadLookupContextSlot(variable->raw_name(), typeof_mode,
2817 3456 : local_variable->index(), depth);
2818 3456 : if (hole_check_mode == HoleCheckMode::kRequired) {
2819 1219 : BuildThrowIfHole(variable);
2820 : }
2821 : break;
2822 : }
2823 : case VariableMode::kDynamicGlobal: {
2824 : int depth =
2825 355677 : current_scope()->ContextChainLengthUntilOutermostSloppyEval();
2826 355677 : FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
2827 : builder()->LoadLookupGlobalSlot(variable->raw_name(), typeof_mode,
2828 355677 : feedback_index(slot), depth);
2829 : break;
2830 : }
2831 : default:
2832 25901 : builder()->LoadLookupSlot(variable->raw_name(), typeof_mode);
2833 : }
2834 : break;
2835 : }
2836 : case VariableLocation::MODULE: {
2837 : int depth = execution_context()->ContextChainDepth(variable->scope());
2838 1360 : builder()->LoadModuleVariable(variable->index(), depth);
2839 1360 : if (hole_check_mode == HoleCheckMode::kRequired) {
2840 848 : BuildThrowIfHole(variable);
2841 : }
2842 : break;
2843 : }
2844 : }
2845 13633634 : }
2846 :
2847 4201319 : void BytecodeGenerator::BuildVariableLoadForAccumulatorValue(
2848 : Variable* variable, HoleCheckMode hole_check_mode, TypeofMode typeof_mode) {
2849 : ValueResultScope accumulator_result(this);
2850 4201319 : BuildVariableLoad(variable, hole_check_mode, typeof_mode);
2851 4201332 : }
2852 :
2853 2464712 : void BytecodeGenerator::BuildReturn(int source_position) {
2854 2464712 : if (FLAG_trace) {
2855 : RegisterAllocationScope register_scope(this);
2856 0 : Register result = register_allocator()->NewRegister();
2857 : // Runtime returns {result} value, preserving accumulator.
2858 0 : builder()->StoreAccumulatorInRegister(result).CallRuntime(
2859 0 : Runtime::kTraceExit, result);
2860 : }
2861 2464712 : if (info()->collect_type_profile()) {
2862 136 : builder()->CollectTypeProfile(info()->literal()->return_position());
2863 : }
2864 2464712 : builder()->SetReturnPosition(source_position, info()->literal());
2865 2464750 : builder()->Return();
2866 2464780 : }
2867 :
2868 7092 : void BytecodeGenerator::BuildAsyncReturn(int source_position) {
2869 : RegisterAllocationScope register_scope(this);
2870 :
2871 14184 : if (IsAsyncGeneratorFunction(info()->literal()->kind())) {
2872 979 : RegisterList args = register_allocator()->NewRegisterList(3);
2873 : builder()
2874 979 : ->MoveRegister(generator_object(), args[0]) // generator
2875 979 : .StoreAccumulatorInRegister(args[1]) // value
2876 979 : .LoadTrue()
2877 979 : .StoreAccumulatorInRegister(args[2]) // done
2878 979 : .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args);
2879 : } else {
2880 : DCHECK(IsAsyncFunction(info()->literal()->kind()));
2881 6113 : RegisterList args = register_allocator()->NewRegisterList(3);
2882 : builder()
2883 6113 : ->MoveRegister(generator_object(), args[0]) // generator
2884 6113 : .StoreAccumulatorInRegister(args[1]) // value
2885 12226 : .LoadBoolean(info()->literal()->CanSuspend())
2886 6113 : .StoreAccumulatorInRegister(args[2]) // can_suspend
2887 6113 : .CallRuntime(Runtime::kInlineAsyncFunctionResolve, args);
2888 : }
2889 :
2890 7092 : BuildReturn(source_position);
2891 7092 : }
2892 :
2893 35248 : void BytecodeGenerator::BuildReThrow() { builder()->ReThrow(); }
2894 :
2895 2481510 : void BytecodeGenerator::BuildThrowIfHole(Variable* variable) {
2896 2481510 : if (variable->is_this()) {
2897 : DCHECK(variable->mode() == VariableMode::kConst);
2898 2171853 : builder()->ThrowSuperNotCalledIfHole();
2899 : } else {
2900 309657 : builder()->ThrowReferenceErrorIfHole(variable->raw_name());
2901 : }
2902 2481516 : }
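// For illustration, hypothetical JS reaching the two throws above: a read of a
// lexical binding inside its temporal dead zone, and a use of 'this' in a
// derived constructor before super() has been called:
//
//   { console.log(x); let x = 1; }   // ReferenceError: the binding is still the hole
//
//   class B extends Object {
//     constructor() { this.x = 1; super(); }   // ReferenceError: super() not yet called
//   }
//   new B();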
2903 :
2904 39213 : void BytecodeGenerator::BuildHoleCheckForVariableAssignment(Variable* variable,
2905 : Token::Value op) {
2906 41770 : if (variable->is_this() && variable->mode() == VariableMode::kConst &&
2907 : op == Token::INIT) {
2908 :     // Perform an initialization check for 'this'. The 'this' variable is the
2909 : // only variable able to trigger bind operations outside the TDZ
2910 : // via 'super' calls.
2911 2557 : builder()->ThrowSuperAlreadyCalledIfNotHole();
2912 : } else {
2913 : // Perform an initialization check for let/const declared variables.
2914 : // E.g. let x = (x = 20); is not allowed.
2915 : DCHECK(IsLexicalVariableMode(variable->mode()));
2916 36656 : BuildThrowIfHole(variable);
2917 : }
2918 39213 : }
2919 :
2920 5739472 : void BytecodeGenerator::BuildVariableAssignment(
2921 : Variable* variable, Token::Value op, HoleCheckMode hole_check_mode,
2922 : LookupHoistingMode lookup_hoisting_mode) {
2923 : VariableMode mode = variable->mode();
2924 : RegisterAllocationScope assignment_register_scope(this);
2925 : BytecodeLabel end_label;
2926 5739472 : switch (variable->location()) {
2927 : case VariableLocation::PARAMETER:
2928 : case VariableLocation::LOCAL: {
2929 : Register destination;
2930 2444521 : if (VariableLocation::PARAMETER == variable->location()) {
2931 27239 : if (variable->IsReceiver()) {
2932 2401 : destination = builder()->Receiver();
2933 : } else {
2934 24838 : destination = builder()->Parameter(variable->index());
2935 : }
2936 : } else {
2937 2417282 : destination = builder()->Local(variable->index());
2938 : }
2939 :
2940 2444514 : if (hole_check_mode == HoleCheckMode::kRequired) {
2941 : // Load destination to check for hole.
2942 4973 : Register value_temp = register_allocator()->NewRegister();
2943 : builder()
2944 4973 : ->StoreAccumulatorInRegister(value_temp)
2945 4973 : .LoadAccumulatorWithRegister(destination);
2946 :
2947 4973 : BuildHoleCheckForVariableAssignment(variable, op);
2948 4973 : builder()->LoadAccumulatorWithRegister(value_temp);
2949 : }
2950 :
2951 2444514 : if (mode != VariableMode::kConst || op == Token::INIT) {
2952 2439657 : builder()->StoreAccumulatorInRegister(destination);
2953 4857 : } else if (variable->throw_on_const_assignment(language_mode())) {
2954 4824 : builder()->CallRuntime(Runtime::kThrowConstAssignError);
2955 : }
2956 : break;
2957 : }
2958 : case VariableLocation::UNALLOCATED: {
2959 1465717 : FeedbackSlot slot = GetCachedStoreGlobalICSlot(language_mode(), variable);
2960 1465737 : builder()->StoreGlobal(variable->raw_name(), feedback_index(slot));
2961 : break;
2962 : }
2963 : case VariableLocation::CONTEXT: {
2964 : int depth = execution_context()->ContextChainDepth(variable->scope());
2965 : ContextScope* context = execution_context()->Previous(depth);
2966 : Register context_reg;
2967 :
2968 1756980 : if (context) {
2969 1744896 : context_reg = context->reg();
2970 : depth = 0;
2971 : } else {
2972 12084 : context_reg = execution_context()->reg();
2973 : }
2974 :
2975 1756980 : if (hole_check_mode == HoleCheckMode::kRequired) {
2976 : // Load destination to check for hole.
2977 34171 : Register value_temp = register_allocator()->NewRegister();
2978 : builder()
2979 34171 : ->StoreAccumulatorInRegister(value_temp)
2980 : .LoadContextSlot(context_reg, variable->index(), depth,
2981 34171 : BytecodeArrayBuilder::kMutableSlot);
2982 :
2983 34171 : BuildHoleCheckForVariableAssignment(variable, op);
2984 34171 : builder()->LoadAccumulatorWithRegister(value_temp);
2985 : }
2986 :
2987 1756980 : if (mode != VariableMode::kConst || op == Token::INIT) {
2988 1728901 : builder()->StoreContextSlot(context_reg, variable->index(), depth);
2989 28079 : } else if (variable->throw_on_const_assignment(language_mode())) {
2990 28057 : builder()->CallRuntime(Runtime::kThrowConstAssignError);
2991 : }
2992 : break;
2993 : }
2994 : case VariableLocation::LOOKUP: {
2995 : builder()->StoreLookupSlot(variable->raw_name(), language_mode(),
2996 37319 : lookup_hoisting_mode);
2997 37319 : break;
2998 : }
2999 : case VariableLocation::MODULE: {
3000 : DCHECK(IsDeclaredVariableMode(mode));
3001 :
3002 34967 : if (mode == VariableMode::kConst && op != Token::INIT) {
3003 110 : builder()->CallRuntime(Runtime::kThrowConstAssignError);
3004 110 : break;
3005 : }
3006 :
3007 : // If we don't throw above, we know that we're dealing with an
3008 : // export because imports are const and we do not generate initializing
3009 : // assignments for them.
3010 : DCHECK(variable->IsExport());
3011 :
3012 : int depth = execution_context()->ContextChainDepth(variable->scope());
3013 34857 : if (hole_check_mode == HoleCheckMode::kRequired) {
3014 69 : Register value_temp = register_allocator()->NewRegister();
3015 : builder()
3016 69 : ->StoreAccumulatorInRegister(value_temp)
3017 69 : .LoadModuleVariable(variable->index(), depth);
3018 69 : BuildHoleCheckForVariableAssignment(variable, op);
3019 69 : builder()->LoadAccumulatorWithRegister(value_temp);
3020 : }
3021 34857 : builder()->StoreModuleVariable(variable->index(), depth);
3022 34857 : break;
3023 : }
3024 : }
3025 5739528 : }
3026 :
3027 2446286 : void BytecodeGenerator::BuildLoadNamedProperty(const Expression* object_expr,
3028 : Register object,
3029 : const AstRawString* name) {
3030 2446286 : if (ShouldOptimizeAsOneShot()) {
3031 1280067 : builder()->LoadNamedPropertyNoFeedback(object, name);
3032 : } else {
3033 1166219 : FeedbackSlot slot = GetCachedLoadICSlot(object_expr, name);
3034 1166213 : builder()->LoadNamedProperty(object, name, feedback_index(slot));
3035 : }
3036 2446342 : }
3037 :
3038 2400205 : void BytecodeGenerator::BuildStoreNamedProperty(const Expression* object_expr,
3039 : Register object,
3040 : const AstRawString* name) {
3041 : Register value;
3042 2400205 : if (!execution_result()->IsEffect()) {
3043 8443 : value = register_allocator()->NewRegister();
3044 8443 : builder()->StoreAccumulatorInRegister(value);
3045 : }
3046 :
3047 2400212 : if (ShouldOptimizeAsOneShot()) {
3048 115041 : builder()->StoreNamedPropertyNoFeedback(object, name, language_mode());
3049 : } else {
3050 2285171 : FeedbackSlot slot = GetCachedStoreICSlot(object_expr, name);
3051 : builder()->StoreNamedProperty(object, name, feedback_index(slot),
3052 2285174 : language_mode());
3053 : }
3054 :
3055 2400223 : if (!execution_result()->IsEffect()) {
3056 8443 : builder()->LoadAccumulatorWithRegister(value);
3057 : }
3058 2400223 : }
3059 :
3060 : // static
3061 : BytecodeGenerator::AssignmentLhsData
3062 0 : BytecodeGenerator::AssignmentLhsData::NonProperty(Expression* expr) {
3063 : return AssignmentLhsData(NON_PROPERTY, expr, RegisterList(), Register(),
3064 0 : Register(), nullptr, nullptr);
3065 : }
3066 : // static
3067 : BytecodeGenerator::AssignmentLhsData
3068 0 : BytecodeGenerator::AssignmentLhsData::NamedProperty(Expression* object_expr,
3069 : Register object,
3070 : const AstRawString* name) {
3071 : return AssignmentLhsData(NAMED_PROPERTY, nullptr, RegisterList(), object,
3072 0 : Register(), object_expr, name);
3073 : }
3074 : // static
3075 : BytecodeGenerator::AssignmentLhsData
3076 0 : BytecodeGenerator::AssignmentLhsData::KeyedProperty(Register object,
3077 : Register key) {
3078 : return AssignmentLhsData(KEYED_PROPERTY, nullptr, RegisterList(), object, key,
3079 0 : nullptr, nullptr);
3080 : }
3081 : // static
3082 : BytecodeGenerator::AssignmentLhsData
3083 0 : BytecodeGenerator::AssignmentLhsData::NamedSuperProperty(
3084 : RegisterList super_property_args) {
3085 : return AssignmentLhsData(NAMED_SUPER_PROPERTY, nullptr, super_property_args,
3086 0 : Register(), Register(), nullptr, nullptr);
3087 : }
3088 : // static
3089 : BytecodeGenerator::AssignmentLhsData
3090 0 : BytecodeGenerator::AssignmentLhsData::KeyedSuperProperty(
3091 : RegisterList super_property_args) {
3092 : return AssignmentLhsData(KEYED_SUPER_PROPERTY, nullptr, super_property_args,
3093 0 : Register(), Register(), nullptr, nullptr);
3094 : }
3095 :
3096 7637855 : BytecodeGenerator::AssignmentLhsData BytecodeGenerator::PrepareAssignmentLhs(
3097 : Expression* lhs, AccumulatorPreservingMode accumulator_preserving_mode) {
3098 : // Left-hand side can only be a property, a global or a variable slot.
3099 7637855 : Property* property = lhs->AsProperty();
3100 7637855 : AssignType assign_type = Property::GetAssignType(property);
3101 :
3102 : // Evaluate LHS expression.
3103 7637865 : switch (assign_type) {
3104 : case NON_PROPERTY:
3105 : return AssignmentLhsData::NonProperty(lhs);
3106 : case NAMED_PROPERTY: {
3107 2400220 : AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3108 2400220 : Register object = VisitForRegisterValue(property->obj());
3109 : const AstRawString* name =
3110 2400206 : property->key()->AsLiteral()->AsRawPropertyName();
3111 : return AssignmentLhsData::NamedProperty(property->obj(), object, name);
3112 : }
3113 : case KEYED_PROPERTY: {
3114 33801 : AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3115 33801 : Register object = VisitForRegisterValue(property->obj());
3116 33802 : Register key = VisitForRegisterValue(property->key());
3117 : return AssignmentLhsData::KeyedProperty(object, key);
3118 : }
3119 : case NAMED_SUPER_PROPERTY: {
3120 220 : AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3121 : RegisterList super_property_args =
3122 220 : register_allocator()->NewRegisterList(4);
3123 : SuperPropertyReference* super_property =
3124 220 : property->obj()->AsSuperPropertyReference();
3125 220 : BuildThisVariableLoad();
3126 220 : builder()->StoreAccumulatorInRegister(super_property_args[0]);
3127 : VisitForRegisterValue(super_property->home_object(),
3128 : super_property_args[1]);
3129 : builder()
3130 440 : ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
3131 220 : .StoreAccumulatorInRegister(super_property_args[2]);
3132 : return AssignmentLhsData::NamedSuperProperty(super_property_args);
3133 : }
3134 : case KEYED_SUPER_PROPERTY: {
3135 340 : AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3136 : RegisterList super_property_args =
3137 340 : register_allocator()->NewRegisterList(4);
3138 : SuperPropertyReference* super_property =
3139 340 : property->obj()->AsSuperPropertyReference();
3140 340 : BuildThisVariableLoad();
3141 340 : builder()->StoreAccumulatorInRegister(super_property_args[0]);
3142 : VisitForRegisterValue(super_property->home_object(),
3143 : super_property_args[1]);
3144 : VisitForRegisterValue(property->key(), super_property_args[2]);
3145 : return AssignmentLhsData::KeyedSuperProperty(super_property_args);
3146 : }
3147 : }
3148 0 : UNREACHABLE();
3149 : }
3150 :
3151 : // Build the iteration finalizer called in the finally block of an iteration
3152 : // protocol execution. This closes the iterator if needed, and suppresses any
3153 : // exception it throws if necessary.
3154 : //
3155 : // In pseudo-code, this builds:
3156 : //
3157 : // if (!done) {
3158 : // let method = iterator.return
3159 : // if (method !== null && method !== undefined) {
3160 : // if (typeof(method) !== "function") throw TypeError
3161 : // try {
3162 : // let return_val = method.call(iterator)
3163 : // if (!%IsObject(return_val)) throw TypeError
3164 : // } catch (e) {
3165 : // if (iteration_continuation != RETHROW)
3166 : // rethrow e
3167 : // }
3168 : // }
3169 : // }
3170 : //
3171 : // For async iterators, the result of calling the return() method is awaited.
3172 38288 : void BytecodeGenerator::BuildFinalizeIteration(
3173 : IteratorRecord iterator, Register done,
3174 : Register iteration_continuation_token) {
3175 : RegisterAllocationScope register_scope(this);
3176 : BytecodeLabels iterator_is_done(zone());
3177 :
3178 : // if (!done) {
3179 38291 : builder()->LoadAccumulatorWithRegister(done).JumpIfTrue(
3180 76586 : ToBooleanMode::kConvertToBoolean, iterator_is_done.New());
3181 :
3182 : // method = iterator.return
3183 : // if (method !== null && method !== undefined) {
3184 38296 : Register method = register_allocator()->NewRegister();
3185 : builder()
3186 : ->LoadNamedProperty(iterator.object(),
3187 : ast_string_constants()->return_string(),
3188 38298 : feedback_index(feedback_spec()->AddLoadICSlot()))
3189 38293 : .StoreAccumulatorInRegister(method)
3190 76591 : .JumpIfUndefined(iterator_is_done.New())
3191 76589 : .JumpIfNull(iterator_is_done.New());
3192 :
3193 : // if (typeof(method) !== "function") throw TypeError
3194 : BytecodeLabel if_callable;
3195 : builder()
3196 38297 : ->CompareTypeOf(TestTypeOfFlags::LiteralFlag::kFunction)
3197 38296 : .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &if_callable);
3198 : {
3199 : // throw %NewTypeError(kReturnMethodNotCallable)
3200 : RegisterAllocationScope register_scope(this);
3201 38297 : RegisterList new_type_error_args = register_allocator()->NewRegisterList(2);
3202 : builder()
3203 38295 : ->LoadLiteral(Smi::FromEnum(MessageTemplate::kReturnMethodNotCallable))
3204 38297 : .StoreAccumulatorInRegister(new_type_error_args[0])
3205 38290 : .LoadLiteral(ast_string_constants()->empty_string())
3206 38297 : .StoreAccumulatorInRegister(new_type_error_args[1])
3207 38296 : .CallRuntime(Runtime::kNewTypeError, new_type_error_args)
3208 38296 : .Throw();
3209 : }
3210 38294 : builder()->Bind(&if_callable);
3211 :
3212 : {
3213 : RegisterAllocationScope register_scope(this);
3214 : BuildTryCatch(
3215 : // try {
3216 : // let return_val = method.call(iterator)
3217 : // if (!%IsObject(return_val)) throw TypeError
3218 : // }
3219 38288 : [&]() {
3220 76587 : RegisterList args(iterator.object());
3221 191806 : builder()->CallProperty(
3222 76582 : method, args, feedback_index(feedback_spec()->AddCallICSlot()));
3223 38299 : if (iterator.type() == IteratorType::kAsync) {
3224 341 : BuildAwait();
3225 : }
3226 76594 : builder()->JumpIfJSReceiver(iterator_is_done.New());
3227 : {
3228 : // Throw this exception inside the try block so that it is
3229 : // suppressed by the iteration continuation if necessary.
3230 : RegisterAllocationScope register_scope(this);
3231 38296 : Register return_result = register_allocator()->NewRegister();
3232 : builder()
3233 38295 : ->StoreAccumulatorInRegister(return_result)
3234 : .CallRuntime(Runtime::kThrowIteratorResultNotAnObject,
3235 38296 : return_result);
3236 : }
3237 38295 : },
3238 :
3239 : // catch (e) {
3240 : // if (iteration_continuation != RETHROW)
3241 : // rethrow e
3242 : // }
3243 38295 : [&](Register context) {
3244 : // Reuse context register to store the exception.
3245 38295 : Register close_exception = context;
3246 114886 : builder()->StoreAccumulatorInRegister(close_exception);
3247 :
3248 : BytecodeLabel suppress_close_exception;
3249 : builder()
3250 : ->LoadLiteral(
3251 38296 : Smi::FromInt(ControlScope::DeferredCommands::kRethrowToken))
3252 76592 : .CompareReference(iteration_continuation_token)
3253 : .JumpIfTrue(ToBooleanMode::kAlreadyBoolean,
3254 38295 : &suppress_close_exception)
3255 38295 : .LoadAccumulatorWithRegister(close_exception)
3256 38295 : .ReThrow()
3257 38297 : .Bind(&suppress_close_exception);
3258 38295 : },
3259 38295 : HandlerTable::UNCAUGHT);
3260 : }
3261 :
3262 38287 : iterator_is_done.Bind(builder());
3263 38297 : }
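// For illustration, a hypothetical destructuring assignment in which this
// finalizer runs: the iterator is not done when the pattern is exhausted, so
// its return() method is called and the generator's finally block observes the
// close:
//
//   function* g() {
//     try { yield 1; yield 2; yield 3; } finally { console.log("closed"); }
//   }
//   let a, b;
//   [a, b] = g();   // a === 1, b === 2, and "closed" is logged via return()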
3264 :
3265 : // Get the default value of a destructuring target. Will mutate the
3266 : // destructuring target expression if there is a default value.
3267 : //
3268 : // For
3269 : // a = b
3270 : // in
3271 : // let {a = b} = c
3272 : // returns b and mutates the input into a.
3273 0 : Expression* BytecodeGenerator::GetDestructuringDefaultValue(
3274 : Expression** target) {
3275 : Expression* default_value = nullptr;
3276 22822 : if ((*target)->IsAssignment()) {
3277 1889 : Assignment* default_init = (*target)->AsAssignment();
3278 : DCHECK_EQ(default_init->op(), Token::ASSIGN);
3279 : default_value = default_init->value();
3280 0 : *target = default_init->target();
3281 : DCHECK((*target)->IsValidReferenceExpression() || (*target)->IsPattern());
3282 : }
3283 0 : return default_value;
3284 : }
3285 :
3286 : // Convert a destructuring assignment to an array literal into a sequence of
3287 : // iterator accesses into the value being assigned (in the accumulator).
3288 : //
3289 : // [a().x, ...b] = accumulator
3290 : //
3291 : // becomes
3292 : //
3293 : // iterator = %GetIterator(accumulator)
3294 : // try {
3295 : //
3296 : //   // Individual assignments read off the value from iterator.next(). This gets
3297 : // // repeated per destructuring element.
3298 : // if (!done) {
3299 : // // Make sure we are considered 'done' if .next(), .done or .value fail.
3300 : // done = true
3301 : // var next_result = iterator.next()
3302 : // var tmp_done = next_result.done
3303 : // if (!tmp_done) {
3304 : // value = next_result.value
3305 : // done = false
3306 : // }
3307 : // }
3308 : // if (done)
3309 : // value = undefined
3310 : // a().x = value
3311 : //
3312 : // // A spread receives the remaining items in the iterator.
3313 : // var array = []
3314 : // var index = 0
3315 : // %FillArrayWithIterator(iterator, array, index, done)
3316 : // done = true
3317 : // b = array
3318 : //
3319 : // } catch(e) {
3320 : // iteration_continuation = RETHROW
3321 : // } finally {
3322 : // %FinalizeIteration(iterator, done, iteration_continuation)
3323 : // }
3324 2748 : void BytecodeGenerator::BuildDestructuringArrayAssignment(
3325 : ArrayLiteral* pattern, Token::Value op,
3326 : LookupHoistingMode lookup_hoisting_mode) {
3327 : RegisterAllocationScope scope(this);
3328 :
3329 2748 : Register value = register_allocator()->NewRegister();
3330 2748 : builder()->StoreAccumulatorInRegister(value);
3331 :
3332 : // Store the iterator in a dedicated register so that it can be closed on
3333 : // exit, and the 'done' value in a dedicated register so that it can be
3334 : // changed and accessed independently of the iteration result.
3335 2748 : IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
3336 2748 : Register done = register_allocator()->NewRegister();
3337 2748 : builder()->LoadFalse();
3338 2748 : builder()->StoreAccumulatorInRegister(done);
3339 :
3340 : BuildTryFinally(
3341 : // Try block.
3342 2748 : [&]() {
3343 62639 : Register next_result = register_allocator()->NewRegister();
3344 : FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
3345 : FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
3346 :
3347 : Spread* spread = nullptr;
3348 12640 : for (Expression* target : *pattern->values()) {
3349 4155 : if (target->IsSpread()) {
3350 583 : spread = target->AsSpread();
3351 583 : break;
3352 : }
3353 :
3354 : Expression* default_value = GetDestructuringDefaultValue(&target);
3355 3572 : if (!target->IsPattern()) {
3356 : builder()->SetExpressionAsStatementPosition(target);
3357 : }
3358 :
3359 3572 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
3360 :
3361 : // if (!done) {
3362 : // // Make sure we are considered done if .next(), .done or .value
3363 : // // fail.
3364 : // done = true
3365 : // var next_result = iterator.next()
3366 : // var tmp_done = next_result.done
3367 : // if (!tmp_done) {
3368 : // value = next_result.value
3369 : // done = false
3370 : // }
3371 : // }
3372 : // if (done)
3373 : // value = undefined
3374 : BytecodeLabels is_done(zone());
3375 :
3376 14871 : builder()->LoadAccumulatorWithRegister(done);
3377 : builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean,
3378 7144 : is_done.New());
3379 :
3380 3572 : builder()->LoadTrue().StoreAccumulatorInRegister(done);
3381 7727 : BuildIteratorNext(iterator, next_result);
3382 : builder()
3383 : ->LoadNamedProperty(next_result,
3384 : ast_string_constants()->done_string(),
3385 3572 : feedback_index(next_done_load_slot))
3386 7144 : .JumpIfTrue(ToBooleanMode::kConvertToBoolean, is_done.New())
3387 : .LoadNamedProperty(next_result,
3388 : ast_string_constants()->value_string(),
3389 3572 : feedback_index(next_value_load_slot))
3390 3572 : .StoreAccumulatorInRegister(next_result)
3391 3572 : .LoadFalse()
3392 3572 : .StoreAccumulatorInRegister(done)
3393 3572 : .LoadAccumulatorWithRegister(next_result);
3394 :
3395 : // Only do the assignment if this is not a hole (i.e. 'elided').
3396 3572 : if (!target->IsTheHoleLiteral()) {
3397 : // [<pattern> = <init>] = <value>
3398 : // becomes (roughly)
3399 : // temp = <value>.next();
3400 : // <pattern> = temp === undefined ? <init> : temp;
3401 : BytecodeLabel do_assignment;
3402 3463 : if (default_value) {
3403 454 : builder()->JumpIfNotUndefined(&do_assignment);
3404 : // Since done == true => temp == undefined, jump directly to using
3405 : // the default value for that case.
3406 454 : is_done.Bind(builder());
3407 454 : VisitForAccumulatorValue(default_value);
3408 : } else {
3409 3009 : builder()->Jump(&do_assignment);
3410 3009 : is_done.Bind(builder());
3411 3009 : builder()->LoadUndefined();
3412 : }
3413 3463 : builder()->Bind(&do_assignment);
3414 :
3415 8092 : BuildAssignment(lhs_data, op, lookup_hoisting_mode);
3416 : } else {
3417 : DCHECK_EQ(lhs_data.assign_type(), NON_PROPERTY);
3418 109 : is_done.Bind(builder());
3419 : }
3420 : }
3421 :
3422 2748 : if (spread) {
3423 : RegisterAllocationScope scope(this);
3424 :
3425 :           // A spread is turned into a loop over the remainder of the iterator.
3426 : Expression* target = spread->expression();
3427 :
3428 583 : if (!target->IsPattern()) {
3429 : builder()->SetExpressionAsStatementPosition(spread);
3430 : }
3431 :
3432 583 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
3433 :
3434 : // var array = [];
3435 583 : Register array = register_allocator()->NewRegister();
3436 : builder()->CreateEmptyArrayLiteral(
3437 583 : feedback_index(feedback_spec()->AddLiteralSlot()));
3438 583 : builder()->StoreAccumulatorInRegister(array);
3439 :
3440 : // var index = 0;
3441 583 : Register index = register_allocator()->NewRegister();
3442 583 : builder()->LoadLiteral(Smi::zero());
3443 583 : builder()->StoreAccumulatorInRegister(index);
3444 :
3445 : // Set done to true, since it's guaranteed to be true by the time the
3446 : // array fill completes.
3447 583 : builder()->LoadTrue().StoreAccumulatorInRegister(done);
3448 :
3449 : // Fill the array with the iterator.
3450 : FeedbackSlot element_slot =
3451 583 : feedback_spec()->AddStoreInArrayLiteralICSlot();
3452 583 : FeedbackSlot index_slot = feedback_spec()->AddBinaryOpICSlot();
3453 : BuildFillArrayWithIterator(iterator, array, index, next_result,
3454 : next_value_load_slot, next_done_load_slot,
3455 583 : index_slot, element_slot);
3456 :
3457 : // Assign the array to the LHS.
3458 583 : builder()->LoadAccumulatorWithRegister(array);
3459 1749 : BuildAssignment(lhs_data, op, lookup_hoisting_mode);
3460 : }
3461 2748 : },
3462 : // Finally block.
3463 : [&](Register iteration_continuation_token) {
3464 : // Finish the iteration in the finally block.
3465 2748 : BuildFinalizeIteration(iterator, done, iteration_continuation_token);
3466 : },
3467 2748 : HandlerTable::UNCAUGHT);
3468 :
3469 2748 : if (!execution_result()->IsEffect()) {
3470 266 : builder()->LoadAccumulatorWithRegister(value);
3471 : }
3472 2748 : }
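// For illustration, a hypothetical assignment covering the pieces above: a
// default value, an elided element, and a rest target filled from the
// remaining iterator items:
//
//   let a, c, rest;
//   [a = 10, , c, ...rest] = [undefined, 1, 2, 3, 4];
//   // a === 10 (default applies), the hole skips 1, c === 2, rest = [3, 4]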
3473 :
3474 : // Convert a destructuring assignment to an object literal into a sequence of
3475 : // property accesses into the value being assigned (in the accumulator).
3476 : //
3477 : // { y, [x++]: a(), ...b.c } = value
3478 : //
3479 : // becomes
3480 : //
3481 : // var rest_runtime_callargs = new Array(3);
3482 : // rest_runtime_callargs[0] = value;
3483 : //
3484 : // rest_runtime_callargs[1] = value;
3485 : // y = value.y;
3486 : //
3487 : // var temp1 = %ToName(x++);
3488 : // rest_runtime_callargs[2] = temp1;
3489 : // a() = value[temp1];
3490 : //
3491 : // b.c = %CopyDataPropertiesWithExcludedProperties.call(rest_runtime_callargs);
3492 11134 : void BytecodeGenerator::BuildDestructuringObjectAssignment(
3493 : ObjectLiteral* pattern, Token::Value op,
3494 : LookupHoistingMode lookup_hoisting_mode) {
3495 : RegisterAllocationScope scope(this);
3496 :
3497 : // if (value === null || value === undefined)
3498 : // throw new TypeError(kNonCoercible);
3499 : //
3500 : // TODO(leszeks): Eliminate check if value is known to be non-null (e.g.
3501 : // an object literal).
3502 : BytecodeLabel is_null_or_undefined, not_null_or_undefined;
3503 : builder()
3504 11134 : ->JumpIfNull(&is_null_or_undefined)
3505 11134 : .JumpIfNotUndefined(¬_null_or_undefined);
3506 :
3507 : {
3508 11134 : builder()->Bind(&is_null_or_undefined);
3509 : builder()->SetExpressionPosition(pattern);
3510 11134 : builder()->CallRuntime(Runtime::kThrowPatternAssignmentNonCoercible);
3511 : }
3512 :
3513 : // Store the assignment value in a register.
3514 : Register value;
3515 : RegisterList rest_runtime_callargs;
3516 11134 : if (pattern->has_rest_property()) {
3517 : rest_runtime_callargs =
3518 205 : register_allocator()->NewRegisterList(pattern->properties()->length());
3519 205 : value = rest_runtime_callargs[0];
3520 : } else {
3521 10929 : value = register_allocator()->NewRegister();
3522 : }
3523 11134 : builder()->Bind(¬_null_or_undefined).StoreAccumulatorInRegister(value);
3524 :
3525 : int i = 0;
3526 49633 : for (ObjectLiteralProperty* pattern_property : *pattern->properties()) {
3527 : RegisterAllocationScope scope(this);
3528 :
3529 : // The key of the pattern becomes the key into the RHS value, and the value
3530 : // of the pattern becomes the target of the assignment.
3531 : //
3532 : // e.g. { a: b } = o becomes b = o.a
3533 : Expression* pattern_key = pattern_property->key();
3534 : Expression* target = pattern_property->value();
3535 : Expression* default_value = GetDestructuringDefaultValue(&target);
3536 :
3537 19250 : if (!target->IsPattern()) {
3538 : builder()->SetExpressionAsStatementPosition(target);
3539 : }
3540 :
3541 : // Calculate this property's key into the assignment RHS value, additionally
3542 : // storing the key for rest_runtime_callargs if needed.
3543 : //
3544 : // The RHS is accessed using the key either by LoadNamedProperty (if
3545 : // value_name is valid) or by LoadKeyedProperty (otherwise).
3546 : const AstRawString* value_name = nullptr;
3547 : Register value_key;
3548 :
3549 19250 : if (pattern_property->kind() != ObjectLiteralProperty::Kind::SPREAD) {
3550 19045 : if (pattern_key->IsPropertyName()) {
3551 18725 : value_name = pattern_key->AsLiteral()->AsRawPropertyName();
3552 : }
3553 19045 : if (pattern->has_rest_property() || !value_name) {
3554 383 : if (pattern->has_rest_property()) {
3555 129 : value_key = rest_runtime_callargs[i + 1];
3556 : } else {
3557 254 : value_key = register_allocator()->NewRegister();
3558 : }
3559 383 : if (pattern_property->is_computed_name()) {
3560 : // { [a()]: b().x } = c
3561 : // becomes
3562 : // var tmp = a()
3563 : // b().x = c[tmp]
3564 : DCHECK(!pattern_key->IsPropertyName() ||
3565 : !pattern_key->IsNumberLiteral());
3566 270 : VisitForAccumulatorValue(pattern_key);
3567 270 : builder()->ToName(value_key);
3568 : } else {
3569 : // We only need the key for non-computed properties when it is numeric
3570 : // or is being saved for the rest_runtime_callargs.
3571 : DCHECK(
3572 : pattern_key->IsNumberLiteral() ||
3573 : (pattern->has_rest_property() && pattern_key->IsPropertyName()));
3574 : VisitForRegisterValue(pattern_key, value_key);
3575 : }
3576 : }
3577 : }
3578 :
3579 19250 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
3580 :
3581 : // Get the value from the RHS.
3582 19249 : if (pattern_property->kind() == ObjectLiteralProperty::Kind::SPREAD) {
3583 : DCHECK_EQ(i, pattern->properties()->length() - 1);
3584 : DCHECK(!value_key.is_valid());
3585 : DCHECK_NULL(value_name);
3586 : builder()->CallRuntime(Runtime::kCopyDataPropertiesWithExcludedProperties,
3587 204 : rest_runtime_callargs);
3588 19045 : } else if (value_name) {
3589 : builder()->LoadNamedProperty(
3590 18725 : value, value_name, feedback_index(feedback_spec()->AddLoadICSlot()));
3591 : } else {
3592 : DCHECK(value_key.is_valid());
3593 320 : builder()->LoadAccumulatorWithRegister(value_key).LoadKeyedProperty(
3594 320 : value, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
3595 : }
3596 :
3597 : // {<pattern> = <init>} = <value>
3598 : // becomes
3599 : // temp = <value>;
3600 : // <pattern> = temp === undefined ? <init> : temp;
3601 19249 : if (default_value) {
3602 : BytecodeLabel value_not_undefined;
3603 1435 : builder()->JumpIfNotUndefined(&value_not_undefined);
3604 1435 : VisitForAccumulatorValue(default_value);
3605 1435 : builder()->Bind(&value_not_undefined);
3606 : }
3607 :
3608 19249 : BuildAssignment(lhs_data, op, lookup_hoisting_mode);
3609 :
3610 19249 : i++;
3611 : }
3612 :
3613 11134 : if (!execution_result()->IsEffect()) {
3614 193 : builder()->LoadAccumulatorWithRegister(value);
3615 : }
3616 11134 : }
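// For illustration, a hypothetical assignment exercising the paths above: a
// named property with a default, a computed key, and a rest property collected
// by the runtime call:
//
//   let x, obj = {}, rest;
//   const k = "b";
//   ({ a: x = 5, [k]: obj.prop, ...rest } = { b: 1, c: 2 });
//   // x === 5 (no 'a' on the RHS), obj.prop === 1, rest = { c: 2 }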
3617 :
3618 7637712 : void BytecodeGenerator::BuildAssignment(
3619 : const AssignmentLhsData& lhs_data, Token::Value op,
3620 : LookupHoistingMode lookup_hoisting_mode) {
3621 : // Assign the value to the LHS.
3622 7637712 : switch (lhs_data.assign_type()) {
3623 : case NON_PROPERTY: {
3624 10406218 : if (ObjectLiteral* pattern = lhs_data.expr()->AsObjectLiteral()) {
3625 : // Split object literals into destructuring.
3626 11134 : BuildDestructuringObjectAssignment(pattern, op, lookup_hoisting_mode);
3627 5191975 : } else if (ArrayLiteral* pattern = lhs_data.expr()->AsArrayLiteral()) {
3628 : // Split object literals into destructuring.
3629 :         // Split array literals into destructuring.
3630 : } else {
3631 : DCHECK(lhs_data.expr()->IsVariableProxy());
3632 : VariableProxy* proxy = lhs_data.expr()->AsVariableProxy();
3633 5189227 : BuildVariableAssignment(proxy->var(), op, proxy->hole_check_mode(),
3634 5189227 : lookup_hoisting_mode);
3635 : }
3636 : break;
3637 : }
3638 : case NAMED_PROPERTY: {
3639 : BuildStoreNamedProperty(lhs_data.object_expr(), lhs_data.object(),
3640 2400219 : lhs_data.name());
3641 2400218 : break;
3642 : }
3643 : case KEYED_PROPERTY: {
3644 : FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
3645 : Register value;
3646 33803 : if (!execution_result()->IsEffect()) {
3647 1968 : value = register_allocator()->NewRegister();
3648 1968 : builder()->StoreAccumulatorInRegister(value);
3649 : }
3650 : builder()->StoreKeyedProperty(lhs_data.object(), lhs_data.key(),
3651 33803 : feedback_index(slot), language_mode());
3652 33803 : if (!execution_result()->IsEffect()) {
3653 1968 : builder()->LoadAccumulatorWithRegister(value);
3654 : }
3655 : break;
3656 : }
3657 : case NAMED_SUPER_PROPERTY: {
3658 : builder()
3659 220 : ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
3660 220 : .CallRuntime(Runtime::kStoreToSuper, lhs_data.super_property_args());
3661 220 : break;
3662 : }
3663 : case KEYED_SUPER_PROPERTY: {
3664 : builder()
3665 340 : ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
3666 : .CallRuntime(Runtime::kStoreKeyedToSuper,
3667 340 : lhs_data.super_property_args());
3668 340 : break;
3669 : }
3670 : }
3671 7637700 : }
3672 :
3673 7484741 : void BytecodeGenerator::VisitAssignment(Assignment* expr) {
3674 7484741 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
3675 :
3676 7484731 : VisitForAccumulatorValue(expr->value());
3677 :
3678 : builder()->SetExpressionPosition(expr);
3679 7484740 : BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
3680 7484666 : }
3681 :
3682 89416 : void BytecodeGenerator::VisitCompoundAssignment(CompoundAssignment* expr) {
3683 89416 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
3684 :
3685 : // Evaluate the value and potentially handle compound assignments by loading
3686 : // the left-hand side value and performing a binary operation.
3687 89418 : switch (lhs_data.assign_type()) {
3688 : case NON_PROPERTY: {
3689 86752 : VariableProxy* proxy = expr->target()->AsVariableProxy();
3690 86752 : BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
3691 86753 : break;
3692 : }
3693 : case NAMED_PROPERTY: {
3694 : BuildLoadNamedProperty(lhs_data.object_expr(), lhs_data.object(),
3695 1801 : lhs_data.name());
3696 1802 : break;
3697 : }
3698 : case KEYED_PROPERTY: {
3699 : FeedbackSlot slot = feedback_spec()->AddKeyedLoadICSlot();
3700 : builder()
3701 806 : ->LoadAccumulatorWithRegister(lhs_data.key())
3702 806 : .LoadKeyedProperty(lhs_data.object(), feedback_index(slot));
3703 : break;
3704 : }
3705 : case NAMED_SUPER_PROPERTY: {
3706 : builder()->CallRuntime(Runtime::kLoadFromSuper,
3707 20 : lhs_data.super_property_args().Truncate(3));
3708 20 : break;
3709 : }
3710 : case KEYED_SUPER_PROPERTY: {
3711 : builder()->CallRuntime(Runtime::kLoadKeyedFromSuper,
3712 40 : lhs_data.super_property_args().Truncate(3));
3713 40 : break;
3714 : }
3715 : }
3716 89421 : BinaryOperation* binop = expr->AsCompoundAssignment()->binary_operation();
3717 : FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
3718 89418 : if (expr->value()->IsSmiLiteral()) {
3719 : builder()->BinaryOperationSmiLiteral(
3720 : binop->op(), expr->value()->AsLiteral()->AsSmiLiteral(),
3721 67047 : feedback_index(slot));
3722 : } else {
3723 67069 : Register old_value = register_allocator()->NewRegister();
3724 67071 : builder()->StoreAccumulatorInRegister(old_value);
3725 67071 : VisitForAccumulatorValue(expr->value());
3726 67070 : builder()->BinaryOperation(binop->op(), old_value, feedback_index(slot));
3727 : }
3728 :
3729 : builder()->SetExpressionPosition(expr);
3730 89419 : BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
3731 89419 : }
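// For illustration, a hypothetical compound assignment showing why the
// left-hand side is prepared once and then both loaded and stored: the key
// expression is evaluated a single time.
//
//   let i = 0;
//   const arr = [10, 20];
//   arr[i++] += 5;   // i++ runs once; arr is now [15, 20] and i === 1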
3732 :
3733 : // Suspends the generator to resume at the next suspend_id, with output stored
3734 : // in the accumulator. When the generator is resumed, the sent value is loaded
3735 : // into the accumulator.
3736 23944 : void BytecodeGenerator::BuildSuspendPoint(int position) {
3737 23944 : const int suspend_id = suspend_count_++;
3738 :
3739 23944 : RegisterList registers = register_allocator()->AllLiveRegisters();
3740 :
3741 : // Save context, registers, and state. This bytecode then returns the value
3742 : // in the accumulator.
3743 : builder()->SetExpressionPosition(position);
3744 23944 : builder()->SuspendGenerator(generator_object(), registers, suspend_id);
3745 :
3746 : // Upon resume, we continue here.
3747 23944 : builder()->Bind(generator_jump_table_, suspend_id);
3748 :
3749 : // Clobbers all registers and sets the accumulator to the
3750 : // [[input_or_debug_pos]] slot of the generator object.
3751 23944 : builder()->ResumeGenerator(generator_object(), registers);
3752 23944 : }
3753 :
3754 8308 : void BytecodeGenerator::VisitYield(Yield* expr) {
3755 : builder()->SetExpressionPosition(expr);
3756 8308 : VisitForAccumulatorValue(expr->expression());
3757 :
3758 : // If this is not the first yield
3759 8308 : if (suspend_count_ > 0) {
3760 3401 : if (IsAsyncGeneratorFunction(function_kind())) {
3761 : // AsyncGenerator yields (with the exception of the initial yield)
3762 : // delegate work to the AsyncGeneratorYield stub, which Awaits the operand
3763 : // and on success, wraps the value in an IteratorResult.
3764 : RegisterAllocationScope register_scope(this);
3765 352 : RegisterList args = register_allocator()->NewRegisterList(3);
3766 : builder()
3767 352 : ->MoveRegister(generator_object(), args[0]) // generator
3768 352 : .StoreAccumulatorInRegister(args[1]) // value
3769 704 : .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
3770 352 : .StoreAccumulatorInRegister(args[2]) // is_caught
3771 352 : .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
3772 : } else {
3773 : // Generator yields (with the exception of the initial yield) wrap the
3774 :       // value in an IteratorResult.
3775 : RegisterAllocationScope register_scope(this);
3776 3049 : RegisterList args = register_allocator()->NewRegisterList(2);
3777 : builder()
3778 3049 : ->StoreAccumulatorInRegister(args[0]) // value
3779 3049 : .LoadFalse()
3780 3049 : .StoreAccumulatorInRegister(args[1]) // done
3781 3049 : .CallRuntime(Runtime::kInlineCreateIterResultObject, args);
3782 : }
3783 : }
3784 :
3785 8308 : BuildSuspendPoint(expr->position());
3786 : // At this point, the generator has been resumed, with the received value in
3787 : // the accumulator.
3788 :
3789 : // TODO(caitp): remove once yield* desugaring for async generators is handled
3790 : // in BytecodeGenerator.
3791 8308 : if (expr->on_abrupt_resume() == Yield::kNoControl) {
3792 : DCHECK(IsAsyncGeneratorFunction(function_kind()));
3793 0 : return;
3794 : }
3795 :
3796 8308 : Register input = register_allocator()->NewRegister();
3797 8308 : builder()->StoreAccumulatorInRegister(input).CallRuntime(
3798 8308 : Runtime::kInlineGeneratorGetResumeMode, generator_object());
3799 :
3800 : // Now dispatch on resume mode.
3801 : STATIC_ASSERT(JSGeneratorObject::kNext + 1 == JSGeneratorObject::kReturn);
3802 : BytecodeJumpTable* jump_table =
3803 8308 : builder()->AllocateJumpTable(2, JSGeneratorObject::kNext);
3804 :
3805 8308 : builder()->SwitchOnSmiNoFeedback(jump_table);
3806 :
3807 : {
3808 : // Resume with throw (switch fallthrough).
3809 : // TODO(leszeks): Add a debug-only check that the accumulator is
3810 : // JSGeneratorObject::kThrow.
3811 : builder()->SetExpressionPosition(expr);
3812 8308 : builder()->LoadAccumulatorWithRegister(input);
3813 8308 : builder()->Throw();
3814 : }
3815 :
3816 : {
3817 : // Resume with return.
3818 8308 : builder()->Bind(jump_table, JSGeneratorObject::kReturn);
3819 8308 : builder()->LoadAccumulatorWithRegister(input);
3820 8308 : if (IsAsyncGeneratorFunction(function_kind())) {
3821 : execution_control()->AsyncReturnAccumulator();
3822 : } else {
3823 : execution_control()->ReturnAccumulator();
3824 : }
3825 : }
3826 :
3827 : {
3828 : // Resume with next.
3829 8308 : builder()->Bind(jump_table, JSGeneratorObject::kNext);
3830 : BuildIncrementBlockCoverageCounterIfEnabled(expr,
3831 : SourceRangeKind::kContinuation);
3832 8308 : builder()->LoadAccumulatorWithRegister(input);
3833 : }
3834 : }
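// For illustration, a hypothetical generator showing the resume modes
// dispatched on above: next resumes with the sent value, return completes the
// generator, and throw rethrows at the yield:
//
//   function* g() { const received = yield 1; return received; }
//   const it = g();
//   it.next();      // { value: 1, done: false }  (suspended at the yield)
//   it.next(42);    // { value: 42, done: true }  (resumed in "next" mode)
//   // it.return(7) would resume in "return" mode; it.throw(e) in "throw" mode.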
3835 :
3836 : // Desugaring of (yield* iterable)
3837 : //
3838 : // do {
3839 : // const kNext = 0;
3840 : // const kReturn = 1;
3841 : // const kThrow = 2;
3842 : //
3843 : // let output; // uninitialized
3844 : //
3845 : // let iteratorRecord = GetIterator(iterable);
3846 : // let iterator = iteratorRecord.[[Iterator]];
3847 : // let next = iteratorRecord.[[NextMethod]];
3848 : // let input = undefined;
3849 : // let resumeMode = kNext;
3850 : //
3851 : // while (true) {
3852 : // // From the generator to the iterator:
3853 : // // Forward input according to resumeMode and obtain output.
3854 : // switch (resumeMode) {
3855 : // case kNext:
3856 : //       output = next.[[Call]](iterator, « »);
3857 : // break;
3858 : // case kReturn:
3859 : // let iteratorReturn = iterator.return;
3860 : // if (IS_NULL_OR_UNDEFINED(iteratorReturn)) return input;
3861 : // output = iteratorReturn.[[Call]](iterator, «input»);
3862 : // break;
3863 : // case kThrow:
3864 : // let iteratorThrow = iterator.throw;
3865 : // if (IS_NULL_OR_UNDEFINED(iteratorThrow)) {
3866 : // let iteratorReturn = iterator.return;
3867 : // if (!IS_NULL_OR_UNDEFINED(iteratorReturn)) {
3868 : // output = iteratorReturn.[[Call]](iterator, « »);
3869 : // if (IS_ASYNC_GENERATOR) output = await output;
3870 : // if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
3871 : // }
3872 : // throw MakeTypeError(kThrowMethodMissing);
3873 : // }
3874 : // output = iteratorThrow.[[Call]](iterator, «input»);
3875 : // break;
3876 : // }
3877 : //
3878 : // if (IS_ASYNC_GENERATOR) output = await output;
3879 : // if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
3880 : // if (output.done) break;
3881 : //
3882 : // // From the generator to its user:
3883 : // // Forward output, receive new input, and determine resume mode.
3884 : // if (IS_ASYNC_GENERATOR) {
3885 : // // AsyncGeneratorYield abstract operation awaits the operand before
3886 : // // resolving the promise for the current AsyncGeneratorRequest.
3887 : // %_AsyncGeneratorYield(output.value)
3888 : // }
3889 : // input = Suspend(output);
3890 : // resumeMode = %GeneratorGetResumeMode();
3891 : // }
3892 : //
3893 : // if (resumeMode === kReturn) {
3894 : // return output.value;
3895 : // }
3896 : // output.value
3897 : // }
3898 194 : void BytecodeGenerator::VisitYieldStar(YieldStar* expr) {
3899 194 : Register output = register_allocator()->NewRegister();
3900 194 : Register resume_mode = register_allocator()->NewRegister();
3901 : IteratorType iterator_type = IsAsyncGeneratorFunction(function_kind())
3902 : ? IteratorType::kAsync
3903 194 : : IteratorType::kNormal;
3904 :
3905 : {
3906 : RegisterAllocationScope register_scope(this);
3907 194 : RegisterList iterator_and_input = register_allocator()->NewRegisterList(2);
3908 194 : VisitForAccumulatorValue(expr->expression());
3909 : IteratorRecord iterator = BuildGetIteratorRecord(
3910 : register_allocator()->NewRegister() /* next method */,
3911 194 : iterator_and_input[0], iterator_type);
3912 :
3913 194 : Register input = iterator_and_input[1];
3914 194 : builder()->LoadUndefined().StoreAccumulatorInRegister(input);
3915 : builder()
3916 194 : ->LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
3917 194 : .StoreAccumulatorInRegister(resume_mode);
3918 :
3919 : {
3920 : // This loop builder does not construct counters as the loop is not
3921 : // visible to the user, and we therefore neither pass the block coverage
3922 : // builder nor the expression.
3923 : //
3924 : // In addition to the normal suspend for yield*, a yield* in an async
3925 : // generator has 2 additional suspends:
3926 : // - One for awaiting the iterator result of closing the generator when
3927 : // resumed with a "throw" completion, and a throw method is not
3928 : // present on the delegated iterator
3929 : // - One for awaiting the iterator result yielded by the delegated
3930 : // iterator
3931 :
3932 194 : LoopBuilder loop(builder(), nullptr, nullptr);
3933 194 : loop.LoopHeader();
3934 :
3935 : {
3936 : BytecodeLabels after_switch(zone());
3937 : BytecodeJumpTable* switch_jump_table =
3938 194 : builder()->AllocateJumpTable(2, 1);
3939 :
3940 : builder()
3941 194 : ->LoadAccumulatorWithRegister(resume_mode)
3942 194 : .SwitchOnSmiNoFeedback(switch_jump_table);
3943 :
3944 : // Fallthrough to default case.
3945 : // TODO(tebbi): Add debug code to check that {resume_mode} really is
3946 : // {JSGeneratorObject::kNext} in this case.
3947 : STATIC_ASSERT(JSGeneratorObject::kNext == 0);
3948 : {
3949 : FeedbackSlot slot = feedback_spec()->AddCallICSlot();
3950 : builder()->CallProperty(iterator.next(), iterator_and_input,
3951 194 : feedback_index(slot));
3952 194 : builder()->Jump(after_switch.New());
3953 : }
3954 :
3955 : STATIC_ASSERT(JSGeneratorObject::kReturn == 1);
3956 194 : builder()->Bind(switch_jump_table, JSGeneratorObject::kReturn);
3957 : {
3958 : const AstRawString* return_string =
3959 : ast_string_constants()->return_string();
3960 : BytecodeLabels no_return_method(zone());
3961 :
3962 194 : BuildCallIteratorMethod(iterator.object(), return_string,
3963 : iterator_and_input, after_switch.New(),
3964 194 : &no_return_method);
3965 194 : no_return_method.Bind(builder());
3966 194 : builder()->LoadAccumulatorWithRegister(input);
3967 194 : if (iterator_type == IteratorType::kAsync) {
3968 : execution_control()->AsyncReturnAccumulator();
3969 : } else {
3970 : execution_control()->ReturnAccumulator();
3971 : }
3972 : }
3973 :
3974 : STATIC_ASSERT(JSGeneratorObject::kThrow == 2);
3975 194 : builder()->Bind(switch_jump_table, JSGeneratorObject::kThrow);
3976 : {
3977 : const AstRawString* throw_string =
3978 : ast_string_constants()->throw_string();
3979 : BytecodeLabels no_throw_method(zone());
3980 194 : BuildCallIteratorMethod(iterator.object(), throw_string,
3981 : iterator_and_input, after_switch.New(),
3982 194 : &no_throw_method);
3983 :
3984 : // If there is no "throw" method, perform IteratorClose, and finally
3985 : // throw a TypeError.
3986 194 : no_throw_method.Bind(builder());
3987 194 : BuildIteratorClose(iterator, expr);
3988 194 : builder()->CallRuntime(Runtime::kThrowThrowMethodMissing);
3989 : }
3990 :
3991 194 : after_switch.Bind(builder());
3992 : }
3993 :
3994 194 : if (iterator_type == IteratorType::kAsync) {
3995 : // Await the result of the method invocation.
3996 10 : BuildAwait(expr->position());
3997 : }
3998 :
3999 : // Check that output is an object.
4000 : BytecodeLabel check_if_done;
4001 : builder()
4002 194 : ->StoreAccumulatorInRegister(output)
4003 194 : .JumpIfJSReceiver(&check_if_done)
4004 194 : .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, output);
4005 :
4006 194 : builder()->Bind(&check_if_done);
4007 : // Break once output.done is true.
4008 : builder()->LoadNamedProperty(
4009 : output, ast_string_constants()->done_string(),
4010 194 : feedback_index(feedback_spec()->AddLoadICSlot()));
4011 :
4012 : loop.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
4013 :
4014 : // Suspend the current generator.
4015 194 : if (iterator_type == IteratorType::kNormal) {
4016 184 : builder()->LoadAccumulatorWithRegister(output);
4017 : } else {
4018 : RegisterAllocationScope register_scope(this);
4019 : DCHECK_EQ(iterator_type, IteratorType::kAsync);
4020 : // If generatorKind is async, perform AsyncGeneratorYield(output.value),
4021 : // which will await `output.value` before resolving the current
4022 : // AsyncGeneratorRequest's promise.
4023 : builder()->LoadNamedProperty(
4024 : output, ast_string_constants()->value_string(),
4025 10 : feedback_index(feedback_spec()->AddLoadICSlot()));
4026 :
4027 10 : RegisterList args = register_allocator()->NewRegisterList(3);
4028 : builder()
4029 10 : ->MoveRegister(generator_object(), args[0]) // generator
4030 10 : .StoreAccumulatorInRegister(args[1]) // value
4031 20 : .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
4032 10 : .StoreAccumulatorInRegister(args[2]) // is_caught
4033 10 : .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
4034 : }
4035 :
4036 194 : BuildSuspendPoint(expr->position());
4037 194 : builder()->StoreAccumulatorInRegister(input);
4038 : builder()
4039 : ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode,
4040 194 : generator_object())
4041 194 : .StoreAccumulatorInRegister(resume_mode);
4042 :
4043 194 : loop.BindContinueTarget();
4044 194 : loop.JumpToHeader(loop_depth_);
4045 : }
4046 : }
4047 :
4048 : // Decide if we trigger a return or if the yield* expression should just
4049 : // produce a value.
4050 : BytecodeLabel completion_is_output_value;
4051 194 : Register output_value = register_allocator()->NewRegister();
4052 : builder()
4053 : ->LoadNamedProperty(output, ast_string_constants()->value_string(),
4054 194 : feedback_index(feedback_spec()->AddLoadICSlot()))
4055 194 : .StoreAccumulatorInRegister(output_value)
4056 194 : .LoadLiteral(Smi::FromInt(JSGeneratorObject::kReturn))
4057 194 : .CompareReference(resume_mode)
4058 194 : .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &completion_is_output_value)
4059 194 : .LoadAccumulatorWithRegister(output_value);
4060 194 : if (iterator_type == IteratorType::kAsync) {
4061 : execution_control()->AsyncReturnAccumulator();
4062 : } else {
4063 : execution_control()->ReturnAccumulator();
4064 : }
4065 :
4066 194 : builder()->Bind(&completion_is_output_value);
4067 : BuildIncrementBlockCoverageCounterIfEnabled(expr,
4068 : SourceRangeKind::kContinuation);
4069 194 : builder()->LoadAccumulatorWithRegister(output_value);
4070 194 : }
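// For illustration, a hypothetical delegation matching the desugaring above:
// values from the inner iterator are forwarded, and the inner return value
// becomes the value of the yield* expression:
//
//   function* inner() { yield 1; yield 2; return 3; }
//   function* outer() { const r = yield* inner(); yield r; }
//   [...outer()];   // [1, 2, 3]: 1 and 2 are forwarded, then r === 3 is yielded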
4071 :
4072 15442 : void BytecodeGenerator::BuildAwait(int position) {
4073 : // Rather than HandlerTable::UNCAUGHT, async functions use
4074 : // HandlerTable::ASYNC_AWAIT to communicate that top-level exceptions are
4075 : // transformed into promise rejections. This is necessary to prevent emitting
4076 : // multiple debug events for the same uncaught exception. There is no point
4077 : // in the body of an async function where catch prediction is
4078 : // HandlerTable::UNCAUGHT.
4079 : DCHECK(catch_prediction() != HandlerTable::UNCAUGHT);
4080 :
4081 : {
4082 : // Await(operand) and suspend.
4083 : RegisterAllocationScope register_scope(this);
4084 :
4085 : Runtime::FunctionId await_intrinsic_id;
4086 15442 : if (IsAsyncGeneratorFunction(function_kind())) {
4087 : await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
4088 : ? Runtime::kInlineAsyncGeneratorAwaitUncaught
4089 1197 : : Runtime::kInlineAsyncGeneratorAwaitCaught;
4090 : } else {
4091 : await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
4092 : ? Runtime::kInlineAsyncFunctionAwaitUncaught
4093 14245 : : Runtime::kInlineAsyncFunctionAwaitCaught;
4094 : }
4095 15442 : RegisterList args = register_allocator()->NewRegisterList(2);
4096 : builder()
4097 15442 : ->MoveRegister(generator_object(), args[0])
4098 15442 : .StoreAccumulatorInRegister(args[1])
4099 15442 : .CallRuntime(await_intrinsic_id, args);
4100 : }
4101 :
4102 15442 : BuildSuspendPoint(position);
4103 :
4104 15442 : Register input = register_allocator()->NewRegister();
4105 15442 : Register resume_mode = register_allocator()->NewRegister();
4106 :
4107 : // Now dispatch on resume mode.
4108 : BytecodeLabel resume_next;
4109 : builder()
4110 15442 : ->StoreAccumulatorInRegister(input)
4111 15442 : .CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator_object())
4112 15442 : .StoreAccumulatorInRegister(resume_mode)
4113 15442 : .LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
4114 15442 : .CompareReference(resume_mode)
4115 15442 : .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &resume_next);
4116 :
4117 : // Resume with "throw" completion (rethrow the received value).
4118 : // TODO(leszeks): Add a debug-only check that the accumulator is
4119 : // JSGeneratorObject::kThrow.
4120 15442 : builder()->LoadAccumulatorWithRegister(input).ReThrow();
4121 :
4122 : // Resume with next.
4123 15442 : builder()->Bind(&resume_next);
4124 15442 : builder()->LoadAccumulatorWithRegister(input);
4125 15442 : }
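// Illustrative example (not part of the original source) of the two resume
// modes dispatched on above, in terms of observable behaviour:
//   async function f(p) { return await p; }
//   f(Promise.resolve(1));              // resumed with kNext: await yields 1
//   f(Promise.reject(new Error("x")));  // resumed with kThrow: the rejection
//                                       // value is rethrown at the await site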
4126 :
4127 14740 : void BytecodeGenerator::VisitAwait(Await* expr) {
4128 : builder()->SetExpressionPosition(expr);
4129 14740 : VisitForAccumulatorValue(expr->expression());
4130 14740 : BuildAwait(expr->position());
4131 : BuildIncrementBlockCoverageCounterIfEnabled(expr,
4132 : SourceRangeKind::kContinuation);
4133 14740 : }
4134 :
4135 20539 : void BytecodeGenerator::VisitThrow(Throw* expr) {
4136 : AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kContinuation);
4137 20539 : VisitForAccumulatorValue(expr->exception());
4138 : builder()->SetExpressionPosition(expr);
4139 20539 : builder()->Throw();
4140 20541 : }
4141 :
4142 2584016 : void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* property) {
4143 2584016 : AssignType property_kind = Property::GetAssignType(property);
4144 2584047 : switch (property_kind) {
4145 : case NON_PROPERTY:
4146 0 : UNREACHABLE();
4147 : case NAMED_PROPERTY: {
4148 : builder()->SetExpressionPosition(property);
4149 : const AstRawString* name =
4150 2444486 : property->key()->AsLiteral()->AsRawPropertyName();
4151 2444486 : BuildLoadNamedProperty(property->obj(), obj, name);
4152 2444535 : break;
4153 : }
4154 : case KEYED_PROPERTY: {
4155 138788 : VisitForAccumulatorValue(property->key());
4156 : builder()->SetExpressionPosition(property);
4157 : builder()->LoadKeyedProperty(
4158 138788 : obj, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
4159 138788 : break;
4160 : }
4161 : case NAMED_SUPER_PROPERTY:
4162 417 : VisitNamedSuperPropertyLoad(property, Register::invalid_value());
4163 417 : break;
4164 : case KEYED_SUPER_PROPERTY:
4165 360 : VisitKeyedSuperPropertyLoad(property, Register::invalid_value());
4166 360 : break;
4167 : }
4168 2584096 : }
4169 :
4170 826453 : void BytecodeGenerator::VisitPropertyLoadForRegister(Register obj,
4171 : Property* expr,
4172 : Register destination) {
4173 : ValueResultScope result_scope(this);
4174 826453 : VisitPropertyLoad(obj, expr);
4175 826494 : builder()->StoreAccumulatorInRegister(destination);
4176 826489 : }
4177 :
4178 896 : void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property,
4179 : Register opt_receiver_out) {
4180 : RegisterAllocationScope register_scope(this);
4181 : SuperPropertyReference* super_property =
4182 896 : property->obj()->AsSuperPropertyReference();
4183 896 : RegisterList args = register_allocator()->NewRegisterList(3);
4184 896 : BuildThisVariableLoad();
4185 896 : builder()->StoreAccumulatorInRegister(args[0]);
4186 : VisitForRegisterValue(super_property->home_object(), args[1]);
4187 :
4188 : builder()->SetExpressionPosition(property);
4189 : builder()
4190 1792 : ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
4191 896 : .StoreAccumulatorInRegister(args[2])
4192 896 : .CallRuntime(Runtime::kLoadFromSuper, args);
4193 :
4194 896 : if (opt_receiver_out.is_valid()) {
4195 479 : builder()->MoveRegister(args[0], opt_receiver_out);
4196 : }
4197 896 : }
4198 :
4199 400 : void BytecodeGenerator::VisitKeyedSuperPropertyLoad(Property* property,
4200 : Register opt_receiver_out) {
4201 : RegisterAllocationScope register_scope(this);
4202 : SuperPropertyReference* super_property =
4203 400 : property->obj()->AsSuperPropertyReference();
4204 400 : RegisterList args = register_allocator()->NewRegisterList(3);
4205 400 : BuildThisVariableLoad();
4206 400 : builder()->StoreAccumulatorInRegister(args[0]);
4207 : VisitForRegisterValue(super_property->home_object(), args[1]);
4208 : VisitForRegisterValue(property->key(), args[2]);
4209 :
4210 : builder()->SetExpressionPosition(property);
4211 400 : builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, args);
4212 :
4213 400 : if (opt_receiver_out.is_valid()) {
4214 40 : builder()->MoveRegister(args[0], opt_receiver_out);
4215 : }
4216 400 : }
4217 :
4218 1757581 : void BytecodeGenerator::VisitProperty(Property* expr) {
4219 1757581 : AssignType property_kind = Property::GetAssignType(expr);
4220 1757594 : if (property_kind != NAMED_SUPER_PROPERTY &&
4221 : property_kind != KEYED_SUPER_PROPERTY) {
4222 1756817 : Register obj = VisitForRegisterValue(expr->obj());
4223 1756832 : VisitPropertyLoad(obj, expr);
4224 : } else {
4225 777 : VisitPropertyLoad(Register::invalid_value(), expr);
4226 : }
4227 1757602 : }
4228 :
4229 0 : void BytecodeGenerator::VisitResolvedProperty(ResolvedProperty* expr) {
4230 : // Handled by VisitCall().
4231 0 : UNREACHABLE();
4232 : }
4233 :
4234 0 : void BytecodeGenerator::VisitArguments(const ZonePtrList<Expression>* args,
4235 : RegisterList* arg_regs) {
4236 : // Visit arguments.
4237 17684218 : for (int i = 0; i < static_cast<int>(args->length()); i++) {
4238 6144508 : VisitAndPushIntoRegisterList(args->at(i), arg_regs);
4239 : }
4240 0 : }
4241 :
4242 5180559 : void BytecodeGenerator::VisitCall(Call* expr) {
4243 : Expression* callee_expr = expr->expression();
4244 5180559 : Call::CallType call_type = expr->GetCallType();
4245 :
4246 5180577 : if (call_type == Call::SUPER_CALL) {
4247 4181 : return VisitCallSuper(expr);
4248 : }
4249 :
4250 : // Grow the args list as we visit receiver / arguments to avoid allocating all
4251 : // the registers up-front. Otherwise these registers are unavailable during
4252 : // receiver / argument visiting and we can end up with memory leaks due to
4253 : // registers keeping objects alive.
4254 5176396 : Register callee = register_allocator()->NewRegister();
4255 5176355 : RegisterList args = register_allocator()->NewGrowableRegisterList();
4256 :
4257 : bool implicit_undefined_receiver = false;
4258 : // When a call contains a spread, a Call AST node is only created if there is
4259 : // exactly one spread, and it is the last argument.
4260 : bool is_spread_call = expr->only_last_arg_is_spread();
4261 : bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
4262 :
4263 : // TODO(petermarshall): We have a lot of call bytecodes that are very similar,
4264 : // see if we can reduce the number by adding a separate argument which
4265 : // specifies the call type (e.g., property, spread, tailcall, etc.).
4266 :
 4267             :   // Prepare the callee and the receiver for the function call. This depends on
4268 : // the semantics of the underlying call type.
4269 5176355 : switch (call_type) {
4270 : case Call::NAMED_PROPERTY_CALL:
4271 : case Call::KEYED_PROPERTY_CALL: {
4272 826473 : Property* property = callee_expr->AsProperty();
4273 826473 : VisitAndPushIntoRegisterList(property->obj(), &args);
4274 826476 : VisitPropertyLoadForRegister(args.last_register(), property, callee);
4275 826475 : break;
4276 : }
4277 : case Call::RESOLVED_PROPERTY_CALL: {
4278 0 : ResolvedProperty* resolved = callee_expr->AsResolvedProperty();
4279 0 : VisitAndPushIntoRegisterList(resolved->object(), &args);
4280 0 : VisitForAccumulatorValue(resolved->property());
4281 0 : builder()->StoreAccumulatorInRegister(callee);
4282 0 : break;
4283 : }
4284 : case Call::GLOBAL_CALL: {
4285 : // Receiver is undefined for global calls.
4286 3809576 : if (!is_spread_call && !optimize_as_one_shot) {
4287 : implicit_undefined_receiver = true;
4288 : } else {
4289 : // TODO(leszeks): There's no special bytecode for tail calls or spread
4290 : // calls with an undefined receiver, so just push undefined ourselves.
4291 704734 : BuildPushUndefinedIntoRegisterList(&args);
4292 : }
4293 : // Load callee as a global variable.
4294 3809585 : VariableProxy* proxy = callee_expr->AsVariableProxy();
4295 : BuildVariableLoadForAccumulatorValue(proxy->var(),
4296 3809585 : proxy->hole_check_mode());
4297 3809587 : builder()->StoreAccumulatorInRegister(callee);
4298 3809586 : break;
4299 : }
4300 : case Call::WITH_CALL: {
4301 4550 : Register receiver = register_allocator()->GrowRegisterList(&args);
4302 : DCHECK(callee_expr->AsVariableProxy()->var()->IsLookupSlot());
4303 : {
4304 : RegisterAllocationScope inner_register_scope(this);
4305 4549 : Register name = register_allocator()->NewRegister();
4306 :
4307 : // Call %LoadLookupSlotForCall to get the callee and receiver.
4308 4550 : RegisterList result_pair = register_allocator()->NewRegisterList(2);
4309 4549 : Variable* variable = callee_expr->AsVariableProxy()->var();
4310 : builder()
4311 4549 : ->LoadLiteral(variable->raw_name())
4312 4550 : .StoreAccumulatorInRegister(name)
4313 : .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name,
4314 4550 : result_pair)
4315 4550 : .MoveRegister(result_pair[0], callee)
4316 4550 : .MoveRegister(result_pair[1], receiver);
4317 : }
4318 : break;
4319 : }
4320 : case Call::OTHER_CALL: {
4321 : // Receiver is undefined for other calls.
4322 535302 : if (!is_spread_call && !optimize_as_one_shot) {
4323 : implicit_undefined_receiver = true;
4324 : } else {
4325 : // TODO(leszeks): There's no special bytecode for tail calls or spread
4326 : // calls with an undefined receiver, so just push undefined ourselves.
4327 227283 : BuildPushUndefinedIntoRegisterList(&args);
4328 : }
4329 : VisitForRegisterValue(callee_expr, callee);
4330 : break;
4331 : }
4332 : case Call::NAMED_SUPER_PROPERTY_CALL: {
4333 479 : Register receiver = register_allocator()->GrowRegisterList(&args);
4334 479 : Property* property = callee_expr->AsProperty();
4335 479 : VisitNamedSuperPropertyLoad(property, receiver);
4336 479 : builder()->StoreAccumulatorInRegister(callee);
4337 : break;
4338 : }
4339 : case Call::KEYED_SUPER_PROPERTY_CALL: {
4340 40 : Register receiver = register_allocator()->GrowRegisterList(&args);
4341 40 : Property* property = callee_expr->AsProperty();
4342 40 : VisitKeyedSuperPropertyLoad(property, receiver);
4343 40 : builder()->StoreAccumulatorInRegister(callee);
4344 : break;
4345 : }
4346 : case Call::SUPER_CALL:
4347 0 : UNREACHABLE();
4348 : break;
4349 : }
4350 :
4351 : // Evaluate all arguments to the function call and store in sequential args
4352 : // registers.
4353 : VisitArguments(expr->arguments(), &args);
 4354     5176406 :   int receiver_arg_count = implicit_undefined_receiver ? 0 : 1;
 4355     5176406 :   CHECK_EQ(receiver_arg_count + expr->arguments()->length(),
4356 : args.register_count());
4357 :
4358 : // Resolve callee for a potential direct eval call. This block will mutate the
4359 : // callee value.
4360 5176406 : if (expr->is_possibly_eval() && expr->arguments()->length() > 0) {
4361 : RegisterAllocationScope inner_register_scope(this);
 4362             :     // Set up arguments for ResolvePossiblyDirectEval by copying the callee, the
 4363             :     // source string, and the function closure, and loading the language mode
 4364             :     // and the source positions.
 4365      106742 :     Register first_arg = args[receiver_arg_count];
4366 106742 : RegisterList runtime_call_args = register_allocator()->NewRegisterList(6);
4367 : builder()
4368 106742 : ->MoveRegister(callee, runtime_call_args[0])
4369 106742 : .MoveRegister(first_arg, runtime_call_args[1])
4370 213484 : .MoveRegister(Register::function_closure(), runtime_call_args[2])
4371 106742 : .LoadLiteral(Smi::FromEnum(language_mode()))
4372 106742 : .StoreAccumulatorInRegister(runtime_call_args[3])
4373 106742 : .LoadLiteral(Smi::FromInt(current_scope()->start_position()))
4374 106742 : .StoreAccumulatorInRegister(runtime_call_args[4])
4375 106742 : .LoadLiteral(Smi::FromInt(expr->position()))
4376 106742 : .StoreAccumulatorInRegister(runtime_call_args[5]);
4377 :
4378 : // Call ResolvePossiblyDirectEval and modify the callee.
4379 : builder()
4380 106742 : ->CallRuntime(Runtime::kResolvePossiblyDirectEval, runtime_call_args)
4381 106742 : .StoreAccumulatorInRegister(callee);
4382 : }
4383 :
4384 : builder()->SetExpressionPosition(expr);
4385 :
4386 5176406 : if (is_spread_call) {
4387 : DCHECK(!implicit_undefined_receiver);
4388 : builder()->CallWithSpread(callee, args,
4389 1695 : feedback_index(feedback_spec()->AddCallICSlot()));
4390 5174711 : } else if (optimize_as_one_shot) {
4391 : DCHECK(!implicit_undefined_receiver);
4392 1245682 : builder()->CallNoFeedback(callee, args);
4393 7858058 : } else if (call_type == Call::NAMED_PROPERTY_CALL ||
4394 3929029 : call_type == Call::KEYED_PROPERTY_CALL ||
4395 3929029 : call_type == Call::RESOLVED_PROPERTY_CALL) {
4396 : DCHECK(!implicit_undefined_receiver);
4397 : builder()->CallProperty(callee, args,
4398 512884 : feedback_index(feedback_spec()->AddCallICSlot()));
4399 3416144 : } else if (implicit_undefined_receiver) {
4400 : builder()->CallUndefinedReceiver(
4401 3412859 : callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
4402 : } else {
4403 : builder()->CallAnyReceiver(
4404 3285 : callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
4405 : }
4406 : }
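// Illustrative summary (not part of the original source) of how the receiver
// and callee are prepared in the cases above:
//   o.f(x);     // NAMED_PROPERTY_CALL: receiver is o, callee loaded from o.f
//   f(x);       // GLOBAL_CALL / OTHER_CALL: receiver is implicitly undefined
//               // (or pushed explicitly for spread / one-shot calls)
//   f(...xs);   // spread as last argument: emitted as CallWithSpread
//   eval(src);  // possibly-direct eval: the callee is first rewritten via
//               // Runtime::kResolvePossiblyDirectEval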
4407 :
4408 4181 : void BytecodeGenerator::VisitCallSuper(Call* expr) {
4409 : RegisterAllocationScope register_scope(this);
4410 4181 : SuperCallReference* super = expr->expression()->AsSuperCallReference();
4411 : const ZonePtrList<Expression>* args = expr->arguments();
4412 :
4413 : int first_spread_index = 0;
4414 5423 : for (; first_spread_index < args->length(); first_spread_index++) {
4415 6910 : if (args->at(first_spread_index)->IsSpread()) break;
4416 : }
4417 :
 4418             :   // Prepare the constructor for the super call.
4419 4181 : Register this_function = VisitForRegisterValue(super->this_function_var());
4420 4181 : Register constructor = register_allocator()->NewRegister();
4421 : builder()
4422 4181 : ->LoadAccumulatorWithRegister(this_function)
4423 4181 : .GetSuperConstructor(constructor);
4424 :
4425 4181 : if (first_spread_index < expr->arguments()->length() - 1) {
4426 : // We rewrite something like
4427 : // super(1, ...x, 2)
4428 : // to
4429 : // %reflect_construct(constructor, [1, ...x, 2], new_target)
4430 : // That is, we implement (non-last-arg) spreads in super calls via our
4431 : // mechanism for spreads in array literals.
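    // Illustrative example (not part of the original source), assuming a
    // hypothetical Base/Derived pair:
    //   class Base { constructor(...a) { this.a = a; } }
    //   class Derived extends Base { constructor(xs) { super(1, ...xs, 2); } }
    //   new Derived([7, 8]).a  // => [1, 7, 8, 2], as if constructed via
    //                          // Reflect.construct(Base, [1, 7, 8, 2], new.target)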
4432 :
4433 : // First generate the array containing all arguments.
4434 30 : BuildCreateArrayLiteral(args, nullptr);
4435 :
4436 : // Now pass that array to %reflect_construct.
4437 30 : RegisterList construct_args = register_allocator()->NewRegisterList(3);
4438 30 : builder()->StoreAccumulatorInRegister(construct_args[1]);
4439 30 : builder()->MoveRegister(constructor, construct_args[0]);
4440 : VisitForRegisterValue(super->new_target_var(), construct_args[2]);
4441 30 : builder()->CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, construct_args);
4442 : } else {
4443 4151 : RegisterList args_regs = register_allocator()->NewGrowableRegisterList();
4444 : VisitArguments(args, &args_regs);
4445 : // The new target is loaded into the accumulator from the
4446 : // {new.target} variable.
4447 4151 : VisitForAccumulatorValue(super->new_target_var());
4448 : builder()->SetExpressionPosition(expr);
4449 :
4450 : int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
4451 :
4452 4151 : if (first_spread_index == expr->arguments()->length() - 1) {
4453 : builder()->ConstructWithSpread(constructor, args_regs,
4454 2804 : feedback_slot_index);
4455 : } else {
4456 : DCHECK_EQ(first_spread_index, expr->arguments()->length());
4457 : // Call construct.
4458 : // TODO(turbofan): For now we do gather feedback on super constructor
4459 : // calls, utilizing the existing machinery to inline the actual call
4460 : // target and the JSCreate for the implicit receiver allocation. This
4461 : // is not an ideal solution for super constructor calls, but it gets
4462 : // the job done for now. In the long run we might want to revisit this
4463 : // and come up with a better way.
4464 1347 : builder()->Construct(constructor, args_regs, feedback_slot_index);
4465 : }
4466 : }
4467 :
4468 : // Explicit calls to the super constructor using super() perform an
4469 : // implicit binding assignment to the 'this' variable.
4470 : //
 4471             :   // Default constructors don't have to do the assignment because
4472 : // 'this' isn't accessed in default constructors.
4473 8362 : if (!IsDefaultConstructor(info()->literal()->kind())) {
4474 2557 : Variable* var = closure_scope()->GetReceiverScope()->receiver();
4475 2557 : BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kRequired);
4476 : }
4477 :
 4478             :   // The derived constructor always has the correct bit set, so we don't
 4479             :   // emit code to load and call the initializer when it is not
 4480             :   // required.
4481 : //
4482 : // For the arrow function or eval case, we always emit code to load
4483 : // and call the initializer.
4484 : //
4485 : // TODO(gsathya): In the future, we could tag nested arrow functions
4486 : // or eval with the correct bit so that we do the load conditionally
4487 : // if required.
4488 8137 : if (info()->literal()->requires_instance_members_initializer() ||
4489 3956 : !IsDerivedConstructor(info()->literal()->kind())) {
4490 356 : Register instance = register_allocator()->NewRegister();
4491 356 : builder()->StoreAccumulatorInRegister(instance);
4492 356 : BuildInstanceMemberInitialization(this_function, instance);
4493 356 : builder()->LoadAccumulatorWithRegister(instance);
4494 : }
4495 4181 : }
4496 :
4497 145892 : void BytecodeGenerator::VisitCallNew(CallNew* expr) {
4498 145892 : Register constructor = VisitForRegisterValue(expr->expression());
4499 145901 : RegisterList args = register_allocator()->NewGrowableRegisterList();
4500 : VisitArguments(expr->arguments(), &args);
4501 :
4502 : // The accumulator holds new target which is the same as the
4503 : // constructor for CallNew.
4504 : builder()->SetExpressionPosition(expr);
4505 145908 : builder()->LoadAccumulatorWithRegister(constructor);
4506 :
4507 : int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
4508 145901 : if (expr->only_last_arg_is_spread()) {
4509 176 : builder()->ConstructWithSpread(constructor, args, feedback_slot_index);
4510 : } else {
4511 145725 : builder()->Construct(constructor, args, feedback_slot_index);
4512 : }
4513 145910 : }
4514 :
4515 68726 : void BytecodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4516 68726 : if (expr->is_jsruntime()) {
4517 381 : RegisterList args = register_allocator()->NewGrowableRegisterList();
4518 : VisitArguments(expr->arguments(), &args);
4519 383 : builder()->CallJSRuntime(expr->context_index(), args);
4520 : } else {
4521 : // Evaluate all arguments to the runtime call.
4522 68345 : RegisterList args = register_allocator()->NewGrowableRegisterList();
4523 : VisitArguments(expr->arguments(), &args);
4524 68344 : Runtime::FunctionId function_id = expr->function()->function_id;
4525 68344 : builder()->CallRuntime(function_id, args);
4526 : }
4527 68727 : }
4528 :
4529 0 : void BytecodeGenerator::VisitVoid(UnaryOperation* expr) {
4530 3337 : VisitForEffect(expr->expression());
4531 3337 : builder()->LoadUndefined();
4532 0 : }
4533 :
4534 164003 : void BytecodeGenerator::VisitForTypeOfValue(Expression* expr) {
4535 164003 : if (expr->IsVariableProxy()) {
4536 : // Typeof does not throw a reference error on global variables, hence we
4537 : // perform a non-contextual load in case the operand is a variable proxy.
4538 158237 : VariableProxy* proxy = expr->AsVariableProxy();
4539 : BuildVariableLoadForAccumulatorValue(proxy->var(), proxy->hole_check_mode(),
4540 158237 : INSIDE_TYPEOF);
4541 : } else {
4542 5766 : VisitForAccumulatorValue(expr);
4543 : }
4544 164006 : }
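// Illustrative example (not part of the original source) of why the
// INSIDE_TYPEOF load matters:
//   typeof notDeclared === "undefined"  // true, no ReferenceError is thrown
//   notDeclared === undefined           // would throw a ReferenceError,
//                                       // because that load is contextual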
4545 :
4546 0 : void BytecodeGenerator::VisitTypeOf(UnaryOperation* expr) {
4547 63860 : VisitForTypeOfValue(expr->expression());
4548 63861 : builder()->TypeOf();
4549 0 : }
4550 :
4551 276594 : void BytecodeGenerator::VisitNot(UnaryOperation* expr) {
4552 276594 : if (execution_result()->IsEffect()) {
4553 154 : VisitForEffect(expr->expression());
4554 276440 : } else if (execution_result()->IsTest()) {
 4555             :     // No actual logical negation happens here; we just invert the control
 4556             :     // flow by swapping the target labels and the fallthrough branch, and
 4557             :     // visit in the same test result context.
4558 : TestResultScope* test_result = execution_result()->AsTest();
4559 : test_result->InvertControlFlow();
4560 246713 : VisitInSameTestExecutionScope(expr->expression());
4561 : } else {
4562 29727 : TypeHint type_hint = VisitForAccumulatorValue(expr->expression());
4563 29728 : builder()->LogicalNot(ToBooleanModeFromTypeHint(type_hint));
4564 : // Always returns a boolean value.
4565 : execution_result()->SetResultIsBoolean();
4566 : }
4567 276600 : }
4568 :
4569 414453 : void BytecodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4570 414453 : switch (expr->op()) {
4571 : case Token::Value::NOT:
4572 276594 : VisitNot(expr);
4573 276597 : break;
4574 : case Token::Value::TYPEOF:
4575 : VisitTypeOf(expr);
4576 : break;
4577 : case Token::Value::VOID:
4578 : VisitVoid(expr);
4579 : break;
4580 : case Token::Value::DELETE:
4581 5980 : VisitDelete(expr);
4582 5980 : break;
4583 : case Token::Value::ADD:
4584 : case Token::Value::SUB:
4585 : case Token::Value::BIT_NOT:
4586 64682 : VisitForAccumulatorValue(expr->expression());
4587 : builder()->SetExpressionPosition(expr);
4588 : builder()->UnaryOperation(
4589 64682 : expr->op(), feedback_index(feedback_spec()->AddBinaryOpICSlot()));
4590 64682 : break;
4591 : default:
4592 0 : UNREACHABLE();
4593 : }
4594 414458 : }
4595 :
4596 5980 : void BytecodeGenerator::VisitDelete(UnaryOperation* unary) {
4597 : Expression* expr = unary->expression();
4598 5980 : if (expr->IsProperty()) {
4599 : // Delete of an object property is allowed both in sloppy
4600 : // and strict modes.
4601 4416 : Property* property = expr->AsProperty();
4602 4416 : Register object = VisitForRegisterValue(property->obj());
4603 4416 : VisitForAccumulatorValue(property->key());
4604 4416 : builder()->Delete(object, language_mode());
4605 2374 : } else if (expr->IsVariableProxy() &&
4606 810 : !expr->AsVariableProxy()->is_new_target()) {
4607 : // Delete of an unqualified identifier is allowed in sloppy mode but is
4608 : // not allowed in strict mode.
4609 : DCHECK(is_sloppy(language_mode()));
4610 798 : Variable* variable = expr->AsVariableProxy()->var();
4611 798 : switch (variable->location()) {
4612 : case VariableLocation::PARAMETER:
4613 : case VariableLocation::LOCAL:
4614 : case VariableLocation::CONTEXT: {
4615 : // Deleting local var/let/const, context variables, and arguments
4616 : // does not have any effect.
4617 166 : builder()->LoadFalse();
4618 166 : break;
4619 : }
4620 : case VariableLocation::UNALLOCATED:
4621 : // TODO(adamk): Falling through to the runtime results in correct
4622 : // behavior, but does unnecessary context-walking (since scope
4623 : // analysis has already proven that the variable doesn't exist in
4624 : // any non-global scope). Consider adding a DeleteGlobal bytecode
4625 : // that knows how to deal with ScriptContexts as well as global
4626 : // object properties.
4627 : case VariableLocation::LOOKUP: {
4628 632 : Register name_reg = register_allocator()->NewRegister();
4629 : builder()
4630 632 : ->LoadLiteral(variable->raw_name())
4631 632 : .StoreAccumulatorInRegister(name_reg)
4632 632 : .CallRuntime(Runtime::kDeleteLookupSlot, name_reg);
4633 : break;
4634 : }
4635 : default:
4636 0 : UNREACHABLE();
4637 : }
4638 : } else {
 4639             :     // Deleting an unresolvable reference, new.target, or 'this' returns true.
4640 766 : VisitForEffect(expr);
4641 766 : builder()->LoadTrue();
4642 : }
4643 5980 : }
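// Illustrative example (not part of the original source) of the cases above,
// in sloppy mode (strict mode rejects unqualified delete at parse time):
//   const o = {p: 1};
//   delete o.p;                          // property delete: true
//   function f(x) { return delete x; }   // parameter/local/context: false
//   delete someUnqualifiedName;          // unallocated/lookup: goes through
//                                        // Runtime::kDeleteLookupSlot
//   delete this;                         // neither property nor variable: true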
4644 :
4645 245055 : void BytecodeGenerator::VisitCountOperation(CountOperation* expr) {
4646 : DCHECK(expr->expression()->IsValidReferenceExpression());
4647 :
4648 : // Left-hand side can only be a property, a global or a variable slot.
4649 245055 : Property* property = expr->expression()->AsProperty();
4650 245055 : AssignType assign_type = Property::GetAssignType(property);
4651 :
4652 245057 : bool is_postfix = expr->is_postfix() && !execution_result()->IsEffect();
4653 :
4654 : // Evaluate LHS expression and get old value.
4655 : Register object, key, old_value;
4656 : RegisterList super_property_args;
4657 : const AstRawString* name;
4658 245057 : switch (assign_type) {
4659 : case NON_PROPERTY: {
4660 233507 : VariableProxy* proxy = expr->expression()->AsVariableProxy();
4661 : BuildVariableLoadForAccumulatorValue(proxy->var(),
4662 233507 : proxy->hole_check_mode());
4663 233511 : break;
4664 : }
4665 : case NAMED_PROPERTY: {
4666 8659 : object = VisitForRegisterValue(property->obj());
4667 8661 : name = property->key()->AsLiteral()->AsRawPropertyName();
4668 : builder()->LoadNamedProperty(
4669 : object, name,
4670 17321 : feedback_index(GetCachedLoadICSlot(property->obj(), name)));
4671 8661 : break;
4672 : }
4673 : case KEYED_PROPERTY: {
4674 2757 : object = VisitForRegisterValue(property->obj());
4675 : // Use visit for accumulator here since we need the key in the accumulator
4676 : // for the LoadKeyedProperty.
4677 2757 : key = register_allocator()->NewRegister();
4678 2757 : VisitForAccumulatorValue(property->key());
4679 2757 : builder()->StoreAccumulatorInRegister(key).LoadKeyedProperty(
4680 2757 : object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
4681 2757 : break;
4682 : }
4683 : case NAMED_SUPER_PROPERTY: {
4684 45 : super_property_args = register_allocator()->NewRegisterList(4);
4685 45 : RegisterList load_super_args = super_property_args.Truncate(3);
4686 : SuperPropertyReference* super_property =
4687 45 : property->obj()->AsSuperPropertyReference();
4688 45 : BuildThisVariableLoad();
4689 45 : builder()->StoreAccumulatorInRegister(load_super_args[0]);
4690 : VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
4691 : builder()
4692 90 : ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
4693 45 : .StoreAccumulatorInRegister(load_super_args[2])
4694 45 : .CallRuntime(Runtime::kLoadFromSuper, load_super_args);
4695 : break;
4696 : }
4697 : case KEYED_SUPER_PROPERTY: {
4698 90 : super_property_args = register_allocator()->NewRegisterList(4);
4699 90 : RegisterList load_super_args = super_property_args.Truncate(3);
4700 : SuperPropertyReference* super_property =
4701 90 : property->obj()->AsSuperPropertyReference();
4702 90 : BuildThisVariableLoad();
4703 90 : builder()->StoreAccumulatorInRegister(load_super_args[0]);
4704 : VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
4705 : VisitForRegisterValue(property->key(), load_super_args[2]);
4706 90 : builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, load_super_args);
4707 : break;
4708 : }
4709 : }
4710 :
4711 : // Save result for postfix expressions.
4712 : FeedbackSlot count_slot = feedback_spec()->AddBinaryOpICSlot();
4713 245059 : if (is_postfix) {
4714 30633 : old_value = register_allocator()->NewRegister();
4715 : // Convert old value into a number before saving it.
4716 : // TODO(ignition): Think about adding proper PostInc/PostDec bytecodes
4717 : // instead of this ToNumeric + Inc/Dec dance.
4718 : builder()
4719 30635 : ->ToNumeric(feedback_index(count_slot))
4720 30633 : .StoreAccumulatorInRegister(old_value);
4721 : }
4722 :
4723 : // Perform +1/-1 operation.
4724 490118 : builder()->UnaryOperation(expr->op(), feedback_index(count_slot));
4725 :
4726 : // Store the value.
4727 : builder()->SetExpressionPosition(expr);
4728 245063 : switch (assign_type) {
4729 : case NON_PROPERTY: {
4730 233508 : VariableProxy* proxy = expr->expression()->AsVariableProxy();
4731 233508 : BuildVariableAssignment(proxy->var(), expr->op(),
4732 233508 : proxy->hole_check_mode());
4733 233511 : break;
4734 : }
4735 : case NAMED_PROPERTY: {
4736 8660 : FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
4737 : Register value;
4738 8660 : if (!execution_result()->IsEffect()) {
4739 8255 : value = register_allocator()->NewRegister();
4740 8255 : builder()->StoreAccumulatorInRegister(value);
4741 : }
4742 : builder()->StoreNamedProperty(object, name, feedback_index(slot),
4743 8660 : language_mode());
4744 8660 : if (!execution_result()->IsEffect()) {
4745 8255 : builder()->LoadAccumulatorWithRegister(value);
4746 : }
4747 : break;
4748 : }
4749 : case KEYED_PROPERTY: {
4750 : FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
4751 : Register value;
4752 2757 : if (!execution_result()->IsEffect()) {
4753 486 : value = register_allocator()->NewRegister();
4754 486 : builder()->StoreAccumulatorInRegister(value);
4755 : }
4756 : builder()->StoreKeyedProperty(object, key, feedback_index(slot),
4757 2757 : language_mode());
4758 2757 : if (!execution_result()->IsEffect()) {
4759 486 : builder()->LoadAccumulatorWithRegister(value);
4760 : }
4761 : break;
4762 : }
4763 : case NAMED_SUPER_PROPERTY: {
4764 : builder()
4765 45 : ->StoreAccumulatorInRegister(super_property_args[3])
4766 45 : .CallRuntime(Runtime::kStoreToSuper, super_property_args);
4767 45 : break;
4768 : }
4769 : case KEYED_SUPER_PROPERTY: {
4770 : builder()
4771 90 : ->StoreAccumulatorInRegister(super_property_args[3])
4772 90 : .CallRuntime(Runtime::kStoreKeyedToSuper, super_property_args);
4773 90 : break;
4774 : }
4775 : }
4776 :
4777 : // Restore old value for postfix expressions.
4778 245065 : if (is_postfix) {
4779 30633 : builder()->LoadAccumulatorWithRegister(old_value);
4780 : }
4781 245068 : }
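// Illustrative example (not part of the original source): the is_postfix path
// above saves the ToNumeric'd old value so that the expression evaluates to it:
//   let i = 5;
//   const a = i++;  // a === 5, i === 6 (old value restored from old_value)
//   const b = ++i;  // b === 7, i === 7 (no old-value register needed)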
4782 :
4783 538369 : void BytecodeGenerator::VisitBinaryOperation(BinaryOperation* binop) {
4784 538369 : switch (binop->op()) {
4785 : case Token::COMMA:
4786 : VisitCommaExpression(binop);
4787 : break;
4788 : case Token::OR:
4789 41591 : VisitLogicalOrExpression(binop);
4790 41593 : break;
4791 : case Token::AND:
4792 84725 : VisitLogicalAndExpression(binop);
4793 84725 : break;
4794 : default:
4795 374387 : VisitArithmeticExpression(binop);
4796 374394 : break;
4797 : }
4798 538378 : }
4799 :
4800 115227 : void BytecodeGenerator::VisitNaryOperation(NaryOperation* expr) {
4801 115227 : switch (expr->op()) {
4802 : case Token::COMMA:
4803 326 : VisitNaryCommaExpression(expr);
4804 326 : break;
4805 : case Token::OR:
4806 20607 : VisitNaryLogicalOrExpression(expr);
4807 20607 : break;
4808 : case Token::AND:
4809 577 : VisitNaryLogicalAndExpression(expr);
4810 577 : break;
4811 : default:
4812 93717 : VisitNaryArithmeticExpression(expr);
4813 93717 : break;
4814 : }
4815 115227 : }
4816 :
4817 66607 : void BytecodeGenerator::BuildLiteralCompareNil(
4818 : Token::Value op, BytecodeArrayBuilder::NilValue nil) {
4819 66607 : if (execution_result()->IsTest()) {
4820 : TestResultScope* test_result = execution_result()->AsTest();
4821 54420 : switch (test_result->fallthrough()) {
4822 : case TestFallthrough::kThen:
4823 28910 : builder()->JumpIfNotNil(test_result->NewElseLabel(), op, nil);
4824 14455 : break;
4825 : case TestFallthrough::kElse:
4826 79931 : builder()->JumpIfNil(test_result->NewThenLabel(), op, nil);
4827 39967 : break;
4828 : case TestFallthrough::kNone:
4829 : builder()
4830 0 : ->JumpIfNil(test_result->NewThenLabel(), op, nil)
4831 0 : .Jump(test_result->NewElseLabel());
4832 : }
4833 : test_result->SetResultConsumedByTest();
4834 : } else {
4835 24374 : builder()->CompareNil(op, nil);
4836 : }
4837 66608 : }
4838 :
4839 894456 : void BytecodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4840 : Expression* sub_expr;
4841 : Literal* literal;
4842 894456 : if (expr->IsLiteralCompareTypeof(&sub_expr, &literal)) {
 4843             :     // Emit a fast literal comparison for expressions of the form:
4844 : // typeof(x) === 'string'.
4845 100145 : VisitForTypeOfValue(sub_expr);
4846 : builder()->SetExpressionPosition(expr);
4847 : TestTypeOfFlags::LiteralFlag literal_flag =
4848 100145 : TestTypeOfFlags::GetFlagForLiteral(ast_string_constants(), literal);
4849 100144 : if (literal_flag == TestTypeOfFlags::LiteralFlag::kOther) {
4850 243 : builder()->LoadFalse();
4851 : } else {
4852 99901 : builder()->CompareTypeOf(literal_flag);
4853 : }
4854 794322 : } else if (expr->IsLiteralCompareUndefined(&sub_expr)) {
4855 61382 : VisitForAccumulatorValue(sub_expr);
4856 : builder()->SetExpressionPosition(expr);
4857 61389 : BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kUndefinedValue);
4858 732935 : } else if (expr->IsLiteralCompareNull(&sub_expr)) {
4859 5222 : VisitForAccumulatorValue(sub_expr);
4860 : builder()->SetExpressionPosition(expr);
4861 5222 : BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kNullValue);
4862 : } else {
4863 727713 : Register lhs = VisitForRegisterValue(expr->left());
4864 727721 : VisitForAccumulatorValue(expr->right());
4865 : builder()->SetExpressionPosition(expr);
4866 : FeedbackSlot slot;
4867 727712 : if (expr->op() == Token::IN) {
4868 : slot = feedback_spec()->AddKeyedHasICSlot();
4869 723917 : } else if (expr->op() == Token::INSTANCEOF) {
4870 : slot = feedback_spec()->AddInstanceOfSlot();
4871 : } else {
4872 : slot = feedback_spec()->AddCompareICSlot();
4873 : }
4874 727713 : builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
4875 : }
4876 : // Always returns a boolean value.
4877 : execution_result()->SetResultIsBoolean();
4878 894469 : }
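// Illustrative example (not part of the original source) of the literal
// fast paths above versus the generic compare:
//   typeof x === "string"  // CompareTypeOf, no feedback slot
//   x === undefined        // BuildLiteralCompareNil with kUndefinedValue
//   x == null              // BuildLiteralCompareNil with kNullValue
//   a < b                  // generic CompareOperation with a compare IC slot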
4879 :
4880 374385 : void BytecodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
4881 : FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
4882 : Expression* subexpr;
4883 374390 : Smi literal;
4884 374390 : if (expr->IsSmiLiteralOperation(&subexpr, &literal)) {
4885 126963 : TypeHint type_hint = VisitForAccumulatorValue(subexpr);
4886 : builder()->SetExpressionPosition(expr);
4887 : builder()->BinaryOperationSmiLiteral(expr->op(), literal,
4888 126961 : feedback_index(slot));
4889 126963 : if (expr->op() == Token::ADD && type_hint == TypeHint::kString) {
4890 : execution_result()->SetResultIsString();
4891 : }
4892 : } else {
4893 247431 : TypeHint lhs_type = VisitForAccumulatorValue(expr->left());
4894 247439 : Register lhs = register_allocator()->NewRegister();
4895 247439 : builder()->StoreAccumulatorInRegister(lhs);
4896 247439 : TypeHint rhs_type = VisitForAccumulatorValue(expr->right());
4897 359483 : if (expr->op() == Token::ADD &&
4898 112046 : (lhs_type == TypeHint::kString || rhs_type == TypeHint::kString)) {
4899 : execution_result()->SetResultIsString();
4900 : }
4901 :
4902 : builder()->SetExpressionPosition(expr);
4903 247437 : builder()->BinaryOperation(expr->op(), lhs, feedback_index(slot));
4904 : }
4905 374397 : }
4906 :
4907 93717 : void BytecodeGenerator::VisitNaryArithmeticExpression(NaryOperation* expr) {
4908 : // TODO(leszeks): Add support for lhs smi in commutative ops.
4909 93717 : TypeHint type_hint = VisitForAccumulatorValue(expr->first());
4910 :
4911 768101 : for (size_t i = 0; i < expr->subsequent_length(); ++i) {
4912 : RegisterAllocationScope register_scope(this);
4913 337192 : if (expr->subsequent(i)->IsSmiLiteral()) {
4914 : builder()->SetExpressionPosition(expr->subsequent_op_position(i));
4915 : builder()->BinaryOperationSmiLiteral(
4916 : expr->op(), expr->subsequent(i)->AsLiteral()->AsSmiLiteral(),
4917 89348 : feedback_index(feedback_spec()->AddBinaryOpICSlot()));
4918 : } else {
4919 292518 : Register lhs = register_allocator()->NewRegister();
4920 292517 : builder()->StoreAccumulatorInRegister(lhs);
4921 292519 : TypeHint rhs_hint = VisitForAccumulatorValue(expr->subsequent(i));
4922 292519 : if (rhs_hint == TypeHint::kString) type_hint = TypeHint::kString;
4923 : builder()->SetExpressionPosition(expr->subsequent_op_position(i));
4924 : builder()->BinaryOperation(
4925 : expr->op(), lhs,
4926 292519 : feedback_index(feedback_spec()->AddBinaryOpICSlot()));
4927 : }
4928 : }
4929 :
4930 135692 : if (type_hint == TypeHint::kString && expr->op() == Token::ADD) {
4931 : // If any operand of an ADD is a String, a String is produced.
4932 : execution_result()->SetResultIsString();
4933 : }
4934 93718 : }
4935 :
4936 : // Note: the actual spreading is performed by the surrounding expression's
4937 : // visitor.
4938 6744 : void BytecodeGenerator::VisitSpread(Spread* expr) { Visit(expr->expression()); }
4939 :
4940 0 : void BytecodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
4941 0 : UNREACHABLE();
4942 : }
4943 :
4944 283 : void BytecodeGenerator::VisitImportCallExpression(ImportCallExpression* expr) {
4945 283 : RegisterList args = register_allocator()->NewRegisterList(2);
4946 : VisitForRegisterValue(expr->argument(), args[1]);
4947 : builder()
4948 283 : ->MoveRegister(Register::function_closure(), args[0])
4949 283 : .CallRuntime(Runtime::kDynamicImportCall, args);
4950 283 : }
4951 :
4952 39970 : void BytecodeGenerator::BuildGetIterator(IteratorType hint) {
4953 39970 : RegisterList args = register_allocator()->NewRegisterList(1);
4954 39981 : Register method = register_allocator()->NewRegister();
4955 39981 : Register obj = args[0];
4956 :
4957 39981 : if (hint == IteratorType::kAsync) {
4958 : // Set method to GetMethod(obj, @@asyncIterator)
4959 351 : builder()->StoreAccumulatorInRegister(obj).LoadAsyncIteratorProperty(
4960 351 : obj, feedback_index(feedback_spec()->AddLoadICSlot()));
4961 :
4962 : BytecodeLabel async_iterator_undefined, async_iterator_null, done;
4963 : // TODO(ignition): Add a single opcode for JumpIfNullOrUndefined
4964 351 : builder()->JumpIfUndefined(&async_iterator_undefined);
4965 351 : builder()->JumpIfNull(&async_iterator_null);
4966 :
4967 : // Let iterator be Call(method, obj)
4968 351 : builder()->StoreAccumulatorInRegister(method).CallProperty(
4969 351 : method, args, feedback_index(feedback_spec()->AddCallICSlot()));
4970 :
4971 : // If Type(iterator) is not Object, throw a TypeError exception.
4972 351 : builder()->JumpIfJSReceiver(&done);
4973 351 : builder()->CallRuntime(Runtime::kThrowSymbolAsyncIteratorInvalid);
4974 :
4975 351 : builder()->Bind(&async_iterator_undefined);
4976 351 : builder()->Bind(&async_iterator_null);
4977 : // If method is undefined,
4978 : // Let syncMethod be GetMethod(obj, @@iterator)
4979 : builder()
4980 : ->LoadIteratorProperty(obj,
4981 351 : feedback_index(feedback_spec()->AddLoadICSlot()))
4982 351 : .StoreAccumulatorInRegister(method);
4983 :
4984 : // Let syncIterator be Call(syncMethod, obj)
4985 : builder()->CallProperty(method, args,
4986 351 : feedback_index(feedback_spec()->AddCallICSlot()));
4987 :
4988 : // Return CreateAsyncFromSyncIterator(syncIterator)
4989 : // alias `method` register as it's no longer used
4990 351 : Register sync_iter = method;
4991 351 : builder()->StoreAccumulatorInRegister(sync_iter).CallRuntime(
4992 351 : Runtime::kInlineCreateAsyncFromSyncIterator, sync_iter);
4993 :
4994 351 : builder()->Bind(&done);
4995 : } else {
4996 : // Let method be GetMethod(obj, @@iterator).
4997 : builder()
4998 39624 : ->StoreAccumulatorInRegister(obj)
4999 : .LoadIteratorProperty(obj,
5000 39629 : feedback_index(feedback_spec()->AddLoadICSlot()))
5001 39628 : .StoreAccumulatorInRegister(method);
5002 :
5003 : // Let iterator be Call(method, obj).
5004 : builder()->CallProperty(method, args,
5005 39626 : feedback_index(feedback_spec()->AddCallICSlot()));
5006 :
5007 : // If Type(iterator) is not Object, throw a TypeError exception.
5008 : BytecodeLabel no_type_error;
5009 39630 : builder()->JumpIfJSReceiver(&no_type_error);
5010 39618 : builder()->CallRuntime(Runtime::kThrowSymbolIteratorInvalid);
5011 39628 : builder()->Bind(&no_type_error);
5012 : }
5013 39975 : }
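// Illustrative example (not part of the original source) of the async
// fallback path above:
//   const obj = { [Symbol.iterator]() { return [1, 2][Symbol.iterator](); } };
//   for await (const x of obj) { /* x is 1, then 2 */ }
//   // obj has no Symbol.asyncIterator, so its sync iterator is wrapped via
//   // Runtime::kInlineCreateAsyncFromSyncIterator.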
5014 :
5015 : // Returns an IteratorRecord which is valid for the lifetime of the current
5016 : // register_allocation_scope.
5017 39976 : BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
5018 : Register next, Register object, IteratorType hint) {
5019 : DCHECK(next.is_valid() && object.is_valid());
5020 39976 : BuildGetIterator(hint);
5021 :
5022 : builder()
5023 39974 : ->StoreAccumulatorInRegister(object)
5024 : .LoadNamedProperty(object, ast_string_constants()->next_string(),
5025 39979 : feedback_index(feedback_spec()->AddLoadICSlot()))
5026 39980 : .StoreAccumulatorInRegister(next);
5027 39980 : return IteratorRecord(object, next, hint);
5028 : }
5029 :
5030 39780 : BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
5031 : IteratorType hint) {
5032 39780 : Register next = register_allocator()->NewRegister();
5033 39784 : Register object = register_allocator()->NewRegister();
5034 39785 : return BuildGetIteratorRecord(next, object, hint);
5035 : }
5036 :
5037 41191 : void BytecodeGenerator::BuildIteratorNext(const IteratorRecord& iterator,
5038 : Register next_result) {
5039 : DCHECK(next_result.is_valid());
5040 : builder()->CallProperty(iterator.next(), RegisterList(iterator.object()),
5041 41193 : feedback_index(feedback_spec()->AddCallICSlot()));
5042 :
5043 41193 : if (iterator.type() == IteratorType::kAsync) {
5044 341 : BuildAwait();
5045 : }
5046 :
5047 : BytecodeLabel is_object;
5048 : builder()
5049 41193 : ->StoreAccumulatorInRegister(next_result)
5050 41190 : .JumpIfJSReceiver(&is_object)
5051 41192 : .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, next_result)
5052 41191 : .Bind(&is_object);
5053 41190 : }
5054 :
5055 582 : void BytecodeGenerator::BuildCallIteratorMethod(Register iterator,
5056 : const AstRawString* method_name,
5057 : RegisterList receiver_and_args,
5058 : BytecodeLabel* if_called,
5059 : BytecodeLabels* if_notcalled) {
5060 : RegisterAllocationScope register_scope(this);
5061 :
5062 582 : Register method = register_allocator()->NewRegister();
5063 : FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
5064 : builder()
5065 582 : ->LoadNamedProperty(iterator, method_name, feedback_index(slot))
5066 1164 : .JumpIfUndefined(if_notcalled->New())
5067 1164 : .JumpIfNull(if_notcalled->New())
5068 582 : .StoreAccumulatorInRegister(method)
5069 : .CallProperty(method, receiver_and_args,
5070 582 : feedback_index(feedback_spec()->AddCallICSlot()))
5071 582 : .Jump(if_called);
5072 582 : }
5073 :
5074 194 : void BytecodeGenerator::BuildIteratorClose(const IteratorRecord& iterator,
5075 : Expression* expr) {
5076 : RegisterAllocationScope register_scope(this);
5077 : BytecodeLabels done(zone());
5078 : BytecodeLabel if_called;
5079 : RegisterList args = RegisterList(iterator.object());
5080 : BuildCallIteratorMethod(iterator.object(),
5081 : ast_string_constants()->return_string(), args,
5082 194 : &if_called, &done);
5083 194 : builder()->Bind(&if_called);
5084 :
5085 194 : if (iterator.type() == IteratorType::kAsync) {
5086 : DCHECK_NOT_NULL(expr);
5087 10 : BuildAwait(expr->position());
5088 : }
5089 :
5090 194 : builder()->JumpIfJSReceiver(done.New());
5091 : {
5092 : RegisterAllocationScope register_scope(this);
5093 194 : Register return_result = register_allocator()->NewRegister();
5094 : builder()
5095 194 : ->StoreAccumulatorInRegister(return_result)
5096 194 : .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, return_result);
5097 : }
5098 :
5099 194 : done.Bind(builder());
5100 194 : }
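// Illustrative example (not part of the original source) of the non-object
// check above, which mirrors the spec's IteratorClose:
//   const it = { [Symbol.iterator]() { return this; },
//                next() { return {value: 1, done: false}; },
//                return() { return 42; } };  // not an object
//   for (const x of it) break;  // closing the iterator throws a TypeError
//                               // because return() produced a non-object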
5101 :
5102 1948 : void BytecodeGenerator::VisitGetTemplateObject(GetTemplateObject* expr) {
5103 : builder()->SetExpressionPosition(expr);
5104 1948 : size_t entry = builder()->AllocateDeferredConstantPoolEntry();
5105 3896 : template_objects_.push_back(std::make_pair(expr, entry));
5106 : FeedbackSlot literal_slot = feedback_spec()->AddLiteralSlot();
5107 1948 : builder()->GetTemplateObject(entry, feedback_index(literal_slot));
5108 1948 : }
5109 :
5110 7003 : void BytecodeGenerator::VisitTemplateLiteral(TemplateLiteral* expr) {
5111 : const ZonePtrList<const AstRawString>& parts = *expr->string_parts();
5112 : const ZonePtrList<Expression>& substitutions = *expr->substitutions();
5113 : // Template strings with no substitutions are turned into StringLiterals.
5114 : DCHECK_GT(substitutions.length(), 0);
5115 : DCHECK_EQ(parts.length(), substitutions.length() + 1);
5116 :
5117 : // Generate string concatenation
5118 : // TODO(caitp): Don't generate feedback slot if it's not used --- introduce
5119 : // a simple, concise, reusable mechanism to lazily create reusable slots.
5120 : FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
5121 7003 : Register last_part = register_allocator()->NewRegister();
5122 : bool last_part_valid = false;
5123 :
5124 : builder()->SetExpressionPosition(expr);
5125 27963 : for (int i = 0; i < substitutions.length(); ++i) {
5126 10480 : if (i != 0) {
5127 3477 : builder()->StoreAccumulatorInRegister(last_part);
5128 : last_part_valid = true;
5129 : }
5130 :
5131 20960 : if (!parts[i]->IsEmpty()) {
5132 7295 : builder()->LoadLiteral(parts[i]);
5133 7295 : if (last_part_valid) {
5134 3402 : builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
5135 : }
5136 7295 : builder()->StoreAccumulatorInRegister(last_part);
5137 : last_part_valid = true;
5138 : }
5139 :
5140 10480 : TypeHint type_hint = VisitForAccumulatorValue(substitutions[i]);
5141 10480 : if (type_hint != TypeHint::kString) {
5142 10470 : builder()->ToString();
5143 : }
5144 10480 : if (last_part_valid) {
5145 7370 : builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
5146 : }
5147 : last_part_valid = false;
5148 : }
5149 :
5150 14006 : if (!parts.last()->IsEmpty()) {
5151 2745 : builder()->StoreAccumulatorInRegister(last_part);
5152 2745 : builder()->LoadLiteral(parts.last());
5153 2745 : builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
5154 : }
5155 7003 : }
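// Illustrative example (not part of the original source) of the left-to-right
// concatenation above:
//   const x = 1, y = 2;
//   `a${x}b${y}`  // "a1b2": string parts and ToString'd substitutions are
//                 // folded together with ADD, skipping empty string parts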
5156 :
5157 2469078 : void BytecodeGenerator::BuildThisVariableLoad() {
5158 2469078 : DeclarationScope* receiver_scope = closure_scope()->GetReceiverScope();
5159 : Variable* var = receiver_scope->receiver();
5160 : // TODO(littledan): implement 'this' hole check elimination.
5161 : HoleCheckMode hole_check_mode =
5162 : IsDerivedConstructor(receiver_scope->function_kind())
5163 : ? HoleCheckMode::kRequired
5164 2469081 : : HoleCheckMode::kElided;
5165 2469081 : BuildVariableLoad(var, hole_check_mode);
5166 2469089 : }
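// Illustrative example (not part of the original source) of the kRequired
// hole check for derived constructors:
//   class Base {}
//   class Derived extends Base {
//     constructor() { this.x = 1; super(); }
//   }
//   new Derived();  // ReferenceError: `this` is read while still in the TDZ,
//                   // caught by the kRequired hole check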
5167 :
5168 0 : void BytecodeGenerator::VisitThisExpression(ThisExpression* expr) {
5169 2467093 : BuildThisVariableLoad();
5170 0 : }
5171 :
5172 0 : void BytecodeGenerator::VisitSuperCallReference(SuperCallReference* expr) {
5173 : // Handled by VisitCall().
5174 0 : UNREACHABLE();
5175 : }
5176 :
5177 0 : void BytecodeGenerator::VisitSuperPropertyReference(
5178 : SuperPropertyReference* expr) {
5179 17 : builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError);
5180 0 : }
5181 :
5182 0 : void BytecodeGenerator::VisitCommaExpression(BinaryOperation* binop) {
5183 37666 : VisitForEffect(binop->left());
5184 37666 : Visit(binop->right());
5185 0 : }
5186 :
5187 326 : void BytecodeGenerator::VisitNaryCommaExpression(NaryOperation* expr) {
5188 : DCHECK_GT(expr->subsequent_length(), 0);
5189 :
5190 326 : VisitForEffect(expr->first());
5191 5220800 : for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5192 2610237 : VisitForEffect(expr->subsequent(i));
5193 : }
5194 326 : Visit(expr->subsequent(expr->subsequent_length() - 1));
5195 326 : }
5196 :
5197 147545 : void BytecodeGenerator::VisitLogicalTestSubExpression(
5198 : Token::Value token, Expression* expr, BytecodeLabels* then_labels,
5199 : BytecodeLabels* else_labels, int coverage_slot) {
5200 : DCHECK(token == Token::OR || token == Token::AND);
5201 :
5202 : BytecodeLabels test_next(zone());
5203 147545 : if (token == Token::OR) {
5204 93366 : VisitForTest(expr, then_labels, &test_next, TestFallthrough::kElse);
5205 : } else {
5206 : DCHECK_EQ(Token::AND, token);
5207 54179 : VisitForTest(expr, &test_next, else_labels, TestFallthrough::kThen);
5208 : }
5209 147547 : test_next.Bind(builder());
5210 :
5211 : BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5212 147546 : }
5213 :
5214 68569 : void BytecodeGenerator::VisitLogicalTest(Token::Value token, Expression* left,
5215 : Expression* right,
5216 : int right_coverage_slot) {
5217 : DCHECK(token == Token::OR || token == Token::AND);
5218 : TestResultScope* test_result = execution_result()->AsTest();
5219 : BytecodeLabels* then_labels = test_result->then_labels();
5220 : BytecodeLabels* else_labels = test_result->else_labels();
5221 : TestFallthrough fallthrough = test_result->fallthrough();
5222 :
5223 68569 : VisitLogicalTestSubExpression(token, left, then_labels, else_labels,
5224 68569 : right_coverage_slot);
5225 : // The last test has the same then, else and fallthrough as the parent test.
5226 68570 : VisitForTest(right, then_labels, else_labels, fallthrough);
5227 68570 : }
5228 :
5229 20631 : void BytecodeGenerator::VisitNaryLogicalTest(
5230 : Token::Value token, NaryOperation* expr,
5231 : const NaryCodeCoverageSlots* coverage_slots) {
5232 : DCHECK(token == Token::OR || token == Token::AND);
5233 : DCHECK_GT(expr->subsequent_length(), 0);
5234 :
5235 : TestResultScope* test_result = execution_result()->AsTest();
5236 : BytecodeLabels* then_labels = test_result->then_labels();
5237 : BytecodeLabels* else_labels = test_result->else_labels();
5238 : TestFallthrough fallthrough = test_result->fallthrough();
5239 :
5240 20631 : VisitLogicalTestSubExpression(token, expr->first(), then_labels, else_labels,
5241 20631 : coverage_slots->GetSlotFor(0));
5242 137322 : for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5243 58345 : VisitLogicalTestSubExpression(token, expr->subsequent(i), then_labels,
5244 : else_labels,
5245 58345 : coverage_slots->GetSlotFor(i + 1));
5246 : }
5247 : // The last test has the same then, else and fallthrough as the parent test.
5248 : VisitForTest(expr->subsequent(expr->subsequent_length() - 1), then_labels,
5249 20632 : else_labels, fallthrough);
5250 20632 : }
5251 :
5252 22759 : bool BytecodeGenerator::VisitLogicalOrSubExpression(Expression* expr,
5253 : BytecodeLabels* end_labels,
5254 : int coverage_slot) {
5255 22759 : if (expr->ToBooleanIsTrue()) {
5256 1721 : VisitForAccumulatorValue(expr);
5257 1721 : end_labels->Bind(builder());
5258 1721 : return true;
5259 21039 : } else if (!expr->ToBooleanIsFalse()) {
5260 20859 : TypeHint type_hint = VisitForAccumulatorValue(expr);
5261 : builder()->JumpIfTrue(ToBooleanModeFromTypeHint(type_hint),
5262 41717 : end_labels->New());
5263 : }
5264 :
5265 : BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5266 :
5267 : return false;
5268 : }
5269 :
5270 32133 : bool BytecodeGenerator::VisitLogicalAndSubExpression(Expression* expr,
5271 : BytecodeLabels* end_labels,
5272 : int coverage_slot) {
5273 32133 : if (expr->ToBooleanIsFalse()) {
5274 169 : VisitForAccumulatorValue(expr);
5275 169 : end_labels->Bind(builder());
5276 169 : return true;
5277 31964 : } else if (!expr->ToBooleanIsTrue()) {
5278 30226 : TypeHint type_hint = VisitForAccumulatorValue(expr);
5279 : builder()->JumpIfFalse(ToBooleanModeFromTypeHint(type_hint),
5280 60451 : end_labels->New());
5281 : }
5282 :
5283 : BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5284 :
5285 : return false;
5286 : }
5287 :
5288 41590 : void BytecodeGenerator::VisitLogicalOrExpression(BinaryOperation* binop) {
5289 : Expression* left = binop->left();
5290 : Expression* right = binop->right();
5291 :
5292 : int right_coverage_slot =
5293 : AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
5294 :
5295 41592 : if (execution_result()->IsTest()) {
5296 : TestResultScope* test_result = execution_result()->AsTest();
5297 19112 : if (left->ToBooleanIsTrue()) {
5298 4350 : builder()->Jump(test_result->NewThenLabel());
5299 14762 : } else if (left->ToBooleanIsFalse() && right->ToBooleanIsFalse()) {
5300 : BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
5301 0 : builder()->Jump(test_result->NewElseLabel());
5302 : } else {
5303 14761 : VisitLogicalTest(Token::OR, left, right, right_coverage_slot);
5304 : }
5305 : test_result->SetResultConsumedByTest();
5306 : } else {
5307 : BytecodeLabels end_labels(zone());
5308 22480 : if (VisitLogicalOrSubExpression(left, &end_labels, right_coverage_slot)) {
5309 : return;
5310 : }
5311 20793 : VisitForAccumulatorValue(right);
5312 20795 : end_labels.Bind(builder());
5313 : }
5314 : }
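// Illustrative example (not part of the original source) of the cases above:
//   if (true || f()) {}  // left ToBooleanIsTrue: jump straight to the then label
//   if (x || y) {}       // test context: VisitLogicalTest(Token::OR, ...)
//   const v = x || y;    // value context: evaluate x, JumpIfTrue over y, so the
//                        // accumulator ends up holding the selected operand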
5315 :
5316 20607 : void BytecodeGenerator::VisitNaryLogicalOrExpression(NaryOperation* expr) {
5317 : Expression* first = expr->first();
5318 : DCHECK_GT(expr->subsequent_length(), 0);
5319 :
5320 20607 : NaryCodeCoverageSlots coverage_slots(this, expr);
5321 :
5322 20607 : if (execution_result()->IsTest()) {
5323 : TestResultScope* test_result = execution_result()->AsTest();
5324 20468 : if (first->ToBooleanIsTrue()) {
5325 0 : builder()->Jump(test_result->NewThenLabel());
5326 : } else {
5327 20468 : VisitNaryLogicalTest(Token::OR, expr, &coverage_slots);
5328 : }
5329 : test_result->SetResultConsumedByTest();
5330 : } else {
5331 : BytecodeLabels end_labels(zone());
5332 139 : if (VisitLogicalOrSubExpression(first, &end_labels,
5333 : coverage_slots.GetSlotFor(0))) {
5334 : return;
5335 : }
5336 248 : for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5337 282 : if (VisitLogicalOrSubExpression(expr->subsequent(i), &end_labels,
5338 : coverage_slots.GetSlotFor(i + 1))) {
5339 : return;
5340 : }
5341 : }
5342 : // We have to visit the last value even if it's true, because we need its
5343 : // actual value.
5344 107 : VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
5345 107 : end_labels.Bind(builder());
5346 : }
5347 : }
5348 :
5349 84725 : void BytecodeGenerator::VisitLogicalAndExpression(BinaryOperation* binop) {
5350 : Expression* left = binop->left();
5351 : Expression* right = binop->right();
5352 :
5353 : int right_coverage_slot =
5354 : AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
5355 :
5356 84725 : if (execution_result()->IsTest()) {
5357 : TestResultScope* test_result = execution_result()->AsTest();
5358 53819 : if (left->ToBooleanIsFalse()) {
5359 11 : builder()->Jump(test_result->NewElseLabel());
5360 53808 : } else if (left->ToBooleanIsTrue() && right->ToBooleanIsTrue()) {
5361 : BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
5362 0 : builder()->Jump(test_result->NewThenLabel());
5363 : } else {
5364 53808 : VisitLogicalTest(Token::AND, left, right, right_coverage_slot);
5365 : }
5366 : test_result->SetResultConsumedByTest();
5367 : } else {
5368 : BytecodeLabels end_labels(zone());
5369 30906 : if (VisitLogicalAndSubExpression(left, &end_labels, right_coverage_slot)) {
5370 : return;
5371 : }
5372 30754 : VisitForAccumulatorValue(right);
5373 30754 : end_labels.Bind(builder());
5374 : }
5375 : }
5376 :
5377 577 : void BytecodeGenerator::VisitNaryLogicalAndExpression(NaryOperation* expr) {
5378 : Expression* first = expr->first();
5379 : DCHECK_GT(expr->subsequent_length(), 0);
5380 :
5381 577 : NaryCodeCoverageSlots coverage_slots(this, expr);
5382 :
5383 577 : if (execution_result()->IsTest()) {
5384 : TestResultScope* test_result = execution_result()->AsTest();
5385 163 : if (first->ToBooleanIsFalse()) {
5386 0 : builder()->Jump(test_result->NewElseLabel());
5387 : } else {
5388 163 : VisitNaryLogicalTest(Token::AND, expr, &coverage_slots);
5389 : }
5390 : test_result->SetResultConsumedByTest();
5391 : } else {
5392 : BytecodeLabels end_labels(zone());
5393 414 : if (VisitLogicalAndSubExpression(first, &end_labels,
5394 : coverage_slots.GetSlotFor(0))) {
5395 : return;
5396 : }
5397 1211 : for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5398 1626 : if (VisitLogicalAndSubExpression(expr->subsequent(i), &end_labels,
5399 : coverage_slots.GetSlotFor(i + 1))) {
5400 : return;
5401 : }
5402 : }
5403 : // We have to visit the last value even if it's false, because we need its
5404 : // actual value.
5405 398 : VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
5406 398 : end_labels.Bind(builder());
5407 : }
5408 : }
5409 :
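 : // Creates the activation context for the closure scope: a script context
 : // for script scopes, a module context for module scopes, and an eval or
 : // function context otherwise, falling back to a runtime call when the slot
 : // count exceeds what the constructor builtin supports.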
5410 191329 : void BytecodeGenerator::BuildNewLocalActivationContext() {
5411 : ValueResultScope value_execution_result(this);
5412 : Scope* scope = closure_scope();
5413 : DCHECK_EQ(current_scope(), closure_scope());
5414 :
5415 : // Create the appropriate context.
5416 191329 : if (scope->is_script_scope()) {
5417 12128 : Register scope_reg = register_allocator()->NewRegister();
5418 : builder()
5419 12127 : ->LoadLiteral(scope)
5420 12128 : .StoreAccumulatorInRegister(scope_reg)
5421 12128 : .CallRuntime(Runtime::kNewScriptContext, scope_reg);
5422 179201 : } else if (scope->is_module_scope()) {
5423 : // We don't need to do anything for the outer script scope.
5424 : DCHECK(scope->outer_scope()->is_script_scope());
5425 :
5426 : // A JSFunction representing a module is called with the module object as
5427 : // its sole argument.
5428 1257 : RegisterList args = register_allocator()->NewRegisterList(2);
5429 : builder()
5430 1257 : ->MoveRegister(builder()->Parameter(0), args[0])
5431 1257 : .LoadLiteral(scope)
5432 1257 : .StoreAccumulatorInRegister(args[1])
5433 1257 : .CallRuntime(Runtime::kPushModuleContext, args);
5434 : } else {
5435 : DCHECK(scope->is_function_scope() || scope->is_eval_scope());
5436 177944 : int slot_count = scope->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
5437 177944 : if (slot_count <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
5438 177927 : switch (scope->scope_type()) {
5439 : case EVAL_SCOPE:
5440 42301 : builder()->CreateEvalContext(scope, slot_count);
5441 42301 : break;
5442 : case FUNCTION_SCOPE:
5443 135626 : builder()->CreateFunctionContext(scope, slot_count);
5444 135626 : break;
5445 : default:
5446 0 : UNREACHABLE();
5447 : }
5448 : } else {
5449 17 : Register arg = register_allocator()->NewRegister();
5450 17 : builder()->LoadLiteral(scope).StoreAccumulatorInRegister(arg).CallRuntime(
5451 17 : Runtime::kNewFunctionContext, arg);
5452 : }
5453 : }
5454 191326 : }
5455 :
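 : // Copies the receiver and any context-allocated parameters from their
 : // registers into slots of the newly created activation context.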
5456 191329 : void BytecodeGenerator::BuildLocalActivationContextInitialization() {
5457 : DeclarationScope* scope = closure_scope();
5458 :
5459 323432 : if (scope->has_this_declaration() && scope->receiver()->IsContextSlot()) {
5460 : Variable* variable = scope->receiver();
5461 106321 : Register receiver(builder()->Receiver());
5462 : // Context variable (at bottom of the context chain).
5463 : DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
5464 106321 : builder()->LoadAccumulatorWithRegister(receiver).StoreContextSlot(
5465 106321 : execution_context()->reg(), variable->index(), 0);
5466 : }
5467 :
5468 : // Copy parameters into context if necessary.
5469 : int num_parameters = scope->num_parameters();
5470 310375 : for (int i = 0; i < num_parameters; i++) {
5471 : Variable* variable = scope->parameter(i);
5472 86546 : if (!variable->IsContextSlot()) continue;
5473 :
5474 32500 : Register parameter(builder()->Parameter(i));
5475 : // Context variable (at bottom of the context chain).
5476 : DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
5477 32500 : builder()->LoadAccumulatorWithRegister(parameter).StoreContextSlot(
5478 32500 : execution_context()->reg(), variable->index(), 0);
5479 : }
5480 191329 : }
5481 :
5482 59829 : void BytecodeGenerator::BuildNewLocalBlockContext(Scope* scope) {
5483 : ValueResultScope value_execution_result(this);
5484 : DCHECK(scope->is_block_scope());
5485 :
5486 59829 : builder()->CreateBlockContext(scope);
5487 59830 : }
5488 :
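 : // Converts the accumulator value into the `with` extension object and
 : // creates a with-context for |scope| around it.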
5489 2962 : void BytecodeGenerator::BuildNewLocalWithContext(Scope* scope) {
5490 : ValueResultScope value_execution_result(this);
5491 :
5492 2962 : Register extension_object = register_allocator()->NewRegister();
5493 :
5494 2963 : builder()->ToObject(extension_object);
5495 2963 : builder()->CreateWithContext(extension_object, scope);
5496 2963 : }
5497 :
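 : // Creates a catch context for |scope|, capturing the exception value
 : // currently held in the accumulator.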
5498 74744 : void BytecodeGenerator::BuildNewLocalCatchContext(Scope* scope) {
5499 : ValueResultScope value_execution_result(this);
5500 : DCHECK(scope->catch_variable()->IsContextSlot());
5501 :
5502 74744 : Register exception = register_allocator()->NewRegister();
5503 74744 : builder()->StoreAccumulatorInRegister(exception);
5504 74746 : builder()->CreateCatchContext(exception, scope);
5505 74745 : }
5506 :
5507 10119 : void BytecodeGenerator::VisitObjectLiteralAccessor(
5508 : Register home_object, ObjectLiteralProperty* property, Register value_out) {
5509 10119 : if (property == nullptr) {
5510 4328 : builder()->LoadNull().StoreAccumulatorInRegister(value_out);
5511 : } else {
5512 : VisitForRegisterValue(property->value(), value_out);
5513 5790 : VisitSetHomeObject(value_out, home_object, property);
5514 : }
5515 10118 : }
5516 :
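 : // If the property's value is a function that needs a home object (e.g.
 : // because it uses `super`), stores |home_object| on it.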
5517 12218 : void BytecodeGenerator::VisitSetHomeObject(Register value, Register home_object,
5518 : LiteralProperty* property) {
5519 : Expression* expr = property->value();
5520 12218 : if (FunctionLiteral::NeedsHomeObject(expr)) {
5521 : FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
5522 : builder()
5523 659 : ->LoadAccumulatorWithRegister(home_object)
5524 659 : .StoreHomeObjectProperty(value, feedback_index(slot), language_mode());
5525 : }
5526 12218 : }
5527 :
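 : // Creates the `arguments` object and assigns it to |variable|; a null
 : // |variable| means there is no arguments object and nothing is emitted.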
5528 2118647 : void BytecodeGenerator::VisitArgumentsObject(Variable* variable) {
5529 2118647 : if (variable == nullptr) return;
5530 :
5531 : DCHECK(variable->IsContextSlot() || variable->IsStackAllocated());
5532 :
5533 : // Allocate and initialize a new arguments object and assign it to the
5534 : // {arguments} variable.
5535 101436 : builder()->CreateArguments(closure_scope()->GetArgumentsType());
5536 101436 : BuildVariableAssignment(variable, Token::ASSIGN, HoleCheckMode::kElided);
5537 : }
5538 :
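 : // Creates the rest-parameter array and assigns it to |rest|; a null |rest|
 : // means the function has no rest parameter and nothing is emitted.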
5539 2118653 : void BytecodeGenerator::VisitRestArgumentsArray(Variable* rest) {
5540 2118653 : if (rest == nullptr) return;
5541 :
5542 : // Allocate and initialize a new rest parameter array and assign it to the
5543 : // {rest} variable.
5544 4003 : builder()->CreateArguments(CreateArgumentsType::kRestParameter);
5545 : DCHECK(rest->IsContextSlot() || rest->IsStackAllocated());
5546 4003 : BuildVariableAssignment(rest, Token::ASSIGN, HoleCheckMode::kElided);
5547 : }
5548 :
5549 4237301 : void BytecodeGenerator::VisitThisFunctionVariable(Variable* variable) {
5550 4237301 : if (variable == nullptr) return;
5551 :
5552 : // Store the closure we were called with in the given variable.
5553 60302 : builder()->LoadAccumulatorWithRegister(Register::function_closure());
5554 30151 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
5555 : }
5556 :
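 : // Initializes the `new.target` variable from the incoming new-target
 : // register, except for resumable functions and for the case where the
 : // variable already lives in that register.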
5557 2118648 : void BytecodeGenerator::VisitNewTargetVariable(Variable* variable) {
5558 2118648 : if (variable == nullptr) return;
5559 :
5560 : // The generator resume trampoline abuses the new.target register
5561 : // to pass in the generator object. In ordinary calls, new.target is always
5562 : // undefined because generator functions are non-constructible, so don't
5563 : // assign anything to the new.target variable.
5564 199666 : if (IsResumableFunction(info()->literal()->kind())) return;
5565 :
5566 99317 : if (variable->location() == VariableLocation::LOCAL) {
5567 : // The new.target register was already assigned by the entry trampoline.
5568 : DCHECK_EQ(incoming_new_target_or_generator_.index(),
5569 : GetRegisterForLocalVariable(variable).index());
5570 : return;
5571 : }
5572 :
5573 : // Store the new target we were called with in the given variable.
5574 94976 : builder()->LoadAccumulatorWithRegister(incoming_new_target_or_generator_);
5575 94976 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
5576 : }
5577 :
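 : // Allocates the generator object for a resumable function, using
 : // Runtime::kInlineAsyncFunctionEnter for (non-generator) async functions
 : // and Runtime::kInlineCreateJSGeneratorObject otherwise, and stores it in
 : // the generator object variable.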
5578 11091 : void BytecodeGenerator::BuildGeneratorObjectVariableInitialization() {
5579 : DCHECK(IsResumableFunction(info()->literal()->kind()));
5580 :
5581 : Variable* generator_object_var = closure_scope()->generator_object_var();
5582 : RegisterAllocationScope register_scope(this);
5583 11091 : RegisterList args = register_allocator()->NewRegisterList(2);
5584 : Runtime::FunctionId function_id =
5585 18254 : (IsAsyncFunction(info()->literal()->kind()) &&
5586 7163 : !IsAsyncGeneratorFunction(info()->literal()->kind()))
5587 : ? Runtime::kInlineAsyncFunctionEnter
5588 11091 : : Runtime::kInlineCreateJSGeneratorObject;
5589 : builder()
5590 11091 : ->MoveRegister(Register::function_closure(), args[0])
5591 22182 : .MoveRegister(builder()->Receiver(), args[1])
5592 11091 : .CallRuntime(function_id, args)
5593 11091 : .StoreAccumulatorInRegister(generator_object());
5594 :
5595 11091 : if (generator_object_var->location() == VariableLocation::LOCAL) {
5596 : // The generator object register is already set to the variable's local
5597 : // register.
5598 : DCHECK_EQ(generator_object().index(),
5599 : GetRegisterForLocalVariable(generator_object_var).index());
5600 : } else {
5601 : BuildVariableAssignment(generator_object_var, Token::INIT,
5602 0 : HoleCheckMode::kElided);
5603 : }
5604 11091 : }
5605 :
5606 931998 : void BytecodeGenerator::BuildPushUndefinedIntoRegisterList(
5607 : RegisterList* reg_list) {
5608 931998 : Register reg = register_allocator()->GrowRegisterList(reg_list);
5609 932041 : builder()->LoadUndefined().StoreAccumulatorInRegister(reg);
5610 932076 : }
5611 :
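 : // Loads the key of |property| into |out_reg|: string-literal keys are
 : // loaded directly, other key expressions are evaluated and converted with
 : // ToName.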
5612 10329 : void BytecodeGenerator::BuildLoadPropertyKey(LiteralProperty* property,
5613 : Register out_reg) {
5614 10329 : if (property->key()->IsStringLiteral()) {
5615 : builder()
5616 5050 : ->LoadLiteral(property->key()->AsLiteral()->AsRawString())
5617 2525 : .StoreAccumulatorInRegister(out_reg);
5618 : } else {
5619 7805 : VisitForAccumulatorValue(property->key());
5620 7805 : builder()->ToName(out_reg);
5621 : }
5622 10330 : }
5623 :
5624 0 : int BytecodeGenerator::AllocateBlockCoverageSlotIfEnabled(
5625 : AstNode* node, SourceRangeKind kind) {
5626 2276065 : return (block_coverage_builder_ == nullptr)
5627 : ? BlockCoverageBuilder::kNoCoverageArraySlot
5628 2276065 : : block_coverage_builder_->AllocateBlockCoverageSlot(node, kind);
5629 : }
5630 :
5631 0 : int BytecodeGenerator::AllocateNaryBlockCoverageSlotIfEnabled(
5632 : NaryOperation* node, size_t index) {
5633 244 : return (block_coverage_builder_ == nullptr)
5634 : ? BlockCoverageBuilder::kNoCoverageArraySlot
5635 : : block_coverage_builder_->AllocateNaryBlockCoverageSlot(node,
5636 244 : index);
5637 : }
5638 :
5639 0 : void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
5640 : AstNode* node, SourceRangeKind kind) {
5641 23242 : if (block_coverage_builder_ == nullptr) return;
5642 152 : block_coverage_builder_->IncrementBlockCounter(node, kind);
5643 : }
5644 :
5645 0 : void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
5646 : int coverage_array_slot) {
5647 200553 : if (block_coverage_builder_ != nullptr) {
5648 : block_coverage_builder_->IncrementBlockCounter(coverage_array_slot);
5649 : }
5650 0 : }
5651 :
5652 : // Visits the expression |expr| and places the result in the accumulator.
5653 17676469 : BytecodeGenerator::TypeHint BytecodeGenerator::VisitForAccumulatorValue(
5654 : Expression* expr) {
5655 : ValueResultScope accumulator_scope(this);
5656 17676469 : Visit(expr);
5657 17676412 : return accumulator_scope.type_hint();
5658 : }
5659 :
5660 0 : void BytecodeGenerator::VisitForAccumulatorValueOrTheHole(Expression* expr) {
5661 43103 : if (expr == nullptr) {
5662 36475 : builder()->LoadTheHole();
5663 : } else {
5664 6628 : VisitForAccumulatorValue(expr);
5665 : }
5666 0 : }
5667 :
5668 : // Visits the expression |expr| and discards the result.
5669 12864984 : void BytecodeGenerator::VisitForEffect(Expression* expr) {
5670 : EffectResultScope effect_scope(this);
5671 12864984 : Visit(expr);
5672 12864845 : }
5673 :
5674 : // Visits the expression |expr| and returns the register containing
5675 : // the expression result.
5676 5133139 : Register BytecodeGenerator::VisitForRegisterValue(Expression* expr) {
5677 5133139 : VisitForAccumulatorValue(expr);
5678 5133164 : Register result = register_allocator()->NewRegister();
5679 5133158 : builder()->StoreAccumulatorInRegister(result);
5680 5133183 : return result;
5681 : }
5682 :
5683 : // Visits the expression |expr| and stores the expression result in
5684 : // |destination|.
5685 : void BytecodeGenerator::VisitForRegisterValue(Expression* expr,
5686 : Register destination) {
5687 : ValueResultScope register_scope(this);
5688 878506 : Visit(expr);
5689 878506 : builder()->StoreAccumulatorInRegister(destination);
5690 : }
5691 :
5692 : // Visits the expression |expr| and pushes the result into a new register
5693 : // added to the end of |reg_list|.
5694 6970853 : void BytecodeGenerator::VisitAndPushIntoRegisterList(Expression* expr,
5695 : RegisterList* reg_list) {
5696 : {
5697 : ValueResultScope register_scope(this);
5698 6970853 : Visit(expr);
5699 : }
5700 : // Grow the register list after visiting the expression to avoid reserving
5701 : // the register across the expression evaluation, which could cause memory
5702 : // leaks for deep expressions due to dead objects being kept alive by pointers
5703 : // in registers.
5704 6970817 : Register destination = register_allocator()->GrowRegisterList(reg_list);
5705 6970823 : builder()->StoreAccumulatorInRegister(destination);
5706 6970884 : }
5707 :
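 : // Emits the conditional jumps for a boolean test, branching to
 : // |then_labels| or |else_labels| depending on which branch is the
 : // fallthrough.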
5708 893400 : void BytecodeGenerator::BuildTest(ToBooleanMode mode,
5709 : BytecodeLabels* then_labels,
5710 : BytecodeLabels* else_labels,
5711 : TestFallthrough fallthrough) {
5712 893400 : switch (fallthrough) {
5713 : case TestFallthrough::kThen:
5714 1217397 : builder()->JumpIfFalse(mode, else_labels->New());
5715 608697 : break;
5716 : case TestFallthrough::kElse:
5717 569396 : builder()->JumpIfTrue(mode, then_labels->New());
5718 284698 : break;
5719 : case TestFallthrough::kNone:
5720 0 : builder()->JumpIfTrue(mode, then_labels->New());
5721 0 : builder()->Jump(else_labels->New());
5722 0 : break;
5723 : }
5724 893396 : }
5725 :
5726 : // Visits the expression |expr| for testing its boolean value and jumping to
5727 : // the |then| or |else| labels depending on its value and short-circuit semantics.
5728 1041370 : void BytecodeGenerator::VisitForTest(Expression* expr,
5729 : BytecodeLabels* then_labels,
5730 : BytecodeLabels* else_labels,
5731 : TestFallthrough fallthrough) {
5732 : bool result_consumed;
5733 : TypeHint type_hint;
5734 : {
5735 : // Make sure the result scope is destroyed before generating the jumps
5736 : // below, so that all temporary registers have been returned; otherwise
5737 : // dead registers might be materialized.
5738 : TestResultScope test_result(this, then_labels, else_labels, fallthrough);
5739 1041370 : Visit(expr);
5740 : result_consumed = test_result.result_consumed_by_test();
5741 : type_hint = test_result.type_hint();
5742 : // Labels and fallthrough might have been mutated, so update based on
5743 : // TestResultScope.
5744 : then_labels = test_result.then_labels();
5745 : else_labels = test_result.else_labels();
5746 : fallthrough = test_result.fallthrough();
5747 : }
5748 1041385 : if (!result_consumed) {
5749 : BuildTest(ToBooleanModeFromTypeHint(type_hint), then_labels, else_labels,
5750 687084 : fallthrough);
5751 : }
5752 1041379 : }
5753 :
5754 246712 : void BytecodeGenerator::VisitInSameTestExecutionScope(Expression* expr) {
5755 : DCHECK(execution_result()->IsTest());
5756 : {
5757 : RegisterAllocationScope reg_scope(this);
5758 246712 : Visit(expr);
5759 : }
5760 246716 : if (!execution_result()->AsTest()->result_consumed_by_test()) {
5761 : TestResultScope* result_scope = execution_result()->AsTest();
5762 : BuildTest(ToBooleanModeFromTypeHint(result_scope->type_hint()),
5763 : result_scope->then_labels(), result_scope->else_labels(),
5764 206318 : result_scope->fallthrough());
5765 : result_scope->SetResultConsumedByTest();
5766 : }
5767 246715 : }
5768 :
5769 77708 : void BytecodeGenerator::VisitInScope(Statement* stmt, Scope* scope) {
5770 : DCHECK(scope->declarations()->is_empty());
5771 : CurrentScope current_scope(this, scope);
5772 155417 : ContextScope context_scope(this, scope);
5773 77710 : Visit(stmt);
5774 77710 : }
5775 :
5776 0 : Register BytecodeGenerator::GetRegisterForLocalVariable(Variable* variable) {
5777 : DCHECK_EQ(VariableLocation::LOCAL, variable->location());
5778 15432 : return builder()->Local(variable->index());
5779 : }
5780 :
5781 : FunctionKind BytecodeGenerator::function_kind() const {
5782 2146002 : return info()->literal()->kind();
5783 : }
5784 :
5785 : LanguageMode BytecodeGenerator::language_mode() const {
5786 : return current_scope()->language_mode();
5787 : }
5788 :
5789 : Register BytecodeGenerator::generator_object() const {
5790 : DCHECK(IsResumableFunction(info()->literal()->kind()));
5791 : return incoming_new_target_or_generator_;
5792 : }
5793 :
5794 : FeedbackVectorSpec* BytecodeGenerator::feedback_spec() {
5795 : return info()->feedback_vector_spec();
5796 : }
5797 :
5798 : int BytecodeGenerator::feedback_index(FeedbackSlot slot) const {
5799 : DCHECK(!slot.IsInvalid());
5800 : return FeedbackVector::GetIndex(slot);
5801 : }
5802 :
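 : // Returns a LoadGlobalIC feedback slot for |variable|, reusing a cached
 : // slot for the same variable and typeof mode if one exists. The caches
 : // below follow the same pattern for global stores, named loads and stores,
 : // and closure feedback cells.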
5803 7709063 : FeedbackSlot BytecodeGenerator::GetCachedLoadGlobalICSlot(
5804 : TypeofMode typeof_mode, Variable* variable) {
5805 : FeedbackSlotCache::SlotKind slot_kind =
5806 : typeof_mode == INSIDE_TYPEOF
5807 : ? FeedbackSlotCache::SlotKind::kLoadGlobalInsideTypeof
5808 7709063 : : FeedbackSlotCache::SlotKind::kLoadGlobalNotInsideTypeof;
5809 : FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
5810 7709379 : if (!slot.IsInvalid()) {
5811 4628398 : return slot;
5812 : }
5813 : slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
5814 : feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
5815 3080828 : return slot;
5816 : }
5817 :
5818 1465717 : FeedbackSlot BytecodeGenerator::GetCachedStoreGlobalICSlot(
5819 : LanguageMode language_mode, Variable* variable) {
5820 : FeedbackSlotCache::SlotKind slot_kind =
5821 : is_strict(language_mode)
5822 : ? FeedbackSlotCache::SlotKind::kStoreGlobalStrict
5823 1465717 : : FeedbackSlotCache::SlotKind::kStoreGlobalSloppy;
5824 : FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
5825 1465744 : if (!slot.IsInvalid()) {
5826 264240 : return slot;
5827 : }
5828 : slot = feedback_spec()->AddStoreGlobalICSlot(language_mode);
5829 : feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
5830 1201523 : return slot;
5831 : }
5832 :
5833 1174876 : FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr,
5834 : const AstRawString* name) {
5835 1174876 : if (!FLAG_ignition_share_named_property_feedback) {
5836 : return feedback_spec()->AddLoadICSlot();
5837 : }
5838 : FeedbackSlotCache::SlotKind slot_kind =
5839 : FeedbackSlotCache::SlotKind::kLoadProperty;
5840 1174876 : if (!expr->IsVariableProxy()) {
5841 : return feedback_spec()->AddLoadICSlot();
5842 : }
5843 793110 : const VariableProxy* proxy = expr->AsVariableProxy();
5844 : FeedbackSlot slot(
5845 : feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
5846 793129 : if (!slot.IsInvalid()) {
5847 270862 : return slot;
5848 : }
5849 : slot = feedback_spec()->AddLoadICSlot();
5850 : feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
5851 : feedback_index(slot));
5852 522255 : return slot;
5853 : }
5854 :
5855 2293829 : FeedbackSlot BytecodeGenerator::GetCachedStoreICSlot(const Expression* expr,
5856 : const AstRawString* name) {
5857 2293829 : if (!FLAG_ignition_share_named_property_feedback) {
5858 : return feedback_spec()->AddStoreICSlot(language_mode());
5859 : }
5860 : FeedbackSlotCache::SlotKind slot_kind =
5861 : is_strict(language_mode())
5862 : ? FeedbackSlotCache::SlotKind::kStoreNamedStrict
5863 2293829 : : FeedbackSlotCache::SlotKind::kStoreNamedSloppy;
5864 2293829 : if (!expr->IsVariableProxy()) {
5865 : return feedback_spec()->AddStoreICSlot(language_mode());
5866 : }
5867 23048 : const VariableProxy* proxy = expr->AsVariableProxy();
5868 : FeedbackSlot slot(
5869 : feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
5870 23048 : if (!slot.IsInvalid()) {
5871 2519 : return slot;
5872 : }
5873 : slot = feedback_spec()->AddStoreICSlot(language_mode());
5874 : feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
5875 : feedback_index(slot));
5876 20529 : return slot;
5877 : }
5878 :
5879 2955080 : int BytecodeGenerator::GetCachedCreateClosureSlot(FunctionLiteral* literal) {
5880 : FeedbackSlotCache::SlotKind slot_kind =
5881 : FeedbackSlotCache::SlotKind::kClosureFeedbackCell;
5882 : int index = feedback_slot_cache()->Get(slot_kind, literal);
5883 2955201 : if (index != -1) {
5884 : return index;
5885 : }
5886 : index = feedback_spec()->AddFeedbackCellForCreateClosure();
5887 : feedback_slot_cache()->Put(slot_kind, literal, index);
5888 2955155 : return index;
5889 : }
5890 :
5891 0 : FeedbackSlot BytecodeGenerator::GetDummyCompareICSlot() {
5892 1710 : return dummy_feedback_slot_.Get();
5893 : }
5894 :
5895 : } // namespace interpreter
5896 : } // namespace internal
5897 122036 : } // namespace v8