Line data Source code
1 : // Copyright 2015 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/interpreter/bytecode-generator.h"
6 :
7 : #include "src/api-inl.h"
8 : #include "src/ast/ast-source-ranges.h"
9 : #include "src/ast/scopes.h"
10 : #include "src/builtins/builtins-constructor.h"
11 : #include "src/compiler.h"
12 : #include "src/interpreter/bytecode-flags.h"
13 : #include "src/interpreter/bytecode-jump-table.h"
14 : #include "src/interpreter/bytecode-label.h"
15 : #include "src/interpreter/bytecode-register-allocator.h"
16 : #include "src/interpreter/control-flow-builders.h"
17 : #include "src/objects-inl.h"
18 : #include "src/objects/debug-objects.h"
19 : #include "src/objects/literal-objects-inl.h"
20 : #include "src/objects/smi.h"
21 : #include "src/objects/template-objects-inl.h"
22 : #include "src/parsing/parse-info.h"
23 : #include "src/parsing/token.h"
24 : #include "src/unoptimized-compilation-info.h"
25 :
26 : namespace v8 {
27 : namespace internal {
28 : namespace interpreter {
29 :
30 : // Scoped class tracking context objects created by the visitor. Represents
31 : // mutations of the context chain within the function body, allowing pushing and
32 : // popping of the current {context_register} during visitation.
33 : class BytecodeGenerator::ContextScope {
34 : public:
35 2419083 : ContextScope(BytecodeGenerator* generator, Scope* scope)
36 : : generator_(generator),
37 : scope_(scope),
38 : outer_(generator_->execution_context()),
39 : register_(Register::current_context()),
40 2419083 : depth_(0) {
41 : DCHECK(scope->NeedsContext() || outer_ == nullptr);
42 2419085 : if (outer_) {
43 322408 : depth_ = outer_->depth_ + 1;
44 :
45 : // Push the outer context into a new context register.
46 : Register outer_context_reg =
47 644816 : generator_->register_allocator()->NewRegister();
48 322411 : outer_->set_register(outer_context_reg);
49 644822 : generator_->builder()->PushContext(outer_context_reg);
50 : }
51 2419084 : generator_->set_execution_context(this);
52 2419084 : }
53 :
54 4838096 : ~ContextScope() {
55 2419047 : if (outer_) {
56 : DCHECK_EQ(register_.index(), Register::current_context().index());
57 644818 : generator_->builder()->PopContext(outer_->reg());
58 322411 : outer_->set_register(register_);
59 : }
60 2419049 : generator_->set_execution_context(outer_);
61 2419049 : }
62 :
63 : // Returns the depth of the given |scope| for the current execution context.
64 : int ContextChainDepth(Scope* scope) {
65 2800051 : return scope_->ContextChainLength(scope);
66 : }
67 :
68 : // Returns the execution context at |depth| in the current context chain if it
69 : // is a function local execution context, otherwise returns nullptr.
70 : ContextScope* Previous(int depth) {
71 2760460 : if (depth > depth_) {
72 : return nullptr;
73 : }
74 :
75 : ContextScope* previous = this;
76 2715475 : for (int i = depth; i > 0; --i) {
77 22728 : previous = previous->outer_;
78 : }
79 : return previous;
80 : }
81 :
82 : Register reg() const { return register_; }
83 :
84 : private:
85 : const BytecodeArrayBuilder* builder() const { return generator_->builder(); }
86 :
87 644822 : void set_register(Register reg) { register_ = reg; }
88 :
89 : BytecodeGenerator* generator_;
90 : Scope* scope_;
91 : ContextScope* outer_;
92 : Register register_;
93 : int depth_;
94 : };
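// [Editor's note] Illustrative sketch only (hypothetical names, not V8 API):
// the RAII shape of ContextScope reduced to its essentials. Entering a scope
// makes a new context current and remembers the outer one; leaving the scope
// restores the outer context, so C++ scope nesting mirrors the runtime
// context chain maintained via PushContext/PopContext above.
class CurrentContextModel {
 public:
  CurrentContextModel(int* current, int new_context)
      : current_(current), outer_(*current) {
    *current_ = new_context;  // models PushContext: the new context is current
  }
  ~CurrentContextModel() {
    *current_ = outer_;  // models PopContext: the outer context is restored
  }

 private:
  int* current_;  // slot standing in for the current-context register
  int outer_;     // saved outer context, restored on destruction
};

// Usage sketch: nesting scopes nests contexts, much as GenerateBytecode below
// nests a function-local ContextScope inside the incoming one.
inline void CurrentContextModelExample() {
  int current = 0;
  CurrentContextModel incoming(&current, 1);
  {
    CurrentContextModel local(&current, 2);
    // current == 2 while the inner scope is live.
  }
  // current == 1 again: the inner scope restored the outer context.
}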
95 :
96 : // Scoped class for tracking control statements entered by the
97 : // visitor. The pattern derives from AstGraphBuilder::ControlScope.
98 : class BytecodeGenerator::ControlScope {
99 : public:
100 : explicit ControlScope(BytecodeGenerator* generator)
101 : : generator_(generator),
102 : outer_(generator->execution_control()),
103 7706121 : context_(generator->execution_context()) {
104 : generator_->set_execution_control(this);
105 : }
106 15412228 : virtual ~ControlScope() { generator_->set_execution_control(outer()); }
107 :
108 : void Break(Statement* stmt) {
109 46052 : PerformCommand(CMD_BREAK, stmt, kNoSourcePosition);
110 : }
111 : void Continue(Statement* stmt) {
112 3612 : PerformCommand(CMD_CONTINUE, stmt, kNoSourcePosition);
113 : }
114 : void ReturnAccumulator(int source_position = kNoSourcePosition) {
115 2060109 : PerformCommand(CMD_RETURN, nullptr, source_position);
116 : }
117 : void AsyncReturnAccumulator(int source_position = kNoSourcePosition) {
118 8424 : PerformCommand(CMD_ASYNC_RETURN, nullptr, source_position);
119 : }
120 :
121 : class DeferredCommands;
122 :
123 : protected:
124 : enum Command {
125 : CMD_BREAK,
126 : CMD_CONTINUE,
127 : CMD_RETURN,
128 : CMD_ASYNC_RETURN,
129 : CMD_RETHROW
130 : };
131 : static constexpr bool CommandUsesAccumulator(Command command) {
132 : return command != CMD_BREAK && command != CMD_CONTINUE;
133 : }
134 :
135 : void PerformCommand(Command command, Statement* statement,
136 : int source_position);
137 : virtual bool Execute(Command command, Statement* statement,
138 : int source_position) = 0;
139 :
140 : // Helper to pop the context chain to a depth expected by this control scope.
141 : // Note that it is the responsibility of each individual {Execute} method to
142 : // trigger this when commands are handled and control-flow continues locally.
143 : void PopContextToExpectedDepth();
144 :
145 : BytecodeGenerator* generator() const { return generator_; }
146 : ControlScope* outer() const { return outer_; }
147 : ContextScope* context() const { return context_; }
148 :
149 : private:
150 : BytecodeGenerator* generator_;
151 : ControlScope* outer_;
152 : ContextScope* context_;
153 :
154 : DISALLOW_COPY_AND_ASSIGN(ControlScope);
155 : };
156 :
157 : // Helper class for a try-finally control scope. It can record intercepted
158 : // control-flow commands that cause entry into a finally-block, and re-apply
159 : // them after leaving that block again. Special tokens are used to identify
160 : // paths going through the finally-block to dispatch after leaving the block.
161 41424 : class BytecodeGenerator::ControlScope::DeferredCommands final {
162 : public:
163 : // Fixed value tokens for paths we know we need.
164 : // Fallthrough is set to -1 to make it the fallthrough case of the jump table,
165 : // where the remaining cases start at 0.
166 : static const int kFallthroughToken = -1;
167 : // TODO(leszeks): Rethrow being 0 makes it use up a valuable LdaZero, which
168 : // means that other commands (such as break or return) have to use LdaSmi.
169 : // This can very slightly bloat bytecode, so perhaps token values should all
170 : // be shifted down by 1.
171 : static const int kRethrowToken = 0;
172 :
173 : DeferredCommands(BytecodeGenerator* generator, Register token_register,
174 : Register result_register)
175 : : generator_(generator),
176 : deferred_(generator->zone()),
177 : token_register_(token_register),
178 : result_register_(result_register),
179 : return_token_(-1),
180 82850 : async_return_token_(-1) {
181 : // There's always a rethrow path.
182 : // TODO(leszeks): We could decouple deferred_ index and token to allow us
183 : // to still push this lazily.
184 : STATIC_ASSERT(kRethrowToken == 0);
185 82850 : deferred_.push_back({CMD_RETHROW, nullptr, kRethrowToken});
186 : }
187 :
188 : // One recorded control-flow command.
189 : struct Entry {
190 : Command command; // The command type being applied on this path.
191 : Statement* statement; // The target statement for the command or {nullptr}.
192 : int token; // A token identifying this particular path.
193 : };
194 :
195 : // Records a control-flow command while entering the finally-block. This also
196 : // generates a new dispatch token that identifies one particular path. This
197 : // expects the result to be in the accumulator.
198 52560 : void RecordCommand(Command command, Statement* statement) {
199 52560 : int token = GetTokenForCommand(command, statement);
200 :
201 : DCHECK_LT(token, deferred_.size());
202 : DCHECK_EQ(deferred_[token].command, command);
203 : DCHECK_EQ(deferred_[token].statement, statement);
204 : DCHECK_EQ(deferred_[token].token, token);
205 :
206 52560 : if (CommandUsesAccumulator(command)) {
207 52308 : builder()->StoreAccumulatorInRegister(result_register_);
208 : }
209 52560 : builder()->LoadLiteral(Smi::FromInt(token));
210 52561 : builder()->StoreAccumulatorInRegister(token_register_);
211 52561 : if (!CommandUsesAccumulator(command)) {
212 : // If we're not saving the accumulator in the result register, shove a
213 : // harmless value there instead so that it is still considered "killed" in
214 : // the liveness analysis. Normally we would LdaUndefined first, but the
215 : // Smi token value is just as good, and by reusing it we save a bytecode.
216 253 : builder()->StoreAccumulatorInRegister(result_register_);
217 : }
218 52561 : }
219 :
220 : // Records the dispatch token to be used to identify the re-throw path when
221 : // the finally-block has been entered through the exception handler. This
222 : // expects the exception to be in the accumulator.
223 : void RecordHandlerReThrowPath() {
224 : // The accumulator contains the exception object.
225 41425 : RecordCommand(CMD_RETHROW, nullptr);
226 : }
227 :
228 : // Records the dispatch token to be used to identify the implicit fall-through
229 : // path at the end of a try-block into the corresponding finally-block.
230 41425 : void RecordFallThroughPath() {
231 41425 : builder()->LoadLiteral(Smi::FromInt(kFallthroughToken));
232 41425 : builder()->StoreAccumulatorInRegister(token_register_);
233 : // Since we're not saving the accumulator in the result register, shove a
234 : // harmless value there instead so that it is still considered "killed" in
235 : // the liveness analysis. Normally we would LdaUndefined first, but the Smi
236 : // token value is just as good, and by reusing it we save a bytecode.
237 41425 : builder()->StoreAccumulatorInRegister(result_register_);
238 41425 : }
239 :
240 : // Applies all recorded control-flow commands after the finally-block again.
241 : // This generates a dynamic dispatch on the token from the entry point.
242 41424 : void ApplyDeferredCommands() {
243 41424 : if (deferred_.size() == 0) return;
244 :
245 : BytecodeLabel fall_through;
246 :
247 41424 : if (deferred_.size() == 1) {
248 : // For a single entry, just jump to the fallthrough if we don't match the
249 : // entry token.
250 : const Entry& entry = deferred_[0];
251 :
252 : builder()
253 78104 : ->LoadLiteral(Smi::FromInt(entry.token))
254 39053 : .CompareReference(token_register_)
255 39053 : .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &fall_through);
256 :
257 39053 : if (CommandUsesAccumulator(entry.command)) {
258 39053 : builder()->LoadAccumulatorWithRegister(result_register_);
259 : }
260 39053 : execution_control()->PerformCommand(entry.command, entry.statement,
261 39053 : kNoSourcePosition);
262 : } else {
263 : // For multiple entries, build a jump table and switch on the token,
264 : // jumping to the fallthrough if none of them match.
265 :
266 : BytecodeJumpTable* jump_table =
267 4744 : builder()->AllocateJumpTable(static_cast<int>(deferred_.size()), 0);
268 : builder()
269 2372 : ->LoadAccumulatorWithRegister(token_register_)
270 2372 : .SwitchOnSmiNoFeedback(jump_table)
271 2372 : .Jump(&fall_through);
272 8192 : for (const Entry& entry : deferred_) {
273 11640 : builder()->Bind(jump_table, entry.token);
274 :
275 5820 : if (CommandUsesAccumulator(entry.command)) {
276 5567 : builder()->LoadAccumulatorWithRegister(result_register_);
277 : }
278 5820 : execution_control()->PerformCommand(entry.command, entry.statement,
279 5820 : kNoSourcePosition);
280 : }
281 : }
282 :
283 41425 : builder()->Bind(&fall_through);
284 : }
285 :
286 : BytecodeArrayBuilder* builder() { return generator_->builder(); }
287 : ControlScope* execution_control() { return generator_->execution_control(); }
288 :
289 : private:
290 52560 : int GetTokenForCommand(Command command, Statement* statement) {
291 52560 : switch (command) {
292 : case CMD_RETURN:
293 2045 : return GetReturnToken();
294 : case CMD_ASYNC_RETURN:
295 2516 : return GetAsyncReturnToken();
296 : case CMD_RETHROW:
297 : return kRethrowToken;
298 : default:
299 : // TODO(leszeks): We could also search for entries with the same
300 : // command and statement.
301 253 : return GetNewTokenForCommand(command, statement);
302 : }
303 : }
304 :
305 2045 : int GetReturnToken() {
306 2045 : if (return_token_ == -1) {
307 1896 : return_token_ = GetNewTokenForCommand(CMD_RETURN, nullptr);
308 : }
309 2045 : return return_token_;
310 : }
311 :
312 2516 : int GetAsyncReturnToken() {
313 2516 : if (async_return_token_ == -1) {
314 1299 : async_return_token_ = GetNewTokenForCommand(CMD_ASYNC_RETURN, nullptr);
315 : }
316 2516 : return async_return_token_;
317 : }
318 :
319 : int GetNewTokenForCommand(Command command, Statement* statement) {
320 3448 : int token = static_cast<int>(deferred_.size());
321 6896 : deferred_.push_back({command, statement, token});
322 : return token;
323 : }
324 :
325 : BytecodeGenerator* generator_;
326 : ZoneVector<Entry> deferred_;
327 : Register token_register_;
328 : Register result_register_;
329 :
330 : // Tokens for commands that don't need a statement.
331 : int return_token_;
332 : int async_return_token_;
333 : };
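// [Editor's note] Illustrative sketch only (hypothetical names, not V8 API)
// of the dispatch-token idea above: a command that leaves a try block through
// the finally block is first encoded as a small integer token; after the
// finally body runs, the token is decoded and the original command is
// re-applied. kFallthroughToken (-1) matches no recorded entry, so normal
// completion simply falls through the dispatch.
class DeferredCommandsModel {
 public:
  enum Command { kNone, kRethrow, kReturn, kBreak };
  static const int kMaxEntries = 8;

  DeferredCommandsModel() : size_(0) {}

  // On the way into the finally block: remember the command and hand back a
  // token for the caller to stash (the real code keeps it in token_register_).
  int Record(Command command) {
    int token = size_++;  // sketch: assumes at most kMaxEntries commands
    commands_[token] = command;
    return token;
  }

  // After the finally block: map the token back to the command that must now
  // be re-applied (the real ApplyDeferredCommands uses a bytecode jump table).
  Command Apply(int token) const {
    if (token < 0 || token >= size_) return kNone;  // fallthrough token
    return commands_[token];
  }

 private:
  Command commands_[kMaxEntries];
  int size_;
};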
334 :
335 : // Scoped class for dealing with control flow reaching the function level.
336 4193320 : class BytecodeGenerator::ControlScopeForTopLevel final
337 : : public BytecodeGenerator::ControlScope {
338 : public:
339 : explicit ControlScopeForTopLevel(BytecodeGenerator* generator)
340 2096682 : : ControlScope(generator) {}
341 :
342 : protected:
343 2100589 : bool Execute(Command command, Statement* statement,
344 : int source_position) override {
345 2100589 : switch (command) {
346 : case CMD_BREAK: // We should never see break/continue in top-level.
347 : case CMD_CONTINUE:
348 0 : UNREACHABLE();
349 : case CMD_RETURN:
350 : // No need to pop contexts, execution leaves the method body.
351 2059930 : generator()->BuildReturn(source_position);
352 2059969 : return true;
353 : case CMD_ASYNC_RETURN:
354 : // No need to pop contexts, execution leaves the method body.
355 7207 : generator()->BuildAsyncReturn(source_position);
356 7207 : return true;
357 : case CMD_RETHROW:
358 : // No need to pop contexts, execution leaves the method body.
359 : generator()->BuildReThrow();
360 33489 : return true;
361 : }
362 : return false;
363 : }
364 : };
365 :
366 : // Scoped class for enabling break inside blocks and switch blocks.
367 10400164 : class BytecodeGenerator::ControlScopeForBreakable final
368 : : public BytecodeGenerator::ControlScope {
369 : public:
370 : ControlScopeForBreakable(BytecodeGenerator* generator,
371 : BreakableStatement* statement,
372 : BreakableControlFlowBuilder* control_builder)
373 : : ControlScope(generator),
374 : statement_(statement),
375 5200072 : control_builder_(control_builder) {}
376 :
377 : protected:
378 413397 : bool Execute(Command command, Statement* statement,
379 : int source_position) override {
380 413397 : if (statement != statement_) return false;
381 26858 : switch (command) {
382 : case CMD_BREAK:
383 : PopContextToExpectedDepth();
384 26858 : control_builder_->Break();
385 26858 : return true;
386 : case CMD_CONTINUE:
387 : case CMD_RETURN:
388 : case CMD_ASYNC_RETURN:
389 : case CMD_RETHROW:
390 : break;
391 : }
392 : return false;
393 : }
394 :
395 : private:
396 : Statement* statement_;
397 : BreakableControlFlowBuilder* control_builder_;
398 : };
399 :
400 : // Scoped class for enabling 'break' and 'continue' in iteration
401 : // constructs, e.g. do...while, while..., for...
402 : class BytecodeGenerator::ControlScopeForIteration final
403 : : public BytecodeGenerator::ControlScope {
404 : public:
405 : ControlScopeForIteration(BytecodeGenerator* generator,
406 : IterationStatement* statement,
407 : LoopBuilder* loop_builder)
408 : : ControlScope(generator),
409 : statement_(statement),
410 260130 : loop_builder_(loop_builder) {
411 260130 : generator->loop_depth_++;
412 : }
413 520270 : ~ControlScopeForIteration() override { generator()->loop_depth_--; }
414 :
415 : protected:
416 68263 : bool Execute(Command command, Statement* statement,
417 : int source_position) override {
418 68263 : if (statement != statement_) return false;
419 22806 : switch (command) {
420 : case CMD_BREAK:
421 : PopContextToExpectedDepth();
422 19194 : loop_builder_->Break();
423 19194 : return true;
424 : case CMD_CONTINUE:
425 : PopContextToExpectedDepth();
426 3612 : loop_builder_->Continue();
427 3612 : return true;
428 : case CMD_RETURN:
429 : case CMD_ASYNC_RETURN:
430 : case CMD_RETHROW:
431 : break;
432 : }
433 : return false;
434 : }
435 :
436 : private:
437 : Statement* statement_;
438 : LoopBuilder* loop_builder_;
439 : };
440 :
441 : // Scoped class for enabling 'throw' in try-catch constructs.
442 215624 : class BytecodeGenerator::ControlScopeForTryCatch final
443 : : public BytecodeGenerator::ControlScope {
444 : public:
445 : ControlScopeForTryCatch(BytecodeGenerator* generator,
446 : TryCatchBuilder* try_catch_builder)
447 107812 : : ControlScope(generator) {}
448 :
449 : protected:
450 14259 : bool Execute(Command command, Statement* statement,
451 : int source_position) override {
452 14259 : switch (command) {
453 : case CMD_BREAK:
454 : case CMD_CONTINUE:
455 : case CMD_RETURN:
456 : case CMD_ASYNC_RETURN:
457 : break;
458 : case CMD_RETHROW:
459 : // No need to pop contexts, execution re-enters the method body via the
460 : // stack unwinding mechanism which itself restores contexts correctly.
461 : generator()->BuildReThrow();
462 1613 : return true;
463 : }
464 : return false;
465 : }
466 : };
467 :
468 : // Scoped class for enabling control flow through try-finally constructs.
469 82850 : class BytecodeGenerator::ControlScopeForTryFinally final
470 : : public BytecodeGenerator::ControlScope {
471 : public:
472 : ControlScopeForTryFinally(BytecodeGenerator* generator,
473 : TryFinallyBuilder* try_finally_builder,
474 : DeferredCommands* commands)
475 : : ControlScope(generator),
476 : try_finally_builder_(try_finally_builder),
477 41425 : commands_(commands) {}
478 :
479 : protected:
480 11136 : bool Execute(Command command, Statement* statement,
481 : int source_position) override {
482 11136 : switch (command) {
483 : case CMD_BREAK:
484 : case CMD_CONTINUE:
485 : case CMD_RETURN:
486 : case CMD_ASYNC_RETURN:
487 : case CMD_RETHROW:
488 : PopContextToExpectedDepth();
489 :         // We don't record source_position here since the return bytecode is
490 :         // not generated right here; it is generated later as part of the
491 :         // finally block. Each return bytecode generated in the finally block
492 :         // gets its own source position from the corresponding return statement,
493 :         // or from the end of the function if there is no return statement.
494 11136 : commands_->RecordCommand(command, statement);
495 11136 : try_finally_builder_->LeaveTry();
496 11136 : return true;
497 : }
498 : return false;
499 : }
500 :
501 : private:
502 : TryFinallyBuilder* try_finally_builder_;
503 : DeferredCommands* commands_;
504 : };
505 :
506 : // Allocate and fetch the coverage indices tracking n-ary logical expressions.
507 21098 : class BytecodeGenerator::NaryCodeCoverageSlots {
508 : public:
509 21098 : NaryCodeCoverageSlots(BytecodeGenerator* generator, NaryOperation* expr)
510 21098 : : generator_(generator) {
511 21098 : if (generator_->block_coverage_builder_ == nullptr) return;
512 592 : for (size_t i = 0; i < expr->subsequent_length(); i++) {
513 488 : coverage_slots_.push_back(
514 244 : generator_->AllocateNaryBlockCoverageSlotIfEnabled(expr, i));
515 : }
516 : }
517 :
518 : int GetSlotFor(size_t subsequent_expr_index) const {
519 80015 : if (generator_->block_coverage_builder_ == nullptr) {
520 : return BlockCoverageBuilder::kNoCoverageArraySlot;
521 : }
522 : DCHECK(coverage_slots_.size() > subsequent_expr_index);
523 220 : return coverage_slots_[subsequent_expr_index];
524 : }
525 :
526 : private:
527 : BytecodeGenerator* generator_;
528 : std::vector<int> coverage_slots_;
529 : };
530 :
531 2163022 : void BytecodeGenerator::ControlScope::PerformCommand(Command command,
532 : Statement* statement,
533 : int source_position) {
534 : ControlScope* current = this;
535 : do {
536 2607663 : if (current->Execute(command, statement, source_position)) {
537 2163078 : return;
538 : }
539 : current = current->outer();
540 444641 : } while (current != nullptr);
541 0 : UNREACHABLE();
542 : }
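// [Editor's note] Concrete example of the walk above: for a `return` inside a
// loop that is nested in a try-finally, ControlScopeForIteration and
// ControlScopeForBreakable decline CMD_RETURN, ControlScopeForTryFinally
// intercepts it (recording a dispatch token and leaving the try region via
// LeaveTry), and once the deferred command is re-applied after the finally
// block, ControlScopeForTopLevel handles it by emitting the actual return.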
543 :
544 0 : void BytecodeGenerator::ControlScope::PopContextToExpectedDepth() {
545 : // Pop context to the expected depth. Note that this can in fact pop multiple
546 : // contexts at once because the {PopContext} bytecode takes a saved register.
547 60800 : if (generator()->execution_context() != context()) {
548 9458 : generator()->builder()->PopContext(context()->reg());
549 : }
550 0 : }
551 :
552 : class BytecodeGenerator::RegisterAllocationScope final {
553 : public:
554 : explicit RegisterAllocationScope(BytecodeGenerator* generator)
555 : : generator_(generator),
556 : outer_next_register_index_(
557 44621439 : generator->register_allocator()->next_register_index()) {}
558 :
559 44618545 : ~RegisterAllocationScope() {
560 44618775 : generator_->register_allocator()->ReleaseRegisters(
561 : outer_next_register_index_);
562 : }
563 :
564 : BytecodeGenerator* generator() const { return generator_; }
565 :
566 : private:
567 : BytecodeGenerator* generator_;
568 : int outer_next_register_index_;
569 :
570 : DISALLOW_COPY_AND_ASSIGN(RegisterAllocationScope);
571 : };
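// [Editor's note] RegisterAllocationScope acts as a watermark: it records the
// allocator's next_register_index() on entry and releases every register
// allocated past that point on exit, so temporaries created while visiting a
// statement or expression do not leak into the surrounding code.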
572 :
573 : class BytecodeGenerator::AccumulatorPreservingScope final {
574 : public:
575 2457047 : explicit AccumulatorPreservingScope(BytecodeGenerator* generator,
576 : AccumulatorPreservingMode mode)
577 2457047 : : generator_(generator) {
578 2457047 : if (mode == AccumulatorPreservingMode::kPreserve) {
579 : saved_accumulator_register_ =
580 144 : generator_->register_allocator()->NewRegister();
581 144 : generator_->builder()->StoreAccumulatorInRegister(
582 144 : saved_accumulator_register_);
583 : }
584 2457047 : }
585 :
586 2457038 : ~AccumulatorPreservingScope() {
587 2457034 : if (saved_accumulator_register_.is_valid()) {
588 144 : generator_->builder()->LoadAccumulatorWithRegister(
589 144 : saved_accumulator_register_);
590 : }
591 : }
592 :
593 : private:
594 : BytecodeGenerator* generator_;
595 : Register saved_accumulator_register_;
596 :
597 : DISALLOW_COPY_AND_ASSIGN(AccumulatorPreservingScope);
598 : };
599 :
600 : // Scoped base class for determining how the result of an expression will be
601 : // used.
602 : class BytecodeGenerator::ExpressionResultScope {
603 : public:
604 : ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
605 : : outer_(generator->execution_result()),
606 : allocator_(generator),
607 : kind_(kind),
608 89242878 : type_hint_(TypeHint::kAny) {
609 : generator->set_execution_result(this);
610 : }
611 :
612 89237320 : ~ExpressionResultScope() {
613 44618775 : allocator_.generator()->set_execution_result(outer_);
614 44618545 : }
615 :
616 : bool IsEffect() const { return kind_ == Expression::kEffect; }
617 : bool IsValue() const { return kind_ == Expression::kValue; }
618 : bool IsTest() const { return kind_ == Expression::kTest; }
619 :
620 : TestResultScope* AsTest() {
621 : DCHECK(IsTest());
622 : return reinterpret_cast<TestResultScope*>(this);
623 : }
624 :
625 : // Specify expression always returns a Boolean result value.
626 : void SetResultIsBoolean() {
627 : DCHECK_EQ(type_hint_, TypeHint::kAny);
628 1243065 : type_hint_ = TypeHint::kBoolean;
629 : }
630 :
631 : void SetResultIsString() {
632 : DCHECK_EQ(type_hint_, TypeHint::kAny);
633 1759656 : type_hint_ = TypeHint::kString;
634 : }
635 :
636 : TypeHint type_hint() const { return type_hint_; }
637 :
638 : private:
639 : ExpressionResultScope* outer_;
640 : RegisterAllocationScope allocator_;
641 : Expression::Context kind_;
642 : TypeHint type_hint_;
643 :
644 : DISALLOW_COPY_AND_ASSIGN(ExpressionResultScope);
645 : };
646 :
647 : // Scoped class used when the current expression is not expected to
648 : // produce a result.
649 12849507 : class BytecodeGenerator::EffectResultScope final
650 : : public ExpressionResultScope {
651 : public:
652 : explicit EffectResultScope(BytecodeGenerator* generator)
653 : : ExpressionResultScope(generator, Expression::kEffect) {}
654 : };
655 :
656 : // Scoped class used when the result of the expression being evaluated
657 : // should go into the interpreter's accumulator.
658 30736570 : class BytecodeGenerator::ValueResultScope final : public ExpressionResultScope {
659 : public:
660 : explicit ValueResultScope(BytecodeGenerator* generator)
661 : : ExpressionResultScope(generator, Expression::kValue) {}
662 : };
663 :
664 : // Scoped class used when the result of the expression being evaluated
665 : // is only tested, with jumps to two branches.
666 1035159 : class BytecodeGenerator::TestResultScope final : public ExpressionResultScope {
667 : public:
668 : TestResultScope(BytecodeGenerator* generator, BytecodeLabels* then_labels,
669 : BytecodeLabels* else_labels, TestFallthrough fallthrough)
670 : : ExpressionResultScope(generator, Expression::kTest),
671 : result_consumed_by_test_(false),
672 : fallthrough_(fallthrough),
673 : then_labels_(then_labels),
674 1035155 : else_labels_(else_labels) {}
675 :
676 : // Used when code special-cases TestResultScope and consumes any
677 : // possible value by testing and jumping to a then/else label.
678 351989 : void SetResultConsumedByTest() { result_consumed_by_test_ = true; }
679 : bool result_consumed_by_test() { return result_consumed_by_test_; }
680 :
681 : // Inverts the control flow of the operation, swapping the then and else
682 : // labels and the fallthrough.
683 : void InvertControlFlow() {
684 : std::swap(then_labels_, else_labels_);
685 245042 : fallthrough_ = inverted_fallthrough();
686 : }
687 :
688 44107 : BytecodeLabel* NewThenLabel() { return then_labels_->New(); }
689 14317 : BytecodeLabel* NewElseLabel() { return else_labels_->New(); }
690 :
691 : BytecodeLabels* then_labels() const { return then_labels_; }
692 : BytecodeLabels* else_labels() const { return else_labels_; }
693 :
694 : void set_then_labels(BytecodeLabels* then_labels) {
695 : then_labels_ = then_labels;
696 : }
697 : void set_else_labels(BytecodeLabels* else_labels) {
698 : else_labels_ = else_labels;
699 : }
700 :
701 : TestFallthrough fallthrough() const { return fallthrough_; }
702 : TestFallthrough inverted_fallthrough() const {
703 245042 : switch (fallthrough_) {
704 : case TestFallthrough::kThen:
705 : return TestFallthrough::kElse;
706 : case TestFallthrough::kElse:
707 : return TestFallthrough::kThen;
708 : default:
709 : return TestFallthrough::kNone;
710 : }
711 : }
712 : void set_fallthrough(TestFallthrough fallthrough) {
713 : fallthrough_ = fallthrough;
714 : }
715 :
716 : private:
717 : bool result_consumed_by_test_;
718 : TestFallthrough fallthrough_;
719 : BytecodeLabels* then_labels_;
720 : BytecodeLabels* else_labels_;
721 :
722 : DISALLOW_COPY_AND_ASSIGN(TestResultScope);
723 : };
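// [Editor's note] Illustrative summary of the three result scopes above: in
// `f();` the call is visited under an EffectResultScope (result discarded),
// in `x = f()` the right-hand side is visited under a ValueResultScope
// (result left in the accumulator), and in `if (f()) ...` the condition is
// visited under a TestResultScope (result consumed by jumps to the then/else
// labels).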
724 :
725 : // Used to build a list of global declaration initial value pairs.
726 : class BytecodeGenerator::GlobalDeclarationsBuilder final : public ZoneObject {
727 : public:
728 : explicit GlobalDeclarationsBuilder(Zone* zone)
729 : : declarations_(0, zone),
730 : constant_pool_entry_(0),
731 2214054 : has_constant_pool_entry_(false) {}
732 :
733 : void AddFunctionDeclaration(const AstRawString* name, FeedbackSlot slot,
734 : int feedback_cell_index, FunctionLiteral* func) {
735 : DCHECK(!slot.IsInvalid());
736 490126 : declarations_.push_back(Declaration(name, slot, feedback_cell_index, func));
737 : }
738 :
739 : void AddUndefinedDeclaration(const AstRawString* name, FeedbackSlot slot) {
740 : DCHECK(!slot.IsInvalid());
741 2548317 : declarations_.push_back(Declaration(name, slot));
742 : }
743 :
744 106821 : Handle<FixedArray> AllocateDeclarations(UnoptimizedCompilationInfo* info,
745 : Handle<Script> script,
746 : Isolate* isolate) {
747 : DCHECK(has_constant_pool_entry_);
748 : int array_index = 0;
749 : Handle<FixedArray> data = isolate->factory()->NewFixedArray(
750 106821 : static_cast<int>(declarations_.size() * 4), AllocationType::kOld);
751 1404697 : for (const Declaration& declaration : declarations_) {
752 1297876 : FunctionLiteral* func = declaration.func;
753 : Handle<Object> initial_value;
754 1297876 : if (func == nullptr) {
755 : initial_value = isolate->factory()->undefined_value();
756 : } else {
757 212067 : initial_value = Compiler::GetSharedFunctionInfo(func, script, isolate);
758 : }
759 :
760 : // Return a null handle if any initial values can't be created. Caller
761 : // will set stack overflow.
762 1297876 : if (initial_value.is_null()) return Handle<FixedArray>();
763 :
764 3893628 : data->set(array_index++, *declaration.name->string());
765 1297876 : data->set(array_index++, Smi::FromInt(declaration.slot.ToInt()));
766 : Object undefined_or_literal_slot;
767 1297876 : if (declaration.feedback_cell_index_for_function == -1) {
768 1085809 : undefined_or_literal_slot = ReadOnlyRoots(isolate).undefined_value();
769 : } else {
770 : undefined_or_literal_slot =
771 212067 : Smi::FromInt(declaration.feedback_cell_index_for_function);
772 : }
773 1297876 : data->set(array_index++, undefined_or_literal_slot);
774 2595752 : data->set(array_index++, *initial_value);
775 : }
776 106821 : return data;
777 : }
778 :
779 : size_t constant_pool_entry() {
780 : DCHECK(has_constant_pool_entry_);
781 : return constant_pool_entry_;
782 : }
783 :
784 : void set_constant_pool_entry(size_t constant_pool_entry) {
785 : DCHECK(!empty());
786 : DCHECK(!has_constant_pool_entry_);
787 117378 : constant_pool_entry_ = constant_pool_entry;
788 117378 : has_constant_pool_entry_ = true;
789 : }
790 :
791 : bool empty() { return declarations_.empty(); }
792 :
793 : private:
794 : struct Declaration {
795 2214047 : Declaration() : slot(FeedbackSlot::Invalid()), func(nullptr) {}
796 : Declaration(const AstRawString* name, FeedbackSlot slot,
797 : int feedback_cell_index, FunctionLiteral* func)
798 : : name(name),
799 : slot(slot),
800 : feedback_cell_index_for_function(feedback_cell_index),
801 245055 : func(func) {}
802 : Declaration(const AstRawString* name, FeedbackSlot slot)
803 : : name(name),
804 : slot(slot),
805 : feedback_cell_index_for_function(-1),
806 1274151 : func(nullptr) {}
807 :
808 : const AstRawString* name;
809 : FeedbackSlot slot;
810 : // Only valid for function declarations. Specifies the index into the
811 : // closure_feedback_cell array used when creating closures of this
812 : // function.
813 : int feedback_cell_index_for_function;
814 : FunctionLiteral* func;
815 : };
816 : ZoneVector<Declaration> declarations_;
817 : size_t constant_pool_entry_;
818 : bool has_constant_pool_entry_;
819 : };
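// [Editor's note] Layout note for AllocateDeclarations above: each recorded
// declaration is flattened into four consecutive FixedArray slots:
//   [0] the declared name (a string),
//   [1] Smi(feedback slot index),
//   [2] undefined, or Smi(closure feedback cell index) for function
//       declarations,
//   [3] the initial value (undefined, or the function's SharedFunctionInfo),
// so the array handed to Runtime::kDeclareGlobals in VisitDeclarations holds
// 4 * N elements for N declarations.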
820 :
821 : class BytecodeGenerator::CurrentScope final {
822 : public:
823 : CurrentScope(BytecodeGenerator* generator, Scope* scope)
824 : : generator_(generator), outer_scope_(generator->current_scope()) {
825 5304801 : if (scope != nullptr) {
826 : DCHECK_EQ(outer_scope_, scope->outer_scope());
827 : generator_->set_current_scope(scope);
828 : }
829 : }
830 : ~CurrentScope() {
831 5304887 : if (outer_scope_ != generator_->current_scope()) {
832 : generator_->set_current_scope(outer_scope_);
833 : }
834 : }
835 :
836 : private:
837 : BytecodeGenerator* generator_;
838 : Scope* outer_scope_;
839 : };
840 :
841 : class BytecodeGenerator::FeedbackSlotCache : public ZoneObject {
842 : public:
843 : enum class SlotKind {
844 : kStoreGlobalSloppy,
845 : kStoreGlobalStrict,
846 : kStoreNamedStrict,
847 : kStoreNamedSloppy,
848 : kLoadProperty,
849 : kLoadGlobalNotInsideTypeof,
850 : kLoadGlobalInsideTypeof,
851 : kClosureFeedbackCell
852 : };
853 :
854 : explicit FeedbackSlotCache(Zone* zone) : map_(zone) {}
855 :
856 : void Put(SlotKind slot_kind, Variable* variable, int slot_index) {
857 : PutImpl(slot_kind, 0, variable, slot_index);
858 : }
859 : void Put(SlotKind slot_kind, AstNode* node, int slot_index) {
860 : PutImpl(slot_kind, 0, node, slot_index);
861 : }
862 : void Put(SlotKind slot_kind, int variable_index, const AstRawString* name,
863 : int slot_index) {
864 : PutImpl(slot_kind, variable_index, name, slot_index);
865 : }
866 :
867 : int Get(SlotKind slot_kind, Variable* variable) const {
868 : return GetImpl(slot_kind, 0, variable);
869 : }
870 : int Get(SlotKind slot_kind, AstNode* node) const {
871 : return GetImpl(slot_kind, 0, node);
872 : }
873 : int Get(SlotKind slot_kind, int variable_index,
874 : const AstRawString* name) const {
875 : return GetImpl(slot_kind, variable_index, name);
876 : }
877 :
878 : private:
879 : typedef std::tuple<SlotKind, int, const void*> Key;
880 :
881 : void PutImpl(SlotKind slot_kind, int index, const void* node,
882 : int slot_index) {
883 : Key key = std::make_tuple(slot_kind, index, node);
884 : auto entry = std::make_pair(key, slot_index);
885 : map_.insert(entry);
886 : }
887 :
888 : int GetImpl(SlotKind slot_kind, int index, const void* node) const {
889 : Key key = std::make_tuple(slot_kind, index, node);
890 : auto iter = map_.find(key);
891 12868068 : if (iter != map_.end()) {
892 5154878 : return iter->second;
893 : }
894 : return -1;
895 : }
896 :
897 : ZoneMap<Key, int> map_;
898 : };
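// [Editor's note] The cache above memoizes feedback slot indices keyed by
// (SlotKind, index, AST node or variable), with -1 signalling a miss, so
// repeated accesses to the same variable or property within a function can
// share one feedback slot instead of allocating a fresh one each time (see
// e.g. GetCachedLoadGlobalICSlot in VisitVariableDeclaration below).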
899 :
900 : class BytecodeGenerator::IteratorRecord final {
901 : public:
902 : IteratorRecord(Register object_register, Register next_register,
903 : IteratorType type = IteratorType::kNormal)
904 : : type_(type), object_(object_register), next_(next_register) {
905 : DCHECK(object_.is_valid() && next_.is_valid());
906 : }
907 :
908 : inline IteratorType type() const { return type_; }
909 : inline Register object() const { return object_; }
910 : inline Register next() const { return next_; }
911 :
912 : private:
913 : IteratorType type_;
914 : Register object_;
915 : Register next_;
916 : };
917 :
918 : #ifdef DEBUG
919 :
920 : static bool IsInEagerLiterals(
921 : FunctionLiteral* literal,
922 : const std::vector<FunctionLiteral*>& eager_literals) {
923 : for (FunctionLiteral* eager_literal : eager_literals) {
924 : if (literal == eager_literal) return true;
925 : }
926 : return false;
927 : }
928 :
929 : #endif // DEBUG
930 :
931 2096679 : BytecodeGenerator::BytecodeGenerator(
932 : UnoptimizedCompilationInfo* info,
933 : const AstStringConstants* ast_string_constants,
934 : std::vector<FunctionLiteral*>* eager_inner_literals)
935 : : zone_(info->zone()),
936 : builder_(zone(), info->num_parameters_including_this(),
937 2096686 : info->scope()->num_stack_slots(), info->feedback_vector_spec(),
938 : info->SourcePositionRecordingMode()),
939 : info_(info),
940 : ast_string_constants_(ast_string_constants),
941 2096692 : closure_scope_(info->scope()),
942 2096691 : current_scope_(info->scope()),
943 : eager_inner_literals_(eager_inner_literals),
944 : feedback_slot_cache_(new (zone()) FeedbackSlotCache(zone())),
945 : globals_builder_(new (zone()) GlobalDeclarationsBuilder(zone())),
946 : block_coverage_builder_(nullptr),
947 : global_declarations_(0, zone()),
948 : function_literals_(0, zone()),
949 : native_function_literals_(0, zone()),
950 : object_literals_(0, zone()),
951 : array_literals_(0, zone()),
952 : class_literals_(0, zone()),
953 : template_objects_(0, zone()),
954 : execution_control_(nullptr),
955 : execution_context_(nullptr),
956 : execution_result_(nullptr),
957 : incoming_new_target_or_generator_(),
958 : dummy_feedback_slot_(feedback_spec(), FeedbackSlotKind::kCompareOp),
959 : generator_jump_table_(nullptr),
960 : suspend_count_(0),
961 : loop_depth_(0),
962 16773454 : catch_prediction_(HandlerTable::UNCAUGHT) {
963 : DCHECK_EQ(closure_scope(), closure_scope()->GetClosureScope());
964 2096680 : if (info->has_source_range_map()) {
965 : block_coverage_builder_ = new (zone())
966 892 : BlockCoverageBuilder(zone(), builder(), info->source_range_map());
967 : }
968 2096680 : }
969 :
970 2072810 : Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
971 : Isolate* isolate, Handle<Script> script) {
972 : DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
973 : #ifdef DEBUG
974 : // Unoptimized compilation should be context-independent. Verify that we don't
975 : // access the native context by nulling it out during finalization.
976 : SaveAndSwitchContext save(isolate, Context());
977 : #endif
978 :
979 2072810 : AllocateDeferredConstants(isolate, script);
980 :
981 2072801 : if (block_coverage_builder_) {
982 892 : info()->set_coverage_info(
983 : isolate->factory()->NewCoverageInfo(block_coverage_builder_->slots()));
984 892 : if (FLAG_trace_block_coverage) {
985 0 : info()->coverage_info()->Print(info()->literal()->GetDebugName());
986 : }
987 : }
988 :
989 2072801 : if (HasStackOverflow()) return Handle<BytecodeArray>();
990 2072801 : Handle<BytecodeArray> bytecode_array = builder()->ToBytecodeArray(isolate);
991 :
992 2072819 : if (incoming_new_target_or_generator_.is_valid()) {
993 : bytecode_array->set_incoming_new_target_or_generator_register(
994 : incoming_new_target_or_generator_);
995 : }
996 :
997 2072819 : return bytecode_array;
998 : }
999 :
1000 2072802 : void BytecodeGenerator::AllocateDeferredConstants(Isolate* isolate,
1001 : Handle<Script> script) {
1002 : // Build global declaration pair arrays.
1003 2179622 : for (GlobalDeclarationsBuilder* globals_builder : global_declarations_) {
1004 : Handle<FixedArray> declarations =
1005 106821 : globals_builder->AllocateDeclarations(info(), script, isolate);
1006 106821 : if (declarations.is_null()) return SetStackOverflow();
1007 106820 : builder()->SetDeferredConstantPoolEntry(
1008 106821 : globals_builder->constant_pool_entry(), declarations);
1009 : }
1010 :
1011 : // Find or build shared function infos.
1012 4465125 : for (std::pair<FunctionLiteral*, size_t> literal : function_literals_) {
1013 : FunctionLiteral* expr = literal.first;
1014 : Handle<SharedFunctionInfo> shared_info =
1015 2392320 : Compiler::GetSharedFunctionInfo(expr, script, isolate);
1016 2392325 : if (shared_info.is_null()) return SetStackOverflow();
1017 2392325 : builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
1018 : }
1019 :
1020 : // Find or build shared function infos for the native function templates.
1021 2074619 : for (std::pair<NativeFunctionLiteral*, size_t> literal :
1022 1814 : native_function_literals_) {
1023 : NativeFunctionLiteral* expr = literal.first;
1024 : v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
1025 :
1026 : // Compute the function template for the native function.
1027 : v8::Local<v8::FunctionTemplate> info =
1028 : expr->extension()->GetNativeFunctionTemplate(
1029 3628 : v8_isolate, Utils::ToLocal(expr->name()));
1030 : DCHECK(!info.IsEmpty());
1031 :
1032 : Handle<SharedFunctionInfo> shared_info =
1033 : FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
1034 1814 : isolate, Utils::OpenHandle(*info), expr->name());
1035 : DCHECK(!shared_info.is_null());
1036 1814 : builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
1037 : }
1038 :
1039 : // Build object literal constant properties
1040 2260207 : for (std::pair<ObjectLiteral*, size_t> literal : object_literals_) {
1041 : ObjectLiteral* object_literal = literal.first;
1042 187401 : if (object_literal->properties_count() > 0) {
1043 : // If constant properties is an empty fixed array, we've already added it
1044 : // to the constant pool when visiting the object literal.
1045 : Handle<ObjectBoilerplateDescription> constant_properties =
1046 : object_literal->GetOrBuildBoilerplateDescription(isolate);
1047 :
1048 187402 : builder()->SetDeferredConstantPoolEntry(literal.second,
1049 187401 : constant_properties);
1050 : }
1051 : }
1052 :
1053 : // Build array literal constant elements
1054 2225047 : for (std::pair<ArrayLiteral*, size_t> literal : array_literals_) {
1055 : ArrayLiteral* array_literal = literal.first;
1056 : Handle<ArrayBoilerplateDescription> constant_elements =
1057 : array_literal->GetOrBuildBoilerplateDescription(isolate);
1058 152241 : builder()->SetDeferredConstantPoolEntry(literal.second, constant_elements);
1059 : }
1060 :
1061 : // Build class literal boilerplates.
1062 2112902 : for (std::pair<ClassLiteral*, size_t> literal : class_literals_) {
1063 : ClassLiteral* class_literal = literal.first;
1064 : Handle<ClassBoilerplate> class_boilerplate =
1065 40096 : ClassBoilerplate::BuildClassBoilerplate(isolate, class_literal);
1066 40096 : builder()->SetDeferredConstantPoolEntry(literal.second, class_boilerplate);
1067 : }
1068 :
1069 : // Build template literals.
1070 2074537 : for (std::pair<GetTemplateObject*, size_t> literal : template_objects_) {
1071 : GetTemplateObject* get_template_object = literal.first;
1072 : Handle<TemplateObjectDescription> description =
1073 1731 : get_template_object->GetOrBuildDescription(isolate);
1074 1731 : builder()->SetDeferredConstantPoolEntry(literal.second, description);
1075 : }
1076 : }
1077 :
1078 2096676 : void BytecodeGenerator::GenerateBytecode(uintptr_t stack_limit) {
1079 : DisallowHeapAllocation no_allocation;
1080 : DisallowHandleAllocation no_handles;
1081 : DisallowHandleDereference no_deref;
1082 :
1083 : InitializeAstVisitor(stack_limit);
1084 :
1085 : // Initialize the incoming context.
1086 4193336 : ContextScope incoming_context(this, closure_scope());
1087 :
1088 : // Initialize control scope.
1089 : ControlScopeForTopLevel control(this);
1090 :
1091 : RegisterAllocationScope register_scope(this);
1092 :
1093 2096682 : AllocateTopLevelRegisters();
1094 :
1095 2096683 : if (info()->literal()->CanSuspend()) {
1096 9541 : BuildGeneratorPrologue();
1097 : }
1098 :
1099 2096676 : if (closure_scope()->NeedsContext()) {
1100 : // Push a new inner context scope for the function.
1101 190411 : BuildNewLocalActivationContext();
1102 380820 : ContextScope local_function_context(this, closure_scope());
1103 190410 : BuildLocalActivationContextInitialization();
1104 190410 : GenerateBytecodeBody();
1105 : } else {
1106 1906265 : GenerateBytecodeBody();
1107 : }
1108 :
1109 : // Check that we are not falling off the end.
1110 : DCHECK(builder()->RemainderOfBlockIsDead());
1111 2096658 : }
1112 :
1113 2096670 : void BytecodeGenerator::GenerateBytecodeBody() {
1114 : // Build the arguments object if it is used.
1115 2096670 : VisitArgumentsObject(closure_scope()->arguments());
1116 :
1117 : // Build rest arguments array if it is used.
1118 : Variable* rest_parameter = closure_scope()->rest_parameter();
1119 2096670 : VisitRestArgumentsArray(rest_parameter);
1120 :
1121 : // Build assignment to the function name or {.this_function}
1122 : // variables if used.
1123 2096683 : VisitThisFunctionVariable(closure_scope()->function_var());
1124 2096679 : VisitThisFunctionVariable(closure_scope()->this_function_var());
1125 :
1126 : // Build assignment to {new.target} variable if it is used.
1127 2096677 : VisitNewTargetVariable(closure_scope()->new_target_var());
1128 :
1129 : // Create a generator object if necessary and initialize the
1130 : // {.generator_object} variable.
1131 4193330 : if (IsResumableFunction(info()->literal()->kind())) {
1132 11220 : BuildGeneratorObjectVariableInitialization();
1133 : }
1134 :
1135 : // Emit tracing call if requested to do so.
1136 2096666 : if (FLAG_trace) builder()->CallRuntime(Runtime::kTraceEnter);
1137 :
1138 : // Emit type profile call.
1139 2096666 : if (info()->collect_type_profile()) {
1140 68 : feedback_spec()->AddTypeProfileSlot();
1141 : int num_parameters = closure_scope()->num_parameters();
1142 260 : for (int i = 0; i < num_parameters; i++) {
1143 96 : Register parameter(builder()->Parameter(i));
1144 96 : builder()->LoadAccumulatorWithRegister(parameter).CollectTypeProfile(
1145 96 : closure_scope()->parameter(i)->initializer_position());
1146 : }
1147 : }
1148 :
1149 : // Visit declarations within the function scope.
1150 2096666 : VisitDeclarations(closure_scope()->declarations());
1151 :
1152 : // Emit initializing assignments for module namespace imports (if any).
1153 2096663 : VisitModuleNamespaceImports();
1154 :
1155 : // Perform a stack-check before the body.
1156 4193330 : builder()->StackCheck(info()->literal()->start_position());
1157 :
1158 : // The derived constructor case is handled in VisitCallSuper.
1159 2114364 : if (IsBaseConstructor(function_kind()) &&
1160 : info()->literal()->requires_instance_members_initializer()) {
1161 565 : BuildInstanceMemberInitialization(Register::function_closure(),
1162 565 : builder()->Receiver());
1163 : }
1164 :
1165 : // Visit statements in the function body.
1166 2096683 : VisitStatements(info()->literal()->body());
1167 :
1168 : // Emit an implicit return instruction in case control flow can fall off the
1169 : // end of the function without an explicit return being present on all paths.
1170 2096623 : if (!builder()->RemainderOfBlockIsDead()) {
1171 373622 : builder()->LoadUndefined();
1172 373624 : BuildReturn();
1173 : }
1174 2096629 : }
1175 :
1176 2096675 : void BytecodeGenerator::AllocateTopLevelRegisters() {
1177 4193353 : if (IsResumableFunction(info()->literal()->kind())) {
1178 : // Either directly use generator_object_var or allocate a new register for
1179 : // the incoming generator object.
1180 : Variable* generator_object_var = closure_scope()->generator_object_var();
1181 11220 : if (generator_object_var->location() == VariableLocation::LOCAL) {
1182 : incoming_new_target_or_generator_ =
1183 11220 : GetRegisterForLocalVariable(generator_object_var);
1184 : } else {
1185 0 : incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1186 : }
1187 2085458 : } else if (closure_scope()->new_target_var()) {
1188 : // Either directly use new_target_var or allocate a new register for
1189 : // the incoming new target object.
1190 : Variable* new_target_var = closure_scope()->new_target_var();
1191 99026 : if (new_target_var->location() == VariableLocation::LOCAL) {
1192 : incoming_new_target_or_generator_ =
1193 4435 : GetRegisterForLocalVariable(new_target_var);
1194 : } else {
1195 94591 : incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1196 : }
1197 : }
1198 2096678 : }
1199 :
1200 9541 : void BytecodeGenerator::BuildGeneratorPrologue() {
1201 : DCHECK_GT(info()->literal()->suspend_count(), 0);
1202 : DCHECK(generator_object().is_valid());
1203 : generator_jump_table_ =
1204 9541 : builder()->AllocateJumpTable(info()->literal()->suspend_count(), 0);
1205 :
1206 : // If the generator is not undefined, this is a resume, so perform state
1207 : // dispatch.
1208 9541 : builder()->SwitchOnGeneratorState(generator_object(), generator_jump_table_);
1209 :
1210 : // Otherwise, fall through to the ordinary function prologue, after which we
1211 : // will run into the generator object creation and other extra code inserted
1212 : // by the parser.
1213 9541 : }
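// [Editor's note] Illustrative: for a resumable function the prologue above
// allocates one jump-table entry per suspend point and emits
// SwitchOnGeneratorState. On first entry the generator object register holds
// undefined, so control falls through into the ordinary prologue; on resume
// the switch dispatches to the entry recorded for the suspend point at which
// the generator was paused.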
1214 :
1215 5189406 : void BytecodeGenerator::VisitBlock(Block* stmt) {
1216 : // Visit declarations and statements.
1217 : CurrentScope current_scope(this, stmt->scope());
1218 5189406 : if (stmt->scope() != nullptr && stmt->scope()->NeedsContext()) {
1219 24296 : BuildNewLocalBlockContext(stmt->scope());
1220 48592 : ContextScope scope(this, stmt->scope());
1221 24296 : VisitBlockDeclarationsAndStatements(stmt);
1222 : } else {
1223 5165110 : VisitBlockDeclarationsAndStatements(stmt);
1224 : }
1225 5189488 : }
1226 :
1227 5189445 : void BytecodeGenerator::VisitBlockDeclarationsAndStatements(Block* stmt) {
1228 5189445 : BlockBuilder block_builder(builder(), block_coverage_builder_, stmt);
1229 : ControlScopeForBreakable execution_control(this, stmt, &block_builder);
1230 5189445 : if (stmt->scope() != nullptr) {
1231 190315 : VisitDeclarations(stmt->scope()->declarations());
1232 : }
1233 5189445 : VisitStatements(stmt->statements());
1234 5189480 : }
1235 :
1236 6833801 : void BytecodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
1237 : Variable* variable = decl->var();
1238 : // Unused variables don't need to be visited.
1239 6833801 : if (!variable->is_used()) return;
1240 :
1241 6414470 : switch (variable->location()) {
1242 : case VariableLocation::UNALLOCATED: {
1243 : DCHECK(!variable->binding_needs_init());
1244 : FeedbackSlot slot =
1245 1274145 : GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
1246 : globals_builder()->AddUndefinedDeclaration(variable->raw_name(), slot);
1247 : break;
1248 : }
1249 : case VariableLocation::LOCAL:
1250 1046372 : if (variable->binding_needs_init()) {
1251 3266 : Register destination(builder()->Local(variable->index()));
1252 3266 : builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1253 : }
1254 : break;
1255 : case VariableLocation::PARAMETER:
1256 2289940 : if (variable->binding_needs_init()) {
1257 0 : Register destination(builder()->Parameter(variable->index()));
1258 0 : builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1259 : }
1260 : break;
1261 : case VariableLocation::CONTEXT:
1262 1504397 : if (variable->binding_needs_init()) {
1263 : DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1264 728306 : builder()->LoadTheHole().StoreContextSlot(execution_context()->reg(),
1265 728351 : variable->index(), 0);
1266 : }
1267 : break;
1268 : case VariableLocation::LOOKUP: {
1269 : DCHECK_EQ(VariableMode::kDynamic, variable->mode());
1270 : DCHECK(!variable->binding_needs_init());
1271 :
1272 281883 : Register name = register_allocator()->NewRegister();
1273 :
1274 : builder()
1275 281883 : ->LoadLiteral(variable->raw_name())
1276 281883 : .StoreAccumulatorInRegister(name)
1277 281883 : .CallRuntime(Runtime::kDeclareEvalVar, name);
1278 : break;
1279 : }
1280 : case VariableLocation::MODULE:
1281 35057 : if (variable->IsExport() && variable->binding_needs_init()) {
1282 17068 : builder()->LoadTheHole();
1283 17068 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1284 : }
1285 : // Nothing to do for imports.
1286 : break;
1287 : }
1288 : }
1289 :
1290 556507 : void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
1291 : Variable* variable = decl->var();
1292 : DCHECK(variable->mode() == VariableMode::kLet ||
1293 : variable->mode() == VariableMode::kVar ||
1294 : variable->mode() == VariableMode::kDynamic);
1295 : // Unused variables don't need to be visited.
1296 556507 : if (!variable->is_used()) return;
1297 :
1298 550964 : switch (variable->location()) {
1299 : case VariableLocation::UNALLOCATED: {
1300 : FeedbackSlot slot =
1301 245050 : GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
1302 245056 : int literal_index = GetCachedCreateClosureSlot(decl->fun());
1303 : globals_builder()->AddFunctionDeclaration(variable->raw_name(), slot,
1304 : literal_index, decl->fun());
1305 245071 : AddToEagerLiteralsIfEager(decl->fun());
1306 : break;
1307 : }
1308 : case VariableLocation::PARAMETER:
1309 : case VariableLocation::LOCAL: {
1310 23912 : VisitFunctionLiteral(decl->fun());
1311 23912 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1312 23912 : break;
1313 : }
1314 : case VariableLocation::CONTEXT: {
1315 : DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1316 274446 : VisitFunctionLiteral(decl->fun());
1317 : builder()->StoreContextSlot(execution_context()->reg(), variable->index(),
1318 274446 : 0);
1319 274446 : break;
1320 : }
1321 : case VariableLocation::LOOKUP: {
1322 7186 : RegisterList args = register_allocator()->NewRegisterList(2);
1323 : builder()
1324 7186 : ->LoadLiteral(variable->raw_name())
1325 7186 : .StoreAccumulatorInRegister(args[0]);
1326 7186 : VisitFunctionLiteral(decl->fun());
1327 7186 : builder()->StoreAccumulatorInRegister(args[1]).CallRuntime(
1328 7186 : Runtime::kDeclareEvalFunction, args);
1329 : break;
1330 : }
1331 : case VariableLocation::MODULE:
1332 : DCHECK_EQ(variable->mode(), VariableMode::kLet);
1333 : DCHECK(variable->IsExport());
1334 370 : VisitForAccumulatorValue(decl->fun());
1335 370 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1336 370 : break;
1337 : }
1338 : DCHECK_IMPLIES(decl->fun()->ShouldEagerCompile(),
1339 : IsInEagerLiterals(decl->fun(), *eager_inner_literals_));
1340 : }
1341 :
1342 2096665 : void BytecodeGenerator::VisitModuleNamespaceImports() {
1343 4192073 : if (!closure_scope()->is_module_scope()) return;
1344 :
1345 : RegisterAllocationScope register_scope(this);
1346 1257 : Register module_request = register_allocator()->NewRegister();
1347 :
1348 1257 : ModuleDescriptor* descriptor = closure_scope()->AsModuleScope()->module();
1349 1397 : for (auto entry : descriptor->namespace_imports()) {
1350 : builder()
1351 280 : ->LoadLiteral(Smi::FromInt(entry->module_request))
1352 140 : .StoreAccumulatorInRegister(module_request)
1353 140 : .CallRuntime(Runtime::kGetModuleNamespace, module_request);
1354 140 : Variable* var = closure_scope()->LookupInModule(entry->local_name);
1355 140 : BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kElided);
1356 : }
1357 : }
1358 :
1359 2329697 : void BytecodeGenerator::VisitDeclarations(Declaration::List* declarations) {
1360 : RegisterAllocationScope register_scope(this);
1361 : DCHECK(globals_builder()->empty());
1362 9719983 : for (Declaration* decl : *declarations) {
1363 : RegisterAllocationScope register_scope(this);
1364 7390287 : Visit(decl);
1365 : }
1366 2329696 : if (globals_builder()->empty()) return;
1367 :
1368 117373 : globals_builder()->set_constant_pool_entry(
1369 : builder()->AllocateDeferredConstantPoolEntry());
1370 117378 : int encoded_flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
1371 117378 : DeclareGlobalsNativeFlag::encode(info()->is_native());
1372 :
1373 : // Emit code to declare globals.
1374 117378 : RegisterList args = register_allocator()->NewRegisterList(3);
1375 : builder()
1376 117374 : ->LoadConstantPoolEntry(globals_builder()->constant_pool_entry())
1377 117373 : .StoreAccumulatorInRegister(args[0])
1378 117374 : .LoadLiteral(Smi::FromInt(encoded_flags))
1379 117375 : .StoreAccumulatorInRegister(args[1])
1380 234754 : .MoveRegister(Register::function_closure(), args[2])
1381 117373 : .CallRuntime(Runtime::kDeclareGlobals, args);
1382 :
1383 : // Push and reset globals builder.
1384 234751 : global_declarations_.push_back(globals_builder());
1385 117375 : globals_builder_ = new (zone()) GlobalDeclarationsBuilder(zone());
1386 : }
1387 :
1388 7366414 : void BytecodeGenerator::VisitStatements(
1389 : const ZonePtrList<Statement>* statements) {
1390 37779992 : for (int i = 0; i < statements->length(); i++) {
1391 : // Allocate an outer register allocation scope for the statement.
1392 : RegisterAllocationScope allocation_scope(this);
1393 17148524 : Statement* stmt = statements->at(i);
1394 17148524 : Visit(stmt);
1395 17148344 : if (builder()->RemainderOfBlockIsDead()) break;
1396 : }
1397 7366187 : }
1398 :
1399 10157010 : void BytecodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
1400 : builder()->SetStatementPosition(stmt);
1401 10157010 : VisitForEffect(stmt->expression());
1402 10156894 : }
1403 :
1404 0 : void BytecodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {}
1405 :
1406 578157 : void BytecodeGenerator::VisitIfStatement(IfStatement* stmt) {
1407 : ConditionalControlFlowBuilder conditional_builder(
1408 1734471 : builder(), block_coverage_builder_, stmt);
1409 : builder()->SetStatementPosition(stmt);
1410 :
1411 578157 : if (stmt->condition()->ToBooleanIsTrue()) {
1412 : // Generate then block unconditionally as always true.
1413 429 : conditional_builder.Then();
1414 429 : Visit(stmt->then_statement());
1415 577728 : } else if (stmt->condition()->ToBooleanIsFalse()) {
1416 : // Generate else block unconditionally if it exists.
1417 8301 : if (stmt->HasElseStatement()) {
1418 7043 : conditional_builder.Else();
1419 7043 : Visit(stmt->else_statement());
1420 : }
1421 : } else {
1422 : // TODO(oth): If then statement is BreakStatement or
1423 : // ContinueStatement we can reduce number of generated
1424 : // jump/jump_ifs here. See BasicLoops test.
1425 : VisitForTest(stmt->condition(), conditional_builder.then_labels(),
1426 569427 : conditional_builder.else_labels(), TestFallthrough::kThen);
1427 :
1428 569429 : conditional_builder.Then();
1429 569429 : Visit(stmt->then_statement());
1430 :
1431 569427 : if (stmt->HasElseStatement()) {
1432 42886 : conditional_builder.JumpToEnd();
1433 42886 : conditional_builder.Else();
1434 42886 : Visit(stmt->else_statement());
1435 : }
1436 : }
1437 578157 : }
1438 :
1439 0 : void BytecodeGenerator::VisitSloppyBlockFunctionStatement(
1440 : SloppyBlockFunctionStatement* stmt) {
1441 3072 : Visit(stmt->statement());
1442 0 : }
1443 :
1444 3612 : void BytecodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
1445 : AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1446 : builder()->SetStatementPosition(stmt);
1447 : execution_control()->Continue(stmt->target());
1448 3612 : }
1449 :
1450 46052 : void BytecodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
1451 : AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1452 : builder()->SetStatementPosition(stmt);
1453 : execution_control()->Break(stmt->target());
1454 46052 : }
1455 :
1456 2059648 : void BytecodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
1457 : AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1458 : builder()->SetStatementPosition(stmt);
1459 2059680 : VisitForAccumulatorValue(stmt->expression());
1460 2059710 : if (stmt->is_async_return()) {
1461 : execution_control()->AsyncReturnAccumulator(stmt->end_position());
1462 : } else {
1463 : execution_control()->ReturnAccumulator(stmt->end_position());
1464 : }
1465 2059721 : }
1466 :
1467 2962 : void BytecodeGenerator::VisitWithStatement(WithStatement* stmt) {
1468 : builder()->SetStatementPosition(stmt);
1469 2962 : VisitForAccumulatorValue(stmt->expression());
1470 2962 : BuildNewLocalWithContext(stmt->scope());
1471 2962 : VisitInScope(stmt->statement(), stmt->scope());
1472 2962 : }
1473 :
1474 10627 : void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1475 : // We need this scope because we visit for register values. We have to
1476 : // maintain an execution result scope where registers can be allocated.
1477 : ZonePtrList<CaseClause>* clauses = stmt->cases();
1478 : SwitchBuilder switch_builder(builder(), block_coverage_builder_, stmt,
1479 21254 : clauses->length());
1480 : ControlScopeForBreakable scope(this, stmt, &switch_builder);
1481 : int default_index = -1;
1482 :
1483 : builder()->SetStatementPosition(stmt);
1484 :
1485 : // Keep the switch value in a register until a case matches.
1486 10627 : Register tag = VisitForRegisterValue(stmt->tag());
1487 : FeedbackSlot slot = clauses->length() > 0
1488 : ? feedback_spec()->AddCompareICSlot()
1489 10627 : : FeedbackSlot::Invalid();
1490 :
1491 : // Iterate over all cases and create nodes for label comparison.
1492 171021 : for (int i = 0; i < clauses->length(); i++) {
1493 80197 : CaseClause* clause = clauses->at(i);
1494 :
1495 : // The default clause is not a test; remember its index.
1496 80197 : if (clause->is_default()) {
1497 : default_index = i;
1498 : continue;
1499 : }
1500 :
1501 : // Perform the label comparison as if via '===' with the tag.
1502 72736 : VisitForAccumulatorValue(clause->label());
1503 : builder()->CompareOperation(Token::Value::EQ_STRICT, tag,
1504 72736 : feedback_index(slot));
1505 72736 : switch_builder.Case(ToBooleanMode::kAlreadyBoolean, i);
1506 : }
1507 :
1508 10627 : if (default_index >= 0) {
1509 : // Emit default jump if there is a default case.
1510 7461 : switch_builder.DefaultAt(default_index);
1511 : } else {
1512 : // Otherwise none of the cases matched, so jump to
1513 : // the end.
1514 : switch_builder.Break();
1515 : }
1516 :
1517 : // Iterate over all cases and create the case bodies.
1518 171021 : for (int i = 0; i < clauses->length(); i++) {
1519 80197 : CaseClause* clause = clauses->at(i);
1520 80197 : switch_builder.SetCaseTarget(i, clause);
1521 80197 : VisitStatements(clause->statements());
1522 : }
1523 10627 : }
1524 :
1525 : template <typename TryBodyFunc, typename CatchBodyFunc>
1526 107810 : void BytecodeGenerator::BuildTryCatch(
1527 : TryBodyFunc try_body_func, CatchBodyFunc catch_body_func,
1528 : HandlerTable::CatchPrediction catch_prediction,
1529 : TryCatchStatement* stmt_for_coverage) {
1530 : TryCatchBuilder try_control_builder(
1531 : builder(),
1532 : stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
1533 215619 : stmt_for_coverage, catch_prediction);
1534 :
1535 : // Preserve the context in a dedicated register, so that it can be restored
1536 : // when the handler is entered by the stack-unwinding machinery.
1537 : // TODO(mstarzinger): Be smarter about register allocation.
1538 107811 : Register context = register_allocator()->NewRegister();
1539 107811 : builder()->MoveRegister(Register::current_context(), context);
1540 :
1541 : // Evaluate the try-block inside a control scope. This simulates a handler
1542 : // that is intercepting 'throw' control commands.
1543 107812 : try_control_builder.BeginTry(context);
1544 : {
1545 : ControlScopeForTryCatch scope(this, &try_control_builder);
1546 38001 : try_body_func();
1547 : }
1548 107812 : try_control_builder.EndTry();
1549 :
1550 107808 : catch_body_func(context);
1551 :
1552 107811 : try_control_builder.EndCatch();
1553 107809 : }
1554 :
1555 : template <typename TryBodyFunc, typename FinallyBodyFunc>
1556 41425 : void BytecodeGenerator::BuildTryFinally(
1557 : TryBodyFunc try_body_func, FinallyBodyFunc finally_body_func,
1558 : HandlerTable::CatchPrediction catch_prediction,
1559 : TryFinallyStatement* stmt_for_coverage) {
1560 : // We can't know whether the finally block will override ("catch") an
1561 : // exception thrown in the try block, so we just adopt the outer prediction.
1562 : TryFinallyBuilder try_control_builder(
1563 : builder(),
1564 : stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
1565 82849 : stmt_for_coverage, catch_prediction);
1566 :
1567 : // We keep a record of all paths that enter the finally-block to be able to
1568 : // dispatch to the correct continuation point after the statements in the
1569 : // finally-block have been evaluated.
1570 : //
1571 : // The try-finally construct can enter the finally-block in three ways:
1572 : // 1. By exiting the try-block normally, falling through at the end.
1573 : // 2. By exiting the try-block with a function-local control flow transfer
1574 : // (i.e. through break/continue/return statements).
1575 : // 3. By exiting the try-block with a thrown exception.
1576 : //
1577 : // The result register semantics depend on how the block was entered:
1578 : // - ReturnStatement: It represents the return value being returned.
1579 : // - ThrowStatement: It represents the exception being thrown.
1580 : // - BreakStatement/ContinueStatement: Undefined and not used.
1581 : // - Falling through into finally-block: Undefined and not used.
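 : //
 : // A minimal illustrative example (x and cleanup() are placeholders):
 : //
 : //   try { if (x) return 42; } finally { cleanup(); }
 : //
 : // The finally-block is reached either by falling off the end of the
 : // try-block (x falsy) or via the return (x truthy). In the latter case the
 : // result register holds 42 and the token register selects the deferred
 : // return command that ApplyDeferredCommands() re-issues once the
 : // finally-block has run.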
1582 41425 : Register token = register_allocator()->NewRegister();
1583 41425 : Register result = register_allocator()->NewRegister();
1584 : ControlScope::DeferredCommands commands(this, token, result);
1585 :
1586 : // Preserve the context in a dedicated register, so that it can be restored
1587 : // when the handler is entered by the stack-unwinding machinery.
1588 : // TODO(mstarzinger): Be smarter about register allocation.
1589 41425 : Register context = register_allocator()->NewRegister();
1590 41425 : builder()->MoveRegister(Register::current_context(), context);
1591 :
1592 : // Evaluate the try-block inside a control scope. This simulates a handler
1593 : // that is intercepting all control commands.
1594 41425 : try_control_builder.BeginTry(context);
1595 : {
1596 : ControlScopeForTryFinally scope(this, &try_control_builder, &commands);
1597 38001 : try_body_func();
1598 : }
1599 41425 : try_control_builder.EndTry();
1600 :
1601 : // Record fall-through and exception cases.
1602 41425 : commands.RecordFallThroughPath();
1603 41425 : try_control_builder.LeaveTry();
1604 41425 : try_control_builder.BeginHandler();
1605 : commands.RecordHandlerReThrowPath();
1606 :
1607 : // Pending message object is saved on entry.
1608 41425 : try_control_builder.BeginFinally();
1609 41424 : Register message = context; // Reuse register.
1610 :
1611 : // Clear message object as we enter the finally block.
1612 41424 : builder()->LoadTheHole().SetPendingMessage().StoreAccumulatorInRegister(
1613 : message);
1614 :
1615 : // Evaluate the finally-block.
1616 : finally_body_func(token);
1617 41425 : try_control_builder.EndFinally();
1618 :
1619 : // Pending message object is restored on exit.
1620 41425 : builder()->LoadAccumulatorWithRegister(message).SetPendingMessage();
1621 :
1622 : // Dynamic dispatch after the finally-block.
1623 41425 : commands.ApplyDeferredCommands();
1624 41424 : }
1625 :
1626 260129 : void BytecodeGenerator::VisitIterationBody(IterationStatement* stmt,
1627 : LoopBuilder* loop_builder) {
1628 260129 : loop_builder->LoopBody();
1629 : ControlScopeForIteration execution_control(this, stmt, loop_builder);
1630 260130 : builder()->StackCheck(stmt->position());
1631 260136 : Visit(stmt->body());
1632 260131 : loop_builder->BindContinueTarget();
1633 260135 : }
1634 :
1635 1195 : void BytecodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
1636 3585 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1637 1195 : if (stmt->cond()->ToBooleanIsFalse()) {
1638 347 : VisitIterationBody(stmt, &loop_builder);
1639 848 : } else if (stmt->cond()->ToBooleanIsTrue()) {
1640 220 : loop_builder.LoopHeader();
1641 220 : VisitIterationBody(stmt, &loop_builder);
1642 220 : loop_builder.JumpToHeader(loop_depth_);
1643 : } else {
1644 628 : loop_builder.LoopHeader();
1645 628 : VisitIterationBody(stmt, &loop_builder);
1646 : builder()->SetExpressionAsStatementPosition(stmt->cond());
1647 : BytecodeLabels loop_backbranch(zone());
1648 : VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
1649 628 : TestFallthrough::kThen);
1650 628 : loop_backbranch.Bind(builder());
1651 628 : loop_builder.JumpToHeader(loop_depth_);
1652 : }
1653 1195 : }
1654 :
1655 13674 : void BytecodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
1656 40911 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1657 :
1658 13674 : if (stmt->cond()->ToBooleanIsFalse()) {
1659 : // If the condition is false there is no need to generate the loop.
1660 111 : return;
1661 : }
1662 :
1663 13563 : loop_builder.LoopHeader();
1664 13563 : if (!stmt->cond()->ToBooleanIsTrue()) {
1665 : builder()->SetExpressionAsStatementPosition(stmt->cond());
1666 : BytecodeLabels loop_body(zone());
1667 : VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
1668 12543 : TestFallthrough::kThen);
1669 12543 : loop_body.Bind(builder());
1670 : }
1671 13563 : VisitIterationBody(stmt, &loop_builder);
1672 13563 : loop_builder.JumpToHeader(loop_depth_);
1673 : }
1674 :
1675 220812 : void BytecodeGenerator::VisitForStatement(ForStatement* stmt) {
1676 646975 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1677 :
1678 220813 : if (stmt->init() != nullptr) {
1679 160466 : Visit(stmt->init());
1680 : }
1681 220817 : if (stmt->cond() && stmt->cond()->ToBooleanIsFalse()) {
1682 : // If the condition is known to be false there is no need to generate the
1683 : // body, next or condition blocks. The init block should still be generated.
1684 15467 : return;
1685 : }
1686 :
1687 205348 : loop_builder.LoopHeader();
1688 205346 : if (stmt->cond() && !stmt->cond()->ToBooleanIsTrue()) {
1689 : builder()->SetExpressionAsStatementPosition(stmt->cond());
1690 : BytecodeLabels loop_body(zone());
1691 : VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
1692 188451 : TestFallthrough::kThen);
1693 188452 : loop_body.Bind(builder());
1694 : }
1695 205347 : VisitIterationBody(stmt, &loop_builder);
1696 205350 : if (stmt->next() != nullptr) {
1697 : builder()->SetStatementPosition(stmt->next());
1698 180212 : Visit(stmt->next());
1699 : }
1700 205351 : loop_builder.JumpToHeader(loop_depth_);
1701 : }
1702 :
1703 4804 : void BytecodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1704 9582 : if (stmt->subject()->IsNullLiteral() ||
1705 4778 : stmt->subject()->IsUndefinedLiteral()) {
1706 : // ForIn generates lots of code, skip if it wouldn't produce any effects.
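 : // (Illustrative case, f being a placeholder: `for (const k in undefined)
 : // { f(k); }` runs its body zero times, so no bytecode needs to be emitted
 : // for the whole statement.)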
1707 51 : return;
1708 : }
1709 :
1710 : BytecodeLabel subject_null_label, subject_undefined_label;
1711 : FeedbackSlot slot = feedback_spec()->AddForInSlot();
1712 :
1713 : // Prepare the state for executing ForIn.
1714 : builder()->SetExpressionAsStatementPosition(stmt->subject());
1715 4753 : VisitForAccumulatorValue(stmt->subject());
1716 4753 : builder()->JumpIfUndefined(&subject_undefined_label);
1717 4753 : builder()->JumpIfNull(&subject_null_label);
1718 4753 : Register receiver = register_allocator()->NewRegister();
1719 4753 : builder()->ToObject(receiver);
1720 :
1721 : // Used as kRegTriple and kRegPair in ForInPrepare and ForInNext.
1722 4753 : RegisterList triple = register_allocator()->NewRegisterList(3);
1723 4753 : Register cache_length = triple[2];
1724 4753 : builder()->ForInEnumerate(receiver);
1725 4753 : builder()->ForInPrepare(triple, feedback_index(slot));
1726 :
1727 : // Set up loop counter
1728 4753 : Register index = register_allocator()->NewRegister();
1729 4753 : builder()->LoadLiteral(Smi::zero());
1730 4753 : builder()->StoreAccumulatorInRegister(index);
1731 :
1732 : // The loop
1733 : {
1734 9506 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1735 4753 : loop_builder.LoopHeader();
1736 : builder()->SetExpressionAsStatementPosition(stmt->each());
1737 4752 : builder()->ForInContinue(index, cache_length);
1738 : loop_builder.BreakIfFalse(ToBooleanMode::kAlreadyBoolean);
1739 : builder()->ForInNext(receiver, index, triple.Truncate(2),
1740 4753 : feedback_index(slot));
1741 : loop_builder.ContinueIfUndefined();
1742 :
1743 : // Assign accumulator value to the 'each' target.
1744 : {
1745 : EffectResultScope scope(this);
1746 : // Make sure to preserve the accumulator across the PrepareAssignmentLhs
1747 : // call.
1748 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(
1749 4753 : stmt->each(), AccumulatorPreservingMode::kPreserve);
1750 : builder()->SetExpressionPosition(stmt->each());
1751 4752 : BuildAssignment(lhs_data, Token::ASSIGN, LookupHoistingMode::kNormal);
1752 : }
1753 :
1754 4752 : VisitIterationBody(stmt, &loop_builder);
1755 4752 : builder()->ForInStep(index);
1756 4753 : builder()->StoreAccumulatorInRegister(index);
1757 4753 : loop_builder.JumpToHeader(loop_depth_);
1758 : }
1759 4753 : builder()->Bind(&subject_null_label);
1760 4753 : builder()->Bind(&subject_undefined_label);
1761 : }
1762 :
1763 : // Desugar a for-of statement into an application of the iteration protocol.
1764 : //
1765 : // for (EACH of SUBJECT) BODY
1766 : //
1767 : // becomes
1768 : //
1769 : // iterator = %GetIterator(SUBJECT)
1770 : // try {
1771 : //
1772 : // loop {
1773 : // // Make sure we are considered 'done' if .next(), .done or .value fail.
1774 : // done = true
1775 : // value = iterator.next()
1776 : // if (value.done) break;
1777 : // value = value.value
1778 : // done = false
1779 : //
1780 : // EACH = value
1781 : // BODY
1782 : // }
1783 : // done = true
1784 : //
1785 : // } catch(e) {
1786 : // iteration_continuation = RETHROW
1787 : // } finally {
1788 : // %FinalizeIteration(iterator, done, iteration_continuation)
1789 : // }
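 : //
 : // A concrete sketch of what this covers (f and it are placeholders): for
 : // `for (const x of it) f(x)`, the iterator is fetched once from `it`, each
 : // pass calls iterator.next(), breaks when the result's .done is truthy,
 : // and otherwise assigns the result's .value to x before running the body.
 : // If the body exits early via break, return or throw, `done` is still
 : // false, so %FinalizeIteration in the finally-block closes the iterator
 : // (roughly, by calling its .return() method if it has one).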
1790 35274 : void BytecodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1791 : EffectResultScope effect_scope(this);
1792 :
1793 35274 : builder()->SetExpressionAsStatementPosition(stmt->subject());
1794 35274 : VisitForAccumulatorValue(stmt->subject());
1795 :
1796 : // Store the iterator in a dedicated register so that it can be closed on
1797 : // exit, and the 'done' value in a dedicated register so that it can be
1798 : // changed and accessed independently of the iteration result.
1799 35274 : IteratorRecord iterator = BuildGetIteratorRecord(stmt->type());
1800 35274 : Register done = register_allocator()->NewRegister();
1801 35274 : builder()->LoadFalse();
1802 35274 : builder()->StoreAccumulatorInRegister(done);
1803 :
1804 : BuildTryFinally(
1805 : // Try block.
1806 35274 : [&]() {
1807 564381 : Register next_result = register_allocator()->NewRegister();
1808 :
1809 246918 : LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1810 35274 : loop_builder.LoopHeader();
1811 :
1812 105822 : builder()->LoadTrue().StoreAccumulatorInRegister(done);
1813 :
1814 : // Call the iterator's .next() method. Break from the loop if the `done`
1815 : // property is truthy, otherwise load the value from the iterator result
1816 : // and append the argument.
1817 35274 : builder()->SetExpressionAsStatementPosition(stmt->each());
1818 70548 : BuildIteratorNext(iterator, next_result);
1819 : builder()->LoadNamedProperty(
1820 : next_result, ast_string_constants()->done_string(),
1821 35273 : feedback_index(feedback_spec()->AddLoadICSlot()));
1822 : loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
1823 :
1824 : builder()
1825 : // value = value.value
1826 : ->LoadNamedProperty(
1827 : next_result, ast_string_constants()->value_string(),
1828 35274 : feedback_index(feedback_spec()->AddLoadICSlot()));
1829 : // Set done = false before the assignment to 'each' happens, so that done
1830 : // is false if the assignment throws.
1831 : builder()
1832 35274 : ->StoreAccumulatorInRegister(next_result)
1833 35274 : .LoadFalse()
1834 35274 : .StoreAccumulatorInRegister(done);
1835 :
1836 : // Assign to the 'each' target.
1837 70548 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(stmt->each());
1838 35273 : builder()->LoadAccumulatorWithRegister(next_result);
1839 35274 : BuildAssignment(lhs_data, Token::ASSIGN, LookupHoistingMode::kNormal);
1840 :
1841 70548 : VisitIterationBody(stmt, &loop_builder);
1842 :
1843 35274 : loop_builder.JumpToHeader(loop_depth_);
1844 35274 : },
1845 : // Finally block.
1846 : [&](Register iteration_continuation_token) {
1847 : // Finish the iteration in the finally block.
1848 35274 : BuildFinalizeIteration(iterator, done, iteration_continuation_token);
1849 : },
1850 35274 : HandlerTable::UNCAUGHT);
1851 35273 : }
1852 :
1853 69810 : void BytecodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
1854 : // Update catch prediction tracking. The updated catch_prediction value lasts
1855 : // until the end of the try_block in the AST node, and does not apply to the
1856 : // catch_block.
1857 69810 : HandlerTable::CatchPrediction outer_catch_prediction = catch_prediction();
1858 69810 : set_catch_prediction(stmt->GetCatchPrediction(outer_catch_prediction));
1859 :
1860 : BuildTryCatch(
1861 : // Try body.
1862 : [&]() {
1863 69811 : Visit(stmt->try_block());
1864 69811 : set_catch_prediction(outer_catch_prediction);
1865 : },
1866 : // Catch body.
1867 69807 : [&](Register context) {
1868 209428 : if (stmt->scope()) {
1869 : // Create a catch scope that binds the exception.
1870 279149 : BuildNewLocalCatchContext(stmt->scope());
1871 69720 : builder()->StoreAccumulatorInRegister(context);
1872 : }
1873 :
1874 : // If requested, clear message object as we enter the catch block.
1875 209433 : if (stmt->ShouldClearPendingException(outer_catch_prediction)) {
1876 69811 : builder()->LoadTheHole().SetPendingMessage();
1877 : }
1878 :
1879 : // Load the catch context into the accumulator.
1880 69810 : builder()->LoadAccumulatorWithRegister(context);
1881 :
1882 : // Evaluate the catch-block.
1883 69810 : if (stmt->scope()) {
1884 69720 : VisitInScope(stmt->catch_block(), stmt->scope());
1885 : } else {
1886 90 : VisitBlock(stmt->catch_block());
1887 : }
1888 69810 : },
1889 69810 : catch_prediction(), stmt);
1890 69810 : }
1891 :
1892 3424 : void BytecodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
1893 : BuildTryFinally(
1894 : // Try block.
1895 3424 : [&]() { Visit(stmt->try_block()); },
1896 : // Finally block.
1897 3424 : [&](Register body_continuation_token) { Visit(stmt->finally_block()); },
1898 3424 : catch_prediction(), stmt);
1899 3424 : }
1900 :
1901 0 : void BytecodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
1902 : builder()->SetStatementPosition(stmt);
1903 6556 : builder()->Debugger();
1904 0 : }
1905 :
1906 2681675 : void BytecodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
1907 : DCHECK(expr->scope()->outer_scope() == current_scope());
1908 8045025 : uint8_t flags = CreateClosureFlags::Encode(
1909 : expr->pretenure(), closure_scope()->is_function_scope(),
1910 2681675 : info()->might_always_opt());
1911 2681683 : size_t entry = builder()->AllocateDeferredConstantPoolEntry();
1912 2681703 : builder()->CreateClosure(entry, GetCachedCreateClosureSlot(expr), flags);
1913 5363443 : function_literals_.push_back(std::make_pair(expr, entry));
1914 2681718 : AddToEagerLiteralsIfEager(expr);
1915 2681730 : }
1916 :
1917 2926779 : void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) {
1918 2926779 : if (eager_inner_literals_ && literal->ShouldEagerCompile()) {
1919 : DCHECK(!IsInEagerLiterals(literal, *eager_inner_literals_));
1920 518877 : eager_inner_literals_->push_back(literal);
1921 : }
1922 2926785 : }
1923 :
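 : // Note: "one-shot" bytecode is (roughly) bytecode expected to execute only
 : // once, i.e. top-level code or a one-shot IIFE such as
 : // `(function() { ... })();`, and only outside of loops. For such code the
 : // literal-creation paths below use runtime calls like
 : // Runtime::kCreateObjectLiteralWithoutAllocationSite instead of the
 : // allocation-site-backed bytecodes, since per-call-site caching would
 : // likely not pay off for code that runs once.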
1924 : bool BytecodeGenerator::ShouldOptimizeAsOneShot() const {
1925 10575319 : if (!FLAG_enable_one_shot_optimization) return false;
1926 :
1927 10558704 : if (loop_depth_ > 0) return false;
1928 :
1929 18422140 : return info()->literal()->is_toplevel() ||
1930 : info()->literal()->is_oneshot_iife();
1931 : }
1932 :
1933 42716 : void BytecodeGenerator::BuildClassLiteral(ClassLiteral* expr, Register name) {
1934 : size_t class_boilerplate_entry =
1935 42716 : builder()->AllocateDeferredConstantPoolEntry();
1936 85431 : class_literals_.push_back(std::make_pair(expr, class_boilerplate_entry));
1937 :
1938 42715 : VisitDeclarations(expr->scope()->declarations());
1939 42716 : Register class_constructor = register_allocator()->NewRegister();
1940 :
1941 : {
1942 : RegisterAllocationScope register_scope(this);
1943 42714 : RegisterList args = register_allocator()->NewGrowableRegisterList();
1944 :
1945 42714 : Register class_boilerplate = register_allocator()->GrowRegisterList(&args);
1946 : Register class_constructor_in_args =
1947 42716 : register_allocator()->GrowRegisterList(&args);
1948 42716 : Register super_class = register_allocator()->GrowRegisterList(&args);
1949 : DCHECK_EQ(ClassBoilerplate::kFirstDynamicArgumentIndex,
1950 : args.register_count());
1951 :
1952 : VisitForAccumulatorValueOrTheHole(expr->extends());
1953 42715 : builder()->StoreAccumulatorInRegister(super_class);
1954 :
1955 42716 : VisitFunctionLiteral(expr->constructor());
1956 : builder()
1957 42716 : ->StoreAccumulatorInRegister(class_constructor)
1958 42717 : .MoveRegister(class_constructor, class_constructor_in_args)
1959 42717 : .LoadConstantPoolEntry(class_boilerplate_entry)
1960 42716 : .StoreAccumulatorInRegister(class_boilerplate);
1961 :
1962 : // Create computed names and method values nodes to store into the literal.
1963 681305 : for (int i = 0; i < expr->properties()->length(); i++) {
1964 319295 : ClassLiteral::Property* property = expr->properties()->at(i);
1965 319295 : if (property->is_computed_name()) {
1966 6039 : Register key = register_allocator()->GrowRegisterList(&args);
1967 :
1968 : builder()->SetExpressionAsStatementPosition(property->key());
1969 6039 : BuildLoadPropertyKey(property, key);
1970 6039 : if (property->is_static()) {
1971 : // The static prototype property is read only. We handle the
1972 : // non-computed property name case in the parser. Since this is the only
1973 : // case where we need to check for an own read-only property, we
1974 : // special-case it so we do not need to do the check for every property.
1975 :
1976 : FeedbackSlot slot = GetDummyCompareICSlot();
1977 : BytecodeLabel done;
1978 : builder()
1979 1674 : ->LoadLiteral(ast_string_constants()->prototype_string())
1980 : .CompareOperation(Token::Value::EQ_STRICT, key,
1981 1674 : feedback_index(slot))
1982 1674 : .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &done)
1983 1674 : .CallRuntime(Runtime::kThrowStaticPrototypeError)
1984 1674 : .Bind(&done);
1985 : }
1986 :
1987 6039 : if (property->kind() == ClassLiteral::Property::FIELD) {
1988 : DCHECK(!property->is_private());
1989 : // Initialize field's name variable with the computed name.
1990 : DCHECK_NOT_NULL(property->computed_name_var());
1991 407 : builder()->LoadAccumulatorWithRegister(key);
1992 : BuildVariableAssignment(property->computed_name_var(), Token::INIT,
1993 407 : HoleCheckMode::kElided);
1994 : }
1995 : }
1996 :
1997 319295 : if (property->kind() == ClassLiteral::Property::FIELD) {
1998 1099 : if (property->is_private()) {
1999 : RegisterAllocationScope private_name_register_scope(this);
2000 692 : Register private_name = register_allocator()->NewRegister();
2001 : VisitForRegisterValue(property->key(), private_name);
2002 : builder()
2003 1384 : ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
2004 692 : .StoreAccumulatorInRegister(private_name)
2005 692 : .CallRuntime(Runtime::kCreatePrivateNameSymbol, private_name);
2006 : DCHECK_NOT_NULL(property->private_name_var());
2007 : BuildVariableAssignment(property->private_name_var(), Token::INIT,
2008 692 : HoleCheckMode::kElided);
2009 : }
2010 : // We don't compute the field's value here, but instead do it in the
2011 : // initializer function.
2012 1099 : continue;
2013 : }
2014 :
2015 318196 : Register value = register_allocator()->GrowRegisterList(&args);
2016 : VisitForRegisterValue(property->value(), value);
2017 : }
2018 :
2019 42715 : builder()->CallRuntime(Runtime::kDefineClass, args);
2020 : }
2021 42716 : Register prototype = register_allocator()->NewRegister();
2022 42716 : builder()->StoreAccumulatorInRegister(prototype);
2023 :
2024 : // Assign to class variable.
2025 42717 : if (expr->class_variable() != nullptr) {
2026 : DCHECK(expr->class_variable()->IsStackLocal() ||
2027 : expr->class_variable()->IsContextSlot());
2028 38424 : builder()->LoadAccumulatorWithRegister(class_constructor);
2029 : BuildVariableAssignment(expr->class_variable(), Token::INIT,
2030 38424 : HoleCheckMode::kElided);
2031 : }
2032 :
2033 42715 : if (expr->instance_members_initializer_function() != nullptr) {
2034 : Register initializer =
2035 1012 : VisitForRegisterValue(expr->instance_members_initializer_function());
2036 :
2037 1012 : if (FunctionLiteral::NeedsHomeObject(
2038 : expr->instance_members_initializer_function())) {
2039 : FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2040 24 : builder()->LoadAccumulatorWithRegister(prototype).StoreHomeObjectProperty(
2041 24 : initializer, feedback_index(slot), language_mode());
2042 : }
2043 :
2044 : FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2045 : builder()
2046 1012 : ->LoadAccumulatorWithRegister(initializer)
2047 1012 : .StoreClassFieldsInitializer(class_constructor, feedback_index(slot))
2048 1012 : .LoadAccumulatorWithRegister(class_constructor);
2049 : }
2050 :
2051 42715 : if (expr->static_fields_initializer() != nullptr) {
2052 : // TODO(gsathya): This can be optimized away to be a part of the
2053 : // class boilerplate in the future. The name argument can be
2054 : // passed to the DefineClass runtime function and have it set
2055 : // there.
2056 517 : if (name.is_valid()) {
2057 6 : Register key = register_allocator()->NewRegister();
2058 : builder()
2059 6 : ->LoadLiteral(ast_string_constants()->name_string())
2060 6 : .StoreAccumulatorInRegister(key);
2061 :
2062 : DataPropertyInLiteralFlags data_property_flags =
2063 : DataPropertyInLiteralFlag::kNoFlags;
2064 : FeedbackSlot slot =
2065 : feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
2066 6 : builder()->LoadAccumulatorWithRegister(name).StoreDataPropertyInLiteral(
2067 6 : class_constructor, key, data_property_flags, feedback_index(slot));
2068 : }
2069 :
2070 517 : RegisterList args = register_allocator()->NewRegisterList(1);
2071 : Register initializer =
2072 517 : VisitForRegisterValue(expr->static_fields_initializer());
2073 :
2074 517 : if (FunctionLiteral::NeedsHomeObject(expr->static_fields_initializer())) {
2075 : FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2076 : builder()
2077 23 : ->LoadAccumulatorWithRegister(class_constructor)
2078 : .StoreHomeObjectProperty(initializer, feedback_index(slot),
2079 23 : language_mode());
2080 : }
2081 :
2082 : builder()
2083 517 : ->MoveRegister(class_constructor, args[0])
2084 : .CallProperty(initializer, args,
2085 517 : feedback_index(feedback_spec()->AddCallICSlot()));
2086 : }
2087 42715 : builder()->LoadAccumulatorWithRegister(class_constructor);
2088 42716 : }
2089 :
2090 42709 : void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
2091 42709 : VisitClassLiteral(expr, Register::invalid_value());
2092 42711 : }
2093 :
2094 42715 : void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr, Register name) {
2095 : CurrentScope current_scope(this, expr->scope());
2096 : DCHECK_NOT_NULL(expr->scope());
2097 42715 : if (expr->scope()->NeedsContext()) {
2098 35019 : BuildNewLocalBlockContext(expr->scope());
2099 70040 : ContextScope scope(this, expr->scope());
2100 35020 : BuildClassLiteral(expr, name);
2101 : } else {
2102 7696 : BuildClassLiteral(expr, name);
2103 : }
2104 42717 : }
2105 :
2106 1529 : void BytecodeGenerator::VisitInitializeClassMembersStatement(
2107 : InitializeClassMembersStatement* stmt) {
2108 1529 : RegisterList args = register_allocator()->NewRegisterList(3);
2109 3058 : Register constructor = args[0], key = args[1], value = args[2];
2110 1529 : builder()->MoveRegister(builder()->Receiver(), constructor);
2111 :
2112 6151 : for (int i = 0; i < stmt->fields()->length(); i++) {
2113 2311 : ClassLiteral::Property* property = stmt->fields()->at(i);
2114 :
2115 2311 : if (property->is_computed_name()) {
2116 : DCHECK_EQ(property->kind(), ClassLiteral::Property::FIELD);
2117 : DCHECK(!property->is_private());
2118 : Variable* var = property->computed_name_var();
2119 : DCHECK_NOT_NULL(var);
2120 : // The computed name is already evaluated and stored in a
2121 : // variable at class definition time.
2122 407 : BuildVariableLoad(var, HoleCheckMode::kElided);
2123 407 : builder()->StoreAccumulatorInRegister(key);
2124 1904 : } else if (property->kind() == ClassLiteral::Property::FIELD &&
2125 : property->is_private()) {
2126 : Variable* private_name_var = property->private_name_var();
2127 : DCHECK_NOT_NULL(private_name_var);
2128 692 : BuildVariableLoad(private_name_var, HoleCheckMode::kElided);
2129 692 : builder()->StoreAccumulatorInRegister(key);
2130 : } else {
2131 1212 : BuildLoadPropertyKey(property, key);
2132 : }
2133 :
2134 : builder()->SetExpressionAsStatementPosition(property->value());
2135 : VisitForRegisterValue(property->value(), value);
2136 2311 : VisitSetHomeObject(value, constructor, property);
2137 :
2138 : Runtime::FunctionId function_id =
2139 2311 : property->kind() == ClassLiteral::Property::FIELD &&
2140 : !property->is_private()
2141 : ? Runtime::kCreateDataProperty
2142 2311 : : Runtime::kAddPrivateField;
2143 2311 : builder()->CallRuntime(function_id, args);
2144 : }
2145 1529 : }
2146 :
2147 921 : void BytecodeGenerator::BuildInstanceMemberInitialization(Register constructor,
2148 : Register instance) {
2149 921 : RegisterList args = register_allocator()->NewRegisterList(1);
2150 921 : Register initializer = register_allocator()->NewRegister();
2151 :
2152 : FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
2153 : BytecodeLabel done;
2154 :
2155 : builder()
2156 921 : ->LoadClassFieldsInitializer(constructor, feedback_index(slot))
2157 : // TODO(gsathya): This jump can be elided for the base
2158 : // constructor and derived constructor. This is only required
2159 : // when called from an arrow function.
2160 921 : .JumpIfUndefined(&done)
2161 921 : .StoreAccumulatorInRegister(initializer)
2162 921 : .MoveRegister(instance, args[0])
2163 : .CallProperty(initializer, args,
2164 921 : feedback_index(feedback_spec()->AddCallICSlot()))
2165 921 : .Bind(&done);
2166 921 : }
2167 :
2168 1814 : void BytecodeGenerator::VisitNativeFunctionLiteral(
2169 : NativeFunctionLiteral* expr) {
2170 1814 : size_t entry = builder()->AllocateDeferredConstantPoolEntry();
2171 : int index = feedback_spec()->AddFeedbackCellForCreateClosure();
2172 1814 : uint8_t flags = CreateClosureFlags::Encode(false, false, false);
2173 1814 : builder()->CreateClosure(entry, index, flags);
2174 3628 : native_function_literals_.push_back(std::make_pair(expr, entry));
2175 1814 : }
2176 :
2177 0 : void BytecodeGenerator::VisitDoExpression(DoExpression* expr) {
2178 0 : VisitBlock(expr->block());
2179 0 : VisitVariableProxy(expr->result());
2180 0 : }
2181 :
2182 29272 : void BytecodeGenerator::VisitConditional(Conditional* expr) {
2183 : ConditionalControlFlowBuilder conditional_builder(
2184 87816 : builder(), block_coverage_builder_, expr);
2185 :
2186 29272 : if (expr->condition()->ToBooleanIsTrue()) {
2187 : // Generate then block unconditionally as always true.
2188 234 : conditional_builder.Then();
2189 234 : VisitForAccumulatorValue(expr->then_expression());
2190 29038 : } else if (expr->condition()->ToBooleanIsFalse()) {
2191 : // Generate else block unconditionally if it exists.
2192 112 : conditional_builder.Else();
2193 112 : VisitForAccumulatorValue(expr->else_expression());
2194 : } else {
2195 : VisitForTest(expr->condition(), conditional_builder.then_labels(),
2196 28926 : conditional_builder.else_labels(), TestFallthrough::kThen);
2197 :
2198 28926 : conditional_builder.Then();
2199 28926 : VisitForAccumulatorValue(expr->then_expression());
2200 28926 : conditional_builder.JumpToEnd();
2201 :
2202 28926 : conditional_builder.Else();
2203 28926 : VisitForAccumulatorValue(expr->else_expression());
2204 : }
2205 29272 : }
2206 :
2207 10279233 : void BytecodeGenerator::VisitLiteral(Literal* expr) {
2208 10279233 : if (execution_result()->IsEffect()) return;
2209 9154226 : switch (expr->type()) {
2210 : case Literal::kSmi:
2211 6716918 : builder()->LoadLiteral(expr->AsSmiLiteral());
2212 6716927 : break;
2213 : case Literal::kHeapNumber:
2214 605738 : builder()->LoadLiteral(expr->AsNumber());
2215 302869 : break;
2216 : case Literal::kUndefined:
2217 113325 : builder()->LoadUndefined();
2218 113326 : break;
2219 : case Literal::kBoolean:
2220 648811 : builder()->LoadBoolean(expr->ToBooleanIsTrue());
2221 : execution_result()->SetResultIsBoolean();
2222 : break;
2223 : case Literal::kNull:
2224 22170 : builder()->LoadNull();
2225 22169 : break;
2226 : case Literal::kTheHole:
2227 0 : builder()->LoadTheHole();
2228 0 : break;
2229 : case Literal::kString:
2230 1663531 : builder()->LoadLiteral(expr->AsRawString());
2231 : execution_result()->SetResultIsString();
2232 : break;
2233 : case Literal::kSymbol:
2234 1981 : builder()->LoadLiteral(expr->AsSymbol());
2235 1981 : break;
2236 : case Literal::kBigInt:
2237 9038 : builder()->LoadLiteral(expr->AsBigInt());
2238 9037 : break;
2239 : }
2240 : }
2241 :
2242 45009 : void BytecodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
2243 : // Materialize a regular expression literal.
2244 : builder()->CreateRegExpLiteral(
2245 : expr->raw_pattern(), feedback_index(feedback_spec()->AddLiteralSlot()),
2246 45010 : expr->flags());
2247 45010 : }
2248 :
2249 201536 : void BytecodeGenerator::BuildCreateObjectLiteral(Register literal,
2250 : uint8_t flags, size_t entry) {
2251 201536 : if (ShouldOptimizeAsOneShot()) {
2252 113899 : RegisterList args = register_allocator()->NewRegisterList(2);
2253 : builder()
2254 113906 : ->LoadConstantPoolEntry(entry)
2255 113911 : .StoreAccumulatorInRegister(args[0])
2256 113912 : .LoadLiteral(Smi::FromInt(flags))
2257 113909 : .StoreAccumulatorInRegister(args[1])
2258 113911 : .CallRuntime(Runtime::kCreateObjectLiteralWithoutAllocationSite, args)
2259 113912 : .StoreAccumulatorInRegister(literal);
2260 :
2261 : } else {
2262 : // TODO(cbruni): Directly generate runtime call for literals we cannot
2263 : // optimize once the CreateShallowObjectLiteral stub is in sync with the TF
2264 : // optimizations.
2265 : int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2266 : builder()
2267 175276 : ->CreateObjectLiteral(entry, literal_index, flags)
2268 87639 : .StoreAccumulatorInRegister(literal);
2269 : }
2270 201552 : }
2271 :
2272 229754 : void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
2273 229754 : expr->InitDepthAndFlags();
2274 :
2275 : // Fast path for the empty object literal which doesn't need an
2276 : // AllocationSite.
2277 229774 : if (expr->IsEmptyObjectLiteral()) {
2278 : DCHECK(expr->IsFastCloningSupported());
2279 27951 : builder()->CreateEmptyObjectLiteral();
2280 27952 : return;
2281 : }
2282 :
2283 : // Deep-copy the literal boilerplate.
2284 201822 : uint8_t flags = CreateObjectLiteralFlags::Encode(
2285 403628 : expr->ComputeFlags(), expr->IsFastCloningSupported());
2286 :
2287 201804 : Register literal = register_allocator()->NewRegister();
2288 :
2289 : // Create literal object.
2290 : int property_index = 0;
2291 : bool clone_object_spread =
2292 201801 : expr->properties()->first()->kind() == ObjectLiteral::Property::SPREAD;
2293 201801 : if (clone_object_spread) {
2294 : // Avoid the slow path for spreads in the following common cases:
2295 : // 1) `let obj = { ...source }`
2296 : // 2) `let obj = { ...source, override: 1 }`
2297 : // 3) `let obj = { ...source, ...overrides }`
2298 : RegisterAllocationScope register_scope(this);
2299 : Expression* property = expr->properties()->first()->value();
2300 279 : Register from_value = VisitForRegisterValue(property);
2301 :
2302 : BytecodeLabels clone_object(zone());
2303 558 : builder()->JumpIfUndefined(clone_object.New());
2304 279 : builder()->JumpIfNull(clone_object.New());
2305 279 : builder()->ToObject(from_value);
2306 :
2307 279 : clone_object.Bind(builder());
2308 : int clone_index = feedback_index(feedback_spec()->AddCloneObjectSlot());
2309 279 : builder()->CloneObject(from_value, flags, clone_index);
2310 279 : builder()->StoreAccumulatorInRegister(literal);
2311 : property_index++;
2312 : } else {
2313 : size_t entry;
2314 : // If the constant properties are an empty fixed array, use a cached empty
2315 : // fixed array to ensure it's only added to the constant pool once.
2316 201522 : if (expr->properties_count() == 0) {
2317 2209 : entry = builder()->EmptyObjectBoilerplateDescriptionConstantPoolEntry();
2318 : } else {
2319 199313 : entry = builder()->AllocateDeferredConstantPoolEntry();
2320 398655 : object_literals_.push_back(std::make_pair(expr, entry));
2321 : }
2322 201541 : BuildCreateObjectLiteral(literal, flags, entry);
2323 : }
2324 :
2325 : // Store computed values into the literal.
2326 : AccessorTable accessor_table(zone());
2327 4232206 : for (; property_index < expr->properties()->length(); property_index++) {
2328 2017104 : ObjectLiteral::Property* property = expr->properties()->at(property_index);
2329 2017104 : if (property->is_computed_name()) break;
2330 2015197 : if (!clone_object_spread && property->IsCompileTimeValue()) continue;
2331 :
2332 : RegisterAllocationScope inner_register_scope(this);
2333 254147 : Literal* key = property->key()->AsLiteral();
2334 254147 : switch (property->kind()) {
2335 : case ObjectLiteral::Property::SPREAD:
2336 0 : UNREACHABLE();
2337 : case ObjectLiteral::Property::CONSTANT:
2338 : case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2339 : DCHECK(clone_object_spread || !property->value()->IsCompileTimeValue());
2340 : V8_FALLTHROUGH;
2341 : case ObjectLiteral::Property::COMPUTED: {
2342 : // It is safe to use [[Put]] here because the boilerplate already
2343 : // contains computed properties with an uninitialized value.
2344 244091 : if (key->IsStringLiteral()) {
2345 : DCHECK(key->IsPropertyName());
2346 243536 : if (property->emit_store()) {
2347 : builder()->SetExpressionPosition(property->value());
2348 243387 : VisitForAccumulatorValue(property->value());
2349 : FeedbackSlot slot = feedback_spec()->AddStoreOwnICSlot();
2350 243391 : if (FunctionLiteral::NeedsHomeObject(property->value())) {
2351 : RegisterAllocationScope register_scope(this);
2352 546 : Register value = register_allocator()->NewRegister();
2353 546 : builder()->StoreAccumulatorInRegister(value);
2354 : builder()->StoreNamedOwnProperty(
2355 546 : literal, key->AsRawPropertyName(), feedback_index(slot));
2356 546 : VisitSetHomeObject(value, literal, property);
2357 : } else {
2358 : builder()->StoreNamedOwnProperty(
2359 242842 : literal, key->AsRawPropertyName(), feedback_index(slot));
2360 : }
2361 : } else {
2362 : builder()->SetExpressionPosition(property->value());
2363 149 : VisitForEffect(property->value());
2364 : }
2365 : } else {
2366 554 : RegisterList args = register_allocator()->NewRegisterList(3);
2367 :
2368 554 : builder()->MoveRegister(literal, args[0]);
2369 : builder()->SetExpressionPosition(property->key());
2370 : VisitForRegisterValue(property->key(), args[1]);
2371 : builder()->SetExpressionPosition(property->value());
2372 : VisitForRegisterValue(property->value(), args[2]);
2373 554 : if (property->emit_store()) {
2374 522 : builder()->CallRuntime(Runtime::kSetKeyedProperty, args);
2375 522 : Register value = args[2];
2376 522 : VisitSetHomeObject(value, literal, property);
2377 : }
2378 : }
2379 : break;
2380 : }
2381 : case ObjectLiteral::Property::PROTOTYPE: {
2382 : // __proto__:null is handled by CreateObjectLiteral.
2383 4096 : if (property->IsNullPrototype()) break;
2384 : DCHECK(property->emit_store());
2385 : DCHECK(!property->NeedsSetFunctionName());
2386 1266 : RegisterList args = register_allocator()->NewRegisterList(2);
2387 1266 : builder()->MoveRegister(literal, args[0]);
2388 : builder()->SetExpressionPosition(property->value());
2389 : VisitForRegisterValue(property->value(), args[1]);
2390 1266 : builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
2391 1266 : break;
2392 : }
2393 : case ObjectLiteral::Property::GETTER:
2394 3963 : if (property->emit_store()) {
2395 3861 : accessor_table.lookup(key)->second->getter = property;
2396 : }
2397 : break;
2398 : case ObjectLiteral::Property::SETTER:
2399 1996 : if (property->emit_store()) {
2400 1900 : accessor_table.lookup(key)->second->setter = property;
2401 : }
2402 : break;
2403 : }
2404 : }
2405 :
2406 : // Define accessors, using only a single call to the runtime for each pair of
2407 : // corresponding getters and setters.
2408 206839 : for (AccessorTable::Iterator it = accessor_table.begin();
2409 : it != accessor_table.end(); ++it) {
2410 : RegisterAllocationScope inner_register_scope(this);
2411 5039 : RegisterList args = register_allocator()->NewRegisterList(5);
2412 5039 : builder()->MoveRegister(literal, args[0]);
2413 5039 : VisitForRegisterValue(it->first, args[1]);
2414 5039 : VisitObjectLiteralAccessor(literal, it->second->getter, args[2]);
2415 5039 : VisitObjectLiteralAccessor(literal, it->second->setter, args[3]);
2416 : builder()
2417 5039 : ->LoadLiteral(Smi::FromInt(NONE))
2418 5039 : .StoreAccumulatorInRegister(args[4])
2419 5039 : .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, args);
2420 : }
2421 :
2422 : // Object literals have two parts. The "static" part on the left contains no
2423 : // computed property names, and so we can compute its map ahead of time; see
2424 : // Runtime_CreateObjectLiteralBoilerplate. The second "dynamic" part starts
2425 : // with the first computed property name and continues with all properties to
2426 : // its right. All the code from above initializes the static component of the
2427 : // object literal, and arranges for the map of the result to reflect the
2428 : // static order in which the keys appear. For the dynamic properties, we
2429 : // compile them into a series of "SetOwnProperty" runtime calls. This will
2430 : // preserve insertion order.
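 : //
 : // For instance (illustrative, `key` being a placeholder), in
 : // `{ a: 1, [key]: 2, b: 3 }` the static part is `a: 1`, whose map can be
 : // computed ahead of time, while `[key]: 2` and everything to its right,
 : // including `b: 3`, form the dynamic part and are added one by one below
 : // so that insertion order is preserved.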
2431 207598 : for (; property_index < expr->properties()->length(); property_index++) {
2432 2899 : ObjectLiteral::Property* property = expr->properties()->at(property_index);
2433 : RegisterAllocationScope inner_register_scope(this);
2434 :
2435 2899 : if (property->IsPrototype()) {
2436 : // __proto__:null is handled by CreateObjectLiteral.
2437 35 : if (property->IsNullPrototype()) continue;
2438 : DCHECK(property->emit_store());
2439 : DCHECK(!property->NeedsSetFunctionName());
2440 30 : RegisterList args = register_allocator()->NewRegisterList(2);
2441 30 : builder()->MoveRegister(literal, args[0]);
2442 : builder()->SetExpressionPosition(property->value());
2443 : VisitForRegisterValue(property->value(), args[1]);
2444 30 : builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
2445 30 : continue;
2446 : }
2447 :
2448 2864 : switch (property->kind()) {
2449 : case ObjectLiteral::Property::CONSTANT:
2450 : case ObjectLiteral::Property::COMPUTED:
2451 : case ObjectLiteral::Property::MATERIALIZED_LITERAL: {
2452 2349 : Register key = register_allocator()->NewRegister();
2453 2349 : BuildLoadPropertyKey(property, key);
2454 : builder()->SetExpressionPosition(property->value());
2455 : Register value;
2456 :
2457 : // Static class fields require the name property to be set on
2458 : // the class, meaning we can't wait until the
2459 : // StoreDataPropertyInLiteral call later to set the name.
2460 2361 : if (property->value()->IsClassLiteral() &&
2461 12 : property->value()->AsClassLiteral()->static_fields_initializer() !=
2462 : nullptr) {
2463 6 : value = register_allocator()->NewRegister();
2464 12 : VisitClassLiteral(property->value()->AsClassLiteral(), key);
2465 6 : builder()->StoreAccumulatorInRegister(value);
2466 : } else {
2467 2343 : value = VisitForRegisterValue(property->value());
2468 : }
2469 2349 : VisitSetHomeObject(value, literal, property);
2470 :
2471 : DataPropertyInLiteralFlags data_property_flags =
2472 : DataPropertyInLiteralFlag::kNoFlags;
2473 2349 : if (property->NeedsSetFunctionName()) {
2474 : data_property_flags |= DataPropertyInLiteralFlag::kSetFunctionName;
2475 : }
2476 :
2477 : FeedbackSlot slot =
2478 : feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
2479 : builder()
2480 2349 : ->LoadAccumulatorWithRegister(value)
2481 : .StoreDataPropertyInLiteral(literal, key, data_property_flags,
2482 2349 : feedback_index(slot));
2483 : break;
2484 : }
2485 : case ObjectLiteral::Property::GETTER:
2486 : case ObjectLiteral::Property::SETTER: {
2487 419 : RegisterList args = register_allocator()->NewRegisterList(4);
2488 419 : builder()->MoveRegister(literal, args[0]);
2489 419 : BuildLoadPropertyKey(property, args[1]);
2490 : builder()->SetExpressionPosition(property->value());
2491 : VisitForRegisterValue(property->value(), args[2]);
2492 419 : VisitSetHomeObject(args[2], literal, property);
2493 : builder()
2494 419 : ->LoadLiteral(Smi::FromInt(NONE))
2495 419 : .StoreAccumulatorInRegister(args[3]);
2496 : Runtime::FunctionId function_id =
2497 : property->kind() == ObjectLiteral::Property::GETTER
2498 : ? Runtime::kDefineGetterPropertyUnchecked
2499 419 : : Runtime::kDefineSetterPropertyUnchecked;
2500 419 : builder()->CallRuntime(function_id, args);
2501 : break;
2502 : }
2503 : case ObjectLiteral::Property::SPREAD: {
2504 96 : RegisterList args = register_allocator()->NewRegisterList(2);
2505 96 : builder()->MoveRegister(literal, args[0]);
2506 : builder()->SetExpressionPosition(property->value());
2507 : VisitForRegisterValue(property->value(), args[1]);
2508 96 : builder()->CallRuntime(Runtime::kCopyDataProperties, args);
2509 : break;
2510 : }
2511 : case ObjectLiteral::Property::PROTOTYPE:
2512 0 : UNREACHABLE(); // Handled specially above.
2513 : break;
2514 : }
2515 : }
2516 :
2517 201800 : builder()->LoadAccumulatorWithRegister(literal);
2518 : }
2519 :
2520 : // Fill an array with values from an iterator, starting at a given index. It is
2521 : // guaranteed that the loop will only terminate if the iterator is exhausted, or
2522 : // if one of iterator.next(), value.done, or value.value fail.
2523 : //
2524 : // In pseudocode:
2525 : //
2526 : // loop {
2527 : // value = iterator.next()
2528 : // if (value.done) break;
2529 : // value = value.value
2530 : // array[index++] = value
2531 : // }
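 : //
 : // Illustrative use (names are placeholders): for an array literal such as
 : // `[a, ...rest]`, the values produced by `rest`'s iterator are written
 : // into the array by this loop, with `index` starting just past the
 : // elements that were already materialized.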
2532 2051 : void BytecodeGenerator::BuildFillArrayWithIterator(
2533 : IteratorRecord iterator, Register array, Register index, Register value,
2534 : FeedbackSlot next_value_slot, FeedbackSlot next_done_slot,
2535 : FeedbackSlot index_slot, FeedbackSlot element_slot) {
2536 : DCHECK(array.is_valid());
2537 : DCHECK(index.is_valid());
2538 : DCHECK(value.is_valid());
2539 :
2540 2051 : LoopBuilder loop_builder(builder(), nullptr, nullptr);
2541 2051 : loop_builder.LoopHeader();
2542 :
2543 : // Call the iterator's .next() method. Break from the loop if the `done`
2544 : // property is truthy; otherwise load the value from the iterator result and
2545 : // append it to the array.
2546 2051 : BuildIteratorNext(iterator, value);
2547 : builder()->LoadNamedProperty(
2548 : value, ast_string_constants()->done_string(),
2549 2051 : feedback_index(feedback_spec()->AddLoadICSlot()));
2550 : loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
2551 :
2552 2051 : loop_builder.LoopBody();
2553 : builder()
2554 : // value = value.value
2555 : ->LoadNamedProperty(value, ast_string_constants()->value_string(),
2556 2051 : feedback_index(next_value_slot))
2557 : // array[index] = value
2558 2051 : .StoreInArrayLiteral(array, index, feedback_index(element_slot))
2559 : // index++
2560 2051 : .LoadAccumulatorWithRegister(index)
2561 2051 : .UnaryOperation(Token::INC, feedback_index(index_slot))
2562 2051 : .StoreAccumulatorInRegister(index);
2563 2051 : loop_builder.BindContinueTarget();
2564 2051 : loop_builder.JumpToHeader(loop_depth_);
2565 2051 : }
2566 :
2567 379152 : void BytecodeGenerator::BuildCreateArrayLiteral(
2568 : const ZonePtrList<Expression>* elements, ArrayLiteral* expr) {
2569 : RegisterAllocationScope register_scope(this);
2570 379152 : Register index = register_allocator()->NewRegister();
2571 379160 : Register array = register_allocator()->NewRegister();
2572 : SharedFeedbackSlot element_slot(feedback_spec(),
2573 : FeedbackSlotKind::kStoreInArrayLiteral);
2574 : ZonePtrList<Expression>::iterator current = elements->begin();
2575 : ZonePtrList<Expression>::iterator end = elements->end();
2576 : bool is_empty = elements->is_empty();
2577 :
2578 559005 : if (!is_empty && (*current)->IsSpread()) {
2579 : // If we have a leading spread, use CreateArrayFromIterable to create
2580 : // an array from it and then add the remaining components to that array.
2581 2049 : VisitForAccumulatorValue(*current);
2582 2049 : builder()->CreateArrayFromIterable().StoreAccumulatorInRegister(array);
2583 :
2584 2049 : if (++current != end) {
2585 : // If there are remaining elements, prepare the index register that is
2586 : // used for adding those elements. The next index is the length of the
2587 : // newly created array.
2588 : auto length = ast_string_constants()->length_string();
2589 : int length_load_slot = feedback_index(feedback_spec()->AddLoadICSlot());
2590 : builder()
2591 542 : ->LoadNamedProperty(array, length, length_load_slot)
2592 542 : .StoreAccumulatorInRegister(index);
2593 : }
2594 377109 : } else if (expr != nullptr) {
2595 : // There are some elements before the first (if any) spread, and we can
2596 : // use a boilerplate when creating the initial array from those elements.
2597 :
2598 : // First, allocate a constant pool entry for the boilerplate that will
2599 : // be created during finalization, and will contain all the constant
2600 : // elements before the first spread. This also handles the empty array case
2601 : // and one-shot optimization.
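 : //
 : // For example (illustrative), in `[1, 2, x, 3]` the constants 1, 2 and 3
 : // are described by the boilerplate, while the non-constant `x` is written
 : // into slot 2 by the StoreInArrayLiteral emitted in the loop further down.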
2602 377087 : uint8_t flags = CreateArrayLiteralFlags::Encode(
2603 754176 : expr->IsFastCloningSupported(), expr->ComputeFlags());
2604 : bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
2605 : size_t entry;
2606 377086 : if (is_empty && optimize_as_one_shot) {
2607 58766 : entry = builder()->EmptyArrayBoilerplateDescriptionConstantPoolEntry();
2608 318320 : } else if (!is_empty) {
2609 177776 : entry = builder()->AllocateDeferredConstantPoolEntry();
2610 355559 : array_literals_.push_back(std::make_pair(expr, entry));
2611 : }
2612 :
2613 377091 : if (optimize_as_one_shot) {
2614 205486 : RegisterList args = register_allocator()->NewRegisterList(2);
2615 : builder()
2616 205486 : ->LoadConstantPoolEntry(entry)
2617 205491 : .StoreAccumulatorInRegister(args[0])
2618 205492 : .LoadLiteral(Smi::FromInt(flags))
2619 205490 : .StoreAccumulatorInRegister(args[1])
2620 205493 : .CallRuntime(Runtime::kCreateArrayLiteralWithoutAllocationSite, args);
2621 171605 : } else if (is_empty) {
2622 : // Empty array literal fast-path.
2623 : int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2624 : DCHECK(expr->IsFastCloningSupported());
2625 140547 : builder()->CreateEmptyArrayLiteral(literal_index);
2626 : } else {
2627 : // Create array literal from boilerplate.
2628 : int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2629 62116 : builder()->CreateArrayLiteral(entry, literal_index, flags);
2630 : }
2631 377097 : builder()->StoreAccumulatorInRegister(array);
2632 :
2633 : // Insert the missing non-constant elements, up until the first spread
2634 : // index, into the initial array (the remaining elements will be inserted
2635 : // below).
2636 : DCHECK_EQ(current, elements->begin());
2637 : ZonePtrList<Expression>::iterator first_spread_or_end =
2638 431 : expr->first_spread_index() >= 0 ? current + expr->first_spread_index()
2639 377521 : : end;
2640 : int array_index = 0;
2641 12973332 : for (; current != first_spread_or_end; ++current, array_index++) {
2642 6298123 : Expression* subexpr = *current;
2643 : DCHECK(!subexpr->IsSpread());
2644 : // Skip the constants.
2645 6298123 : if (subexpr->IsCompileTimeValue()) continue;
2646 :
2647 : builder()
2648 248030 : ->LoadLiteral(Smi::FromInt(array_index))
2649 248034 : .StoreAccumulatorInRegister(index);
2650 248029 : VisitForAccumulatorValue(subexpr);
2651 : builder()->StoreInArrayLiteral(array, index,
2652 248036 : feedback_index(element_slot.Get()));
2653 : }
2654 :
2655 377088 : if (current != end) {
2656 : // If there are remaining elements, prepare the index register
2657 : // to store the next element, which comes from the first spread.
2658 431 : builder()->LoadLiteral(array_index).StoreAccumulatorInRegister(index);
2659 : }
2660 : } else {
2661 : // In other cases, we prepare an empty array to be filled in below.
2662 : DCHECK(!elements->is_empty());
2663 : int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2664 : builder()
2665 20 : ->CreateEmptyArrayLiteral(literal_index)
2666 20 : .StoreAccumulatorInRegister(array);
2667 : // Prepare the index for the first element.
2668 20 : builder()->LoadLiteral(Smi::FromInt(0)).StoreAccumulatorInRegister(index);
2669 : }
2670 :
2671 : // Now build insertions for the remaining elements from current to end.
2672 : SharedFeedbackSlot index_slot(feedback_spec(), FeedbackSlotKind::kBinaryOp);
2673 : SharedFeedbackSlot length_slot(
2674 : feedback_spec(), feedback_spec()->GetStoreICSlot(LanguageMode::kStrict));
2675 385268 : for (; current != end; ++current) {
2676 3056 : Expression* subexpr = *current;
2677 3056 : if (subexpr->IsSpread()) {
2678 : RegisterAllocationScope scope(this);
2679 1468 : builder()->SetExpressionAsStatementPosition(
2680 : subexpr->AsSpread()->expression());
2681 1468 : VisitForAccumulatorValue(subexpr->AsSpread()->expression());
2682 1468 : IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
2683 :
2684 1468 : Register value = register_allocator()->NewRegister();
2685 1468 : FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
2686 1468 : FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
2687 1468 : FeedbackSlot real_index_slot = index_slot.Get();
2688 1468 : FeedbackSlot real_element_slot = element_slot.Get();
2689 : BuildFillArrayWithIterator(iterator, array, index, value,
2690 : next_value_load_slot, next_done_load_slot,
2691 1468 : real_index_slot, real_element_slot);
2692 1588 : } else if (!subexpr->IsTheHoleLiteral()) {
2693 : // literal[index++] = subexpr
2694 1532 : VisitForAccumulatorValue(subexpr);
2695 : builder()
2696 : ->StoreInArrayLiteral(array, index,
2697 1532 : feedback_index(element_slot.Get()))
2698 1532 : .LoadAccumulatorWithRegister(index);
2699 : // Only increase the index if we are not the last element.
2700 1532 : if (current + 1 != end) {
2701 : builder()
2702 976 : ->UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
2703 976 : .StoreAccumulatorInRegister(index);
2704 : }
2705 : } else {
2706 : // literal.length = ++index
2707 : // length_slot is only used when there are holes.
2708 : auto length = ast_string_constants()->length_string();
2709 : builder()
2710 56 : ->LoadAccumulatorWithRegister(index)
2711 56 : .UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
2712 56 : .StoreAccumulatorInRegister(index)
2713 : .StoreNamedProperty(array, length, feedback_index(length_slot.Get()),
2714 56 : LanguageMode::kStrict);
2715 : }
2716 : }
2717 :
2718 379156 : builder()->LoadAccumulatorWithRegister(array);
2719 379161 : }
2720 :
2721 379122 : void BytecodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
2722 379122 : expr->InitDepthAndFlags();
2723 379125 : BuildCreateArrayLiteral(expr->values(), expr);
2724 379131 : }
2725 :
2726 0 : void BytecodeGenerator::VisitStoreInArrayLiteral(StoreInArrayLiteral* expr) {
2727 : builder()->SetExpressionAsStatementPosition(expr);
2728 : RegisterAllocationScope register_scope(this);
2729 0 : Register array = register_allocator()->NewRegister();
2730 0 : Register index = register_allocator()->NewRegister();
2731 : VisitForRegisterValue(expr->array(), array);
2732 : VisitForRegisterValue(expr->index(), index);
2733 0 : VisitForAccumulatorValue(expr->value());
2734 : builder()->StoreInArrayLiteral(
2735 : array, index,
2736 0 : feedback_index(feedback_spec()->AddStoreInArrayLiteralICSlot()));
2737 0 : }
2738 :
2739 6802559 : void BytecodeGenerator::VisitVariableProxy(VariableProxy* proxy) {
2740 : builder()->SetExpressionPosition(proxy);
2741 6802559 : BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
2742 6802613 : }
2743 :
2744 13580748 : void BytecodeGenerator::BuildVariableLoad(Variable* variable,
2745 : HoleCheckMode hole_check_mode,
2746 : TypeofMode typeof_mode) {
2747 13580748 : switch (variable->location()) {
2748 : case VariableLocation::LOCAL: {
2749 2438061 : Register source(builder()->Local(variable->index()));
2750 : // We need to load the variable into the accumulator, even when in a
2751 : // VisitForRegisterScope, in order to avoid register aliasing if
2752 : // subsequent expressions assign to the same variable.
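: // e.g. in f(x, x = 1) the first argument must observe the old value of x;
: // copying it through the accumulator into a fresh register keeps the later
: // assignment from clobbering it.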
2753 2438071 : builder()->LoadAccumulatorWithRegister(source);
2754 2438087 : if (hole_check_mode == HoleCheckMode::kRequired) {
2755 2630 : BuildThrowIfHole(variable);
2756 : }
2757 : break;
2758 : }
2759 : case VariableLocation::PARAMETER: {
2760 : Register source;
2761 3835958 : if (variable->IsReceiver()) {
2762 2436930 : source = builder()->Receiver();
2763 : } else {
2764 1399028 : source = builder()->Parameter(variable->index());
2765 : }
2766 : // We need to load the variable into the accumulator, even when in a
2767 : // VisitForRegisterScope, in order to avoid register aliasing if
2768 : // subsequent expressions assign to the same variable.
2769 3835956 : builder()->LoadAccumulatorWithRegister(source);
2770 3835957 : if (hole_check_mode == HoleCheckMode::kRequired) {
2771 2196475 : BuildThrowIfHole(variable);
2772 : }
2773 : break;
2774 : }
2775 : case VariableLocation::UNALLOCATED: {
2776 : // The global identifier "undefined" is immutable. Everything
2777 : // else could be reassigned. For performance, we do a pointer comparison
2778 : // rather than checking if the raw_name is really "undefined".
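: // e.g. a bare `undefined` reference takes the LoadUndefined() path below,
: // while every other unallocated global goes through LoadGlobal().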
2779 5885120 : if (variable->raw_name() == ast_string_constants()->undefined_string()) {
2780 80084 : builder()->LoadUndefined();
2781 : } else {
2782 5805036 : FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
2783 : builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
2784 5805123 : typeof_mode);
2785 : }
2786 : break;
2787 : }
2788 : case VariableLocation::CONTEXT: {
2789 : int depth = execution_context()->ContextChainDepth(variable->scope());
2790 : ContextScope* context = execution_context()->Previous(depth);
2791 : Register context_reg;
2792 1036550 : if (context) {
2793 958160 : context_reg = context->reg();
2794 : depth = 0;
2795 : } else {
2796 78390 : context_reg = execution_context()->reg();
2797 : }
2798 :
2799 : BytecodeArrayBuilder::ContextSlotMutability immutable =
2800 : (variable->maybe_assigned() == kNotAssigned)
2801 : ? BytecodeArrayBuilder::kImmutableSlot
2802 1036550 : : BytecodeArrayBuilder::kMutableSlot;
2803 :
2804 : builder()->LoadContextSlot(context_reg, variable->index(), depth,
2805 1036550 : immutable);
2806 1036548 : if (hole_check_mode == HoleCheckMode::kRequired) {
2807 269477 : BuildThrowIfHole(variable);
2808 : }
2809 : break;
2810 : }
2811 : case VariableLocation::LOOKUP: {
2812 383822 : switch (variable->mode()) {
2813 : case VariableMode::kDynamicLocal: {
2814 : Variable* local_variable = variable->local_if_not_shadowed();
2815 : int depth =
2816 : execution_context()->ContextChainDepth(local_variable->scope());
2817 : builder()->LoadLookupContextSlot(variable->raw_name(), typeof_mode,
2818 3375 : local_variable->index(), depth);
2819 3375 : if (hole_check_mode == HoleCheckMode::kRequired) {
2820 1216 : BuildThrowIfHole(variable);
2821 : }
2822 : break;
2823 : }
2824 : case VariableMode::kDynamicGlobal: {
2825 : int depth =
2826 354574 : current_scope()->ContextChainLengthUntilOutermostSloppyEval();
2827 354574 : FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
2828 : builder()->LoadLookupGlobalSlot(variable->raw_name(), typeof_mode,
2829 354574 : feedback_index(slot), depth);
2830 : break;
2831 : }
2832 : default:
2833 25873 : builder()->LoadLookupSlot(variable->raw_name(), typeof_mode);
2834 : }
2835 : break;
2836 : }
2837 : case VariableLocation::MODULE: {
2838 : int depth = execution_context()->ContextChainDepth(variable->scope());
2839 1360 : builder()->LoadModuleVariable(variable->index(), depth);
2840 1360 : if (hole_check_mode == HoleCheckMode::kRequired) {
2841 848 : BuildThrowIfHole(variable);
2842 : }
2843 : break;
2844 : }
2845 : }
2846 13580834 : }
2847 :
2848 4199803 : void BytecodeGenerator::BuildVariableLoadForAccumulatorValue(
2849 : Variable* variable, HoleCheckMode hole_check_mode, TypeofMode typeof_mode) {
2850 : ValueResultScope accumulator_result(this);
2851 4199803 : BuildVariableLoad(variable, hole_check_mode, typeof_mode);
2852 4199825 : }
2853 :
2854 2440722 : void BytecodeGenerator::BuildReturn(int source_position) {
2855 2440722 : if (FLAG_trace) {
2856 : RegisterAllocationScope register_scope(this);
2857 0 : Register result = register_allocator()->NewRegister();
2858 : // Runtime returns {result} value, preserving accumulator.
2859 0 : builder()->StoreAccumulatorInRegister(result).CallRuntime(
2860 0 : Runtime::kTraceExit, result);
2861 : }
2862 2440722 : if (info()->collect_type_profile()) {
2863 136 : builder()->CollectTypeProfile(info()->literal()->return_position());
2864 : }
2865 2440722 : builder()->SetReturnPosition(source_position, info()->literal());
2866 2440757 : builder()->Return();
2867 2440801 : }
2868 :
2869 7207 : void BytecodeGenerator::BuildAsyncReturn(int source_position) {
2870 : RegisterAllocationScope register_scope(this);
2871 :
2872 14414 : if (IsAsyncGeneratorFunction(info()->literal()->kind())) {
2873 1055 : RegisterList args = register_allocator()->NewRegisterList(3);
2874 : builder()
2875 1055 : ->MoveRegister(generator_object(), args[0]) // generator
2876 1055 : .StoreAccumulatorInRegister(args[1]) // value
2877 1055 : .LoadTrue()
2878 1055 : .StoreAccumulatorInRegister(args[2]) // done
2879 1055 : .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args);
2880 : } else {
2881 : DCHECK(IsAsyncFunction(info()->literal()->kind()));
2882 6152 : RegisterList args = register_allocator()->NewRegisterList(3);
2883 : builder()
2884 6152 : ->MoveRegister(generator_object(), args[0]) // generator
2885 6152 : .StoreAccumulatorInRegister(args[1]) // value
2886 12304 : .LoadBoolean(info()->literal()->CanSuspend())
2887 6152 : .StoreAccumulatorInRegister(args[2]) // can_suspend
2888 6152 : .CallRuntime(Runtime::kInlineAsyncFunctionResolve, args);
2889 : }
2890 :
2891 7207 : BuildReturn(source_position);
2892 7207 : }
2893 :
2894 35102 : void BytecodeGenerator::BuildReThrow() { builder()->ReThrow(); }
2895 :
2896 2507181 : void BytecodeGenerator::BuildThrowIfHole(Variable* variable) {
2897 2507181 : if (variable->is_this()) {
2898 : DCHECK(variable->mode() == VariableMode::kConst);
2899 2196661 : builder()->ThrowSuperNotCalledIfHole();
2900 : } else {
2901 310520 : builder()->ThrowReferenceErrorIfHole(variable->raw_name());
2902 : }
2903 2507182 : }
2904 :
2905 39206 : void BytecodeGenerator::BuildHoleCheckForVariableAssignment(Variable* variable,
2906 : Token::Value op) {
2907 41877 : if (variable->is_this() && variable->mode() == VariableMode::kConst &&
2908 : op == Token::INIT) {
2909 : // Perform an initialization check for 'this'. 'this' variable is the
2910 : // only variable able to trigger bind operations outside the TDZ
2911 : // via 'super' calls.
2912 2671 : builder()->ThrowSuperAlreadyCalledIfNotHole();
2913 : } else {
2914 : // Perform an initialization check for let/const declared variables.
2915 : // E.g. let x = (x = 20); is not allowed.
2916 : DCHECK(IsLexicalVariableMode(variable->mode()));
2917 36535 : BuildThrowIfHole(variable);
2918 : }
2919 39206 : }
2920 :
2921 5674228 : void BytecodeGenerator::BuildVariableAssignment(
2922 : Variable* variable, Token::Value op, HoleCheckMode hole_check_mode,
2923 : LookupHoistingMode lookup_hoisting_mode) {
2924 : VariableMode mode = variable->mode();
2925 : RegisterAllocationScope assignment_register_scope(this);
2926 : BytecodeLabel end_label;
2927 5674228 : switch (variable->location()) {
2928 : case VariableLocation::PARAMETER:
2929 : case VariableLocation::LOCAL: {
2930 : Register destination;
2931 2424817 : if (VariableLocation::PARAMETER == variable->location()) {
2932 27332 : if (variable->IsReceiver()) {
2933 2515 : destination = builder()->Receiver();
2934 : } else {
2935 24817 : destination = builder()->Parameter(variable->index());
2936 : }
2937 : } else {
2938 2397485 : destination = builder()->Local(variable->index());
2939 : }
2940 :
2941 2424829 : if (hole_check_mode == HoleCheckMode::kRequired) {
2942 : // Load destination to check for hole.
2943 5080 : Register value_temp = register_allocator()->NewRegister();
2944 : builder()
2945 5080 : ->StoreAccumulatorInRegister(value_temp)
2946 5080 : .LoadAccumulatorWithRegister(destination);
2947 :
2948 5080 : BuildHoleCheckForVariableAssignment(variable, op);
2949 5080 : builder()->LoadAccumulatorWithRegister(value_temp);
2950 : }
2951 :
2952 2424829 : if (mode != VariableMode::kConst || op == Token::INIT) {
2953 2419967 : builder()->StoreAccumulatorInRegister(destination);
2954 4862 : } else if (variable->throw_on_const_assignment(language_mode())) {
2955 4829 : builder()->CallRuntime(Runtime::kThrowConstAssignError);
2956 : }
2957 : break;
2958 : }
2959 : case VariableLocation::UNALLOCATED: {
2960 1453299 : FeedbackSlot slot = GetCachedStoreGlobalICSlot(language_mode(), variable);
2961 1453300 : builder()->StoreGlobal(variable->raw_name(), feedback_index(slot));
2962 : break;
2963 : }
2964 : case VariableLocation::CONTEXT: {
2965 : int depth = execution_context()->ContextChainDepth(variable->scope());
2966 : ContextScope* context = execution_context()->Previous(depth);
2967 : Register context_reg;
2968 :
2969 1723910 : if (context) {
2970 1711860 : context_reg = context->reg();
2971 : depth = 0;
2972 : } else {
2973 12050 : context_reg = execution_context()->reg();
2974 : }
2975 :
2976 1723910 : if (hole_check_mode == HoleCheckMode::kRequired) {
2977 : // Load destination to check for hole.
2978 34057 : Register value_temp = register_allocator()->NewRegister();
2979 : builder()
2980 34057 : ->StoreAccumulatorInRegister(value_temp)
2981 : .LoadContextSlot(context_reg, variable->index(), depth,
2982 34057 : BytecodeArrayBuilder::kMutableSlot);
2983 :
2984 34057 : BuildHoleCheckForVariableAssignment(variable, op);
2985 34057 : builder()->LoadAccumulatorWithRegister(value_temp);
2986 : }
2987 :
2988 1723910 : if (mode != VariableMode::kConst || op == Token::INIT) {
2989 1695840 : builder()->StoreContextSlot(context_reg, variable->index(), depth);
2990 28070 : } else if (variable->throw_on_const_assignment(language_mode())) {
2991 28048 : builder()->CallRuntime(Runtime::kThrowConstAssignError);
2992 : }
2993 : break;
2994 : }
2995 : case VariableLocation::LOOKUP: {
2996 : builder()->StoreLookupSlot(variable->raw_name(), language_mode(),
2997 37238 : lookup_hoisting_mode);
2998 37238 : break;
2999 : }
3000 : case VariableLocation::MODULE: {
3001 : DCHECK(IsDeclaredVariableMode(mode));
3002 :
3003 34967 : if (mode == VariableMode::kConst && op != Token::INIT) {
3004 110 : builder()->CallRuntime(Runtime::kThrowConstAssignError);
3005 110 : break;
3006 : }
3007 :
3008 : // If we don't throw above, we know that we're dealing with an
3009 : // export because imports are const and we do not generate initializing
3010 : // assignments for them.
3011 : DCHECK(variable->IsExport());
3012 :
3013 : int depth = execution_context()->ContextChainDepth(variable->scope());
3014 34857 : if (hole_check_mode == HoleCheckMode::kRequired) {
3015 69 : Register value_temp = register_allocator()->NewRegister();
3016 : builder()
3017 69 : ->StoreAccumulatorInRegister(value_temp)
3018 69 : .LoadModuleVariable(variable->index(), depth);
3019 69 : BuildHoleCheckForVariableAssignment(variable, op);
3020 69 : builder()->LoadAccumulatorWithRegister(value_temp);
3021 : }
3022 34857 : builder()->StoreModuleVariable(variable->index(), depth);
3023 34857 : break;
3024 : }
3025 : }
3026 5674315 : }
3027 :
3028 2419663 : void BytecodeGenerator::BuildLoadNamedProperty(const Expression* object_expr,
3029 : Register object,
3030 : const AstRawString* name) {
3031 2419663 : if (ShouldOptimizeAsOneShot()) {
3032 1265363 : builder()->LoadNamedPropertyNoFeedback(object, name);
3033 : } else {
3034 1154300 : FeedbackSlot slot = GetCachedLoadICSlot(object_expr, name);
3035 1154301 : builder()->LoadNamedProperty(object, name, feedback_index(slot));
3036 : }
3037 2419678 : }
3038 :
3039 2422850 : void BytecodeGenerator::BuildStoreNamedProperty(const Expression* object_expr,
3040 : Register object,
3041 : const AstRawString* name) {
3042 : Register value;
3043 2422850 : if (!execution_result()->IsEffect()) {
3044 8415 : value = register_allocator()->NewRegister();
3045 8415 : builder()->StoreAccumulatorInRegister(value);
3046 : }
3047 :
3048 2422864 : if (ShouldOptimizeAsOneShot()) {
3049 114166 : builder()->StoreNamedPropertyNoFeedback(object, name, language_mode());
3050 : } else {
3051 2308698 : FeedbackSlot slot = GetCachedStoreICSlot(object_expr, name);
3052 : builder()->StoreNamedProperty(object, name, feedback_index(slot),
3053 2308698 : language_mode());
3054 : }
3055 :
3056 2422866 : if (!execution_result()->IsEffect()) {
3057 8415 : builder()->LoadAccumulatorWithRegister(value);
3058 : }
3059 2422866 : }
3060 :
3061 : // static
3062 : BytecodeGenerator::AssignmentLhsData
3063 0 : BytecodeGenerator::AssignmentLhsData::NonProperty(Expression* expr) {
3064 : return AssignmentLhsData(NON_PROPERTY, expr, RegisterList(), Register(),
3065 0 : Register(), nullptr, nullptr);
3066 : }
3067 : // static
3068 : BytecodeGenerator::AssignmentLhsData
3069 0 : BytecodeGenerator::AssignmentLhsData::NamedProperty(Expression* object_expr,
3070 : Register object,
3071 : const AstRawString* name) {
3072 : return AssignmentLhsData(NAMED_PROPERTY, nullptr, RegisterList(), object,
3073 0 : Register(), object_expr, name);
3074 : }
3075 : // static
3076 : BytecodeGenerator::AssignmentLhsData
3077 0 : BytecodeGenerator::AssignmentLhsData::KeyedProperty(Register object,
3078 : Register key) {
3079 : return AssignmentLhsData(KEYED_PROPERTY, nullptr, RegisterList(), object, key,
3080 0 : nullptr, nullptr);
3081 : }
3082 : // static
3083 : BytecodeGenerator::AssignmentLhsData
3084 0 : BytecodeGenerator::AssignmentLhsData::NamedSuperProperty(
3085 : RegisterList super_property_args) {
3086 : return AssignmentLhsData(NAMED_SUPER_PROPERTY, nullptr, super_property_args,
3087 0 : Register(), Register(), nullptr, nullptr);
3088 : }
3089 : // static
3090 : BytecodeGenerator::AssignmentLhsData
3091 0 : BytecodeGenerator::AssignmentLhsData::KeyedSuperProperty(
3092 : RegisterList super_property_args) {
3093 : return AssignmentLhsData(KEYED_SUPER_PROPERTY, nullptr, super_property_args,
3094 0 : Register(), Register(), nullptr, nullptr);
3095 : }
3096 :
3097 7599094 : BytecodeGenerator::AssignmentLhsData BytecodeGenerator::PrepareAssignmentLhs(
3098 : Expression* lhs, AccumulatorPreservingMode accumulator_preserving_mode) {
3099 : // Left-hand side can only be a property, a global or a variable slot.
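: // e.g. x = v (non-property), o.x = v (named), o[k] = v (keyed),
: // super.x = v (named super) and super[k] = v (keyed super) map onto the
: // cases handled below.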
3100 7599094 : Property* property = lhs->AsProperty();
3101 7599094 : AssignType assign_type = Property::GetAssignType(property);
3102 :
3103 : // Evaluate LHS expression.
3104 7599104 : switch (assign_type) {
3105 : case NON_PROPERTY:
3106 : return AssignmentLhsData::NonProperty(lhs);
3107 : case NAMED_PROPERTY: {
3108 2422866 : AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3109 2422864 : Register object = VisitForRegisterValue(property->obj());
3110 : const AstRawString* name =
3111 2422850 : property->key()->AsLiteral()->AsRawPropertyName();
3112 : return AssignmentLhsData::NamedProperty(property->obj(), object, name);
3113 : }
3114 : case KEYED_PROPERTY: {
3115 33624 : AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3116 33622 : Register object = VisitForRegisterValue(property->obj());
3117 33624 : Register key = VisitForRegisterValue(property->key());
3118 : return AssignmentLhsData::KeyedProperty(object, key);
3119 : }
3120 : case NAMED_SUPER_PROPERTY: {
3121 220 : AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3122 : RegisterList super_property_args =
3123 220 : register_allocator()->NewRegisterList(4);
3124 : SuperPropertyReference* super_property =
3125 220 : property->obj()->AsSuperPropertyReference();
3126 220 : BuildThisVariableLoad();
3127 220 : builder()->StoreAccumulatorInRegister(super_property_args[0]);
3128 : VisitForRegisterValue(super_property->home_object(),
3129 : super_property_args[1]);
3130 : builder()
3131 440 : ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
3132 220 : .StoreAccumulatorInRegister(super_property_args[2]);
3133 : return AssignmentLhsData::NamedSuperProperty(super_property_args);
3134 : }
3135 : case KEYED_SUPER_PROPERTY: {
3136 340 : AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3137 : RegisterList super_property_args =
3138 340 : register_allocator()->NewRegisterList(4);
3139 : SuperPropertyReference* super_property =
3140 340 : property->obj()->AsSuperPropertyReference();
3141 340 : BuildThisVariableLoad();
3142 340 : builder()->StoreAccumulatorInRegister(super_property_args[0]);
3143 : VisitForRegisterValue(super_property->home_object(),
3144 : super_property_args[1]);
3145 : VisitForRegisterValue(property->key(), super_property_args[2]);
3146 : return AssignmentLhsData::KeyedSuperProperty(super_property_args);
3147 : }
3148 : }
3149 0 : UNREACHABLE();
3150 : }
3151 :
3152 : // Build the iteration finalizer called in the finally block of an iteration
3153 : // protocol execution. This closes the iterator if needed and, if necessary,
3154 : // suppresses any exception thrown while closing it.
3155 : //
3156 : // In pseudo-code, this builds:
3157 : //
3158 : // if (!done) {
3159 : // let method = iterator.return
3160 : // if (method !== null && method !== undefined) {
3161 : // if (typeof(method) !== "function") throw TypeError
3162 : // try {
3163 : // let return_val = method.call(iterator)
3164 : // if (!%IsObject(return_val)) throw TypeError
3165 : // } catch (e) {
3166 : // if (iteration_continuation != RETHROW)
3167 : // rethrow e
3168 : // }
3169 : // }
3170 : // }
3171 : //
3172 : // For async iterators, iterator.close() becomes await iterator.close().
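: // e.g. `const [x] = iterable;` does not exhaust the iterator, so the
: // finalizer built here invokes its return() method (if present) once the
: // destructuring is done.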
3173 38000 : void BytecodeGenerator::BuildFinalizeIteration(
3174 : IteratorRecord iterator, Register done,
3175 : Register iteration_continuation_token) {
3176 : RegisterAllocationScope register_scope(this);
3177 : BytecodeLabels iterator_is_done(zone());
3178 :
3179 : // if (!done) {
3180 38000 : builder()->LoadAccumulatorWithRegister(done).JumpIfTrue(
3181 76001 : ToBooleanMode::kConvertToBoolean, iterator_is_done.New());
3182 :
3183 : // method = iterator.return
3184 : // if (method !== null && method !== undefined) {
3185 38001 : Register method = register_allocator()->NewRegister();
3186 : builder()
3187 : ->LoadNamedProperty(iterator.object(),
3188 : ast_string_constants()->return_string(),
3189 38001 : feedback_index(feedback_spec()->AddLoadICSlot()))
3190 38000 : .StoreAccumulatorInRegister(method)
3191 76000 : .JumpIfUndefined(iterator_is_done.New())
3192 76002 : .JumpIfNull(iterator_is_done.New());
3193 :
3194 : // if (typeof(method) !== "function") throw TypeError
3195 : BytecodeLabel if_callable;
3196 : builder()
3197 38001 : ->CompareTypeOf(TestTypeOfFlags::LiteralFlag::kFunction)
3198 38001 : .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &if_callable);
3199 : {
3200 : // throw %NewTypeError(kReturnMethodNotCallable)
3201 : RegisterAllocationScope register_scope(this);
3202 38001 : RegisterList new_type_error_args = register_allocator()->NewRegisterList(2);
3203 : builder()
3204 38001 : ->LoadLiteral(Smi::FromEnum(MessageTemplate::kReturnMethodNotCallable))
3205 38001 : .StoreAccumulatorInRegister(new_type_error_args[0])
3206 38001 : .LoadLiteral(ast_string_constants()->empty_string())
3207 38001 : .StoreAccumulatorInRegister(new_type_error_args[1])
3208 38001 : .CallRuntime(Runtime::kNewTypeError, new_type_error_args)
3209 38001 : .Throw();
3210 : }
3211 38000 : builder()->Bind(&if_callable);
3212 :
3213 : {
3214 : RegisterAllocationScope register_scope(this);
3215 : BuildTryCatch(
3216 : // try {
3217 : // let return_val = method.call(iterator)
3218 : // if (!%IsObject(return_val)) throw TypeError
3219 : // }
3220 38001 : [&]() {
3221 76002 : RegisterList args(iterator.object());
3222 190344 : builder()->CallProperty(
3223 76002 : method, args, feedback_index(feedback_spec()->AddCallICSlot()));
3224 38001 : if (iterator.type() == IteratorType::kAsync) {
3225 341 : BuildAwait();
3226 : }
3227 76002 : builder()->JumpIfJSReceiver(iterator_is_done.New());
3228 : {
3229 : // Throw this exception inside the try block so that it is
3230 : // suppressed by the iteration continuation if necessary.
3231 : RegisterAllocationScope register_scope(this);
3232 38000 : Register return_result = register_allocator()->NewRegister();
3233 : builder()
3234 38000 : ->StoreAccumulatorInRegister(return_result)
3235 : .CallRuntime(Runtime::kThrowIteratorResultNotAnObject,
3236 38001 : return_result);
3237 : }
3238 38001 : },
3239 :
3240 : // catch (e) {
3241 : // if (iteration_continuation != RETHROW)
3242 : // rethrow e
3243 : // }
3244 38000 : [&](Register context) {
3245 : // Reuse context register to store the exception.
3246 38000 : Register close_exception = context;
3247 114000 : builder()->StoreAccumulatorInRegister(close_exception);
3248 :
3249 : BytecodeLabel suppress_close_exception;
3250 : builder()
3251 : ->LoadLiteral(
3252 38000 : Smi::FromInt(ControlScope::DeferredCommands::kRethrowToken))
3253 76001 : .CompareReference(iteration_continuation_token)
3254 : .JumpIfTrue(ToBooleanMode::kAlreadyBoolean,
3255 38001 : &suppress_close_exception)
3256 38000 : .LoadAccumulatorWithRegister(close_exception)
3257 38001 : .ReThrow()
3258 38001 : .Bind(&suppress_close_exception);
3259 38001 : },
3260 38001 : HandlerTable::UNCAUGHT);
3261 : }
3262 :
3263 38000 : iterator_is_done.Bind(builder());
3264 38002 : }
3265 :
3266 : // Get the default value of a destructuring target. Will mutate the
3267 : // destructuring target expression if there is a default value.
3268 : //
3269 : // For
3270 : // a = b
3271 : // in
3272 : // let {a = b} = c
3273 : // returns b and mutates the input into a.
3274 0 : Expression* BytecodeGenerator::GetDestructuringDefaultValue(
3275 : Expression** target) {
3276 : Expression* default_value = nullptr;
3277 22669 : if ((*target)->IsAssignment()) {
3278 1889 : Assignment* default_init = (*target)->AsAssignment();
3279 : DCHECK_EQ(default_init->op(), Token::ASSIGN);
3280 : default_value = default_init->value();
3281 0 : *target = default_init->target();
3282 : DCHECK((*target)->IsValidReferenceExpression() || (*target)->IsPattern());
3283 : }
3284 0 : return default_value;
3285 : }
3286 :
3287 : // Convert a destructuring assignment to an array literal into a sequence of
3288 : // iterator accesses into the value being assigned (in the accumulator).
3289 : //
3290 : // [a().x, ...b] = accumulator
3291 : //
3292 : // becomes
3293 : //
3294 : // iterator = %GetIterator(accumulator)
3295 : // try {
3296 : //
3297 : // // Individual assignments read off the value from iterator.next(). This gets
3298 : // // repeated per destructuring element.
3299 : // if (!done) {
3300 : // // Make sure we are considered 'done' if .next(), .done or .value fail.
3301 : // done = true
3302 : // var next_result = iterator.next()
3303 : // var tmp_done = next_result.done
3304 : // if (!tmp_done) {
3305 : // value = next_result.value
3306 : // done = false
3307 : // }
3308 : // }
3309 : // if (done)
3310 : // value = undefined
3311 : // a().x = value
3312 : //
3313 : // // A spread receives the remaining items in the iterator.
3314 : // var array = []
3315 : // var index = 0
3316 : // %FillArrayWithIterator(iterator, array, index, done)
3317 : // done = true
3318 : // b = array
3319 : //
3320 : // } catch(e) {
3321 : // iteration_continuation = RETHROW
3322 : // } finally {
3323 : // %FinalizeIteration(iterator, done, iteration_continuation)
3324 : // }
3325 2727 : void BytecodeGenerator::BuildDestructuringArrayAssignment(
3326 : ArrayLiteral* pattern, Token::Value op,
3327 : LookupHoistingMode lookup_hoisting_mode) {
3328 : RegisterAllocationScope scope(this);
3329 :
3330 2727 : Register value = register_allocator()->NewRegister();
3331 2727 : builder()->StoreAccumulatorInRegister(value);
3332 :
3333 : // Store the iterator in a dedicated register so that it can be closed on
3334 : // exit, and the 'done' value in a dedicated register so that it can be
3335 : // changed and accessed independently of the iteration result.
3336 2727 : IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
3337 2727 : Register done = register_allocator()->NewRegister();
3338 2727 : builder()->LoadFalse();
3339 2727 : builder()->StoreAccumulatorInRegister(done);
3340 :
3341 : BuildTryFinally(
3342 : // Try block.
3343 2727 : [&]() {
3344 62082 : Register next_result = register_allocator()->NewRegister();
3345 : FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
3346 : FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
3347 :
3348 : Spread* spread = nullptr;
3349 12522 : for (Expression* target : *pattern->values()) {
3350 4117 : if (target->IsSpread()) {
3351 583 : spread = target->AsSpread();
3352 583 : break;
3353 : }
3354 :
3355 : Expression* default_value = GetDestructuringDefaultValue(&target);
3356 3534 : if (!target->IsPattern()) {
3357 : builder()->SetExpressionAsStatementPosition(target);
3358 : }
3359 :
3360 3534 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
3361 :
3362 : // if (!done) {
3363 : // // Make sure we are considered done if .next(), .done or .value
3364 : // // fail.
3365 : // done = true
3366 : // var next_result = iterator.next()
3367 : // var tmp_done = next_result.done
3368 : // if (!tmp_done) {
3369 : // value = next_result.value
3370 : // done = false
3371 : // }
3372 : // }
3373 : // if (done)
3374 : // value = undefined
3375 : BytecodeLabels is_done(zone());
3376 :
3377 14719 : builder()->LoadAccumulatorWithRegister(done);
3378 : builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean,
3379 7068 : is_done.New());
3380 :
3381 3534 : builder()->LoadTrue().StoreAccumulatorInRegister(done);
3382 7651 : BuildIteratorNext(iterator, next_result);
3383 : builder()
3384 : ->LoadNamedProperty(next_result,
3385 : ast_string_constants()->done_string(),
3386 3534 : feedback_index(next_done_load_slot))
3387 7068 : .JumpIfTrue(ToBooleanMode::kConvertToBoolean, is_done.New())
3388 : .LoadNamedProperty(next_result,
3389 : ast_string_constants()->value_string(),
3390 3534 : feedback_index(next_value_load_slot))
3391 3534 : .StoreAccumulatorInRegister(next_result)
3392 3534 : .LoadFalse()
3393 3534 : .StoreAccumulatorInRegister(done)
3394 3534 : .LoadAccumulatorWithRegister(next_result);
3395 :
3396 : // Only do the assignment if this is not a hole (i.e. 'elided').
3397 3534 : if (!target->IsTheHoleLiteral()) {
3398 : // [<pattern> = <init>] = <value>
3399 : // becomes (roughly)
3400 : // temp = <value>.next();
3401 : // <pattern> = temp === undefined ? <init> : temp;
3402 : BytecodeLabel do_assignment;
3403 3425 : if (default_value) {
3404 454 : builder()->JumpIfNotUndefined(&do_assignment);
3405 : // Since done == true => temp == undefined, jump directly to using
3406 : // the default value for that case.
3407 454 : is_done.Bind(builder());
3408 454 : VisitForAccumulatorValue(default_value);
3409 : } else {
3410 2971 : builder()->Jump(&do_assignment);
3411 2971 : is_done.Bind(builder());
3412 2971 : builder()->LoadUndefined();
3413 : }
3414 3425 : builder()->Bind(&do_assignment);
3415 :
3416 8016 : BuildAssignment(lhs_data, op, lookup_hoisting_mode);
3417 : } else {
3418 : DCHECK_EQ(lhs_data.assign_type(), NON_PROPERTY);
3419 109 : is_done.Bind(builder());
3420 : }
3421 : }
3422 :
3423 2727 : if (spread) {
3424 : RegisterAllocationScope scope(this);
3425 :
3426 : // A spread is turned into a loop over the remainder of the iterator.
3427 : Expression* target = spread->expression();
3428 :
3429 583 : if (!target->IsPattern()) {
3430 : builder()->SetExpressionAsStatementPosition(spread);
3431 : }
3432 :
3433 583 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
3434 :
3435 : // var array = [];
3436 583 : Register array = register_allocator()->NewRegister();
3437 : builder()->CreateEmptyArrayLiteral(
3438 583 : feedback_index(feedback_spec()->AddLiteralSlot()));
3439 583 : builder()->StoreAccumulatorInRegister(array);
3440 :
3441 : // var index = 0;
3442 583 : Register index = register_allocator()->NewRegister();
3443 583 : builder()->LoadLiteral(Smi::zero());
3444 583 : builder()->StoreAccumulatorInRegister(index);
3445 :
3446 : // Set done to true, since it's guaranteed to be true by the time the
3447 : // array fill completes.
3448 583 : builder()->LoadTrue().StoreAccumulatorInRegister(done);
3449 :
3450 : // Fill the array with the iterator.
3451 : FeedbackSlot element_slot =
3452 583 : feedback_spec()->AddStoreInArrayLiteralICSlot();
3453 583 : FeedbackSlot index_slot = feedback_spec()->AddBinaryOpICSlot();
3454 : BuildFillArrayWithIterator(iterator, array, index, next_result,
3455 : next_value_load_slot, next_done_load_slot,
3456 583 : index_slot, element_slot);
3457 :
3458 : // Assign the array to the LHS.
3459 583 : builder()->LoadAccumulatorWithRegister(array);
3460 1749 : BuildAssignment(lhs_data, op, lookup_hoisting_mode);
3461 : }
3462 2727 : },
3463 : // Finally block.
3464 : [&](Register iteration_continuation_token) {
3465 : // Finish the iteration in the finally block.
3466 2727 : BuildFinalizeIteration(iterator, done, iteration_continuation_token);
3467 : },
3468 2727 : HandlerTable::UNCAUGHT);
3469 :
3470 2727 : if (!execution_result()->IsEffect()) {
3471 266 : builder()->LoadAccumulatorWithRegister(value);
3472 : }
3473 2727 : }
3474 :
3475 : // Convert a destructuring assignment to an object literal into a sequence of
3476 : // property accesses into the value being assigned (in the accumulator).
3477 : //
3478 : // { y, [x++]: a(), ...b.c } = value
3479 : //
3480 : // becomes
3481 : //
3482 : // var rest_runtime_callargs = new Array(3);
3483 : // rest_runtime_callargs[0] = value;
3484 : //
3485 : // rest_runtime_callargs[1] = value;
3486 : // y = value.y;
3487 : //
3488 : // var temp1 = %ToName(x++);
3489 : // rest_runtime_callargs[2] = temp1;
3490 : // a() = value[temp1];
3491 : //
3492 : // b.c = %CopyDataPropertiesWithExcludedProperties.call(rest_runtime_callargs);
3493 11076 : void BytecodeGenerator::BuildDestructuringObjectAssignment(
3494 : ObjectLiteral* pattern, Token::Value op,
3495 : LookupHoistingMode lookup_hoisting_mode) {
3496 : RegisterAllocationScope scope(this);
3497 :
3498 : // if (value === null || value === undefined)
3499 : // throw new TypeError(kNonCoercible);
3500 : //
3501 : // TODO(leszeks): Eliminate check if value is known to be non-null (e.g.
3502 : // an object literal).
3503 : BytecodeLabel is_null_or_undefined, not_null_or_undefined;
3504 : builder()
3505 11076 : ->JumpIfNull(&is_null_or_undefined)
3506 11076 : .JumpIfNotUndefined(¬_null_or_undefined);
3507 :
3508 : {
3509 11076 : builder()->Bind(&is_null_or_undefined);
3510 : builder()->SetExpressionPosition(pattern);
3511 11076 : builder()->CallRuntime(Runtime::kThrowPatternAssignmentNonCoercible);
3512 : }
3513 :
3514 : // Store the assignment value in a register.
3515 : Register value;
3516 : RegisterList rest_runtime_callargs;
3517 11076 : if (pattern->has_rest_property()) {
3518 : rest_runtime_callargs =
3519 205 : register_allocator()->NewRegisterList(pattern->properties()->length());
3520 205 : value = rest_runtime_callargs[0];
3521 : } else {
3522 10871 : value = register_allocator()->NewRegister();
3523 : }
3524 11076 : builder()->Bind(¬_null_or_undefined).StoreAccumulatorInRegister(value);
3525 :
3526 : int i = 0;
3527 49346 : for (ObjectLiteralProperty* pattern_property : *pattern->properties()) {
3528 : RegisterAllocationScope scope(this);
3529 :
3530 : // The key of the pattern becomes the key into the RHS value, and the value
3531 : // of the pattern becomes the target of the assignment.
3532 : //
3533 : // e.g. { a: b } = o becomes b = o.a
3534 : Expression* pattern_key = pattern_property->key();
3535 : Expression* target = pattern_property->value();
3536 : Expression* default_value = GetDestructuringDefaultValue(&target);
3537 :
3538 19135 : if (!target->IsPattern()) {
3539 : builder()->SetExpressionAsStatementPosition(target);
3540 : }
3541 :
3542 : // Compute the key used to access this property on the RHS value, additionally
3543 : // storing the key for rest_runtime_callargs if needed.
3544 : //
3545 : // The RHS is accessed using the key either by LoadNamedProperty (if
3546 : // value_name is valid) or by LoadKeyedProperty (otherwise).
3547 : const AstRawString* value_name = nullptr;
3548 : Register value_key;
3549 :
3550 19135 : if (pattern_property->kind() != ObjectLiteralProperty::Kind::SPREAD) {
3551 18930 : if (pattern_key->IsPropertyName()) {
3552 18610 : value_name = pattern_key->AsLiteral()->AsRawPropertyName();
3553 : }
3554 18930 : if (pattern->has_rest_property() || !value_name) {
3555 383 : if (pattern->has_rest_property()) {
3556 129 : value_key = rest_runtime_callargs[i + 1];
3557 : } else {
3558 254 : value_key = register_allocator()->NewRegister();
3559 : }
3560 383 : if (pattern_property->is_computed_name()) {
3561 : // { [a()]: b().x } = c
3562 : // becomes
3563 : // var tmp = a()
3564 : // b().x = c[tmp]
3565 : DCHECK(!pattern_key->IsPropertyName() ||
3566 : !pattern_key->IsNumberLiteral());
3567 270 : VisitForAccumulatorValue(pattern_key);
3568 270 : builder()->ToName(value_key);
3569 : } else {
3570 : // For non-computed properties, we only need the key register when the key
3571 : // is numeric or is being saved for the rest_runtime_callargs.
3572 : DCHECK(
3573 : pattern_key->IsNumberLiteral() ||
3574 : (pattern->has_rest_property() && pattern_key->IsPropertyName()));
3575 : VisitForRegisterValue(pattern_key, value_key);
3576 : }
3577 : }
3578 : }
3579 :
3580 19135 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
3581 :
3582 : // Get the value from the RHS.
3583 19135 : if (pattern_property->kind() == ObjectLiteralProperty::Kind::SPREAD) {
3584 : DCHECK_EQ(i, pattern->properties()->length() - 1);
3585 : DCHECK(!value_key.is_valid());
3586 : DCHECK_NULL(value_name);
3587 : builder()->CallRuntime(Runtime::kCopyDataPropertiesWithExcludedProperties,
3588 205 : rest_runtime_callargs);
3589 18930 : } else if (value_name) {
3590 : builder()->LoadNamedProperty(
3591 18610 : value, value_name, feedback_index(feedback_spec()->AddLoadICSlot()));
3592 : } else {
3593 : DCHECK(value_key.is_valid());
3594 320 : builder()->LoadAccumulatorWithRegister(value_key).LoadKeyedProperty(
3595 320 : value, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
3596 : }
3597 :
3598 : // {<pattern> = <init>} = <value>
3599 : // becomes
3600 : // temp = <value>;
3601 : // <pattern> = temp === undefined ? <init> : temp;
3602 19135 : if (default_value) {
3603 : BytecodeLabel value_not_undefined;
3604 1435 : builder()->JumpIfNotUndefined(&value_not_undefined);
3605 1435 : VisitForAccumulatorValue(default_value);
3606 1435 : builder()->Bind(&value_not_undefined);
3607 : }
3608 :
3609 19135 : BuildAssignment(lhs_data, op, lookup_hoisting_mode);
3610 :
3611 19135 : i++;
3612 : }
3613 :
3614 11076 : if (!execution_result()->IsEffect()) {
3615 193 : builder()->LoadAccumulatorWithRegister(value);
3616 : }
3617 11076 : }
3618 :
3619 7598915 : void BytecodeGenerator::BuildAssignment(
3620 : const AssignmentLhsData& lhs_data, Token::Value op,
3621 : LookupHoistingMode lookup_hoisting_mode) {
3622 : // Assign the value to the LHS.
3623 7598915 : switch (lhs_data.assign_type()) {
3624 : case NON_PROPERTY: {
3625 10283750 : if (ObjectLiteral* pattern = lhs_data.expr()->AsObjectLiteral()) {
3626 : // Split object literals into destructuring.
3627 11076 : BuildDestructuringObjectAssignment(pattern, op, lookup_hoisting_mode);
3628 5130799 : } else if (ArrayLiteral* pattern = lhs_data.expr()->AsArrayLiteral()) {
3629 : // Split array literals into destructuring.
3630 2727 : BuildDestructuringArrayAssignment(pattern, op, lookup_hoisting_mode);
3631 : } else {
3632 : DCHECK(lhs_data.expr()->IsVariableProxy());
3633 : VariableProxy* proxy = lhs_data.expr()->AsVariableProxy();
3634 5128072 : BuildVariableAssignment(proxy->var(), op, proxy->hole_check_mode(),
3635 5128072 : lookup_hoisting_mode);
3636 : }
3637 : break;
3638 : }
3639 : case NAMED_PROPERTY: {
3640 : BuildStoreNamedProperty(lhs_data.object_expr(), lhs_data.object(),
3641 2422859 : lhs_data.name());
3642 2422855 : break;
3643 : }
3644 : case KEYED_PROPERTY: {
3645 : FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
3646 : Register value;
3647 33624 : if (!execution_result()->IsEffect()) {
3648 1959 : value = register_allocator()->NewRegister();
3649 1959 : builder()->StoreAccumulatorInRegister(value);
3650 : }
3651 : builder()->StoreKeyedProperty(lhs_data.object(), lhs_data.key(),
3652 33624 : feedback_index(slot), language_mode());
3653 33624 : if (!execution_result()->IsEffect()) {
3654 1959 : builder()->LoadAccumulatorWithRegister(value);
3655 : }
3656 : break;
3657 : }
3658 : case NAMED_SUPER_PROPERTY: {
3659 : builder()
3660 220 : ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
3661 220 : .CallRuntime(Runtime::kStoreToSuper, lhs_data.super_property_args());
3662 220 : break;
3663 : }
3664 : case KEYED_SUPER_PROPERTY: {
3665 : builder()
3666 340 : ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
3667 : .CallRuntime(Runtime::kStoreKeyedToSuper,
3668 340 : lhs_data.super_property_args());
3669 340 : break;
3670 : }
3671 : }
3672 7598894 : }
3673 :
3674 7446568 : void BytecodeGenerator::VisitAssignment(Assignment* expr) {
3675 7446568 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
3676 :
3677 7446556 : VisitForAccumulatorValue(expr->value());
3678 :
3679 : builder()->SetExpressionPosition(expr);
3680 7446510 : BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
3681 7446458 : }
3682 :
3683 89257 : void BytecodeGenerator::VisitCompoundAssignment(CompoundAssignment* expr) {
3684 89257 : AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
3685 :
3686 : // Evaluate the value and potentially handle compound assignments by loading
3687 : // the left-hand side value and performing a binary operation.
3688 89257 : switch (lhs_data.assign_type()) {
3689 : case NON_PROPERTY: {
3690 86611 : VariableProxy* proxy = expr->target()->AsVariableProxy();
3691 86611 : BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
3692 86611 : break;
3693 : }
3694 : case NAMED_PROPERTY: {
3695 : BuildLoadNamedProperty(lhs_data.object_expr(), lhs_data.object(),
3696 1780 : lhs_data.name());
3697 1780 : break;
3698 : }
3699 : case KEYED_PROPERTY: {
3700 : FeedbackSlot slot = feedback_spec()->AddKeyedLoadICSlot();
3701 : builder()
3702 806 : ->LoadAccumulatorWithRegister(lhs_data.key())
3703 806 : .LoadKeyedProperty(lhs_data.object(), feedback_index(slot));
3704 : break;
3705 : }
3706 : case NAMED_SUPER_PROPERTY: {
3707 : builder()->CallRuntime(Runtime::kLoadFromSuper,
3708 20 : lhs_data.super_property_args().Truncate(3));
3709 20 : break;
3710 : }
3711 : case KEYED_SUPER_PROPERTY: {
3712 : builder()->CallRuntime(Runtime::kLoadKeyedFromSuper,
3713 40 : lhs_data.super_property_args().Truncate(3));
3714 40 : break;
3715 : }
3716 : }
3717 89257 : BinaryOperation* binop = expr->AsCompoundAssignment()->binary_operation();
3718 : FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
3719 89257 : if (expr->value()->IsSmiLiteral()) {
3720 : builder()->BinaryOperationSmiLiteral(
3721 : binop->op(), expr->value()->AsLiteral()->AsSmiLiteral(),
3722 64410 : feedback_index(slot));
3723 : } else {
3724 67787 : Register old_value = register_allocator()->NewRegister();
3725 67787 : builder()->StoreAccumulatorInRegister(old_value);
3726 67787 : VisitForAccumulatorValue(expr->value());
3727 67787 : builder()->BinaryOperation(binop->op(), old_value, feedback_index(slot));
3728 : }
3729 :
3730 : builder()->SetExpressionPosition(expr);
3731 89257 : BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
3732 89257 : }
3733 :
3734 : // Suspends the generator to resume at the next suspend_id, with output stored
3735 : // in the accumulator. When the generator is resumed, the sent value is loaded
3736 : // in the accumulator.
3737 24217 : void BytecodeGenerator::BuildSuspendPoint(int position) {
3738 24217 : const int suspend_id = suspend_count_++;
3739 :
3740 24217 : RegisterList registers = register_allocator()->AllLiveRegisters();
3741 :
3742 : // Save context, registers, and state. This bytecode then returns the value
3743 : // in the accumulator.
3744 : builder()->SetExpressionPosition(position);
3745 24217 : builder()->SuspendGenerator(generator_object(), registers, suspend_id);
3746 :
3747 : // Upon resume, we continue here.
3748 24217 : builder()->Bind(generator_jump_table_, suspend_id);
3749 :
3750 : // Clobbers all registers and sets the accumulator to the
3751 : // [[input_or_debug_pos]] slot of the generator object.
3752 24217 : builder()->ResumeGenerator(generator_object(), registers);
3753 24217 : }
3754 :
3755 8435 : void BytecodeGenerator::VisitYield(Yield* expr) {
3756 : builder()->SetExpressionPosition(expr);
3757 8435 : VisitForAccumulatorValue(expr->expression());
3758 :
3759 : // If this is not the first yield
3760 8435 : if (suspend_count_ > 0) {
3761 3438 : if (IsAsyncGeneratorFunction(function_kind())) {
3762 : // AsyncGenerator yields (with the exception of the initial yield)
3763 : // delegate work to the AsyncGeneratorYield stub, which Awaits the operand
3764 : // and on success, wraps the value in an IteratorResult.
3765 : RegisterAllocationScope register_scope(this);
3766 384 : RegisterList args = register_allocator()->NewRegisterList(3);
3767 : builder()
3768 384 : ->MoveRegister(generator_object(), args[0]) // generator
3769 384 : .StoreAccumulatorInRegister(args[1]) // value
3770 768 : .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
3771 384 : .StoreAccumulatorInRegister(args[2]) // is_caught
3772 384 : .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
3773 : } else {
3774 : // Generator yields (with the exception of the initial yield) wrap the
3775 : // value into an IteratorResult.
3776 : RegisterAllocationScope register_scope(this);
3777 3054 : RegisterList args = register_allocator()->NewRegisterList(2);
3778 : builder()
3779 3054 : ->StoreAccumulatorInRegister(args[0]) // value
3780 3054 : .LoadFalse()
3781 3054 : .StoreAccumulatorInRegister(args[1]) // done
3782 3054 : .CallRuntime(Runtime::kInlineCreateIterResultObject, args);
3783 : }
3784 : }
3785 :
3786 8435 : BuildSuspendPoint(expr->position());
3787 : // At this point, the generator has been resumed, with the received value in
3788 : // the accumulator.
3789 :
3790 : // TODO(caitp): remove once yield* desugaring for async generators is handled
3791 : // in BytecodeGenerator.
3792 8435 : if (expr->on_abrupt_resume() == Yield::kNoControl) {
3793 : DCHECK(IsAsyncGeneratorFunction(function_kind()));
3794 0 : return;
3795 : }
3796 :
3797 8435 : Register input = register_allocator()->NewRegister();
3798 8435 : builder()->StoreAccumulatorInRegister(input).CallRuntime(
3799 8435 : Runtime::kInlineGeneratorGetResumeMode, generator_object());
3800 :
3801 : // Now dispatch on resume mode.
3802 : STATIC_ASSERT(JSGeneratorObject::kNext + 1 == JSGeneratorObject::kReturn);
3803 : BytecodeJumpTable* jump_table =
3804 8435 : builder()->AllocateJumpTable(2, JSGeneratorObject::kNext);
3805 :
3806 8435 : builder()->SwitchOnSmiNoFeedback(jump_table);
3807 :
3808 : {
3809 : // Resume with throw (switch fallthrough).
3810 : // TODO(leszeks): Add a debug-only check that the accumulator is
3811 : // JSGeneratorObject::kThrow.
3812 : builder()->SetExpressionPosition(expr);
3813 8435 : builder()->LoadAccumulatorWithRegister(input);
3814 8435 : builder()->Throw();
3815 : }
3816 :
3817 : {
3818 : // Resume with return.
3819 8435 : builder()->Bind(jump_table, JSGeneratorObject::kReturn);
3820 8435 : builder()->LoadAccumulatorWithRegister(input);
3821 8435 : if (IsAsyncGeneratorFunction(function_kind())) {
3822 : execution_control()->AsyncReturnAccumulator();
3823 : } else {
3824 : execution_control()->ReturnAccumulator();
3825 : }
3826 : }
3827 :
3828 : {
3829 : // Resume with next.
3830 8435 : builder()->Bind(jump_table, JSGeneratorObject::kNext);
3831 : BuildIncrementBlockCoverageCounterIfEnabled(expr,
3832 : SourceRangeKind::kContinuation);
3833 8435 : builder()->LoadAccumulatorWithRegister(input);
3834 : }
3835 : }
3836 :
3837 : // Desugaring of (yield* iterable)
3838 : //
3839 : // do {
3840 : // const kNext = 0;
3841 : // const kReturn = 1;
3842 : // const kThrow = 2;
3843 : //
3844 : // let output; // uninitialized
3845 : //
3846 : // let iteratorRecord = GetIterator(iterable);
3847 : // let iterator = iteratorRecord.[[Iterator]];
3848 : // let next = iteratorRecord.[[NextMethod]];
3849 : // let input = undefined;
3850 : // let resumeMode = kNext;
3851 : //
3852 : // while (true) {
3853 : // // From the generator to the iterator:
3854 : // // Forward input according to resumeMode and obtain output.
3855 : // switch (resumeMode) {
3856 : // case kNext:
3857 : // output = next.[[Call]](iterator, « »);;
3858 : // break;
3859 : // case kReturn:
3860 : // let iteratorReturn = iterator.return;
3861 : // if (IS_NULL_OR_UNDEFINED(iteratorReturn)) return input;
3862 : // output = iteratorReturn.[[Call]](iterator, «input»);
3863 : // break;
3864 : // case kThrow:
3865 : // let iteratorThrow = iterator.throw;
3866 : // if (IS_NULL_OR_UNDEFINED(iteratorThrow)) {
3867 : // let iteratorReturn = iterator.return;
3868 : // if (!IS_NULL_OR_UNDEFINED(iteratorReturn)) {
3869 : // output = iteratorReturn.[[Call]](iterator, « »);
3870 : // if (IS_ASYNC_GENERATOR) output = await output;
3871 : // if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
3872 : // }
3873 : // throw MakeTypeError(kThrowMethodMissing);
3874 : // }
3875 : // output = iteratorThrow.[[Call]](iterator, «input»);
3876 : // break;
3877 : // }
3878 : //
3879 : // if (IS_ASYNC_GENERATOR) output = await output;
3880 : // if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
3881 : // if (output.done) break;
3882 : //
3883 : // // From the generator to its user:
3884 : // // Forward output, receive new input, and determine resume mode.
3885 : // if (IS_ASYNC_GENERATOR) {
3886 : // // AsyncGeneratorYield abstract operation awaits the operand before
3887 : // // resolving the promise for the current AsyncGeneratorRequest.
3888 : // %_AsyncGeneratorYield(output.value)
3889 : // }
3890 : // input = Suspend(output);
3891 : // resumeMode = %GeneratorGetResumeMode();
3892 : // }
3893 : //
3894 : // if (resumeMode === kReturn) {
3895 : // return output.value;
3896 : // }
3897 : // output.value
3898 : // }
3899 194 : void BytecodeGenerator::VisitYieldStar(YieldStar* expr) {
3900 194 : Register output = register_allocator()->NewRegister();
3901 194 : Register resume_mode = register_allocator()->NewRegister();
3902 : IteratorType iterator_type = IsAsyncGeneratorFunction(function_kind())
3903 : ? IteratorType::kAsync
3904 194 : : IteratorType::kNormal;
3905 :
3906 : {
3907 : RegisterAllocationScope register_scope(this);
3908 194 : RegisterList iterator_and_input = register_allocator()->NewRegisterList(2);
3909 194 : VisitForAccumulatorValue(expr->expression());
3910 : IteratorRecord iterator = BuildGetIteratorRecord(
3911 : register_allocator()->NewRegister() /* next method */,
3912 194 : iterator_and_input[0], iterator_type);
3913 :
3914 194 : Register input = iterator_and_input[1];
3915 194 : builder()->LoadUndefined().StoreAccumulatorInRegister(input);
3916 : builder()
3917 194 : ->LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
3918 194 : .StoreAccumulatorInRegister(resume_mode);
3919 :
3920 : {
3921 : // This loop builder does not construct counters as the loop is not
3922 : // visible to the user, and we therefore neither pass the block coverage
3923 : // builder nor the expression.
3924 : //
3925 : // In addition to the normal suspend for yield*, a yield* in an async
3926 : // generator has 2 additional suspends:
3927 : // - One for awaiting the iterator result of closing the generator when
3928 : // resumed with a "throw" completion, and a throw method is not
3929 : // present on the delegated iterator
3930 : // - One for awaiting the iterator result yielded by the delegated
3931 : // iterator
3932 :
3933 194 : LoopBuilder loop(builder(), nullptr, nullptr);
3934 194 : loop.LoopHeader();
3935 :
3936 : {
3937 : BytecodeLabels after_switch(zone());
3938 : BytecodeJumpTable* switch_jump_table =
3939 194 : builder()->AllocateJumpTable(2, 1);
3940 :
3941 : builder()
3942 194 : ->LoadAccumulatorWithRegister(resume_mode)
3943 194 : .SwitchOnSmiNoFeedback(switch_jump_table);
3944 :
3945 : // Fallthrough to default case.
3946 : // TODO(tebbi): Add debug code to check that {resume_mode} really is
3947 : // {JSGeneratorObject::kNext} in this case.
3948 : STATIC_ASSERT(JSGeneratorObject::kNext == 0);
3949 : {
3950 : FeedbackSlot slot = feedback_spec()->AddCallICSlot();
3951 : builder()->CallProperty(iterator.next(), iterator_and_input,
3952 194 : feedback_index(slot));
3953 194 : builder()->Jump(after_switch.New());
3954 : }
3955 :
3956 : STATIC_ASSERT(JSGeneratorObject::kReturn == 1);
3957 194 : builder()->Bind(switch_jump_table, JSGeneratorObject::kReturn);
3958 : {
3959 : const AstRawString* return_string =
3960 : ast_string_constants()->return_string();
3961 : BytecodeLabels no_return_method(zone());
3962 :
3963 194 : BuildCallIteratorMethod(iterator.object(), return_string,
3964 : iterator_and_input, after_switch.New(),
3965 194 : &no_return_method);
3966 194 : no_return_method.Bind(builder());
3967 194 : builder()->LoadAccumulatorWithRegister(input);
3968 194 : if (iterator_type == IteratorType::kAsync) {
3969 : execution_control()->AsyncReturnAccumulator();
3970 : } else {
3971 : execution_control()->ReturnAccumulator();
3972 : }
3973 : }
3974 :
3975 : STATIC_ASSERT(JSGeneratorObject::kThrow == 2);
3976 194 : builder()->Bind(switch_jump_table, JSGeneratorObject::kThrow);
3977 : {
3978 : const AstRawString* throw_string =
3979 : ast_string_constants()->throw_string();
3980 : BytecodeLabels no_throw_method(zone());
3981 194 : BuildCallIteratorMethod(iterator.object(), throw_string,
3982 : iterator_and_input, after_switch.New(),
3983 194 : &no_throw_method);
3984 :
3985 : // If there is no "throw" method, perform IteratorClose, and finally
3986 : // throw a TypeError.
3987 194 : no_throw_method.Bind(builder());
3988 194 : BuildIteratorClose(iterator, expr);
3989 194 : builder()->CallRuntime(Runtime::kThrowThrowMethodMissing);
3990 : }
3991 :
3992 194 : after_switch.Bind(builder());
3993 : }
3994 :
3995 194 : if (iterator_type == IteratorType::kAsync) {
3996 : // Await the result of the method invocation.
3997 10 : BuildAwait(expr->position());
3998 : }
3999 :
4000 : // Check that output is an object.
4001 : BytecodeLabel check_if_done;
4002 : builder()
4003 194 : ->StoreAccumulatorInRegister(output)
4004 194 : .JumpIfJSReceiver(&check_if_done)
4005 194 : .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, output);
4006 :
4007 194 : builder()->Bind(&check_if_done);
4008 : // Break once output.done is true.
4009 : builder()->LoadNamedProperty(
4010 : output, ast_string_constants()->done_string(),
4011 194 : feedback_index(feedback_spec()->AddLoadICSlot()));
4012 :
4013 : loop.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
4014 :
4015 : // Suspend the current generator.
4016 194 : if (iterator_type == IteratorType::kNormal) {
4017 184 : builder()->LoadAccumulatorWithRegister(output);
4018 : } else {
4019 : RegisterAllocationScope register_scope(this);
4020 : DCHECK_EQ(iterator_type, IteratorType::kAsync);
4021 : // If generatorKind is async, perform AsyncGeneratorYield(output.value),
4022 : // which will await `output.value` before resolving the current
4023 : // AsyncGeneratorRequest's promise.
4024 : builder()->LoadNamedProperty(
4025 : output, ast_string_constants()->value_string(),
4026 10 : feedback_index(feedback_spec()->AddLoadICSlot()));
4027 :
4028 10 : RegisterList args = register_allocator()->NewRegisterList(3);
4029 : builder()
4030 10 : ->MoveRegister(generator_object(), args[0]) // generator
4031 10 : .StoreAccumulatorInRegister(args[1]) // value
4032 20 : .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
4033 10 : .StoreAccumulatorInRegister(args[2]) // is_caught
4034 10 : .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
4035 : }
4036 :
4037 194 : BuildSuspendPoint(expr->position());
4038 194 : builder()->StoreAccumulatorInRegister(input);
4039 : builder()
4040 : ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode,
4041 194 : generator_object())
4042 194 : .StoreAccumulatorInRegister(resume_mode);
4043 :
4044 194 : loop.BindContinueTarget();
4045 194 : loop.JumpToHeader(loop_depth_);
4046 : }
4047 : }
4048 :
4049 : // Decide if we trigger a return or if the yield* expression should just
4050 : // produce a value.
4051 : BytecodeLabel completion_is_output_value;
4052 194 : Register output_value = register_allocator()->NewRegister();
4053 : builder()
4054 : ->LoadNamedProperty(output, ast_string_constants()->value_string(),
4055 194 : feedback_index(feedback_spec()->AddLoadICSlot()))
4056 194 : .StoreAccumulatorInRegister(output_value)
4057 194 : .LoadLiteral(Smi::FromInt(JSGeneratorObject::kReturn))
4058 194 : .CompareReference(resume_mode)
4059 194 : .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &completion_is_output_value)
4060 194 : .LoadAccumulatorWithRegister(output_value);
4061 194 : if (iterator_type == IteratorType::kAsync) {
4062 : execution_control()->AsyncReturnAccumulator();
4063 : } else {
4064 : execution_control()->ReturnAccumulator();
4065 : }
4066 :
4067 194 : builder()->Bind(&completion_is_output_value);
4068 : BuildIncrementBlockCoverageCounterIfEnabled(expr,
4069 : SourceRangeKind::kContinuation);
4070 194 : builder()->LoadAccumulatorWithRegister(output_value);
4071 194 : }
4072 :
4073 15588 : void BytecodeGenerator::BuildAwait(int position) {
4074 : // Rather than HandlerTable::UNCAUGHT, async functions use
4075 : // HandlerTable::ASYNC_AWAIT to communicate that top-level exceptions are
4076 : // transformed into promise rejections. This is necessary to prevent emitting
4077 : // multiple debug events for the same uncaught exception. There is no point
4078 : // in the body of an async function where catch prediction is
4079 : // HandlerTable::UNCAUGHT.
4080 : DCHECK(catch_prediction() != HandlerTable::UNCAUGHT);
4081 :
4082 : {
4083 : // Await(operand) and suspend.
4084 : RegisterAllocationScope register_scope(this);
4085 :
4086 : Runtime::FunctionId await_intrinsic_id;
4087 15588 : if (IsAsyncGeneratorFunction(function_kind())) {
4088 : await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
4089 : ? Runtime::kInlineAsyncGeneratorAwaitUncaught
4090 1308 : : Runtime::kInlineAsyncGeneratorAwaitCaught;
4091 : } else {
4092 : await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
4093 : ? Runtime::kInlineAsyncFunctionAwaitUncaught
4094 14280 : : Runtime::kInlineAsyncFunctionAwaitCaught;
4095 : }
4096 15588 : RegisterList args = register_allocator()->NewRegisterList(2);
4097 : builder()
4098 15588 : ->MoveRegister(generator_object(), args[0])
4099 15588 : .StoreAccumulatorInRegister(args[1])
4100 15588 : .CallRuntime(await_intrinsic_id, args);
4101 : }
4102 :
4103 15588 : BuildSuspendPoint(position);
4104 :
4105 15588 : Register input = register_allocator()->NewRegister();
4106 15588 : Register resume_mode = register_allocator()->NewRegister();
4107 :
4108 : // Now dispatch on resume mode.
4109 : BytecodeLabel resume_next;
4110 : builder()
4111 15588 : ->StoreAccumulatorInRegister(input)
4112 15588 : .CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator_object())
4113 15588 : .StoreAccumulatorInRegister(resume_mode)
4114 15588 : .LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
4115 15588 : .CompareReference(resume_mode)
4116 15588 : .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &resume_next);
4117 :
4118 : // Resume with "throw" completion (rethrow the received value).
4119 : // TODO(leszeks): Add a debug-only check that the accumulator is
4120 : // JSGeneratorObject::kThrow.
4121 15588 : builder()->LoadAccumulatorWithRegister(input).ReThrow();
4122 :
4123 : // Resume with next.
4124 15588 : builder()->Bind(&resume_next);
4125 15588 : builder()->LoadAccumulatorWithRegister(input);
4126 15588 : }
4127 :
4128 14886 : void BytecodeGenerator::VisitAwait(Await* expr) {
4129 : builder()->SetExpressionPosition(expr);
4130 14886 : VisitForAccumulatorValue(expr->expression());
4131 14886 : BuildAwait(expr->position());
4132 : BuildIncrementBlockCoverageCounterIfEnabled(expr,
4133 : SourceRangeKind::kContinuation);
4134 14886 : }
4135 :
4136 20436 : void BytecodeGenerator::VisitThrow(Throw* expr) {
4137 : AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kContinuation);
4138 20436 : VisitForAccumulatorValue(expr->exception());
4139 : builder()->SetExpressionPosition(expr);
4140 20436 : builder()->Throw();
4141 20436 : }
4142 :
4143 2556898 : void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* property) {
4144 2556898 : AssignType property_kind = Property::GetAssignType(property);
4145 2556924 : switch (property_kind) {
4146 : case NON_PROPERTY:
4147 0 : UNREACHABLE();
4148 : case NAMED_PROPERTY: {
4149 : builder()->SetExpressionPosition(property);
4150 : const AstRawString* name =
4151 2417887 : property->key()->AsLiteral()->AsRawPropertyName();
4152 2417887 : BuildLoadNamedProperty(property->obj(), obj, name);
4153 2417895 : break;
4154 : }
4155 : case KEYED_PROPERTY: {
4156 138262 : VisitForAccumulatorValue(property->key());
4157 : builder()->SetExpressionPosition(property);
4158 : builder()->LoadKeyedProperty(
4159 138265 : obj, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
4160 138265 : break;
4161 : }
4162 : case NAMED_SUPER_PROPERTY:
4163 417 : VisitNamedSuperPropertyLoad(property, Register::invalid_value());
4164 417 : break;
4165 : case KEYED_SUPER_PROPERTY:
4166 360 : VisitKeyedSuperPropertyLoad(property, Register::invalid_value());
4167 360 : break;
4168 : }
4169 2556935 : }
4170 :
4171 815209 : void BytecodeGenerator::VisitPropertyLoadForRegister(Register obj,
4172 : Property* expr,
4173 : Register destination) {
4174 : ValueResultScope result_scope(this);
4175 815209 : VisitPropertyLoad(obj, expr);
4176 815221 : builder()->StoreAccumulatorInRegister(destination);
4177 815223 : }
4178 :
4179 886 : void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property,
4180 : Register opt_receiver_out) {
4181 : RegisterAllocationScope register_scope(this);
4182 : SuperPropertyReference* super_property =
4183 886 : property->obj()->AsSuperPropertyReference();
4184 886 : RegisterList args = register_allocator()->NewRegisterList(3);
4185 886 : BuildThisVariableLoad();
4186 886 : builder()->StoreAccumulatorInRegister(args[0]);
4187 : VisitForRegisterValue(super_property->home_object(), args[1]);
4188 :
4189 : builder()->SetExpressionPosition(property);
4190 : builder()
4191 1772 : ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
4192 886 : .StoreAccumulatorInRegister(args[2])
4193 886 : .CallRuntime(Runtime::kLoadFromSuper, args);
4194 :
4195 886 : if (opt_receiver_out.is_valid()) {
4196 469 : builder()->MoveRegister(args[0], opt_receiver_out);
4197 : }
4198 886 : }
4199 :
4200 400 : void BytecodeGenerator::VisitKeyedSuperPropertyLoad(Property* property,
4201 : Register opt_receiver_out) {
4202 : RegisterAllocationScope register_scope(this);
4203 : SuperPropertyReference* super_property =
4204 400 : property->obj()->AsSuperPropertyReference();
4205 400 : RegisterList args = register_allocator()->NewRegisterList(3);
4206 400 : BuildThisVariableLoad();
4207 400 : builder()->StoreAccumulatorInRegister(args[0]);
4208 : VisitForRegisterValue(super_property->home_object(), args[1]);
4209 : VisitForRegisterValue(property->key(), args[2]);
4210 :
4211 : builder()->SetExpressionPosition(property);
4212 400 : builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, args);
4213 :
4214 400 : if (opt_receiver_out.is_valid()) {
4215 40 : builder()->MoveRegister(args[0], opt_receiver_out);
4216 : }
4217 400 : }
4218 :
4219 1741720 : void BytecodeGenerator::VisitProperty(Property* expr) {
4220 1741720 : AssignType property_kind = Property::GetAssignType(expr);
4221 1741719 : if (property_kind != NAMED_SUPER_PROPERTY &&
4222 : property_kind != KEYED_SUPER_PROPERTY) {
4223 1740942 : Register obj = VisitForRegisterValue(expr->obj());
4224 1740934 : VisitPropertyLoad(obj, expr);
4225 : } else {
4226 777 : VisitPropertyLoad(Register::invalid_value(), expr);
4227 : }
4228 1741708 : }
4229 :
4230 0 : void BytecodeGenerator::VisitResolvedProperty(ResolvedProperty* expr) {
4231 : // Handled by VisitCall().
4232 0 : UNREACHABLE();
4233 : }
4234 :
4235 0 : void BytecodeGenerator::VisitArguments(const ZonePtrList<Expression>* args,
4236 : RegisterList* arg_regs) {
4237 : // Visit arguments.
4238 17621396 : for (int i = 0; i < static_cast<int>(args->length()); i++) {
4239 6126906 : VisitAndPushIntoRegisterList(args->at(i), arg_regs);
4240 : }
4241 0 : }
4242 :
4243 5158516 : void BytecodeGenerator::VisitCall(Call* expr) {
4244 : Expression* callee_expr = expr->expression();
4245 5158516 : Call::CallType call_type = expr->GetCallType();
4246 :
4247 5158512 : if (call_type == Call::SUPER_CALL) {
4248 4275 : return VisitCallSuper(expr);
4249 : }
4250 :
4251 : // Grow the args list as we visit receiver / arguments to avoid allocating all
4252 : // the registers up-front. Otherwise these registers are unavailable during
4253 : // receiver / argument visiting and we can end up with memory leaks due to
4254 : // registers keeping objects alive.
4255 5154237 : Register callee = register_allocator()->NewRegister();
4256 5154170 : RegisterList args = register_allocator()->NewGrowableRegisterList();
4257 :
4258 : bool implicit_undefined_receiver = false;
4259 : // When a call contains a spread, a Call AST node is only created if there is
4260 : // exactly one spread, and it is the last argument.
4261 : bool is_spread_call = expr->only_last_arg_is_spread();
4262 : bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
4263 :
4264 : // TODO(petermarshall): We have a lot of call bytecodes that are very similar,
4265 : // see if we can reduce the number by adding a separate argument which
4266 : // specifies the call type (e.g., property, spread, tailcall, etc.).
4267 :
4268 :   // Prepare the callee and the receiver for the function call. This depends on
4269 : // the semantics of the underlying call type.
4270 5154170 : switch (call_type) {
4271 : case Call::NAMED_PROPERTY_CALL:
4272 : case Call::KEYED_PROPERTY_CALL: {
4273 815213 : Property* property = callee_expr->AsProperty();
4274 815213 : VisitAndPushIntoRegisterList(property->obj(), &args);
4275 815217 : VisitPropertyLoadForRegister(args.last_register(), property, callee);
4276 815221 : break;
4277 : }
4278 : case Call::RESOLVED_PROPERTY_CALL: {
4279 0 : ResolvedProperty* resolved = callee_expr->AsResolvedProperty();
4280 0 : VisitAndPushIntoRegisterList(resolved->object(), &args);
4281 0 : VisitForAccumulatorValue(resolved->property());
4282 0 : builder()->StoreAccumulatorInRegister(callee);
4283 0 : break;
4284 : }
4285 : case Call::GLOBAL_CALL: {
4286 : // Receiver is undefined for global calls.
4287 3809420 : if (!is_spread_call && !optimize_as_one_shot) {
4288 : implicit_undefined_receiver = true;
4289 : } else {
4290 : // TODO(leszeks): There's no special bytecode for tail calls or spread
4291 : // calls with an undefined receiver, so just push undefined ourselves.
4292 702517 : BuildPushUndefinedIntoRegisterList(&args);
4293 : }
4294 : // Load callee as a global variable.
4295 3809418 : VariableProxy* proxy = callee_expr->AsVariableProxy();
4296 : BuildVariableLoadForAccumulatorValue(proxy->var(),
4297 3809418 : proxy->hole_check_mode());
4298 3809428 : builder()->StoreAccumulatorInRegister(callee);
4299 3809437 : break;
4300 : }
4301 : case Call::WITH_CALL: {
4302 4536 : Register receiver = register_allocator()->GrowRegisterList(&args);
4303 : DCHECK(callee_expr->AsVariableProxy()->var()->IsLookupSlot());
4304 : {
4305 : RegisterAllocationScope inner_register_scope(this);
4306 4536 : Register name = register_allocator()->NewRegister();
4307 :
4308 : // Call %LoadLookupSlotForCall to get the callee and receiver.
4309 4536 : RegisterList result_pair = register_allocator()->NewRegisterList(2);
4310 4536 : Variable* variable = callee_expr->AsVariableProxy()->var();
4311 : builder()
4312 4536 : ->LoadLiteral(variable->raw_name())
4313 4536 : .StoreAccumulatorInRegister(name)
4314 : .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name,
4315 4536 : result_pair)
4316 4536 : .MoveRegister(result_pair[0], callee)
4317 4536 : .MoveRegister(result_pair[1], receiver);
4318 : }
4319 : break;
4320 : }
4321 : case Call::OTHER_CALL: {
4322 : // Receiver is undefined for other calls.
4323 524575 : if (!is_spread_call && !optimize_as_one_shot) {
4324 : implicit_undefined_receiver = true;
4325 : } else {
4326 : // TODO(leszeks): There's no special bytecode for tail calls or spread
4327 : // calls with an undefined receiver, so just push undefined ourselves.
4328 224270 : BuildPushUndefinedIntoRegisterList(&args);
4329 : }
4330 : VisitForRegisterValue(callee_expr, callee);
4331 : break;
4332 : }
4333 : case Call::NAMED_SUPER_PROPERTY_CALL: {
4334 469 : Register receiver = register_allocator()->GrowRegisterList(&args);
4335 469 : Property* property = callee_expr->AsProperty();
4336 469 : VisitNamedSuperPropertyLoad(property, receiver);
4337 469 : builder()->StoreAccumulatorInRegister(callee);
4338 : break;
4339 : }
4340 : case Call::KEYED_SUPER_PROPERTY_CALL: {
4341 40 : Register receiver = register_allocator()->GrowRegisterList(&args);
4342 40 : Property* property = callee_expr->AsProperty();
4343 40 : VisitKeyedSuperPropertyLoad(property, receiver);
4344 40 : builder()->StoreAccumulatorInRegister(callee);
4345 : break;
4346 : }
4347 : case Call::SUPER_CALL:
4348 0 : UNREACHABLE();
4349 : break;
4350 : }
4351 :
4352 : // Evaluate all arguments to the function call and store in sequential args
4353 : // registers.
4354 : VisitArguments(expr->arguments(), &args);
4355 5154241 : int reciever_arg_count = implicit_undefined_receiver ? 0 : 1;
4356 5154241 : CHECK_EQ(reciever_arg_count + expr->arguments()->length(),
4357 : args.register_count());
4358 :
4359 : // Resolve callee for a potential direct eval call. This block will mutate the
4360 : // callee value.
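     :   // e.g. a call written literally as `eval(src)` may or may not be a direct
     :   // eval; the ResolvePossiblyDirectEval runtime call below picks the callee
     :   // that is actually invoked.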
4361 5154241 : if (expr->is_possibly_eval() && expr->arguments()->length() > 0) {
4362 : RegisterAllocationScope inner_register_scope(this);
4363 :     // Set up arguments for ResolvePossiblyDirectEval by copying the callee, the
4364 :     // eval source string, and the function closure, and by loading the language
4365 :     // mode and source positions.
4366 106665 : Register first_arg = args[reciever_arg_count];
4367 106665 : RegisterList runtime_call_args = register_allocator()->NewRegisterList(6);
4368 : builder()
4369 106666 : ->MoveRegister(callee, runtime_call_args[0])
4370 106666 : .MoveRegister(first_arg, runtime_call_args[1])
4371 213332 : .MoveRegister(Register::function_closure(), runtime_call_args[2])
4372 106666 : .LoadLiteral(Smi::FromEnum(language_mode()))
4373 106665 : .StoreAccumulatorInRegister(runtime_call_args[3])
4374 106666 : .LoadLiteral(Smi::FromInt(current_scope()->start_position()))
4375 106666 : .StoreAccumulatorInRegister(runtime_call_args[4])
4376 106666 : .LoadLiteral(Smi::FromInt(expr->position()))
4377 106666 : .StoreAccumulatorInRegister(runtime_call_args[5]);
4378 :
4379 : // Call ResolvePossiblyDirectEval and modify the callee.
4380 : builder()
4381 106666 : ->CallRuntime(Runtime::kResolvePossiblyDirectEval, runtime_call_args)
4382 106666 : .StoreAccumulatorInRegister(callee);
4383 : }
4384 :
4385 : builder()->SetExpressionPosition(expr);
4386 :
4387 5154242 : if (is_spread_call) {
4388 : DCHECK(!implicit_undefined_receiver);
4389 : builder()->CallWithSpread(callee, args,
4390 1709 : feedback_index(feedback_spec()->AddCallICSlot()));
4391 5152533 : } else if (optimize_as_one_shot) {
4392 : DCHECK(!implicit_undefined_receiver);
4393 1235346 : builder()->CallNoFeedback(callee, args);
4394 7834374 : } else if (call_type == Call::NAMED_PROPERTY_CALL ||
4395 3917187 : call_type == Call::KEYED_PROPERTY_CALL ||
4396 3917187 : call_type == Call::RESOLVED_PROPERTY_CALL) {
4397 : DCHECK(!implicit_undefined_receiver);
4398 : builder()->CallProperty(callee, args,
4399 506716 : feedback_index(feedback_spec()->AddCallICSlot()));
4400 3410474 : } else if (implicit_undefined_receiver) {
4401 : builder()->CallUndefinedReceiver(
4402 3407207 : callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
4403 : } else {
4404 : builder()->CallAnyReceiver(
4405 3271 : callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
4406 : }
4407 : }
4408 :
4409 4275 : void BytecodeGenerator::VisitCallSuper(Call* expr) {
4410 : RegisterAllocationScope register_scope(this);
4411 4275 : SuperCallReference* super = expr->expression()->AsSuperCallReference();
4412 : const ZonePtrList<Expression>* args = expr->arguments();
4413 :
4414 : int first_spread_index = 0;
4415 5517 : for (; first_spread_index < args->length(); first_spread_index++) {
4416 6860 : if (args->at(first_spread_index)->IsSpread()) break;
4417 : }
4418 :
4419 :   // Prepare the constructor for the super call.
4420 4275 : Register this_function = VisitForRegisterValue(super->this_function_var());
4421 4275 : Register constructor = register_allocator()->NewRegister();
4422 : builder()
4423 4275 : ->LoadAccumulatorWithRegister(this_function)
4424 4275 : .GetSuperConstructor(constructor);
4425 :
4426 4275 : if (first_spread_index < expr->arguments()->length() - 1) {
4427 : // We rewrite something like
4428 : // super(1, ...x, 2)
4429 : // to
4430 : // %reflect_construct(constructor, [1, ...x, 2], new_target)
4431 : // That is, we implement (non-last-arg) spreads in super calls via our
4432 : // mechanism for spreads in array literals.
4433 :
4434 : // First generate the array containing all arguments.
4435 30 : BuildCreateArrayLiteral(args, nullptr);
4436 :
4437 : // Now pass that array to %reflect_construct.
4438 30 : RegisterList construct_args = register_allocator()->NewRegisterList(3);
4439 30 : builder()->StoreAccumulatorInRegister(construct_args[1]);
4440 30 : builder()->MoveRegister(constructor, construct_args[0]);
4441 : VisitForRegisterValue(super->new_target_var(), construct_args[2]);
4442 30 : builder()->CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, construct_args);
4443 : } else {
4444 4245 : RegisterList args_regs = register_allocator()->NewGrowableRegisterList();
4445 : VisitArguments(args, &args_regs);
4446 : // The new target is loaded into the accumulator from the
4447 : // {new.target} variable.
4448 4245 : VisitForAccumulatorValue(super->new_target_var());
4449 : builder()->SetExpressionPosition(expr);
4450 :
4451 : int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
4452 :
4453 4245 : if (first_spread_index == expr->arguments()->length() - 1) {
4454 : builder()->ConstructWithSpread(constructor, args_regs,
4455 2779 : feedback_slot_index);
4456 : } else {
4457 : DCHECK_EQ(first_spread_index, expr->arguments()->length());
4458 : // Call construct.
4459 : // TODO(turbofan): For now we do gather feedback on super constructor
4460 : // calls, utilizing the existing machinery to inline the actual call
4461 : // target and the JSCreate for the implicit receiver allocation. This
4462 : // is not an ideal solution for super constructor calls, but it gets
4463 : // the job done for now. In the long run we might want to revisit this
4464 : // and come up with a better way.
4465 1466 : builder()->Construct(constructor, args_regs, feedback_slot_index);
4466 : }
4467 : }
4468 :
4469 : // Explicit calls to the super constructor using super() perform an
4470 : // implicit binding assignment to the 'this' variable.
4471 : //
4472 : // Default constructors don't need have to do the assignment because
4473 :   // Default constructors don't have to do the assignment because
4474 8550 : if (!IsDefaultConstructor(info()->literal()->kind())) {
4475 2671 : Variable* var = closure_scope()->GetReceiverScope()->receiver();
4476 2671 : BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kRequired);
4477 : }
4478 :
4479 :   // The derived constructor always has the correct bit set, so we
4480 :   // don't emit code to load and call the initializer when it is not
4481 :   // required.
4482 : //
4483 : // For the arrow function or eval case, we always emit code to load
4484 : // and call the initializer.
4485 : //
4486 : // TODO(gsathya): In the future, we could tag nested arrow functions
4487 : // or eval with the correct bit so that we do the load conditionally
4488 : // if required.
4489 8325 : if (info()->literal()->requires_instance_members_initializer() ||
4490 4050 : !IsDerivedConstructor(info()->literal()->kind())) {
4491 356 : Register instance = register_allocator()->NewRegister();
4492 356 : builder()->StoreAccumulatorInRegister(instance);
4493 356 : BuildInstanceMemberInitialization(this_function, instance);
4494 356 : builder()->LoadAccumulatorWithRegister(instance);
4495 : }
4496 4275 : }
4497 :
4498 144054 : void BytecodeGenerator::VisitCallNew(CallNew* expr) {
4499 144054 : Register constructor = VisitForRegisterValue(expr->expression());
4500 144050 : RegisterList args = register_allocator()->NewGrowableRegisterList();
4501 : VisitArguments(expr->arguments(), &args);
4502 :
4503 :   // The accumulator holds the new target, which is the same as the
4504 :   // constructor for CallNew.
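     :   // e.g. for `new Foo(x)`, new.target inside Foo is Foo itself.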
4505 : builder()->SetExpressionPosition(expr);
4506 144058 : builder()->LoadAccumulatorWithRegister(constructor);
4507 :
4508 : int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
4509 144055 : if (expr->only_last_arg_is_spread()) {
4510 176 : builder()->ConstructWithSpread(constructor, args, feedback_slot_index);
4511 : } else {
4512 143879 : builder()->Construct(constructor, args, feedback_slot_index);
4513 : }
4514 144057 : }
4515 :
4516 65016 : void BytecodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4517 65016 : if (expr->is_jsruntime()) {
4518 362 : RegisterList args = register_allocator()->NewGrowableRegisterList();
4519 : VisitArguments(expr->arguments(), &args);
4520 362 : builder()->CallJSRuntime(expr->context_index(), args);
4521 : } else {
4522 : // Evaluate all arguments to the runtime call.
4523 64654 : RegisterList args = register_allocator()->NewGrowableRegisterList();
4524 : VisitArguments(expr->arguments(), &args);
4525 64655 : Runtime::FunctionId function_id = expr->function()->function_id;
4526 64655 : builder()->CallRuntime(function_id, args);
4527 : }
4528 65019 : }
4529 :
4530 0 : void BytecodeGenerator::VisitVoid(UnaryOperation* expr) {
4531 3335 : VisitForEffect(expr->expression());
4532 3335 : builder()->LoadUndefined();
4533 0 : }
4534 :
4535 162948 : void BytecodeGenerator::VisitForTypeOfValue(Expression* expr) {
4536 162948 : if (expr->IsVariableProxy()) {
4537 : // Typeof does not throw a reference error on global variables, hence we
4538 : // perform a non-contextual load in case the operand is a variable proxy.
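     :     // e.g. `typeof someUndeclaredName` evaluates to "undefined" rather than
     :     // throwing a ReferenceError.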
4539 157182 : VariableProxy* proxy = expr->AsVariableProxy();
4540 : BuildVariableLoadForAccumulatorValue(proxy->var(), proxy->hole_check_mode(),
4541 157182 : INSIDE_TYPEOF);
4542 : } else {
4543 5766 : VisitForAccumulatorValue(expr);
4544 : }
4545 162948 : }
4546 :
4547 0 : void BytecodeGenerator::VisitTypeOf(UnaryOperation* expr) {
4548 63513 : VisitForTypeOfValue(expr->expression());
4549 63513 : builder()->TypeOf();
4550 0 : }
4551 :
4552 274783 : void BytecodeGenerator::VisitNot(UnaryOperation* expr) {
4553 274783 : if (execution_result()->IsEffect()) {
4554 154 : VisitForEffect(expr->expression());
4555 274629 : } else if (execution_result()->IsTest()) {
4556 :     // No actual logical negation is happening; we just swap the control flow by
4557 :     // swapping the target labels and the fallthrough branch, and visit in the
4558 :     // same test result context.
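     :     // In effect, `if (!cond) { A } else { B }` emits the same test as
     :     // `if (cond) { B } else { A }`.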
4559 : TestResultScope* test_result = execution_result()->AsTest();
4560 : test_result->InvertControlFlow();
4561 245042 : VisitInSameTestExecutionScope(expr->expression());
4562 : } else {
4563 29587 : TypeHint type_hint = VisitForAccumulatorValue(expr->expression());
4564 29587 : builder()->LogicalNot(ToBooleanModeFromTypeHint(type_hint));
4565 : // Always returns a boolean value.
4566 : execution_result()->SetResultIsBoolean();
4567 : }
4568 274783 : }
4569 :
4570 412193 : void BytecodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4571 412193 : switch (expr->op()) {
4572 : case Token::Value::NOT:
4573 274783 : VisitNot(expr);
4574 274782 : break;
4575 : case Token::Value::TYPEOF:
4576 : VisitTypeOf(expr);
4577 : break;
4578 : case Token::Value::VOID:
4579 : VisitVoid(expr);
4580 : break;
4581 : case Token::Value::DELETE:
4582 5961 : VisitDelete(expr);
4583 5961 : break;
4584 : case Token::Value::ADD:
4585 : case Token::Value::SUB:
4586 : case Token::Value::BIT_NOT:
4587 64601 : VisitForAccumulatorValue(expr->expression());
4588 : builder()->SetExpressionPosition(expr);
4589 : builder()->UnaryOperation(
4590 64600 : expr->op(), feedback_index(feedback_spec()->AddBinaryOpICSlot()));
4591 64600 : break;
4592 : default:
4593 0 : UNREACHABLE();
4594 : }
4595 412191 : }
4596 :
4597 5961 : void BytecodeGenerator::VisitDelete(UnaryOperation* unary) {
4598 : Expression* expr = unary->expression();
4599 5961 : if (expr->IsProperty()) {
4600 : // Delete of an object property is allowed both in sloppy
4601 : // and strict modes.
4602 4402 : Property* property = expr->AsProperty();
4603 4402 : Register object = VisitForRegisterValue(property->obj());
4604 4404 : VisitForAccumulatorValue(property->key());
4605 4403 : builder()->Delete(object, language_mode());
4606 2369 : } else if (expr->IsVariableProxy() &&
4607 810 : !expr->AsVariableProxy()->is_new_target()) {
4608 : // Delete of an unqualified identifier is allowed in sloppy mode but is
4609 : // not allowed in strict mode.
4610 : DCHECK(is_sloppy(language_mode()));
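     :     // e.g. `delete x` on a plain identifier is a SyntaxError in strict code,
     :     // so only sloppy code reaches this path.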
4611 798 : Variable* variable = expr->AsVariableProxy()->var();
4612 798 : switch (variable->location()) {
4613 : case VariableLocation::PARAMETER:
4614 : case VariableLocation::LOCAL:
4615 : case VariableLocation::CONTEXT: {
4616 : // Deleting local var/let/const, context variables, and arguments
4617 : // does not have any effect.
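     :           // e.g. inside a sloppy-mode function, `var x = 1; delete x`
     :           // evaluates to false and leaves x bound.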
4618 166 : builder()->LoadFalse();
4619 166 : break;
4620 : }
4621 : case VariableLocation::UNALLOCATED:
4622 : // TODO(adamk): Falling through to the runtime results in correct
4623 : // behavior, but does unnecessary context-walking (since scope
4624 : // analysis has already proven that the variable doesn't exist in
4625 : // any non-global scope). Consider adding a DeleteGlobal bytecode
4626 : // that knows how to deal with ScriptContexts as well as global
4627 : // object properties.
4628 : case VariableLocation::LOOKUP: {
4629 632 : Register name_reg = register_allocator()->NewRegister();
4630 : builder()
4631 632 : ->LoadLiteral(variable->raw_name())
4632 631 : .StoreAccumulatorInRegister(name_reg)
4633 631 : .CallRuntime(Runtime::kDeleteLookupSlot, name_reg);
4634 : break;
4635 : }
4636 : default:
4637 0 : UNREACHABLE();
4638 : }
4639 : } else {
4640 : // Delete of an unresolvable reference, new.target, and this returns true.
4641 761 : VisitForEffect(expr);
4642 761 : builder()->LoadTrue();
4643 : }
4644 5961 : }
4645 :
4646 244656 : void BytecodeGenerator::VisitCountOperation(CountOperation* expr) {
4647 : DCHECK(expr->expression()->IsValidReferenceExpression());
4648 :
4649 : // Left-hand side can only be a property, a global or a variable slot.
4650 244656 : Property* property = expr->expression()->AsProperty();
4651 244656 : AssignType assign_type = Property::GetAssignType(property);
4652 :
4653 244658 : bool is_postfix = expr->is_postfix() && !execution_result()->IsEffect();
4654 :
4655 : // Evaluate LHS expression and get old value.
4656 : Register object, key, old_value;
4657 : RegisterList super_property_args;
4658 : const AstRawString* name;
4659 244658 : switch (assign_type) {
4660 : case NON_PROPERTY: {
4661 233212 : VariableProxy* proxy = expr->expression()->AsVariableProxy();
4662 : BuildVariableLoadForAccumulatorValue(proxy->var(),
4663 233212 : proxy->hole_check_mode());
4664 233217 : break;
4665 : }
4666 : case NAMED_PROPERTY: {
4667 8554 : object = VisitForRegisterValue(property->obj());
4668 8554 : name = property->key()->AsLiteral()->AsRawPropertyName();
4669 : builder()->LoadNamedProperty(
4670 : object, name,
4671 17108 : feedback_index(GetCachedLoadICSlot(property->obj(), name)));
4672 8554 : break;
4673 : }
4674 : case KEYED_PROPERTY: {
4675 2756 : object = VisitForRegisterValue(property->obj());
4676 :       // Use VisitForAccumulatorValue here since we need the key in the accumulator
4677 : // for the LoadKeyedProperty.
4678 2756 : key = register_allocator()->NewRegister();
4679 2756 : VisitForAccumulatorValue(property->key());
4680 2756 : builder()->StoreAccumulatorInRegister(key).LoadKeyedProperty(
4681 2756 : object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
4682 2756 : break;
4683 : }
4684 : case NAMED_SUPER_PROPERTY: {
4685 45 : super_property_args = register_allocator()->NewRegisterList(4);
4686 45 : RegisterList load_super_args = super_property_args.Truncate(3);
4687 : SuperPropertyReference* super_property =
4688 45 : property->obj()->AsSuperPropertyReference();
4689 45 : BuildThisVariableLoad();
4690 45 : builder()->StoreAccumulatorInRegister(load_super_args[0]);
4691 : VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
4692 : builder()
4693 90 : ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
4694 45 : .StoreAccumulatorInRegister(load_super_args[2])
4695 45 : .CallRuntime(Runtime::kLoadFromSuper, load_super_args);
4696 : break;
4697 : }
4698 : case KEYED_SUPER_PROPERTY: {
4699 90 : super_property_args = register_allocator()->NewRegisterList(4);
4700 90 : RegisterList load_super_args = super_property_args.Truncate(3);
4701 : SuperPropertyReference* super_property =
4702 90 : property->obj()->AsSuperPropertyReference();
4703 90 : BuildThisVariableLoad();
4704 90 : builder()->StoreAccumulatorInRegister(load_super_args[0]);
4705 : VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
4706 : VisitForRegisterValue(property->key(), load_super_args[2]);
4707 90 : builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, load_super_args);
4708 : break;
4709 : }
4710 : }
4711 :
4712 : // Save result for postfix expressions.
4713 : FeedbackSlot count_slot = feedback_spec()->AddBinaryOpICSlot();
4714 244656 : if (is_postfix) {
4715 30488 : old_value = register_allocator()->NewRegister();
4716 : // Convert old value into a number before saving it.
4717 : // TODO(ignition): Think about adding proper PostInc/PostDec bytecodes
4718 : // instead of this ToNumeric + Inc/Dec dance.
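     :     // e.g. for `let s = "5"; let r = s++;`, r is the number 5 (the converted
     :     // old value) and s becomes 6.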
4719 : builder()
4720 30488 : ->ToNumeric(feedback_index(count_slot))
4721 30488 : .StoreAccumulatorInRegister(old_value);
4722 : }
4723 :
4724 : // Perform +1/-1 operation.
4725 489312 : builder()->UnaryOperation(expr->op(), feedback_index(count_slot));
4726 :
4727 : // Store the value.
4728 : builder()->SetExpressionPosition(expr);
4729 244660 : switch (assign_type) {
4730 : case NON_PROPERTY: {
4731 233213 : VariableProxy* proxy = expr->expression()->AsVariableProxy();
4732 233213 : BuildVariableAssignment(proxy->var(), expr->op(),
4733 233213 : proxy->hole_check_mode());
4734 233217 : break;
4735 : }
4736 : case NAMED_PROPERTY: {
4737 8554 : FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
4738 : Register value;
4739 8554 : if (!execution_result()->IsEffect()) {
4740 8153 : value = register_allocator()->NewRegister();
4741 8153 : builder()->StoreAccumulatorInRegister(value);
4742 : }
4743 : builder()->StoreNamedProperty(object, name, feedback_index(slot),
4744 8554 : language_mode());
4745 8554 : if (!execution_result()->IsEffect()) {
4746 8153 : builder()->LoadAccumulatorWithRegister(value);
4747 : }
4748 : break;
4749 : }
4750 : case KEYED_PROPERTY: {
4751 : FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
4752 : Register value;
4753 2756 : if (!execution_result()->IsEffect()) {
4754 485 : value = register_allocator()->NewRegister();
4755 485 : builder()->StoreAccumulatorInRegister(value);
4756 : }
4757 : builder()->StoreKeyedProperty(object, key, feedback_index(slot),
4758 2756 : language_mode());
4759 2756 : if (!execution_result()->IsEffect()) {
4760 485 : builder()->LoadAccumulatorWithRegister(value);
4761 : }
4762 : break;
4763 : }
4764 : case NAMED_SUPER_PROPERTY: {
4765 : builder()
4766 45 : ->StoreAccumulatorInRegister(super_property_args[3])
4767 45 : .CallRuntime(Runtime::kStoreToSuper, super_property_args);
4768 45 : break;
4769 : }
4770 : case KEYED_SUPER_PROPERTY: {
4771 : builder()
4772 90 : ->StoreAccumulatorInRegister(super_property_args[3])
4773 90 : .CallRuntime(Runtime::kStoreKeyedToSuper, super_property_args);
4774 90 : break;
4775 : }
4776 : }
4777 :
4778 : // Restore old value for postfix expressions.
4779 244664 : if (is_postfix) {
4780 30488 : builder()->LoadAccumulatorWithRegister(old_value);
4781 : }
4782 244664 : }
4783 :
4784 531896 : void BytecodeGenerator::VisitBinaryOperation(BinaryOperation* binop) {
4785 531896 : switch (binop->op()) {
4786 : case Token::COMMA:
4787 : VisitCommaExpression(binop);
4788 : break;
4789 : case Token::OR:
4790 41162 : VisitLogicalOrExpression(binop);
4791 41162 : break;
4792 : case Token::AND:
4793 84276 : VisitLogicalAndExpression(binop);
4794 84276 : break;
4795 : default:
4796 368808 : VisitArithmeticExpression(binop);
4797 368805 : break;
4798 : }
4799 531893 : }
4800 :
4801 114745 : void BytecodeGenerator::VisitNaryOperation(NaryOperation* expr) {
4802 114745 : switch (expr->op()) {
4803 : case Token::COMMA:
4804 325 : VisitNaryCommaExpression(expr);
4805 325 : break;
4806 : case Token::OR:
4807 20419 : VisitNaryLogicalOrExpression(expr);
4808 20419 : break;
4809 : case Token::AND:
4810 679 : VisitNaryLogicalAndExpression(expr);
4811 679 : break;
4812 : default:
4813 93322 : VisitNaryArithmeticExpression(expr);
4814 93324 : break;
4815 : }
4816 114747 : }
4817 :
4818 66201 : void BytecodeGenerator::BuildLiteralCompareNil(
4819 : Token::Value op, BytecodeArrayBuilder::NilValue nil) {
4820 66201 : if (execution_result()->IsTest()) {
4821 : TestResultScope* test_result = execution_result()->AsTest();
4822 54072 : switch (test_result->fallthrough()) {
4823 : case TestFallthrough::kThen:
4824 28612 : builder()->JumpIfNotNil(test_result->NewElseLabel(), op, nil);
4825 14306 : break;
4826 : case TestFallthrough::kElse:
4827 79532 : builder()->JumpIfNil(test_result->NewThenLabel(), op, nil);
4828 39766 : break;
4829 : case TestFallthrough::kNone:
4830 : builder()
4831 0 : ->JumpIfNil(test_result->NewThenLabel(), op, nil)
4832 0 : .Jump(test_result->NewElseLabel());
4833 : }
4834 : test_result->SetResultConsumedByTest();
4835 : } else {
4836 24258 : builder()->CompareNil(op, nil);
4837 : }
4838 66202 : }
4839 :
4840 889062 : void BytecodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4841 : Expression* sub_expr;
4842 : Literal* literal;
4843 889062 : if (expr->IsLiteralCompareTypeof(&sub_expr, &literal)) {
4844 : // Emit a fast literal comparion for expressions of the form:
4845 :     // Emit a fast literal comparison for expressions of the form:
4846 99435 : VisitForTypeOfValue(sub_expr);
4847 : builder()->SetExpressionPosition(expr);
4848 : TestTypeOfFlags::LiteralFlag literal_flag =
4849 99435 : TestTypeOfFlags::GetFlagForLiteral(ast_string_constants(), literal);
4850 99435 : if (literal_flag == TestTypeOfFlags::LiteralFlag::kOther) {
4851 243 : builder()->LoadFalse();
4852 : } else {
4853 99192 : builder()->CompareTypeOf(literal_flag);
4854 : }
4855 789628 : } else if (expr->IsLiteralCompareUndefined(&sub_expr)) {
4856 61016 : VisitForAccumulatorValue(sub_expr);
4857 : builder()->SetExpressionPosition(expr);
4858 61017 : BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kUndefinedValue);
4859 728612 : } else if (expr->IsLiteralCompareNull(&sub_expr)) {
4860 5185 : VisitForAccumulatorValue(sub_expr);
4861 : builder()->SetExpressionPosition(expr);
4862 5185 : BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kNullValue);
4863 : } else {
4864 723426 : Register lhs = VisitForRegisterValue(expr->left());
4865 723429 : VisitForAccumulatorValue(expr->right());
4866 : builder()->SetExpressionPosition(expr);
4867 : FeedbackSlot slot;
4868 723429 : if (expr->op() == Token::IN) {
4869 : slot = feedback_spec()->AddKeyedHasICSlot();
4870 719670 : } else if (expr->op() == Token::INSTANCEOF) {
4871 : slot = feedback_spec()->AddInstanceOfSlot();
4872 : } else {
4873 : slot = feedback_spec()->AddCompareICSlot();
4874 : }
4875 723431 : builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
4876 : }
4877 : // Always returns a boolean value.
4878 : execution_result()->SetResultIsBoolean();
4879 889071 : }
4880 :
4881 368808 : void BytecodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
4882 : FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
4883 : Expression* subexpr;
4884 368807 : Smi literal;
4885 368807 : if (expr->IsSmiLiteralOperation(&subexpr, &literal)) {
4886 120823 : TypeHint type_hint = VisitForAccumulatorValue(subexpr);
4887 : builder()->SetExpressionPosition(expr);
4888 : builder()->BinaryOperationSmiLiteral(expr->op(), literal,
4889 120822 : feedback_index(slot));
4890 120822 : if (expr->op() == Token::ADD && type_hint == TypeHint::kString) {
4891 : execution_result()->SetResultIsString();
4892 : }
4893 : } else {
4894 247984 : TypeHint lhs_type = VisitForAccumulatorValue(expr->left());
4895 247985 : Register lhs = register_allocator()->NewRegister();
4896 247985 : builder()->StoreAccumulatorInRegister(lhs);
4897 247985 : TypeHint rhs_type = VisitForAccumulatorValue(expr->right());
4898 359315 : if (expr->op() == Token::ADD &&
4899 111330 : (lhs_type == TypeHint::kString || rhs_type == TypeHint::kString)) {
4900 : execution_result()->SetResultIsString();
4901 : }
4902 :
4903 : builder()->SetExpressionPosition(expr);
4904 247985 : builder()->BinaryOperation(expr->op(), lhs, feedback_index(slot));
4905 : }
4906 368805 : }
4907 :
4908 93321 : void BytecodeGenerator::VisitNaryArithmeticExpression(NaryOperation* expr) {
4909 : // TODO(leszeks): Add support for lhs smi in commutative ops.
4910 93321 : TypeHint type_hint = VisitForAccumulatorValue(expr->first());
4911 :
4912 765804 : for (size_t i = 0; i < expr->subsequent_length(); ++i) {
4913 : RegisterAllocationScope register_scope(this);
4914 336240 : if (expr->subsequent(i)->IsSmiLiteral()) {
4915 : builder()->SetExpressionPosition(expr->subsequent_op_position(i));
4916 : builder()->BinaryOperationSmiLiteral(
4917 : expr->op(), expr->subsequent(i)->AsLiteral()->AsSmiLiteral(),
4918 88792 : feedback_index(feedback_spec()->AddBinaryOpICSlot()));
4919 : } else {
4920 291844 : Register lhs = register_allocator()->NewRegister();
4921 291845 : builder()->StoreAccumulatorInRegister(lhs);
4922 291845 : TypeHint rhs_hint = VisitForAccumulatorValue(expr->subsequent(i));
4923 291844 : if (rhs_hint == TypeHint::kString) type_hint = TypeHint::kString;
4924 : builder()->SetExpressionPosition(expr->subsequent_op_position(i));
4925 : builder()->BinaryOperation(
4926 : expr->op(), lhs,
4927 291844 : feedback_index(feedback_spec()->AddBinaryOpICSlot()));
4928 : }
4929 : }
4930 :
4931 135067 : if (type_hint == TypeHint::kString && expr->op() == Token::ADD) {
4932 : // If any operand of an ADD is a String, a String is produced.
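     :     // e.g. 1 + "a" + 2 evaluates to the string "1a2".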
4933 : execution_result()->SetResultIsString();
4934 : }
4935 93324 : }
4936 :
4937 : // Note: the actual spreading is performed by the surrounding expression's
4938 : // visitor.
4939 6713 : void BytecodeGenerator::VisitSpread(Spread* expr) { Visit(expr->expression()); }
4940 :
4941 0 : void BytecodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
4942 0 : UNREACHABLE();
4943 : }
4944 :
4945 283 : void BytecodeGenerator::VisitImportCallExpression(ImportCallExpression* expr) {
4946 283 : RegisterList args = register_allocator()->NewRegisterList(2);
4947 : VisitForRegisterValue(expr->argument(), args[1]);
4948 : builder()
4949 283 : ->MoveRegister(Register::function_closure(), args[0])
4950 283 : .CallRuntime(Runtime::kDynamicImportCall, args);
4951 283 : }
4952 :
4953 39663 : void BytecodeGenerator::BuildGetIterator(IteratorType hint) {
4954 39663 : RegisterList args = register_allocator()->NewRegisterList(1);
4955 39663 : Register method = register_allocator()->NewRegister();
4956 39663 : Register obj = args[0];
4957 :
4958 39663 : if (hint == IteratorType::kAsync) {
4959 : // Set method to GetMethod(obj, @@asyncIterator)
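     :     // e.g. `for await (const x of obj)` looks up obj[Symbol.asyncIterator]
     :     // first and, if it is undefined or null, falls back below to wrapping
     :     // obj[Symbol.iterator].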
4960 351 : builder()->StoreAccumulatorInRegister(obj).LoadAsyncIteratorProperty(
4961 351 : obj, feedback_index(feedback_spec()->AddLoadICSlot()));
4962 :
4963 : BytecodeLabel async_iterator_undefined, async_iterator_null, done;
4964 : // TODO(ignition): Add a single opcode for JumpIfNullOrUndefined
4965 351 : builder()->JumpIfUndefined(&async_iterator_undefined);
4966 351 : builder()->JumpIfNull(&async_iterator_null);
4967 :
4968 : // Let iterator be Call(method, obj)
4969 351 : builder()->StoreAccumulatorInRegister(method).CallProperty(
4970 351 : method, args, feedback_index(feedback_spec()->AddCallICSlot()));
4971 :
4972 : // If Type(iterator) is not Object, throw a TypeError exception.
4973 351 : builder()->JumpIfJSReceiver(&done);
4974 351 : builder()->CallRuntime(Runtime::kThrowSymbolAsyncIteratorInvalid);
4975 :
4976 351 : builder()->Bind(&async_iterator_undefined);
4977 351 : builder()->Bind(&async_iterator_null);
4978 : // If method is undefined,
4979 : // Let syncMethod be GetMethod(obj, @@iterator)
4980 : builder()
4981 : ->LoadIteratorProperty(obj,
4982 351 : feedback_index(feedback_spec()->AddLoadICSlot()))
4983 351 : .StoreAccumulatorInRegister(method);
4984 :
4985 : // Let syncIterator be Call(syncMethod, obj)
4986 : builder()->CallProperty(method, args,
4987 351 : feedback_index(feedback_spec()->AddCallICSlot()));
4988 :
4989 : // Return CreateAsyncFromSyncIterator(syncIterator)
4990 : // alias `method` register as it's no longer used
4991 351 : Register sync_iter = method;
4992 351 : builder()->StoreAccumulatorInRegister(sync_iter).CallRuntime(
4993 351 : Runtime::kInlineCreateAsyncFromSyncIterator, sync_iter);
4994 :
4995 351 : builder()->Bind(&done);
4996 : } else {
4997 : // Let method be GetMethod(obj, @@iterator).
4998 : builder()
4999 39312 : ->StoreAccumulatorInRegister(obj)
5000 : .LoadIteratorProperty(obj,
5001 39312 : feedback_index(feedback_spec()->AddLoadICSlot()))
5002 39312 : .StoreAccumulatorInRegister(method);
5003 :
5004 : // Let iterator be Call(method, obj).
5005 : builder()->CallProperty(method, args,
5006 39312 : feedback_index(feedback_spec()->AddCallICSlot()));
5007 :
5008 : // If Type(iterator) is not Object, throw a TypeError exception.
5009 : BytecodeLabel no_type_error;
5010 39312 : builder()->JumpIfJSReceiver(&no_type_error);
5011 39312 : builder()->CallRuntime(Runtime::kThrowSymbolIteratorInvalid);
5012 39312 : builder()->Bind(&no_type_error);
5013 : }
5014 39663 : }
5015 :
5016 : // Returns an IteratorRecord which is valid for the lifetime of the current
5017 : // register_allocation_scope.
5018 39663 : BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
5019 : Register next, Register object, IteratorType hint) {
5020 : DCHECK(next.is_valid() && object.is_valid());
5021 39663 : BuildGetIterator(hint);
5022 :
5023 : builder()
5024 39663 : ->StoreAccumulatorInRegister(object)
5025 : .LoadNamedProperty(object, ast_string_constants()->next_string(),
5026 39663 : feedback_index(feedback_spec()->AddLoadICSlot()))
5027 39663 : .StoreAccumulatorInRegister(next);
5028 39663 : return IteratorRecord(object, next, hint);
5029 : }
5030 :
5031 39469 : BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
5032 : IteratorType hint) {
5033 39469 : Register next = register_allocator()->NewRegister();
5034 39469 : Register object = register_allocator()->NewRegister();
5035 39469 : return BuildGetIteratorRecord(next, object, hint);
5036 : }
5037 :
5038 40859 : void BytecodeGenerator::BuildIteratorNext(const IteratorRecord& iterator,
5039 : Register next_result) {
5040 : DCHECK(next_result.is_valid());
5041 : builder()->CallProperty(iterator.next(), RegisterList(iterator.object()),
5042 40859 : feedback_index(feedback_spec()->AddCallICSlot()));
5043 :
5044 40859 : if (iterator.type() == IteratorType::kAsync) {
5045 341 : BuildAwait();
5046 : }
5047 :
5048 : BytecodeLabel is_object;
5049 : builder()
5050 40859 : ->StoreAccumulatorInRegister(next_result)
5051 40859 : .JumpIfJSReceiver(&is_object)
5052 40859 : .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, next_result)
5053 40859 : .Bind(&is_object);
5054 40858 : }
5055 :
5056 582 : void BytecodeGenerator::BuildCallIteratorMethod(Register iterator,
5057 : const AstRawString* method_name,
5058 : RegisterList receiver_and_args,
5059 : BytecodeLabel* if_called,
5060 : BytecodeLabels* if_notcalled) {
5061 : RegisterAllocationScope register_scope(this);
5062 :
5063 582 : Register method = register_allocator()->NewRegister();
5064 : FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
5065 : builder()
5066 582 : ->LoadNamedProperty(iterator, method_name, feedback_index(slot))
5067 1164 : .JumpIfUndefined(if_notcalled->New())
5068 1164 : .JumpIfNull(if_notcalled->New())
5069 582 : .StoreAccumulatorInRegister(method)
5070 : .CallProperty(method, receiver_and_args,
5071 582 : feedback_index(feedback_spec()->AddCallICSlot()))
5072 582 : .Jump(if_called);
5073 582 : }
5074 :
5075 194 : void BytecodeGenerator::BuildIteratorClose(const IteratorRecord& iterator,
5076 : Expression* expr) {
5077 : RegisterAllocationScope register_scope(this);
5078 : BytecodeLabels done(zone());
5079 : BytecodeLabel if_called;
5080 : RegisterList args = RegisterList(iterator.object());
5081 : BuildCallIteratorMethod(iterator.object(),
5082 : ast_string_constants()->return_string(), args,
5083 194 : &if_called, &done);
5084 194 : builder()->Bind(&if_called);
5085 :
5086 194 : if (iterator.type() == IteratorType::kAsync) {
5087 : DCHECK_NOT_NULL(expr);
5088 10 : BuildAwait(expr->position());
5089 : }
5090 :
5091 194 : builder()->JumpIfJSReceiver(done.New());
5092 : {
5093 : RegisterAllocationScope register_scope(this);
5094 194 : Register return_result = register_allocator()->NewRegister();
5095 : builder()
5096 194 : ->StoreAccumulatorInRegister(return_result)
5097 194 : .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, return_result);
5098 : }
5099 :
5100 194 : done.Bind(builder());
5101 194 : }
5102 :
5103 1930 : void BytecodeGenerator::VisitGetTemplateObject(GetTemplateObject* expr) {
5104 : builder()->SetExpressionPosition(expr);
5105 1930 : size_t entry = builder()->AllocateDeferredConstantPoolEntry();
5106 3860 : template_objects_.push_back(std::make_pair(expr, entry));
5107 : FeedbackSlot literal_slot = feedback_spec()->AddLiteralSlot();
5108 1930 : builder()->GetTemplateObject(entry, feedback_index(literal_slot));
5109 1930 : }
5110 :
5111 6993 : void BytecodeGenerator::VisitTemplateLiteral(TemplateLiteral* expr) {
5112 : const ZonePtrList<const AstRawString>& parts = *expr->string_parts();
5113 : const ZonePtrList<Expression>& substitutions = *expr->substitutions();
5114 : // Template strings with no substitutions are turned into StringLiterals.
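     :   // e.g. `abc` never reaches here, while `a${x}b` has parts ["a", "b"] and a
     :   // single substitution.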
5115 : DCHECK_GT(substitutions.length(), 0);
5116 : DCHECK_EQ(parts.length(), substitutions.length() + 1);
5117 :
5118 : // Generate string concatenation
5119 : // TODO(caitp): Don't generate feedback slot if it's not used --- introduce
5120 : // a simple, concise, reusable mechanism to lazily create reusable slots.
5121 : FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
5122 6993 : Register last_part = register_allocator()->NewRegister();
5123 : bool last_part_valid = false;
5124 :
5125 : builder()->SetExpressionPosition(expr);
5126 27891 : for (int i = 0; i < substitutions.length(); ++i) {
5127 10449 : if (i != 0) {
5128 3456 : builder()->StoreAccumulatorInRegister(last_part);
5129 : last_part_valid = true;
5130 : }
5131 :
5132 20898 : if (!parts[i]->IsEmpty()) {
5133 7273 : builder()->LoadLiteral(parts[i]);
5134 7273 : if (last_part_valid) {
5135 3381 : builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
5136 : }
5137 7273 : builder()->StoreAccumulatorInRegister(last_part);
5138 : last_part_valid = true;
5139 : }
5140 :
5141 10449 : TypeHint type_hint = VisitForAccumulatorValue(substitutions[i]);
5142 10449 : if (type_hint != TypeHint::kString) {
5143 10439 : builder()->ToString();
5144 : }
5145 10449 : if (last_part_valid) {
5146 7348 : builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
5147 : }
5148 : last_part_valid = false;
5149 : }
5150 :
5151 13986 : if (!parts.last()->IsEmpty()) {
5152 2750 : builder()->StoreAccumulatorInRegister(last_part);
5153 2750 : builder()->LoadLiteral(parts.last());
5154 2750 : builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
5155 : }
5156 6993 : }
5157 :
5158 2490771 : void BytecodeGenerator::BuildThisVariableLoad() {
5159 2490771 : DeclarationScope* receiver_scope = closure_scope()->GetReceiverScope();
5160 : Variable* var = receiver_scope->receiver();
5161 : // TODO(littledan): implement 'this' hole check elimination.
5162 : HoleCheckMode hole_check_mode =
5163 : IsDerivedConstructor(receiver_scope->function_kind())
5164 : ? HoleCheckMode::kRequired
5165 2490772 : : HoleCheckMode::kElided;
5166 2490772 : BuildVariableLoad(var, hole_check_mode);
5167 2490769 : }
5168 :
5169 0 : void BytecodeGenerator::VisitThisExpression(ThisExpression* expr) {
5170 2488790 : BuildThisVariableLoad();
5171 0 : }
5172 :
5173 0 : void BytecodeGenerator::VisitSuperCallReference(SuperCallReference* expr) {
5174 : // Handled by VisitCall().
5175 0 : UNREACHABLE();
5176 : }
5177 :
5178 0 : void BytecodeGenerator::VisitSuperPropertyReference(
5179 : SuperPropertyReference* expr) {
5180 17 : builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError);
5181 0 : }
5182 :
5183 0 : void BytecodeGenerator::VisitCommaExpression(BinaryOperation* binop) {
5184 37650 : VisitForEffect(binop->left());
5185 37650 : Visit(binop->right());
5186 0 : }
5187 :
5188 325 : void BytecodeGenerator::VisitNaryCommaExpression(NaryOperation* expr) {
5189 : DCHECK_GT(expr->subsequent_length(), 0);
5190 :
5191 325 : VisitForEffect(expr->first());
5192 5220793 : for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5193 2610234 : VisitForEffect(expr->subsequent(i));
5194 : }
5195 325 : Visit(expr->subsequent(expr->subsequent_length() - 1));
5196 325 : }
5197 :
5198 146573 : void BytecodeGenerator::VisitLogicalTestSubExpression(
5199 : Token::Value token, Expression* expr, BytecodeLabels* then_labels,
5200 : BytecodeLabels* else_labels, int coverage_slot) {
5201 : DCHECK(token == Token::OR || token == Token::AND);
5202 :
5203 : BytecodeLabels test_next(zone());
5204 146573 : if (token == Token::OR) {
5205 92441 : VisitForTest(expr, then_labels, &test_next, TestFallthrough::kElse);
5206 : } else {
5207 : DCHECK_EQ(Token::AND, token);
5208 54132 : VisitForTest(expr, &test_next, else_labels, TestFallthrough::kThen);
5209 : }
5210 146573 : test_next.Bind(builder());
5211 :
5212 : BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5213 146573 : }
5214 :
5215 68065 : void BytecodeGenerator::VisitLogicalTest(Token::Value token, Expression* left,
5216 : Expression* right,
5217 : int right_coverage_slot) {
5218 : DCHECK(token == Token::OR || token == Token::AND);
5219 : TestResultScope* test_result = execution_result()->AsTest();
5220 : BytecodeLabels* then_labels = test_result->then_labels();
5221 : BytecodeLabels* else_labels = test_result->else_labels();
5222 : TestFallthrough fallthrough = test_result->fallthrough();
5223 :
5224 68065 : VisitLogicalTestSubExpression(token, left, then_labels, else_labels,
5225 68065 : right_coverage_slot);
5226 : // The last test has the same then, else and fallthrough as the parent test.
5227 68065 : VisitForTest(right, then_labels, else_labels, fallthrough);
5228 68065 : }
5229 :
5230 20545 : void BytecodeGenerator::VisitNaryLogicalTest(
5231 : Token::Value token, NaryOperation* expr,
5232 : const NaryCodeCoverageSlots* coverage_slots) {
5233 : DCHECK(token == Token::OR || token == Token::AND);
5234 : DCHECK_GT(expr->subsequent_length(), 0);
5235 :
5236 : TestResultScope* test_result = execution_result()->AsTest();
5237 : BytecodeLabels* then_labels = test_result->then_labels();
5238 : BytecodeLabels* else_labels = test_result->else_labels();
5239 : TestFallthrough fallthrough = test_result->fallthrough();
5240 :
5241 20545 : VisitLogicalTestSubExpression(token, expr->first(), then_labels, else_labels,
5242 20545 : coverage_slots->GetSlotFor(0));
5243 136471 : for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5244 57963 : VisitLogicalTestSubExpression(token, expr->subsequent(i), then_labels,
5245 : else_labels,
5246 57963 : coverage_slots->GetSlotFor(i + 1));
5247 : }
5248 : // The last test has the same then, else and fallthrough as the parent test.
5249 : VisitForTest(expr->subsequent(expr->subsequent_length() - 1), then_labels,
5250 20545 : else_labels, fallthrough);
5251 20545 : }
5252 :
5253 22593 : bool BytecodeGenerator::VisitLogicalOrSubExpression(Expression* expr,
5254 : BytecodeLabels* end_labels,
5255 : int coverage_slot) {
5256 22593 : if (expr->ToBooleanIsTrue()) {
5257 1721 : VisitForAccumulatorValue(expr);
5258 1721 : end_labels->Bind(builder());
5259 1721 : return true;
5260 20872 : } else if (!expr->ToBooleanIsFalse()) {
5261 20690 : TypeHint type_hint = VisitForAccumulatorValue(expr);
5262 : builder()->JumpIfTrue(ToBooleanModeFromTypeHint(type_hint),
5263 41380 : end_labels->New());
5264 : }
5265 :
5266 : BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5267 :
5268 : return false;
5269 : }
5270 :
5271 31935 : bool BytecodeGenerator::VisitLogicalAndSubExpression(Expression* expr,
5272 : BytecodeLabels* end_labels,
5273 : int coverage_slot) {
5274 31935 : if (expr->ToBooleanIsFalse()) {
5275 169 : VisitForAccumulatorValue(expr);
5276 169 : end_labels->Bind(builder());
5277 169 : return true;
5278 31766 : } else if (!expr->ToBooleanIsTrue()) {
5279 30031 : TypeHint type_hint = VisitForAccumulatorValue(expr);
5280 : builder()->JumpIfFalse(ToBooleanModeFromTypeHint(type_hint),
5281 60062 : end_labels->New());
5282 : }
5283 :
5284 : BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5285 :
5286 : return false;
5287 : }
5288 :
5289 41162 : void BytecodeGenerator::VisitLogicalOrExpression(BinaryOperation* binop) {
5290 : Expression* left = binop->left();
5291 : Expression* right = binop->right();
5292 :
5293 : int right_coverage_slot =
5294 : AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
5295 :
5296 41162 : if (execution_result()->IsTest()) {
5297 : TestResultScope* test_result = execution_result()->AsTest();
5298 18849 : if (left->ToBooleanIsTrue()) {
5299 4341 : builder()->Jump(test_result->NewThenLabel());
5300 14508 : } else if (left->ToBooleanIsFalse() && right->ToBooleanIsFalse()) {
5301 : BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
5302 0 : builder()->Jump(test_result->NewElseLabel());
5303 : } else {
5304 14508 : VisitLogicalTest(Token::OR, left, right, right_coverage_slot);
5305 : }
5306 : test_result->SetResultConsumedByTest();
5307 : } else {
5308 : BytecodeLabels end_labels(zone());
5309 22313 : if (VisitLogicalOrSubExpression(left, &end_labels, right_coverage_slot)) {
5310 : return;
5311 : }
5312 20624 : VisitForAccumulatorValue(right);
5313 20624 : end_labels.Bind(builder());
5314 : }
5315 : }
5316 :
5317 20419 : void BytecodeGenerator::VisitNaryLogicalOrExpression(NaryOperation* expr) {
5318 : Expression* first = expr->first();
5319 : DCHECK_GT(expr->subsequent_length(), 0);
5320 :
5321 20419 : NaryCodeCoverageSlots coverage_slots(this, expr);
5322 :
5323 20419 : if (execution_result()->IsTest()) {
5324 : TestResultScope* test_result = execution_result()->AsTest();
5325 20280 : if (first->ToBooleanIsTrue()) {
5326 0 : builder()->Jump(test_result->NewThenLabel());
5327 : } else {
5328 20280 : VisitNaryLogicalTest(Token::OR, expr, &coverage_slots);
5329 : }
5330 : test_result->SetResultConsumedByTest();
5331 : } else {
5332 : BytecodeLabels end_labels(zone());
5333 139 : if (VisitLogicalOrSubExpression(first, &end_labels,
5334 : coverage_slots.GetSlotFor(0))) {
5335 : return;
5336 : }
5337 248 : for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5338 282 : if (VisitLogicalOrSubExpression(expr->subsequent(i), &end_labels,
5339 : coverage_slots.GetSlotFor(i + 1))) {
5340 : return;
5341 : }
5342 : }
5343 : // We have to visit the last value even if it's true, because we need its
5344 : // actual value.
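     :     // e.g. `a || b || 2` evaluates to 2 when a and b are falsy, not to true.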
5345 107 : VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
5346 107 : end_labels.Bind(builder());
5347 : }
5348 : }
5349 :
5350 84276 : void BytecodeGenerator::VisitLogicalAndExpression(BinaryOperation* binop) {
5351 : Expression* left = binop->left();
5352 : Expression* right = binop->right();
5353 :
5354 : int right_coverage_slot =
5355 : AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
5356 :
5357 84276 : if (execution_result()->IsTest()) {
5358 : TestResultScope* test_result = execution_result()->AsTest();
5359 53568 : if (left->ToBooleanIsFalse()) {
5360 11 : builder()->Jump(test_result->NewElseLabel());
5361 53557 : } else if (left->ToBooleanIsTrue() && right->ToBooleanIsTrue()) {
5362 : BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
5363 0 : builder()->Jump(test_result->NewThenLabel());
5364 : } else {
5365 53557 : VisitLogicalTest(Token::AND, left, right, right_coverage_slot);
5366 : }
5367 : test_result->SetResultConsumedByTest();
5368 : } else {
5369 : BytecodeLabels end_labels(zone());
5370 30708 : if (VisitLogicalAndSubExpression(left, &end_labels, right_coverage_slot)) {
5371 : return;
5372 : }
5373 30555 : VisitForAccumulatorValue(right);
5374 30554 : end_labels.Bind(builder());
5375 : }
5376 : }
5377 :
5378 679 : void BytecodeGenerator::VisitNaryLogicalAndExpression(NaryOperation* expr) {
5379 : Expression* first = expr->first();
5380 : DCHECK_GT(expr->subsequent_length(), 0);
5381 :
5382 679 : NaryCodeCoverageSlots coverage_slots(this, expr);
5383 :
5384 679 : if (execution_result()->IsTest()) {
5385 : TestResultScope* test_result = execution_result()->AsTest();
5386 265 : if (first->ToBooleanIsFalse()) {
5387 0 : builder()->Jump(test_result->NewElseLabel());
5388 : } else {
5389 265 : VisitNaryLogicalTest(Token::AND, expr, &coverage_slots);
5390 : }
5391 : test_result->SetResultConsumedByTest();
5392 : } else {
5393 : BytecodeLabels end_labels(zone());
5394 414 : if (VisitLogicalAndSubExpression(first, &end_labels,
5395 : coverage_slots.GetSlotFor(0))) {
5396 : return;
5397 : }
5398 1211 : for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5399 1626 : if (VisitLogicalAndSubExpression(expr->subsequent(i), &end_labels,
5400 : coverage_slots.GetSlotFor(i + 1))) {
5401 : return;
5402 : }
5403 : }
5404 : // We have to visit the last value even if it's false, because we need its
5405 : // actual value.
5406 398 : VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
5407 398 : end_labels.Bind(builder());
5408 : }
5409 : }
5410 :
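       : // Allocates the activation context for the closure scope and leaves it in
       : // the accumulator: a script context for script scopes, a module context for
       : // module scopes, and otherwise (function and eval scopes) a function/eval
       : // context, falling back to Runtime::kNewFunctionContext when the slot count
       : // exceeds what the stub-based bytecodes support.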
5411 190411 : void BytecodeGenerator::BuildNewLocalActivationContext() {
5412 : ValueResultScope value_execution_result(this);
5413 : Scope* scope = closure_scope();
5414 : DCHECK_EQ(current_scope(), closure_scope());
5415 :
5416 : // Create the appropriate context.
5417 190411 : if (scope->is_script_scope()) {
5418 11920 : Register scope_reg = register_allocator()->NewRegister();
5419 : builder()
5420 11920 : ->LoadLiteral(scope)
5421 11919 : .StoreAccumulatorInRegister(scope_reg)
5422 11920 : .CallRuntime(Runtime::kNewScriptContext, scope_reg);
5423 178491 : } else if (scope->is_module_scope()) {
5424 : // We don't need to do anything for the outer script scope.
5425 : DCHECK(scope->outer_scope()->is_script_scope());
5426 :
5427 : // A JSFunction representing a module is called with the module object as
5428 : // its sole argument.
5429 1257 : RegisterList args = register_allocator()->NewRegisterList(2);
5430 : builder()
5431 1257 : ->MoveRegister(builder()->Parameter(0), args[0])
5432 1257 : .LoadLiteral(scope)
5433 1257 : .StoreAccumulatorInRegister(args[1])
5434 1257 : .CallRuntime(Runtime::kPushModuleContext, args);
5435 : } else {
5436 : DCHECK(scope->is_function_scope() || scope->is_eval_scope());
5437 177234 : int slot_count = scope->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
5438 177234 : if (slot_count <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
5439 177217 : switch (scope->scope_type()) {
5440 : case EVAL_SCOPE:
5441 42287 : builder()->CreateEvalContext(scope, slot_count);
5442 42287 : break;
5443 : case FUNCTION_SCOPE:
5444 134930 : builder()->CreateFunctionContext(scope, slot_count);
5445 134930 : break;
5446 : default:
5447 0 : UNREACHABLE();
5448 : }
5449 : } else {
5450 17 : Register arg = register_allocator()->NewRegister();
5451 17 : builder()->LoadLiteral(scope).StoreAccumulatorInRegister(arg).CallRuntime(
5452 17 : Runtime::kNewFunctionContext, arg);
5453 : }
5454 : }
5455 190410 : }
5456 :
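       : // Initializes the freshly created activation context: copies the receiver
       : // and any context-allocated parameters from their incoming registers into
       : // the corresponding context slots.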
5457 190410 : void BytecodeGenerator::BuildLocalActivationContextInitialization() {
5458 : DeclarationScope* scope = closure_scope();
5459 :
5460 321833 : if (scope->has_this_declaration() && scope->receiver()->IsContextSlot()) {
5461 : Variable* variable = scope->receiver();
5462 105892 : Register receiver(builder()->Receiver());
5463 : // Context variable (at bottom of the context chain).
5464 : DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
5465 105892 : builder()->LoadAccumulatorWithRegister(receiver).StoreContextSlot(
5466 105892 : execution_context()->reg(), variable->index(), 0);
5467 : }
5468 :
5469 : // Copy parameters into context if necessary.
5470 : int num_parameters = scope->num_parameters();
5471 308748 : for (int i = 0; i < num_parameters; i++) {
5472 : Variable* variable = scope->parameter(i);
5473 86071 : if (!variable->IsContextSlot()) continue;
5474 :
5475 32267 : Register parameter(builder()->Parameter(i));
5476 : // Context variable (at bottom of the context chain).
5477 : DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
5478 32267 : builder()->LoadAccumulatorWithRegister(parameter).StoreContextSlot(
5479 32267 : execution_context()->reg(), variable->index(), 0);
5480 : }
5481 190410 : }
5482 :
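       : // Allocates a context for a block scope; the new context is left in the
       : // accumulator.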
5483 59315 : void BytecodeGenerator::BuildNewLocalBlockContext(Scope* scope) {
5484 : ValueResultScope value_execution_result(this);
5485 : DCHECK(scope->is_block_scope());
5486 :
5487 59315 : builder()->CreateBlockContext(scope);
5488 59315 : }
5489 :
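       : // Allocates a context for a with-statement: the accumulator holds the
       : // evaluated subject of the `with`, which is coerced to an object and used
       : // as the extension object of the new context.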
5490 2962 : void BytecodeGenerator::BuildNewLocalWithContext(Scope* scope) {
5491 : ValueResultScope value_execution_result(this);
5492 :
5493 2962 : Register extension_object = register_allocator()->NewRegister();
5494 :
5495 2962 : builder()->ToObject(extension_object);
5496 2962 : builder()->CreateWithContext(extension_object, scope);
5497 2962 : }
5498 :
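       : // Allocates a context for a catch block: the accumulator holds the caught
       : // exception, which is stashed in a register and bound as the new context's
       : // catch variable.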
5499 69718 : void BytecodeGenerator::BuildNewLocalCatchContext(Scope* scope) {
5500 : ValueResultScope value_execution_result(this);
5501 : DCHECK(scope->catch_variable()->IsContextSlot());
5502 :
5503 69718 : Register exception = register_allocator()->NewRegister();
5504 69718 : builder()->StoreAccumulatorInRegister(exception);
5505 69719 : builder()->CreateCatchContext(exception, scope);
5506 69720 : }
5507 :
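       : // Loads an object-literal accessor into |value_out|: null when the accessor
       : // is absent, otherwise the accessor function itself, setting up its home
       : // object if needed.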
5508 10078 : void BytecodeGenerator::VisitObjectLiteralAccessor(
5509 : Register home_object, ObjectLiteralProperty* property, Register value_out) {
5510 10078 : if (property == nullptr) {
5511 4317 : builder()->LoadNull().StoreAccumulatorInRegister(value_out);
5512 : } else {
5513 : VisitForRegisterValue(property->value(), value_out);
5514 5761 : VisitSetHomeObject(value_out, home_object, property);
5515 : }
5516 10078 : }
5517 :
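       : // If the property's value is a function literal that references `super`,
       : // stores |home_object| as that function's home object.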
5518 11908 : void BytecodeGenerator::VisitSetHomeObject(Register value, Register home_object,
5519 : LiteralProperty* property) {
5520 : Expression* expr = property->value();
5521 11908 : if (FunctionLiteral::NeedsHomeObject(expr)) {
5522 : FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
5523 : builder()
5524 659 : ->LoadAccumulatorWithRegister(home_object)
5525 659 : .StoreHomeObjectProperty(value, feedback_index(slot), language_mode());
5526 : }
5527 11908 : }
5528 :
5529 2096669 : void BytecodeGenerator::VisitArgumentsObject(Variable* variable) {
5530 2096669 : if (variable == nullptr) return;
5531 :
5532 : DCHECK(variable->IsContextSlot() || variable->IsStackAllocated());
5533            :   // Allocate and initialize a new arguments object and assign it to the
5534            :   // {arguments} variable.
5535 : // {arguments} variable.
5536 100547 : builder()->CreateArguments(closure_scope()->GetArgumentsType());
5537 100547 : BuildVariableAssignment(variable, Token::ASSIGN, HoleCheckMode::kElided);
5538 : }
5539 :
5540 2096671 : void BytecodeGenerator::VisitRestArgumentsArray(Variable* rest) {
5541 2096671 : if (rest == nullptr) return;
5542 :
5543            :   // Allocate and initialize a new rest parameter array and assign it to the
5544            :   // {rest} variable.
5545 4011 : builder()->CreateArguments(CreateArgumentsType::kRestParameter);
5546 : DCHECK(rest->IsContextSlot() || rest->IsStackAllocated());
5547 4011 : BuildVariableAssignment(rest, Token::ASSIGN, HoleCheckMode::kElided);
5548 : }
5549 :
5550 4193338 : void BytecodeGenerator::VisitThisFunctionVariable(Variable* variable) {
5551 4193338 : if (variable == nullptr) return;
5552 :
5553 : // Store the closure we were called with in the given variable.
5554 60218 : builder()->LoadAccumulatorWithRegister(Register::function_closure());
5555 30109 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
5556 : }
5557 :
5558 2096667 : void BytecodeGenerator::VisitNewTargetVariable(Variable* variable) {
5559 2096667 : if (variable == nullptr) return;
5560 :
5561 : // The generator resume trampoline abuses the new.target register
5562 : // to pass in the generator object. In ordinary calls, new.target is always
5563 : // undefined because generator functions are non-constructible, so don't
5564 : // assign anything to the new.target variable.
5565 199084 : if (IsResumableFunction(info()->literal()->kind())) return;
5566 :
5567 99026 : if (variable->location() == VariableLocation::LOCAL) {
5568 : // The new.target register was already assigned by entry trampoline.
5569 : DCHECK_EQ(incoming_new_target_or_generator_.index(),
5570 : GetRegisterForLocalVariable(variable).index());
5571 : return;
5572 : }
5573 :
5574 : // Store the new target we were called with in the given variable.
5575 94591 : builder()->LoadAccumulatorWithRegister(incoming_new_target_or_generator_);
5576 94591 : BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
5577 : }
5578 :
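       : // For resumable functions, creates the generator object (or the async
       : // function state object) by calling the appropriate runtime entry with the
       : // closure and receiver, stores it in the generator object register, and, if
       : // the generator variable is not already that local register, assigns it to
       : // the variable as well.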
5579 11220 : void BytecodeGenerator::BuildGeneratorObjectVariableInitialization() {
5580 : DCHECK(IsResumableFunction(info()->literal()->kind()));
5581 :
5582 : Variable* generator_object_var = closure_scope()->generator_object_var();
5583 : RegisterAllocationScope register_scope(this);
5584 11220 : RegisterList args = register_allocator()->NewRegisterList(2);
5585 : Runtime::FunctionId function_id =
5586 18498 : (IsAsyncFunction(info()->literal()->kind()) &&
5587 7278 : !IsAsyncGeneratorFunction(info()->literal()->kind()))
5588 : ? Runtime::kInlineAsyncFunctionEnter
5589 11220 : : Runtime::kInlineCreateJSGeneratorObject;
5590 : builder()
5591 11220 : ->MoveRegister(Register::function_closure(), args[0])
5592 22440 : .MoveRegister(builder()->Receiver(), args[1])
5593 11220 : .CallRuntime(function_id, args)
5594 11220 : .StoreAccumulatorInRegister(generator_object());
5595 :
5596 11220 : if (generator_object_var->location() == VariableLocation::LOCAL) {
5597 : // The generator object register is already set to the variable's local
5598 : // register.
5599 : DCHECK_EQ(generator_object().index(),
5600 : GetRegisterForLocalVariable(generator_object_var).index());
5601 : } else {
5602 : BuildVariableAssignment(generator_object_var, Token::INIT,
5603 0 : HoleCheckMode::kElided);
5604 : }
5605 11220 : }
5606 :
5607 926778 : void BytecodeGenerator::BuildPushUndefinedIntoRegisterList(
5608 : RegisterList* reg_list) {
5609 926778 : Register reg = register_allocator()->GrowRegisterList(reg_list);
5610 926813 : builder()->LoadUndefined().StoreAccumulatorInRegister(reg);
5611 926823 : }
5612 :
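       : // Materializes a property key as a name in |out_reg|: string-literal keys
       : // are loaded directly, all other keys are evaluated and converted via
       : // ToName.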
5613 10019 : void BytecodeGenerator::BuildLoadPropertyKey(LiteralProperty* property,
5614 : Register out_reg) {
5615 10019 : if (property->key()->IsStringLiteral()) {
5616 : builder()
5617 4496 : ->LoadLiteral(property->key()->AsLiteral()->AsRawString())
5618 2248 : .StoreAccumulatorInRegister(out_reg);
5619 : } else {
5620 7771 : VisitForAccumulatorValue(property->key());
5621 7771 : builder()->ToName(out_reg);
5622 : }
5623 10019 : }
5624 :
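       : // Block coverage helpers: when block coverage is enabled these allocate
       : // counter slots for AST source ranges and emit counter increments; when it
       : // is disabled they are no-ops (returning kNoCoverageArraySlot where a slot
       : // is expected).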
5625 0 : int BytecodeGenerator::AllocateBlockCoverageSlotIfEnabled(
5626 : AstNode* node, SourceRangeKind kind) {
5627 2255186 : return (block_coverage_builder_ == nullptr)
5628 : ? BlockCoverageBuilder::kNoCoverageArraySlot
5629 2255186 : : block_coverage_builder_->AllocateBlockCoverageSlot(node, kind);
5630 : }
5631 :
5632 0 : int BytecodeGenerator::AllocateNaryBlockCoverageSlotIfEnabled(
5633 : NaryOperation* node, size_t index) {
5634 244 : return (block_coverage_builder_ == nullptr)
5635 : ? BlockCoverageBuilder::kNoCoverageArraySlot
5636 : : block_coverage_builder_->AllocateNaryBlockCoverageSlot(node,
5637 244 : index);
5638 : }
5639 :
5640 0 : void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
5641 : AstNode* node, SourceRangeKind kind) {
5642 23515 : if (block_coverage_builder_ == nullptr) return;
5643 152 : block_coverage_builder_->IncrementBlockCounter(node, kind);
5644 : }
5645 :
5646 0 : void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
5647 : int coverage_array_slot) {
5648 199211 : if (block_coverage_builder_ != nullptr) {
5649 : block_coverage_builder_->IncrementBlockCounter(coverage_array_slot);
5650 : }
5651 0 : }
5652 :
5653 : // Visits the expression |expr| and places the result in the accumulator.
5654 17594448 : BytecodeGenerator::TypeHint BytecodeGenerator::VisitForAccumulatorValue(
5655 : Expression* expr) {
5656 : ValueResultScope accumulator_scope(this);
5657 17594448 : Visit(expr);
5658 17594346 : return accumulator_scope.type_hint();
5659 : }
5660 :
5661 0 : void BytecodeGenerator::VisitForAccumulatorValueOrTheHole(Expression* expr) {
5662 42715 : if (expr == nullptr) {
5663 36014 : builder()->LoadTheHole();
5664 : } else {
5665 6701 : VisitForAccumulatorValue(expr);
5666 : }
5667 0 : }
5668 :
5669 : // Visits the expression |expr| and discards the result.
5670 12809613 : void BytecodeGenerator::VisitForEffect(Expression* expr) {
5671 : EffectResultScope effect_scope(this);
5672 12809613 : Visit(expr);
5673 12809493 : }
5674 :
5675 : // Visits the expression |expr| and returns the register containing
5676 : // the expression result.
5677 5133287 : Register BytecodeGenerator::VisitForRegisterValue(Expression* expr) {
5678 5133287 : VisitForAccumulatorValue(expr);
5679 5133298 : Register result = register_allocator()->NewRegister();
5680 5133292 : builder()->StoreAccumulatorInRegister(result);
5681 5133300 : return result;
5682 : }
5683 :
5684 : // Visits the expression |expr| and stores the expression result in
5685 : // |destination|.
5686 : void BytecodeGenerator::VisitForRegisterValue(Expression* expr,
5687 : Register destination) {
5688 : ValueResultScope register_scope(this);
5689 862758 : Visit(expr);
5690 862756 : builder()->StoreAccumulatorInRegister(destination);
5691 : }
5692 :
5693 : // Visits the expression |expr| and pushes the result into a new register
5694 : // added to the end of |reg_list|.
5695 6942020 : void BytecodeGenerator::VisitAndPushIntoRegisterList(Expression* expr,
5696 : RegisterList* reg_list) {
5697 : {
5698 : ValueResultScope register_scope(this);
5699 6942020 : Visit(expr);
5700 : }
5701 : // Grow the register list after visiting the expression to avoid reserving
5702 : // the register across the expression evaluation, which could cause memory
5703 : // leaks for deep expressions due to dead objects being kept alive by pointers
5704 : // in registers.
5705 6942027 : Register destination = register_allocator()->GrowRegisterList(reg_list);
5706 6942030 : builder()->StoreAccumulatorInRegister(destination);
5707 6942066 : }
5708 :
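       : // Emits the conditional jumps for a boolean test of the accumulator. Only
       : // the non-fallthrough branch needs an explicit jump; when there is no
       : // fallthrough, a conditional jump to the then-labels is followed by an
       : // unconditional jump to the else-labels.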
5709 888126 : void BytecodeGenerator::BuildTest(ToBooleanMode mode,
5710 : BytecodeLabels* then_labels,
5711 : BytecodeLabels* else_labels,
5712 : TestFallthrough fallthrough) {
5713 888126 : switch (fallthrough) {
5714 : case TestFallthrough::kThen:
5715 1211248 : builder()->JumpIfFalse(mode, else_labels->New());
5716 605624 : break;
5717 : case TestFallthrough::kElse:
5718 565002 : builder()->JumpIfTrue(mode, then_labels->New());
5719 282501 : break;
5720 : case TestFallthrough::kNone:
5721 0 : builder()->JumpIfTrue(mode, then_labels->New());
5722 0 : builder()->Jump(else_labels->New());
5723 0 : break;
5724 : }
5725 888125 : }
5726 :
5727            : // Visits the expression |expr| for testing its boolean value and jumps to the
5728            : // |then| or |else| labels depending on its value and short-circuit semantics.
5729 1035155 : void BytecodeGenerator::VisitForTest(Expression* expr,
5730 : BytecodeLabels* then_labels,
5731 : BytecodeLabels* else_labels,
5732 : TestFallthrough fallthrough) {
5733 : bool result_consumed;
5734 : TypeHint type_hint;
5735 : {
5736 : // To make sure that all temporary registers are returned before generating
5737 : // jumps below, we ensure that the result scope is deleted before doing so.
5738 : // Dead registers might be materialized otherwise.
5739 : TestResultScope test_result(this, then_labels, else_labels, fallthrough);
5740 1035155 : Visit(expr);
5741 : result_consumed = test_result.result_consumed_by_test();
5742 : type_hint = test_result.type_hint();
5743 : // Labels and fallthrough might have been mutated, so update based on
5744 : // TestResultScope.
5745 : then_labels = test_result.then_labels();
5746 : else_labels = test_result.else_labels();
5747 : fallthrough = test_result.fallthrough();
5748 : }
5749 1035159 : if (!result_consumed) {
5750 : BuildTest(ToBooleanModeFromTypeHint(type_hint), then_labels, else_labels,
5751 683174 : fallthrough);
5752 : }
5753 1035155 : }
5754 :
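       : // Visits |expr| within the enclosing test execution scope, emitting the
       : // final boolean test on the accumulator if visiting the expression did not
       : // already consume its result as a test.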
5755 245042 : void BytecodeGenerator::VisitInSameTestExecutionScope(Expression* expr) {
5756 : DCHECK(execution_result()->IsTest());
5757 : {
5758 : RegisterAllocationScope reg_scope(this);
5759 245042 : Visit(expr);
5760 : }
5761 245042 : if (!execution_result()->AsTest()->result_consumed_by_test()) {
5762 : TestResultScope* result_scope = execution_result()->AsTest();
5763 : BuildTest(ToBooleanModeFromTypeHint(result_scope->type_hint()),
5764 : result_scope->then_labels(), result_scope->else_labels(),
5765 204955 : result_scope->fallthrough());
5766 : result_scope->SetResultConsumedByTest();
5767 : }
5768 245042 : }
5769 :
5770 72680 : void BytecodeGenerator::VisitInScope(Statement* stmt, Scope* scope) {
5771 : DCHECK(scope->declarations()->is_empty());
5772 : CurrentScope current_scope(this, scope);
5773 145363 : ContextScope context_scope(this, scope);
5774 72681 : Visit(stmt);
5775 72682 : }
5776 :
5777 0 : Register BytecodeGenerator::GetRegisterForLocalVariable(Variable* variable) {
5778 : DCHECK_EQ(VariableLocation::LOCAL, variable->location());
5779 15655 : return builder()->Local(variable->index());
5780 : }
5781 :
5782 : FunctionKind BytecodeGenerator::function_kind() const {
5783 2124331 : return info()->literal()->kind();
5784 : }
5785 :
5786 : LanguageMode BytecodeGenerator::language_mode() const {
5787 : return current_scope()->language_mode();
5788 : }
5789 :
5790 : Register BytecodeGenerator::generator_object() const {
5791 : DCHECK(IsResumableFunction(info()->literal()->kind()));
5792 : return incoming_new_target_or_generator_;
5793 : }
5794 :
5795 : FeedbackVectorSpec* BytecodeGenerator::feedback_spec() {
5796 : return info()->feedback_vector_spec();
5797 : }
5798 :
5799 : int BytecodeGenerator::feedback_index(FeedbackSlot slot) const {
5800 : DCHECK(!slot.IsInvalid());
5801 : return FeedbackVector::GetIndex(slot);
5802 : }
5803 :
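       : // The GetCached*ICSlot helpers below share feedback slots between repeated
       : // accesses to the same variable or named property: they consult the feedback
       : // slot cache first and only allocate a new slot in the feedback vector spec
       : // on a cache miss.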
5804 7678731 : FeedbackSlot BytecodeGenerator::GetCachedLoadGlobalICSlot(
5805 : TypeofMode typeof_mode, Variable* variable) {
5806 : FeedbackSlotCache::SlotKind slot_kind =
5807 : typeof_mode == INSIDE_TYPEOF
5808 : ? FeedbackSlotCache::SlotKind::kLoadGlobalInsideTypeof
5809 7678731 : : FeedbackSlotCache::SlotKind::kLoadGlobalNotInsideTypeof;
5810 : FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
5811 7678959 : if (!slot.IsInvalid()) {
5812 4619839 : return slot;
5813 : }
5814 : slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
5815 : feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
5816 3059059 : return slot;
5817 : }
5818 :
5819 1453297 : FeedbackSlot BytecodeGenerator::GetCachedStoreGlobalICSlot(
5820 : LanguageMode language_mode, Variable* variable) {
5821 : FeedbackSlotCache::SlotKind slot_kind =
5822 : is_strict(language_mode)
5823 : ? FeedbackSlotCache::SlotKind::kStoreGlobalStrict
5824 1453297 : : FeedbackSlotCache::SlotKind::kStoreGlobalSloppy;
5825 : FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
5826 1453315 : if (!slot.IsInvalid()) {
5827 264062 : return slot;
5828 : }
5829 : slot = feedback_spec()->AddStoreGlobalICSlot(language_mode);
5830 : feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
5831 1189255 : return slot;
5832 : }
5833 :
5834 1162862 : FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr,
5835 : const AstRawString* name) {
5836 1162862 : if (!FLAG_ignition_share_named_property_feedback) {
5837 : return feedback_spec()->AddLoadICSlot();
5838 : }
5839 : FeedbackSlotCache::SlotKind slot_kind =
5840 : FeedbackSlotCache::SlotKind::kLoadProperty;
5841 1162862 : if (!expr->IsVariableProxy()) {
5842 : return feedback_spec()->AddLoadICSlot();
5843 : }
5844 786107 : const VariableProxy* proxy = expr->AsVariableProxy();
5845 : FeedbackSlot slot(
5846 : feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
5847 786110 : if (!slot.IsInvalid()) {
5848 268476 : return slot;
5849 : }
5850 : slot = feedback_spec()->AddLoadICSlot();
5851 : feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
5852 : feedback_index(slot));
5853 517631 : return slot;
5854 : }
5855 :
5856 2317252 : FeedbackSlot BytecodeGenerator::GetCachedStoreICSlot(const Expression* expr,
5857 : const AstRawString* name) {
5858 2317252 : if (!FLAG_ignition_share_named_property_feedback) {
5859 : return feedback_spec()->AddStoreICSlot(language_mode());
5860 : }
5861 : FeedbackSlotCache::SlotKind slot_kind =
5862 : is_strict(language_mode())
5863 : ? FeedbackSlotCache::SlotKind::kStoreNamedStrict
5864 2317252 : : FeedbackSlotCache::SlotKind::kStoreNamedSloppy;
5865 2317252 : if (!expr->IsVariableProxy()) {
5866 : return feedback_spec()->AddStoreICSlot(language_mode());
5867 : }
5868 22862 : const VariableProxy* proxy = expr->AsVariableProxy();
5869 : FeedbackSlot slot(
5870 : feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
5871 22862 : if (!slot.IsInvalid()) {
5872 2503 : return slot;
5873 : }
5874 : slot = feedback_spec()->AddStoreICSlot(language_mode());
5875 : feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
5876 : feedback_index(slot));
5877 20359 : return slot;
5878 : }
5879 :
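       : // Returns the feedback cell index used by CreateClosure for |literal|,
       : // reusing the previously allocated cell when one already exists for the
       : // same literal.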
5880 2926740 : int BytecodeGenerator::GetCachedCreateClosureSlot(FunctionLiteral* literal) {
5881 : FeedbackSlotCache::SlotKind slot_kind =
5882 : FeedbackSlotCache::SlotKind::kClosureFeedbackCell;
5883 : int index = feedback_slot_cache()->Get(slot_kind, literal);
5884 2926822 : if (index != -1) {
5885 : return index;
5886 : }
5887 : index = feedback_spec()->AddFeedbackCellForCreateClosure();
5888 : feedback_slot_cache()->Put(slot_kind, literal, index);
5889 2926788 : return index;
5890 : }
5891 :
5892 0 : FeedbackSlot BytecodeGenerator::GetDummyCompareICSlot() {
5893 1674 : return dummy_feedback_slot_.Get();
5894 : }
5895 :
5896 : } // namespace interpreter
5897 : } // namespace internal
5898 120216 : } // namespace v8