Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #if V8_TARGET_ARCH_X64
6 :
7 : #include "src/assembler-inl.h"
8 : #include "src/ast/compile-time-value.h"
9 : #include "src/ast/scopes.h"
10 : #include "src/builtins/builtins-constructor.h"
11 : #include "src/code-factory.h"
12 : #include "src/code-stubs.h"
13 : #include "src/codegen.h"
14 : #include "src/compilation-info.h"
15 : #include "src/compiler.h"
16 : #include "src/debug/debug.h"
17 : #include "src/full-codegen/full-codegen.h"
18 : #include "src/heap/heap-inl.h"
19 : #include "src/ic/ic.h"
20 : #include "src/objects-inl.h"
21 :
22 : namespace v8 {
23 : namespace internal {
24 :
25 : #define __ ACCESS_MASM(masm())
26 :
 : // Records the location of an inline smi-check jump so the IC system can
 : // later patch it in place (see the note on EmitJump: jc -> jz, jnc -> jnz).
 : // EmitPatchInfo() encodes the byte distance from a marker testl back to the
 : // jump, so the exact instructions and their sizes here are load-bearing.
27 : class JumpPatchSite BASE_EMBEDDED {
28 : public:
29 1299194 : explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
30 : #ifdef DEBUG
31 : info_emitted_ = false;
32 : #endif
33 : }
34 :
 : // Debug-only invariant: a bound patch site must have been announced via
 : // EmitPatchInfo() before the site goes out of scope.
35 : ~JumpPatchSite() {
36 : DCHECK(patch_site_.is_bound() == info_emitted_);
37 : }
38 :
 : // Emits testb(reg, kSmiTagMask) followed by a patchable jnc to |target|.
39 221679 : void EmitJumpIfNotSmi(Register reg,
40 : Label* target,
41 221679 : Label::Distance near_jump = Label::kFar) {
42 221679 : __ testb(reg, Immediate(kSmiTagMask));
43 221679 : EmitJump(not_carry, target, near_jump); // Always taken before patched.
44 221679 : }
45 :
 : // Emits testb(reg, kSmiTagMask) followed by a patchable jc to |target|.
46 241488 : void EmitJumpIfSmi(Register reg,
47 : Label* target,
48 241488 : Label::Distance near_jump = Label::kFar) {
49 241488 : __ testb(reg, Immediate(kSmiTagMask));
50 241488 : EmitJump(carry, target, near_jump); // Never taken before patched.
51 241488 : }
52 :
 : // Emits the marker the IC patcher looks for: a testl whose immediate is the
 : // byte distance back to the patch site (must fit in uint8), or a single nop
 : // when no inline smi check was emitted.
53 2135221 : void EmitPatchInfo() {
54 1299194 : if (patch_site_.is_bound()) {
55 463167 : int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
56 : DCHECK(is_uint8(delta_to_patch_site));
57 463167 : __ testl(rax, Immediate(delta_to_patch_site));
58 : #ifdef DEBUG
59 : info_emitted_ = true;
60 : #endif
61 : } else {
62 836027 : __ nop(); // Signals no inlined code.
63 : }
64 1299194 : }
65 :
66 : private:
67 : // jc will be patched with jz, jnc will become jnz.
68 1389501 : void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
69 : DCHECK(!patch_site_.is_bound() && !info_emitted_);
70 : DCHECK(cc == carry || cc == not_carry);
 : // patch_site_ marks the jump instruction itself; only one jump may be
 : // emitted per JumpPatchSite.
71 926334 : __ bind(&patch_site_);
72 463167 : __ j(cc, target, near_jump);
73 463167 : }
74 :
75 : MacroAssembler* masm() { return masm_; }
76 : MacroAssembler* masm_;
 : // Location of the patchable jump; unbound if no smi check was emitted.
77 : Label patch_site_;
78 : #ifdef DEBUG
79 : bool info_emitted_;
80 : #endif
81 : };
82 :
83 :
84 : // Generate code for a JS function. On entry to the function the receiver
85 : // and arguments have been pushed on the stack left to right, with the
86 : // return address on top of them. The actual argument count matches the
87 : // formal parameter count expected by the function.
88 : //
89 : // The live registers are:
90 : // o rdi: the JS function object being called (i.e. ourselves)
91 : // o rdx: the new target value
92 : // o rsi: our context
93 : // o rbp: our caller's frame pointer
94 : // o rsp: stack pointer (pointing to return address)
95 : //
96 : // The function builds a JS frame. Please see JavaScriptFrameConstants in
97 : // frames-x64.h for its layout.
98 15447287 : void FullCodeGenerator::Generate() {
99 1080609 : CompilationInfo* info = info_;
100 : DCHECK_EQ(scope(), info->scope());
 : // Fresh interrupt budget cell; decremented at back edges and returns.
101 : profiling_counter_ = isolate()->factory()->NewCell(
102 2161221 : Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
103 1080615 : SetFunctionPosition(literal());
104 : Comment cmnt(masm_, "[ function compiled by full code generator");
105 :
106 1080614 : ProfileEntryHookStub::MaybeCallEntryHook(masm_);
107 :
 : // Debug-only sanity check that a sloppy-mode receiver really is a
 : // JSReceiver when the function expects one.
108 1080613 : if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
109 123 : StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
110 123 : __ movp(rcx, args.GetReceiverOperand());
111 123 : __ AssertNotSmi(rcx);
112 123 : __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rcx);
113 123 : __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
114 : }
115 :
116 : // Open a frame scope to indicate that there is a frame on the stack. The
117 : // MANUAL indicates that the scope shouldn't actually generate code to set up
118 : // the frame (that is done below).
119 1080613 : FrameScope frame_scope(masm_, StackFrame::MANUAL);
120 :
121 1080612 : info->set_prologue_offset(masm_->pc_offset());
122 2161225 : __ Prologue(info->GeneratePreagedPrologue());
123 :
124 : // Increment invocation count for the function.
125 : {
126 : Comment cmnt(masm_, "[ Increment invocation count");
127 1080613 : __ movp(rcx, FieldOperand(rdi, JSFunction::kFeedbackVectorOffset));
128 1080615 : __ movp(rcx, FieldOperand(rcx, Cell::kValueOffset));
129 : __ SmiAddConstant(
130 : FieldOperand(rcx, FeedbackVector::kInvocationCountIndex * kPointerSize +
131 : FeedbackVector::kHeaderSize),
132 2161240 : Smi::FromInt(1));
133 : }
134 :
 : // Reserve and undefined-initialize the stack locals. Large frames get a
 : // stack-overflow probe first, and bulk initialization is unrolled in
 : // batches of kMaxPushes.
135 : { Comment cmnt(masm_, "[ Allocate locals");
136 1080617 : int locals_count = info->scope()->num_stack_slots();
137 1080618 : OperandStackDepthIncrement(locals_count);
138 1080612 : if (locals_count == 1) {
139 554734 : __ PushRoot(Heap::kUndefinedValueRootIndex);
140 525878 : } else if (locals_count > 1) {
141 118576 : if (locals_count >= 128) {
142 : Label ok;
143 40 : __ movp(rcx, rsp);
144 80 : __ subp(rcx, Immediate(locals_count * kPointerSize));
145 40 : __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
146 40 : __ j(above_equal, &ok, Label::kNear);
147 40 : __ CallRuntime(Runtime::kThrowStackOverflow);
148 40 : __ bind(&ok);
149 : }
150 118576 : __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
151 : const int kMaxPushes = 32;
152 118576 : if (locals_count >= kMaxPushes) {
153 123 : int loop_iterations = locals_count / kMaxPushes;
154 123 : __ movp(rcx, Immediate(loop_iterations));
155 : Label loop_header;
156 123 : __ bind(&loop_header);
157 : // Do pushes.
158 4059 : for (int i = 0; i < kMaxPushes; i++) {
159 3936 : __ Push(rax);
160 : }
161 : // Continue loop if not done.
162 123 : __ decp(rcx);
163 123 : __ j(not_zero, &loop_header, Label::kNear);
164 : }
165 118576 : int remaining = locals_count % kMaxPushes;
166 : // Emit the remaining pushes.
167 591229 : for (int i = 0; i < remaining; i++) {
168 472653 : __ Push(rax);
169 : }
170 : }
171 : }
172 :
 : // Tracks whether rdi still holds the closure; the context-allocating calls
 : // below clobber it, and later code reloads it from the frame if needed.
173 : bool function_in_register = true;
174 :
175 : // Possibly allocate a local context.
176 2161217 : if (info->scope()->NeedsContext()) {
177 : Comment cmnt(masm_, "[ Allocate context");
178 : bool need_write_barrier = true;
179 39742 : int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
180 : // Argument to NewContext is the function, which is still in rdi.
181 39742 : if (info->scope()->is_script_scope()) {
182 772 : __ Push(rdi);
183 1544 : __ Push(info->scope()->scope_info());
184 772 : __ CallRuntime(Runtime::kNewScriptContext);
185 : PrepareForBailoutForId(BailoutId::ScriptContext(),
186 772 : BailoutState::TOS_REGISTER);
187 : // The new target value is not used, clobbering is safe.
188 : DCHECK_NULL(info->scope()->new_target_var());
189 : } else {
190 38970 : if (info->scope()->new_target_var() != nullptr) {
191 0 : __ Push(rdx); // Preserve new target.
192 : }
193 38970 : if (slots <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
194 : Callable callable = CodeFactory::FastNewFunctionContext(
195 77934 : isolate(), info->scope()->scope_type());
196 77934 : __ Set(FastNewFunctionContextDescriptor::SlotsRegister(), slots);
197 38967 : __ Call(callable.code(), RelocInfo::CODE_TARGET);
198 : // Result of the FastNewFunctionContext builtin is always in new space.
199 : need_write_barrier = false;
200 : } else {
201 3 : __ Push(rdi);
202 6 : __ Push(Smi::FromInt(info->scope()->scope_type()));
203 3 : __ CallRuntime(Runtime::kNewFunctionContext);
204 : }
205 38970 : if (info->scope()->new_target_var() != nullptr) {
206 0 : __ Pop(rdx); // Restore new target.
207 : }
208 : }
209 : function_in_register = false;
210 : // Context is returned in rax. It replaces the context passed to us.
211 : // It's saved in the stack and kept live in rsi.
212 39742 : __ movp(rsi, rax);
213 119226 : __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
214 :
215 : // Copy any necessary parameters into the context.
216 39742 : int num_parameters = info->scope()->num_parameters();
 : // Index -1 denotes the receiver ("this") when it is context-allocated.
217 79484 : int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
218 113102 : for (int i = first_parameter; i < num_parameters; i++) {
219 21711 : Variable* var =
220 73360 : (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
221 73360 : if (var->IsContextSlot()) {
222 21711 : int parameter_offset = StandardFrameConstants::kCallerSPOffset +
223 43422 : (num_parameters - 1 - i) * kPointerSize;
224 : // Load parameter from stack.
225 65133 : __ movp(rax, Operand(rbp, parameter_offset));
226 : // Store it in the context.
227 : int context_offset = Context::SlotOffset(var->index());
228 65133 : __ movp(Operand(rsi, context_offset), rax);
229 : // Update the write barrier. This clobbers rax and rbx.
230 21711 : if (need_write_barrier) {
231 : __ RecordWriteContextSlot(
232 : rsi, context_offset, rax, rbx, kDontSaveFPRegs);
233 21711 : } else if (FLAG_debug_code) {
234 : Label done;
235 : __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
236 0 : __ Abort(kExpectedNewSpaceObject);
237 0 : __ bind(&done);
238 : }
239 : }
240 : }
241 : }
242 :
243 : // Register holding this function and new target are both trashed in case we
244 : // bailout here. But since that can happen only when new target is not used
245 : // and we allocate a context, the value of |function_in_register| is correct.
246 : PrepareForBailoutForId(BailoutId::FunctionContext(),
247 1080611 : BailoutState::NO_REGISTERS);
248 :
249 : // We don't support new.target and rest parameters here.
250 : DCHECK_NULL(info->scope()->new_target_var());
251 : DCHECK_NULL(info->scope()->rest_parameter());
252 : DCHECK_NULL(info->scope()->this_function_var());
253 :
254 : // Possibly allocate an arguments object.
255 : DCHECK_EQ(scope(), info->scope());
256 1080608 : Variable* arguments = info->scope()->arguments();
257 1080611 : if (arguments != NULL) {
258 : // Arguments object must be allocated after the context object, in
259 : // case the "arguments" or ".arguments" variables are in the context.
260 : Comment cmnt(masm_, "[ Allocate arguments object");
261 2677 : if (!function_in_register) {
262 2085 : __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
263 : }
264 2677 : if (is_strict(language_mode()) || !has_simple_parameters()) {
265 : __ call(isolate()->builtins()->FastNewStrictArguments(),
266 2592 : RelocInfo::CODE_TARGET);
267 1296 : RestoreContext();
268 2762 : } else if (literal()->has_duplicate_parameters()) {
269 15 : __ Push(rdi);
270 15 : __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
271 : } else {
272 : __ call(isolate()->builtins()->FastNewSloppyArguments(),
273 2732 : RelocInfo::CODE_TARGET);
274 1366 : RestoreContext();
275 : }
276 :
277 2677 : SetVar(arguments, rax, rbx, rdx);
278 : }
279 :
280 1080610 : if (FLAG_trace) {
281 0 : __ CallRuntime(Runtime::kTraceEnter);
282 : }
283 :
284 : // Visit the declarations and body unless there is an illegal
285 : // redeclaration.
286 : PrepareForBailoutForId(BailoutId::FunctionEntry(),
287 1080610 : BailoutState::NO_REGISTERS);
288 : {
289 : Comment cmnt(masm_, "[ Declarations");
290 1080611 : VisitDeclarations(info->scope()->declarations());
291 : }
292 :
293 : // Assert that the declarations do not use ICs. Otherwise the debugger
294 : // won't be able to redirect a PC at an IC to the correct IC in newly
295 : // recompiled code.
296 : DCHECK_EQ(0, ic_total_count_);
297 :
298 : {
299 : Comment cmnt(masm_, "[ Stack check");
300 : PrepareForBailoutForId(BailoutId::Declarations(),
301 1080614 : BailoutState::NO_REGISTERS);
302 : Label ok;
303 1080619 : __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
304 1080620 : __ j(above_equal, &ok, Label::kNear);
305 2161223 : __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
306 1080614 : __ bind(&ok);
307 : }
308 :
309 : {
310 : Comment cmnt(masm_, "[ Body");
311 : DCHECK(loop_depth() == 0);
312 1080614 : VisitStatements(literal()->body());
313 : DCHECK(loop_depth() == 0);
314 : }
315 :
316 : // Always emit a 'return undefined' in case control fell off the end of
317 : // the body.
318 : { Comment cmnt(masm_, "[ return <undefined>;");
319 1080616 : __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
320 1080618 : EmitReturnSequence();
321 1080613 : }
322 1080611 : }
323 :
324 :
 : // Zeroes the accumulator (rax) — used after statements whose value is dead.
325 44102 : void FullCodeGenerator::ClearAccumulator() {
326 44102 : __ Set(rax, 0);
327 44102 : }
328 :
329 :
 : // Subtracts |delta| from the interrupt-budget cell (profiling_counter_).
 : // Clobbers rbx. The resulting flags are consumed by the caller's jump.
330 2354915 : void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
331 : __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
332 : __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
333 3532374 : Smi::FromInt(-delta));
334 1177459 : }
335 :
336 :
 : // Restores the interrupt-budget cell to FLAG_interrupt_budget.
 : // Clobbers rbx and kScratchRegister.
337 4709827 : void FullCodeGenerator::EmitProfilingCounterReset() {
338 1177455 : int reset_value = FLAG_interrupt_budget;
339 : __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
340 : __ Move(kScratchRegister, Smi::FromInt(reset_value));
341 1177459 : __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
342 1177458 : }
343 :
344 :
 : // Expected code size of the not-taken interrupt-check sequence in
 : // EmitBackEdgeBookkeeping, enforced via PredictableCodeSizeScope so back
 : // edges can be patched reliably. Differs between 64-bit and x32 pointers.
345 : static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
346 :
347 :
 : // Emitted at every loop back edge: decrements the interrupt budget by a
 : // weight proportional to the loop's code size and, when the budget goes
 : // negative, calls the InterruptCheck builtin and resets the budget. The
 : // call sequence has a fixed size (kJnsOffset) so it can be patched for OSR.
348 96490 : void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
349 385960 : Label* back_edge_target) {
350 : Comment cmnt(masm_, "[ Back edge bookkeeping");
351 : Label ok;
352 :
353 : DCHECK(back_edge_target->is_bound());
354 96490 : int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
355 : int weight = Min(kMaxBackEdgeWeight,
356 96490 : Max(1, distance / kCodeSizeMultiplier));
357 96490 : EmitProfilingCounterDecrement(weight);
358 :
 : // Skip the interrupt check while the budget is still non-negative.
359 96490 : __ j(positive, &ok, Label::kNear);
360 : {
361 96490 : PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
362 96490 : DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
363 192980 : __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
364 :
365 : // Record a mapping of this PC offset to the OSR id. This is used to find
366 : // the AST id from the unoptimized code in order to use it as a key into
367 : // the deoptimization input data found in the optimized code.
368 96490 : RecordBackEdge(stmt->OsrEntryId());
369 :
370 192980 : EmitProfilingCounterReset();
371 : }
372 96490 : __ bind(&ok);
373 :
374 96490 : PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
375 : // Record a mapping of the OSR id to this PC. This is used if the OSR
376 : // entry becomes the target of a bailout. We don't expect it to be, but
377 : // we want it to work if it is.
378 96490 : PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
379 96490 : }
380 :
 : // Decrements the interrupt budget at a (tail-)return, treating the exit as
 : // a back edge to the function entry, and calls InterruptCheck when the
 : // budget is exhausted. For normal returns the result register is preserved
 : // around the call; tail calls do not need rax afterwards.
381 1080965 : void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
382 6485109 : bool is_tail_call) {
383 : // Pretend that the exit is a backwards jump to the entry.
384 : int weight = 1;
385 1080965 : if (info_->ShouldSelfOptimize()) {
386 999877 : weight = FLAG_interrupt_budget / FLAG_self_opt_count;
387 : } else {
388 81091 : int distance = masm_->pc_offset();
389 81091 : weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
390 : }
391 1080968 : EmitProfilingCounterDecrement(weight);
392 : Label ok;
393 1080969 : __ j(positive, &ok, Label::kNear);
394 : // Don't need to save result register if we are going to do a tail call.
395 1080970 : if (!is_tail_call) {
396 1080620 : __ Push(rax);
397 : }
398 2161935 : __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
399 1080968 : if (!is_tail_call) {
400 1080617 : __ Pop(rax);
401 : }
402 1080967 : EmitProfilingCounterReset();
403 1080968 : __ bind(&ok);
404 1080970 : }
405 :
 : // Emits the function's single return sequence, or a jump to it if it was
 : // already emitted. The sequence handles interrupt-budget bookkeeping,
 : // tears down the frame and pops the receiver plus arguments.
406 6549456 : void FullCodeGenerator::EmitReturnSequence() {
407 : Comment cmnt(masm_, "[ Return sequence");
408 2194112 : if (return_label_.is_bound()) {
409 2227008 : __ jmp(&return_label_);
410 : } else {
411 2161216 : __ bind(&return_label_);
412 1080617 : if (FLAG_trace) {
413 0 : __ Push(rax);
414 0 : __ CallRuntime(Runtime::kTraceExit);
415 : }
416 1080617 : EmitProfilingCounterHandlingForReturnSequence(false);
417 :
418 1080620 : SetReturnPosition(literal());
419 1080620 : __ leave();
420 :
 : // +1 accounts for the receiver pushed below the arguments.
421 2161218 : int arg_count = info_->scope()->num_parameters() + 1;
422 1080612 : int arguments_bytes = arg_count * kPointerSize;
423 1080612 : __ Ret(arguments_bytes, rcx);
424 : }
425 2194110 : }
426 :
 : // Reloads rsi with the context slot saved in the current frame; used after
 : // calls that may have switched contexts.
427 11238284 : void FullCodeGenerator::RestoreContext() {
428 16857426 : __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
429 5619142 : }
430 :
 : // The Plug overloads below deliver a just-computed value into the current
 : // expression context: Effect discards it, AccumulatorValue leaves it in
 : // rax, StackValue pushes it, and Test branches on its truthiness.
 :
 : // Push the value of a stack- or context-allocated variable.
431 2643208 : void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
432 : DCHECK(var->IsStackAllocated() || var->IsContextSlot());
433 2643208 : MemOperand operand = codegen()->VarOperand(var, result_register());
434 : codegen()->PushOperand(operand);
435 2643209 : }
436 :
437 :
 : // A root constant in effect context: nothing to emit.
438 29 : void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
439 29 : }
440 :
441 :
442 11765 : void FullCodeGenerator::AccumulatorValueContext::Plug(
443 : Heap::RootListIndex index) const {
444 11765 : __ LoadRoot(result_register(), index);
445 11765 : }
446 :
447 :
448 6629 : void FullCodeGenerator::StackValueContext::Plug(
449 : Heap::RootListIndex index) const {
450 6629 : codegen()->OperandStackDepthIncrement(1);
451 6629 : __ PushRoot(index);
452 6629 : }
453 :
454 :
 : // Root constants with statically-known truthiness branch directly; any
 : // other root falls back to the generic ToBoolean test on the accumulator.
455 0 : void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
456 : codegen()->PrepareForBailoutBeforeSplit(condition(),
457 : true,
458 : true_label_,
459 0 : false_label_);
460 0 : if (index == Heap::kUndefinedValueRootIndex ||
461 0 : index == Heap::kNullValueRootIndex ||
462 : index == Heap::kFalseValueRootIndex) {
463 0 : if (false_label_ != fall_through_) __ jmp(false_label_);
464 0 : } else if (index == Heap::kTrueValueRootIndex) {
465 0 : if (true_label_ != fall_through_) __ jmp(true_label_);
466 : } else {
467 0 : __ LoadRoot(result_register(), index);
468 0 : codegen()->DoTest(this);
469 : }
470 0 : }
471 :
472 :
 : // A literal in effect context: nothing to emit.
473 604934 : void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
474 604934 : }
475 :
476 :
 : // SafeMove/SafePush are used for smis so embedded values cannot be abused
 : // as jump targets (smi immediates are xor-scrambled when required).
477 1807648 : void FullCodeGenerator::AccumulatorValueContext::Plug(
478 : Handle<Object> lit) const {
479 1807648 : if (lit->IsSmi()) {
480 1167394 : __ SafeMove(result_register(), Smi::cast(*lit));
481 : } else {
482 640254 : __ Move(result_register(), lit);
483 : }
484 1807648 : }
485 :
486 :
487 771521 : void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
488 1543042 : codegen()->OperandStackDepthIncrement(1);
489 771521 : if (lit->IsSmi()) {
490 445072 : __ SafePush(Smi::cast(*lit));
491 : } else {
492 326449 : __ Push(lit);
493 : }
494 771521 : }
495 :
496 :
 : // A literal in test context: branch on its statically-known truthiness
 : // where possible (null/undefined/false, true/objects, strings by length,
 : // smis by value); otherwise run the generic test on the accumulator.
497 47488 : void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
498 : codegen()->PrepareForBailoutBeforeSplit(condition(),
499 : true,
500 : true_label_,
501 103145 : false_label_);
502 : DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
503 47484 : if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
504 616 : if (false_label_ != fall_through_) __ jmp(false_label_);
505 43191 : } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
506 3724 : if (true_label_ != fall_through_) __ jmp(true_label_);
507 19785 : } else if (lit->IsString()) {
508 18 : if (String::cast(*lit)->length() == 0) {
509 0 : if (false_label_ != fall_through_) __ jmp(false_label_);
510 : } else {
511 18 : if (true_label_ != fall_through_) __ jmp(true_label_);
512 : }
513 19767 : } else if (lit->IsSmi()) {
514 19763 : if (Smi::cast(*lit)->value() == 0) {
515 15027 : if (false_label_ != fall_through_) __ jmp(false_label_);
516 : } else {
517 12858 : if (true_label_ != fall_through_) __ jmp(true_label_);
518 : }
519 : } else {
520 : // For simplicity we always test the accumulator register.
521 4 : __ Move(result_register(), lit);
522 4 : codegen()->DoTest(this);
523 : }
524 23744 : }
525 :
526 :
 : // Replace the top |count| stack operands with the value in |reg|: drop all
 : // but one slot, then overwrite the remaining top-of-stack slot.
527 194509 : void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
528 : Register reg) const {
529 : DCHECK(count > 0);
530 389018 : if (count > 1) codegen()->DropOperands(count - 1);
531 583527 : __ movp(Operand(rsp, 0), reg);
532 194509 : }
533 :
534 :
 : // Effect context discards the materialized boolean; both labels must be
 : // the same join point.
535 123 : void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
536 : Label* materialize_false) const {
537 : DCHECK(materialize_true == materialize_false);
538 123 : __ bind(materialize_true);
539 123 : }
540 :
541 :
 : // Materialize the outcome of a test into the accumulator as true/false.
542 54288 : void FullCodeGenerator::AccumulatorValueContext::Plug(
543 : Label* materialize_true,
544 : Label* materialize_false) const {
545 : Label done;
546 380016 : __ bind(materialize_true);
547 54288 : __ Move(result_register(), isolate()->factory()->true_value());
548 54288 : __ jmp(&done, Label::kNear);
549 54288 : __ bind(materialize_false);
550 54288 : __ Move(result_register(), isolate()->factory()->false_value());
551 54288 : __ bind(&done);
552 54288 : }
553 :
554 :
 : // Materialize the outcome of a test as true/false pushed on the stack.
555 6586 : void FullCodeGenerator::StackValueContext::Plug(
556 : Label* materialize_true,
557 : Label* materialize_false) const {
558 59274 : codegen()->OperandStackDepthIncrement(1);
559 : Label done;
560 6586 : __ bind(materialize_true);
561 6586 : __ Push(isolate()->factory()->true_value());
562 6586 : __ jmp(&done, Label::kNear);
563 6586 : __ bind(materialize_false);
564 6586 : __ Push(isolate()->factory()->false_value());
565 6586 : __ bind(&done);
566 6586 : }
567 :
568 :
 : // In test context the branch already targets the context's own labels, so
 : // there is nothing to materialize.
569 694944 : void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
570 : Label* materialize_false) const {
571 : DCHECK(materialize_true == true_label_);
572 : DCHECK(materialize_false == false_label_);
573 694944 : }
574 :
575 :
576 62 : void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
577 : Heap::RootListIndex value_root_index =
578 62 : flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
579 62 : __ LoadRoot(result_register(), value_root_index);
580 62 : }
581 :
582 :
583 27 : void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
584 54 : codegen()->OperandStackDepthIncrement(1);
585 : Heap::RootListIndex value_root_index =
586 27 : flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
587 27 : __ PushRoot(value_root_index);
588 27 : }
589 :
590 :
 : // A compile-time boolean in test context becomes an unconditional jump.
591 18 : void FullCodeGenerator::TestContext::Plug(bool flag) const {
592 : codegen()->PrepareForBailoutBeforeSplit(condition(),
593 : true,
594 : true_label_,
595 21 : false_label_);
596 9 : if (flag) {
597 3 : if (true_label_ != fall_through_) __ jmp(true_label_);
598 : } else {
599 9 : if (false_label_ != fall_through_) __ jmp(false_label_);
600 : }
601 9 : }
602 :
603 :
 : // Evaluate |condition|'s truthiness via the ToBoolean IC, then branch:
 : // equal-to-true goes to if_true, otherwise if_false.
604 231709 : void FullCodeGenerator::DoTest(Expression* condition,
605 : Label* if_true,
606 : Label* if_false,
607 463418 : Label* fall_through) {
608 231709 : Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
609 231709 : CallIC(ic, condition->test_id());
610 231709 : __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
611 231709 : Split(equal, if_true, if_false, fall_through);
612 231709 : }
613 :
614 :
 : // Emit the minimal branch sequence for condition |cc|: a single jump when
 : // one target is the fall-through path, otherwise a jump plus an
 : // unconditional jump.
615 1488176 : void FullCodeGenerator::Split(Condition cc,
616 : Label* if_true,
617 : Label* if_false,
618 2168687 : Label* fall_through) {
619 1488176 : if (if_false == fall_through) {
620 324466 : __ j(cc, if_true);
621 1163710 : } else if (if_true == fall_through) {
622 483199 : __ j(NegateCondition(cc), if_false);
623 : } else {
624 680511 : __ j(cc, if_true);
625 680511 : __ jmp(if_false);
626 : }
627 1488176 : }
628 :
629 :
 : // rbp-relative operand for a stack-allocated variable: parameters live
 : // above the saved fp/return address, locals below.
630 5175415 : MemOperand FullCodeGenerator::StackOperand(Variable* var) {
631 : DCHECK(var->IsStackAllocated());
632 : // Offset is negative because higher indexes are at lower addresses.
633 5175415 : int offset = -var->index() * kPointerSize;
634 : // Adjust by a (parameter or local) base offset.
635 5175415 : if (var->IsParameter()) {
636 1821979 : offset += kFPOnStackSize + kPCOnStackSize +
637 3643958 : (info_->scope()->num_parameters() - 1) * kPointerSize;
638 : } else {
639 3353436 : offset += JavaScriptFrameConstants::kLocal0Offset;
640 : }
641 5175415 : return Operand(rbp, offset);
642 : }
643 :
644 :
 : // Operand for a variable. Context slots walk the context chain into
 : // |scratch| (clobbering it); stack slots delegate to StackOperand.
645 13407252 : MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
646 : DCHECK(var->IsContextSlot() || var->IsStackAllocated());
647 7128429 : if (var->IsContextSlot()) {
648 2092941 : int context_chain_length = scope()->ContextChainLength(var->scope());
649 2092941 : __ LoadContext(scratch, context_chain_length);
650 2092941 : return ContextOperand(scratch, var->index());
651 : } else {
652 5035488 : return StackOperand(var);
653 : }
654 : }
655 :
656 :
 : // Load |var|'s value into |dest| (also used as the scratch for the context
 : // walk, which is safe since it is overwritten by the final load).
657 5304185 : void FullCodeGenerator::GetVar(Register dest, Variable* var) {
658 : DCHECK(var->IsContextSlot() || var->IsStackAllocated());
659 2652093 : MemOperand location = VarOperand(var, dest);
660 2652092 : __ movp(dest, location);
661 2652093 : }
662 :
663 :
 : // Store |src| into |var|. For context slots a write barrier is emitted,
 : // which clobbers both scratch registers.
664 2677 : void FullCodeGenerator::SetVar(Variable* var,
665 : Register src,
666 : Register scratch0,
667 2677 : Register scratch1) {
668 : DCHECK(var->IsContextSlot() || var->IsStackAllocated());
669 : DCHECK(!scratch0.is(src));
670 : DCHECK(!scratch0.is(scratch1));
671 : DCHECK(!scratch1.is(src));
672 2677 : MemOperand location = VarOperand(var, scratch0);
673 2677 : __ movp(location, src);
674 :
675 : // Emit the write barrier code if the location is in the heap.
676 2677 : if (var->IsContextSlot()) {
677 : int offset = Context::SlotOffset(var->index());
678 : __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
679 : }
680 2677 : }
681 :
682 :
 : // In test context, record a bailout point for |expr| and, when
 : // |should_normalize|, fold the accumulator's true/false into a branch so
 : // deopt re-entry sees a normalized value. No-op outside test contexts.
683 792432 : void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
684 : bool should_normalize,
685 : Label* if_true,
686 2396514 : Label* if_false) {
687 : // Only prepare for bailouts before splits if we're in a test
688 : // context. Otherwise, we let the Visit function deal with the
689 : // preparation to avoid preparing with the same AST id twice.
690 853429 : if (!context()->IsTest()) return;
691 :
692 : Label skip;
 : // The skip jump keeps the normalization code out of the normal path; it
 : // only runs when execution resumes here after a bailout.
693 1266129 : if (should_normalize) __ jmp(&skip, Label::kNear);
694 731435 : PrepareForBailout(expr, BailoutState::TOS_REGISTER);
695 731434 : if (should_normalize) {
696 534694 : __ CompareRoot(rax, Heap::kTrueValueRootIndex);
697 534694 : Split(equal, if_true, if_false, NULL);
698 534694 : __ bind(&skip);
699 : }
700 : }
701 :
702 :
 : // Debug-only: asserts that the current context (rsi) is neither a with-
 : // nor a catch-context, i.e. that the declared variable really lives in the
 : // function's own context. Emits nothing unless --debug-code is on.
703 285780 : void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
704 : // The variable in the declaration always resides in the current context.
705 : DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
706 285780 : if (FLAG_debug_code) {
707 : // Check that we're not inside a with or catch context.
708 0 : __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
709 0 : __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
710 0 : __ Check(not_equal, kDeclarationInWithContext);
711 0 : __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
712 0 : __ Check(not_equal, kDeclarationInCatchContext);
713 : }
714 285780 : }
715 :
716 :
 : // Emits code (or records global data) for a variable declaration.
 : // Globals are accumulated into globals_ for a later DeclareGlobals call;
 : // stack/context slots that need initialization are set to the hole.
717 1573204 : void FullCodeGenerator::VisitVariableDeclaration(
718 1840498 : VariableDeclaration* declaration) {
719 1573204 : VariableProxy* proxy = declaration->proxy();
720 2039275 : Variable* variable = proxy->var();
721 1573204 : switch (variable->location()) {
722 : case VariableLocation::UNALLOCATED: {
723 : DCHECK(!variable->binding_needs_init());
 : // Four entries per global: name, feedback slot, and two undefined
 : // placeholders (matching the function-declaration record shape).
724 723448 : globals_->Add(variable->name(), zone());
725 : FeedbackSlot slot = proxy->VariableFeedbackSlot();
726 : DCHECK(!slot.IsInvalid());
727 723448 : globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
728 723448 : globals_->Add(isolate()->factory()->undefined_value(), zone());
729 723448 : globals_->Add(isolate()->factory()->undefined_value(), zone());
730 : break;
731 : }
732 : case VariableLocation::PARAMETER:
733 : case VariableLocation::LOCAL:
734 752570 : if (variable->binding_needs_init()) {
735 : Comment cmnt(masm_, "[ VariableDeclaration");
736 92454 : __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
737 184908 : __ movp(StackOperand(variable), kScratchRegister);
738 : }
739 : break;
740 :
741 : case VariableLocation::CONTEXT:
742 458910 : if (variable->binding_needs_init()) {
743 : Comment cmnt(masm_, "[ VariableDeclaration");
744 104347 : EmitDebugCheckDeclarationContext(variable);
745 104347 : __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
746 208694 : __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
747 : // No write barrier since the hole value is in old space.
748 104347 : PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
749 : }
750 : break;
751 :
752 : case VariableLocation::LOOKUP:
753 : case VariableLocation::MODULE:
754 0 : UNREACHABLE();
755 : }
756 1573204 : }
757 :
758 :
 : // Emits code (or records global data) for a function declaration. Unlike
 : // plain variables, the declared function value is computed eagerly and
 : // stored into its slot; context stores get a write barrier.
759 315236 : void FullCodeGenerator::VisitFunctionDeclaration(
760 1070898 : FunctionDeclaration* declaration) {
761 315236 : VariableProxy* proxy = declaration->proxy();
762 764433 : Variable* variable = proxy->var();
763 315236 : switch (variable->location()) {
764 : case VariableLocation::UNALLOCATED: {
 : // Four entries per global: name, variable feedback slot, literal
 : // feedback slot, and the SharedFunctionInfo.
765 172662 : globals_->Add(variable->name(), zone());
766 : FeedbackSlot slot = proxy->VariableFeedbackSlot();
767 : DCHECK(!slot.IsInvalid());
768 172662 : globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
769 :
770 : // We need the slot where the literals array lives, too.
771 : slot = declaration->fun()->LiteralFeedbackSlot();
772 : DCHECK(!slot.IsInvalid());
773 172662 : globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
774 :
775 : Handle<SharedFunctionInfo> function =
776 172662 : Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
777 : // Check for stack-overflow exception.
778 401567 : if (function.is_null()) return SetStackOverflow();
779 172662 : globals_->Add(function, zone());
780 86331 : break;
781 : }
782 :
783 : case VariableLocation::PARAMETER:
784 : case VariableLocation::LOCAL: {
785 : Comment cmnt(masm_, "[ FunctionDeclaration");
786 47472 : VisitForAccumulatorValue(declaration->fun());
787 94944 : __ movp(StackOperand(variable), result_register());
788 : break;
789 : }
790 :
791 : case VariableLocation::CONTEXT: {
792 : Comment cmnt(masm_, "[ FunctionDeclaration");
793 181433 : EmitDebugCheckDeclarationContext(variable);
794 181433 : VisitForAccumulatorValue(declaration->fun());
795 362866 : __ movp(ContextOperand(rsi, variable->index()), result_register());
796 : int offset = Context::SlotOffset(variable->index());
797 : // We know that we have written a function, which is not a smi.
798 : __ RecordWriteContextSlot(rsi,
799 : offset,
800 : result_register(),
801 : rcx,
802 : kDontSaveFPRegs,
803 : EMIT_REMEMBERED_SET,
804 : OMIT_SMI_CHECK);
805 181433 : PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
806 : break;
807 : }
808 :
809 : case VariableLocation::LOOKUP:
810 : case VariableLocation::MODULE:
811 0 : UNREACHABLE();
812 : }
813 : }
814 :
815 :
 : // Declares all accumulated global variables/functions in one runtime call.
 : // |pairs| is the flat array built up in globals_ by the Visit*Declaration
 : // methods above; the feedback vector is passed along for the slots.
816 235542 : void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
817 : // Call the runtime to declare the globals.
818 39257 : __ Push(pairs);
819 78514 : __ Push(Smi::FromInt(DeclareGlobalsFlags()));
820 39257 : __ EmitLoadFeedbackVector(rax);
821 39257 : __ Push(rax);
822 39257 : __ CallRuntime(Runtime::kDeclareGlobals);
823 : // Return value is ignored.
824 39257 : }
825 :
826 :
827 1172405 : void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
 : // Compiles a switch statement as a sequence of strict-equality tests
 : // against the tag value kept on top of the operand stack, followed by
 : // all case bodies (fall-through preserved via body_target labels).
828 : Comment cmnt(masm_, "[ SwitchStatement");
829 : Breakable nested_statement(this, stmt);
830 11652 : SetStatementPosition(stmt);
831 :
832 : // Keep the switch value on the stack until a case matches.
833 11652 : VisitForStackValue(stmt->tag());
834 11652 : PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
835 :
836 : ZoneList<CaseClause*>* clauses = stmt->cases();
837 : CaseClause* default_clause = NULL; // Can occur anywhere in the list.
838 :
839 : Label next_test; // Recycled for each test.
840 : // Compile all the tests with branches to their bodies.
841 185194 : for (int i = 0; i < clauses->length(); i++) {
842 347084 : CaseClause* clause = clauses->at(i);
843 : clause->body_target()->Unuse();
844 :
845 : // The default is not a test, but remember it as final fall through.
846 80945 : if (clause->is_default()) {
847 : default_clause = clause;
848 5105 : continue;
849 : }
850 :
851 : Comment cmnt(masm_, "[ Case comparison");
852 75840 : __ bind(&next_test);
853 : next_test.Unuse();
854 :
855 : // Compile the label expression.
856 75840 : VisitForAccumulatorValue(clause->label());
857 :
858 : // Perform the comparison as if via '==='.
859 227520 : __ movp(rdx, Operand(rsp, 0)); // Switch value.
860 75840 : bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
861 75840 : JumpPatchSite patch_site(masm_);
862 75840 : if (inline_smi_code) {
 : // Fast path: if both tag (rdx) and label (rax) are smis, compare
 : // them directly and skip the CompareIC stub call entirely.
863 : Label slow_case;
864 5938 : __ movp(rcx, rdx);
865 5938 : __ orp(rcx, rax);
866 5938 : patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
867 :
868 5938 : __ cmpp(rdx, rax);
869 5938 : __ j(not_equal, &next_test);
870 5938 : __ Drop(1); // Switch value is no longer needed.
871 11876 : __ jmp(clause->body_target());
872 5938 : __ bind(&slow_case);
873 : }
874 :
875 : // Record position before stub call for type feedback.
876 75840 : SetExpressionPosition(clause);
877 : Handle<Code> ic =
878 151680 : CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
879 75840 : CallIC(ic, clause->CompareId());
 : // EmitPatchInfo records the distance back to the patch site so the
 : // IC system can later patch the inline smi check; do not reorder or
 : // insert code between CallIC and this call.
880 75840 : patch_site.EmitPatchInfo();
881 :
 : // The `skip` block below is only reached via deoptimized re-entry at
 : // the PrepareForBailout point, where rax holds a boolean comparison
 : // result rather than the IC's integer flag (NOTE(review): inferred
 : // from the TOS_REGISTER bailout + CompareRoot against true — confirm).
882 : Label skip;
883 75840 : __ jmp(&skip, Label::kNear);
884 75840 : PrepareForBailout(clause, BailoutState::TOS_REGISTER);
885 75840 : __ CompareRoot(rax, Heap::kTrueValueRootIndex);
886 75840 : __ j(not_equal, &next_test);
887 75840 : __ Drop(1);
888 151680 : __ jmp(clause->body_target());
889 75840 : __ bind(&skip);
890 :
 : // Normal path: the CompareIC leaves zero in rax when the operands
 : // compared equal (presumably — semantics live in the IC stub).
891 75840 : __ testp(rax, rax);
892 75840 : __ j(not_equal, &next_test);
893 75840 : __ Drop(1); // Switch value is no longer needed.
894 75840 : __ jmp(clause->body_target());
895 : }
896 :
897 : // Discard the test value and jump to the default if present, otherwise to
898 : // the end of the statement.
899 11652 : __ bind(&next_test);
900 11652 : DropOperands(1); // Switch value is no longer needed.
901 11652 : if (default_clause == NULL) {
902 6547 : __ jmp(nested_statement.break_label());
903 : } else {
904 10210 : __ jmp(default_clause->body_target());
905 : }
906 :
907 : // Compile all the case bodies.
908 173542 : for (int i = 0; i < clauses->length(); i++) {
909 : Comment cmnt(masm_, "[ Case body");
910 161890 : CaseClause* clause = clauses->at(i);
911 161890 : __ bind(clause->body_target());
912 80945 : PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
913 80945 : VisitStatements(clause->statements());
914 : }
915 :
916 11652 : __ bind(nested_statement.break_label());
917 11652 : PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
918 11652 : }
919 :
920 :
921 174896 : void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
 : // Compiles a for-in loop. The loop state lives in five operand-stack
 : // slots (see pushes below); from top of stack down:
 : //   [rsp+0] current index (smi)
 : //   [rsp+1] length / number of valid cache entries (smi)
 : //   [rsp+2] enum cache array (or fixed array of keys on the slow path)
 : //   [rsp+3] map of the enumerable (or Smi(1) marking the slow path)
 : //   [rsp+4] the enumerable object itself
922 : Comment cmnt(masm_, "[ ForInStatement");
923 2572 : SetStatementPosition(stmt, SKIP_BREAK);
924 :
925 : FeedbackSlot slot = stmt->ForInFeedbackSlot();
926 :
927 : // Get the object to enumerate over.
928 2572 : SetExpressionAsStatementPosition(stmt->enumerable());
929 2572 : VisitForAccumulatorValue(stmt->enumerable());
930 2572 : OperandStackDepthIncrement(5);
931 :
932 : Label loop, exit;
933 : Iteration loop_statement(this, stmt);
934 : increment_loop_depth();
935 :
936 : // If the object is null or undefined, skip over the loop, otherwise convert
937 : // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
938 : Label convert, done_convert;
939 2572 : __ JumpIfSmi(rax, &convert, Label::kNear);
940 2572 : __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
941 2572 : __ j(above_equal, &done_convert, Label::kNear);
942 2572 : __ CompareRoot(rax, Heap::kNullValueRootIndex);
943 2572 : __ j(equal, &exit);
944 2572 : __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
945 2572 : __ j(equal, &exit);
946 2572 : __ bind(&convert);
947 5144 : __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
948 2572 : RestoreContext();
949 2572 : __ bind(&done_convert);
950 2572 : PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
951 2572 : __ Push(rax);
952 :
953 : // Check cache validity in generated code. If we cannot guarantee cache
954 : // validity, call the runtime system to check cache validity or get the
955 : // property names in a fixed array. Note: Proxies never have an enum cache,
956 : // so will always take the slow path.
957 : Label call_runtime;
958 2572 : __ CheckEnumCache(&call_runtime);
959 :
960 : // The enum cache is valid. Load the map of the object being
961 : // iterated over and use the cache for the iteration.
962 : Label use_cache;
963 2572 : __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
964 2572 : __ jmp(&use_cache, Label::kNear);
965 :
966 : // Get the set of properties to enumerate.
967 2572 : __ bind(&call_runtime);
968 2572 : __ Push(rax); // Duplicate the enumerable object on the stack.
969 2572 : __ CallRuntime(Runtime::kForInEnumerate);
970 2572 : PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
971 :
972 : // If we got a map from the runtime call, we can do a fast
973 : // modification check. Otherwise, we got a fixed array, and we have
974 : // to do a slow check.
975 : Label fixed_array;
976 : __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
977 5144 : Heap::kMetaMapRootIndex);
978 2572 : __ j(not_equal, &fixed_array);
979 :
980 : // We got a map in register rax. Get the enumeration cache from it.
981 2572 : __ bind(&use_cache);
982 :
983 : Label no_descriptors;
984 :
985 2572 : __ EnumLength(rdx, rax);
986 2572 : __ Cmp(rdx, Smi::kZero);
987 2572 : __ j(equal, &no_descriptors);
988 :
989 2572 : __ LoadInstanceDescriptors(rax, rcx);
990 2572 : __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
991 2572 : __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
992 :
993 : // Set up the four remaining stack slots.
994 2572 : __ Push(rax); // Map.
995 2572 : __ Push(rcx); // Enumeration cache.
996 2572 : __ Push(rdx); // Number of valid entries for the map in the enum cache.
997 2572 : __ Push(Smi::kZero); // Initial index.
998 2572 : __ jmp(&loop);
999 :
 : // Enum cache exists but is empty: nothing to iterate. Pop the
 : // enumerable that was pushed above and skip the loop entirely.
1000 2572 : __ bind(&no_descriptors);
1001 2572 : __ addp(rsp, Immediate(kPointerSize));
1002 2572 : __ jmp(&exit);
1003 :
1004 : // We got a fixed array in register rax. Iterate through that.
1005 2572 : __ bind(&fixed_array);
1006 :
1007 7716 : __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1008 2572 : __ Push(Smi::FromInt(1)); // Smi(1) indicates slow check
1009 2572 : __ Push(rax); // Array
1010 2572 : __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1011 2572 : __ Push(rax); // Fixed array length (as smi).
1012 2572 : PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1013 2572 : __ Push(Smi::kZero); // Initial index.
1014 :
1015 : // Generate code for doing the condition check.
1016 2572 : __ bind(&loop);
1017 2572 : SetExpressionAsStatementPosition(stmt->each());
1018 :
1019 7716 : __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1020 7716 : __ cmpp(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1021 2572 : __ j(above_equal, loop_statement.break_label());
1022 :
1023 : // Get the current entry of the array into register rax.
1024 7716 : __ movp(rbx, Operand(rsp, 2 * kPointerSize));
1025 2572 : SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1026 : __ movp(rax,
1027 5144 : FieldOperand(rbx, index.reg, index.scale, FixedArray::kHeaderSize));
1028 :
1029 : // Get the expected map from the stack or a smi in the
1030 : // permanent slow case into register rdx.
1031 7716 : __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1032 :
1033 : // Check if the expected map still matches that of the enumerable.
1034 : // If not, we may have to filter the key.
1035 : Label update_each;
1036 7716 : __ movp(rbx, Operand(rsp, 4 * kPointerSize));
1037 2572 : __ cmpp(rdx, FieldOperand(rbx, HeapObject::kMapOffset));
1038 2572 : __ j(equal, &update_each, Label::kNear);
1039 :
1040 : // We need to filter the key, record slow-path here.
 : // Record megamorphic feedback in the for-in slot so the optimizing
 : // compilers know this loop took the filtered (slow) path.
1041 : int const vector_index = SmiFromSlot(slot)->value();
1042 2572 : __ EmitLoadFeedbackVector(rdx);
1043 : __ Move(FieldOperand(rdx, FixedArray::OffsetOfElementAt(vector_index)),
1044 5144 : FeedbackVector::MegamorphicSentinel(isolate()));
1045 :
1046 : // rax contains the key. The receiver in rbx is the second argument to
1047 : // ForInFilter. ForInFilter returns undefined if the receiver doesn't
1048 : // have the key or returns the name-converted key.
1049 5144 : __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
1050 2572 : RestoreContext();
1051 2572 : PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1052 : __ JumpIfRoot(result_register(), Heap::kUndefinedValueRootIndex,
1053 : loop_statement.continue_label());
1054 :
1055 : // Update the 'each' property or variable from the possibly filtered
1056 : // entry in register rax.
1057 2572 : __ bind(&update_each);
1058 : // Perform the assignment as if via '='.
1059 : { EffectContext context(this);
1060 2572 : EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1061 2572 : PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1062 : }
1063 :
1064 : // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1065 2572 : PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1066 : // Generate code for the body of the loop.
1067 2572 : Visit(stmt->body());
1068 :
1069 : // Generate code for going to the next element by incrementing the
1070 : // index (smi) stored on top of the stack.
1071 2572 : __ bind(loop_statement.continue_label());
1072 2572 : PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1073 5144 : __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1074 :
 : // Interrupt/OSR check on the back edge before looping again.
1075 2572 : EmitBackEdgeBookkeeping(stmt, &loop);
1076 2572 : __ jmp(&loop);
1077 :
1078 : // Remove the pointers stored on the stack.
1079 2572 : __ bind(loop_statement.break_label());
1080 2572 : DropOperands(5);
1081 :
1082 : // Exit and decrement the loop depth.
1083 2572 : PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1084 2572 : __ bind(&exit);
1085 : decrement_loop_depth();
1086 2572 : }
1087 :
1088 12 : void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1089 36 : FeedbackSlot slot) {
 : // Stores the [[HomeObject]] on the method/accessor closure that is on
 : // top of the operand stack; the home object itself sits `offset`
 : // slots further down.
1090 : DCHECK(NeedsHomeObject(initializer));
1091 36 : __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1092 : __ movp(StoreDescriptor::ValueRegister(),
1093 36 : Operand(rsp, offset * kPointerSize));
1094 12 : CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1095 12 : }
1096 :
1097 159 : void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1098 : int offset,
1099 477 : FeedbackSlot slot) {
 : // Same as EmitSetHomeObject, except the closure receiving the
 : // [[HomeObject]] is in the accumulator (rax) instead of on the stack.
1100 : DCHECK(NeedsHomeObject(initializer));
1101 318 : __ movp(StoreDescriptor::ReceiverRegister(), rax);
1102 : __ movp(StoreDescriptor::ValueRegister(),
1103 477 : Operand(rsp, offset * kPointerSize));
1104 159 : CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1105 159 : }
1106 :
1107 17522030 : void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1108 6285616 : TypeofMode typeof_mode) {
 : // Loads the value of the variable referenced by `proxy` into the
 : // current expression context. Globals go through an IC; stack and
 : // context slots are loaded directly, with an optional TDZ hole check.
1109 : // Record position before possible IC call.
1110 6112194 : SetExpressionPosition(proxy);
1111 6112198 : PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1112 6146883 : Variable* var = proxy->var();
1113 :
1114 : // Two cases: global variable, and all other types of variables.
1115 6112200 : switch (var->location()) {
1116 : case VariableLocation::UNALLOCATED: {
1117 : Comment cmnt(masm_, "[ Global variable");
1118 814565 : EmitGlobalVariableLoad(proxy, typeof_mode);
1119 814565 : context()->Plug(rax);
1120 : break;
1121 : }
1122 :
1123 : case VariableLocation::PARAMETER:
1124 : case VariableLocation::LOCAL:
1125 : case VariableLocation::CONTEXT: {
1126 : DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1127 : Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1128 : : "[ Stack slot");
1129 5297636 : if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
1130 : // Throw a reference error when using an uninitialized let/const
1131 : // binding in harmony mode.
 : // (TDZ check: the_hole marks a binding not yet initialized.)
1132 : DCHECK(IsLexicalVariableMode(var->mode()));
1133 : Label done;
1134 34683 : GetVar(rax, var);
1135 34683 : __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1136 34683 : __ j(not_equal, &done, Label::kNear);
1137 34683 : __ Push(var->name());
1138 34683 : __ CallRuntime(Runtime::kThrowReferenceError);
1139 34683 : __ bind(&done);
1140 34683 : context()->Plug(rax);
1141 : break;
1142 : }
1143 5262953 : context()->Plug(var);
1144 5262953 : break;
1145 : }
1146 :
1147 : case VariableLocation::LOOKUP:
1148 : case VariableLocation::MODULE:
1149 0 : UNREACHABLE();
1150 : }
1151 6112200 : }
1152 :
1153 :
1154 2571 : void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
 : // Pushes the getter or setter function for an accessor pair onto the
 : // operand stack, or null when this half of the pair is absent.
1155 1858 : Expression* expression = (property == NULL) ? NULL : property->value();
1156 1858 : if (expression == NULL) {
1157 701 : OperandStackDepthIncrement(1);
1158 701 : __ PushRoot(Heap::kNullValueRootIndex);
1159 : } else {
1160 1157 : VisitForStackValue(expression);
1161 1157 : if (NeedsHomeObject(expression)) {
1162 : DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1163 : property->kind() == ObjectLiteral::Property::SETTER);
 : // The home object's distance from the stack top differs for
 : // getters vs. setters because the setter is pushed after the
 : // getter (see the accessor_table loop in VisitObjectLiteral).
1164 12 : int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1165 12 : EmitSetHomeObject(expression, offset, property->GetSlot());
1166 : }
1167 : }
1168 1858 : }
1169 :
1170 :
1171 2043462 : void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
 : // Compiles an object literal: first creates the boilerplate object
 : // (fast clone stub or runtime call), then emits stores for all
 : // non-compile-time-constant properties, then defines accessor pairs.
1172 : Comment cmnt(masm_, "[ ObjectLiteral");
1173 :
1174 : Handle<BoilerplateDescription> constant_properties =
1175 : expr->GetOrBuildConstantProperties(isolate());
1176 144384 : int flags = expr->ComputeFlags();
1177 144384 : if (MustCreateObjectLiteralWithRuntime(expr)) {
1178 222804 : __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1179 111402 : __ Push(SmiFromSlot(expr->literal_slot()));
1180 111402 : __ Push(constant_properties);
1181 111402 : __ Push(Smi::FromInt(flags));
1182 111402 : __ CallRuntime(Runtime::kCreateObjectLiteral);
1183 : } else {
 : // Fast path: FastCloneShallowObject expects closure in rax,
 : // literal slot in rbx, boilerplate in rcx, flags in rdx.
1184 98946 : __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1185 : __ Move(rbx, SmiFromSlot(expr->literal_slot()));
1186 32982 : __ Move(rcx, constant_properties);
1187 : __ Move(rdx, Smi::FromInt(flags));
1188 : Callable callable = CodeFactory::FastCloneShallowObject(
1189 32982 : isolate(), expr->properties_count());
1190 32982 : __ Call(callable.code(), RelocInfo::CODE_TARGET);
1191 32982 : RestoreContext();
1192 : }
1193 144384 : PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1194 :
1195 : // If result_saved is true the result is on top of the stack. If
1196 : // result_saved is false the result is in rax.
1197 : bool result_saved = false;
1198 :
 : // Getter/setter pairs are collected here and defined after the main
 : // property loop so each pair costs a single runtime call.
1199 : AccessorTable accessor_table(zone());
1200 1264644 : for (int i = 0; i < expr->properties()->length(); i++) {
1201 692367 : ObjectLiteral::Property* property = expr->properties()->at(i);
1202 : DCHECK(!property->is_computed_name());
 : // Compile-time-constant values are already in the boilerplate.
1203 487938 : if (property->IsCompileTimeValue()) continue;
1204 :
1205 598962 : Literal* key = property->key()->AsLiteral();
1206 : Expression* value = property->value();
1207 204429 : if (!result_saved) {
1208 65134 : PushOperand(rax); // Save result on the stack
1209 : result_saved = true;
1210 : }
1211 204429 : switch (property->kind()) {
1212 : case ObjectLiteral::Property::SPREAD:
1213 : case ObjectLiteral::Property::CONSTANT:
1214 0 : UNREACHABLE();
1215 : case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1216 : DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1217 : // Fall through.
1218 : case ObjectLiteral::Property::COMPUTED:
1219 : // It is safe to use [[Put]] here because the boilerplate already
1220 : // contains computed properties with an uninitialized value.
1221 190419 : if (key->IsStringLiteral()) {
1222 : DCHECK(key->IsPropertyName());
1223 190170 : if (property->emit_store()) {
1224 190104 : VisitForAccumulatorValue(value);
1225 : DCHECK(StoreDescriptor::ValueRegister().is(rax));
1226 570312 : __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1227 190104 : CallStoreIC(property->GetSlot(0), key->value(), kStoreOwn);
1228 190104 : PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1229 :
1230 190104 : if (NeedsHomeObject(value)) {
1231 159 : EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1232 : }
1233 : } else {
 : // Store is elided (e.g. shadowed property); still evaluate
 : // the value for its side effects.
1234 66 : VisitForEffect(value);
1235 : }
1236 : break;
1237 : }
 : // Non-name key: fall back to Runtime::kSetProperty.
1238 498 : PushOperand(Operand(rsp, 0)); // Duplicate receiver.
1239 249 : VisitForStackValue(key);
1240 249 : VisitForStackValue(value);
1241 249 : if (property->emit_store()) {
1242 230 : if (NeedsHomeObject(value)) {
1243 0 : EmitSetHomeObject(value, 2, property->GetSlot());
1244 : }
1245 230 : PushOperand(Smi::FromInt(SLOPPY)); // Language mode
1246 230 : CallRuntimeWithOperands(Runtime::kSetProperty);
1247 : } else {
1248 19 : DropOperands(3);
1249 : }
1250 : break;
1251 : case ObjectLiteral::Property::PROTOTYPE:
1252 25548 : PushOperand(Operand(rsp, 0)); // Duplicate receiver.
1253 12774 : VisitForStackValue(value);
1254 : DCHECK(property->emit_store());
1255 12774 : CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1256 : PrepareForBailoutForId(expr->GetIdForPropertySet(i),
1257 12774 : BailoutState::NO_REGISTERS);
1258 12774 : break;
1259 : case ObjectLiteral::Property::GETTER:
1260 803 : if (property->emit_store()) {
1261 766 : AccessorTable::Iterator it = accessor_table.lookup(key);
1262 1532 : it->second->bailout_id = expr->GetIdForPropertySet(i);
1263 766 : it->second->getter = property;
1264 : }
1265 : break;
1266 : case ObjectLiteral::Property::SETTER:
1267 433 : if (property->emit_store()) {
1268 398 : AccessorTable::Iterator it = accessor_table.lookup(key);
1269 796 : it->second->bailout_id = expr->GetIdForPropertySet(i);
1270 398 : it->second->setter = property;
1271 : }
1272 : break;
1273 : }
1274 : }
1275 :
1276 : // Emit code to define accessors, using only a single call to the runtime for
1277 : // each pair of corresponding getters and setters.
1278 289697 : for (AccessorTable::Iterator it = accessor_table.begin();
1279 : it != accessor_table.end();
1280 : ++it) {
1281 1858 : PushOperand(Operand(rsp, 0)); // Duplicate receiver.
1282 929 : VisitForStackValue(it->first);
1283 929 : EmitAccessor(it->second->getter);
1284 929 : EmitAccessor(it->second->setter);
1285 929 : PushOperand(Smi::FromInt(NONE));
1286 929 : CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1287 929 : PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1288 : }
1289 :
1290 144384 : if (result_saved) {
1291 65134 : context()->PlugTOS();
1292 : } else {
1293 79250 : context()->Plug(rax);
1294 : }
1295 144384 : }
1296 :
1297 :
1298 862344 : void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
 : // Compiles an array literal: clones the boilerplate (fast stub or
 : // runtime call), then stores each non-constant element with a keyed
 : // store IC.
1299 : Comment cmnt(masm_, "[ ArrayLiteral");
1300 :
1301 : Handle<ConstantElementsPair> constant_elements =
1302 : expr->GetOrBuildConstantElements(isolate());
1303 :
1304 74125 : if (MustCreateArrayLiteralWithRuntime(expr)) {
1305 2994 : __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1306 1497 : __ Push(SmiFromSlot(expr->literal_slot()));
1307 1497 : __ Push(constant_elements);
1308 1497 : __ Push(Smi::FromInt(expr->ComputeFlags()));
1309 1497 : __ CallRuntime(Runtime::kCreateArrayLiteral);
1310 : } else {
 : // Fast path: FastCloneShallowArray expects closure in rax,
 : // literal slot in rbx, constant elements in rcx.
1311 217884 : __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1312 : __ Move(rbx, SmiFromSlot(expr->literal_slot()));
1313 72628 : __ Move(rcx, constant_elements);
1314 : Callable callable =
1315 72628 : CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
1316 72628 : __ Call(callable.code(), RelocInfo::CODE_TARGET);
1317 72628 : RestoreContext();
1318 : }
1319 74125 : PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1320 :
1321 : bool result_saved = false; // Is the result saved to the stack?
1322 : ZoneList<Expression*>* subexprs = expr->values();
1323 74125 : int length = subexprs->length();
1324 :
1325 : // Emit code to evaluate all the non-constant subexpressions and to store
1326 : // them into the newly cloned array.
1327 3974686 : for (int array_index = 0; array_index < length; array_index++) {
1328 3900561 : Expression* subexpr = subexprs->at(array_index);
1329 : DCHECK(!subexpr->IsSpread());
1330 :
1331 : // If the subexpression is a literal or a simple materialized literal it
1332 : // is already set in the cloned array.
1333 3900561 : if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1334 :
1335 134672 : if (!result_saved) {
1336 25436 : PushOperand(rax); // array literal
1337 : result_saved = true;
1338 : }
1339 134672 : VisitForAccumulatorValue(subexpr);
1340 :
 : // Keyed store: name = element index (smi), receiver = the array
 : // saved on top of the stack, value = rax (the accumulator).
1341 134672 : __ Move(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
1342 404016 : __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1343 134672 : CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1344 :
1345 : PrepareForBailoutForId(expr->GetIdForElement(array_index),
1346 134672 : BailoutState::NO_REGISTERS);
1347 : }
1348 :
1349 74125 : if (result_saved) {
1350 25436 : context()->PlugTOS();
1351 : } else {
1352 48689 : context()->Plug(rax);
1353 : }
1354 74125 : }
1355 :
1356 :
1357 11185541 : void FullCodeGenerator::VisitAssignment(Assignment* expr) {
 : // Compiles an assignment (plain or compound) to a variable, named
 : // property, or keyed property. Super property assignments are not
 : // handled by full-codegen on this path (UNREACHABLE below).
1358 : DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1359 :
1360 : Comment cmnt(masm_, "[ Assignment");
1361 :
1362 3821340 : Property* property = expr->target()->AsProperty();
1363 2777696 : LhsKind assign_type = Property::GetAssignType(property);
1364 :
1365 : // Evaluate LHS expression.
1366 2777698 : switch (assign_type) {
1367 : case VARIABLE:
1368 : // Nothing to do here.
1369 : break;
1370 : case NAMED_PROPERTY:
1371 905052 : if (expr->is_compound()) {
1372 : // We need the receiver both on the stack and in the register.
1373 216 : VisitForStackValue(property->obj());
1374 648 : __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
1375 : } else {
1376 904836 : VisitForStackValue(property->obj());
1377 : }
1378 : break;
1379 : case KEYED_PROPERTY: {
1380 69299 : if (expr->is_compound()) {
 : // Compound keyed assignment: receiver and key are needed both
 : // on the stack (for the store) and in registers (for the load).
1381 468 : VisitForStackValue(property->obj());
1382 468 : VisitForStackValue(property->key());
1383 1404 : __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
1384 1404 : __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
1385 : } else {
1386 68831 : VisitForStackValue(property->obj());
1387 68831 : VisitForStackValue(property->key());
1388 : }
1389 : break;
1390 : }
1391 : case NAMED_SUPER_PROPERTY:
1392 : case KEYED_SUPER_PROPERTY:
1393 0 : UNREACHABLE();
1394 : break;
1395 : }
1396 :
1397 : // For compound assignments we need another deoptimization point after the
1398 : // variable/property load.
1399 2777698 : if (expr->is_compound()) {
1400 : { AccumulatorValueContext context(this);
1401 55084 : switch (assign_type) {
1402 : case VARIABLE:
1403 108800 : EmitVariableLoad(expr->target()->AsVariableProxy());
1404 54400 : PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1405 54400 : break;
1406 : case NAMED_PROPERTY:
1407 216 : EmitNamedPropertyLoad(property);
1408 : PrepareForBailoutForId(property->LoadId(),
1409 216 : BailoutState::TOS_REGISTER);
1410 216 : break;
1411 : case KEYED_PROPERTY:
1412 468 : EmitKeyedPropertyLoad(property);
1413 : PrepareForBailoutForId(property->LoadId(),
1414 468 : BailoutState::TOS_REGISTER);
1415 468 : break;
1416 : case NAMED_SUPER_PROPERTY:
1417 : case KEYED_SUPER_PROPERTY:
1418 0 : UNREACHABLE();
1419 : break;
1420 : }
1421 : }
1422 :
 : // Apply the binary operator to (old value, RHS); the result lands
 : // in the accumulator for the store below.
1423 55084 : Token::Value op = expr->binary_op();
1424 55084 : PushOperand(rax); // Left operand goes on the stack.
1425 55084 : VisitForAccumulatorValue(expr->value());
1426 :
1427 : AccumulatorValueContext context(this);
1428 55084 : if (ShouldInlineSmiCase(op)) {
1429 : EmitInlineSmiBinaryOp(expr->binary_operation(),
1430 : op,
1431 : expr->target(),
1432 27200 : expr->value());
1433 : } else {
1434 27884 : EmitBinaryOp(expr->binary_operation(), op);
1435 : }
1436 : // Deoptimization point in case the binary operation may have side effects.
1437 55084 : PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1438 : } else {
1439 2722614 : VisitForAccumulatorValue(expr->value());
1440 : }
1441 :
1442 2777694 : SetExpressionPosition(expr);
1443 :
1444 : // Store the value.
1445 2777689 : switch (assign_type) {
1446 : case VARIABLE: {
1447 5410027 : VariableProxy* proxy = expr->target()->AsVariableProxy();
1448 : EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
1449 1803344 : proxy->hole_check_mode());
1450 1803350 : PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1451 1803350 : context()->Plug(rax);
1452 1803345 : break;
1453 : }
1454 : case NAMED_PROPERTY:
1455 905052 : EmitNamedPropertyAssignment(expr);
1456 905052 : break;
1457 : case KEYED_PROPERTY:
1458 69299 : EmitKeyedPropertyAssignment(expr);
1459 69299 : break;
1460 : case NAMED_SUPER_PROPERTY:
1461 : case KEYED_SUPER_PROPERTY:
1462 0 : UNREACHABLE();
1463 : break;
1464 : }
1465 2777695 : }
1466 :
1467 0 : void FullCodeGenerator::VisitSuspend(Suspend* expr) {
 : // Resumable functions are not supported.
 : // (Generators/async functions are presumably routed to a different
 : // compiler before reaching full-codegen — this path must never run.)
1468 :
1469 0 : UNREACHABLE();
1470 : }
1471 :
1472 3159175 : void FullCodeGenerator::PushOperand(MemOperand operand) {
 : // Pushes a memory operand onto the machine stack while keeping the
 : // bookkeeping counter (operand_stack_depth_) in sync.
1473 3159175 : OperandStackDepthIncrement(1);
1474 3159175 : __ Push(operand);
1475 0 : }
1476 :
1477 0 : void FullCodeGenerator::EmitOperandStackDepthCheck() {
 : // Debug-only sanity check: verifies that the tracked operand stack
 : // depth matches the actual rbp-rsp distance at this point.
1478 0 : if (FLAG_debug_code) {
1479 0 : int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1480 0 : operand_stack_depth_ * kPointerSize;
1481 0 : __ movp(rax, rbp);
1482 0 : __ subp(rax, rsp);
1483 0 : __ cmpp(rax, Immediate(expected_diff));
1484 0 : __ Assert(equal, kUnexpectedStackDepth);
1485 : }
1486 0 : }
1487 :
1488 0 : void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
 : // Allocates a JSIteratorResult {value, done} in new space (falling
 : // back to the runtime on allocation failure), popping the value from
 : // the operand stack and setting `done` to the given constant.
1489 : Label allocate, done_allocate;
1490 :
1491 : __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &allocate,
1492 0 : NO_ALLOCATION_FLAGS);
1493 0 : __ jmp(&done_allocate, Label::kNear);
1494 :
 : // Slow path: inline allocation failed; ask the runtime.
1495 0 : __ bind(&allocate);
1496 0 : __ Push(Smi::FromInt(JSIteratorResult::kSize));
1497 0 : __ CallRuntime(Runtime::kAllocateInNewSpace);
1498 :
 : // Initialize map, (empty) properties/elements, value and done fields.
1499 0 : __ bind(&done_allocate);
1500 0 : __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
1501 0 : __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
1502 0 : __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
1503 0 : __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
1504 0 : __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
1505 0 : __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
1506 : __ LoadRoot(FieldOperand(rax, JSIteratorResult::kDoneOffset),
1507 0 : done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1508 : STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1509 0 : OperandStackDepthDecrement(1);
1510 0 : }
1511 :
1512 :
1513 241488 : void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1514 : Token::Value op,
1515 : Expression* left,
1516 2414880 : Expression* right) {
 : // Emits an inline smi fast path for the binary operation plus a
 : // patchable BinaryOpIC call for the generic case. The result is
 : // plugged into the current context from rax.
1517 : // Do combined smi check of the operands. Left operand is on the
1518 : // stack (popped into rdx). Right operand is in rax but moved into
1519 : // rcx to make the shifts easier.
1520 : Label done, stub_call, smi_case;
1521 241488 : PopOperand(rdx);
1522 241488 : __ movp(rcx, rax);
 : // rax = left | right: low tag bits are 0 only if both are smis.
1523 241488 : __ orp(rax, rdx);
1524 241488 : JumpPatchSite patch_site(masm_);
1525 241488 : patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
1526 :
1527 241488 : __ bind(&stub_call);
1528 241488 : __ movp(rax, rcx);
1529 482976 : Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1530 241488 : CallIC(code, expr->BinaryOperationFeedbackId());
 : // Must immediately follow CallIC: records the patch-site delta for
 : // IC patching.
1531 241488 : patch_site.EmitPatchInfo();
1532 241488 : __ jmp(&done, Label::kNear);
1533 :
 : // Smi fast path; ops that can overflow or deoptimize jump back to
 : // stub_call (left in rdx, right in rcx throughout).
1534 241488 : __ bind(&smi_case);
1535 241488 : switch (op) {
1536 : case Token::SAR:
1537 20785 : __ SmiShiftArithmeticRight(rax, rdx, rcx);
1538 20785 : break;
1539 : case Token::SHL:
1540 5117 : __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
1541 5117 : break;
1542 : case Token::SHR:
1543 4678 : __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
1544 4678 : break;
1545 : case Token::ADD:
1546 149656 : __ SmiAdd(rax, rdx, rcx, &stub_call);
1547 149656 : break;
1548 : case Token::SUB:
1549 9896 : __ SmiSub(rax, rdx, rcx, &stub_call);
1550 9896 : break;
1551 : case Token::MUL:
1552 9993 : __ SmiMul(rax, rdx, rcx, &stub_call);
1553 9993 : break;
1554 : case Token::BIT_OR:
1555 37261 : __ SmiOr(rax, rdx, rcx);
1556 37261 : break;
1557 : case Token::BIT_AND:
1558 3798 : __ SmiAnd(rax, rdx, rcx);
1559 3798 : break;
1560 : case Token::BIT_XOR:
1561 304 : __ SmiXor(rax, rdx, rcx);
1562 304 : break;
1563 : default:
1564 0 : UNREACHABLE();
1565 : break;
1566 : }
1567 :
1568 241488 : __ bind(&done);
1569 241488 : context()->Plug(rax);
1570 241488 : }
1571 :
1572 :
1573 1369491 : void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
 : // Generic binary operation: left popped into rdx, right already in
 : // rax; delegates entirely to the BinaryOpIC. The unbound patch site
 : // still emits patch info to signal "no inlined smi code" to the IC.
1574 456497 : PopOperand(rdx);
1575 912994 : Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1576 456497 : JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1577 456497 : CallIC(code, expr->BinaryOperationFeedbackId());
1578 456497 : patch_site.EmitPatchInfo();
1579 456497 : context()->Plug(rax);
1580 456497 : }
1581 :
1582 5171 : void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
 : // Assigns the accumulator (rax) to the reference `expr` as if via
 : // '='; used when the value is already computed (e.g. the for-in loop
 : // assigning each key). Plugs rax back into the current context.
1583 : DCHECK(expr->IsValidReferenceExpressionOrThis());
1584 :
1585 2626 : Property* prop = expr->AsProperty();
1586 2572 : LhsKind assign_type = Property::GetAssignType(prop);
1587 :
1588 2572 : switch (assign_type) {
1589 : case VARIABLE: {
1590 5090 : VariableProxy* proxy = expr->AsVariableProxy();
1591 : EffectContext context(this);
1592 : EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
1593 2545 : proxy->hole_check_mode());
1594 : break;
1595 : }
1596 : case NAMED_PROPERTY: {
1597 3 : PushOperand(rax); // Preserve value.
1598 3 : VisitForAccumulatorValue(prop->obj());
1599 6 : __ Move(StoreDescriptor::ReceiverRegister(), rax);
1600 3 : PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
1601 9 : CallStoreIC(slot, prop->key()->AsLiteral()->value());
1602 3 : break;
1603 : }
1604 : case KEYED_PROPERTY: {
1605 24 : PushOperand(rax); // Preserve value.
1606 24 : VisitForStackValue(prop->obj());
1607 24 : VisitForAccumulatorValue(prop->key());
1608 48 : __ Move(StoreDescriptor::NameRegister(), rax);
1609 24 : PopOperand(StoreDescriptor::ReceiverRegister());
1610 24 : PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
1611 24 : CallKeyedStoreIC(slot);
1612 24 : break;
1613 : }
1614 : case NAMED_SUPER_PROPERTY:
1615 : case KEYED_SUPER_PROPERTY:
1616 0 : UNREACHABLE();
1617 : break;
1618 : }
1619 2572 : context()->Plug(rax);
1620 2572 : }
1621 :
1622 :
1623 1809391 : void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
1624 3253888 : Variable* var, MemOperand location) {
 : // Stores rax into the given slot. For context slots a write barrier
 : // is required; the RecordWriteContextSlot call below takes rcx as the
 : // context register, so callers must have the context in rcx when
 : // `var` is a context slot (NOTE(review): rcx is read, not set, here —
 : // confirm callers establish it, e.g. via VarOperand(var, rcx)).
1625 1809391 : __ movp(location, rax);
1626 1809389 : if (var->IsContextSlot()) {
 : // rdx is used as a scratch copy of the value; rbx as barrier scratch.
1627 481499 : __ movp(rdx, rax);
1628 : __ RecordWriteContextSlot(
1629 : rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
1630 : }
1631 1809389 : }
1632 :
// Assigns the value in rax to |var| under operator |op|.  Handles four cases:
// unallocated (global) variables via a store IC, lexical (let/const)
// non-initializing stores with a TDZ hole check, the initializing store to
// const {this}, and plain stack/context-slot stores.  |hole_check_mode|
// decides whether a TheHole check (ThrowReferenceError) must be emitted.
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackSlot slot,
                                               HoleCheckMode hole_check_mode) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    CallStoreIC(slot, var->name(), kStoreGlobal);

  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    MemOperand location = VarOperand(var, rcx);
    // Perform an initialization check for lexically declared variables.
    if (hole_check_mode == HoleCheckMode::kRequired) {
      Label assign;
      __ movp(rdx, location);
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, &assign, Label::kNear);
      // Still TheHole: the binding is in its temporal dead zone.
      __ Push(var->name());
      __ CallRuntime(Runtime::kThrowReferenceError);
      __ bind(&assign);
    }
    if (var->mode() != CONST) {
      EmitStoreToStackLocalOrContextSlot(var, location);
    } else if (var->throw_on_const_assignment(language_mode())) {
      // Assignment to const throws in strict mode; in sloppy mode it is
      // silently ignored (no store emitted).
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, rcx);
    __ movp(rdx, location);
    __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
    __ j(equal, &uninitialized_this);
    // {this} was already initialized: re-initialization is an error.
    __ Push(var->name());
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else {
    DCHECK(var->mode() != CONST || op == Token::INIT);
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    DCHECK(!var->IsLookupSlot());
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    MemOperand location = VarOperand(var, rcx);
    if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
      // Check for an uninitialized let binding.
      __ movp(rdx, location);
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      __ Check(equal, kLetBindingReInitialization);
    }
    EmitStoreToStackLocalOrContextSlot(var, location);
  }
}
1690 :
1691 :
// Completes an assignment to a named property via the store IC.  Expects the
// receiver on top of the operand stack and the value in rax (the store IC's
// value register); leaves the value in rax.
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  PopOperand(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());

  // Register a deopt point with the result in the accumulator.
  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(rax);
}
1704 :
1705 :
// Completes an assignment to a keyed property via the keyed store IC.
// Expects key then receiver on the operand stack (popped in that order) and
// the value already in rax, which is the IC's value register.
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  PopOperand(StoreDescriptor::NameRegister());  // Key.
  PopOperand(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(rax));
  CallKeyedStoreIC(expr->AssignmentSlot());

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(rax);
}
1716 :
// Code common for calls using the IC.
// Emits the callee/receiver setup for a call whose target is either a bare
// variable (receiver = undefined) or a named property load off the receiver
// already on the stack, then delegates argument handling to EmitCall.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the Call builtin if it
    // is a sloppy mode method.
    PushOperand(isolate()->factory()->undefined_value());
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver: duplicate the receiver
    // slot, then overwrite the old receiver slot with the loaded function.
    PushOperand(Operand(rsp, 0));
    __ movp(Operand(rsp, kPointerSize), rax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}
1748 :
1749 :
// Common code for calls using the IC.
// Like EmitCallWithLoadIC, but the callee is a keyed property (receiver[key]):
// the key is evaluated into the accumulator and the function is fetched with
// the keyed load IC before delegating to EmitCall.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
  __ Move(LoadDescriptor::NameRegister(), rax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver (duplicate the top stack
  // slot, then overwrite the slot above it with the loaded function).
  PushOperand(Operand(rsp, 0));
  __ movp(Operand(rsp, kPointerSize), rax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}
1772 :
1773 :
// Pushes the arguments and performs the actual call through the CallIC
// trampoline.  Expects the target function and the receiver to already sit
// on the operand stack (function below receiver).  Register protocol for the
// trampoline: rdi = function, rax = argument count, rdx = feedback slot index.
void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> code =
      CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
          .code();
  __ Set(rdx, IntFromSlot(expr->CallFeedbackICSlot()));
  // The function sits below the receiver and the arg_count arguments.
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ Set(rax, arg_count);
  CallIC(code);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, rax);
}
1806 :
// Emits a `new` expression: evaluates the constructor, pushes the arguments,
// loads the feedback vector, and invokes the CallConstructStub.  Register
// protocol for the stub: rdi = constructor, rax = argument count,
// rbx = feedback vector, rdx = feedback slot (as Smi).
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into rdi and rax.
  __ Set(rax, arg_count);
  __ movp(rdi, Operand(rsp, arg_count * kPointerSize));

  // Record call targets in unoptimized code, but not in the snapshot.
  __ EmitLoadFeedbackVector(rbx);
  __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate());
  CallIC(stub.GetCode());
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(rax);
}
1845 :
1846 15810 : void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
1847 : ZoneList<Expression*>* args = expr->arguments();
1848 : DCHECK(args->length() == 1);
1849 :
1850 3162 : VisitForAccumulatorValue(args->at(0));
1851 :
1852 : Label materialize_true, materialize_false;
1853 3162 : Label* if_true = NULL;
1854 3162 : Label* if_false = NULL;
1855 3162 : Label* fall_through = NULL;
1856 : context()->PrepareTest(&materialize_true, &materialize_false,
1857 3162 : &if_true, &if_false, &fall_through);
1858 :
1859 3162 : PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
1860 6324 : __ JumpIfSmi(rax, if_true);
1861 6324 : __ jmp(if_false);
1862 :
1863 3162 : context()->Plug(if_true, if_false);
1864 3162 : }
1865 :
1866 :
// Intrinsic %_IsJSReceiver(x): true when the value is a heap object whose
// instance type is at or above FIRST_JS_RECEIVER_TYPE.
void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);  // Smis are not receivers.
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
1887 :
1888 :
// Intrinsic %_IsArray(x): true when the value is a heap object with instance
// type JS_ARRAY_TYPE (exact match, not array-like).
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
1909 :
1910 :
// Intrinsic %_IsTypedArray(x): true when the value is a heap object with
// instance type JS_TYPED_ARRAY_TYPE.
void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_TYPED_ARRAY_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
1931 :
1932 :
// Intrinsic %_IsJSProxy(x): true when the value is a heap object with
// instance type JS_PROXY_TYPE.
void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);


  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_PROXY_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
1954 :
// Intrinsic %_ClassOf(x): computes the [[Class]]-style name of a value.
// Returns null for non-receivers, "Function" for (bound) functions,
// "Object" for receivers with a non-function map constructor, and otherwise
// the constructor's instance class name.  Result is left in rax.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(rax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  // Note: CmpObjectType leaves the map in rax, clobbering the value.
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(rax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(rax, rax, rbx);
  __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // rax now contains the constructor function. Grab the
  // instance class name from there.
  __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(rax, Heap::kFunction_stringRootIndex);
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(rax, Heap::kObject_stringRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(rax);
}
2003 :
// Intrinsic %_StringCharCodeAt(string, index): fast-path char-code load via
// StringCharCodeAtGenerator, with NaN for out-of-range indices and undefined
// funneled back through the generator's slow path for conversion.  Result
// ends up in rdx and is plugged into the context.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register result = rdx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  // Emit the generator's slow path (no runtime-call helper needed here).
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2043 :
2044 :
// Intrinsic %_Call(target, receiver, ...args): pushes all operands, then
// invokes the generic Call builtin.  The first two arguments are the call
// target and receiver, so argc for the builtin is length - 2.  Register
// protocol: rdi = target, rax = argc.
void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to rdi.
  int const argc = args->length() - 2;
  __ movp(rdi, Operand(rsp, (argc + 1) * kPointerSize));
  // Call the target.
  __ Set(rax, argc);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, rax);
}
2064 :
// Intrinsic %_GetSuperConstructor(fn): loads the [[Prototype]] of the
// function's map, i.e. the super constructor, into rax.
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(rax);
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ movp(rax, FieldOperand(rax, Map::kPrototypeOffset));
  context()->Plug(rax);
}
2074 :
// Intrinsic %_DebugIsActive(): reads the isolate's debug_is_active byte from
// its external-reference address and returns it as a Smi in rax.
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Move(kScratchRegister, debug_is_active);
  __ movzxbp(rax, Operand(kScratchRegister, 0));  // Zero-extend the byte.
  __ Integer32ToSmi(rax, rax);
  context()->Plug(rax);
}
2084 :
2085 :
// Intrinsic %_CreateIterResultObject(value, done): inline-allocates a
// {value, done} JSIteratorResult object, falling back to the runtime when
// new-space allocation fails.  The two operands are popped directly into the
// object's fields; result is in rax.
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  // Pop done (pushed last) then value straight into the object fields.
  __ Pop(FieldOperand(rax, JSIteratorResult::kDoneOffset));
  __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(rax);
}
2112 :
2113 :
// Sets up the operand stack for a JS-runtime-function call: pushes the
// function (loaded from the native context slot) and an undefined receiver.
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), rax);
  PushOperand(rax);

  // Push undefined as receiver.
  // PushRoot goes around PushOperand, so track the depth explicitly.
  OperandStackDepthIncrement(1);
  __ PushRoot(Heap::kUndefinedValueRootIndex);
}
2123 :
2124 :
// Performs the call set up by EmitLoadJSRuntimeFunction: arguments are
// already on the stack; loads the function into rdi, the arg count into rax,
// and invokes the Call builtin with a null/undefined receiver.
void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  // Function sits below the receiver and the arg_count arguments.
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ Set(rax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}
2137 :
2138 :
// Emits code for the unary operators delete, void, !, and typeof.  Each case
// threads its result through the current expression context (effect, test,
// accumulator, or stack value).
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj[key] / delete obj.key: evaluated via the runtime.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        PushOperand(Smi::FromInt(language_mode()));
        CallRuntimeWithOperands(Runtime::kDeleteProperty);
        context()->Plug(rax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocated()) {
          // Global variable: delete it from the global object's extension.
          __ movp(rax, NativeContextOperand());
          __ Push(ContextOperand(rax, Context::EXTENSION_INDEX));
          __ Push(var->name());
          __ Push(Smi::FromInt(SLOPPY));
          __ CallRuntime(Runtime::kDeleteProperty);
          context()->Plug(rax);
        } else {
          DCHECK(!var->IsLookupSlot());
          DCHECK(var->IsStackAllocated() || var->IsContextSlot());
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      // Evaluate the operand for side effects only; result is undefined.
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Labels are swapped: the operand's false branch materializes true.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
        } else {
          __ PushRoot(Heap::kTrueValueRootIndex);
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
        } else {
          __ PushRoot(Heap::kFalseValueRootIndex);
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      // The Typeof builtin takes its operand in rbx here.
      __ movp(rbx, rax);
      __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
      context()->Plug(rax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
2253 :
2254 :
// Emits code for ++/-- (prefix and postfix) on a variable, named property,
// or keyed property.  The fast path inlines Smi +/-1 behind a JumpPatchSite
// (patched by the IC system; see EmitPatchInfo, which records the byte
// offset of the patch site — do not reorder emission here).  Postfix forms
// must save the pre-increment value so it can be plugged after the store.
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::kZero);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        VisitForStackValue(prop->obj());
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        // Leave receiver on stack
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
        // Copy of key, needed for later store.
        __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY:
      case KEYED_SUPER_PROPERTY:
      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load my have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(rax);
            break;
          case NAMED_PROPERTY:
            __ movp(Operand(rsp, kPointerSize), rax);
            break;
          case KEYED_PROPERTY:
            __ movp(Operand(rsp, 2 * kPointerSize), rax);
            break;
          case NAMED_SUPER_PROPERTY:
          case KEYED_SUPER_PROPERTY:
            UNREACHABLE();
            break;
        }
      }
    }

    // Smi add/sub with overflow bailout: on overflow we fall through to the
    // stub call below with rax unchanged.
    SmiOperationConstraints constraints =
        SmiOperationConstraint::kPreserveSourceRegister |
        SmiOperationConstraint::kBailoutOnNoOverflow;
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  // (Repeated after ToNumber because conversion may change the old value.)
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(rax);
          break;
        case NAMED_PROPERTY:
          __ movp(Operand(rsp, kPointerSize), rax);
          break;
        case KEYED_PROPERTY:
          __ movp(Operand(rsp, 2 * kPointerSize), rax);
          break;
        case NAMED_SUPER_PROPERTY:
        case KEYED_SUPER_PROPERTY:
          UNREACHABLE();
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ movp(rdx, rax);
  __ Move(rax, Smi::FromInt(1));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                                 proxy->hole_check_mode());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(rax);
        }
        // For all contexts except kEffect: We have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(rax);
      }
      break;
    }
    case NAMED_PROPERTY: {
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
}
2454 :
2455 :
2456 58573 : void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
2457 : Expression* sub_expr,
2458 418288 : Handle<String> check) {
2459 : Label materialize_true, materialize_false;
2460 58573 : Label* if_true = NULL;
2461 58573 : Label* if_false = NULL;
2462 58573 : Label* fall_through = NULL;
2463 : context()->PrepareTest(&materialize_true, &materialize_false,
2464 58573 : &if_true, &if_false, &fall_through);
2465 :
2466 : { AccumulatorValueContext context(this);
2467 58573 : VisitForTypeofValue(sub_expr);
2468 : }
2469 58573 : PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2470 :
2471 : Factory* factory = isolate()->factory();
2472 58573 : if (String::Equals(check, factory->number_string())) {
2473 35548 : __ JumpIfSmi(rax, if_true);
2474 17774 : __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
2475 17774 : __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
2476 17774 : Split(equal, if_true, if_false, fall_through);
2477 40799 : } else if (String::Equals(check, factory->string_string())) {
2478 9500 : __ JumpIfSmi(rax, if_false);
2479 4750 : __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
2480 4750 : Split(below, if_true, if_false, fall_through);
2481 36049 : } else if (String::Equals(check, factory->symbol_string())) {
2482 6928 : __ JumpIfSmi(rax, if_false);
2483 3464 : __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
2484 3464 : Split(equal, if_true, if_false, fall_through);
2485 32585 : } else if (String::Equals(check, factory->boolean_string())) {
2486 2351 : __ CompareRoot(rax, Heap::kTrueValueRootIndex);
2487 4702 : __ j(equal, if_true);
2488 2351 : __ CompareRoot(rax, Heap::kFalseValueRootIndex);
2489 2351 : Split(equal, if_true, if_false, fall_through);
2490 30234 : } else if (String::Equals(check, factory->undefined_string())) {
2491 1694 : __ CompareRoot(rax, Heap::kNullValueRootIndex);
2492 3388 : __ j(equal, if_false);
2493 3388 : __ JumpIfSmi(rax, if_false);
2494 : // Check for undetectable objects => true.
2495 1694 : __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
2496 : __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
2497 3388 : Immediate(1 << Map::kIsUndetectable));
2498 1694 : Split(not_zero, if_true, if_false, fall_through);
2499 28540 : } else if (String::Equals(check, factory->function_string())) {
2500 26766 : __ JumpIfSmi(rax, if_false);
2501 : // Check for callable and not undetectable objects => true.
2502 13383 : __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
2503 13383 : __ movzxbl(rdx, FieldOperand(rdx, Map::kBitFieldOffset));
2504 : __ andb(rdx,
2505 13383 : Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
2506 13383 : __ cmpb(rdx, Immediate(1 << Map::kIsCallable));
2507 13383 : Split(equal, if_true, if_false, fall_through);
2508 15157 : } else if (String::Equals(check, factory->object_string())) {
2509 30110 : __ JumpIfSmi(rax, if_false);
2510 15055 : __ CompareRoot(rax, Heap::kNullValueRootIndex);
2511 30110 : __ j(equal, if_true);
2512 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2513 15055 : __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rdx);
2514 30110 : __ j(below, if_false);
2515 : // Check for callable or undetectable objects => false.
2516 : __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
2517 30110 : Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
2518 15055 : Split(zero, if_true, if_false, fall_through);
2519 : } else {
2520 153 : if (if_false != fall_through) __ jmp(if_false);
2521 : }
2522 58573 : context()->Plug(if_true, if_false);
2523 58573 : }
2524 :
2525 :
2526 4143940 : void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
2527 : Comment cmnt(masm_, "[ CompareOperation");
2528 :
2529 : // First we try a fast inlined version of the compare when one of
2530 : // the operands is a literal.
2531 690862 : if (TryLiteralCompare(expr)) return;
2532 :
2533 : // Always perform the comparison for its control flow. Pack the result
2534 : // into the expression's context after the comparison is performed.
2535 : Label materialize_true, materialize_false;
2536 448492 : Label* if_true = NULL;
2537 448492 : Label* if_false = NULL;
2538 448492 : Label* fall_through = NULL;
2539 : context()->PrepareTest(&materialize_true, &materialize_false,
2540 448492 : &if_true, &if_false, &fall_through);
2541 :
2542 : Token::Value op = expr->op();
2543 448492 : VisitForStackValue(expr->left());
2544 448492 : switch (op) {
2545 : case Token::IN:
2546 6334 : VisitForStackValue(expr->right());
2547 6334 : SetExpressionPosition(expr);
2548 6334 : EmitHasProperty();
2549 6334 : PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
2550 6334 : __ CompareRoot(rax, Heap::kTrueValueRootIndex);
2551 6334 : Split(equal, if_true, if_false, fall_through);
2552 6334 : break;
2553 :
2554 : case Token::INSTANCEOF: {
2555 5517 : VisitForAccumulatorValue(expr->right());
2556 5517 : SetExpressionPosition(expr);
2557 5517 : PopOperand(rdx);
2558 11034 : __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
2559 5517 : RestoreContext();
2560 5517 : PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
2561 5517 : __ CompareRoot(rax, Heap::kTrueValueRootIndex);
2562 5517 : Split(equal, if_true, if_false, fall_through);
2563 5517 : break;
2564 : }
2565 :
2566 : default: {
2567 436641 : VisitForAccumulatorValue(expr->right());
2568 436641 : SetExpressionPosition(expr);
2569 436641 : Condition cc = CompareIC::ComputeCondition(op);
2570 436641 : PopOperand(rdx);
2571 :
2572 436641 : bool inline_smi_code = ShouldInlineSmiCase(op);
2573 436641 : JumpPatchSite patch_site(masm_);
2574 436641 : if (inline_smi_code) {
2575 : Label slow_case;
2576 145212 : __ movp(rcx, rdx);
2577 145212 : __ orp(rcx, rax);
2578 145212 : patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
2579 145212 : __ cmpp(rdx, rax);
2580 145212 : Split(cc, if_true, if_false, NULL);
2581 145212 : __ bind(&slow_case);
2582 : }
2583 :
2584 873282 : Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
2585 436641 : CallIC(ic, expr->CompareOperationFeedbackId());
2586 436641 : patch_site.EmitPatchInfo();
2587 :
2588 436641 : PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2589 436641 : __ testp(rax, rax);
2590 436641 : Split(cc, if_true, if_false, fall_through);
2591 : }
2592 : }
2593 :
2594 : // Convert the result of the comparison into one expected for this
2595 : // expression's context.
2596 448492 : context()->Plug(if_true, if_false);
2597 : }
2598 :
2599 :
2600 125224 : void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
2601 : Expression* sub_expr,
2602 196970 : NilValue nil) {
2603 : Label materialize_true, materialize_false;
2604 62612 : Label* if_true = NULL;
2605 62612 : Label* if_false = NULL;
2606 62612 : Label* fall_through = NULL;
2607 : context()->PrepareTest(&materialize_true, &materialize_false,
2608 62612 : &if_true, &if_false, &fall_through);
2609 :
2610 62612 : VisitForAccumulatorValue(sub_expr);
2611 62612 : PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2612 62612 : if (expr->op() == Token::EQ_STRICT) {
2613 : Heap::RootListIndex nil_value = nil == kNullValue ?
2614 : Heap::kNullValueRootIndex :
2615 58045 : Heap::kUndefinedValueRootIndex;
2616 58045 : __ CompareRoot(rax, nil_value);
2617 58045 : Split(equal, if_true, if_false, fall_through);
2618 : } else {
2619 9134 : __ JumpIfSmi(rax, if_false);
2620 4567 : __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
2621 : __ testb(FieldOperand(rax, Map::kBitFieldOffset),
2622 9134 : Immediate(1 << Map::kIsUndetectable));
2623 4567 : Split(not_zero, if_true, if_false, fall_through);
2624 : }
2625 62612 : context()->Plug(if_true, if_false);
2626 62612 : }
2627 :
2628 :
2629 12976260 : Register FullCodeGenerator::result_register() {
2630 12976260 : return rax;
2631 : }
2632 :
2633 :
2634 54818 : Register FullCodeGenerator::context_register() {
2635 54818 : return rsi;
2636 : }
2637 :
2638 180973 : void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
2639 : DCHECK(IsAligned(frame_offset, kPointerSize));
2640 271462 : __ movp(value, Operand(rbp, frame_offset));
2641 90489 : }
2642 :
2643 23340 : void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
2644 : DCHECK(IsAligned(frame_offset, kPointerSize));
2645 35010 : __ movp(Operand(rbp, frame_offset), value);
2646 11670 : }
2647 :
2648 :
2649 14124 : void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
2650 14124 : __ movp(dst, ContextOperand(rsi, context_index));
2651 7062 : }
2652 :
2653 :
2654 4653 : void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
2655 4608 : DeclarationScope* closure_scope = scope()->GetClosureScope();
2656 4608 : if (closure_scope->is_script_scope() ||
2657 : closure_scope->is_module_scope()) {
2658 : // Contexts nested in the native context have a canonical empty function
2659 : // as their closure, not the anonymous closure containing the global
2660 : // code.
2661 45 : __ movp(rax, NativeContextOperand());
2662 45 : PushOperand(ContextOperand(rax, Context::CLOSURE_INDEX));
2663 4563 : } else if (closure_scope->is_eval_scope()) {
2664 : // Contexts created by a call to eval have the same closure as the
2665 : // context calling eval, not the anonymous closure containing the eval
2666 : // code. Fetch it from the context.
2667 3916 : PushOperand(ContextOperand(rsi, Context::CLOSURE_INDEX));
2668 : } else {
2669 : DCHECK(closure_scope->is_function_scope());
2670 1294 : PushOperand(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2671 : }
2672 4608 : }
2673 :
2674 :
2675 : #undef __
2676 :
2677 :
2678 : static const byte kJnsInstruction = 0x79;
2679 : static const byte kNopByteOne = 0x66;
2680 : static const byte kNopByteTwo = 0x90;
2681 : #ifdef DEBUG
2682 : static const byte kCallInstruction = 0xe8;
2683 : #endif
2684 :
2685 :
2686 9655 : void BackEdgeTable::PatchAt(Code* unoptimized_code,
2687 : Address pc,
2688 : BackEdgeState target_state,
2689 : Code* replacement_code) {
2690 9655 : Address call_target_address = pc - kIntSize;
2691 : Address jns_instr_address = call_target_address - 3;
2692 : Address jns_offset_address = call_target_address - 2;
2693 :
2694 9655 : switch (target_state) {
2695 : case INTERRUPT:
2696 : // sub <profiling_counter>, <delta> ;; Not changed
2697 : // jns ok
2698 : // call <interrupt stub>
2699 : // ok:
2700 4824 : *jns_instr_address = kJnsInstruction;
2701 4824 : *jns_offset_address = kJnsOffset;
2702 4824 : break;
2703 : case ON_STACK_REPLACEMENT:
2704 : // sub <profiling_counter>, <delta> ;; Not changed
2705 : // nop
2706 : // nop
2707 : // call <on-stack replacment>
2708 : // ok:
2709 4831 : *jns_instr_address = kNopByteOne;
2710 4831 : *jns_offset_address = kNopByteTwo;
2711 4831 : break;
2712 : }
2713 :
2714 : Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
2715 : call_target_address, unoptimized_code,
2716 9655 : replacement_code->entry());
2717 : unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
2718 9655 : unoptimized_code, call_target_address, replacement_code);
2719 9655 : }
2720 :
2721 :
2722 0 : BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
2723 : Isolate* isolate,
2724 : Code* unoptimized_code,
2725 : Address pc) {
2726 : Address call_target_address = pc - kIntSize;
2727 : Address jns_instr_address = call_target_address - 3;
2728 : DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
2729 :
2730 0 : if (*jns_instr_address == kJnsInstruction) {
2731 : DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
2732 : DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
2733 : Assembler::target_address_at(call_target_address,
2734 : unoptimized_code));
2735 : return INTERRUPT;
2736 : }
2737 :
2738 : DCHECK_EQ(kNopByteOne, *jns_instr_address);
2739 : DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
2740 :
2741 : DCHECK_EQ(
2742 : isolate->builtins()->OnStackReplacement()->entry(),
2743 : Assembler::target_address_at(call_target_address, unoptimized_code));
2744 0 : return ON_STACK_REPLACEMENT;
2745 : }
2746 :
2747 : } // namespace internal
2748 : } // namespace v8
2749 :
2750 : #endif // V8_TARGET_ARCH_X64
|