Line data Source code
1 : // Copyright 2013 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #if V8_TARGET_ARCH_X64
6 :
7 : #include "src/crankshaft/x64/lithium-codegen-x64.h"
8 :
9 : #include "src/base/bits.h"
10 : #include "src/builtins/builtins-constructor.h"
11 : #include "src/code-factory.h"
12 : #include "src/code-stubs.h"
13 : #include "src/crankshaft/hydrogen-osr.h"
14 : #include "src/ic/ic.h"
15 : #include "src/ic/stub-cache.h"
16 : #include "src/objects-inl.h"
17 :
18 : namespace v8 {
19 : namespace internal {
20 :
21 :
22 : // When invoking builtins, we need to record the safepoint in the middle of
23 : // the invoke instruction sequence generated by the macro assembler.
24 : class SafepointGenerator final : public CallWrapper {
25 : public:
26 : SafepointGenerator(LCodeGen* codegen,
27 : LPointerMap* pointers,
28 : Safepoint::DeoptMode mode)
29 : : codegen_(codegen),
30 : pointers_(pointers),
31 866353 : deopt_mode_(mode) { }
32 0 : virtual ~SafepointGenerator() {}
33 :
34 22529 : void BeforeCall(int call_size) const override {}
35 :
36 870975 : void AfterCall() const override {
37 870975 : codegen_->RecordSafepoint(pointers_, deopt_mode_);
38 870975 : }
39 :
40 : private:
41 : LCodeGen* codegen_;
42 : LPointerMap* pointers_;
43 : Safepoint::DeoptMode deopt_mode_;
44 : };
45 :
46 :
47 : #define __ masm()->
48 :
49 279085 : bool LCodeGen::GenerateCode() {
50 : LPhase phase("Z_Code generation", chunk());
51 : DCHECK(is_unused());
52 279085 : status_ = GENERATING;
53 :
54 : // Open a frame scope to indicate that there is a frame on the stack. The
55 : // MANUAL indicates that the scope shouldn't actually generate code to set up
56 : // the frame (that is done in GeneratePrologue).
57 558170 : FrameScope frame_scope(masm_, StackFrame::MANUAL);
58 :
59 558169 : return GeneratePrologue() &&
60 558132 : GenerateBody() &&
61 558095 : GenerateDeferredCode() &&
62 837180 : GenerateJumpTable() &&
63 279085 : GenerateSafepointTable();
64 : }
65 :
66 :
67 279048 : void LCodeGen::FinishCode(Handle<Code> code) {
68 : DCHECK(is_done());
69 558096 : code->set_stack_slots(GetTotalFrameSlotCount());
70 558096 : code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
71 279048 : PopulateDeoptimizationData(code);
72 279048 : }
73 :
74 :
75 : #ifdef _MSC_VER
76 : void LCodeGen::MakeSureStackPagesMapped(int offset) {
77 : const int kPageSize = 4 * KB;
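// Windows commits stack memory lazily, one page at a time, via a guard
// page; a store more than a page below the committed region would fault.
// Touch every page of the newly reserved area in order to keep it mapped.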
78 : for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
79 : __ movp(Operand(rsp, offset), rax);
80 : }
81 : }
82 : #endif
83 :
84 :
85 108 : void LCodeGen::SaveCallerDoubles() {
86 : DCHECK(info()->saves_caller_doubles());
87 : DCHECK(NeedsEagerFrame());
88 132 : Comment(";;; Save clobbered callee double registers");
89 : int count = 0;
90 54 : BitVector* doubles = chunk()->allocated_double_registers();
91 : BitVector::Iterator save_iterator(doubles);
92 264 : while (!save_iterator.Done()) {
93 : __ Movsd(MemOperand(rsp, count * kDoubleSize),
94 156 : XMMRegister::from_code(save_iterator.Current()));
95 78 : save_iterator.Advance();
96 78 : count++;
97 : }
98 54 : }
99 :
100 :
101 356 : void LCodeGen::RestoreCallerDoubles() {
102 : DCHECK(info()->saves_caller_doubles());
103 : DCHECK(NeedsEagerFrame());
104 452 : Comment(";;; Restore clobbered callee double registers");
105 178 : BitVector* doubles = chunk()->allocated_double_registers();
106 : BitVector::Iterator save_iterator(doubles);
107 : int count = 0;
108 1082 : while (!save_iterator.Done()) {
109 : __ Movsd(XMMRegister::from_code(save_iterator.Current()),
110 548 : MemOperand(rsp, count * kDoubleSize));
111 274 : save_iterator.Advance();
112 274 : count++;
113 : }
114 178 : }
115 :
116 :
117 279085 : bool LCodeGen::GeneratePrologue() {
118 : DCHECK(is_generating());
119 :
120 1879559 : if (info()->IsOptimizing()) {
121 255552 : ProfileEntryHookStub::MaybeCallEntryHook(masm_);
122 : }
123 :
124 279085 : info()->set_prologue_offset(masm_->pc_offset());
125 279085 : if (NeedsEagerFrame()) {
126 : DCHECK(!frame_is_built_);
127 260480 : frame_is_built_ = true;
128 260480 : if (info()->IsStub()) {
129 4928 : __ StubPrologue(StackFrame::STUB);
130 : } else {
131 511104 : __ Prologue(info()->GeneratePreagedPrologue());
132 : }
133 : }
134 :
135 : // Reserve space for the stack slots needed by the code.
136 : int slots = GetStackSlotCount();
137 279085 : if (slots > 0) {
138 260480 : if (FLAG_debug_code) {
139 96 : __ subp(rsp, Immediate(slots * kPointerSize));
140 : #ifdef _MSC_VER
141 : MakeSureStackPagesMapped(slots * kPointerSize);
142 : #endif
143 48 : __ Push(rax);
144 96 : __ Set(rax, slots);
145 48 : __ Set(kScratchRegister, kSlotsZapValue);
146 : Label loop;
147 48 : __ bind(&loop);
148 : __ movp(MemOperand(rsp, rax, times_pointer_size, 0),
149 144 : kScratchRegister);
150 48 : __ decl(rax);
151 48 : __ j(not_zero, &loop);
152 48 : __ Pop(rax);
153 : } else {
154 520864 : __ subp(rsp, Immediate(slots * kPointerSize));
155 : #ifdef _MSC_VER
156 : MakeSureStackPagesMapped(slots * kPointerSize);
157 : #endif
158 : }
159 :
160 260480 : if (info()->saves_caller_doubles()) {
161 54 : SaveCallerDoubles();
162 : }
163 : }
164 279085 : return !is_aborted();
165 : }
166 :
167 :
168 255552 : void LCodeGen::DoPrologue(LPrologue* instr) {
169 322360 : Comment(";;; Prologue begin");
170 :
171 : // Possibly allocate a local context.
172 511104 : if (info_->scope()->NeedsContext()) {
173 6469 : Comment(";;; Allocate local context");
174 : bool need_write_barrier = true;
175 : // Argument to NewContext is the function, which is still in rdi.
176 6469 : int slots = info_->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
177 : Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
178 6469 : if (info()->scope()->is_script_scope()) {
179 532 : __ Push(rdi);
180 1064 : __ Push(info()->scope()->scope_info());
181 532 : __ CallRuntime(Runtime::kNewScriptContext);
182 : deopt_mode = Safepoint::kLazyDeopt;
183 : } else {
184 5937 : if (slots <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
185 : Callable callable = CodeFactory::FastNewFunctionContext(
186 5937 : isolate(), info()->scope()->scope_type());
187 11874 : __ Set(FastNewFunctionContextDescriptor::SlotsRegister(), slots);
188 5937 : __ Call(callable.code(), RelocInfo::CODE_TARGET);
189 : // Result of FastNewFunctionContextStub is always in new space.
190 : need_write_barrier = false;
191 : } else {
192 0 : __ Push(rdi);
193 0 : __ Push(Smi::FromInt(info()->scope()->scope_type()));
194 0 : __ CallRuntime(Runtime::kNewFunctionContext);
195 : }
196 : }
197 6469 : RecordSafepoint(deopt_mode);
198 :
199 : // Context is returned in rax. It replaces the context passed to us.
200 : // It's saved in the stack and kept live in rsi.
201 6469 : __ movp(rsi, rax);
202 19407 : __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
203 :
204 : // Copy any necessary parameters into the context.
205 6469 : int num_parameters = info()->scope()->num_parameters();
206 12938 : int first_parameter = info()->scope()->has_this_declaration() ? -1 : 0;
207 15169 : for (int i = first_parameter; i < num_parameters; i++) {
208 6779 : Variable* var = (i == -1) ? info()->scope()->receiver()
209 13533 : : info()->scope()->parameter(i);
210 8700 : if (var->IsContextSlot()) {
211 2912 : int parameter_offset = StandardFrameConstants::kCallerSPOffset +
212 5824 : (num_parameters - 1 - i) * kPointerSize;
213 : // Load parameter from stack.
214 8736 : __ movp(rax, Operand(rbp, parameter_offset));
215 : // Store it in the context.
216 : int context_offset = Context::SlotOffset(var->index());
217 8736 : __ movp(Operand(rsi, context_offset), rax);
218 : // Update the write barrier. This clobbers rax and rbx.
219 2912 : if (need_write_barrier) {
220 : __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
221 2912 : } else if (FLAG_debug_code) {
222 : Label done;
223 : __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
224 0 : __ Abort(kExpectedNewSpaceObject);
225 0 : __ bind(&done);
226 : }
227 : }
228 : }
229 6469 : Comment(";;; End allocate local context");
230 : }
231 :
232 255552 : Comment(";;; Prologue end");
233 255552 : }
234 :
235 :
236 16472 : void LCodeGen::GenerateOsrPrologue() {
237 : // Generate the OSR entry prologue at the first unknown OSR value, or if there
238 : // are none, at the OSR entrypoint instruction.
239 32944 : if (osr_pc_offset_ >= 0) return;
240 :
241 4738 : osr_pc_offset_ = masm()->pc_offset();
242 :
243 : // Adjust the frame size, subsuming the unoptimized frame into the
244 : // optimized frame.
245 2369 : int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots();
246 : DCHECK(slots >= 0);
247 4738 : __ subp(rsp, Immediate(slots * kPointerSize));
248 : }
249 :
250 :
251 30835953 : void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
252 30835953 : if (instr->IsCall()) {
253 1677367 : EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
254 : }
255 30835927 : if (!instr->IsLazyBailout() && !instr->IsGap()) {
256 : safepoints_.BumpLastLazySafepointIndex();
257 : }
258 30835878 : }
259 :
260 :
261 37969944 : void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) {
262 : if (FLAG_debug_code && FLAG_enable_slow_asserts && instr->HasResult() &&
263 : instr->hydrogen_value()->representation().IsInteger32() &&
264 : instr->result()->IsRegister()) {
265 8493 : __ AssertZeroExtended(ToRegister(instr->result()));
266 : }
267 :
268 37969944 : if (instr->HasResult() && instr->MustSignExtendResult(chunk())) {
269 : // We sign extend the dehoisted key at the definition point when the pointer
270 : // size is 64-bit. For the x32 port, we sign extend the dehoisted key at the use
271 : // points and MustSignExtendResult is always false. We can't use
272 : // STATIC_ASSERT here as the pointer size is 32-bit for x32.
273 : DCHECK(kPointerSize == kInt64Size);
274 16986 : if (instr->result()->IsRegister()) {
275 16986 : Register result_reg = ToRegister(instr->result());
276 8493 : __ movsxlq(result_reg, result_reg);
277 : } else {
278 : // Sign extend the 32bit result in the stack slots.
279 : DCHECK(instr->result()->IsStackSlot());
280 0 : Operand src = ToOperand(instr->result());
281 0 : __ movsxlq(kScratchRegister, src);
282 0 : __ movq(src, kScratchRegister);
283 : }
284 : }
285 30835950 : }
286 :
287 :
288 279047 : bool LCodeGen::GenerateJumpTable() {
289 3442061 : if (jump_table_.length() == 0) return !is_aborted();
290 :
291 : Label needs_frame;
292 113516 : Comment(";;; -------------------- Jump table --------------------");
293 1342340 : for (int i = 0; i < jump_table_.length(); i++) {
294 : Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
295 1115308 : __ bind(&table_entry->label);
296 557654 : Address entry = table_entry->address;
297 557654 : DeoptComment(table_entry->deopt_info);
298 557654 : if (table_entry->needs_frame) {
299 : DCHECK(!info()->saves_caller_doubles());
300 24143 : __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
301 24143 : __ call(&needs_frame);
302 : } else {
303 533511 : if (info()->saves_caller_doubles()) {
304 : DCHECK(info()->IsStub());
305 124 : RestoreCallerDoubles();
306 : }
307 533511 : __ call(entry, RelocInfo::RUNTIME_ENTRY);
308 : }
309 : }
310 :
311 113516 : if (needs_frame.is_linked()) {
312 18464 : __ bind(&needs_frame);
313 : /* stack layout
314 : 3: return address <-- rsp
315 : 2: garbage
316 : 1: garbage
317 : 0: garbage
318 : */
319 : // Reserve space for stub marker.
320 18464 : __ subp(rsp, Immediate(TypedFrameConstants::kFrameTypeSize));
321 : __ Push(MemOperand(
322 36928 : rsp, TypedFrameConstants::kFrameTypeSize)); // Copy return address.
323 18464 : __ Push(kScratchRegister);
324 :
325 : /* stack layout
326 : 3: return address
327 : 2: garbage
328 : 1: return address
329 : 0: entry address <-- rsp
330 : */
331 :
332 : // Create a stack frame.
333 55392 : __ movp(MemOperand(rsp, 3 * kPointerSize), rbp);
334 55392 : __ leap(rbp, MemOperand(rsp, 3 * kPointerSize));
335 :
336 : // This variant of deopt can only be used with stubs. Since we don't
337 : // have a function pointer to install in the stack frame that we're
338 : // building, install a special marker there instead.
339 : DCHECK(info()->IsStub());
340 : __ movp(MemOperand(rsp, 2 * kPointerSize),
341 55392 : Immediate(StackFrame::TypeToMarker(StackFrame::STUB)));
342 :
343 : /* stack layout
344 : 3: old rbp
345 : 2: stub marker
346 : 1: return address
347 : 0: entry address <-- rsp
348 : */
349 18464 : __ ret(0);
350 : }
351 :
352 113516 : return !is_aborted();
353 : }
354 :
355 :
356 279047 : bool LCodeGen::GenerateDeferredCode() {
357 : DCHECK(is_generating());
358 491112 : if (deferred_.length() > 0) {
359 578288 : for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
360 308316 : LDeferredCode* code = deferred_[i];
361 :
362 : HValue* value =
363 308316 : instructions_->at(code->instruction_index())->hydrogen_value();
364 1248509 : RecordAndWritePosition(value->position());
365 :
366 : Comment(";;; <@%d,#%d> "
367 : "-------------------- Deferred %s --------------------",
368 : code->instruction_index(),
369 154158 : code->instr()->hydrogen_value()->id(),
370 462474 : code->instr()->Mnemonic());
371 308316 : __ bind(code->entry());
372 154158 : if (NeedsDeferredFrame()) {
373 2646 : Comment(";;; Build frame");
374 : DCHECK(!frame_is_built_);
375 : DCHECK(info()->IsStub());
376 2646 : frame_is_built_ = true;
377 : // Build the frame in such a way that rsi isn't trashed.
378 2646 : __ pushq(rbp); // Caller's frame pointer.
379 2646 : __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::STUB)));
380 7938 : __ leap(rbp, Operand(rsp, TypedFrameConstants::kFixedFrameSizeFromFp));
381 2646 : Comment(";;; Deferred code");
382 : }
383 154158 : code->Generate();
384 154158 : if (NeedsDeferredFrame()) {
385 2646 : __ bind(code->done());
386 2646 : Comment(";;; Destroy frame");
387 : DCHECK(frame_is_built_);
388 2646 : frame_is_built_ = false;
389 2646 : __ movp(rsp, rbp);
390 2646 : __ popq(rbp);
391 : }
392 154158 : __ jmp(code->exit());
393 : }
394 : }
395 :
396 : // Deferred code is the last part of the instruction sequence. Mark
397 : // the generated code as done unless we bailed out.
398 279047 : if (!is_aborted()) status_ = DONE;
399 279047 : return !is_aborted();
400 : }
401 :
402 :
403 0 : bool LCodeGen::GenerateSafepointTable() {
404 : DCHECK(is_done());
405 279048 : safepoints_.Emit(masm(), GetTotalFrameSlotCount());
406 0 : return !is_aborted();
407 : }
408 :
409 :
410 0 : Register LCodeGen::ToRegister(int index) const {
411 0 : return Register::from_code(index);
412 : }
413 :
414 :
415 0 : XMMRegister LCodeGen::ToDoubleRegister(int index) const {
416 0 : return XMMRegister::from_code(index);
417 : }
418 :
419 :
420 6055551 : Register LCodeGen::ToRegister(LOperand* op) const {
421 : DCHECK(op->IsRegister());
422 6055551 : return ToRegister(op->index());
423 : }
424 :
425 :
426 157923 : XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
427 : DCHECK(op->IsDoubleRegister());
428 157923 : return ToDoubleRegister(op->index());
429 : }
430 :
431 :
432 253963 : bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
433 507926 : return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
434 : }
435 :
436 :
437 0 : bool LCodeGen::IsExternalConstant(LConstantOperand* op) const {
438 153436 : return chunk_->LookupLiteralRepresentation(op).IsExternal();
439 : }
440 :
441 :
442 29333 : bool LCodeGen::IsDehoistedKeyConstant(LConstantOperand* op) const {
443 58666 : return op->IsConstantOperand() &&
444 58666 : chunk_->IsDehoistedKey(chunk_->LookupConstant(op));
445 : }
446 :
447 :
448 151130 : bool LCodeGen::IsSmiConstant(LConstantOperand* op) const {
449 302260 : return chunk_->LookupLiteralRepresentation(op).IsSmi();
450 : }
451 :
452 :
453 31424 : int32_t LCodeGen::ToInteger32(LConstantOperand* op) const {
454 31424 : return ToRepresentation(op, Representation::Integer32());
455 : }
456 :
457 :
458 0 : int32_t LCodeGen::ToRepresentation(LConstantOperand* op,
459 : const Representation& r) const {
460 800904 : HConstant* constant = chunk_->LookupConstant(op);
461 : int32_t value = constant->Integer32Value();
462 188578 : if (r.IsInteger32()) return value;
463 : DCHECK(SmiValuesAre31Bits() && r.IsSmiOrTagged());
464 : return static_cast<int32_t>(reinterpret_cast<intptr_t>(Smi::FromInt(value)));
465 : }
466 :
467 :
468 5 : Smi* LCodeGen::ToSmi(LConstantOperand* op) const {
469 95 : HConstant* constant = chunk_->LookupConstant(op);
470 5 : return Smi::FromInt(constant->Integer32Value());
471 : }
472 :
473 :
474 0 : double LCodeGen::ToDouble(LConstantOperand* op) const {
475 9145 : HConstant* constant = chunk_->LookupConstant(op);
476 : DCHECK(constant->HasDoubleValue());
477 0 : return constant->DoubleValue();
478 : }
479 :
480 :
481 0 : ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const {
482 0 : HConstant* constant = chunk_->LookupConstant(op);
483 : DCHECK(constant->HasExternalReferenceValue());
484 0 : return constant->ExternalReferenceValue();
485 : }
486 :
487 :
488 1777146 : Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
489 1777146 : HConstant* constant = chunk_->LookupConstant(op);
490 : DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
491 1777146 : return constant->handle(isolate());
492 : }
493 :
494 :
495 : static int ArgumentsOffsetWithoutFrame(int index) {
496 : DCHECK(index < 0);
497 0 : return -(index + 1) * kPointerSize + kPCOnStackSize;
498 : }
499 :
500 :
501 3289165 : Operand LCodeGen::ToOperand(LOperand* op) const {
502 : // Does not handle registers. In X64 assembler, plain registers are not
503 : // representable as an Operand.
504 : DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
505 3289165 : if (NeedsEagerFrame()) {
506 3289165 : return Operand(rbp, FrameSlotToFPOffset(op->index()));
507 : } else {
508 : // Retrieve parameter without eager stack-frame relative to the
509 : // stack-pointer.
510 0 : return Operand(rsp, ArgumentsOffsetWithoutFrame(op->index()));
511 : }
512 : }
513 :
514 :
515 11509843 : void LCodeGen::WriteTranslation(LEnvironment* environment,
516 : Translation* translation) {
517 9256928 : if (environment == NULL) return;
518 :
519 : // The translation includes one command per value in the environment.
520 : int translation_size = environment->translation_size();
521 :
522 4587586 : WriteTranslation(environment->outer(), translation);
523 4587585 : WriteTranslationFrame(environment, translation);
524 :
525 4587587 : int object_index = 0;
526 4587587 : int dematerialized_index = 0;
527 33519327 : for (int i = 0; i < translation_size; ++i) {
528 28931741 : LOperand* value = environment->values()->at(i);
529 : AddToTranslation(
530 : environment, translation, value, environment->HasTaggedValueAt(i),
531 57863486 : environment->HasUint32ValueAt(i), &object_index, &dematerialized_index);
532 : }
533 : }
534 :
535 :
536 28985819 : void LCodeGen::AddToTranslation(LEnvironment* environment,
537 : Translation* translation,
538 : LOperand* op,
539 : bool is_tagged,
540 : bool is_uint32,
541 : int* object_index_pointer,
542 13239200 : int* dematerialized_index_pointer) {
543 28973723 : if (op == LEnvironment::materialization_marker()) {
544 13588 : int object_index = (*object_index_pointer)++;
545 13588 : if (environment->ObjectIsDuplicateAt(object_index)) {
546 : int dupe_of = environment->ObjectDuplicateOfAt(object_index);
547 1492 : translation->DuplicateObject(dupe_of);
548 1492 : return;
549 : }
550 : int object_length = environment->ObjectLengthAt(object_index);
551 12096 : if (environment->ObjectIsArgumentsAt(object_index)) {
552 6368 : translation->BeginArgumentsObject(object_length);
553 : } else {
554 5728 : translation->BeginCapturedObject(object_length);
555 : }
556 12096 : int dematerialized_index = *dematerialized_index_pointer;
557 12096 : int env_offset = environment->translation_size() + dematerialized_index;
558 12096 : *dematerialized_index_pointer += object_length;
559 54050 : for (int i = 0; i < object_length; ++i) {
560 83908 : LOperand* value = environment->values()->at(env_offset + i);
561 : AddToTranslation(environment,
562 : translation,
563 : value,
564 : environment->HasTaggedValueAt(env_offset + i),
565 : environment->HasUint32ValueAt(env_offset + i),
566 : object_index_pointer,
567 83908 : dematerialized_index_pointer);
568 : }
569 : return;
570 : }
571 :
572 28960135 : if (op->IsStackSlot()) {
573 : int index = op->index();
574 13348933 : if (is_tagged) {
575 12835533 : translation->StoreStackSlot(index);
576 513400 : } else if (is_uint32) {
577 1833 : translation->StoreUint32StackSlot(index);
578 : } else {
579 511567 : translation->StoreInt32StackSlot(index);
580 : }
581 15611202 : } else if (op->IsDoubleStackSlot()) {
582 : int index = op->index();
583 130801 : translation->StoreDoubleStackSlot(index);
584 15480401 : } else if (op->IsRegister()) {
585 2210431 : Register reg = ToRegister(op);
586 2210431 : if (is_tagged) {
587 2079496 : translation->StoreRegister(reg);
588 130935 : } else if (is_uint32) {
589 90 : translation->StoreUint32Register(reg);
590 : } else {
591 130845 : translation->StoreInt32Register(reg);
592 : }
593 13269970 : } else if (op->IsDoubleRegister()) {
594 30770 : XMMRegister reg = ToDoubleRegister(op);
595 30770 : translation->StoreDoubleRegister(reg);
596 13239200 : } else if (op->IsConstantOperand()) {
597 13239200 : HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
598 13239201 : int src_index = DefineDeoptimizationLiteral(constant->handle(isolate()));
599 13239201 : translation->StoreLiteral(src_index);
600 : } else {
601 0 : UNREACHABLE();
602 : }
603 : }
604 :
605 :
606 575985 : void LCodeGen::CallCodeGeneric(Handle<Code> code,
607 : RelocInfo::Mode mode,
608 : LInstruction* instr,
609 : SafepointMode safepoint_mode,
610 : int argc) {
611 : DCHECK(instr != NULL);
612 823531 : __ call(code, mode);
613 575986 : RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc);
614 :
615 : // Signal that we don't inline smi code before these stubs in the
616 : // optimizing code generator.
617 575986 : if (code->kind() == Code::BINARY_OP_IC ||
618 : code->kind() == Code::COMPARE_IC) {
619 247546 : __ nop();
620 : }
621 575986 : }
622 :
623 :
624 0 : void LCodeGen::CallCode(Handle<Code> code,
625 : RelocInfo::Mode mode,
626 : LInstruction* instr) {
627 575986 : CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0);
628 0 : }
629 :
630 :
631 116305 : void LCodeGen::CallRuntime(const Runtime::Function* function,
632 : int num_arguments,
633 : LInstruction* instr,
634 : SaveFPRegsMode save_doubles) {
635 : DCHECK(instr != NULL);
636 : DCHECK(instr->HasPointerMap());
637 :
638 116305 : __ CallRuntime(function, num_arguments, save_doubles);
639 :
640 116305 : RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
641 116305 : }
642 :
643 :
644 21452 : void LCodeGen::LoadContextFromDeferred(LOperand* context) {
645 21452 : if (context->IsRegister()) {
646 1133 : if (!ToRegister(context).is(rsi)) {
647 21398 : __ movp(rsi, ToRegister(context));
648 : }
649 20319 : } else if (context->IsStackSlot()) {
650 28904 : __ movp(rsi, ToOperand(context));
651 5867 : } else if (context->IsConstantOperand()) {
652 : HConstant* constant =
653 5867 : chunk_->LookupConstant(LConstantOperand::cast(context));
654 11734 : __ Move(rsi, Handle<Object>::cast(constant->handle(isolate())));
655 : } else {
656 0 : UNREACHABLE();
657 : }
658 21452 : }
659 :
660 :
661 :
662 21452 : void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
663 : int argc,
664 : LInstruction* instr,
665 : LOperand* context) {
666 21452 : LoadContextFromDeferred(context);
667 :
668 21452 : __ CallRuntimeSaveDoubles(id);
669 : RecordSafepointWithRegisters(
670 : instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
671 21452 : }
672 :
673 :
674 2490315 : void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
675 : Safepoint::DeoptMode mode) {
676 4824986 : environment->set_has_been_used();
677 2490315 : if (!environment->HasBeenRegistered()) {
678 : // Physical stack frame layout:
679 : // -x ............. -4 0 ..................................... y
680 : // [incoming arguments] [spill slots] [pushed outgoing arguments]
681 :
682 : // Layout of the environment:
683 : // 0 ..................................................... size-1
684 : // [parameters] [locals] [expression stack including arguments]
685 :
686 : // Layout of the translation:
687 : // 0 ........................................................ size - 1 + 4
688 : // [expression stack including arguments] [locals] [4 words] [parameters]
689 : // |>------------ translation_size ------------<|
690 :
691 : int frame_count = 0;
692 : int jsframe_count = 0;
693 9175172 : for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
694 4587586 : ++frame_count;
695 4587586 : if (e->frame_type() == JS_FUNCTION) {
696 3995333 : ++jsframe_count;
697 : }
698 : }
699 4669341 : Translation translation(&translations_, frame_count, jsframe_count, zone());
700 2334671 : WriteTranslation(environment, &translation);
701 2334671 : int deoptimization_index = deoptimizations_.length();
702 2334671 : int pc_offset = masm()->pc_offset();
703 : environment->Register(deoptimization_index,
704 : translation.index(),
705 2334671 : (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
706 : deoptimizations_.Add(environment, environment->zone());
707 : }
708 2490317 : }
709 :
710 779377 : void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
711 : DeoptimizeReason deopt_reason,
712 : Deoptimizer::BailoutType bailout_type) {
713 779377 : LEnvironment* environment = instr->environment();
714 779377 : RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
715 : DCHECK(environment->HasBeenRegistered());
716 : int id = environment->deoptimization_index();
717 : Address entry =
718 2979145 : Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
719 779377 : if (entry == NULL) {
720 0 : Abort(kBailoutWasNotPrepared);
721 779377 : return;
722 : }
723 :
724 779377 : if (DeoptEveryNTimes()) {
725 332 : ExternalReference count = ExternalReference::stress_deopt_count(isolate());
726 : Label no_deopt;
727 332 : __ pushfq();
728 332 : __ pushq(rax);
729 332 : Operand count_operand = masm()->ExternalOperand(count, kScratchRegister);
730 332 : __ movl(rax, count_operand);
731 332 : __ subl(rax, Immediate(1));
732 332 : __ j(not_zero, &no_deopt, Label::kNear);
733 332 : if (FLAG_trap_on_deopt) __ int3();
734 664 : __ movl(rax, Immediate(FLAG_deopt_every_n_times));
735 332 : __ movl(count_operand, rax);
736 332 : __ popq(rax);
737 332 : __ popfq();
738 : DCHECK(frame_is_built_);
739 332 : __ call(entry, RelocInfo::RUNTIME_ENTRY);
740 332 : __ bind(&no_deopt);
741 332 : __ movl(count_operand, rax);
742 332 : __ popq(rax);
743 332 : __ popfq();
744 : }
745 :
746 779377 : if (info()->ShouldTrapOnDeopt()) {
747 : Label done;
748 0 : if (cc != no_condition) {
749 0 : __ j(NegateCondition(cc), &done, Label::kNear);
750 : }
751 0 : __ int3();
752 0 : __ bind(&done);
753 : }
754 :
755 779377 : Deoptimizer::DeoptInfo deopt_info = MakeDeoptInfo(instr, deopt_reason, id);
756 :
757 : DCHECK(info()->IsStub() || frame_is_built_);
758 : // Go through the jump table if we need to handle a condition, build a
759 : // frame, or restore caller doubles.
760 857459 : if (cc == no_condition && frame_is_built_ &&
761 : !info()->saves_caller_doubles()) {
762 78082 : DeoptComment(deopt_info);
763 78082 : __ call(entry, RelocInfo::RUNTIME_ENTRY);
764 : } else {
765 : Deoptimizer::JumpTableEntry table_entry(entry, deopt_info, bailout_type,
766 701295 : !frame_is_built_);
767 : // We often have several deopts to the same entry, reuse the last
768 : // jump entry if this is the case.
769 2103646 : if (FLAG_trace_deopt || isolate()->is_profiling() ||
770 1989932 : jump_table_.is_empty() ||
771 : !table_entry.IsEquivalentTo(jump_table_.last())) {
772 : jump_table_.Add(table_entry, zone());
773 : }
774 701295 : if (cc == no_condition) {
775 28232 : __ jmp(&jump_table_.last().label);
776 : } else {
777 1374358 : __ j(cc, &jump_table_.last().label);
778 : }
779 : }
780 : }
781 :
782 0 : void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
783 : DeoptimizeReason deopt_reason) {
784 747000 : Deoptimizer::BailoutType bailout_type = info()->IsStub()
785 : ? Deoptimizer::LAZY
786 747000 : : Deoptimizer::EAGER;
787 747000 : DeoptimizeIf(cc, instr, deopt_reason, bailout_type);
788 0 : }
789 :
790 :
791 834725 : void LCodeGen::RecordSafepointWithLazyDeopt(
792 : LInstruction* instr, SafepointMode safepoint_mode, int argc) {
793 834725 : if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
794 : RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
795 : } else {
796 : DCHECK(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
797 : RecordSafepointWithRegisters(
798 : instr->pointer_map(), argc, Safepoint::kLazyDeopt);
799 : }
800 834726 : }
801 :
802 :
803 1763018 : void LCodeGen::RecordSafepoint(
804 : LPointerMap* pointers,
805 : Safepoint::Kind kind,
806 : int arguments,
807 : Safepoint::DeoptMode deopt_mode) {
808 : DCHECK(kind == expected_safepoint_kind_);
809 :
810 1763018 : const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
811 :
812 7244491 : Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
813 1763018 : kind, arguments, deopt_mode);
814 23293748 : for (int i = 0; i < operands->length(); i++) {
815 21530729 : LOperand* pointer = operands->at(i);
816 9883855 : if (pointer->IsStackSlot()) {
817 5322041 : safepoint.DefinePointerSlot(pointer->index(), zone());
818 4561814 : } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
819 159432 : safepoint.DefinePointerRegister(ToRegister(pointer), zone());
820 : }
821 : }
822 1763019 : }
823 :
824 :
825 0 : void LCodeGen::RecordSafepoint(LPointerMap* pointers,
826 : Safepoint::DeoptMode deopt_mode) {
827 1678024 : RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
828 0 : }
829 :
830 :
831 6469 : void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
832 6469 : LPointerMap empty_pointers(zone());
833 : RecordSafepoint(&empty_pointers, deopt_mode);
834 6469 : }
835 :
836 :
837 0 : void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
838 : int arguments,
839 : Safepoint::DeoptMode deopt_mode) {
840 84995 : RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
841 0 : }
842 :
843 :
844 : static const char* LabelType(LLabel* label) {
845 2019401 : if (label->is_loop_header()) return " (loop header)";
846 1968775 : if (label->is_osr_entry()) return " (OSR entry)";
847 : return "";
848 : }
849 :
850 :
851 2019401 : void LCodeGen::DoLabel(LLabel* label) {
852 : Comment(";;; <@%d,#%d> -------------------- B%d%s --------------------",
853 : current_instruction_,
854 : label->hydrogen_value()->id(),
855 : label->block_id(),
856 4038802 : LabelType(label));
857 4038802 : __ bind(label->label());
858 2019401 : current_block_ = label->block_id();
859 2019401 : DoGap(label);
860 2019401 : }
861 :
862 :
863 0 : void LCodeGen::DoParallelMove(LParallelMove* move) {
864 24564091 : resolver_.Resolve(move);
865 0 : }
866 :
867 :
868 17437326 : void LCodeGen::DoGap(LGap* gap) {
869 87186733 : for (int i = LGap::FIRST_INNER_POSITION;
870 : i <= LGap::LAST_INNER_POSITION;
871 : i++) {
872 : LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
873 : LParallelMove* move = gap->GetParallelMove(inner_pos);
874 69749321 : if (move != NULL) DoParallelMove(move);
875 : }
876 17437412 : }
877 :
878 :
879 15417994 : void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
880 15417994 : DoGap(instr);
881 15418028 : }
882 :
883 :
884 450577 : void LCodeGen::DoParameter(LParameter* instr) {
885 : // Nothing to do.
886 450577 : }
887 :
888 :
889 14103 : void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
890 14103 : GenerateOsrPrologue();
891 14103 : }
892 :
893 :
894 2128 : void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
895 : Register dividend = ToRegister(instr->dividend());
896 : int32_t divisor = instr->divisor();
897 : DCHECK(dividend.is(ToRegister(instr->result())));
898 :
899 : // Theoretically, a variation of the branch-free code for integer division by
900 : // a power of 2 (calculating the remainder via an additional multiplication
901 : // (which gets simplified to an 'and') and subtraction) should be faster, and
902 : // this is exactly what GCC and clang emit. Nevertheless, benchmarks seem to
903 : // indicate that positive dividends are heavily favored, so the branching
904 : // version performs better.
905 : HMod* hmod = instr->hydrogen();
906 1064 : int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
907 : Label dividend_is_not_negative, done;
908 3168 : if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) {
909 9432 : __ testl(dividend, dividend);
910 1040 : __ j(not_sign, &dividend_is_not_negative, Label::kNear);
911 : // Note that this is correct even for kMinInt operands.
912 1040 : __ negl(dividend);
913 1040 : __ andl(dividend, Immediate(mask));
914 1040 : __ negl(dividend);
915 1040 : if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
916 : DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero);
917 : }
918 1040 : __ jmp(&done, Label::kNear);
919 : }
920 :
921 1064 : __ bind(&dividend_is_not_negative);
922 1064 : __ andl(dividend, Immediate(mask));
923 1064 : __ bind(&done);
924 1064 : }
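The comment above contrasts this branching version with the branch-free form GCC and clang emit. A minimal C++ sketch of both, assuming two's-complement wrap on overflow (as on x64); mask is |divisor| - 1 for a power-of-two divisor:

    #include <cstdint>

    // Branching version, as emitted above.
    int32_t ModPowerOf2Branching(int32_t n, int32_t mask) {
      // In the emitted code, negl wraps for kMinInt and the result is still 0.
      if (n < 0) return -((-n) & mask);
      return n & mask;
    }

    // Branch-free variant the comment refers to: a truncating division by 2^k
    // via a sign-derived bias, a multiply back (which simplifies to an 'and'),
    // and a subtraction.
    int32_t ModPowerOf2BranchFree(int32_t n, int32_t k) {  // 0 < k < 32
      int32_t bias = static_cast<uint32_t>(n >> 31) >> (32 - k);
      int32_t q = (n + bias) >> k;  // trunc(n / 2^k)
      return n - (q << k);
    }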
925 :
926 :
927 3134 : void LCodeGen::DoModByConstI(LModByConstI* instr) {
928 : Register dividend = ToRegister(instr->dividend());
929 : int32_t divisor = instr->divisor();
930 : DCHECK(ToRegister(instr->result()).is(rax));
931 :
932 1567 : if (divisor == 0) {
933 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero);
934 1567 : return;
935 : }
936 :
937 4980 : __ TruncatingDiv(dividend, Abs(divisor));
938 1566 : __ imull(rdx, rdx, Immediate(Abs(divisor)));
939 1566 : __ movl(rax, dividend);
940 1566 : __ subl(rax, rdx);
941 :
942 : // Check for negative zero.
943 : HMod* hmod = instr->hydrogen();
944 3132 : if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
945 : Label remainder_not_zero;
946 94 : __ j(not_zero, &remainder_not_zero, Label::kNear);
947 94 : __ cmpl(dividend, Immediate(0));
948 : DeoptimizeIf(less, instr, DeoptimizeReason::kMinusZero);
949 94 : __ bind(&remainder_not_zero);
950 : }
951 : }
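In C++ terms the sequence above derives the remainder from a reciprocal-based truncating division. A sketch of the identity, with plain / standing in for TruncatingDiv's multiply-high by a precomputed magic constant (constant divisor assumed to be neither 0 nor kMinInt):

    #include <cstdint>
    #include <cstdlib>

    int32_t ModByConst(int32_t n, int32_t d) {
      int32_t q = n / std::abs(d);  // TruncatingDiv(dividend, Abs(divisor))
      return n - q * std::abs(d);   // imull; movl; subl above
    }
    // Equal to n % d: with truncating division the remainder takes the sign
    // of the dividend and its magnitude depends only on |d|.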
952 :
953 :
954 403 : void LCodeGen::DoModI(LModI* instr) {
955 : HMod* hmod = instr->hydrogen();
956 :
957 : Register left_reg = ToRegister(instr->left());
958 : DCHECK(left_reg.is(rax));
959 : Register right_reg = ToRegister(instr->right());
960 : DCHECK(!right_reg.is(rax));
961 : DCHECK(!right_reg.is(rdx));
962 403 : Register result_reg = ToRegister(instr->result());
963 : DCHECK(result_reg.is(rdx));
964 :
965 : Label done;
966 : // Check for x % 0; idiv would signal a divide error. We have to
967 : // deopt in this case because we can't return a NaN.
968 1918 : if (hmod->CheckFlag(HValue::kCanBeDivByZero)) {
969 4304 : __ testl(right_reg, right_reg);
970 : DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero);
971 : }
972 :
973 : // Check for kMinInt % -1; idiv would signal a divide error. We
974 : // have to deopt if we care about -0, because we can't return that.
975 403 : if (hmod->CheckFlag(HValue::kCanOverflow)) {
976 : Label no_overflow_possible;
977 306 : __ cmpl(left_reg, Immediate(kMinInt));
978 306 : __ j(not_zero, &no_overflow_possible, Label::kNear);
979 306 : __ cmpl(right_reg, Immediate(-1));
980 306 : if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
981 : DeoptimizeIf(equal, instr, DeoptimizeReason::kMinusZero);
982 : } else {
983 162 : __ j(not_equal, &no_overflow_possible, Label::kNear);
984 162 : __ Set(result_reg, 0);
985 162 : __ jmp(&done, Label::kNear);
986 : }
987 306 : __ bind(&no_overflow_possible);
988 : }
989 :
990 : // Sign extend dividend in eax into edx:eax, since we are using only the low
991 : // 32 bits of the values.
992 403 : __ cdq();
993 :
994 : // If we care about -0, test if the dividend is <0 and the result is 0.
995 403 : if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
996 : Label positive_left;
997 165 : __ testl(left_reg, left_reg);
998 165 : __ j(not_sign, &positive_left, Label::kNear);
999 165 : __ idivl(right_reg);
1000 165 : __ testl(result_reg, result_reg);
1001 : DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero);
1002 165 : __ jmp(&done, Label::kNear);
1003 165 : __ bind(&positive_left);
1004 : }
1005 403 : __ idivl(right_reg);
1006 403 : __ bind(&done);
1007 403 : }
1008 :
1009 :
1010 1006 : void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) {
1011 : Register dividend = ToRegister(instr->dividend());
1012 : int32_t divisor = instr->divisor();
1013 : DCHECK(dividend.is(ToRegister(instr->result())));
1014 :
1015 : // If the divisor is positive, things are easy: There can be no deopts and we
1016 : // can simply do an arithmetic right shift.
1017 768 : if (divisor == 1) return;
1018 : int32_t shift = WhichPowerOf2Abs(divisor);
1019 503 : if (divisor > 1) {
1020 1931 : __ sarl(dividend, Immediate(shift));
1021 232 : return;
1022 : }
1023 :
1024 : // If the divisor is negative, we have to negate and handle edge cases.
1025 271 : __ negl(dividend);
1026 542 : if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1027 : DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero);
1028 : }
1029 :
1030 : // Dividing by -1 is basically negation, unless we overflow.
1031 271 : if (divisor == -1) {
1032 66 : if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
1033 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
1034 : }
1035 : return;
1036 : }
1037 :
1038 : // If the negation could not overflow, simply shifting is OK.
1039 476 : if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
1040 0 : __ sarl(dividend, Immediate(shift));
1041 0 : return;
1042 : }
1043 :
1044 : Label not_kmin_int, done;
1045 238 : __ j(no_overflow, &not_kmin_int, Label::kNear);
1046 476 : __ movl(dividend, Immediate(kMinInt / divisor));
1047 238 : __ jmp(&done, Label::kNear);
1048 238 : __ bind(&not_kmin_int);
1049 238 : __ sarl(dividend, Immediate(shift));
1050 238 : __ bind(&done);
1051 : }
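The positive-divisor fast path works because an arithmetic right shift already rounds toward negative infinity; a one-line illustration in C++:

    #include <cstdint>

    // Flooring division by 2^k, 0 < k < 32: no fix-up needed.
    int32_t FlooringDivPowerOf2(int32_t n, int32_t k) { return n >> k; }
    // e.g. -7 >> 1 == -4 == floor(-7.0 / 2), whereas truncation gives -3.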
1052 :
1053 :
1054 3888 : void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
1055 : Register dividend = ToRegister(instr->dividend());
1056 : int32_t divisor = instr->divisor();
1057 : DCHECK(ToRegister(instr->result()).is(rdx));
1058 :
1059 1944 : if (divisor == 0) {
1060 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero);
1061 7 : return;
1062 : }
1063 :
1064 : // Check for (0 / -x) that will produce negative zero.
1065 : HMathFloorOfDiv* hdiv = instr->hydrogen();
1066 5811 : if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
1067 20289 : __ testl(dividend, dividend);
1068 : DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero);
1069 : }
1070 :
1071 : // Easy case: We need no dynamic check for the dividend and the flooring
1072 : // division is the same as the truncating division.
1073 4859 : if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) ||
1074 952 : (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) {
1075 0 : __ TruncatingDiv(dividend, Abs(divisor));
1076 0 : if (divisor < 0) __ negl(rdx);
1077 : return;
1078 : }
1079 :
1080 : // In the general case we may need to adjust before and after the truncating
1081 : // division to get a flooring division.
1082 : Register temp = ToRegister(instr->temp3());
1083 : DCHECK(!temp.is(dividend) && !temp.is(rax) && !temp.is(rdx));
1084 : Label needs_adjustment, done;
1085 1937 : __ cmpl(dividend, Immediate(0));
1086 3874 : __ j(divisor > 0 ? less : greater, &needs_adjustment, Label::kNear);
1087 1937 : __ TruncatingDiv(dividend, Abs(divisor));
1088 2889 : if (divisor < 0) __ negl(rdx);
1089 1937 : __ jmp(&done, Label::kNear);
1090 1937 : __ bind(&needs_adjustment);
1091 5811 : __ leal(temp, Operand(dividend, divisor > 0 ? 1 : -1));
1092 1937 : __ TruncatingDiv(temp, Abs(divisor));
1093 2889 : if (divisor < 0) __ negl(rdx);
1094 1937 : __ decl(rdx);
1095 1937 : __ bind(&done);
1096 : }
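The adjustment path uses a standard identity for obtaining a flooring division from a truncating one. A sketch under the same conventions as above (nonzero constant divisor, / standing in for TruncatingDiv):

    #include <cstdint>

    int32_t FlooringDivByConst(int32_t n, int32_t d) {
      if ((d > 0 && n >= 0) || (d < 0 && n <= 0)) {
        return n / d;  // same side of zero: truncation already floors
      }
      // Opposite signs: floor(n / d) == trunc((n + s) / d) - 1, s = sign(d).
      int32_t s = d > 0 ? 1 : -1;
      return (n + s) / d - 1;  // leal temp, [dividend +/- 1]; ...; decl rdx
    }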
1097 :
1098 :
1099 : // TODO(svenpanne) Refactor this to avoid code duplication with DoDivI.
1100 71 : void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) {
1101 : HBinaryOperation* hdiv = instr->hydrogen();
1102 : Register dividend = ToRegister(instr->dividend());
1103 : Register divisor = ToRegister(instr->divisor());
1104 : Register remainder = ToRegister(instr->temp());
1105 142 : Register result = ToRegister(instr->result());
1106 : DCHECK(dividend.is(rax));
1107 : DCHECK(remainder.is(rdx));
1108 : DCHECK(result.is(rax));
1109 : DCHECK(!divisor.is(rax));
1110 : DCHECK(!divisor.is(rdx));
1111 :
1112 : // Check for x / 0.
1113 284 : if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
1114 1153 : __ testl(divisor, divisor);
1115 : DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero);
1116 : }
1117 :
1118 : // Check for (0 / -x) that will produce negative zero.
1119 71 : if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) {
1120 : Label dividend_not_zero;
1121 65 : __ testl(dividend, dividend);
1122 65 : __ j(not_zero, &dividend_not_zero, Label::kNear);
1123 65 : __ testl(divisor, divisor);
1124 : DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero);
1125 65 : __ bind(&dividend_not_zero);
1126 : }
1127 :
1128 : // Check for (kMinInt / -1).
1129 71 : if (hdiv->CheckFlag(HValue::kCanOverflow)) {
1130 : Label dividend_not_min_int;
1131 65 : __ cmpl(dividend, Immediate(kMinInt));
1132 65 : __ j(not_zero, &dividend_not_min_int, Label::kNear);
1133 65 : __ cmpl(divisor, Immediate(-1));
1134 : DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow);
1135 65 : __ bind(&dividend_not_min_int);
1136 : }
1137 :
1138 : // Sign extend to rdx (= remainder).
1139 71 : __ cdq();
1140 71 : __ idivl(divisor);
1141 :
1142 : Label done;
1143 71 : __ testl(remainder, remainder);
1144 71 : __ j(zero, &done, Label::kNear);
1145 71 : __ xorl(remainder, divisor);
1146 71 : __ sarl(remainder, Immediate(31));
1147 71 : __ addl(result, remainder);
1148 71 : __ bind(&done);
1149 71 : }
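The tail after idivl turns the truncating quotient into a flooring one without testing signs explicitly; in C++ terms, with q and r being what idivl leaves in rax and rdx:

    #include <cstdint>

    int32_t FloorFromTruncating(int32_t q, int32_t r, int32_t d) {
      if (r != 0) {          // testl remainder; j zero, &done
        q += (r ^ d) >> 31;  // xorl; sarl(31): adds -1 iff r, d differ in sign
      }
      return q;              // addl result, remainder
    }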
1150 :
1151 :
1152 1074 : void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) {
1153 : Register dividend = ToRegister(instr->dividend());
1154 : int32_t divisor = instr->divisor();
1155 537 : Register result = ToRegister(instr->result());
1156 : DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor)));
1157 : DCHECK(!result.is(dividend));
1158 :
1159 : // Check for (0 / -x) that will produce negative zero.
1160 : HDiv* hdiv = instr->hydrogen();
1161 2148 : if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
1162 2863 : __ testl(dividend, dividend);
1163 : DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero);
1164 : }
1165 : // Check for (kMinInt / -1).
1166 537 : if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) {
1167 30 : __ cmpl(dividend, Immediate(kMinInt));
1168 : DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow);
1169 : }
1170 : // Deoptimize if remainder will not be 0.
1171 537 : if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1172 537 : divisor != 1 && divisor != -1) {
1173 125 : int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1174 125 : __ testl(dividend, Immediate(mask));
1175 : DeoptimizeIf(not_zero, instr, DeoptimizeReason::kLostPrecision);
1176 : }
1177 537 : __ Move(result, dividend);
1178 : int32_t shift = WhichPowerOf2Abs(divisor);
1179 537 : if (shift > 0) {
1180 : // The arithmetic shift is always OK, the 'if' is an optimization only.
1181 920 : if (shift > 1) __ sarl(result, Immediate(31));
1182 1012 : __ shrl(result, Immediate(32 - shift));
1183 506 : __ addl(result, dividend);
1184 506 : __ sarl(result, Immediate(shift));
1185 : }
1186 764 : if (divisor < 0) __ negl(result);
1187 537 : }
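The shift cascade implements a truncating signed division by 2^k using a sign-derived bias (a final negl then handles negative divisors). A sketch of the general form, 0 < k < 32:

    #include <cstdint>

    int32_t TruncatingDivPowerOf2(int32_t n, int32_t k) {
      // sarl(31) + shrl(32 - k) extract 2^k - 1 when n < 0, else 0; for k == 1
      // the sarl is skipped and the sign bit is shifted down directly.
      int32_t bias = static_cast<uint32_t>(n >> 31) >> (32 - k);
      return (n + bias) >> k;  // addl; sarl: rounds toward zero
    }
    // e.g. n = -7, k = 1: bias = 1, (-6) >> 1 == -3 == trunc(-7.0 / 2).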
1188 :
1189 :
1190 2834 : void LCodeGen::DoDivByConstI(LDivByConstI* instr) {
1191 : Register dividend = ToRegister(instr->dividend());
1192 : int32_t divisor = instr->divisor();
1193 : DCHECK(ToRegister(instr->result()).is(rdx));
1194 :
1195 1417 : if (divisor == 0) {
1196 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero);
1197 1417 : return;
1198 : }
1199 :
1200 : // Check for (0 / -x) that will produce negative zero.
1201 : HDiv* hdiv = instr->hydrogen();
1202 4251 : if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
1203 2162 : __ testl(dividend, dividend);
1204 : DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero);
1205 : }
1206 :
1207 1417 : __ TruncatingDiv(dividend, Abs(divisor));
1208 2096 : if (divisor < 0) __ negl(rdx);
1209 :
1210 1417 : if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1211 22 : __ movl(rax, rdx);
1212 22 : __ imull(rax, rax, Immediate(divisor));
1213 22 : __ subl(rax, dividend);
1214 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kLostPrecision);
1215 : }
1216 : }
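When not every use of the quotient truncates, the division must be exact, so the code multiplies the quotient back and bails out on a nonzero remainder. A sketch, with Deoptimize a hypothetical stand-in for the bailout:

    #include <cstdint>

    void Deoptimize();  // hypothetical: triggers kLostPrecision above

    int32_t ExactDivByConst(int32_t n, int32_t d) {
      int32_t q = n / d;             // TruncatingDiv, plus negl for d < 0
      if (q * d != n) Deoptimize();  // movl; imull; subl; j not_equal
      return q;
    }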
1217 :
1218 :
1219 : // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI.
1220 453 : void LCodeGen::DoDivI(LDivI* instr) {
1221 : HBinaryOperation* hdiv = instr->hydrogen();
1222 : Register dividend = ToRegister(instr->dividend());
1223 : Register divisor = ToRegister(instr->divisor());
1224 : Register remainder = ToRegister(instr->temp());
1225 : DCHECK(dividend.is(rax));
1226 : DCHECK(remainder.is(rdx));
1227 : DCHECK(ToRegister(instr->result()).is(rax));
1228 : DCHECK(!divisor.is(rax));
1229 : DCHECK(!divisor.is(rdx));
1230 :
1231 : // Check for x / 0.
1232 2265 : if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
1233 4098 : __ testl(divisor, divisor);
1234 : DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero);
1235 : }
1236 :
1237 : // Check for (0 / -x) that will produce negative zero.
1238 453 : if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) {
1239 : Label dividend_not_zero;
1240 242 : __ testl(dividend, dividend);
1241 242 : __ j(not_zero, &dividend_not_zero, Label::kNear);
1242 242 : __ testl(divisor, divisor);
1243 : DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero);
1244 242 : __ bind(&dividend_not_zero);
1245 : }
1246 :
1247 : // Check for (kMinInt / -1).
1248 453 : if (hdiv->CheckFlag(HValue::kCanOverflow)) {
1249 : Label dividend_not_min_int;
1250 375 : __ cmpl(dividend, Immediate(kMinInt));
1251 375 : __ j(not_zero, &dividend_not_min_int, Label::kNear);
1252 375 : __ cmpl(divisor, Immediate(-1));
1253 : DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow);
1254 375 : __ bind(&dividend_not_min_int);
1255 : }
1256 :
1257 : // Sign extend to rdx (= remainder).
1258 453 : __ cdq();
1259 453 : __ idivl(divisor);
1260 :
1261 453 : if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
1262 : // Deoptimize if remainder is not 0.
1263 275 : __ testl(remainder, remainder);
1264 : DeoptimizeIf(not_zero, instr, DeoptimizeReason::kLostPrecision);
1265 : }
1266 453 : }
1267 :
1268 :
1269 7271 : void LCodeGen::DoMulI(LMulI* instr) {
1270 : Register left = ToRegister(instr->left());
1271 : LOperand* right = instr->right();
1272 :
1273 14542 : if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1274 4824 : if (instr->hydrogen_value()->representation().IsSmi()) {
1275 23855 : __ movp(kScratchRegister, left);
1276 : } else {
1277 3567 : __ movl(kScratchRegister, left);
1278 : }
1279 : }
1280 :
1281 : bool can_overflow =
1282 7271 : instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1283 7271 : if (right->IsConstantOperand()) {
1284 : int32_t right_value = ToInteger32(LConstantOperand::cast(right));
1285 6084 : if (right_value == -1) {
1286 2845 : __ negl(left);
1287 3239 : } else if (right_value == 0) {
1288 48 : __ xorl(left, left);
1289 3191 : } else if (right_value == 2) {
1290 177 : __ addl(left, left);
1291 3014 : } else if (!can_overflow) {
1292 : // If the multiplication is known not to overflow, we
1293 : // can use cheaper operations even though they don't
1294 : // set the overflow flag correctly.
1295 615 : switch (right_value) {
1296 : case 1:
1297 : // Do nothing.
1298 : break;
1299 : case 3:
1300 36 : __ leal(left, Operand(left, left, times_2, 0));
1301 12 : break;
1302 : case 4:
1303 36 : __ shll(left, Immediate(2));
1304 36 : break;
1305 : case 5:
1306 42 : __ leal(left, Operand(left, left, times_4, 0));
1307 14 : break;
1308 : case 8:
1309 487 : __ shll(left, Immediate(3));
1310 487 : break;
1311 : case 9:
1312 0 : __ leal(left, Operand(left, left, times_8, 0));
1313 0 : break;
1314 : case 16:
1315 1 : __ shll(left, Immediate(4));
1316 1 : break;
1317 : default:
1318 65 : __ imull(left, left, Immediate(right_value));
1319 65 : break;
1320 : }
1321 : } else {
1322 2399 : __ imull(left, left, Immediate(right_value));
1323 : }
1324 1187 : } else if (right->IsStackSlot()) {
1325 91 : if (instr->hydrogen_value()->representation().IsSmi()) {
1326 0 : __ SmiToInteger64(left, left);
1327 0 : __ imulp(left, ToOperand(right));
1328 : } else {
1329 182 : __ imull(left, ToOperand(right));
1330 : }
1331 : } else {
1332 1096 : if (instr->hydrogen_value()->representation().IsSmi()) {
1333 271 : __ SmiToInteger64(left, left);
1334 271 : __ imulp(left, ToRegister(right));
1335 : } else {
1336 825 : __ imull(left, ToRegister(right));
1337 : }
1338 : }
1339 :
1340 7271 : if (can_overflow) {
1341 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
1342 : }
1343 :
1344 14542 : if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1345 : // Bail out if the result is supposed to be negative zero.
1346 : Label done;
1347 3822 : if (instr->hydrogen_value()->representation().IsSmi()) {
1348 255 : __ testp(left, left);
1349 : } else {
1350 3567 : __ testl(left, left);
1351 : }
1352 3822 : __ j(not_zero, &done, Label::kNear);
1353 3822 : if (right->IsConstantOperand()) {
1354 : // Constant can't be represented as 32-bit Smi due to immediate size
1355 : // limit.
1356 : DCHECK(SmiValuesAre32Bits()
1357 : ? !instr->hydrogen_value()->representation().IsSmi()
1358 : : SmiValuesAre31Bits());
1359 2820 : if (ToInteger32(LConstantOperand::cast(right)) < 0) {
1360 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero);
1361 23 : } else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
1362 23 : __ cmpl(kScratchRegister, Immediate(0));
1363 : DeoptimizeIf(less, instr, DeoptimizeReason::kMinusZero);
1364 : }
1365 1002 : } else if (right->IsStackSlot()) {
1366 69 : if (instr->hydrogen_value()->representation().IsSmi()) {
1367 0 : __ orp(kScratchRegister, ToOperand(right));
1368 : } else {
1369 138 : __ orl(kScratchRegister, ToOperand(right));
1370 : }
1371 : DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero);
1372 : } else {
1373 : // Test the non-zero operand for negative sign.
1374 933 : if (instr->hydrogen_value()->representation().IsSmi()) {
1375 255 : __ orp(kScratchRegister, ToRegister(right));
1376 : } else {
1377 678 : __ orl(kScratchRegister, ToRegister(right));
1378 : }
1379 : DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero);
1380 : }
1381 3822 : __ bind(&done);
1382 : }
1383 7271 : }
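Why kScratchRegister receives a copy of the left operand before the multiply: once the product is zero, the factors' signs are gone, yet JavaScript requires -0 when exactly one factor was negative. The register path above tests this with a single or; in C++ terms:

    #include <cstdint>

    // True iff an integer product of 0 should have been -0 in JavaScript.
    bool IsMinusZeroProduct(int32_t saved_left, int32_t right,
                            int32_t product) {
      return product == 0 && ((saved_left | right) < 0);  // orl; deopt on sign
    }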
1384 :
1385 :
1386 88975 : void LCodeGen::DoBitI(LBitI* instr) {
1387 : LOperand* left = instr->left();
1388 : LOperand* right = instr->right();
1389 : DCHECK(left->Equals(instr->result()));
1390 : DCHECK(left->IsRegister());
1391 :
1392 88975 : if (right->IsConstantOperand()) {
1393 : int32_t right_operand =
1394 : ToRepresentation(LConstantOperand::cast(right),
1395 : instr->hydrogen()->right()->representation());
1396 56538 : switch (instr->op()) {
1397 : case Token::BIT_AND:
1398 88975 : __ andl(ToRegister(left), Immediate(right_operand));
1399 47761 : break;
1400 : case Token::BIT_OR:
1401 869 : __ orl(ToRegister(left), Immediate(right_operand));
1402 869 : break;
1403 : case Token::BIT_XOR:
1404 7908 : if (right_operand == int32_t(~0)) {
1405 1432 : __ notl(ToRegister(left));
1406 : } else {
1407 6476 : __ xorl(ToRegister(left), Immediate(right_operand));
1408 : }
1409 : break;
1410 : default:
1411 0 : UNREACHABLE();
1412 : break;
1413 : }
1414 32437 : } else if (right->IsStackSlot()) {
1415 1994 : switch (instr->op()) {
1416 : case Token::BIT_AND:
1417 1107 : if (instr->IsInteger32()) {
1418 2214 : __ andl(ToRegister(left), ToOperand(right));
1419 : } else {
1420 0 : __ andp(ToRegister(left), ToOperand(right));
1421 : }
1422 : break;
1423 : case Token::BIT_OR:
1424 380 : if (instr->IsInteger32()) {
1425 760 : __ orl(ToRegister(left), ToOperand(right));
1426 : } else {
1427 0 : __ orp(ToRegister(left), ToOperand(right));
1428 : }
1429 : break;
1430 : case Token::BIT_XOR:
1431 507 : if (instr->IsInteger32()) {
1432 1014 : __ xorl(ToRegister(left), ToOperand(right));
1433 : } else {
1434 0 : __ xorp(ToRegister(left), ToOperand(right));
1435 : }
1436 : break;
1437 : default:
1438 0 : UNREACHABLE();
1439 : break;
1440 : }
1441 : } else {
1442 : DCHECK(right->IsRegister());
1443 30443 : switch (instr->op()) {
1444 : case Token::BIT_AND:
1445 10844 : if (instr->IsInteger32()) {
1446 10532 : __ andl(ToRegister(left), ToRegister(right));
1447 : } else {
1448 312 : __ andp(ToRegister(left), ToRegister(right));
1449 : }
1450 : break;
1451 : case Token::BIT_OR:
1452 7005 : if (instr->IsInteger32()) {
1453 6693 : __ orl(ToRegister(left), ToRegister(right));
1454 : } else {
1455 312 : __ orp(ToRegister(left), ToRegister(right));
1456 : }
1457 : break;
1458 : case Token::BIT_XOR:
1459 12594 : if (instr->IsInteger32()) {
1460 12326 : __ xorl(ToRegister(left), ToRegister(right));
1461 : } else {
1462 268 : __ xorp(ToRegister(left), ToRegister(right));
1463 : }
1464 : break;
1465 : default:
1466 0 : UNREACHABLE();
1467 : break;
1468 : }
1469 : }
1470 88975 : }
1471 :
1472 :
1473 76008 : void LCodeGen::DoShiftI(LShiftI* instr) {
1474 : LOperand* left = instr->left();
1475 : LOperand* right = instr->right();
1476 : DCHECK(left->Equals(instr->result()));
1477 : DCHECK(left->IsRegister());
1478 36218 : if (right->IsRegister()) {
1479 : DCHECK(ToRegister(right).is(rcx));
1480 :
1481 5055 : switch (instr->op()) {
1482 : case Token::ROR:
1483 35440 : __ rorl_cl(ToRegister(left));
1484 : break;
1485 : case Token::SAR:
1486 1407 : __ sarl_cl(ToRegister(left));
1487 : break;
1488 : case Token::SHR:
1489 1775 : __ shrl_cl(ToRegister(left));
1490 1775 : if (instr->can_deopt()) {
1491 157 : __ testl(ToRegister(left), ToRegister(left));
1492 : DeoptimizeIf(negative, instr, DeoptimizeReason::kNegativeValue);
1493 : }
1494 : break;
1495 : case Token::SHL:
1496 1422 : __ shll_cl(ToRegister(left));
1497 : break;
1498 : default:
1499 0 : UNREACHABLE();
1500 : break;
1501 : }
1502 : } else {
1503 : int32_t value = ToInteger32(LConstantOperand::cast(right));
1504 31163 : uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
1505 31163 : switch (instr->op()) {
1506 : case Token::ROR:
1507 48 : if (shift_count != 0) {
1508 96 : __ rorl(ToRegister(left), Immediate(shift_count));
1509 : }
1510 : break;
1511 : case Token::SAR:
1512 11435 : if (shift_count != 0) {
1513 22662 : __ sarl(ToRegister(left), Immediate(shift_count));
1514 : }
1515 : break;
1516 : case Token::SHR:
1517 5810 : if (shift_count != 0) {
1518 8026 : __ shrl(ToRegister(left), Immediate(shift_count));
1519 1797 : } else if (instr->can_deopt()) {
1520 1036 : __ testl(ToRegister(left), ToRegister(left));
1521 : DeoptimizeIf(negative, instr, DeoptimizeReason::kNegativeValue);
1522 : }
1523 : break;
1524 : case Token::SHL:
1525 13870 : if (shift_count != 0) {
1526 13800 : if (instr->hydrogen_value()->representation().IsSmi()) {
1527 : if (SmiValuesAre32Bits()) {
1528 0 : __ shlp(ToRegister(left), Immediate(shift_count));
1529 : } else {
1530 : DCHECK(SmiValuesAre31Bits());
1531 : if (instr->can_deopt()) {
1532 : if (shift_count != 1) {
1533 : __ shll(ToRegister(left), Immediate(shift_count - 1));
1534 : }
1535 : __ Integer32ToSmi(ToRegister(left), ToRegister(left));
1536 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
1537 : } else {
1538 : __ shll(ToRegister(left), Immediate(shift_count));
1539 : }
1540 : }
1541 : } else {
1542 27600 : __ shll(ToRegister(left), Immediate(shift_count));
1543 : }
1544 : }
1545 : break;
1546 : default:
1547 0 : UNREACHABLE();
1548 : break;
1549 : }
1550 : }
1551 36218 : }
1552 :
1553 :
1554 13202 : void LCodeGen::DoSubI(LSubI* instr) {
1555 : LOperand* left = instr->left();
1556 : LOperand* right = instr->right();
1557 : DCHECK(left->Equals(instr->result()));
1558 :
1559 13202 : if (right->IsConstantOperand()) {
1560 : int32_t right_operand =
1561 : ToRepresentation(LConstantOperand::cast(right),
1562 : instr->hydrogen()->right()->representation());
1563 13202 : __ subl(ToRegister(left), Immediate(right_operand));
1564 1747 : } else if (right->IsRegister()) {
1565 1747 : if (instr->hydrogen_value()->representation().IsSmi()) {
1566 289 : __ subp(ToRegister(left), ToRegister(right));
1567 : } else {
1568 882 : __ subl(ToRegister(left), ToRegister(right));
1569 : }
1570 : } else {
1571 576 : if (instr->hydrogen_value()->representation().IsSmi()) {
1572 0 : __ subp(ToRegister(left), ToOperand(right));
1573 : } else {
1574 1152 : __ subl(ToRegister(left), ToOperand(right));
1575 : }
1576 : }
1577 :
1578 26404 : if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1579 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
1580 : }
1581 13202 : }
1582 :
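 :           // (Added sketch.) The subl + DeoptimizeIf(overflow, ...) pair above
 :           // is the machine-level form of a checked 32-bit subtraction; roughly,
 :           // assuming a compiler that provides __builtin_sub_overflow:
 :           //
 :           //   bool SubWouldDeopt(int32_t a, int32_t b) {
 :           //     int32_t result;
 :           //     return __builtin_sub_overflow(a, b, &result);  // true => deopt
 :           //   }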
1583 :
1584 507746 : void LCodeGen::DoConstantI(LConstantI* instr) {
1585 507746 : Register dst = ToRegister(instr->result());
1586 507746 : if (instr->value() == 0) {
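 :           // (Added comment.) xorl dst,dst is the idiomatic "dst = 0": it has a
 :           // shorter encoding than movl dst,0 and breaks any false dependency on
 :           // the register's previous value.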
1587 507746 : __ xorl(dst, dst);
1588 : } else {
1589 452147 : __ movl(dst, Immediate(instr->value()));
1590 : }
1591 507746 : }
1592 :
1593 :
1594 413115 : void LCodeGen::DoConstantS(LConstantS* instr) {
1595 826230 : __ Move(ToRegister(instr->result()), instr->value());
1596 413115 : }
1597 :
1598 :
1599 42996 : void LCodeGen::DoConstantD(LConstantD* instr) {
1600 85992 : __ Move(ToDoubleRegister(instr->result()), instr->bits());
1601 42996 : }
1602 :
1603 :
1604 7588 : void LCodeGen::DoConstantE(LConstantE* instr) {
1605 15176 : __ LoadAddress(ToRegister(instr->result()), instr->value());
1606 7588 : }
1607 :
1608 :
1609 2597750 : void LCodeGen::DoConstantT(LConstantT* instr) {
1610 7793250 : Handle<Object> object = instr->value(isolate());
1611 : AllowDeferredHandleDereference smi_check;
1612 5195500 : __ Move(ToRegister(instr->result()), object);
1613 2597751 : }
1614 :
1615 :
1616 28640 : Operand LCodeGen::BuildSeqStringOperand(Register string,
1617 : LOperand* index,
1618 : String::Encoding encoding) {
1619 28640 : if (index->IsConstantOperand()) {
1620 : int offset = ToInteger32(LConstantOperand::cast(index));
1621 0 : if (encoding == String::TWO_BYTE_ENCODING) {
1622 0 : offset *= kUC16Size;
1623 : }
1624 : STATIC_ASSERT(kCharSize == 1);
1625 : return FieldOperand(string, SeqString::kHeaderSize + offset);
1626 : }
1627 : return FieldOperand(
1628 : string, ToRegister(index),
1629 : encoding == String::ONE_BYTE_ENCODING ? times_1 : times_2,
1630 28640 : SeqString::kHeaderSize);
1631 : }
1632 :
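 :           // (Added sketch.) The operand built above addresses character `index`
 :           // of a sequential string; FieldOperand folds in the -kHeapObjectTag
 :           // untagging. As plain arithmetic (hypothetical helper):
 :           //
 :           //   intptr_t SeqStringCharOffset(intptr_t index, String::Encoding enc) {
 :           //     int char_size =
 :           //         (enc == String::TWO_BYTE_ENCODING) ? kUC16Size : kCharSize;
 :           //     return SeqString::kHeaderSize - kHeapObjectTag + index * char_size;
 :           //   }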
1633 :
1634 14320 : void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
1635 14320 : String::Encoding encoding = instr->hydrogen()->encoding();
1636 14320 : Register result = ToRegister(instr->result());
1637 : Register string = ToRegister(instr->string());
1638 :
1639 14320 : if (FLAG_debug_code) {
1640 14320 : __ Push(string);
1641 0 : __ movp(string, FieldOperand(string, HeapObject::kMapOffset));
1642 0 : __ movzxbp(string, FieldOperand(string, Map::kInstanceTypeOffset));
1643 :
1644 0 : __ andb(string, Immediate(kStringRepresentationMask | kStringEncodingMask));
1645 : static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
1646 : static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
1647 : __ cmpp(string, Immediate(encoding == String::ONE_BYTE_ENCODING
1648 0 : ? one_byte_seq_type : two_byte_seq_type));
1649 0 : __ Check(equal, kUnexpectedStringType);
1650 0 : __ Pop(string);
1651 : }
1652 :
1653 14320 : Operand operand = BuildSeqStringOperand(string, instr->index(), encoding);
1654 14320 : if (encoding == String::ONE_BYTE_ENCODING) {
1655 7160 : __ movzxbl(result, operand);
1656 : } else {
1657 7160 : __ movzxwl(result, operand);
1658 : }
1659 14320 : }
1660 :
1661 :
1662 14320 : void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
1663 14320 : String::Encoding encoding = instr->hydrogen()->encoding();
1664 14320 : Register string = ToRegister(instr->string());
1665 :
1666 14320 : if (FLAG_debug_code) {
1667 0 : Register value = ToRegister(instr->value());
1668 0 : Register index = ToRegister(instr->index());
1669 : static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
1670 : static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
1671 : int encoding_mask =
1672 : instr->hydrogen()->encoding() == String::ONE_BYTE_ENCODING
1673 0 : ? one_byte_seq_type : two_byte_seq_type;
1674 14320 : __ EmitSeqStringSetCharCheck(string, index, value, encoding_mask);
1675 : }
1676 :
1677 14320 : Operand operand = BuildSeqStringOperand(string, instr->index(), encoding);
1678 14320 : if (instr->value()->IsConstantOperand()) {
1679 : int value = ToInteger32(LConstantOperand::cast(instr->value()));
1680 : DCHECK_LE(0, value);
1681 0 : if (encoding == String::ONE_BYTE_ENCODING) {
1682 : DCHECK_LE(value, String::kMaxOneByteCharCode);
1683 0 : __ movb(operand, Immediate(value));
1684 : } else {
1685 : DCHECK_LE(value, String::kMaxUtf16CodeUnit);
1686 0 : __ movw(operand, Immediate(value));
1687 : }
1688 : } else {
1689 14320 : Register value = ToRegister(instr->value());
1690 14320 : if (encoding == String::ONE_BYTE_ENCODING) {
1691 7160 : __ movb(operand, value);
1692 : } else {
1693 7160 : __ movw(operand, value);
1694 : }
1695 : }
1696 14320 : }
1697 :
1698 :
1699 147627 : void LCodeGen::DoAddI(LAddI* instr) {
1700 : LOperand* left = instr->left();
1701 : LOperand* right = instr->right();
1702 :
1703 : Representation target_rep = instr->hydrogen()->representation();
1704 147627 : bool is_p = target_rep.IsSmi() || target_rep.IsExternal();
1705 :
1706 206894 : if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) {
1707 34617 : if (right->IsConstantOperand()) {
1708 : // No support for smi-immediates for 32-bit SMI.
1709 : DCHECK(SmiValuesAre32Bits() ? !target_rep.IsSmi() : SmiValuesAre31Bits());
1710 : int32_t offset =
1711 : ToRepresentation(LConstantOperand::cast(right),
1712 : instr->hydrogen()->right()->representation());
1713 23789 : if (is_p) {
1714 147627 : __ leap(ToRegister(instr->result()),
1715 0 : MemOperand(ToRegister(left), offset));
1716 : } else {
1717 23789 : __ leal(ToRegister(instr->result()),
1718 95156 : MemOperand(ToRegister(left), offset));
1719 : }
1720 : } else {
1721 10828 : Operand address(ToRegister(left), ToRegister(right), times_1, 0);
1722 10828 : if (is_p) {
1723 72 : __ leap(ToRegister(instr->result()), address);
1724 : } else {
1725 21584 : __ leal(ToRegister(instr->result()), address);
1726 : }
1727 : }
1728 : } else {
1729 113010 : if (right->IsConstantOperand()) {
1730 : // No support for smi-immediates for 32-bit SMI.
1731 : DCHECK(SmiValuesAre32Bits() ? !target_rep.IsSmi() : SmiValuesAre31Bits());
1732 : int32_t right_operand =
1733 : ToRepresentation(LConstantOperand::cast(right),
1734 : instr->hydrogen()->right()->representation());
1735 96650 : if (is_p) {
1736 0 : __ addp(ToRegister(left), Immediate(right_operand));
1737 : } else {
1738 96650 : __ addl(ToRegister(left), Immediate(right_operand));
1739 : }
1740 16360 : } else if (right->IsRegister()) {
1741 14827 : if (is_p) {
1742 2905 : __ addp(ToRegister(left), ToRegister(right));
1743 : } else {
1744 11922 : __ addl(ToRegister(left), ToRegister(right));
1745 : }
1746 : } else {
1747 1533 : if (is_p) {
1748 0 : __ addp(ToRegister(left), ToOperand(right));
1749 : } else {
1750 3066 : __ addl(ToRegister(left), ToOperand(right));
1751 : }
1752 : }
1753 226020 : if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1754 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
1755 : }
1756 : }
1757 147627 : }
1758 :
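 :           // (Added note.) The two paths above trade flags for register
 :           // pressure: lea writes a third register and leaves EFLAGS untouched,
 :           // so it cannot feed the overflow deopt; add clobbers its left operand
 :           // but sets OF. In effect:
 :           //
 :           //   result = left + right;          // leal: three-operand, no flags
 :           //   left += right; if (OF) deopt;   // addl: flags drive the deopt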
1759 :
1760 1582 : void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
1761 : LOperand* left = instr->left();
1762 : LOperand* right = instr->right();
1763 : DCHECK(left->Equals(instr->result()));
1764 1582 : HMathMinMax::Operation operation = instr->hydrogen()->operation();
1765 1582 : if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
1766 : Label return_left;
1767 : Condition condition = (operation == HMathMinMax::kMathMin)
1768 : ? less_equal
1769 213 : : greater_equal;
1770 : Register left_reg = ToRegister(left);
1771 213 : if (right->IsConstantOperand()) {
1772 : Immediate right_imm = Immediate(
1773 : ToRepresentation(LConstantOperand::cast(right),
1774 : instr->hydrogen()->right()->representation()));
1775 : DCHECK(SmiValuesAre32Bits()
1776 : ? !instr->hydrogen()->representation().IsSmi()
1777 : : SmiValuesAre31Bits());
1778 23979 : __ cmpl(left_reg, right_imm);
1779 146 : __ j(condition, &return_left, Label::kNear);
1780 146 : __ movl(left_reg, right_imm);
1781 67 : } else if (right->IsRegister()) {
1782 : Register right_reg = ToRegister(right);
1783 67 : if (instr->hydrogen_value()->representation().IsSmi()) {
1784 0 : __ cmpp(left_reg, right_reg);
1785 : } else {
1786 41 : __ cmpl(left_reg, right_reg);
1787 : }
1788 41 : __ j(condition, &return_left, Label::kNear);
1789 41 : __ movp(left_reg, right_reg);
1790 : } else {
1791 26 : Operand right_op = ToOperand(right);
1792 26 : if (instr->hydrogen_value()->representation().IsSmi()) {
1793 0 : __ cmpp(left_reg, right_op);
1794 : } else {
1795 26 : __ cmpl(left_reg, right_op);
1796 : }
1797 26 : __ j(condition, &return_left, Label::kNear);
1798 26 : __ movp(left_reg, right_op);
1799 : }
1800 213 : __ bind(&return_left);
1801 : } else {
1802 : DCHECK(instr->hydrogen()->representation().IsDouble());
1803 : Label not_nan, distinct, return_left, return_right;
1804 1369 : Condition condition = (operation == HMathMinMax::kMathMin) ? below : above;
1805 1369 : XMMRegister left_reg = ToDoubleRegister(left);
1806 1369 : XMMRegister right_reg = ToDoubleRegister(right);
1807 1369 : __ Ucomisd(left_reg, right_reg);
1808 1369 : __ j(parity_odd, ¬_nan, Label::kNear); // Both are not NaN.
1809 :
1810 : // One of the numbers is NaN. Find which one and return it.
1811 1369 : __ Ucomisd(left_reg, left_reg);
1812 1369 : __ j(parity_even, &return_left, Label::kNear); // left is NaN.
1813 1369 : __ jmp(&return_right, Label::kNear); // right is NaN.
1814 :
1815 1369 : __ bind(¬_nan);
1816     1369 :   __ bind(&not_nan);
1817 :
1818 : // left == right
1819 1369 : XMMRegister xmm_scratch = double_scratch0();
1820 1369 : __ Xorpd(xmm_scratch, xmm_scratch);
1821 1369 : __ Ucomisd(left_reg, xmm_scratch);
1822 1369 : __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1823 :
1824 : // At this point, both left and right are either +0 or -0.
1825 1369 : if (operation == HMathMinMax::kMathMin) {
1826 714 : __ Orpd(left_reg, right_reg);
1827 : } else {
1828 655 : __ Andpd(left_reg, right_reg);
1829 : }
1830 1369 : __ jmp(&return_left, Label::kNear);
1831 :
1832 1369 : __ bind(&distinct);
1833 1369 : __ j(condition, &return_left, Label::kNear);
1834 :
1835 1369 : __ bind(&return_right);
1836 1369 : __ Movapd(left_reg, right_reg);
1837 :
1838 1369 : __ bind(&return_left);
1839 : }
1840 1582 : }
1841 :
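 :           // (Added sketch.) The Orpd/Andpd above resolve min/max of signed
 :           // zeros: -0 differs from +0 only in the sign bit, so OR-ing the
 :           // IEEE-754 bit patterns yields -0 if either input is -0 (min), while
 :           // AND-ing yields +0 unless both are -0 (max):
 :           //
 :           //   uint64_t MinOfZeros(uint64_t a, uint64_t b) { return a | b; }  // Orpd
 :           //   uint64_t MaxOfZeros(uint64_t a, uint64_t b) { return a & b; }  // Andpd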
1842 :
1843 165904 : void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1844 82952 : XMMRegister left = ToDoubleRegister(instr->left());
1845 82952 : XMMRegister right = ToDoubleRegister(instr->right());
1846 165904 : XMMRegister result = ToDoubleRegister(instr->result());
1847 82952 : switch (instr->op()) {
1848 : case Token::ADD:
1849 45920 : if (CpuFeatures::IsSupported(AVX)) {
1850 104088 : CpuFeatureScope scope(masm(), AVX);
1851 45913 : __ vaddsd(result, left, right);
1852 : } else {
1853 : DCHECK(result.is(left));
1854 7 : __ addsd(left, right);
1855 : }
1856 : break;
1857 : case Token::SUB:
1858 3100 : if (CpuFeatures::IsSupported(AVX)) {
1859 : CpuFeatureScope scope(masm(), AVX);
1860 3095 : __ vsubsd(result, left, right);
1861 : } else {
1862 : DCHECK(result.is(left));
1863 5 : __ subsd(left, right);
1864 : }
1865 : break;
1866 : case Token::MUL:
1867 12796 : if (CpuFeatures::IsSupported(AVX)) {
1868 : CpuFeatureScope scope(masm(), AVX);
1869 12787 : __ vmulsd(result, left, right);
1870 : } else {
1871 : DCHECK(result.is(left));
1872 9 : __ mulsd(left, right);
1873 : }
1874 : break;
1875 : case Token::DIV:
1876 18911 : if (CpuFeatures::IsSupported(AVX)) {
1877 : CpuFeatureScope scope(masm(), AVX);
1878 18902 : __ vdivsd(result, left, right);
1879 : } else {
1880 : DCHECK(result.is(left));
1881 9 : __ divsd(left, right);
1882 : }
1883          :       // Don't delete this mov. It may improve performance on some CPUs
1884          :       // when there is a (v)mulsd depending on the result.
1885 18911 : __ Movapd(result, result);
1886 18911 : break;
1887 : case Token::MOD: {
1888 : DCHECK(left.is(xmm0));
1889 : DCHECK(right.is(xmm1));
1890 : DCHECK(result.is(xmm0));
1891 2225 : __ PrepareCallCFunction(2);
1892 : __ CallCFunction(
1893 4450 : ExternalReference::mod_two_doubles_operation(isolate()), 2);
1894 2225 : break;
1895 : }
1896 : default:
1897 0 : UNREACHABLE();
1898 : break;
1899 : }
1900 82952 : }
1901 :
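 :           // (Added note.) The Token::MOD case drops to a C call; semantically
 :           // it is floating-point remainder, i.e. roughly std::fmod(x, y) on the
 :           // two inputs (an assumption about mod_two_doubles_operation, not
 :           // spelled out in this file).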
1902 :
1903 47548 : void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1904 : DCHECK(ToRegister(instr->context()).is(rsi));
1905 : DCHECK(ToRegister(instr->left()).is(rdx));
1906 : DCHECK(ToRegister(instr->right()).is(rax));
1907 : DCHECK(ToRegister(instr->result()).is(rax));
1908 :
1909 95096 : Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), instr->op()).code();
1910 : CallCode(code, RelocInfo::CODE_TARGET, instr);
1911 47548 : }
1912 :
1913 :
1914 : template<class InstrType>
1915 491056 : void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
1916 491056 : int left_block = instr->TrueDestination(chunk_);
1917 491056 : int right_block = instr->FalseDestination(chunk_);
1918 :
1919 987564 : int next_block = GetNextEmittedBlock();
1920 :
1921 491056 : if (right_block == left_block || cc == no_condition) {
1922 601 : EmitGoto(left_block);
1923 490455 : } else if (left_block == next_block) {
1924 313780 : __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
1925 333565 : } else if (right_block == next_block) {
1926 655024 : __ j(cc, chunk_->GetAssemblyLabel(left_block));
1927 : } else {
1928 12106 : __ j(cc, chunk_->GetAssemblyLabel(left_block));
1929 6053 : if (cc != always) {
1930 12106 : __ jmp(chunk_->GetAssemblyLabel(right_block));
1931 : }
1932 : }
1933 491056 : }
1934 :
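 :           // (Added summary.) EmitBranch folds a branch to the next emitted
 :           // block into fall-through; in pseudocode:
 :           //
 :           //   if (left == next)       j(!cc, right);             // fall through to left
 :           //   else if (right == next) j(cc, left);               // fall through to right
 :           //   else                    { j(cc, left); jmp(right); } // neither falls through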
1935 :
1936 : template <class InstrType>
1937 319 : void LCodeGen::EmitTrueBranch(InstrType instr, Condition cc) {
1938 319 : int true_block = instr->TrueDestination(chunk_);
1939 319 : __ j(cc, chunk_->GetAssemblyLabel(true_block));
1940 319 : }
1941 :
1942 :
1943 : template <class InstrType>
1944 622 : void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
1945 622 : int false_block = instr->FalseDestination(chunk_);
1946 622 : __ j(cc, chunk_->GetAssemblyLabel(false_block));
1947 622 : }
1948 :
1949 :
1950 0 : void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
1951 0 : __ int3();
1952 0 : }
1953 :
1954 :
1955 326114 : void LCodeGen::DoBranch(LBranch* instr) {
1956 : Representation r = instr->hydrogen()->value()->representation();
1957 326114 : if (r.IsInteger32()) {
1958 : DCHECK(!info()->IsStub());
1959 : Register reg = ToRegister(instr->value());
1960 2599608 : __ testl(reg, reg);
1961 573 : EmitBranch(instr, not_zero);
1962 325541 : } else if (r.IsSmi()) {
1963 : DCHECK(!info()->IsStub());
1964 : Register reg = ToRegister(instr->value());
1965 0 : __ testp(reg, reg);
1966 0 : EmitBranch(instr, not_zero);
1967 325541 : } else if (r.IsDouble()) {
1968 : DCHECK(!info()->IsStub());
1969 21 : XMMRegister reg = ToDoubleRegister(instr->value());
1970 21 : XMMRegister xmm_scratch = double_scratch0();
1971 21 : __ Xorpd(xmm_scratch, xmm_scratch);
1972 21 : __ Ucomisd(reg, xmm_scratch);
1973 21 : EmitBranch(instr, not_equal);
1974 : } else {
1975 : DCHECK(r.IsTagged());
1976 : Register reg = ToRegister(instr->value());
1977 : HType type = instr->hydrogen()->value()->type();
1978 325520 : if (type.IsBoolean()) {
1979 : DCHECK(!info()->IsStub());
1980 204641 : __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1981 204641 : EmitBranch(instr, equal);
1982 120879 : } else if (type.IsSmi()) {
1983 : DCHECK(!info()->IsStub());
1984 0 : __ SmiCompare(reg, Smi::kZero);
1985 0 : EmitBranch(instr, not_equal);
1986 120879 : } else if (type.IsJSArray()) {
1987 : DCHECK(!info()->IsStub());
1988 0 : EmitBranch(instr, no_condition);
1989 120879 : } else if (type.IsHeapNumber()) {
1990 : DCHECK(!info()->IsStub());
1991 0 : XMMRegister xmm_scratch = double_scratch0();
1992 0 : __ Xorpd(xmm_scratch, xmm_scratch);
1993 0 : __ Ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset));
1994 0 : EmitBranch(instr, not_equal);
1995 120879 : } else if (type.IsString()) {
1996 : DCHECK(!info()->IsStub());
1997 82 : __ cmpp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
1998 82 : EmitBranch(instr, not_equal);
1999 : } else {
2000 : ToBooleanHints expected = instr->hydrogen()->expected_input_types();
2001 : // Avoid deopts in the case where we've never executed this path before.
2002 120797 : if (expected == ToBooleanHint::kNone) expected = ToBooleanHint::kAny;
2003 :
2004 120797 : if (expected & ToBooleanHint::kUndefined) {
2005 : // undefined -> false.
2006 75695 : __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
2007 151390 : __ j(equal, instr->FalseLabel(chunk_));
2008 : }
2009 120797 : if (expected & ToBooleanHint::kBoolean) {
2010 : // true -> true.
2011 107540 : __ CompareRoot(reg, Heap::kTrueValueRootIndex);
2012 215080 : __ j(equal, instr->TrueLabel(chunk_));
2013 : // false -> false.
2014 107540 : __ CompareRoot(reg, Heap::kFalseValueRootIndex);
2015 215080 : __ j(equal, instr->FalseLabel(chunk_));
2016 : }
2017 120797 : if (expected & ToBooleanHint::kNull) {
2018 : // 'null' -> false.
2019 71864 : __ CompareRoot(reg, Heap::kNullValueRootIndex);
2020 143728 : __ j(equal, instr->FalseLabel(chunk_));
2021 : }
2022 :
2023 120797 : if (expected & ToBooleanHint::kSmallInteger) {
2024 : // Smis: 0 -> false, all other -> true.
2025 71473 : __ Cmp(reg, Smi::kZero);
2026 142946 : __ j(equal, instr->FalseLabel(chunk_));
2027 142946 : __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
2028 49324 : } else if (expected & ToBooleanHint::kNeedsMap) {
2029 : // If we need a map later and have a Smi -> deopt.
2030 8703 : __ testb(reg, Immediate(kSmiTagMask));
2031 : DeoptimizeIf(zero, instr, DeoptimizeReason::kSmi);
2032 : }
2033 :
2034 : const Register map = kScratchRegister;
2035 120797 : if (expected & ToBooleanHint::kNeedsMap) {
2036 78631 : __ movp(map, FieldOperand(reg, HeapObject::kMapOffset));
2037 :
2038 78631 : if (expected & ToBooleanHint::kCanBeUndetectable) {
2039 : // Undetectable -> false.
2040 : __ testb(FieldOperand(map, Map::kBitFieldOffset),
2041 151794 : Immediate(1 << Map::kIsUndetectable));
2042 151794 : __ j(not_zero, instr->FalseLabel(chunk_));
2043 : }
2044 : }
2045 :
2046 120797 : if (expected & ToBooleanHint::kReceiver) {
2047 : // spec object -> true.
2048 75897 : __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
2049 151794 : __ j(above_equal, instr->TrueLabel(chunk_));
2050 : }
2051 :
2052 120797 : if (expected & ToBooleanHint::kString) {
2053 : // String value -> false iff empty.
2054 : Label not_string;
2055 72474 : __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
2056    72474 :       __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
2057    72474 :       __ j(above_equal, &not_string, Label::kNear);
2057 72474 : __ cmpp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
2058 144948 : __ j(not_zero, instr->TrueLabel(chunk_));
2059 144948 : __ jmp(instr->FalseLabel(chunk_));
2060 72474 : __ bind(¬_string);
2061 : }
2062 :
2063 120797 : if (expected & ToBooleanHint::kSymbol) {
2064 : // Symbol value -> true.
2065 69841 : __ CmpInstanceType(map, SYMBOL_TYPE);
2066 139682 : __ j(equal, instr->TrueLabel(chunk_));
2067 : }
2068 :
2069 120797 : if (expected & ToBooleanHint::kHeapNumber) {
2070 : // heap number -> false iff +0, -0, or NaN.
2071 : Label not_heap_number;
2072 69875 : __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
2073    69875 :       __ j(not_equal, &not_heap_number, Label::kNear);
2074 69875 : XMMRegister xmm_scratch = double_scratch0();
2075 69875 : __ Xorpd(xmm_scratch, xmm_scratch);
2076 139750 : __ Ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset));
2077 139750 : __ j(zero, instr->FalseLabel(chunk_));
2078 139750 : __ jmp(instr->TrueLabel(chunk_));
2079    69875 :       __ bind(&not_heap_number);
2080 : }
2081 :
2082 120797 : if (expected != ToBooleanHint::kAny) {
2083 : // We've seen something for the first time -> deopt.
2084 : // This can only happen if we are not generic already.
2085 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kUnexpectedObject);
2086 : }
2087 : }
2088 : }
2089 326114 : }
2090 :
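 :           // (Added summary.) The hint-guarded chain above implements JS
 :           // ToBoolean; in pseudocode over tagged values:
 :           //
 :           //   bool ToBoolean(v) {
 :           //     if (v is undefined || null || false) return false;
 :           //     if (v is true) return true;
 :           //     if (v is smi) return v != 0;
 :           //     if (v's map is undetectable) return false;
 :           //     if (v is string) return v.length != 0;
 :           //     if (v is heap number) return v != 0.0 && !isnan(v);
 :           //     return true;  // receivers, symbols, anything else seen
 :           //   }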
2091 :
2092 1017147 : void LCodeGen::EmitGoto(int block) {
2093 1017147 : if (!IsNextEmittedBlock(block)) {
2094 222078 : __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block)));
2095 : }
2096 1017147 : }
2097 :
2098 :
2099 1007401 : void LCodeGen::DoGoto(LGoto* instr) {
2100 1007401 : EmitGoto(instr->block_id());
2101 1007401 : }
2102 :
2103 :
2104 360749 : inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
2105 : Condition cond = no_condition;
2106 360749 : switch (op) {
2107 : case Token::EQ:
2108 : case Token::EQ_STRICT:
2109 : cond = equal;
2110 : break;
2111 : case Token::NE:
2112 : case Token::NE_STRICT:
2113 : cond = not_equal;
2114 13105 : break;
2115 : case Token::LT:
2116 52325 : cond = is_unsigned ? below : less;
2117 52325 : break;
2118 : case Token::GT:
2119 34945 : cond = is_unsigned ? above : greater;
2120 34945 : break;
2121 : case Token::LTE:
2122 5677 : cond = is_unsigned ? below_equal : less_equal;
2123 5677 : break;
2124 : case Token::GTE:
2125 7799 : cond = is_unsigned ? above_equal : greater_equal;
2126 7799 : break;
2127 : case Token::IN:
2128 : case Token::INSTANCEOF:
2129 : default:
2130 0 : UNREACHABLE();
2131 : }
2132 360749 : return cond;
2133 : }
2134 :
2135 :
2136 160751 : void LCodeGen::DoCompareNumericAndBranch(LCompareNumericAndBranch* instr) {
2137 : LOperand* left = instr->left();
2138 : LOperand* right = instr->right();
2139 : bool is_unsigned =
2140 144066 : instr->is_double() ||
2141 448850 : instr->hydrogen()->left()->CheckFlag(HInstruction::kUint32) ||
2142 144033 : instr->hydrogen()->right()->CheckFlag(HInstruction::kUint32);
2143 321502 : Condition cc = TokenToCondition(instr->op(), is_unsigned);
2144 :
2145 170292 : if (left->IsConstantOperand() && right->IsConstantOperand()) {
2146 : // We can statically evaluate the comparison.
2147 : double left_val = ToDouble(LConstantOperand::cast(left));
2148 : double right_val = ToDouble(LConstantOperand::cast(right));
2149 9145 : int next_block = Token::EvalComparison(instr->op(), left_val, right_val)
2150 895 : ? instr->TrueDestination(chunk_)
2151 10040 : : instr->FalseDestination(chunk_);
2152 9145 : EmitGoto(next_block);
2153 : } else {
2154 151606 : if (instr->is_double()) {
2155 : // Don't base result on EFLAGS when a NaN is involved. Instead
2156 : // jump to the false block.
2157 166560 : __ Ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
2158 29908 : __ j(parity_even, instr->FalseLabel(chunk_));
2159 : } else {
2160 : int32_t value;
2161 136652 : if (right->IsConstantOperand()) {
2162 : value = ToInteger32(LConstantOperand::cast(right));
2163 82298 : if (instr->hydrogen_value()->representation().IsSmi()) {
2164 0 : __ Cmp(ToRegister(left), Smi::FromInt(value));
2165 : } else {
2166 81902 : __ cmpl(ToRegister(left), Immediate(value));
2167 : }
2168 54750 : } else if (left->IsConstantOperand()) {
2169 : value = ToInteger32(LConstantOperand::cast(left));
2170 396 : if (instr->hydrogen_value()->representation().IsSmi()) {
2171 0 : if (right->IsRegister()) {
2172 0 : __ Cmp(ToRegister(right), Smi::FromInt(value));
2173 : } else {
2174 0 : __ Cmp(ToOperand(right), Smi::FromInt(value));
2175 : }
2176 396 : } else if (right->IsRegister()) {
2177 364 : __ cmpl(ToRegister(right), Immediate(value));
2178 : } else {
2179 64 : __ cmpl(ToOperand(right), Immediate(value));
2180 : }
2181 : // We commuted the operands, so commute the condition.
2182 396 : cc = CommuteCondition(cc);
2183 54354 : } else if (instr->hydrogen_value()->representation().IsSmi()) {
2184 3747 : if (right->IsRegister()) {
2185 3663 : __ cmpp(ToRegister(left), ToRegister(right));
2186 : } else {
2187 168 : __ cmpp(ToRegister(left), ToOperand(right));
2188 : }
2189 : } else {
2190 50607 : if (right->IsRegister()) {
2191 38766 : __ cmpl(ToRegister(left), ToRegister(right));
2192 : } else {
2193 23682 : __ cmpl(ToRegister(left), ToOperand(right));
2194 : }
2195 : }
2196 : }
2197 151606 : EmitBranch(instr, cc);
2198 : }
2199 160751 : }
2200 :
2201 :
2202 41466 : void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
2203 41466 : Register left = ToRegister(instr->left());
2204 :
2205 41466 : if (instr->right()->IsConstantOperand()) {
2206 30627 : Handle<Object> right = ToHandle(LConstantOperand::cast(instr->right()));
2207 41466 : __ Cmp(left, right);
2208 : } else {
2209 10839 : Register right = ToRegister(instr->right());
2210 10839 : __ cmpp(left, right);
2211 : }
2212 41466 : EmitBranch(instr, equal);
2213 41466 : }
2214 :
2215 :
2216 33 : void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
2217 33 : if (instr->hydrogen()->representation().IsTagged()) {
2218 24 : Register input_reg = ToRegister(instr->object());
2219 93 : __ Cmp(input_reg, factory()->the_hole_value());
2220 24 : EmitBranch(instr, equal);
2221 33 : return;
2222 : }
2223 :
2224 9 : XMMRegister input_reg = ToDoubleRegister(instr->object());
2225 9 : __ Ucomisd(input_reg, input_reg);
2226 9 : EmitFalseBranch(instr, parity_odd);
2227 :
2228 9 : __ subp(rsp, Immediate(kDoubleSize));
2229 18 : __ Movsd(MemOperand(rsp, 0), input_reg);
2230 9 : __ addp(rsp, Immediate(kDoubleSize));
2231 :
2232 : int offset = sizeof(kHoleNanUpper32);
2233 27 : __ cmpl(MemOperand(rsp, -offset), Immediate(kHoleNanUpper32));
2234 9 : EmitBranch(instr, equal);
2235 : }
2236 :
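 :           // (Added sketch.) The hole is one specific quiet NaN, so only the
 :           // upper 32 bits of the double need to be compared; assuming
 :           // kHoleNanUpper32 is that pattern's high word:
 :           //
 :           //   bool IsHoleNan(double d) {
 :           //     uint64_t bits;
 :           //     std::memcpy(&bits, &d, sizeof(bits));  // <cstring>
 :           //     return static_cast<uint32_t>(bits >> 32) == kHoleNanUpper32;
 :           //   }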
2237 :
2238 426 : Condition LCodeGen::EmitIsString(Register input,
2239 : Register temp1,
2240 : Label* is_not_string,
2241 : SmiCheck check_needed = INLINE_SMI_CHECK) {
2242 426 : if (check_needed == INLINE_SMI_CHECK) {
2243 425 : __ JumpIfSmi(input, is_not_string);
2244 : }
2245 :
2246 426 : Condition cond = masm_->IsObjectStringType(input, temp1, temp1);
2247 :
2248 426 : return cond;
2249 : }
2250 :
2251 :
2252 426 : void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
2253 426 : Register reg = ToRegister(instr->value());
2254 426 : Register temp = ToRegister(instr->temp());
2255 :
2256 : SmiCheck check_needed =
2257 : instr->hydrogen()->value()->type().IsHeapObject()
2258 426 : ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
2259 :
2260 : Condition true_cond = EmitIsString(
2261 426 : reg, temp, instr->FalseLabel(chunk_), check_needed);
2262 :
2263 426 : EmitBranch(instr, true_cond);
2264 426 : }
2265 :
2266 :
2267 8832 : void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
2268 : Condition is_smi;
2269 8832 : if (instr->value()->IsRegister()) {
2270 6870 : Register input = ToRegister(instr->value());
2271 8832 : is_smi = masm()->CheckSmi(input);
2272 : } else {
2273 1962 : Operand input = ToOperand(instr->value());
2274 1962 : is_smi = masm()->CheckSmi(input);
2275 : }
2276 8832 : EmitBranch(instr, is_smi);
2277 8832 : }
2278 :
2279 :
2280 1183 : void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
2281 : Register input = ToRegister(instr->value());
2282 : Register temp = ToRegister(instr->temp());
2283 :
2284 1183 : if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2285 3477 : __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2286 : }
2287 1183 : __ movp(temp, FieldOperand(input, HeapObject::kMapOffset));
2288 : __ testb(FieldOperand(temp, Map::kBitFieldOffset),
2289 2366 : Immediate(1 << Map::kIsUndetectable));
2290 1183 : EmitBranch(instr, not_zero);
2291 1183 : }
2292 :
2293 :
2294 13249 : void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
2295 : DCHECK(ToRegister(instr->context()).is(rsi));
2296 : DCHECK(ToRegister(instr->left()).is(rdx));
2297 : DCHECK(ToRegister(instr->right()).is(rax));
2298 :
2299 39747 : Handle<Code> code = CodeFactory::StringCompare(isolate(), instr->op()).code();
2300 : CallCode(code, RelocInfo::CODE_TARGET, instr);
2301 13249 : __ CompareRoot(rax, Heap::kTrueValueRootIndex);
2302 13249 : EmitBranch(instr, equal);
2303 13249 : }
2304 :
2305 :
2306 1882 : static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
2307 : InstanceType from = instr->from();
2308 : InstanceType to = instr->to();
2309 1882 : if (from == FIRST_TYPE) return to;
2310 : DCHECK(from == to || to == LAST_TYPE);
2311 : return from;
2312 : }
2313 :
2314 :
2315 1882 : static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
2316 : InstanceType from = instr->from();
2317 : InstanceType to = instr->to();
2318 1882 : if (from == to) return equal;
2319 1030 : if (to == LAST_TYPE) return above_equal;
2320 0 : if (from == FIRST_TYPE) return below_equal;
2321 0 : UNREACHABLE();
2322 : return equal;
2323 : }
2324 :
2325 :
2326 1882 : void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
2327 1882 : Register input = ToRegister(instr->value());
2328 :
2329 1882 : if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2330 3533 : __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2331 : }
2332 :
2333 1882 : __ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister);
2334 1882 : EmitBranch(instr, BranchCondition(instr->hydrogen()));
2335 1882 : }
2336 :
2337 : // Branches to a label or falls through with the answer in the z flag.
2338 : // Trashes the temp register.
2339 7327 : void LCodeGen::EmitClassOfTest(Label* is_true, Label* is_false,
2340 : Handle<String> class_name, Register input,
2341 : Register temp, Register temp2) {
2342 : DCHECK(!input.is(temp));
2343 : DCHECK(!input.is(temp2));
2344 : DCHECK(!temp.is(temp2));
2345 :
2346 58616 : __ JumpIfSmi(input, is_false);
2347 :
2348 7327 : __ CmpObjectType(input, FIRST_FUNCTION_TYPE, temp);
2349 : STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2350 14654 : if (String::Equals(isolate()->factory()->Function_string(), class_name)) {
2351 6 : __ j(above_equal, is_true);
2352 : } else {
2353 7321 : __ j(above_equal, is_false);
2354 : }
2355 :
2356 : // Check if the constructor in the map is a function.
2357 7327 : __ GetMapConstructor(temp, temp, kScratchRegister);
2358 :
2359 : // Objects with a non-function constructor have class 'Object'.
2360 7327 : __ CmpInstanceType(kScratchRegister, JS_FUNCTION_TYPE);
2361 14654 : if (String::Equals(class_name, isolate()->factory()->Object_string())) {
2362 0 : __ j(not_equal, is_true);
2363 : } else {
2364 7327 : __ j(not_equal, is_false);
2365 : }
2366 :
2367 : // temp now contains the constructor function. Grab the
2368 : // instance class name from there.
2369 7327 : __ movp(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
2370 : __ movp(temp,
2371 7327 : FieldOperand(temp, SharedFunctionInfo::kInstanceClassNameOffset));
2372 : // The class name we are testing against is internalized since it's a literal.
2373 : // The name in the constructor is internalized because of the way the context
2374 : // is booted. This routine isn't expected to work for random API-created
2375 : // classes and it doesn't have to because you can't access it with natives
2376 : // syntax. Since both sides are internalized it is sufficient to use an
2377 : // identity comparison.
2378 : DCHECK(class_name->IsInternalizedString());
2379 7327 : __ Cmp(temp, class_name);
2380 : // End with the answer in the z flag.
2381 7327 : }
2382 :
2383 7327 : void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
2384 7327 : Register input = ToRegister(instr->value());
2385 7327 : Register temp = ToRegister(instr->temp());
2386 7327 : Register temp2 = ToRegister(instr->temp2());
2387 7327 : Handle<String> class_name = instr->hydrogen()->class_name();
2388 :
2389 : EmitClassOfTest(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_),
2390 7327 : class_name, input, temp, temp2);
2391 :
2392 7327 : EmitBranch(instr, equal);
2393 7327 : }
2394 :
2395 17544 : void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
2396 : Register reg = ToRegister(instr->value());
2397 :
2398 17544 : __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
2399 17544 : EmitBranch(instr, equal);
2400 17544 : }
2401 :
2402 :
2403 319 : void LCodeGen::DoHasInPrototypeChainAndBranch(
2404 : LHasInPrototypeChainAndBranch* instr) {
2405 : Register const object = ToRegister(instr->object());
2406 : Register const object_map = kScratchRegister;
2407 : Register const object_prototype = object_map;
2408 319 : Register const prototype = ToRegister(instr->prototype());
2409 :
2410 : // The {object} must be a spec object. It's sufficient to know that {object}
2411 : // is not a smi, since all other non-spec objects have {null} prototypes and
2412 : // will be ruled out below.
2413 319 : if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
2414 3165 : Condition is_smi = __ CheckSmi(object);
2415 294 : EmitFalseBranch(instr, is_smi);
2416 : }
2417 :
2418          :   // Loop through the {object}'s prototype chain looking for the {prototype}.
2419 319 : __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
2420 : Label loop;
2421 319 : __ bind(&loop);
2422 :
2423 : // Deoptimize if the object needs to be access checked.
2424 : __ testb(FieldOperand(object_map, Map::kBitFieldOffset),
2425 638 : Immediate(1 << Map::kIsAccessCheckNeeded));
2426 : DeoptimizeIf(not_zero, instr, DeoptimizeReason::kAccessCheck);
2427 : // Deoptimize for proxies.
2428 319 : __ CmpInstanceType(object_map, JS_PROXY_TYPE);
2429 : DeoptimizeIf(equal, instr, DeoptimizeReason::kProxy);
2430 :
2431 319 : __ movp(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
2432 319 : __ CompareRoot(object_prototype, Heap::kNullValueRootIndex);
2433 319 : EmitFalseBranch(instr, equal);
2434 319 : __ cmpp(object_prototype, prototype);
2435 319 : EmitTrueBranch(instr, equal);
2436 319 : __ movp(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
2437 319 : __ jmp(&loop);
2438 319 : }
2439 :
2440 :
2441 199998 : void LCodeGen::DoCmpT(LCmpT* instr) {
2442 : DCHECK(ToRegister(instr->context()).is(rsi));
2443 : Token::Value op = instr->op();
2444 :
2445 1799982 : Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
2446 : CallCode(ic, RelocInfo::CODE_TARGET, instr);
2447 :
2448 199998 : Condition condition = TokenToCondition(op, false);
2449 : Label true_value, done;
2450 199998 : __ testp(rax, rax);
2451 199998 : __ j(condition, &true_value, Label::kNear);
2452 399996 : __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
2453 199998 : __ jmp(&done, Label::kNear);
2454 199998 : __ bind(&true_value);
2455 399996 : __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
2456 199998 : __ bind(&done);
2457 199998 : }
2458 :
2459 :
2460 353697 : void LCodeGen::DoReturn(LReturn* instr) {
2461 1703765 : if (FLAG_trace && info()->IsOptimizing()) {
2462 : // Preserve the return value on the stack and rely on the runtime call
2463 : // to return the value in the same register. We're leaving the code
2464 : // managed by the register allocator and tearing down the frame, it's
2465 : // safe to write to the context register.
2466 0 : __ Push(rax);
2467 0 : __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2468 0 : __ CallRuntime(Runtime::kTraceExit);
2469 : }
2470 353697 : if (info()->saves_caller_doubles()) {
2471 54 : RestoreCallerDoubles();
2472 : }
2473 353697 : if (NeedsEagerFrame()) {
2474 321336 : __ movp(rsp, rbp);
2475 321337 : __ popq(rbp);
2476 : }
2477 353697 : if (instr->has_constant_parameter_count()) {
2478 353698 : __ Ret((ToInteger32(instr->constant_parameter_count()) + 1) * kPointerSize,
2479 707396 : rcx);
2480 : } else {
2481 : DCHECK(info()->IsStub()); // Functions would need to drop one more value.
2482 : Register reg = ToRegister(instr->parameter_count());
2483          :     // The argument count parameter is a smi.
2484 0 : __ SmiToInteger32(reg, reg);
2485 0 : Register return_addr_reg = reg.is(rcx) ? rbx : rcx;
2486 : __ PopReturnAddressTo(return_addr_reg);
2487 0 : __ shlp(reg, Immediate(kPointerSizeLog2));
2488 0 : __ addp(rsp, reg);
2489 0 : __ jmp(return_addr_reg);
2490 : }
2491 353698 : }
2492 :
2493 :
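 :           // (Added note.) The constant-count epilogue above returns and drops
 :           // (argc + 1) stack slots in one step (the receiver plus argc
 :           // arguments); the non-constant branch computes the same byte count at
 :           // run time and jumps through the saved return address instead.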
2494 299284 : void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2495 299284 : Register context = ToRegister(instr->context());
2496 299284 : Register result = ToRegister(instr->result());
2497 311992 : __ movp(result, ContextOperand(context, instr->slot_index()));
2498 299284 : if (instr->hydrogen()->RequiresHoleCheck()) {
2499 12708 : __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2500 12708 : if (instr->hydrogen()->DeoptimizesOnHole()) {
2501 : DeoptimizeIf(equal, instr, DeoptimizeReason::kHole);
2502 : } else {
2503 : Label is_not_hole;
2504 0 : __ j(not_equal, &is_not_hole, Label::kNear);
2505 0 : __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2506 0 : __ bind(&is_not_hole);
2507 : }
2508 : }
2509 299284 : }
2510 :
2511 :
2512 69884 : void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2513 : Register context = ToRegister(instr->context());
2514 : Register value = ToRegister(instr->value());
2515 :
2516 69884 : Operand target = ContextOperand(context, instr->slot_index());
2517 :
2518 : Label skip_assignment;
2519 69884 : if (instr->hydrogen()->RequiresHoleCheck()) {
2520 178747 : __ CompareRoot(target, Heap::kTheHoleValueRootIndex);
2521 608 : if (instr->hydrogen()->DeoptimizesOnHole()) {
2522 : DeoptimizeIf(equal, instr, DeoptimizeReason::kHole);
2523 : } else {
2524 0 : __ j(not_equal, &skip_assignment);
2525 : }
2526 : }
2527 69884 : __ movp(target, value);
2528 :
2529 69884 : if (instr->hydrogen()->NeedsWriteBarrier()) {
2530 : SmiCheck check_needed =
2531 : instr->hydrogen()->value()->type().IsHeapObject()
2532 38371 : ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
2533 : int offset = Context::SlotOffset(instr->slot_index());
2534 : Register scratch = ToRegister(instr->temp());
2535 : __ RecordWriteContextSlot(context,
2536 : offset,
2537 : value,
2538 : scratch,
2539 : kSaveFPRegs,
2540 : EMIT_REMEMBERED_SET,
2541 : check_needed);
2542 : }
2543 :
2544 69884 : __ bind(&skip_assignment);
2545 69884 : }
2546 :
2547 :
2548 240928 : void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2549 : HObjectAccess access = instr->hydrogen()->access();
2550 : int offset = access.offset();
2551 :
2552 240928 : if (access.IsExternalMemory()) {
2553 0 : Register result = ToRegister(instr->result());
2554 0 : if (instr->object()->IsConstantOperand()) {
2555 : DCHECK(result.is(rax));
2556 243603 : __ load_rax(ToExternalReference(LConstantOperand::cast(instr->object())));
2557 : } else {
2558 0 : Register object = ToRegister(instr->object());
2559 0 : __ Load(result, MemOperand(object, offset), access.representation());
2560 : }
2561 : return;
2562 : }
2563 :
2564 : Register object = ToRegister(instr->object());
2565 240928 : if (instr->hydrogen()->representation().IsDouble()) {
2566 : DCHECK(access.IsInobject());
2567 2970 : XMMRegister result = ToDoubleRegister(instr->result());
2568 2970 : __ Movsd(result, FieldOperand(object, offset));
2569 : return;
2570 : }
2571 :
2572 239443 : Register result = ToRegister(instr->result());
2573 239443 : if (!access.IsInobject()) {
2574 2651 : __ movp(result, FieldOperand(object, JSObject::kPropertiesOffset));
2575 : object = result;
2576 : }
2577 :
2578 : Representation representation = access.representation();
2579 318871 : if (representation.IsSmi() && SmiValuesAre32Bits() &&
2580 : instr->hydrogen()->representation().IsInteger32()) {
2581 79428 : if (FLAG_debug_code) {
2582 12 : Register scratch = kScratchRegister;
2583 24 : __ Load(scratch, FieldOperand(object, offset), representation);
2584 12 : __ AssertSmi(scratch);
2585 : }
2586 :
2587 : // Read int value directly from upper half of the smi.
2588 : STATIC_ASSERT(kSmiTag == 0);
2589 : DCHECK(kSmiTagSize + kSmiShiftSize == 32);
2590 79428 : offset += kPointerSize / 2;
2591 : representation = Representation::Integer32();
2592 : }
2593 478886 : __ Load(result, FieldOperand(object, offset), representation);
2594 : }
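 :           // (Added sketch.) With 32-bit smis the integer payload occupies the
 :           // upper half of the 64-bit tagged word, so an int32 can be read
 :           // straight from offset + kPointerSize / 2 (little-endian x64):
 :           //
 :           //   int32_t LoadSmiFieldAsInt32(const char* field_addr) {  // hypothetical
 :           //     int32_t value;
 :           //     std::memcpy(&value, field_addr + 4, sizeof(value));  // upper half
 :           //     return value;
 :           //   }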
2595 :
2596 :
2597 5770 : void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2598 : Register function = ToRegister(instr->function());
2599 5770 : Register result = ToRegister(instr->result());
2600 :
2601 : // Get the prototype or initial map from the function.
2602 34620 : __ movp(result,
2603 5770 : FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2604 :
2605 : // Check that the function has a prototype or an initial map.
2606 5770 : __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2607 : DeoptimizeIf(equal, instr, DeoptimizeReason::kHole);
2608 :
2609 : // If the function does not have an initial map, we're done.
2610 : Label done;
2611 5770 : __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
2612 5770 : __ j(not_equal, &done, Label::kNear);
2613 :
2614 : // Get the prototype from the initial map.
2615 5770 : __ movp(result, FieldOperand(result, Map::kPrototypeOffset));
2616 :
2617 : // All done.
2618 5770 : __ bind(&done);
2619 5770 : }
2620 :
2621 :
2622 5202 : void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
2623 10404 : Register result = ToRegister(instr->result());
2624 5202 : __ LoadRoot(result, instr->index());
2625 5202 : }
2626 :
2627 :
2628 594 : void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2629 : Register arguments = ToRegister(instr->arguments());
2630 594 : Register result = ToRegister(instr->result());
2631 :
2632 778 : if (instr->length()->IsConstantOperand() &&
2633 : instr->index()->IsConstantOperand()) {
2634 : int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2635 : int32_t const_length = ToInteger32(LConstantOperand::cast(instr->length()));
2636 184 : if (const_index >= 0 && const_index < const_length) {
2637 : StackArgumentsAccessor args(arguments, const_length,
2638 : ARGUMENTS_DONT_CONTAIN_RECEIVER);
2639 1004 : __ movp(result, args.GetArgumentOperand(const_index));
2640 0 : } else if (FLAG_debug_code) {
2641 0 : __ int3();
2642 : }
2643 : } else {
2644 : Register length = ToRegister(instr->length());
2645 : // There are two words between the frame pointer and the last argument.
2646          :     // Subtracting the index from the length accounts for one of them; add one more.
2647 410 : if (instr->index()->IsRegister()) {
2648 410 : __ subl(length, ToRegister(instr->index()));
2649 : } else {
2650 0 : __ subl(length, ToOperand(instr->index()));
2651 : }
2652 : StackArgumentsAccessor args(arguments, length,
2653 : ARGUMENTS_DONT_CONTAIN_RECEIVER);
2654 820 : __ movp(result, args.GetArgumentOperand(0));
2655 : }
2656 594 : }
2657 :
2658 :
2659 2026 : void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
2660 : ElementsKind elements_kind = instr->elements_kind();
2661 : LOperand* key = instr->key();
2662 : if (kPointerSize == kInt32Size && !key->IsConstantOperand()) {
2663 : Register key_reg = ToRegister(key);
2664 : Representation key_representation =
2665 : instr->hydrogen()->key()->representation();
2666 : if (ExternalArrayOpRequiresTemp(key_representation, elements_kind)) {
2667 2107 : __ SmiToInteger64(key_reg, key_reg);
2668 : } else if (instr->hydrogen()->IsDehoisted()) {
2669          :       // Sign-extend the key because it could be a 32-bit negative value
2670          :       // and the dehoisted address computation happens in 64 bits.
2671 : __ movsxlq(key_reg, key_reg);
2672 : }
2673 : }
2674 : Operand operand(BuildFastArrayOperand(
2675 : instr->elements(),
2676 : key,
2677 : instr->hydrogen()->key()->representation(),
2678 : elements_kind,
2679 2026 : instr->base_offset()));
2680 :
2681 2026 : if (elements_kind == FLOAT32_ELEMENTS) {
2682 1394 : XMMRegister result(ToDoubleRegister(instr->result()));
2683 697 : __ Cvtss2sd(result, operand);
2684 1329 : } else if (elements_kind == FLOAT64_ELEMENTS) {
2685 464 : __ Movsd(ToDoubleRegister(instr->result()), operand);
2686 : } else {
2687 1097 : Register result(ToRegister(instr->result()));
2688 1097 : switch (elements_kind) {
2689 : case INT8_ELEMENTS:
2690 163 : __ movsxbl(result, operand);
2691 163 : break;
2692 : case UINT8_ELEMENTS:
2693 : case UINT8_CLAMPED_ELEMENTS:
2694 367 : __ movzxbl(result, operand);
2695 : break;
2696 : case INT16_ELEMENTS:
2697 100 : __ movsxwl(result, operand);
2698 100 : break;
2699 : case UINT16_ELEMENTS:
2700 112 : __ movzxwl(result, operand);
2701 : break;
2702 : case INT32_ELEMENTS:
2703 151 : __ movl(result, operand);
2704 : break;
2705 : case UINT32_ELEMENTS:
2706 204 : __ movl(result, operand);
2707 408 : if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
2708 81 : __ testl(result, result);
2709 : DeoptimizeIf(negative, instr, DeoptimizeReason::kNegativeValue);
2710 : }
2711 : break;
2712 : case FLOAT32_ELEMENTS:
2713 : case FLOAT64_ELEMENTS:
2714 : case FAST_ELEMENTS:
2715 : case FAST_SMI_ELEMENTS:
2716 : case FAST_DOUBLE_ELEMENTS:
2717 : case FAST_HOLEY_ELEMENTS:
2718 : case FAST_HOLEY_SMI_ELEMENTS:
2719 : case FAST_HOLEY_DOUBLE_ELEMENTS:
2720 : case DICTIONARY_ELEMENTS:
2721 : case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
2722 : case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
2723 : case FAST_STRING_WRAPPER_ELEMENTS:
2724 : case SLOW_STRING_WRAPPER_ELEMENTS:
2725 : case NO_ELEMENTS:
2726 0 : UNREACHABLE();
2727 : break;
2728 : }
2729 : }
2730 2026 : }
2731 :
2732 :
2733 5794 : void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
2734 11588 : XMMRegister result(ToDoubleRegister(instr->result()));
2735 : LOperand* key = instr->key();
2736 : if (kPointerSize == kInt32Size && !key->IsConstantOperand() &&
2737 : instr->hydrogen()->IsDehoisted()) {
2738          :     // Sign-extend the key because it could be a 32-bit negative value
2739          :     // and the dehoisted address computation happens in 64 bits.
2740 6232 : __ movsxlq(ToRegister(key), ToRegister(key));
2741 : }
2742 5794 : if (instr->hydrogen()->RequiresHoleCheck()) {
2743 : Operand hole_check_operand = BuildFastArrayOperand(
2744 : instr->elements(),
2745 : key,
2746 : instr->hydrogen()->key()->representation(),
2747 : FAST_DOUBLE_ELEMENTS,
2748 876 : instr->base_offset() + sizeof(kHoleNanLower32));
2749 438 : __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
2750 : DeoptimizeIf(equal, instr, DeoptimizeReason::kHole);
2751 : }
2752 :
2753 : Operand double_load_operand = BuildFastArrayOperand(
2754 : instr->elements(),
2755 : key,
2756 : instr->hydrogen()->key()->representation(),
2757 : FAST_DOUBLE_ELEMENTS,
2758 5794 : instr->base_offset());
2759 5794 : __ Movsd(result, double_load_operand);
2760 5794 : }
2761 :
2762 :
2763 41330 : void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
2764 39676 : HLoadKeyed* hinstr = instr->hydrogen();
2765 41330 : Register result = ToRegister(instr->result());
2766 : LOperand* key = instr->key();
2767 41330 : bool requires_hole_check = hinstr->RequiresHoleCheck();
2768 : Representation representation = hinstr->representation();
2769 41330 : int offset = instr->base_offset();
2770 :
2771 : if (kPointerSize == kInt32Size && !key->IsConstantOperand() &&
2772 : instr->hydrogen()->IsDehoisted()) {
2773          :     // Sign-extend the key because it could be a 32-bit negative value
2774          :     // and the dehoisted address computation happens in 64 bits.
2775 54949 : __ movsxlq(ToRegister(key), ToRegister(key));
2776 : }
2777 48483 : if (representation.IsInteger32() && SmiValuesAre32Bits() &&
2778 : hinstr->elements_kind() == FAST_SMI_ELEMENTS) {
2779 : DCHECK(!requires_hole_check);
2780 7153 : if (FLAG_debug_code) {
2781 0 : Register scratch = kScratchRegister;
2782 : __ Load(scratch,
2783 : BuildFastArrayOperand(instr->elements(),
2784 : key,
2785 : instr->hydrogen()->key()->representation(),
2786 : FAST_ELEMENTS,
2787 : offset),
2788 0 : Representation::Smi());
2789 0 : __ AssertSmi(scratch);
2790 : }
2791 : // Read int value directly from upper half of the smi.
2792 : STATIC_ASSERT(kSmiTag == 0);
2793 : DCHECK(kSmiTagSize + kSmiShiftSize == 32);
2794 7153 : offset += kPointerSize / 2;
2795 : }
2796 :
2797 : __ Load(result,
2798 : BuildFastArrayOperand(instr->elements(), key,
2799 : instr->hydrogen()->key()->representation(),
2800 41330 : FAST_ELEMENTS, offset),
2801 82660 : representation);
2802 :
2803 : // Check for the hole value.
2804 41330 : if (requires_hole_check) {
2805 1654 : if (IsFastSmiElementsKind(hinstr->elements_kind())) {
2806 493 : Condition smi = __ CheckSmi(result);
2807 : DeoptimizeIf(NegateCondition(smi), instr, DeoptimizeReason::kNotASmi);
2808 : } else {
2809 1161 : __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2810 : DeoptimizeIf(equal, instr, DeoptimizeReason::kHole);
2811 : }
2812 39676 : } else if (hinstr->hole_mode() == CONVERT_HOLE_TO_UNDEFINED) {
2813 : DCHECK(hinstr->elements_kind() == FAST_HOLEY_ELEMENTS);
2814 : Label done;
2815 2393 : __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2816 2393 : __ j(not_equal, &done);
2817 2393 : if (info()->IsStub()) {
2818 : // A stub can safely convert the hole to undefined only if the array
2819 : // protector cell contains (Smi) Isolate::kProtectorValid. Otherwise
2820 : // it needs to bail out.
2821 0 : __ LoadRoot(result, Heap::kArrayProtectorRootIndex);
2822 : __ Cmp(FieldOperand(result, PropertyCell::kValueOffset),
2823 0 : Smi::FromInt(Isolate::kProtectorValid));
2824 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kHole);
2825 : }
2826 4786 : __ Move(result, isolate()->factory()->undefined_value());
2827 2393 : __ bind(&done);
2828 : }
2829 41330 : }
2830 :
2831 :
2832 49150 : void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
2833 49150 : if (instr->is_fixed_typed_array()) {
2834 2026 : DoLoadKeyedExternalArray(instr);
2835 47124 : } else if (instr->hydrogen()->representation().IsDouble()) {
2836 5794 : DoLoadKeyedFixedDoubleArray(instr);
2837 : } else {
2838 41330 : DoLoadKeyedFixedArray(instr);
2839 : }
2840 49150 : }
2841 :
2842 :
2843 77340 : Operand LCodeGen::BuildFastArrayOperand(
2844 : LOperand* elements_pointer,
2845 : LOperand* key,
2846 : Representation key_representation,
2847 : ElementsKind elements_kind,
2848 : uint32_t offset) {
2849 77340 : Register elements_pointer_reg = ToRegister(elements_pointer);
2850 77340 : int shift_size = ElementsKindToShiftSize(elements_kind);
2851 77340 : if (key->IsConstantOperand()) {
2852 : int32_t constant_value = ToInteger32(LConstantOperand::cast(key));
2853 34540 : if (constant_value & 0xF0000000) {
2854 5 : Abort(kArrayIndexConstantValueTooBig);
2855 : }
2856 : return Operand(elements_pointer_reg,
2857 34540 : (constant_value << shift_size) + offset);
2858 : } else {
2859 : // Guaranteed by ArrayInstructionInterface::KeyedAccessIndexRequirement().
2860 : DCHECK(key_representation.IsInteger32());
2861 :
2862 42800 : ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
2863 : return Operand(elements_pointer_reg,
2864 : ToRegister(key),
2865 : scale_factor,
2866 85600 : offset);
2867 : }
2868 : }
2869 :
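 :           // (Added sketch.) Both return paths form the classic x64 effective
 :           // address base + index * scale + displacement; as plain arithmetic
 :           // (hypothetical helper):
 :           //
 :           //   intptr_t ElementAddress(intptr_t elements, intptr_t key, int shift,
 :           //                           uint32_t base_offset) {
 :           //     return elements + (key << shift) + base_offset;
 :           //   }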
2870 :
2871 717 : void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2872 717 : Register result = ToRegister(instr->result());
2873 :
2874 717 : if (instr->hydrogen()->from_inlined()) {
2875 4632 : __ leap(result, Operand(rsp, -kFPOnStackSize + -kPCOnStackSize));
2876 533 : } else if (instr->hydrogen()->arguments_adaptor()) {
2877 : // Check for arguments adapter frame.
2878 : Label done, adapted;
2879 1599 : __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2880 : __ cmpp(Operand(result, CommonFrameConstants::kContextOrFrameTypeOffset),
2881 1599 : Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2882 533 : __ j(equal, &adapted, Label::kNear);
2883 :
2884 : // No arguments adaptor frame.
2885 533 : __ movp(result, rbp);
2886 533 : __ jmp(&done, Label::kNear);
2887 :
2888 : // Arguments adaptor frame present.
2889 533 : __ bind(&adapted);
2890 1599 : __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2891 :
2892          :     // Result is the frame pointer of this frame if not adapted, or of the
2893          :     // real frame below the adaptor frame if adapted.
2894 533 : __ bind(&done);
2895 : } else {
2896 0 : __ movp(result, rbp);
2897 : }
2898 717 : }
2899 :
2900 :
2901 1066 : void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
2902 533 : Register result = ToRegister(instr->result());
2903 :
2904 : Label done;
2905 :
2906          :   // If there is no arguments adaptor frame, the number of arguments is fixed.
2907 533 : if (instr->elements()->IsRegister()) {
2908 3198 : __ cmpp(rbp, ToRegister(instr->elements()));
2909 : } else {
2910 0 : __ cmpp(rbp, ToOperand(instr->elements()));
2911 : }
2912 1066 : __ movl(result, Immediate(scope()->num_parameters()));
2913 533 : __ j(equal, &done, Label::kNear);
2914 :
2915 : // Arguments adaptor frame present. Get argument length from there.
2916 1599 : __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2917 : __ SmiToInteger32(result,
2918 : Operand(result,
2919 1066 : ArgumentsAdaptorFrameConstants::kLengthOffset));
2920 :
2921 : // Argument length is in result register.
2922 533 : __ bind(&done);
2923 533 : }
2924 :
2925 :
2926 4467 : void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
2927 : Register receiver = ToRegister(instr->receiver());
2928 : Register function = ToRegister(instr->function());
2929 :
2930 : // If the receiver is null or undefined, we have to pass the global
2931 : // object as a receiver to normal functions. Values have to be
2932 : // passed unchanged to builtins and strict-mode functions.
2933 : Label global_object, receiver_ok;
2934 4467 : Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
2935 :
2936 4467 : if (!instr->hydrogen()->known_function()) {
2937 : // Do not transform the receiver to object for strict mode
2938 : // functions.
2939 75939 : __ movp(kScratchRegister,
2940 4467 : FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2941 : __ testb(FieldOperand(kScratchRegister,
2942 : SharedFunctionInfo::kStrictModeByteOffset),
2943 8934 : Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
2944 4467 : __ j(not_equal, &receiver_ok, dist);
2945 :
2946 : // Do not transform the receiver to object for builtins.
2947 : __ testb(FieldOperand(kScratchRegister,
2948 : SharedFunctionInfo::kNativeByteOffset),
2949 8934 : Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
2950 4467 : __ j(not_equal, &receiver_ok, dist);
2951 : }
2952 :
2953 : // Normal function. Replace undefined or null with global receiver.
2954 4467 : __ CompareRoot(receiver, Heap::kNullValueRootIndex);
2955 4467 : __ j(equal, &global_object, dist);
2956 4467 : __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
2957 4467 : __ j(equal, &global_object, dist);
2958 :
2959 : // The receiver should be a JS object.
2960 4467 : Condition is_smi = __ CheckSmi(receiver);
2961 : DeoptimizeIf(is_smi, instr, DeoptimizeReason::kSmi);
2962 4467 : __ CmpObjectType(receiver, FIRST_JS_RECEIVER_TYPE, kScratchRegister);
2963 : DeoptimizeIf(below, instr, DeoptimizeReason::kNotAJavaScriptObject);
2964 :
2965 4467 : __ jmp(&receiver_ok, dist);
2966 4467 : __ bind(&global_object);
2967 4467 : __ movp(receiver, FieldOperand(function, JSFunction::kContextOffset));
2968 8934 : __ movp(receiver, ContextOperand(receiver, Context::NATIVE_CONTEXT_INDEX));
2969 8934 : __ movp(receiver, ContextOperand(receiver, Context::GLOBAL_PROXY_INDEX));
2970 :
2971 4467 : __ bind(&receiver_ok);
2972 4467 : }
2973 :
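 :           // (Added summary.) Receiver fix-up above: strict-mode and native
 :           // functions take the receiver unchanged; for other functions,
 :           // null/undefined become the global proxy, and any remaining
 :           // non-JSReceiver value deopts back to unoptimized code, which
 :           // performs the general ToObject wrapping.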
2974 :
2975 136 : void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2976 : Register receiver = ToRegister(instr->receiver());
2977 136 : Register function = ToRegister(instr->function());
2978 : Register length = ToRegister(instr->length());
2979 : Register elements = ToRegister(instr->elements());
2980 : DCHECK(receiver.is(rax)); // Used for parameter count.
2981 : DCHECK(function.is(rdi)); // Required by InvokeFunction.
2982 : DCHECK(ToRegister(instr->result()).is(rax));
2983 :
2984 : // Copy the arguments to this function possibly from the
2985 : // adaptor frame below it.
2986 : const uint32_t kArgumentsLimit = 1 * KB;
2987 1496 : __ cmpp(length, Immediate(kArgumentsLimit));
2988 : DeoptimizeIf(above, instr, DeoptimizeReason::kTooManyArguments);
2989 :
2990 136 : __ Push(receiver);
2991 136 : __ movp(receiver, length);
2992 :
2993 : // Loop through the arguments pushing them onto the execution
2994 : // stack.
2995 : Label invoke, loop;
2996 : // length is a small non-negative integer, due to the test above.
2997 136 : __ testl(length, length);
2998 136 : __ j(zero, &invoke, Label::kNear);
2999 136 : __ bind(&loop);
3000 : StackArgumentsAccessor args(elements, length,
3001 : ARGUMENTS_DONT_CONTAIN_RECEIVER);
3002 272 : __ Push(args.GetArgumentOperand(0));
3003 136 : __ decl(length);
3004 136 : __ j(not_zero, &loop);
3005 :
3006 : // Invoke the function.
3007 136 : __ bind(&invoke);
3008 :
3009 : InvokeFlag flag = CALL_FUNCTION;
3010 272 : if (instr->hydrogen()->tail_call_mode() == TailCallMode::kAllow) {
3011 : DCHECK(!info()->saves_caller_doubles());
3012 : // TODO(ishell): drop current frame before pushing arguments to the stack.
3013 : flag = JUMP_FUNCTION;
3014 : ParameterCount actual(rax);
3015 : // It is safe to use rbx, rcx and r8 as scratch registers here given that
3016 : // 1) we are not going to return to the caller function anyway,
3017 : // 2) rbx (expected number of arguments) will be initialized below.
3018 0 : PrepareForTailCall(actual, rbx, rcx, r8);
3019 : }
3020 :
3021 : DCHECK(instr->HasPointerMap());
3022 : LPointerMap* pointers = instr->pointer_map();
3023 : SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
3024 : ParameterCount actual(rax);
3025 136 : __ InvokeFunction(function, no_reg, actual, flag, safepoint_generator);
3026 136 : }
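// Sketch of the copy loop above (illustrative only): arguments are pushed
// from the highest index down, so argument 0 ends up nearest the return
// address, as the calling convention expects.
#include <cstdint>

void PushArgumentsForApply(const uint64_t* elements, int length,
                           void (*push)(uint64_t)) {
  for (int i = length; i > 0; --i) push(elements[i - 1]);
}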
3027 :
3028 :
3029 1346240 : void LCodeGen::DoPushArgument(LPushArgument* instr) {
3030 : LOperand* argument = instr->value();
3031 1346240 : EmitPushTaggedOperand(argument);
3032 1346240 : }
3033 :
3034 :
3035 200 : void LCodeGen::DoDrop(LDrop* instr) {
3036 200 : __ Drop(instr->count());
3037 200 : }
3038 :
3039 :
3040 21933 : void LCodeGen::DoThisFunction(LThisFunction* instr) {
3041 21933 : Register result = ToRegister(instr->result());
3042 43866 : __ movp(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
3043 21933 : }
3044 :
3045 :
3046 281454 : void LCodeGen::DoContext(LContext* instr) {
3047 281454 : Register result = ToRegister(instr->result());
3048 539375 : if (info()->IsOptimizing()) {
3049 773763 : __ movp(result, Operand(rbp, StandardFrameConstants::kContextOffset));
3050 : } else {
3051 : // If there is no frame, the context must be in rsi.
3052 : DCHECK(result.is(rsi));
3053 : }
3054 281454 : }
3055 :
3056 :
3057 12333 : void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
3058 : DCHECK(ToRegister(instr->context()).is(rsi));
3059 36999 : __ Push(instr->hydrogen()->declarations());
3060 24666 : __ Push(Smi::FromInt(instr->hydrogen()->flags()));
3061 12333 : __ Push(instr->hydrogen()->feedback_vector());
3062 12333 : CallRuntime(Runtime::kDeclareGlobals, instr);
3063 12333 : }
3064 :
3065 121583 : void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
3066 : int formal_parameter_count, int arity,
3067 : bool is_tail_call, LInstruction* instr) {
3068 : bool dont_adapt_arguments =
3069 121583 : formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3070 : bool can_invoke_directly =
3071 121583 : dont_adapt_arguments || formal_parameter_count == arity;
3072 :
3073 : Register function_reg = rdi;
3074 : LPointerMap* pointers = instr->pointer_map();
3075 :
3076 121583 : if (can_invoke_directly) {
3077 : // Change context.
3078 554829 : __ movp(rsi, FieldOperand(function_reg, JSFunction::kContextOffset));
3079 :
3080 : // Always initialize new target and number of actual arguments.
3081 108312 : __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
3082 216624 : __ Set(rax, arity);
3083 :
3084 : bool is_self_call = function.is_identical_to(info()->closure());
3085 :
3086 : // Invoke function.
3087 108311 : if (is_self_call) {
3088 : Handle<Code> self(reinterpret_cast<Code**>(__ CodeObject().location()));
3089 325 : if (is_tail_call) {
3090 13 : __ Jump(self, RelocInfo::CODE_TARGET);
3091 : } else {
3092 312 : __ Call(self, RelocInfo::CODE_TARGET);
3093 : }
3094 : } else {
3095 107986 : Operand target = FieldOperand(function_reg, JSFunction::kCodeEntryOffset);
3096 107986 : if (is_tail_call) {
3097 9 : __ Jump(target);
3098 : } else {
3099 107977 : __ Call(target);
3100 : }
3101 : }
3102 :
3103 108312 : if (!is_tail_call) {
3104 : // Set up deoptimization.
3105 108290 : RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
3106 : }
3107 : } else {
3108 : // We need to adapt arguments.
3109 : SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3110 : ParameterCount actual(arity);
3111 : ParameterCount expected(formal_parameter_count);
3112 13271 : InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION;
3113 13271 : __ InvokeFunction(function_reg, no_reg, expected, actual, flag, generator);
3114 : }
3115 121583 : }
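// Sketch of the eligibility test above: a known callee can be invoked
// directly, skipping the arguments adaptor, only under these conditions
// (kDontAdaptSentinel is a stand-in name for the real constant).
bool CanInvokeDirectly(int formal_parameter_count, int arity,
                       int kDontAdaptSentinel) {
  return formal_parameter_count == kDontAdaptSentinel ||
         formal_parameter_count == arity;
}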
3116 :
3117 :
3118 848446 : void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
3119 : DCHECK(ToRegister(instr->result()).is(rax));
3120 :
3121 848446 : if (instr->hydrogen()->IsTailCall()) {
3122 848446 : if (NeedsEagerFrame()) __ leave();
3123 :
3124 0 : if (instr->target()->IsConstantOperand()) {
3125 : LConstantOperand* target = LConstantOperand::cast(instr->target());
3126 0 : Handle<Code> code = Handle<Code>::cast(ToHandle(target));
3127 0 : __ jmp(code, RelocInfo::CODE_TARGET);
3128 : } else {
3129 : DCHECK(instr->target()->IsRegister());
3130 0 : Register target = ToRegister(instr->target());
3131 0 : __ addp(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
3132 0 : __ jmp(target);
3133 : }
3134 : } else {
3135 : LPointerMap* pointers = instr->pointer_map();
3136 : SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3137 :
3138 848446 : if (instr->target()->IsConstantOperand()) {
3139 : LConstantOperand* target = LConstantOperand::cast(instr->target());
3140 848446 : Handle<Code> code = Handle<Code>::cast(ToHandle(target));
3141 : generator.BeforeCall(__ CallSize(code));
3142 848446 : __ call(code, RelocInfo::CODE_TARGET);
3143 : } else {
3144 : DCHECK(instr->target()->IsRegister());
3145 : Register target = ToRegister(instr->target());
3146 : generator.BeforeCall(__ CallSize(target));
3147 0 : __ addp(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
3148 0 : __ call(target);
3149 : }
3150 848446 : generator.AfterCall();
3151 : }
3152 848446 : }
3153 :
3154 :
3155 79 : void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
3156 : Register input_reg = ToRegister(instr->value());
3157 : __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
3158 1207 : Heap::kHeapNumberMapRootIndex);
3159 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber);
3160 :
3161 : Label slow, allocated, done;
3162 : uint32_t available_regs = rax.bit() | rcx.bit() | rdx.bit() | rbx.bit();
3163 79 : available_regs &= ~input_reg.bit();
3164 79 : if (instr->context()->IsRegister()) {
3165 : // Make sure that the context isn't overwritten in the AllocateHeapNumber
3166 : // macro below.
3167 10 : available_regs &= ~ToRegister(instr->context()).bit();
3168 : }
3169 :
3170 : Register tmp =
3171 79 : Register::from_code(base::bits::CountTrailingZeros32(available_regs));
3172 79 : available_regs &= ~tmp.bit();
3173 : Register tmp2 =
3174 79 : Register::from_code(base::bits::CountTrailingZeros32(available_regs));
3175 :
3176 : // Preserve the value of all registers.
3177 : PushSafepointRegistersScope scope(this);
3178 :
3179 79 : __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
3180 : // Check the sign of the argument. If the argument is positive, just
3181 : // return it. We do not need to patch the stack since |input| and
3182 : // |result| are the same register and |input| will be restored
3183 : // unchanged by popping safepoint registers.
3184 79 : __ testl(tmp, Immediate(HeapNumber::kSignMask));
3185 79 : __ j(zero, &done);
3186 :
3187 79 : __ AllocateHeapNumber(tmp, tmp2, &slow);
3188 79 : __ jmp(&allocated, Label::kNear);
3189 :
3190 : // Slow case: Call the runtime system to do the number allocation.
3191 79 : __ bind(&slow);
3192 : CallRuntimeFromDeferred(
3193 79 : Runtime::kAllocateHeapNumber, 0, instr, instr->context());
3194 : // Set the pointer to the new heap number in tmp.
3195 101 : if (!tmp.is(rax)) __ movp(tmp, rax);
3196 : // Restore input_reg after call to runtime.
3197 79 : __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
3198 :
3199 79 : __ bind(&allocated);
3200 79 : __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
3201 79 : __ shlq(tmp2, Immediate(1));
3202 79 : __ shrq(tmp2, Immediate(1));
3203 79 : __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
3204 79 : __ StoreToSafepointRegisterSlot(input_reg, tmp);
3205 :
3206 79 : __ bind(&done);
3207 79 : }
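// Standalone sketch (not V8 code) of the shlq/shrq trick above: shifting
// the raw 64-bit pattern left and back right by one clears exactly the
// IEEE-754 sign bit, yielding the absolute value.
#include <cstdint>
#include <cstring>

double AbsViaSignBit(double v) {
  uint64_t bits;
  std::memcpy(&bits, &v, sizeof bits);
  bits = (bits << 1) >> 1;  // unsigned shift back fills the sign bit with 0
  std::memcpy(&v, &bits, sizeof v);
  return v;
}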
3208 :
3209 :
3210 52 : void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
3211 : Register input_reg = ToRegister(instr->value());
3212 208 : __ testl(input_reg, input_reg);
3213 : Label is_positive;
3214 52 : __ j(not_sign, &is_positive, Label::kNear);
3215 52 : __ negl(input_reg); // Sets flags.
3216 : DeoptimizeIf(negative, instr, DeoptimizeReason::kOverflow);
3217 52 : __ bind(&is_positive);
3218 52 : }
3219 :
3220 :
3221 79 : void LCodeGen::EmitSmiMathAbs(LMathAbs* instr) {
3222 : Register input_reg = ToRegister(instr->value());
3223 316 : __ testp(input_reg, input_reg);
3224 : Label is_positive;
3225 79 : __ j(not_sign, &is_positive, Label::kNear);
3226 79 : __ negp(input_reg); // Sets flags.
3227 : DeoptimizeIf(negative, instr, DeoptimizeReason::kOverflow);
3228 79 : __ bind(&is_positive);
3229 79 : }
3230 :
3231 :
3232 303 : void LCodeGen::DoMathAbs(LMathAbs* instr) {
3233 : // Class for deferred case.
3234 0 : class DeferredMathAbsTaggedHeapNumber final : public LDeferredCode {
3235 : public:
3236 : DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr)
3237 79 : : LDeferredCode(codegen), instr_(instr) { }
3238 79 : void Generate() override {
3239 79 : codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
3240 79 : }
3241 158 : LInstruction* instr() override { return instr_; }
3242 :
3243 : private:
3244 : LMathAbs* instr_;
3245 : };
3246 :
3247 : DCHECK(instr->value()->Equals(instr->result()));
3248 : Representation r = instr->hydrogen()->value()->representation();
3249 :
3250 303 : if (r.IsDouble()) {
3251 172 : XMMRegister scratch = double_scratch0();
3252 172 : XMMRegister input_reg = ToDoubleRegister(instr->value());
3253 581 : __ Xorpd(scratch, scratch);
3254 172 : __ Subsd(scratch, input_reg);
3255 172 : __ Andpd(input_reg, scratch);
3256 131 : } else if (r.IsInteger32()) {
3257 52 : EmitIntegerMathAbs(instr);
3258 79 : } else if (r.IsSmi()) {
3259 0 : EmitSmiMathAbs(instr);
3260 : } else { // Tagged case.
3261 : DeferredMathAbsTaggedHeapNumber* deferred =
3262 : new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
3263 79 : Register input_reg = ToRegister(instr->value());
3264 : // Smi check.
3265 158 : __ JumpIfNotSmi(input_reg, deferred->entry());
3266 79 : EmitSmiMathAbs(instr);
3267 79 : __ bind(deferred->exit());
3268 : }
3269 303 : }
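// Note on the double path above (informal): Xorpd zeroes the scratch
// register, Subsd leaves -x in it, and Andpd of x with -x produces |x|;
// the two operands agree in every bit except the sign bit, which the AND
// clears.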
3270 :
3271 4495 : void LCodeGen::DoMathFloorD(LMathFloorD* instr) {
3272 8990 : XMMRegister output_reg = ToDoubleRegister(instr->result());
3273 4495 : XMMRegister input_reg = ToDoubleRegister(instr->value());
3274 4495 : CpuFeatureScope scope(masm(), SSE4_1);
3275 4495 : __ Roundsd(output_reg, input_reg, kRoundDown);
3276 4495 : }
3277 :
3278 18289 : void LCodeGen::DoMathFloorI(LMathFloorI* instr) {
3279 18289 : XMMRegister xmm_scratch = double_scratch0();
3280 18289 : Register output_reg = ToRegister(instr->result());
3281 18289 : XMMRegister input_reg = ToDoubleRegister(instr->value());
3282 :
3283 18289 : if (CpuFeatures::IsSupported(SSE4_1)) {
3284 91661 : CpuFeatureScope scope(masm(), SSE4_1);
3285 36538 : if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3286 : // Deoptimize if minus zero.
3287 18237 : __ Movq(output_reg, input_reg);
3288 18237 : __ subq(output_reg, Immediate(1));
3289 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kMinusZero);
3290 : }
3291 18269 : __ Roundsd(xmm_scratch, input_reg, kRoundDown);
3292 18269 : __ Cvttsd2si(output_reg, xmm_scratch);
3293 18269 : __ cmpl(output_reg, Immediate(0x1));
3294 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
3295 : } else {
3296 : Label negative_sign, done;
3297 : // Deoptimize on unordered.
3298 20 : __ Xorpd(xmm_scratch, xmm_scratch); // Zero the register.
3299 20 : __ Ucomisd(input_reg, xmm_scratch);
3300 : DeoptimizeIf(parity_even, instr, DeoptimizeReason::kNaN);
3301 20 : __ j(below, &negative_sign, Label::kNear);
3302 :
3303 40 : if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3304 : // Check for negative zero.
3305 : Label positive_sign;
3306 20 : __ j(above, &positive_sign, Label::kNear);
3307 20 : __ Movmskpd(output_reg, input_reg);
3308 20 : __ testl(output_reg, Immediate(1));
3309 : DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero);
3310 20 : __ Set(output_reg, 0);
3311 20 : __ jmp(&done);
3312 20 : __ bind(&positive_sign);
3313 : }
3314 :
3315 : // Use truncating instruction (OK because input is positive).
3316 20 : __ Cvttsd2si(output_reg, input_reg);
3317 : // Overflow is signalled with minint.
3318 20 : __ cmpl(output_reg, Immediate(0x1));
3319 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
3320 20 : __ jmp(&done, Label::kNear);
3321 :
3322 : // Non-zero negative reaches here.
3323 20 : __ bind(&negative_sign);
3324 : // Truncate, then compare and compensate.
3325 20 : __ Cvttsd2si(output_reg, input_reg);
3326 20 : __ Cvtlsi2sd(xmm_scratch, output_reg);
3327 20 : __ Ucomisd(input_reg, xmm_scratch);
3328 20 : __ j(equal, &done, Label::kNear);
3329 20 : __ subl(output_reg, Immediate(1));
3330 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
3331 :
3332 20 : __ bind(&done);
3333 : }
3334 18289 : }
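// Standalone sketch (not V8 code) of the non-SSE4.1 fallback above:
// truncate toward zero, then compensate for negative non-integral inputs.
// NaN, -0 (when it matters) and out-of-range values deoptimize instead.
#include <cstdint>

int32_t FloorToInt32(double x) {
  int32_t t = static_cast<int32_t>(x);                 // CVTTSD2SI: truncate
  if (x < 0.0 && static_cast<double>(t) != x) t -= 1;  // compensate downward
  return t;
}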
3335 :
3336 627 : void LCodeGen::DoMathRoundD(LMathRoundD* instr) {
3337 : XMMRegister xmm_scratch = double_scratch0();
3338 1254 : XMMRegister output_reg = ToDoubleRegister(instr->result());
3339 627 : XMMRegister input_reg = ToDoubleRegister(instr->value());
3340 5016 : CpuFeatureScope scope(masm(), SSE4_1);
3341 : Label done;
3342 627 : __ Roundsd(output_reg, input_reg, kRoundUp);
3343 : __ Move(xmm_scratch, -0.5);
3344 627 : __ Addsd(xmm_scratch, output_reg);
3345 627 : __ Ucomisd(xmm_scratch, input_reg);
3346 627 : __ j(below_equal, &done, Label::kNear);
3347 : __ Move(xmm_scratch, 1.0);
3348 627 : __ Subsd(output_reg, xmm_scratch);
3349 627 : __ bind(&done);
3350 627 : }
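// Sketch of the rounding above, with std::ceil standing in for
// ROUNDSD/kRoundUp: round half toward +infinity by taking the ceiling and
// stepping back one when the ceiling overshoots by more than half.
#include <cmath>

double RoundHalfUpD(double x) {
  double c = std::ceil(x);
  return (c - 0.5 <= x) ? c : c - 1.0;
}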
3351 :
3352 695 : void LCodeGen::DoMathRoundI(LMathRoundI* instr) {
3353 695 : const XMMRegister xmm_scratch = double_scratch0();
3354 695 : Register output_reg = ToRegister(instr->result());
3355 695 : XMMRegister input_reg = ToDoubleRegister(instr->value());
3356 695 : XMMRegister input_temp = ToDoubleRegister(instr->temp());
3357 : static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5
3358 : static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5
3359 :
3360 : Label done, round_to_zero, below_one_half;
3361 695 : Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
3362 18044 : __ movq(kScratchRegister, one_half);
3363 695 : __ Movq(xmm_scratch, kScratchRegister);
3364 695 : __ Ucomisd(xmm_scratch, input_reg);
3365 695 : __ j(above, &below_one_half, Label::kNear);
3366 :
3367 : // CVTTSD2SI rounds towards zero; since 0.5 <= x, we use floor(0.5 + x).
3368 695 : __ Addsd(xmm_scratch, input_reg);
3369 695 : __ Cvttsd2si(output_reg, xmm_scratch);
3370 : // Overflow is signalled with minint.
3371 695 : __ cmpl(output_reg, Immediate(0x1));
3372 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
3373 695 : __ jmp(&done, dist);
3374 :
3375 695 : __ bind(&below_one_half);
3376 1390 : __ movq(kScratchRegister, minus_one_half);
3377 695 : __ Movq(xmm_scratch, kScratchRegister);
3378 695 : __ Ucomisd(xmm_scratch, input_reg);
3379 695 : __ j(below_equal, &round_to_zero, Label::kNear);
3380 :
3381 : // CVTTSD2SI rounds towards zero, so we compute ceil(x - (-0.5)) and then
3382 : // compare and compensate.
3383 695 : __ Movapd(input_temp, input_reg); // Do not alter input_reg.
3384 695 : __ Subsd(input_temp, xmm_scratch);
3385 695 : __ Cvttsd2si(output_reg, input_temp);
3386 : // Catch minint due to overflow, and to prevent overflow when compensating.
3387 695 : __ cmpl(output_reg, Immediate(0x1));
3388 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
3389 :
3390 695 : __ Cvtlsi2sd(xmm_scratch, output_reg);
3391 695 : __ Ucomisd(xmm_scratch, input_temp);
3392 695 : __ j(equal, &done, dist);
3393 695 : __ subl(output_reg, Immediate(1));
3394 : // No overflow because we already ruled out minint.
3395 695 : __ jmp(&done, dist);
3396 :
3397 695 : __ bind(&round_to_zero);
3398 : // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
3399 : // we can ignore the difference between a result of -0 and +0.
3400 1390 : if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3401 682 : __ Movq(output_reg, input_reg);
3402 682 : __ testq(output_reg, output_reg);
3403 : DeoptimizeIf(negative, instr, DeoptimizeReason::kMinusZero);
3404 : }
3405 695 : __ Set(output_reg, 0);
3406 695 : __ bind(&done);
3407 695 : }
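// Standalone sketch (not V8 code) of the integer rounding above; NaN and
// results outside int32 range deoptimize in the generated code instead of
// returning.
#include <cstdint>

int32_t RoundHalfUpI(double x) {
  if (x >= 0.5) return static_cast<int32_t>(x + 0.5);  // truncation == floor here
  if (x >= -0.5) return 0;  // [-0.5, 0.5): rounds to zero (-0 may deopt)
  double y = x + 0.5;                                  // y < 0
  int32_t t = static_cast<int32_t>(y);                 // truncation == ceil(y)
  return (static_cast<double>(t) == y) ? t : t - 1;    // i.e. floor(x + 0.5)
}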
3408 :
3409 :
3410 628 : void LCodeGen::DoMathFround(LMathFround* instr) {
3411 628 : XMMRegister input_reg = ToDoubleRegister(instr->value());
3412 1256 : XMMRegister output_reg = ToDoubleRegister(instr->result());
3413 628 : __ Cvtsd2ss(output_reg, input_reg);
3414 628 : __ Cvtss2sd(output_reg, output_reg);
3415 628 : }
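// The two conversions above implement Math.fround: one round trip through
// binary32 precision. Portable equivalent:
double Fround(double x) { return static_cast<double>(static_cast<float>(x)); }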
3416 :
3417 :
3418 61 : void LCodeGen::DoMathSqrt(LMathSqrt* instr) {
3419 122 : XMMRegister output = ToDoubleRegister(instr->result());
3420 61 : if (instr->value()->IsDoubleRegister()) {
3421 58 : XMMRegister input = ToDoubleRegister(instr->value());
3422 61 : __ Sqrtsd(output, input);
3423 : } else {
3424 3 : Operand input = ToOperand(instr->value());
3425 3 : __ Sqrtsd(output, input);
3426 : }
3427 61 : }
3428 :
3429 :
3430 41 : void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
3431 41 : XMMRegister xmm_scratch = double_scratch0();
3432 41 : XMMRegister input_reg = ToDoubleRegister(instr->value());
3433 : DCHECK(ToDoubleRegister(instr->result()).is(input_reg));
3434 :
3435 : // Note that according to ECMA-262 15.8.2.13:
3436 : // Math.pow(-Infinity, 0.5) == Infinity
3437 : // Math.sqrt(-Infinity) == NaN
3438 : Label done, sqrt;
3439 : // Check base for -Infinity. According to IEEE-754, double-precision
3440 : // -Infinity has the highest 12 bits set and the lowest 52 bits cleared.
3441 492 : __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000));
3442 41 : __ Movq(xmm_scratch, kScratchRegister);
3443 41 : __ Ucomisd(xmm_scratch, input_reg);
3444 : // Comparing -Infinity with NaN results in "unordered", which sets the
3445 : // zero flag as if both were equal. However, it also sets the carry flag.
3446 41 : __ j(not_equal, &sqrt, Label::kNear);
3447 41 : __ j(carry, &sqrt, Label::kNear);
3448 : // If input is -Infinity, return Infinity.
3449 41 : __ Xorpd(input_reg, input_reg);
3450 41 : __ Subsd(input_reg, xmm_scratch);
3451 41 : __ jmp(&done, Label::kNear);
3452 :
3453 : // Square root.
3454 41 : __ bind(&sqrt);
3455 41 : __ Xorpd(xmm_scratch, xmm_scratch);
3456 41 : __ Addsd(input_reg, xmm_scratch); // Convert -0 to +0.
3457 41 : __ Sqrtsd(input_reg, input_reg);
3458 41 : __ bind(&done);
3459 41 : }
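// Standalone sketch (not V8 code) of the special-casing above, with
// std::sqrt standing in for SQRTSD.
#include <cmath>
#include <limits>

double PowHalf(double x) {
  if (x == -std::numeric_limits<double>::infinity()) {
    return std::numeric_limits<double>::infinity();  // pow(-Inf, 0.5) == +Inf
  }
  return std::sqrt(x + 0.0);  // adding +0 turns -0 into +0 before the sqrt
}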
3460 :
3461 :
3462 827 : void LCodeGen::DoPower(LPower* instr) {
3463 : Representation exponent_type = instr->hydrogen()->right()->representation();
3464 : // Having marked this as a call, we can use any registers.
3465 : // Just make sure that the input/output registers are the expected ones.
3466 :
3467 827 : Register tagged_exponent = MathPowTaggedDescriptor::exponent();
3468 : DCHECK(!instr->right()->IsRegister() ||
3469 : ToRegister(instr->right()).is(tagged_exponent));
3470 : DCHECK(!instr->right()->IsDoubleRegister() ||
3471 : ToDoubleRegister(instr->right()).is(xmm1));
3472 : DCHECK(ToDoubleRegister(instr->left()).is(xmm2));
3473 : DCHECK(ToDoubleRegister(instr->result()).is(xmm3));
3474 :
3475 827 : if (exponent_type.IsSmi()) {
3476 1064 : MathPowStub stub(isolate(), MathPowStub::TAGGED);
3477 0 : __ CallStub(&stub);
3478 827 : } else if (exponent_type.IsTagged()) {
3479 : Label no_deopt;
3480 79 : __ JumpIfSmi(tagged_exponent, &no_deopt, Label::kNear);
3481 79 : __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, rcx);
3482 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber);
3483 79 : __ bind(&no_deopt);
3484 79 : MathPowStub stub(isolate(), MathPowStub::TAGGED);
3485 79 : __ CallStub(&stub);
3486 748 : } else if (exponent_type.IsInteger32()) {
3487 360 : MathPowStub stub(isolate(), MathPowStub::INTEGER);
3488 360 : __ CallStub(&stub);
3489 : } else {
3490 : DCHECK(exponent_type.IsDouble());
3491 388 : MathPowStub stub(isolate(), MathPowStub::DOUBLE);
3492 388 : __ CallStub(&stub);
3493 : }
3494 827 : }
3495 :
3496 60 : void LCodeGen::DoMathCos(LMathCos* instr) {
3497 : DCHECK(ToDoubleRegister(instr->value()).is(xmm0));
3498 : DCHECK(ToDoubleRegister(instr->result()).is(xmm0));
3499 120 : __ PrepareCallCFunction(1);
3500 120 : __ CallCFunction(ExternalReference::ieee754_cos_function(isolate()), 1);
3501 60 : }
3502 :
3503 58 : void LCodeGen::DoMathExp(LMathExp* instr) {
3504 : DCHECK(ToDoubleRegister(instr->value()).is(xmm0));
3505 : DCHECK(ToDoubleRegister(instr->result()).is(xmm0));
3506 116 : __ PrepareCallCFunction(1);
3507 116 : __ CallCFunction(ExternalReference::ieee754_exp_function(isolate()), 1);
3508 58 : }
3509 :
3510 90 : void LCodeGen::DoMathSin(LMathSin* instr) {
3511 : DCHECK(ToDoubleRegister(instr->value()).is(xmm0));
3512 : DCHECK(ToDoubleRegister(instr->result()).is(xmm0));
3513 180 : __ PrepareCallCFunction(1);
3514 180 : __ CallCFunction(ExternalReference::ieee754_sin_function(isolate()), 1);
3515 90 : }
3516 :
3517 291 : void LCodeGen::DoMathLog(LMathLog* instr) {
3518 : DCHECK(ToDoubleRegister(instr->value()).is(xmm0));
3519 : DCHECK(ToDoubleRegister(instr->result()).is(xmm0));
3520 582 : __ PrepareCallCFunction(1);
3521 582 : __ CallCFunction(ExternalReference::ieee754_log_function(isolate()), 1);
3522 291 : }
3523 :
3524 :
3525 42 : void LCodeGen::DoMathClz32(LMathClz32* instr) {
3526 42 : Register input = ToRegister(instr->value());
3527 84 : Register result = ToRegister(instr->result());
3528 :
3529 42 : __ Lzcntl(result, input);
3530 42 : }
3531 :
3532 36 : void LCodeGen::PrepareForTailCall(const ParameterCount& actual,
3533 : Register scratch1, Register scratch2,
3534 : Register scratch3) {
3535 : #if DEBUG
3536 : if (actual.is_reg()) {
3537 : DCHECK(!AreAliased(actual.reg(), scratch1, scratch2, scratch3));
3538 : } else {
3539 : DCHECK(!AreAliased(scratch1, scratch2, scratch3));
3540 : }
3541 : #endif
3542 36 : if (FLAG_code_comments) {
3543 0 : if (actual.is_reg()) {
3544 : Comment(";;; PrepareForTailCall, actual: %s {",
3545 : RegisterConfiguration::Crankshaft()->GetGeneralRegisterName(
3546 396 : actual.reg().code()));
3547 : } else {
3548 0 : Comment(";;; PrepareForTailCall, actual: %d {", actual.immediate());
3549 : }
3550 : }
3551 :
3552 : // Check if next frame is an arguments adaptor frame.
3553 36 : Register caller_args_count_reg = scratch1;
3554 : Label no_arguments_adaptor, formal_parameter_count_loaded;
3555 108 : __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3556 : __ cmpp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
3557 108 : Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
3558 36 : __ j(not_equal, &no_arguments_adaptor, Label::kNear);
3559 :
3560 : // Drop current frame and load arguments count from arguments adaptor frame.
3561 36 : __ movp(rbp, scratch2);
3562 : __ SmiToInteger32(
3563 : caller_args_count_reg,
3564 72 : Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
3565 36 : __ jmp(&formal_parameter_count_loaded, Label::kNear);
3566 :
3567 36 : __ bind(&no_arguments_adaptor);
3568 : // Load caller's formal parameter count.
3569 : __ movp(caller_args_count_reg,
3570 72 : Immediate(info()->literal()->parameter_count()));
3571 :
3572 36 : __ bind(&formal_parameter_count_loaded);
3573 : __ PrepareForTailCall(actual, caller_args_count_reg, scratch2, scratch3,
3574 36 : ReturnAddressState::kNotOnStack);
3575 36 : Comment(";;; }");
3576 36 : }
3577 :
3578 126083 : void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3579 247666 : HInvokeFunction* hinstr = instr->hydrogen();
3580 : DCHECK(ToRegister(instr->context()).is(rsi));
3581 : DCHECK(ToRegister(instr->function()).is(rdi));
3582 : DCHECK(instr->HasPointerMap());
3583 :
3584 126083 : bool is_tail_call = hinstr->tail_call_mode() == TailCallMode::kAllow;
3585 :
3586 126083 : if (is_tail_call) {
3587 : DCHECK(!info()->saves_caller_doubles());
3588 : ParameterCount actual(instr->arity());
3589 : // It is safe to use rbx, rcx and r8 as scratch registers here given that
3590 : // 1) we are not going to return to the caller function anyway,
3591 : // 2) rbx (expected number of arguments) will be initialized below.
3592 36 : PrepareForTailCall(actual, rbx, rcx, r8);
3593 : }
3594 :
3595 : Handle<JSFunction> known_function = hinstr->known_function();
3596 126083 : if (known_function.is_null()) {
3597 : LPointerMap* pointers = instr->pointer_map();
3598 : SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3599 : ParameterCount actual(instr->arity());
3600 4500 : InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION;
3601 4500 : __ InvokeFunction(rdi, no_reg, actual, flag, generator);
3602 : } else {
3603 : CallKnownFunction(known_function, hinstr->formal_parameter_count(),
3604 243166 : instr->arity(), is_tail_call, instr);
3605 : }
3606 126083 : }
3607 :
3608 :
3609 402 : void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
3610 : DCHECK(ToRegister(instr->context()).is(rsi));
3611 : DCHECK(ToRegister(instr->constructor()).is(rdi));
3612 : DCHECK(ToRegister(instr->result()).is(rax));
3613 :
3614 1195 : __ Set(rax, instr->arity());
3615 402 : __ Move(rbx, instr->hydrogen()->site());
3616 :
3617 402 : ElementsKind kind = instr->hydrogen()->elements_kind();
3618 : AllocationSiteOverrideMode override_mode =
3619 : (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE)
3620 : ? DISABLE_ALLOCATION_SITES
3621 402 : : DONT_OVERRIDE;
3622 :
3623 402 : if (instr->arity() == 0) {
3624 0 : ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
3625 0 : CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3626 402 : } else if (instr->arity() == 1) {
3627 : Label done;
3628 336 : if (IsFastPackedElementsKind(kind)) {
3629 : Label packed_case;
3630 : // We might need a holey elements kind: look at the first
3631 : // argument (the length); a non-zero length requires the holey stub.
3632 33 : __ movp(rcx, Operand(rsp, 0));
3633 11 : __ testp(rcx, rcx);
3634 11 : __ j(zero, &packed_case, Label::kNear);
3635 :
3636 : ElementsKind holey_kind = GetHoleyElementsKind(kind);
3637 : ArraySingleArgumentConstructorStub stub(isolate(),
3638 : holey_kind,
3639 11 : override_mode);
3640 11 : CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3641 11 : __ jmp(&done, Label::kNear);
3642 11 : __ bind(&packed_case);
3643 : }
3644 :
3645 336 : ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
3646 336 : CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3647 336 : __ bind(&done);
3648 : } else {
3649 66 : ArrayNArgumentsConstructorStub stub(isolate());
3650 66 : CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3651 : }
3652 402 : }
3653 :
3654 :
3655 103220 : void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3656 : DCHECK(ToRegister(instr->context()).is(rsi));
3657 103220 : CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles());
3658 103220 : }
3659 :
3660 :
3661 0 : void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) {
3662 : Register function = ToRegister(instr->function());
3663 : Register code_object = ToRegister(instr->code_object());
3664 0 : __ leap(code_object, FieldOperand(code_object, Code::kHeaderSize));
3665 0 : __ movp(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object);
3666 0 : }
3667 :
3668 :
3669 7160 : void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
3670 7160 : Register result = ToRegister(instr->result());
3671 7160 : Register base = ToRegister(instr->base_object());
3672 7160 : if (instr->offset()->IsConstantOperand()) {
3673 : LConstantOperand* offset = LConstantOperand::cast(instr->offset());
3674 10740 : __ leap(result, Operand(base, ToInteger32(offset)));
3675 : } else {
3676 3580 : Register offset = ToRegister(instr->offset());
3677 10740 : __ leap(result, Operand(base, offset, times_1, 0));
3678 : }
3679 7160 : }
3680 :
3681 :
3682 176411 : void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3683 176411 : HStoreNamedField* hinstr = instr->hydrogen();
3684 : Representation representation = instr->representation();
3685 :
3686 : HObjectAccess access = hinstr->access();
3687 : int offset = access.offset();
3688 :
3689 176411 : if (access.IsExternalMemory()) {
3690 : DCHECK(!hinstr->NeedsWriteBarrier());
3691 0 : Register value = ToRegister(instr->value());
3692 0 : if (instr->object()->IsConstantOperand()) {
3693 : DCHECK(value.is(rax));
3694 : LConstantOperand* object = LConstantOperand::cast(instr->object());
3695 395087 : __ store_rax(ToExternalReference(object));
3696 : } else {
3697 0 : Register object = ToRegister(instr->object());
3698 0 : __ Store(MemOperand(object, offset), value, representation);
3699 : }
3700 : return;
3701 : }
3702 :
3703 : Register object = ToRegister(instr->object());
3704 176411 : __ AssertNotSmi(object);
3705 :
3706 : DCHECK(!representation.IsSmi() ||
3707 : !instr->value()->IsConstantOperand() ||
3708 : IsInteger32Constant(LConstantOperand::cast(instr->value())));
3709 : if (!FLAG_unbox_double_fields && representation.IsDouble()) {
3710 : DCHECK(access.IsInobject());
3711 : DCHECK(!hinstr->has_transition());
3712 : DCHECK(!hinstr->NeedsWriteBarrier());
3713 : XMMRegister value = ToDoubleRegister(instr->value());
3714 : __ Movsd(FieldOperand(object, offset), value);
3715 : return;
3716 : }
3717 :
3718 176411 : if (hinstr->has_transition()) {
3719 9704 : Handle<Map> transition = hinstr->transition_map();
3720 9704 : AddDeprecationDependency(transition);
3721 9704 : if (!hinstr->NeedsWriteBarrierForMap()) {
3722 2288 : __ Move(FieldOperand(object, HeapObject::kMapOffset), transition);
3723 : } else {
3724 8560 : Register temp = ToRegister(instr->temp());
3725 8560 : __ Move(kScratchRegister, transition);
3726 8560 : __ movp(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister);
3727 : // Update the write barrier for the map field.
3728 : __ RecordWriteForMap(object,
3729 : kScratchRegister,
3730 : temp,
3731 8560 : kSaveFPRegs);
3732 : }
3733 : }
3734 :
3735 : // Do the store.
3736 : Register write_register = object;
3737 176411 : if (!access.IsInobject()) {
3738 : write_register = ToRegister(instr->temp());
3739 639 : __ movp(write_register, FieldOperand(object, JSObject::kPropertiesOffset));
3740 : }
3741 :
3742 217484 : if (representation.IsSmi() && SmiValuesAre32Bits() &&
3743 : hinstr->value()->representation().IsInteger32()) {
3744 : DCHECK(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY);
3745 1354 : if (FLAG_debug_code) {
3746 0 : Register scratch = kScratchRegister;
3747 0 : __ Load(scratch, FieldOperand(write_register, offset), representation);
3748 0 : __ AssertSmi(scratch);
3749 : }
3750 : // Store int value directly to upper half of the smi.
3751 : STATIC_ASSERT(kSmiTag == 0);
3752 : DCHECK(kSmiTagSize + kSmiShiftSize == 32);
3753 1354 : offset += kPointerSize / 2;
3754 : representation = Representation::Integer32();
3755 : }
3756 :
3757 176411 : Operand operand = FieldOperand(write_register, offset);
3758 :
3759 176411 : if (FLAG_unbox_double_fields && representation.IsDouble()) {
3760 : DCHECK(access.IsInobject());
3761 273 : XMMRegister value = ToDoubleRegister(instr->value());
3762 273 : __ Movsd(operand, value);
3763 :
3764 176138 : } else if (instr->value()->IsRegister()) {
3765 75824 : Register value = ToRegister(instr->value());
3766 75824 : __ Store(operand, value, representation);
3767 : } else {
3768 : LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
3769 100314 : if (IsInteger32Constant(operand_value)) {
3770 : DCHECK(!hinstr->NeedsWriteBarrier());
3771 : int32_t value = ToInteger32(operand_value);
3772 23596 : if (representation.IsSmi()) {
3773 10295 : __ Move(operand, Smi::FromInt(value));
3774 :
3775 : } else {
3776 13301 : __ movl(operand, Immediate(value));
3777 : }
3778 :
3779 76718 : } else if (IsExternalConstant(operand_value)) {
3780 : DCHECK(!hinstr->NeedsWriteBarrier());
3781 : ExternalReference ptr = ToExternalReference(operand_value);
3782 : __ Move(kScratchRegister, ptr);
3783 0 : __ movp(operand, kScratchRegister);
3784 : } else {
3785 76718 : Handle<Object> handle_value = ToHandle(operand_value);
3786 : DCHECK(!hinstr->NeedsWriteBarrier());
3787 76718 : __ Move(operand, handle_value);
3788 : }
3789 : }
3790 :
3791 176411 : if (hinstr->NeedsWriteBarrier()) {
3792 14802 : Register value = ToRegister(instr->value());
3793 29260 : Register temp = access.IsInobject() ? ToRegister(instr->temp()) : object;
3794 : // Update the write barrier for the object for in-object properties.
3795 : __ RecordWriteField(write_register,
3796 : offset,
3797 : value,
3798 : temp,
3799 : kSaveFPRegs,
3800 : EMIT_REMEMBERED_SET,
3801 : hinstr->SmiCheckForWriteBarrier(),
3802 14802 : hinstr->PointersToHereCheckForValue());
3803 : }
3804 : }
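// Standalone sketch (not V8 code) of the "store int value directly to the
// upper half of the smi" shortcut above: with 32-bit smi payloads the value
// sits in the high dword of the tagged 64-bit word (payload << 32, tag 0),
// so on little-endian x64 an int32 store at offset + 4 rewrites the payload
// and leaves the tag untouched.
#include <cstdint>
#include <cstring>

void StoreInt32ToInitializedSmiField(uint64_t* field, int32_t value) {
  std::memcpy(reinterpret_cast<char*>(field) + sizeof(int32_t),  // upper half
              &value, sizeof value);
}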
3805 :
3806 :
3807 30937 : void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3808 : Representation representation = instr->hydrogen()->length()->representation();
3809 : DCHECK(representation.Equals(instr->hydrogen()->index()->representation()));
3810 : DCHECK(representation.IsSmiOrInteger32());
3811 :
3812 30937 : Condition cc = instr->hydrogen()->allow_equality() ? below : below_equal;
3813 30937 : if (instr->length()->IsConstantOperand()) {
3814 : int32_t length = ToInteger32(LConstantOperand::cast(instr->length()));
3815 6384 : Register index = ToRegister(instr->index());
3816 6384 : if (representation.IsSmi()) {
3817 30937 : __ Cmp(index, Smi::FromInt(length));
3818 : } else {
3819 6384 : __ cmpl(index, Immediate(length));
3820 : }
3821 6384 : cc = CommuteCondition(cc);
3822 24553 : } else if (instr->index()->IsConstantOperand()) {
3823 : int32_t index = ToInteger32(LConstantOperand::cast(instr->index()));
3824 4362 : if (instr->length()->IsRegister()) {
3825 4335 : Register length = ToRegister(instr->length());
3826 4335 : if (representation.IsSmi()) {
3827 0 : __ Cmp(length, Smi::FromInt(index));
3828 : } else {
3829 4335 : __ cmpl(length, Immediate(index));
3830 : }
3831 : } else {
3832 27 : Operand length = ToOperand(instr->length());
3833 27 : if (representation.IsSmi()) {
3834 0 : __ Cmp(length, Smi::FromInt(index));
3835 : } else {
3836 27 : __ cmpl(length, Immediate(index));
3837 : }
3838 : }
3839 : } else {
3840 : Register index = ToRegister(instr->index());
3841 20191 : if (instr->length()->IsRegister()) {
3842 20091 : Register length = ToRegister(instr->length());
3843 20091 : if (representation.IsSmi()) {
3844 0 : __ cmpp(length, index);
3845 : } else {
3846 20091 : __ cmpl(length, index);
3847 : }
3848 : } else {
3849 100 : Operand length = ToOperand(instr->length());
3850 100 : if (representation.IsSmi()) {
3851 0 : __ cmpp(length, index);
3852 : } else {
3853 100 : __ cmpl(length, index);
3854 : }
3855 : }
3856 : }
3857 30948 : if (FLAG_debug_code && instr->hydrogen()->skip_check()) {
3858 : Label done;
3859 0 : __ j(NegateCondition(cc), &done, Label::kNear);
3860 0 : __ int3();
3861 0 : __ bind(&done);
3862 : } else {
3863 : DeoptimizeIf(cc, instr, DeoptimizeReason::kOutOfBounds);
3864 : }
3865 30937 : }
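// Sketch of the condition selection above: the deopt fires when the index
// is out of bounds, and when the constant ends up as the first operand of
// the cmpl the condition has to be commuted.
bool BoundsCheckFails(uint32_t index, uint32_t length, bool allow_equality) {
  return allow_equality ? index > length : index >= length;
}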
3866 :
3867 :
3868 1795 : void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
3869 : ElementsKind elements_kind = instr->elements_kind();
3870 : LOperand* key = instr->key();
3871 : if (kPointerSize == kInt32Size && !key->IsConstantOperand()) {
3872 : Register key_reg = ToRegister(key);
3873 : Representation key_representation =
3874 : instr->hydrogen()->key()->representation();
3875 : if (ExternalArrayOpRequiresTemp(key_representation, elements_kind)) {
3876 2080 : __ SmiToInteger64(key_reg, key_reg);
3877 : } else if (instr->hydrogen()->IsDehoisted()) {
3878 : // Sign-extend the key because it could be a 32-bit negative value
3879 : // and the dehoisted address computation happens in 64 bits.
3880 : __ movsxlq(key_reg, key_reg);
3881 : }
3882 : }
3883 : Operand operand(BuildFastArrayOperand(
3884 : instr->elements(),
3885 : key,
3886 : instr->hydrogen()->key()->representation(),
3887 : elements_kind,
3888 1795 : instr->base_offset()));
3889 :
3890 1795 : if (elements_kind == FLOAT32_ELEMENTS) {
3891 285 : XMMRegister value(ToDoubleRegister(instr->value()));
3892 285 : __ Cvtsd2ss(value, value);
3893 285 : __ Movss(operand, value);
3894 1510 : } else if (elements_kind == FLOAT64_ELEMENTS) {
3895 142 : __ Movsd(operand, ToDoubleRegister(instr->value()));
3896 : } else {
3897 : Register value(ToRegister(instr->value()));
3898 1368 : switch (elements_kind) {
3899 : case INT8_ELEMENTS:
3900 : case UINT8_ELEMENTS:
3901 : case UINT8_CLAMPED_ELEMENTS:
3902 715 : __ movb(operand, value);
3903 715 : break;
3904 : case INT16_ELEMENTS:
3905 : case UINT16_ELEMENTS:
3906 224 : __ movw(operand, value);
3907 224 : break;
3908 : case INT32_ELEMENTS:
3909 : case UINT32_ELEMENTS:
3910 429 : __ movl(operand, value);
3911 : break;
3912 : case FLOAT32_ELEMENTS:
3913 : case FLOAT64_ELEMENTS:
3914 : case FAST_ELEMENTS:
3915 : case FAST_SMI_ELEMENTS:
3916 : case FAST_DOUBLE_ELEMENTS:
3917 : case FAST_HOLEY_ELEMENTS:
3918 : case FAST_HOLEY_SMI_ELEMENTS:
3919 : case FAST_HOLEY_DOUBLE_ELEMENTS:
3920 : case DICTIONARY_ELEMENTS:
3921 : case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
3922 : case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
3923 : case FAST_STRING_WRAPPER_ELEMENTS:
3924 : case SLOW_STRING_WRAPPER_ELEMENTS:
3925 : case NO_ELEMENTS:
3926 0 : UNREACHABLE();
3927 : break;
3928 : }
3929 : }
3930 1795 : }
3931 :
3932 :
3933 5963 : void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
3934 5963 : XMMRegister value = ToDoubleRegister(instr->value());
3935 : LOperand* key = instr->key();
3936 : if (kPointerSize == kInt32Size && !key->IsConstantOperand() &&
3937 : instr->hydrogen()->IsDehoisted()) {
3938 : // Sign-extend the key because it could be a 32-bit negative value
3939 : // and the dehoisted address computation happens in 64 bits.
3940 6359 : __ movsxlq(ToRegister(key), ToRegister(key));
3941 : }
3942 5963 : if (instr->NeedsCanonicalization()) {
3943 198 : XMMRegister xmm_scratch = double_scratch0();
3944 : // Turn potential sNaN value into qNaN.
3945 198 : __ Xorpd(xmm_scratch, xmm_scratch);
3946 198 : __ Subsd(value, xmm_scratch);
3947 : }
3948 :
3949 : Operand double_store_operand = BuildFastArrayOperand(
3950 : instr->elements(),
3951 : key,
3952 : instr->hydrogen()->key()->representation(),
3953 : FAST_DOUBLE_ELEMENTS,
3954 5963 : instr->base_offset());
3955 :
3956 5963 : __ Movsd(double_store_operand, value);
3957 5963 : }
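// Sketch of the canonicalization above: subtracting +0 is an arithmetic
// operation, so a signaling NaN comes out quiet, while every other value,
// including -0, is unchanged.
double CanonicalizeNaN(double v) { return v - 0.0; }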
3958 :
3959 :
3960 19994 : void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
3961 : HStoreKeyed* hinstr = instr->hydrogen();
3962 : LOperand* key = instr->key();
3963 19994 : int offset = instr->base_offset();
3964 : Representation representation = hinstr->value()->representation();
3965 :
3966 : if (kPointerSize == kInt32Size && !key->IsConstantOperand() &&
3967 : instr->hydrogen()->IsDehoisted()) {
3968 : // Sign-extend the key because it could be a 32-bit negative value
3969 : // and the dehoisted address computation happens in 64 bits.
3970 33054 : __ movsxlq(ToRegister(key), ToRegister(key));
3971 : }
3972 19994 : if (representation.IsInteger32() && SmiValuesAre32Bits()) {
3973 : DCHECK(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY);
3974 : DCHECK(hinstr->elements_kind() == FAST_SMI_ELEMENTS);
3975 1459 : if (FLAG_debug_code) {
3976 0 : Register scratch = kScratchRegister;
3977 : __ Load(scratch,
3978 : BuildFastArrayOperand(instr->elements(),
3979 : key,
3980 : instr->hydrogen()->key()->representation(),
3981 : FAST_ELEMENTS,
3982 : offset),
3983 0 : Representation::Smi());
3984 0 : __ AssertSmi(scratch);
3985 : }
3986 : // Store int value directly to upper half of the smi.
3987 : STATIC_ASSERT(kSmiTag == 0);
3988 : DCHECK(kSmiTagSize + kSmiShiftSize == 32);
3989 1459 : offset += kPointerSize / 2;
3990 : }
3991 :
3992 : Operand operand =
3993 : BuildFastArrayOperand(instr->elements(),
3994 : key,
3995 : instr->hydrogen()->key()->representation(),
3996 : FAST_ELEMENTS,
3997 39988 : offset);
3998 19994 : if (instr->value()->IsRegister()) {
3999 17470 : __ Store(operand, ToRegister(instr->value()), representation);
4000 : } else {
4001 : LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4002 2524 : if (IsInteger32Constant(operand_value)) {
4003 : int32_t value = ToInteger32(operand_value);
4004 1275 : if (representation.IsSmi()) {
4005 659 : __ Move(operand, Smi::FromInt(value));
4006 :
4007 : } else {
4008 616 : __ movl(operand, Immediate(value));
4009 : }
4010 : } else {
4011 1249 : Handle<Object> handle_value = ToHandle(operand_value);
4012 1249 : __ Move(operand, handle_value);
4013 : }
4014 : }
4015 :
4016 19994 : if (hinstr->NeedsWriteBarrier()) {
4017 6530 : Register elements = ToRegister(instr->elements());
4018 : DCHECK(instr->value()->IsRegister());
4019 6530 : Register value = ToRegister(instr->value());
4020 : DCHECK(!key->IsConstantOperand());
4021 : SmiCheck check_needed = hinstr->value()->type().IsHeapObject()
4022 6530 : ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
4023 : // Compute address of modified element and store it into key register.
4024 : Register key_reg(ToRegister(key));
4025 6530 : __ leap(key_reg, operand);
4026 : __ RecordWrite(elements,
4027 : key_reg,
4028 : value,
4029 : kSaveFPRegs,
4030 : EMIT_REMEMBERED_SET,
4031 : check_needed,
4032 6530 : hinstr->PointersToHereCheckForValue());
4033 : }
4034 19994 : }
4035 :
4036 :
4037 27752 : void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
4038 27752 : if (instr->is_fixed_typed_array()) {
4039 1795 : DoStoreKeyedExternalArray(instr);
4040 25957 : } else if (instr->hydrogen()->value()->representation().IsDouble()) {
4041 5963 : DoStoreKeyedFixedDoubleArray(instr);
4042 : } else {
4043 19994 : DoStoreKeyedFixedArray(instr);
4044 : }
4045 27752 : }
4046 :
4047 :
4048 2543 : void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) {
4049 0 : class DeferredMaybeGrowElements final : public LDeferredCode {
4050 : public:
4051 : DeferredMaybeGrowElements(LCodeGen* codegen, LMaybeGrowElements* instr)
4052 2543 : : LDeferredCode(codegen), instr_(instr) {}
4053 2539 : void Generate() override { codegen()->DoDeferredMaybeGrowElements(instr_); }
4054 5078 : LInstruction* instr() override { return instr_; }
4055 :
4056 : private:
4057 : LMaybeGrowElements* instr_;
4058 : };
4059 :
4060 : Register result = rax;
4061 : DeferredMaybeGrowElements* deferred =
4062 12715 : new (zone()) DeferredMaybeGrowElements(this, instr);
4063 : LOperand* key = instr->key();
4064 : LOperand* current_capacity = instr->current_capacity();
4065 :
4066 : DCHECK(instr->hydrogen()->key()->representation().IsInteger32());
4067 : DCHECK(instr->hydrogen()->current_capacity()->representation().IsInteger32());
4068 : DCHECK(key->IsConstantOperand() || key->IsRegister());
4069 : DCHECK(current_capacity->IsConstantOperand() ||
4070 : current_capacity->IsRegister());
4071 :
4072 2637 : if (key->IsConstantOperand() && current_capacity->IsConstantOperand()) {
4073 : int32_t constant_key = ToInteger32(LConstantOperand::cast(key));
4074 : int32_t constant_capacity =
4075 : ToInteger32(LConstantOperand::cast(current_capacity));
4076 0 : if (constant_key >= constant_capacity) {
4077 : // Deferred case.
4078 0 : __ jmp(deferred->entry());
4079 : }
4080 2543 : } else if (key->IsConstantOperand()) {
4081 : int32_t constant_key = ToInteger32(LConstantOperand::cast(key));
4082 94 : __ cmpl(ToRegister(current_capacity), Immediate(constant_key));
4083 188 : __ j(less_equal, deferred->entry());
4084 2449 : } else if (current_capacity->IsConstantOperand()) {
4085 : int32_t constant_capacity =
4086 : ToInteger32(LConstantOperand::cast(current_capacity));
4087 0 : __ cmpl(ToRegister(key), Immediate(constant_capacity));
4088 0 : __ j(greater_equal, deferred->entry());
4089 : } else {
4090 2449 : __ cmpl(ToRegister(key), ToRegister(current_capacity));
4091 4898 : __ j(greater_equal, deferred->entry());
4092 : }
4093 :
4094 2543 : if (instr->elements()->IsRegister()) {
4095 2538 : __ movp(result, ToRegister(instr->elements()));
4096 : } else {
4097 10 : __ movp(result, ToOperand(instr->elements()));
4098 : }
4099 :
4100 2543 : __ bind(deferred->exit());
4101 2543 : }
4102 :
4103 :
4104 2539 : void LCodeGen::DoDeferredMaybeGrowElements(LMaybeGrowElements* instr) {
4105 : // TODO(3095996): Get rid of this. For now, we need to make the
4106 : // result register contain a valid pointer because it is already
4107 : // contained in the register pointer map.
4108 : Register result = rax;
4109 17683 : __ Move(result, Smi::kZero);
4110 :
4111 : // We have to call a stub.
4112 : {
4113 : PushSafepointRegistersScope scope(this);
4114 2539 : if (instr->object()->IsConstantOperand()) {
4115 : LConstantOperand* constant_object =
4116 : LConstantOperand::cast(instr->object());
4117 0 : if (IsSmiConstant(constant_object)) {
4118 : Smi* immediate = ToSmi(constant_object);
4119 : __ Move(result, immediate);
4120 : } else {
4121 0 : Handle<Object> handle_value = ToHandle(constant_object);
4122 0 : __ Move(result, handle_value);
4123 : }
4124 2539 : } else if (instr->object()->IsRegister()) {
4125 2539 : __ Move(result, ToRegister(instr->object()));
4126 : } else {
4127 0 : __ movp(result, ToOperand(instr->object()));
4128 : }
4129 :
4130 : LOperand* key = instr->key();
4131 2539 : if (key->IsConstantOperand()) {
4132 : __ Move(rbx, ToSmi(LConstantOperand::cast(key)));
4133 : } else {
4134 2449 : __ Move(rbx, ToRegister(key));
4135 2449 : __ Integer32ToSmi(rbx, rbx);
4136 : }
4137 :
4138 2539 : GrowArrayElementsStub stub(isolate(), instr->hydrogen()->kind());
4139 2539 : __ CallStub(&stub);
4140 2539 : RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
4141 2539 : __ StoreToSafepointRegisterSlot(result, result);
4142 : }
4143 :
4144 : // Deopt on smi, which means the elements array changed to dictionary mode.
4145 2539 : Condition is_smi = __ CheckSmi(result);
4146 : DeoptimizeIf(is_smi, instr, DeoptimizeReason::kSmi);
4147 2539 : }
4148 :
4149 :
4150 754 : void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
4151 : Register object_reg = ToRegister(instr->object());
4152 :
4153 : Handle<Map> from_map = instr->original_map();
4154 : Handle<Map> to_map = instr->transitioned_map();
4155 : ElementsKind from_kind = instr->from_kind();
4156 : ElementsKind to_kind = instr->to_kind();
4157 :
4158 : Label not_applicable;
4159 3368 : __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
4160 754 : __ j(not_equal, ¬_applicable);
4161 754 : if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
4162 : Register new_map_reg = ToRegister(instr->new_map_temp());
4163 : __ Move(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
4164 352 : __ movp(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
4165 : // Write barrier.
4166 : __ RecordWriteForMap(object_reg, new_map_reg, ToRegister(instr->temp()),
4167 352 : kDontSaveFPRegs);
4168 : } else {
4169 : DCHECK(object_reg.is(rax));
4170 : DCHECK(ToRegister(instr->context()).is(rsi));
4171 : PushSafepointRegistersScope scope(this);
4172 402 : __ Move(rbx, to_map);
4173 402 : TransitionElementsKindStub stub(isolate(), from_kind, to_kind);
4174 402 : __ CallStub(&stub);
4175 402 : RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
4176 : }
4177 754 : __ bind(¬_applicable);
4178 754 : }
4179 :
4180 :
4181 37 : void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4182 37 : Register object = ToRegister(instr->object());
4183 37 : Register temp = ToRegister(instr->temp());
4184 : Label no_memento_found;
4185 74 : __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found);
4186 : DeoptimizeIf(equal, instr, DeoptimizeReason::kMementoFound);
4187 37 : __ bind(&no_memento_found);
4188 37 : }
4189 :
4190 :
4191 21096 : void LCodeGen::DoStringAdd(LStringAdd* instr) {
4192 : DCHECK(ToRegister(instr->context()).is(rsi));
4193 : DCHECK(ToRegister(instr->left()).is(rdx));
4194 : DCHECK(ToRegister(instr->right()).is(rax));
4195 : StringAddStub stub(isolate(),
4196 : instr->hydrogen()->flags(),
4197 21096 : instr->hydrogen()->pretenure_flag());
4198 21096 : CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4199 21096 : }
4200 :
4201 :
4202 374 : void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
4203 0 : class DeferredStringCharCodeAt final : public LDeferredCode {
4204 : public:
4205 : DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
4206 374 : : LDeferredCode(codegen), instr_(instr) { }
4207 374 : void Generate() override { codegen()->DoDeferredStringCharCodeAt(instr_); }
4208 748 : LInstruction* instr() override { return instr_; }
4209 :
4210 : private:
4211 : LStringCharCodeAt* instr_;
4212 : };
4213 :
4214 : DeferredStringCharCodeAt* deferred =
4215 1122 : new(zone()) DeferredStringCharCodeAt(this, instr);
4216 :
4217 : StringCharLoadGenerator::Generate(masm(),
4218 : ToRegister(instr->string()),
4219 : ToRegister(instr->index()),
4220 374 : ToRegister(instr->result()),
4221 1122 : deferred->entry());
4222 374 : __ bind(deferred->exit());
4223 374 : }
4224 :
4225 :
4226 374 : void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
4227 374 : Register string = ToRegister(instr->string());
4228 748 : Register result = ToRegister(instr->result());
4229 :
4230 : // TODO(3095996): Get rid of this. For now, we need to make the
4231 : // result register contain a valid pointer because it is already
4232 : // contained in the register pointer map.
4233 2618 : __ Set(result, 0);
4234 :
4235 : PushSafepointRegistersScope scope(this);
4236 374 : __ Push(string);
4237 : // Push the index as a smi. This is safe because of the checks in
4238 : // DoStringCharCodeAt above.
4239 : STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
4240 374 : if (instr->index()->IsConstantOperand()) {
4241 : int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
4242 0 : __ Push(Smi::FromInt(const_index));
4243 : } else {
4244 374 : Register index = ToRegister(instr->index());
4245 374 : __ Integer32ToSmi(index, index);
4246 374 : __ Push(index);
4247 : }
4248 : CallRuntimeFromDeferred(
4249 374 : Runtime::kStringCharCodeAtRT, 2, instr, instr->context());
4250 374 : __ AssertSmi(rax);
4251 374 : __ SmiToInteger32(rax, rax);
4252 374 : __ StoreToSafepointRegisterSlot(result, rax);
4253 374 : }
4254 :
4255 :
4256 471 : void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
4257 0 : class DeferredStringCharFromCode final : public LDeferredCode {
4258 : public:
4259 : DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
4260 471 : : LDeferredCode(codegen), instr_(instr) { }
4261 471 : void Generate() override {
4262 471 : codegen()->DoDeferredStringCharFromCode(instr_);
4263 471 : }
4264 942 : LInstruction* instr() override { return instr_; }
4265 :
4266 : private:
4267 : LStringCharFromCode* instr_;
4268 : };
4269 :
4270 : DeferredStringCharFromCode* deferred =
4271 4239 : new(zone()) DeferredStringCharFromCode(this, instr);
4272 :
4273 : DCHECK(instr->hydrogen()->value()->representation().IsInteger32());
4274 : Register char_code = ToRegister(instr->char_code());
4275 471 : Register result = ToRegister(instr->result());
4276 : DCHECK(!char_code.is(result));
4277 :
4278 471 : __ cmpl(char_code, Immediate(String::kMaxOneByteCharCode));
4279 942 : __ j(above, deferred->entry());
4280 471 : __ movsxlq(char_code, char_code);
4281 471 : __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
4282 : __ movp(result, FieldOperand(result,
4283 : char_code, times_pointer_size,
4284 471 : FixedArray::kHeaderSize));
4285 471 : __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
4286 471 : __ j(equal, deferred->entry());
4287 471 : __ bind(deferred->exit());
4288 471 : }
4289 :
4290 :
4291 471 : void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
4292 471 : Register char_code = ToRegister(instr->char_code());
4293 942 : Register result = ToRegister(instr->result());
4294 :
4295 : // TODO(3095996): Get rid of this. For now, we need to make the
4296 : // result register contain a valid pointer because it is already
4297 : // contained in the register pointer map.
4298 1884 : __ Set(result, 0);
4299 :
4300 : PushSafepointRegistersScope scope(this);
4301 471 : __ Integer32ToSmi(char_code, char_code);
4302 471 : __ Push(char_code);
4303 : CallRuntimeFromDeferred(Runtime::kStringCharFromCode, 1, instr,
4304 471 : instr->context());
4305 471 : __ StoreToSafepointRegisterSlot(result, rax);
4306 471 : }
4307 :
4308 :
4309 22389 : void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4310 : LOperand* input = instr->value();
4311 : DCHECK(input->IsRegister() || input->IsStackSlot());
4312 22389 : LOperand* output = instr->result();
4313 : DCHECK(output->IsDoubleRegister());
4314 22389 : if (input->IsRegister()) {
4315 22389 : __ Cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
4316 : } else {
4317 8716 : __ Cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
4318 : }
4319 22389 : }
4320 :
4321 :
4322 650 : void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
4323 : LOperand* input = instr->value();
4324 650 : LOperand* output = instr->result();
4325 :
4326 650 : __ LoadUint32(ToDoubleRegister(output), ToRegister(input));
4327 650 : }
4328 :
4329 :
4330 0 : void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
4331 0 : class DeferredNumberTagI final : public LDeferredCode {
4332 : public:
4333 : DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
4334 : : LDeferredCode(codegen), instr_(instr) { }
4335 0 : void Generate() override {
4336 : codegen()->DoDeferredNumberTagIU(instr_, instr_->value(), instr_->temp1(),
4337 0 : instr_->temp2(), SIGNED_INT32);
4338 0 : }
4339 0 : LInstruction* instr() override { return instr_; }
4340 :
4341 : private:
4342 : LNumberTagI* instr_;
4343 : };
4344 :
4345 : LOperand* input = instr->value();
4346 : DCHECK(input->IsRegister() && input->Equals(instr->result()));
4347 0 : Register reg = ToRegister(input);
4348 :
4349 : if (SmiValuesAre32Bits()) {
4350 0 : __ Integer32ToSmi(reg, reg);
4351 : } else {
4352 : DCHECK(SmiValuesAre31Bits());
4353 : DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
4354 : __ Integer32ToSmi(reg, reg);
4355 : __ j(overflow, deferred->entry());
4356 : __ bind(deferred->exit());
4357 : }
4358 0 : }
4359 :
4360 :
4361 776 : void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
4362 0 : class DeferredNumberTagU final : public LDeferredCode {
4363 : public:
4364 : DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
4365 776 : : LDeferredCode(codegen), instr_(instr) { }
4366 776 : void Generate() override {
4367 : codegen()->DoDeferredNumberTagIU(instr_, instr_->value(), instr_->temp1(),
4368 1552 : instr_->temp2(), UNSIGNED_INT32);
4369 776 : }
4370 1552 : LInstruction* instr() override { return instr_; }
4371 :
4372 : private:
4373 : LNumberTagU* instr_;
4374 : };
4375 :
4376 : LOperand* input = instr->value();
4377 : DCHECK(input->IsRegister() && input->Equals(instr->result()));
4378 776 : Register reg = ToRegister(input);
4379 :
4380 3880 : DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
4381 776 : __ cmpl(reg, Immediate(Smi::kMaxValue));
4382 1552 : __ j(above, deferred->entry());
4383 776 : __ Integer32ToSmi(reg, reg);
4384 776 : __ bind(deferred->exit());
4385 776 : }
4386 :
4387 :
4388 776 : void LCodeGen::DoDeferredNumberTagIU(LInstruction* instr,
4389 : LOperand* value,
4390 : LOperand* temp1,
4391 : LOperand* temp2,
4392 : IntegerSignedness signedness) {
4393 : Label done, slow;
4394 : Register reg = ToRegister(value);
4395 776 : Register tmp = ToRegister(temp1);
4396 776 : XMMRegister temp_xmm = ToDoubleRegister(temp2);
4397 :
4398 : // Load the value into temp_xmm, which will be preserved across a potential
4399 : // call to the runtime (MacroAssembler::EnterExitFrameEpilogue preserves only
4400 : // allocatable XMM registers on x64).
4401 776 : if (signedness == SIGNED_INT32) {
4402 : DCHECK(SmiValuesAre31Bits());
4403 : // There was overflow, so bits 30 and 31 of the original integer
4404 : // disagree. Try to allocate a heap number in new space and store
4405 : // the value in there. If that fails, call the runtime system.
4406 7754 : __ SmiToInteger32(reg, reg);
4407 0 : __ xorl(reg, Immediate(0x80000000));
4408 0 : __ Cvtlsi2sd(temp_xmm, reg);
4409 : } else {
4410 : DCHECK(signedness == UNSIGNED_INT32);
4411 776 : __ LoadUint32(temp_xmm, reg);
4412 : }
4413 :
4414 776 : if (FLAG_inline_new) {
4415 773 : __ AllocateHeapNumber(reg, tmp, &slow);
4416 773 : __ jmp(&done, kPointerSize == kInt64Size ? Label::kNear : Label::kFar);
4417 : }
4418 :
4419 : // Slow case: Call the runtime system to do the number allocation.
4420 776 : __ bind(&slow);
4421 : {
4422 :     // Put a valid pointer value in the stack slot where the result
4423 :     // register is stored, as this register is in the pointer map but currently
4424 :     // contains an integer value.
4425 776 : __ Set(reg, 0);
4426 :
4427 : // Preserve the value of all registers.
4428 : PushSafepointRegistersScope scope(this);
4429 : // Reset the context register.
4430 776 : if (!reg.is(rsi)) {
4431 776 : __ Set(rsi, 0);
4432 : }
4433 776 : __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4434 : RecordSafepointWithRegisters(
4435 : instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4436 776 : __ StoreToSafepointRegisterSlot(reg, rax);
4437 : }
4438 :
4439 :   // Done. Store the value held in temp_xmm into the allocated heap
4440 :   // number.
4441 776 : __ bind(&done);
4442 1552 : __ Movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm);
4443 776 : }
4444 :
4445 :
4446 26492 : void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
4447 0 : class DeferredNumberTagD final : public LDeferredCode {
4448 : public:
4449 : DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
4450 26492 : : LDeferredCode(codegen), instr_(instr) { }
4451 26492 : void Generate() override { codegen()->DoDeferredNumberTagD(instr_); }
4452 52984 : LInstruction* instr() override { return instr_; }
4453 :
4454 : private:
4455 : LNumberTagD* instr_;
4456 : };
4457 :
4458 26492 : XMMRegister input_reg = ToDoubleRegister(instr->value());
4459 26492 : Register reg = ToRegister(instr->result());
4460 26492 : Register tmp = ToRegister(instr->temp());
4461 :
4462 105968 : DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
4463 26492 : if (FLAG_inline_new) {
4464 52984 : __ AllocateHeapNumber(reg, tmp, deferred->entry());
4465 : } else {
4466 0 : __ jmp(deferred->entry());
4467 : }
4468 26492 : __ bind(deferred->exit());
4469 52984 : __ Movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
4470 26492 : }
4471 :
4472 :
4473 26492 : void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
4474 : // TODO(3095996): Get rid of this. For now, we need to make the
4475 : // result register contain a valid pointer because it is already
4476 : // contained in the register pointer map.
4477 26492 : Register reg = ToRegister(instr->result());
4478 132266 : __ Move(reg, Smi::kZero);
4479 :
4480 : {
4481 : PushSafepointRegistersScope scope(this);
4482 : // Reset the context register.
4483 26492 : if (!reg.is(rsi)) {
4484 : __ Move(rsi, 0);
4485 : }
4486 26492 : __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4487 : RecordSafepointWithRegisters(
4488 : instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
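     :     // Stash the result in kScratchRegister, which is not part of the
     :     // safepoint register set and thus survives the register pop when the
     :     // scope exits.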
4489 26492 : __ movp(kScratchRegister, rax);
4490 : }
4491 26492 : __ movp(reg, kScratchRegister);
4492 26492 : }
4493 :
4494 :
4495 123017 : void LCodeGen::DoSmiTag(LSmiTag* instr) {
4496 : HChange* hchange = instr->hydrogen();
4497 123017 : Register input = ToRegister(instr->value());
4498 246034 : Register output = ToRegister(instr->result());
4499 369267 : if (hchange->CheckFlag(HValue::kCanOverflow) &&
4500 216 : hchange->value()->CheckFlag(HValue::kUint32)) {
4501 123233 : Condition is_smi = __ CheckUInteger32ValidSmiValue(input);
4502 : DeoptimizeIf(NegateCondition(is_smi), instr, DeoptimizeReason::kOverflow);
4503 : }
4504 123017 : __ Integer32ToSmi(output, input);
4505 123233 : if (hchange->CheckFlag(HValue::kCanOverflow) &&
4506 216 : !hchange->value()->CheckFlag(HValue::kUint32)) {
4507 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
4508 : }
4509 123017 : }
4510 :
4511 :
4512 8748 : void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
4513 : DCHECK(instr->value()->Equals(instr->result()));
4514 4374 : Register input = ToRegister(instr->value());
4515 4374 : if (instr->needs_check()) {
4516 8748 : Condition is_smi = __ CheckSmi(input);
4517 : DeoptimizeIf(NegateCondition(is_smi), instr, DeoptimizeReason::kNotASmi);
4518 : } else {
4519 4374 : __ AssertSmi(input);
4520 : }
4521 4374 : __ SmiToInteger32(input, input);
4522 4374 : }
4523 :
4524 :
4525 52946 : void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg,
4526 : XMMRegister result_reg, NumberUntagDMode mode) {
4527 : bool can_convert_undefined_to_nan = instr->truncating();
4528 : bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero();
4529 :
4530 : Label convert, load_smi, done;
4531 :
4532 52946 : if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
4533 : // Smi check.
4534 695084 : __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
4535 :
4536 : // Heap number map check.
4537 : __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
4538 101610 : Heap::kHeapNumberMapRootIndex);
4539 :
4540 :     // On x64 it is safe to load at the heap number offset before evaluating
4541 :     // the map check, since all heap objects are at least two words long.
4542 101610 : __ Movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
4543 :
4544 50805 : if (can_convert_undefined_to_nan) {
4545 46680 : __ j(not_equal, &convert, Label::kNear);
4546 : } else {
4547 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber);
4548 : }
4549 :
4550 50805 : if (deoptimize_on_minus_zero) {
4551 0 : XMMRegister xmm_scratch = double_scratch0();
4552 0 : __ Xorpd(xmm_scratch, xmm_scratch);
4553 0 : __ Ucomisd(xmm_scratch, result_reg);
4554 0 : __ j(not_equal, &done, Label::kNear);
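     :       // The value compares equal to zero; check the sign bit (bit 0 of the
     :       // Movmskpd mask) to distinguish -0.0 from +0.0.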
4555 0 : __ Movmskpd(kScratchRegister, result_reg);
4556 0 : __ testl(kScratchRegister, Immediate(1));
4557 : DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero);
4558 : }
4559 50805 : __ jmp(&done, Label::kNear);
4560 :
4561 50805 : if (can_convert_undefined_to_nan) {
4562 46680 : __ bind(&convert);
4563 :
4564 : // Convert undefined (and hole) to NaN. Compute NaN as 0/0.
4565 46680 : __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
4566 : DeoptimizeIf(not_equal, instr,
4567 : DeoptimizeReason::kNotAHeapNumberUndefined);
4568 :
4569 46680 : __ Xorpd(result_reg, result_reg);
4570 46680 : __ Divsd(result_reg, result_reg);
4571 46680 : __ jmp(&done, Label::kNear);
4572 : }
4573 : } else {
4574 : DCHECK(mode == NUMBER_CANDIDATE_IS_SMI);
4575 : }
4576 :
4577 :   // Smi to XMM conversion.
4578 52946 : __ bind(&load_smi);
4579 52946 : __ SmiToInteger32(kScratchRegister, input_reg);
4580 52946 : __ Cvtlsi2sd(result_reg, kScratchRegister);
4581 52946 : __ bind(&done);
4582 52946 : }
4583 :
4584 :
4585 69565 : void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr, Label* done) {
4586 : Register input_reg = ToRegister(instr->value());
4587 :
4588 69565 : if (instr->truncating()) {
4589 : Register input_map_reg = kScratchRegister;
4590 : Label truncate;
4591 : Label::Distance truncate_distance =
4592 16381 : DeoptEveryNTimes() ? Label::kFar : Label::kNear;
4593 375089 : __ movp(input_map_reg, FieldOperand(input_reg, HeapObject::kMapOffset));
4594 : __ JumpIfRoot(input_map_reg, Heap::kHeapNumberMapRootIndex, &truncate,
4595 : truncate_distance);
4596 16381 : __ CmpInstanceType(input_map_reg, ODDBALL_TYPE);
4597 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotANumberOrOddball);
4598 16381 : __ bind(&truncate);
4599 16381 : __ TruncateHeapNumberToI(input_reg, input_reg);
4600 : } else {
4601 53184 : XMMRegister scratch = ToDoubleRegister(instr->temp());
4602 : DCHECK(!scratch.is(double_scratch0()));
4603 : __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
4604 106368 : Heap::kHeapNumberMapRootIndex);
4605 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber);
4606 : __ Movsd(double_scratch0(),
4607 106368 : FieldOperand(input_reg, HeapNumber::kValueOffset));
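     :     // Convert to int32 and back; a mismatch on the compare below means the
     :     // double had no exact int32 representation, and a set parity flag
     :     // signals NaN.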
4608 53184 : __ Cvttsd2si(input_reg, double_scratch0());
4609 53184 : __ Cvtlsi2sd(scratch, input_reg);
4610 53184 : __ Ucomisd(double_scratch0(), scratch);
4611 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kLostPrecision);
4612 : DeoptimizeIf(parity_even, instr, DeoptimizeReason::kNaN);
4613 53184 : if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) {
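     :       // A zero result may stem from -0.0; deoptimize if the sign bit of
     :       // the original double is set.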
4614 6816 : __ testl(input_reg, input_reg);
4615 6816 : __ j(not_zero, done);
4616 6816 : __ Movmskpd(input_reg, double_scratch0());
4617 6816 : __ andl(input_reg, Immediate(1));
4618 : DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero);
4619 : }
4620 : }
4621 69565 : }
4622 :
4623 :
4624 69565 : void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
4625 0 : class DeferredTaggedToI final : public LDeferredCode {
4626 : public:
4627 : DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
4628 69565 : : LDeferredCode(codegen), instr_(instr) { }
4629 139130 : void Generate() override { codegen()->DoDeferredTaggedToI(instr_, done()); }
4630 139130 : LInstruction* instr() override { return instr_; }
4631 :
4632 : private:
4633 : LTaggedToI* instr_;
4634 : };
4635 :
4636 : LOperand* input = instr->value();
4637 : DCHECK(input->IsRegister());
4638 : DCHECK(input->Equals(instr->result()));
4639 69565 : Register input_reg = ToRegister(input);
4640 :
4641 69565 : if (instr->hydrogen()->value()->representation().IsSmi()) {
4642 208695 : __ SmiToInteger32(input_reg, input_reg);
4643 : } else {
4644 : DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
4645 139130 : __ JumpIfNotSmi(input_reg, deferred->entry());
4646 69565 : __ SmiToInteger32(input_reg, input_reg);
4647 69565 : __ bind(deferred->exit());
4648 : }
4649 69565 : }
4650 :
4651 :
4652 52946 : void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
4653 : LOperand* input = instr->value();
4654 : DCHECK(input->IsRegister());
4655 52946 : LOperand* result = instr->result();
4656 : DCHECK(result->IsDoubleRegister());
4657 :
4658 52946 : Register input_reg = ToRegister(input);
4659 52946 : XMMRegister result_reg = ToDoubleRegister(result);
4660 :
4661 : HValue* value = instr->hydrogen()->value();
4662 : NumberUntagDMode mode = value->representation().IsSmi()
4663 52946 : ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED;
4664 :
4665 52946 : EmitNumberUntagD(instr, input_reg, result_reg, mode);
4666 52946 : }
4667 :
4668 :
4669 8815 : void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
4670 : LOperand* input = instr->value();
4671 : DCHECK(input->IsDoubleRegister());
4672 8815 : LOperand* result = instr->result();
4673 : DCHECK(result->IsRegister());
4674 :
4675 8815 : XMMRegister input_reg = ToDoubleRegister(input);
4676 8815 : Register result_reg = ToRegister(result);
4677 :
4678 8815 : if (instr->truncating()) {
4679 18465 : __ TruncateDoubleToI(result_reg, input_reg);
4680 : } else {
4681 : Label lost_precision, is_nan, minus_zero, done;
4682 1930 : XMMRegister xmm_scratch = double_scratch0();
4683 1930 : Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
4684 : __ DoubleToI(result_reg, input_reg, xmm_scratch,
4685 : instr->hydrogen()->GetMinusZeroMode(), &lost_precision,
4686 1930 : &is_nan, &minus_zero, dist);
4687 1930 : __ jmp(&done, dist);
4688 1930 : __ bind(&lost_precision);
4689 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kLostPrecision);
4690 1930 : __ bind(&is_nan);
4691 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kNaN);
4692 1930 : __ bind(&minus_zero);
4693 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero);
4694 1930 : __ bind(&done);
4695 : }
4696 8815 : }
4697 :
4698 :
4699 82 : void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) {
4700 : LOperand* input = instr->value();
4701 : DCHECK(input->IsDoubleRegister());
4702 82 : LOperand* result = instr->result();
4703 : DCHECK(result->IsRegister());
4704 :
4705 82 : XMMRegister input_reg = ToDoubleRegister(input);
4706 82 : Register result_reg = ToRegister(result);
4707 :
4708 : Label lost_precision, is_nan, minus_zero, done;
4709 82 : XMMRegister xmm_scratch = double_scratch0();
4710 82 : Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
4711 : __ DoubleToI(result_reg, input_reg, xmm_scratch,
4712 : instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan,
4713 492 : &minus_zero, dist);
4714 82 : __ jmp(&done, dist);
4715 82 : __ bind(&lost_precision);
4716 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kLostPrecision);
4717 82 : __ bind(&is_nan);
4718 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kNaN);
4719 82 : __ bind(&minus_zero);
4720 : DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero);
4721 82 : __ bind(&done);
4722 82 : __ Integer32ToSmi(result_reg, result_reg);
4723 : DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow);
4724 82 : }
4725 :
4726 :
4727 21426 : void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
4728 : LOperand* input = instr->value();
4729 21426 : Condition cc = masm()->CheckSmi(ToRegister(input));
4730 : DeoptimizeIf(NegateCondition(cc), instr, DeoptimizeReason::kNotASmi);
4731 21426 : }
4732 :
4733 :
4734 86332 : void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
4735 86332 : if (!instr->hydrogen()->value()->type().IsHeapObject()) {
4736 : LOperand* input = instr->value();
4737 86332 : Condition cc = masm()->CheckSmi(ToRegister(input));
4738 : DeoptimizeIf(cc, instr, DeoptimizeReason::kSmi);
4739 : }
4740 86332 : }
4741 :
4742 :
4743 2360 : void LCodeGen::DoCheckArrayBufferNotNeutered(
4744 : LCheckArrayBufferNotNeutered* instr) {
4745 : Register view = ToRegister(instr->view());
4746 :
4747 4720 : __ movp(kScratchRegister,
4748 2360 : FieldOperand(view, JSArrayBufferView::kBufferOffset));
4749 : __ testb(FieldOperand(kScratchRegister, JSArrayBuffer::kBitFieldOffset),
4750 4720 : Immediate(1 << JSArrayBuffer::WasNeutered::kShift));
4751 : DeoptimizeIf(not_zero, instr, DeoptimizeReason::kOutOfBounds);
4752 2360 : }
4753 :
4754 :
4755 42686 : void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4756 : Register input = ToRegister(instr->value());
4757 :
4758 107486 : __ movp(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
4759 :
4760 85372 : if (instr->hydrogen()->is_interval_check()) {
4761 : InstanceType first;
4762 : InstanceType last;
4763 1290 : instr->hydrogen()->GetCheckInterval(&first, &last);
4764 :
4765 : __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
4766 5160 : Immediate(static_cast<int8_t>(first)));
4767 :
4768 :     // If there is only one type in the interval, check for equality.
4769 1290 : if (first == last) {
4770 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongInstanceType);
4771 : } else {
4772 : DeoptimizeIf(below, instr, DeoptimizeReason::kWrongInstanceType);
4773 : // Omit check for the last type.
4774 1290 : if (last != LAST_TYPE) {
4775 : __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
4776 0 : Immediate(static_cast<int8_t>(last)));
4777 : DeoptimizeIf(above, instr, DeoptimizeReason::kWrongInstanceType);
4778 : }
4779 : }
4780 : } else {
4781 : uint8_t mask;
4782 : uint8_t tag;
4783 41396 : instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
4784 :
4785 82792 : if (base::bits::IsPowerOfTwo32(mask)) {
4786 : DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag));
4787 : __ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
4788 91017 : Immediate(mask));
4789 30339 : DeoptimizeIf(tag == 0 ? not_zero : zero, instr,
4790 30339 : DeoptimizeReason::kWrongInstanceType);
4791 : } else {
4792 : __ movzxbl(kScratchRegister,
4793 11057 : FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
4794 22114 : __ andb(kScratchRegister, Immediate(mask));
4795 22114 : __ cmpb(kScratchRegister, Immediate(tag));
4796 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongInstanceType);
4797 : }
4798 : }
4799 42686 : }
4800 :
4801 :
4802 35054 : void LCodeGen::DoCheckValue(LCheckValue* instr) {
4803 35054 : Register reg = ToRegister(instr->value());
4804 35054 : __ Cmp(reg, instr->hydrogen()->object().handle());
4805 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kValueMismatch);
4806 35054 : }
4807 :
4808 :
4809 1542 : void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
4810 : Label deopt, done;
4811 :   // If the map is not deprecated, the migration attempt does not make sense.
4812 18504 : __ Push(object);
4813 1542 : __ movp(object, FieldOperand(object, HeapObject::kMapOffset));
4814 : __ testl(FieldOperand(object, Map::kBitField3Offset),
4815 1542 : Immediate(Map::Deprecated::kMask));
4816 1542 : __ Pop(object);
4817 1542 : __ j(zero, &deopt);
4818 :
4819 : {
4820 : PushSafepointRegistersScope scope(this);
4821 1542 : __ Push(object);
4822 :
4823 1542 : __ Set(rsi, 0);
4824 1542 : __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
4825 : RecordSafepointWithRegisters(
4826 : instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
4827 :
4828 1542 : __ testp(rax, Immediate(kSmiTagMask));
4829 : }
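     :   // The runtime returns the migrated object on success and a smi on
     :   // failure, so a non-smi result means migration succeeded.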
4830 1542 : __ j(not_zero, &done);
4831 :
4832 1542 : __ bind(&deopt);
4833 : DeoptimizeIf(always, instr, DeoptimizeReason::kInstanceMigrationFailed);
4834 :
4835 1542 : __ bind(&done);
4836 1542 : }
4837 :
4838 :
4839 181391 : void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4840 0 : class DeferredCheckMaps final : public LDeferredCode {
4841 : public:
4842 : DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
4843 3084 : : LDeferredCode(codegen), instr_(instr), object_(object) {
4844 1542 : SetExit(check_maps());
4845 : }
4846 1542 : void Generate() override {
4847 1542 : codegen()->DoDeferredInstanceMigration(instr_, object_);
4848 1542 : }
4849 : Label* check_maps() { return &check_maps_; }
4850 3084 : LInstruction* instr() override { return instr_; }
4851 :
4852 : private:
4853 : LCheckMaps* instr_;
4854 : Label check_maps_;
4855 : Register object_;
4856 : };
4857 :
4858 362782 : if (instr->hydrogen()->IsStabilityCheck()) {
4859 442230 : const UniqueSet<Map>* maps = instr->hydrogen()->maps();
4860 442230 : for (int i = 0; i < maps->size(); ++i) {
4861 266068 : AddStabilityDependency(maps->at(i).handle());
4862 : }
4863 110211 : return;
4864 : }
4865 :
4866 : LOperand* input = instr->value();
4867 : DCHECK(input->IsRegister());
4868 : Register reg = ToRegister(input);
4869 :
4870 : DeferredCheckMaps* deferred = NULL;
4871 71180 : if (instr->hydrogen()->HasMigrationTarget()) {
4872 : deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
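     :     // The deferred code exits to check_maps_, so the map check is retried
     :     // after a migration attempt.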
4873 1542 : __ bind(deferred->check_maps());
4874 : }
4875 :
4876 221718 : const UniqueSet<Map>* maps = instr->hydrogen()->maps();
4877 : Label success;
4878 150538 : for (int i = 0; i < maps->size() - 1; i++) {
4879 4089 : Handle<Map> map = maps->at(i).handle();
4880 4089 : __ CompareMap(reg, map);
4881 4089 : __ j(equal, &success, Label::kNear);
4882 : }
4883 :
4884 71180 : Handle<Map> map = maps->at(maps->size() - 1).handle();
4885 71180 : __ CompareMap(reg, map);
4886 142360 : if (instr->hydrogen()->HasMigrationTarget()) {
4887 3084 : __ j(not_equal, deferred->entry());
4888 : } else {
4889 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongMap);
4890 : }
4891 :
4892 71180 : __ bind(&success);
4893 : }
4894 :
4895 :
4896 223 : void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4897 223 : XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
4898 223 : XMMRegister xmm_scratch = double_scratch0();
4899 446 : Register result_reg = ToRegister(instr->result());
4900 223 : __ ClampDoubleToUint8(value_reg, xmm_scratch, result_reg);
4901 223 : }
4902 :
4903 :
4904 84 : void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
4905 : DCHECK(instr->unclamped()->Equals(instr->result()));
4906 168 : Register value_reg = ToRegister(instr->result());
4907 84 : __ ClampUint8(value_reg);
4908 84 : }
4909 :
4910 :
4911 21 : void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4912 : DCHECK(instr->unclamped()->Equals(instr->result()));
4913 : Register input_reg = ToRegister(instr->unclamped());
4914 21 : XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm());
4915 21 : XMMRegister xmm_scratch = double_scratch0();
4916 : Label is_smi, done, heap_number;
4917 21 : Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
4918 294 : __ JumpIfSmi(input_reg, &is_smi, dist);
4919 :
4920 : // Check for heap number
4921 : __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
4922 63 : factory()->heap_number_map());
4923 21 : __ j(equal, &heap_number, Label::kNear);
4924 :
4925 : // Check for undefined. Undefined is converted to zero for clamping
4926 : // conversions.
4927 21 : __ Cmp(input_reg, factory()->undefined_value());
4928 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumberUndefined);
4929 21 : __ xorl(input_reg, input_reg);
4930 21 : __ jmp(&done, Label::kNear);
4931 :
4932 : // Heap number
4933 21 : __ bind(&heap_number);
4934 42 : __ Movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset));
4935 21 : __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg);
4936 21 : __ jmp(&done, Label::kNear);
4937 :
4938 :   // Smi
4939 21 : __ bind(&is_smi);
4940 21 : __ SmiToInteger32(input_reg, input_reg);
4941 21 : __ ClampUint8(input_reg);
4942 :
4943 21 : __ bind(&done);
4944 21 : }
4945 :
4946 :
4947 20528 : void LCodeGen::DoAllocate(LAllocate* instr) {
4948 0 : class DeferredAllocate final : public LDeferredCode {
4949 : public:
4950 : DeferredAllocate(LCodeGen* codegen, LAllocate* instr)
4951 20528 : : LDeferredCode(codegen), instr_(instr) { }
4952 20528 : void Generate() override { codegen()->DoDeferredAllocate(instr_); }
4953 41056 : LInstruction* instr() override { return instr_; }
4954 :
4955 : private:
4956 : LAllocate* instr_;
4957 : };
4958 :
4959 : DeferredAllocate* deferred =
4960 62237 : new(zone()) DeferredAllocate(this, instr);
4961 :
4962 20528 : Register result = ToRegister(instr->result());
4963 : Register temp = ToRegister(instr->temp());
4964 :
4965 : // Allocate memory for the object.
4966 : AllocationFlags flags = NO_ALLOCATION_FLAGS;
4967 41056 : if (instr->hydrogen()->MustAllocateDoubleAligned()) {
4968 : flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
4969 : }
4970 20528 : if (instr->hydrogen()->IsOldSpaceAllocation()) {
4971 : DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
4972 8 : flags = static_cast<AllocationFlags>(flags | PRETENURE);
4973 : }
4974 :
4975 20528 : if (instr->hydrogen()->IsAllocationFoldingDominator()) {
4976 2035 : flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDING_DOMINATOR);
4977 : }
4978 : DCHECK(!instr->hydrogen()->IsAllocationFolded());
4979 :
4980 20528 : if (instr->size()->IsConstantOperand()) {
4981 : int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
4982 12838 : CHECK(size <= kMaxRegularHeapObjectSize);
4983 25676 : __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
4984 : } else {
4985 7690 : Register size = ToRegister(instr->size());
4986 15380 : __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
4987 : }
4988 :
4989 20528 : __ bind(deferred->exit());
4990 :
4991 41056 : if (instr->hydrogen()->MustPrefillWithFiller()) {
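     :     // Fill every word of the new object except the first with one-pointer
     :     // filler maps, keeping the heap iterable until the real fields are
     :     // initialized.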
4992 129 : if (instr->size()->IsConstantOperand()) {
4993 : int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
4994 242 : __ movl(temp, Immediate((size / kPointerSize) - 1));
4995 : } else {
4996 : temp = ToRegister(instr->size());
4997 8 : __ sarp(temp, Immediate(kPointerSizeLog2));
4998 8 : __ decl(temp);
4999 : }
5000 : Label loop;
5001 129 : __ bind(&loop);
5002 : __ Move(FieldOperand(result, temp, times_pointer_size, 0),
5003 387 : isolate()->factory()->one_pointer_filler_map());
5004 129 : __ decl(temp);
5005 129 : __ j(not_zero, &loop);
5006 : }
5007 20528 : }
5008 :
5009 4821 : void LCodeGen::DoFastAllocate(LFastAllocate* instr) {
5010 : DCHECK(instr->hydrogen()->IsAllocationFolded());
5011 : DCHECK(!instr->hydrogen()->IsAllocationFoldingDominator());
5012 9642 : Register result = ToRegister(instr->result());
5013 4821 : Register temp = ToRegister(instr->temp());
5014 :
5015 : AllocationFlags flags = ALLOCATION_FOLDED;
5016 9642 : if (instr->hydrogen()->MustAllocateDoubleAligned()) {
5017 : flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
5018 : }
5019 4821 : if (instr->hydrogen()->IsOldSpaceAllocation()) {
5020 : DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5021 44 : flags = static_cast<AllocationFlags>(flags | PRETENURE);
5022 : }
5023 4821 : if (instr->size()->IsConstantOperand()) {
5024 : int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5025 4821 : CHECK(size <= kMaxRegularHeapObjectSize);
5026 4821 : __ FastAllocate(size, result, temp, flags);
5027 : } else {
5028 0 : Register size = ToRegister(instr->size());
5029 0 : __ FastAllocate(size, result, temp, flags);
5030 : }
5031 4821 : }
5032 :
5033 20528 : void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
5034 20528 : Register result = ToRegister(instr->result());
5035 :
5036 : // TODO(3095996): Get rid of this. For now, we need to make the
5037 : // result register contain a valid pointer because it is already
5038 : // contained in the register pointer map.
5039 95907 : __ Move(result, Smi::kZero);
5040 :
5041 : PushSafepointRegistersScope scope(this);
5042 20528 : if (instr->size()->IsRegister()) {
5043 7690 : Register size = ToRegister(instr->size());
5044 : DCHECK(!size.is(result));
5045 7690 : __ Integer32ToSmi(size, size);
5046 7690 : __ Push(size);
5047 : } else {
5048 : int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5049 12838 : __ Push(Smi::FromInt(size));
5050 : }
5051 :
5052 : int flags = 0;
5053 41056 : if (instr->hydrogen()->IsOldSpaceAllocation()) {
5054 : DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5055 : flags = AllocateTargetSpace::update(flags, OLD_SPACE);
5056 : } else {
5057 : flags = AllocateTargetSpace::update(flags, NEW_SPACE);
5058 : }
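     :   // Pass the encoded target space to the runtime as a smi.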
5059 20528 : __ Push(Smi::FromInt(flags));
5060 :
5061 : CallRuntimeFromDeferred(
5062 20528 : Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
5063 20528 : __ StoreToSafepointRegisterSlot(result, rax);
5064 :
5065 41056 : if (instr->hydrogen()->IsAllocationFoldingDominator()) {
5066 : AllocationFlags allocation_flags = NO_ALLOCATION_FLAGS;
5067 2035 : if (instr->hydrogen()->IsOldSpaceAllocation()) {
5068 : DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5069 7 : allocation_flags = static_cast<AllocationFlags>(flags | PRETENURE);
5070 : }
5071 :     // If the allocation-folding dominator's allocation triggered a GC, the
5072 :     // allocation happened in the runtime. We have to reset the top pointer
5073 :     // to virtually undo the allocation.
5074 : ExternalReference allocation_top =
5075 2035 : AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
5076 2035 : __ subp(rax, Immediate(kHeapObjectTag));
5077 2035 : __ Store(allocation_top, rax);
5078 2035 : __ addp(rax, Immediate(kHeapObjectTag));
5079 : }
5080 20528 : }
5081 :
5082 :
5083 38130 : void LCodeGen::DoTypeof(LTypeof* instr) {
5084 : DCHECK(ToRegister(instr->context()).is(rsi));
5085 : DCHECK(ToRegister(instr->value()).is(rbx));
5086 : Label end, do_call;
5087 38130 : Register value_register = ToRegister(instr->value());
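     :   // Fast path: smis are always numbers, so typeof yields "number" without
     :   // calling the Typeof stub.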
5088 190650 : __ JumpIfNotSmi(value_register, &do_call);
5089 76260 : __ Move(rax, isolate()->factory()->number_string());
5090 38130 : __ jmp(&end);
5091 38130 : __ bind(&do_call);
5092 38130 : Callable callable = CodeFactory::Typeof(isolate());
5093 : CallCode(callable.code(), RelocInfo::CODE_TARGET, instr);
5094 38130 : __ bind(&end);
5095 38130 : }
5096 :
5097 :
5098 1346240 : void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
5099 : DCHECK(!operand->IsDoubleRegister());
5100 1346240 : if (operand->IsConstantOperand()) {
5101 1346240 : __ Push(ToHandle(LConstantOperand::cast(operand)));
5102 645835 : } else if (operand->IsRegister()) {
5103 336413 : __ Push(ToRegister(operand));
5104 : } else {
5105 618844 : __ Push(ToOperand(operand));
5106 : }
5107 1346240 : }
5108 :
5109 :
5110 42191 : void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
5111 42191 : Register input = ToRegister(instr->value());
5112 42191 : Condition final_branch_condition = EmitTypeofIs(instr, input);
5113 42191 : if (final_branch_condition != no_condition) {
5114 42191 : EmitBranch(instr, final_branch_condition);
5115 : }
5116 42191 : }
5117 :
5118 :
5119 42191 : Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) {
5120 42191 : Label* true_label = instr->TrueLabel(chunk_);
5121 42191 : Label* false_label = instr->FalseLabel(chunk_);
5122 42191 : Handle<String> type_name = instr->type_literal();
5123 42191 : int left_block = instr->TrueDestination(chunk_);
5124 42191 : int right_block = instr->FalseDestination(chunk_);
5125 214782 : int next_block = GetNextEmittedBlock();
5126 :
5127 : Label::Distance true_distance = left_block == next_block ? Label::kNear
5128 42191 : : Label::kFar;
5129 : Label::Distance false_distance = right_block == next_block ? Label::kNear
5130 42191 : : Label::kFar;
5131 : Condition final_branch_condition = no_condition;
5132 42191 : Factory* factory = isolate()->factory();
5133 42191 : if (String::Equals(type_name, factory->number_string())) {
5134 14091 : __ JumpIfSmi(input, true_label, true_distance);
5135 : __ CompareRoot(FieldOperand(input, HeapObject::kMapOffset),
5136 28182 : Heap::kHeapNumberMapRootIndex);
5137 :
5138 : final_branch_condition = equal;
5139 :
5140 28100 : } else if (String::Equals(type_name, factory->string_string())) {
5141 1709 : __ JumpIfSmi(input, false_label, false_distance);
5142 1709 : __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
5143 : final_branch_condition = below;
5144 :
5145 26391 : } else if (String::Equals(type_name, factory->symbol_string())) {
5146 752 : __ JumpIfSmi(input, false_label, false_distance);
5147 752 : __ CmpObjectType(input, SYMBOL_TYPE, input);
5148 : final_branch_condition = equal;
5149 :
5150 25639 : } else if (String::Equals(type_name, factory->boolean_string())) {
5151 567 : __ CompareRoot(input, Heap::kTrueValueRootIndex);
5152 567 : __ j(equal, true_label, true_distance);
5153 567 : __ CompareRoot(input, Heap::kFalseValueRootIndex);
5154 : final_branch_condition = equal;
5155 :
5156 25072 : } else if (String::Equals(type_name, factory->undefined_string())) {
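     :     // typeof null is "object", so rule out null before the undetectable
     :     // check below.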
5157 416 : __ CompareRoot(input, Heap::kNullValueRootIndex);
5158 416 : __ j(equal, false_label, false_distance);
5159 416 : __ JumpIfSmi(input, false_label, false_distance);
5160 : // Check for undetectable objects => true.
5161 416 : __ movp(input, FieldOperand(input, HeapObject::kMapOffset));
5162 : __ testb(FieldOperand(input, Map::kBitFieldOffset),
5163 832 : Immediate(1 << Map::kIsUndetectable));
5164 : final_branch_condition = not_zero;
5165 :
5166 24656 : } else if (String::Equals(type_name, factory->function_string())) {
5167 12230 : __ JumpIfSmi(input, false_label, false_distance);
5168 : // Check for callable and not undetectable objects => true.
5169 12230 : __ movp(input, FieldOperand(input, HeapObject::kMapOffset));
5170 12230 : __ movzxbl(input, FieldOperand(input, Map::kBitFieldOffset));
5171 : __ andb(input,
5172 12230 : Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
5173 12230 : __ cmpb(input, Immediate(1 << Map::kIsCallable));
5174 : final_branch_condition = equal;
5175 :
5176 12426 : } else if (String::Equals(type_name, factory->object_string())) {
5177 12426 : __ JumpIfSmi(input, false_label, false_distance);
5178 12426 : __ CompareRoot(input, Heap::kNullValueRootIndex);
5179 12426 : __ j(equal, true_label, true_distance);
5180 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
5181 12426 : __ CmpObjectType(input, FIRST_JS_RECEIVER_TYPE, input);
5182 12426 : __ j(below, false_label, false_distance);
5183 : // Check for callable or undetectable objects => false.
5184 : __ testb(FieldOperand(input, Map::kBitFieldOffset),
5185 24852 : Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
5186 : final_branch_condition = zero;
5187 :
5188 : } else {
5189 0 : __ jmp(false_label, false_distance);
5190 : }
5191 :
5192 42191 : return final_branch_condition;
5193 : }
5194 :
5195 :
5196 1987655 : void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
5197 5962964 : if (info()->ShouldEnsureSpaceForLazyDeopt()) {
5198 : // Ensure that we have enough space after the previous lazy-bailout
5199 : // instruction for patching the code here.
5200 1950001 : int current_pc = masm()->pc_offset();
5201 1950001 : if (current_pc < last_lazy_deopt_pc_ + space_needed) {
5202 208611 : int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
5203 208611 : __ Nop(padding_size);
5204 : }
5205 : }
5206 3975308 : last_lazy_deopt_pc_ = masm()->pc_offset();
5207 1987654 : }
5208 :
5209 :
5210 1677365 : void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
5211 3354730 : last_lazy_deopt_pc_ = masm()->pc_offset();
5212 : DCHECK(instr->HasEnvironment());
5213 3354732 : LEnvironment* env = instr->environment();
5214 1677365 : RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5215 1677367 : safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5216 1677367 : }
5217 :
5218 :
5219 32377 : void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
5220 32377 : Deoptimizer::BailoutType type = instr->hydrogen()->type();
5221 : // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the
5222 : // needed return address), even though the implementation of LAZY and EAGER is
5223 : // now identical. When LAZY is eventually completely folded into EAGER, remove
5224 : // the special case below.
5225 32377 : if (info()->IsStub() && type == Deoptimizer::EAGER) {
5226 : type = Deoptimizer::LAZY;
5227 : }
5228 32377 : DeoptimizeIf(no_condition, instr, instr->hydrogen()->reason(), type);
5229 32377 : }
5230 :
5231 :
5232 3040 : void LCodeGen::DoDummy(LDummy* instr) {
5233 : // Nothing to see here, move on!
5234 3040 : }
5235 :
5236 :
5237 5531 : void LCodeGen::DoDummyUse(LDummyUse* instr) {
5238 : // Nothing to see here, move on!
5239 5531 : }
5240 :
5241 :
5242 31204 : void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
5243 : PushSafepointRegistersScope scope(this);
5244 62408 : __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
5245 31204 : __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5246 31204 : RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
5247 : DCHECK(instr->HasEnvironment());
5248 31204 : LEnvironment* env = instr->environment();
5249 31204 : safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5250 31204 : }
5251 :
5252 :
5253 286755 : void LCodeGen::DoStackCheck(LStackCheck* instr) {
5254 0 : class DeferredStackCheck final : public LDeferredCode {
5255 : public:
5256 : DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
5257 31204 : : LDeferredCode(codegen), instr_(instr) { }
5258 31204 : void Generate() override { codegen()->DoDeferredStackCheck(instr_); }
5259 62408 : LInstruction* instr() override { return instr_; }
5260 :
5261 : private:
5262 : LStackCheck* instr_;
5263 : };
5264 :
5265 : DCHECK(instr->HasEnvironment());
5266 286755 : LEnvironment* env = instr->environment();
5267 : // There is no LLazyBailout instruction for stack-checks. We have to
5268 : // prepare for lazy deoptimization explicitly here.
5269 286755 : if (instr->hydrogen()->is_function_entry()) {
5270 : // Perform stack overflow check.
5271 : Label done;
5272 635919 : __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
5273 255552 : __ j(above_equal, &done, Label::kNear);
5274 :
5275 : DCHECK(instr->context()->IsRegister());
5276 : DCHECK(ToRegister(instr->context()).is(rsi));
5277 : CallCode(isolate()->builtins()->StackCheck(),
5278 : RelocInfo::CODE_TARGET,
5279 255550 : instr);
5280 255552 : __ bind(&done);
5281 : } else {
5282 : DCHECK(instr->hydrogen()->is_backwards_branch());
5283 : // Perform stack overflow check if this goto needs it before jumping.
5284 : DeferredStackCheck* deferred_stack_check =
5285 : new(zone()) DeferredStackCheck(this, instr);
5286 31204 : __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
5287 62408 : __ j(below, deferred_stack_check->entry());
5288 31204 : EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
5289 62408 : __ bind(instr->done_label());
5290 : deferred_stack_check->SetExit(instr->done_label());
5291 31204 : RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5292 : // Don't record a deoptimization index for the safepoint here.
5293 : // This will be done explicitly when emitting call and the safepoint in
5294 : // the deferred code.
5295 : }
5296 286756 : }
5297 :
5298 :
5299 2369 : void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
5300 : // This is a pseudo-instruction that ensures that the environment here is
5301 : // properly registered for deoptimization and records the assembler's PC
5302 : // offset.
5303 2369 : LEnvironment* environment = instr->environment();
5304 :
5305 : // If the environment were already registered, we would have no way of
5306 : // backpatching it with the spill slot operands.
5307 : DCHECK(!environment->HasBeenRegistered());
5308 2369 : RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
5309 :
5310 2369 : GenerateOsrPrologue();
5311 2369 : }
5312 :
5313 :
5314 752 : void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5315 : DCHECK(ToRegister(instr->context()).is(rsi));
5316 :
5317 : Label use_cache, call_runtime;
5318 4512 : __ CheckEnumCache(&call_runtime);
5319 :
5320 752 : __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
5321 752 : __ jmp(&use_cache, Label::kNear);
5322 :
5323 : // Get the set of properties to enumerate.
5324 752 : __ bind(&call_runtime);
5325 752 : __ Push(rax);
5326 752 : CallRuntime(Runtime::kForInEnumerate, instr);
5327 752 : __ bind(&use_cache);
5328 752 : }
5329 :
5330 :
5331 1728 : void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5332 1728 : Register map = ToRegister(instr->map());
5333 1728 : Register result = ToRegister(instr->result());
5334 : Label load_cache, done;
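     :   // A zero enum length means there is no enum cache to load; use the
     :   // empty fixed array instead.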
5335 17280 : __ EnumLength(result, map);
5336 1728 : __ Cmp(result, Smi::kZero);
5337 1728 : __ j(not_equal, &load_cache, Label::kNear);
5338 1728 : __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex);
5339 1728 : __ jmp(&done, Label::kNear);
5340 1728 : __ bind(&load_cache);
5341 1728 : __ LoadInstanceDescriptors(map, result);
5342 : __ movp(result,
5343 1728 : FieldOperand(result, DescriptorArray::kEnumCacheOffset));
5344 : __ movp(result,
5345 1728 : FieldOperand(result, FixedArray::SizeFor(instr->idx())));
5346 1728 : __ bind(&done);
5347 1728 : Condition cc = masm()->CheckSmi(result);
5348 : DeoptimizeIf(cc, instr, DeoptimizeReason::kNoCache);
5349 1728 : }
5350 :
5351 :
5352 1206 : void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5353 : Register object = ToRegister(instr->value());
5354 1206 : __ cmpp(ToRegister(instr->map()),
5355 1206 : FieldOperand(object, HeapObject::kMapOffset));
5356 : DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongMap);
5357 1206 : }
5358 :
5359 :
5360 588 : void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
5361 : Register object,
5362 : Register index) {
5363 : PushSafepointRegistersScope scope(this);
5364 2352 : __ Push(object);
5365 588 : __ Push(index);
5366 588 : __ xorp(rsi, rsi);
5367 588 : __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
5368 : RecordSafepointWithRegisters(
5369 : instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
5370 588 : __ StoreToSafepointRegisterSlot(object, rax);
5371 588 : }
5372 :
5373 :
5374 588 : void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5375 0 : class DeferredLoadMutableDouble final : public LDeferredCode {
5376 : public:
5377 : DeferredLoadMutableDouble(LCodeGen* codegen,
5378 : LLoadFieldByIndex* instr,
5379 : Register object,
5380 : Register index)
5381 : : LDeferredCode(codegen),
5382 : instr_(instr),
5383 : object_(object),
5384 588 : index_(index) {
5385 : }
5386 588 : void Generate() override {
5387 588 : codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_);
5388 588 : }
5389 1176 : LInstruction* instr() override { return instr_; }
5390 :
5391 : private:
5392 : LLoadFieldByIndex* instr_;
5393 : Register object_;
5394 : Register index_;
5395 : };
5396 :
5397 : Register object = ToRegister(instr->object());
5398 : Register index = ToRegister(instr->index());
5399 :
5400 : DeferredLoadMutableDouble* deferred;
5401 9408 : deferred = new(zone()) DeferredLoadMutableDouble(this, instr, object, index);
5402 :
5403 : Label out_of_object, done;
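     :   // The index is a smi carrying an extra low bit: if that bit is set, the
     :   // field holds a mutable heap number and is loaded in deferred code.
     :   // After stripping the bit, non-negative indices address in-object
     :   // fields and negative indices address the out-of-object property array.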
5404 : __ Move(kScratchRegister, Smi::FromInt(1));
5405 588 : __ testp(index, kScratchRegister);
5406 1176 : __ j(not_zero, deferred->entry());
5407 :
5408 588 : __ sarp(index, Immediate(1));
5409 :
5410 588 : __ SmiToInteger32(index, index);
5411 588 : __ cmpl(index, Immediate(0));
5412 588 : __ j(less, &out_of_object, Label::kNear);
5413 : __ movp(object, FieldOperand(object,
5414 : index,
5415 : times_pointer_size,
5416 588 : JSObject::kHeaderSize));
5417 588 : __ jmp(&done, Label::kNear);
5418 :
5419 588 : __ bind(&out_of_object);
5420 588 : __ movp(object, FieldOperand(object, JSObject::kPropertiesOffset));
5421 588 : __ negl(index);
5422 :   // Index is now equal to the out-of-object property index plus 1.
5423 : __ movp(object, FieldOperand(object,
5424 : index,
5425 : times_pointer_size,
5426 588 : FixedArray::kHeaderSize - kPointerSize));
5427 588 : __ bind(deferred->exit());
5428 588 : __ bind(&done);
5429 588 : }
5430 :
5431 : #undef __
5432 :
5433 : } // namespace internal
5434 : } // namespace v8
5435 :
5436 : #endif // V8_TARGET_ARCH_X64