// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-factory.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/external-reference-table.h"
#include "src/frames-inl.h"
#include "src/globals.h"
#include "src/heap/heap-inl.h"  // For MemoryChunk.
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/objects/smi.h"
#include "src/register-configuration.h"
#include "src/snapshot/embedded-data.h"
#include "src/snapshot/snapshot.h"
#include "src/string-constants.h"
#include "src/x64/assembler-x64.h"

// Satisfy cpplint check, but don't include platform-specific header. It is
// included recursively via macro-assembler.h.
#if 0
#include "src/x64/macro-assembler-x64.h"
#endif

namespace v8 {
namespace internal {

Operand StackArgumentsAccessor::GetArgumentOperand(int index) {
  DCHECK_GE(index, 0);
  int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
  int displacement_to_last_argument =
      base_reg_ == rsp ? kPCOnStackSize : kFPOnStackSize + kPCOnStackSize;
  displacement_to_last_argument += extra_displacement_to_last_argument_;
  if (argument_count_reg_ == no_reg) {
    // argument[0] is at base_reg_ + displacement_to_last_argument +
    // (argument_count_immediate_ + receiver - 1) * kSystemPointerSize.
    DCHECK_GT(argument_count_immediate_ + receiver, 0);
    return Operand(base_reg_,
                   displacement_to_last_argument +
                       (argument_count_immediate_ + receiver - 1 - index) *
                           kSystemPointerSize);
  } else {
    // argument[0] is at base_reg_ + displacement_to_last_argument +
    // argument_count_reg_ * times_system_pointer_size + (receiver - 1) *
    // kSystemPointerSize.
    return Operand(base_reg_, argument_count_reg_, times_system_pointer_size,
                   displacement_to_last_argument +
                       (receiver - 1 - index) * kSystemPointerSize);
  }
}
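
// For example, following the layout described above: with base_reg_ == rsp,
// the receiver included, argument_count_immediate_ == 2, and no extra
// displacement, GetArgumentOperand(0) yields
//   Operand(rsp, kPCOnStackSize + (2 + 1 - 1 - 0) * kSystemPointerSize),
// i.e. it skips the return address and addresses the deepest (first-pushed)
// argument on the stack.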

StackArgumentsAccessor::StackArgumentsAccessor(
    Register base_reg, const ParameterCount& parameter_count,
    StackArgumentsAccessorReceiverMode receiver_mode,
    int extra_displacement_to_last_argument)
    : base_reg_(base_reg),
      argument_count_reg_(parameter_count.is_reg() ? parameter_count.reg()
                                                   : no_reg),
      argument_count_immediate_(
          parameter_count.is_immediate() ? parameter_count.immediate() : 0),
      receiver_mode_(receiver_mode),
      extra_displacement_to_last_argument_(
          extra_displacement_to_last_argument) {}

void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && options().enable_root_array_delta_access) {
    intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
    if (is_int32(delta)) {
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination == rax && !options().isolate_independent_code) {
    load_rax(source);
  } else {
    movq(destination, ExternalReferenceAsOperand(source));
  }
}

void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && options().enable_root_array_delta_access) {
    intptr_t delta =
        RootRegisterOffsetForExternalReference(isolate(), destination);
    if (is_int32(delta)) {
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source == rax && !options().isolate_independent_code) {
    store_rax(destination);
  } else {
    movq(ExternalReferenceAsOperand(destination), source);
  }
}

void TurboAssembler::LoadFromConstantsTable(Register destination,
                                            int constant_index) {
  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable));
  LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
  LoadTaggedPointerField(
      destination,
      FieldOperand(destination, FixedArray::OffsetOfElementAt(constant_index)));
}

void TurboAssembler::LoadRootRegisterOffset(Register destination,
                                            intptr_t offset) {
  DCHECK(is_int32(offset));
  if (offset == 0) {
    Move(destination, kRootRegister);
  } else {
    leaq(destination, Operand(kRootRegister, static_cast<int32_t>(offset)));
  }
}

void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
  movq(destination, Operand(kRootRegister, offset));
}

void TurboAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && options().enable_root_array_delta_access) {
    intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
    if (is_int32(delta)) {
      leaq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (FLAG_embedded_builtins) {
    if (root_array_available_ && options().isolate_independent_code) {
      IndirectLoadExternalReference(destination, source);
      return;
    }
  }
  Move(destination, source);
}

Operand TurboAssembler::ExternalReferenceAsOperand(ExternalReference reference,
                                                   Register scratch) {
  if (root_array_available_ && options().enable_root_array_delta_access) {
    int64_t delta =
        RootRegisterOffsetForExternalReference(isolate(), reference);
    if (is_int32(delta)) {
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  if (root_array_available_ && options().isolate_independent_code) {
    if (IsAddressableThroughRootRegister(isolate(), reference)) {
      // Some external references can be efficiently loaded as an offset from
      // kRootRegister.
      intptr_t offset =
          RootRegisterOffsetForExternalReference(isolate(), reference);
      CHECK(is_int32(offset));
      return Operand(kRootRegister, static_cast<int32_t>(offset));
    } else {
      // Otherwise, do a memory load from the external reference table.
      movq(scratch, Operand(kRootRegister,
                            RootRegisterOffsetForExternalReferenceTableEntry(
                                isolate(), reference)));
      return Operand(scratch, 0);
    }
  }
  Move(scratch, reference);
  return Operand(scratch, 0);
}
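
// In summary, ExternalReferenceAsOperand prefers, in order: a direct
// root-register-relative operand (no code emitted), a root-register-relative
// offset for isolate-independent code, a load from the external reference
// table via |scratch|, and finally materializing the full 64-bit address
// into |scratch|.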

void MacroAssembler::PushAddress(ExternalReference source) {
  LoadAddress(kScratchRegister, source);
  Push(kScratchRegister);
}

void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
  DCHECK(root_array_available_);
  movq(destination,
       Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
}

void MacroAssembler::PushRoot(RootIndex index) {
  DCHECK(root_array_available_);
  Push(Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
}

void TurboAssembler::CompareRoot(Register with, RootIndex index) {
  DCHECK(root_array_available_);
  if (IsInRange(index, RootIndex::kFirstStrongOrReadOnlyRoot,
                RootIndex::kLastStrongOrReadOnlyRoot)) {
    cmp_tagged(with,
               Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
  } else {
    // Some smi roots contain system pointer size values like stack limits.
    cmpq(with, Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
  }
}

void TurboAssembler::CompareRoot(Operand with, RootIndex index) {
  DCHECK(root_array_available_);
  DCHECK(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  if (IsInRange(index, RootIndex::kFirstStrongOrReadOnlyRoot,
                RootIndex::kLastStrongOrReadOnlyRoot)) {
    cmp_tagged(with, kScratchRegister);
  } else {
    // Some smi roots contain system pointer size values like stack limits.
    cmpq(with, kScratchRegister);
  }
}

void TurboAssembler::LoadTaggedPointerField(Register destination,
                                            Operand field_operand) {
#ifdef V8_COMPRESS_POINTERS
  DecompressTaggedPointer(destination, field_operand);
#else
  mov_tagged(destination, field_operand);
#endif
}

void TurboAssembler::LoadAnyTaggedField(Register destination,
                                        Operand field_operand,
                                        Register scratch) {
#ifdef V8_COMPRESS_POINTERS
  DecompressAnyTagged(destination, field_operand, scratch);
#else
  mov_tagged(destination, field_operand);
#endif
}

void TurboAssembler::PushTaggedPointerField(Operand field_operand,
                                            Register scratch) {
#ifdef V8_COMPRESS_POINTERS
  DCHECK(!field_operand.AddressUsesRegister(scratch));
  DecompressTaggedPointer(scratch, field_operand);
  Push(scratch);
#else
  Push(field_operand);
#endif
}

void TurboAssembler::PushTaggedAnyField(Operand field_operand,
                                        Register scratch1, Register scratch2) {
#ifdef V8_COMPRESS_POINTERS
  DCHECK(!AreAliased(scratch1, scratch2));
  DCHECK(!field_operand.AddressUsesRegister(scratch1));
  DCHECK(!field_operand.AddressUsesRegister(scratch2));
  DecompressAnyTagged(scratch1, field_operand, scratch2);
  Push(scratch1);
#else
  Push(field_operand);
#endif
}

void TurboAssembler::SmiUntagField(Register dst, Operand src) {
  SmiUntag(dst, src);
}

void TurboAssembler::StoreTaggedField(Operand dst_field_operand,
                                      Immediate value) {
#ifdef V8_COMPRESS_POINTERS
  RecordComment("[ StoreTagged");
  movl(dst_field_operand, value);
  movl(Operand(dst_field_operand, 4), Immediate(0));
  RecordComment("]");
#else
  movq(dst_field_operand, value);
#endif
}

void TurboAssembler::StoreTaggedField(Operand dst_field_operand,
                                      Register value) {
#ifdef V8_COMPRESS_POINTERS
  RecordComment("[ StoreTagged");
  movl(dst_field_operand, value);
  movl(Operand(dst_field_operand, 4), Immediate(0));
  RecordComment("]");
#else
  movq(dst_field_operand, value);
#endif
}

void TurboAssembler::DecompressTaggedSigned(Register destination,
                                            Operand field_operand) {
  RecordComment("[ DecompressTaggedSigned");
  movsxlq(destination, field_operand);
  RecordComment("]");
}

void TurboAssembler::DecompressTaggedPointer(Register destination,
                                             Operand field_operand) {
  RecordComment("[ DecompressTaggedPointer");
  movsxlq(destination, field_operand);
  addq(destination, kRootRegister);
  RecordComment("]");
}

void TurboAssembler::DecompressAnyTagged(Register destination,
                                         Operand field_operand,
                                         Register scratch) {
  DCHECK(!AreAliased(destination, scratch));
  RecordComment("[ DecompressAnyTagged");
  movsxlq(destination, field_operand);
  // Branchlessly compute |masked_root|:
  // masked_root = HAS_SMI_TAG(destination) ? 0 : kRootRegister;
  STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag < 32));
  Register masked_root = scratch;
  movl(masked_root, destination);
  andl(masked_root, Immediate(kSmiTagMask));
  negq(masked_root);
  andq(masked_root, kRootRegister);
  // Now this add operation will either leave the value unchanged if it is a
  // smi or add the isolate root if it is a heap object.
  addq(destination, masked_root);
  RecordComment("]");
}
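
// Working through the branchless sequence above: a compressed heap object has
// its low (kSmiTagMask) bit set, so andl yields 1, negq turns that into an
// all-ones mask, and andq selects kRootRegister; for a smi the mask is zero.
// The final addq therefore adds the isolate root only to heap objects.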

void MacroAssembler::RecordWriteField(Register object, int offset,
                                      Register value, Register dst,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kTaggedSize.
  DCHECK(IsAligned(offset, kTaggedSize));

  leaq(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    testb(dst, Immediate(kTaggedSize - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, RelocInfo::NONE);
    Move(dst, kZapValue, RelocInfo::NONE);
  }
}

void TurboAssembler::SaveRegisters(RegList registers) {
  DCHECK_GT(NumRegs(registers), 0);
  for (int i = 0; i < Register::kNumRegisters; ++i) {
    if ((registers >> i) & 1u) {
      pushq(Register::from_code(i));
    }
  }
}

void TurboAssembler::RestoreRegisters(RegList registers) {
  DCHECK_GT(NumRegs(registers), 0);
  for (int i = Register::kNumRegisters - 1; i >= 0; --i) {
    if ((registers >> i) & 1u) {
      popq(Register::from_code(i));
    }
  }
}

void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
  CallRecordWriteStub(
      object, address, remembered_set_action, fp_mode,
      isolate()->builtins()->builtin_handle(Builtins::kRecordWrite),
      kNullAddress);
}

void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
    Address wasm_target) {
  CallRecordWriteStub(object, address, remembered_set_action, fp_mode,
                      Handle<Code>::null(), wasm_target);
}

void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
    Handle<Code> code_target, Address wasm_target) {
  DCHECK_NE(code_target.is_null(), wasm_target == kNullAddress);

  RecordWriteDescriptor descriptor;
  RegList registers = descriptor.allocatable_registers();

  SaveRegisters(registers);

  Register object_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kObject));
  Register slot_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kSlot));
  Register remembered_set_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kRememberedSet));
  Register fp_mode_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kFPMode));

  // Prepare argument registers for calling RecordWrite
  // slot_parameter   <= address
  // object_parameter <= object
  if (slot_parameter != object) {
    // Normal case
    Move(slot_parameter, address);
    Move(object_parameter, object);
  } else if (object_parameter != address) {
    // Only slot_parameter and object are the same register
    // object_parameter <= object
    // slot_parameter   <= address
    Move(object_parameter, object);
    Move(slot_parameter, address);
  } else {
    // slot_parameter   \/ address
    // object_parameter /\ object
    xchgq(slot_parameter, object_parameter);
  }

  Smi smi_rsa = Smi::FromEnum(remembered_set_action);
  Smi smi_fm = Smi::FromEnum(fp_mode);
  Move(remembered_set_parameter, smi_rsa);
  if (smi_rsa != smi_fm) {
    Move(fp_mode_parameter, smi_fm);
  } else {
    movq(fp_mode_parameter, remembered_set_parameter);
  }
  if (code_target.is_null()) {
    // Use {near_call} for direct Wasm call within a module.
    near_call(wasm_target, RelocInfo::WASM_STUB_CALL);
  } else {
    Call(code_target, RelocInfo::CODE_TARGET);
  }

  RestoreRegisters(registers);
}
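
// Note on the shuffle above: the three branches cover every way {object,
// address} can alias the descriptor's parameter registers. In the remaining
// case, slot_parameter == object and object_parameter == address, so a pair
// of plain moves would clobber an input; xchgq swaps both in one instruction.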

void MacroAssembler::RecordWrite(Register object, Register address,
                                 Register value, SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  DCHECK(object != value);
  DCHECK(object != address);
  DCHECK(value != address);
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp_tagged(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  CallRecordWriteStub(object, address, remembered_set_action, fp_mode);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(address, kZapValue, RelocInfo::NONE);
    Move(value, kZapValue, RelocInfo::NONE);
  }
}

void TurboAssembler::Assert(Condition cc, AbortReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}

void TurboAssembler::AssertUnreachable(AbortReason reason) {
  if (emit_debug_code()) Abort(reason);
}

void TurboAssembler::Check(Condition cc, AbortReason reason) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(reason);
  // Control will not return here.
  bind(&L);
}

void TurboAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kSystemPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}

void TurboAssembler::Abort(AbortReason reason) {
#ifdef DEBUG
  const char* msg = GetAbortReason(reason);
  RecordComment("Abort message: ");
  RecordComment(msg);
#endif

  // Avoid emitting call to builtin if requested.
  if (trap_on_abort()) {
    int3();
    return;
  }

  if (should_abort_hard()) {
    // We don't care if we constructed a frame. Just pretend we did.
    FrameScope assume_frame(this, StackFrame::NONE);
    movl(arg_reg_1, Immediate(static_cast<int>(reason)));
    PrepareCallCFunction(1);
    LoadAddress(rax, ExternalReference::abort_with_reason());
    call(rax);
    return;
  }

  Move(rdx, Smi::FromInt(static_cast<int>(reason)));

  if (!has_frame()) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  } else {
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  }
  // Control will not return here.
  int3();
}

void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
                                           Register centry) {
  const Runtime::Function* f = Runtime::FunctionForId(fid);
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, f->nargs);
  LoadAddress(rbx, ExternalReference::Create(f));
  DCHECK(!AreAliased(centry, rax, rbx));
  DCHECK(centry == rcx);
  CallCodeObject(centry);
}

void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference::Create(f));
  Handle<Code> code =
      CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
  Call(code, RelocInfo::CODE_TARGET);
}

void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- rax                    : number of arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    Set(rax, function->nargs);
  }
  JumpToExternalReference(ExternalReference::Create(fid));
}

void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             bool builtin_exit_frame) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  Handle<Code> code = CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs,
                                          kArgvOnStack, builtin_exit_frame);
  Jump(code, RelocInfo::CODE_TARGET);
}

static constexpr Register saved_regs[] = {rax, rcx, rdx, rbx, rbp, rsi,
                                          rdi, r8,  r9,  r10, r11};

static constexpr int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);

int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
                                                    Register exclusion1,
                                                    Register exclusion2,
                                                    Register exclusion3) const {
  int bytes = 0;
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
      bytes += kSystemPointerSize;
    }
  }

  // R12 to r15 are callee save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    bytes += kDoubleSize * XMMRegister::kNumRegisters;
  }

  return bytes;
}

int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                    Register exclusion2, Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  int bytes = 0;
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
      pushq(reg);
      bytes += kSystemPointerSize;
    }
  }

  // R12 to r15 are callee save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    int delta = kDoubleSize * XMMRegister::kNumRegisters;
    subq(rsp, Immediate(delta));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      Movsd(Operand(rsp, i * kDoubleSize), reg);
    }
    bytes += delta;
  }

  return bytes;
}

int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                   Register exclusion2, Register exclusion3) {
  int bytes = 0;
  if (fp_mode == kSaveFPRegs) {
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      Movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    int delta = kDoubleSize * XMMRegister::kNumRegisters;
    addq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
    bytes += delta;
  }

  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
      popq(reg);
      bytes += kSystemPointerSize;
    }
  }

  return bytes;
}

void TurboAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtss2sd(dst, src, src);
  } else {
    cvtss2sd(dst, src);
  }
}

void TurboAssembler::Cvtss2sd(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtss2sd(dst, dst, src);
  } else {
    cvtss2sd(dst, src);
  }
}

void TurboAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtsd2ss(dst, src, src);
  } else {
    cvtsd2ss(dst, src);
  }
}

void TurboAssembler::Cvtsd2ss(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtsd2ss(dst, dst, src);
  } else {
    cvtsd2ss(dst, src);
  }
}

void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtlsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtlsi2sd(dst, src);
  }
}

void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtlsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtlsi2sd(dst, src);
  }
}

void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtlsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtlsi2ss(dst, src);
  }
}

void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtlsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtlsi2ss(dst, src);
  }
}

void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtqsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtqsi2ss(dst, src);
  }
}

void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtqsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtqsi2ss(dst, src);
  }
}

void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtqsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtqsi2sd(dst, src);
  }
}

void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtqsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtqsi2sd(dst, src);
  }
}

void TurboAssembler::Cvtlui2ss(XMMRegister dst, Register src) {
  // Zero-extend the 32 bit value to 64 bit.
  movl(kScratchRegister, src);
  Cvtqsi2ss(dst, kScratchRegister);
}

void TurboAssembler::Cvtlui2ss(XMMRegister dst, Operand src) {
  // Zero-extend the 32 bit value to 64 bit.
  movl(kScratchRegister, src);
  Cvtqsi2ss(dst, kScratchRegister);
}

void TurboAssembler::Cvtlui2sd(XMMRegister dst, Register src) {
  // Zero-extend the 32 bit value to 64 bit.
  movl(kScratchRegister, src);
  Cvtqsi2sd(dst, kScratchRegister);
}

void TurboAssembler::Cvtlui2sd(XMMRegister dst, Operand src) {
  // Zero-extend the 32 bit value to 64 bit.
  movl(kScratchRegister, src);
  Cvtqsi2sd(dst, kScratchRegister);
}

void TurboAssembler::Cvtqui2ss(XMMRegister dst, Register src) {
  Label done;
  Cvtqsi2ss(dst, src);
  testq(src, src);
  j(positive, &done, Label::kNear);

  // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
  if (src != kScratchRegister) movq(kScratchRegister, src);
  shrq(kScratchRegister, Immediate(1));
  // The LSB is shifted into CF. If it is set, set the LSB in {tmp}.
  Label msb_not_set;
  j(not_carry, &msb_not_set, Label::kNear);
  orq(kScratchRegister, Immediate(1));
  bind(&msb_not_set);
  Cvtqsi2ss(dst, kScratchRegister);
  addss(dst, dst);
  bind(&done);
}
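
// On the halving trick above (used here and in Cvtqui2sd below): when the MSB
// of src is set, the value does not fit the signed range of cvtqsi2ss, so
// floor(src / 2) is converted instead and the result doubled. ORing the
// shifted-out LSB back in keeps it as a sticky bit, so the final result
// rounds the same way a direct unsigned conversion would.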

void TurboAssembler::Cvtqui2ss(XMMRegister dst, Operand src) {
  movq(kScratchRegister, src);
  Cvtqui2ss(dst, kScratchRegister);
}

void TurboAssembler::Cvtqui2sd(XMMRegister dst, Register src) {
  Label done;
  Cvtqsi2sd(dst, src);
  testq(src, src);
  j(positive, &done, Label::kNear);

  // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
  if (src != kScratchRegister) movq(kScratchRegister, src);
  shrq(kScratchRegister, Immediate(1));
  // The LSB is shifted into CF. If it is set, set the LSB in {tmp}.
  Label msb_not_set;
  j(not_carry, &msb_not_set, Label::kNear);
  orq(kScratchRegister, Immediate(1));
  bind(&msb_not_set);
  Cvtqsi2sd(dst, kScratchRegister);
  addsd(dst, dst);
  bind(&done);
}

void TurboAssembler::Cvtqui2sd(XMMRegister dst, Operand src) {
  movq(kScratchRegister, src);
  Cvtqui2sd(dst, kScratchRegister);
}

void TurboAssembler::Cvttss2si(Register dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttss2si(dst, src);
  } else {
    cvttss2si(dst, src);
  }
}

void TurboAssembler::Cvttss2si(Register dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttss2si(dst, src);
  } else {
    cvttss2si(dst, src);
  }
}

void TurboAssembler::Cvttsd2si(Register dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttsd2si(dst, src);
  } else {
    cvttsd2si(dst, src);
  }
}

void TurboAssembler::Cvttsd2si(Register dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttsd2si(dst, src);
  } else {
    cvttsd2si(dst, src);
  }
}

void TurboAssembler::Cvttss2siq(Register dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttss2siq(dst, src);
  } else {
    cvttss2siq(dst, src);
  }
}

void TurboAssembler::Cvttss2siq(Register dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttss2siq(dst, src);
  } else {
    cvttss2siq(dst, src);
  }
}

void TurboAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttsd2siq(dst, src);
  } else {
    cvttsd2siq(dst, src);
  }
}

void TurboAssembler::Cvttsd2siq(Register dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttsd2siq(dst, src);
  } else {
    cvttsd2siq(dst, src);
  }
}

namespace {
template <typename OperandOrXMMRegister, bool is_double>
void ConvertFloatToUint64(TurboAssembler* tasm, Register dst,
                          OperandOrXMMRegister src, Label* fail) {
  Label success;
  // There is no native float-to-uint instruction, so we use a float-to-int
  // conversion and postprocess the result.
  if (is_double) {
    tasm->Cvttsd2siq(dst, src);
  } else {
    tasm->Cvttss2siq(dst, src);
  }
  // If the result of the conversion is positive, we are already done.
  tasm->testq(dst, dst);
  tasm->j(positive, &success);
  // The result of the first conversion was negative, which means that the
  // input value was not within the positive int64 range. We subtract 2^63
  // and convert it again to see if it is within the uint64 range.
  if (is_double) {
    tasm->Move(kScratchDoubleReg, -9223372036854775808.0);
    tasm->addsd(kScratchDoubleReg, src);
    tasm->Cvttsd2siq(dst, kScratchDoubleReg);
  } else {
    tasm->Move(kScratchDoubleReg, -9223372036854775808.0f);
    tasm->addss(kScratchDoubleReg, src);
    tasm->Cvttss2siq(dst, kScratchDoubleReg);
  }
  tasm->testq(dst, dst);
  // The only possible negative value here is 0x8000000000000000, which is
  // used on x64 to indicate an integer overflow.
  tasm->j(negative, fail ? fail : &success);
  // The input value is within uint64 range and the second conversion worked
  // successfully, but we still have to undo the subtraction we did
  // earlier.
  tasm->Set(kScratchRegister, 0x8000000000000000);
  tasm->orq(dst, kScratchRegister);
  tasm->bind(&success);
}
}  // namespace
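
// Worked example for the helper above: for an input of 2^63 + 2048.0, the
// first truncating conversion overflows to 0x8000000000000000 (negative).
// Adding -2^63 gives 2048.0, which converts cleanly to 2048; ORing
// 0x8000000000000000 back in reconstructs the unsigned result 2^63 + 2048.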

void TurboAssembler::Cvttsd2uiq(Register dst, Operand src, Label* success) {
  ConvertFloatToUint64<Operand, true>(this, dst, src, success);
}

void TurboAssembler::Cvttsd2uiq(Register dst, XMMRegister src, Label* success) {
  ConvertFloatToUint64<XMMRegister, true>(this, dst, src, success);
}

void TurboAssembler::Cvttss2uiq(Register dst, Operand src, Label* success) {
  ConvertFloatToUint64<Operand, false>(this, dst, src, success);
}

void TurboAssembler::Cvttss2uiq(Register dst, XMMRegister src, Label* success) {
  ConvertFloatToUint64<XMMRegister, false>(this, dst, src, success);
}

void MacroAssembler::Load(Register dst, Operand src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsxbq(dst, src);
  } else if (r.IsUInteger8()) {
    movzxbl(dst, src);
  } else if (r.IsInteger16()) {
    movsxwq(dst, src);
  } else if (r.IsUInteger16()) {
    movzxwl(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    movq(dst, src);
  }
}

void MacroAssembler::Store(Operand dst, Register src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    movb(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    movw(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    movq(dst, src);
  }
}

void TurboAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x);
  }
}

void TurboAssembler::Set(Operand dst, intptr_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

Register TurboAssembler::GetSmiConstant(Smi source) {
  STATIC_ASSERT(kSmiTag == 0);
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  Move(kScratchRegister, source);
  return kScratchRegister;
}

void TurboAssembler::Move(Register dst, Smi source) {
  STATIC_ASSERT(kSmiTag == 0);
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
  } else {
    Move(dst, source.ptr(), RelocInfo::NONE);
  }
}

void TurboAssembler::Move(Register dst, ExternalReference ext) {
  if (FLAG_embedded_builtins) {
    if (root_array_available_ && options().isolate_independent_code) {
      IndirectLoadExternalReference(dst, ext);
      return;
    }
  }
  movq(dst, Immediate64(ext.address(), RelocInfo::EXTERNAL_REFERENCE));
}

void MacroAssembler::SmiTag(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (dst != src) {
    movq(dst, src);
  }
  DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
  shlq(dst, Immediate(kSmiShift));
}

void TurboAssembler::SmiUntag(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (dst != src) {
    movq(dst, src);
  }
  DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
  sarq(dst, Immediate(kSmiShift));
}

void TurboAssembler::SmiUntag(Register dst, Operand src) {
  if (SmiValuesAre32Bits()) {
    movl(dst, Operand(src, kSmiShift / kBitsPerByte));
    // Sign extend to 64-bit.
    movsxlq(dst, dst);
  } else {
    DCHECK(SmiValuesAre31Bits());
    movq(dst, src);
    sarq(dst, Immediate(kSmiShift));
  }
}

void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  AssertSmi(smi1);
  AssertSmi(smi2);
  cmpq(smi1, smi2);
}

void MacroAssembler::SmiCompare(Register dst, Smi src) {
  AssertSmi(dst);
  Cmp(dst, src);
}

void MacroAssembler::Cmp(Register dst, Smi src) {
  DCHECK_NE(dst, kScratchRegister);
  if (src->value() == 0) {
    test_tagged(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmp_tagged(dst, constant_reg);
  }
}

void MacroAssembler::SmiCompare(Register dst, Operand src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmp_tagged(dst, src);
}

void MacroAssembler::SmiCompare(Operand dst, Register src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmp_tagged(dst, src);
}

void MacroAssembler::SmiCompare(Operand dst, Smi src) {
  AssertSmi(dst);
  if (SmiValuesAre32Bits()) {
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    cmpl(dst, Immediate(src));
  }
}

void MacroAssembler::Cmp(Operand dst, Smi src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  DCHECK(!dst.AddressUsesRegister(smi_reg));
  cmp_tagged(dst, smi_reg);
}

Condition TurboAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}

Condition TurboAssembler::CheckSmi(Operand src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}

void TurboAssembler::JumpIfSmi(Register src, Label* on_smi,
                               Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(smi, on_smi, near_jump);
}

void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}

void MacroAssembler::JumpIfNotSmi(Operand src, Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}

void MacroAssembler::SmiAddConstant(Operand dst, Smi constant) {
  if (constant->value() != 0) {
    if (SmiValuesAre32Bits()) {
      addl(Operand(dst, kSmiShift / kBitsPerByte),
           Immediate(constant->value()));
    } else {
      DCHECK(SmiValuesAre31Bits());
      if (kTaggedSize == kInt64Size) {
        // Sign-extend value after addition
        movl(kScratchRegister, dst);
        addl(kScratchRegister, Immediate(constant));
        movsxlq(kScratchRegister, kScratchRegister);
        movq(dst, kScratchRegister);
      } else {
        DCHECK_EQ(kTaggedSize, kInt32Size);
        addl(dst, Immediate(constant));
      }
    }
  }
}

SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  if (SmiValuesAre32Bits()) {
    DCHECK(is_uint6(shift));
    // There is a possible optimization if shift is in the range 60-63, but
    // that will (and must) never happen.
    if (dst != src) {
      movq(dst, src);
    }
    if (shift < kSmiShift) {
      sarq(dst, Immediate(kSmiShift - shift));
    } else {
      shlq(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    if (dst != src) {
      mov_tagged(dst, src);
    }
    // We have to sign extend the index register to 64-bit as the SMI might
    // be negative.
    movsxlq(dst, dst);
    if (shift < kSmiShift) {
      sarq(dst, Immediate(kSmiShift - shift));
    } else if (shift != kSmiShift) {
      if (shift - kSmiShift <= static_cast<int>(times_8)) {
        return SmiIndex(dst, static_cast<ScaleFactor>(shift - kSmiShift));
      }
      shlq(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  }
}
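
// With 32-bit smi values, the untag-and-scale above folds into a single
// shift: indexing 8-byte elements (shift == 3) becomes
// sarq(dst, Immediate(29)), i.e. kSmiShift - shift == 32 - 3.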

void TurboAssembler::Push(Smi source) {
  intptr_t smi = static_cast<intptr_t>(source.ptr());
  if (is_int32(smi)) {
    Push(Immediate(static_cast<int32_t>(smi)));
    return;
  }
  int first_byte_set = base::bits::CountTrailingZeros64(smi) / 8;
  int last_byte_set = (63 - base::bits::CountLeadingZeros64(smi)) / 8;
  if (first_byte_set == last_byte_set) {
    // This sequence has only 7 bytes, compared to the 12 bytes below.
    Push(Immediate(0));
    movb(Operand(rsp, first_byte_set),
         Immediate(static_cast<int8_t>(smi >> (8 * first_byte_set))));
    return;
  }
  Register constant = GetSmiConstant(source);
  Push(constant);
}
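
// The 7-byte sequence above covers tagged values whose set bits fall within a
// single byte: push imm8 (2 bytes) plus movb with a disp8 operand and imm8
// (5 bytes), versus a 10-byte movabs into kScratchRegister plus a 2-byte push.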

// ----------------------------------------------------------------------------

void TurboAssembler::Move(Register dst, Register src) {
  if (dst != src) {
    movq(dst, src);
  }
}

void TurboAssembler::MoveNumber(Register dst, double value) {
  int32_t smi;
  if (DoubleToSmiInteger(value, &smi)) {
    Move(dst, Smi::FromInt(smi));
  } else {
    movq_heap_number(dst, value);
  }
}

void TurboAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    Xorps(dst, dst);
  } else {
    unsigned nlz = base::bits::CountLeadingZeros(src);
    unsigned ntz = base::bits::CountTrailingZeros(src);
    unsigned pop = base::bits::CountPopulation(src);
    DCHECK_NE(0u, pop);
    if (pop + ntz + nlz == 32) {
      Pcmpeqd(dst, dst);
      if (ntz) Pslld(dst, static_cast<byte>(ntz + nlz));
      if (nlz) Psrld(dst, static_cast<byte>(nlz));
    } else {
      movl(kScratchRegister, Immediate(src));
      Movd(dst, kScratchRegister);
    }
  }
}

void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
  if (src == 0) {
    Xorpd(dst, dst);
  } else {
    unsigned nlz = base::bits::CountLeadingZeros(src);
    unsigned ntz = base::bits::CountTrailingZeros(src);
    unsigned pop = base::bits::CountPopulation(src);
    DCHECK_NE(0u, pop);
    if (pop + ntz + nlz == 64) {
      Pcmpeqd(dst, dst);
      if (ntz) Psllq(dst, static_cast<byte>(ntz + nlz));
      if (nlz) Psrlq(dst, static_cast<byte>(nlz));
    } else {
      uint32_t lower = static_cast<uint32_t>(src);
      uint32_t upper = static_cast<uint32_t>(src >> 32);
      if (upper == 0) {
        Move(dst, lower);
      } else {
        movq(kScratchRegister, src);
        Movq(dst, kScratchRegister);
      }
    }
  }
}
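
// The pop + ntz + nlz checks above detect a single contiguous run of one
// bits, which can be built without a constant load: Pcmpeqd sets all bits,
// the left shift clears the trailing zeros, and the right shift moves the
// run into place. E.g. 0x00FFFFFFFFFFFF00 is Pcmpeqd; Psllq(16); Psrlq(8).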
1385 :
1386 : // ----------------------------------------------------------------------------
1387 :
1388 5 : void MacroAssembler::Absps(XMMRegister dst) {
1389 : Andps(dst, ExternalReferenceAsOperand(
1390 5 : ExternalReference::address_of_float_abs_constant()));
1391 5 : }
1392 :
1393 5 : void MacroAssembler::Negps(XMMRegister dst) {
1394 : Xorps(dst, ExternalReferenceAsOperand(
1395 5 : ExternalReference::address_of_float_neg_constant()));
1396 5 : }
1397 :
1398 5 : void MacroAssembler::Abspd(XMMRegister dst) {
1399 : Andps(dst, ExternalReferenceAsOperand(
1400 5 : ExternalReference::address_of_double_abs_constant()));
1401 5 : }
1402 :
1403 5 : void MacroAssembler::Negpd(XMMRegister dst) {
1404 : Xorps(dst, ExternalReferenceAsOperand(
1405 5 : ExternalReference::address_of_double_neg_constant()));
1406 5 : }
1407 :
1408 0 : void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
1409 : AllowDeferredHandleDereference smi_check;
1410 0 : if (source->IsSmi()) {
1411 0 : Cmp(dst, Smi::cast(*source));
1412 : } else {
1413 0 : Move(kScratchRegister, Handle<HeapObject>::cast(source));
1414 0 : cmp_tagged(dst, kScratchRegister);
1415 : }
1416 0 : }
1417 :
1418 112 : void MacroAssembler::Cmp(Operand dst, Handle<Object> source) {
1419 : AllowDeferredHandleDereference smi_check;
1420 224 : if (source->IsSmi()) {
1421 0 : Cmp(dst, Smi::cast(*source));
1422 : } else {
1423 112 : Move(kScratchRegister, Handle<HeapObject>::cast(source));
1424 112 : cmp_tagged(dst, kScratchRegister);
1425 : }
1426 112 : }
1427 :
1428 21 : void TurboAssembler::Push(Handle<HeapObject> source) {
1429 21 : Move(kScratchRegister, source);
1430 : Push(kScratchRegister);
1431 21 : }
1432 :
1433 6846532 : void TurboAssembler::Move(Register result, Handle<HeapObject> object,
1434 : RelocInfo::Mode rmode) {
1435 : if (FLAG_embedded_builtins) {
1436 6846532 : if (root_array_available_ && options().isolate_independent_code) {
1437 48048 : IndirectLoadConstant(result, object);
1438 6894576 : return;
1439 : }
1440 : }
1441 6798484 : movq(result, Immediate64(object.address(), rmode));
1442 : }
1443 :
1444 0 : void TurboAssembler::Move(Operand dst, Handle<HeapObject> object,
1445 : RelocInfo::Mode rmode) {
1446 0 : Move(kScratchRegister, object, rmode);
1447 0 : movq(dst, kScratchRegister);
1448 0 : }
1449 :
1450 2088 : void TurboAssembler::MoveStringConstant(Register result,
1451 : const StringConstantBase* string,
1452 : RelocInfo::Mode rmode) {
1453 2088 : movq_string(result, string);
1454 2088 : }
1455 :
1456 288 : void MacroAssembler::Drop(int stack_elements) {
1457 288 : if (stack_elements > 0) {
1458 576 : addq(rsp, Immediate(stack_elements * kSystemPointerSize));
1459 : }
1460 288 : }
1461 :
1462 :
1463 56 : void MacroAssembler::DropUnderReturnAddress(int stack_elements,
1464 : Register scratch) {
1465 : DCHECK_GT(stack_elements, 0);
1466 56 : if (stack_elements == 1) {
1467 56 : popq(MemOperand(rsp, 0));
1468 112 : return;
1469 : }
1470 :
1471 : PopReturnAddressTo(scratch);
1472 0 : Drop(stack_elements);
1473 : PushReturnAddressFrom(scratch);
1474 : }
1475 :
1476 663785 : void TurboAssembler::Push(Register src) { pushq(src); }
1477 :
1478 58841 : void TurboAssembler::Push(Operand src) { pushq(src); }
1479 :
1480 790722 : void MacroAssembler::PushQuad(Operand src) { pushq(src); }
1481 :
1482 1372545 : void TurboAssembler::Push(Immediate value) { pushq(value); }
1483 :
1484 0 : void MacroAssembler::PushImm32(int32_t imm32) { pushq_imm32(imm32); }
1485 :
1486 3384 : void MacroAssembler::Pop(Register dst) { popq(dst); }
1487 :
1488 44265 : void MacroAssembler::Pop(Operand dst) { popq(dst); }
1489 :
1490 702864 : void MacroAssembler::PopQuad(Operand dst) { popq(dst); }
1491 :
1492 0 : void TurboAssembler::Jump(ExternalReference ext) {
1493 0 : LoadAddress(kScratchRegister, ext);
1494 0 : jmp(kScratchRegister);
1495 0 : }
1496 :
1497 0 : void TurboAssembler::Jump(Operand op) { jmp(op); }
1498 :
1499 1024 : void TurboAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1500 : Move(kScratchRegister, destination, rmode);
1501 1024 : jmp(kScratchRegister);
1502 1024 : }
1503 :
1504 505223 : void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
1505 : Condition cc) {
1506 : DCHECK_IMPLIES(options().isolate_independent_code,
1507 : Builtins::IsIsolateIndependentBuiltin(*code_object));
1508 505223 : if (options().inline_offheap_trampolines) {
1509 456897 : int builtin_index = Builtins::kNoBuiltinId;
1510 456897 : if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1511 : Builtins::IsIsolateIndependent(builtin_index)) {
1512 456691 : Label skip;
1513 456691 : if (cc != always) {
1514 456687 : if (cc == never) return;
1515 456686 : j(NegateCondition(cc), &skip, Label::kNear);
1516 : }
1517 : // Inline the trampoline.
1518 456681 : RecordCommentForOffHeapTrampoline(builtin_index);
1519 456688 : CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1520 456687 : EmbeddedData d = EmbeddedData::FromBlob();
1521 456687 : Address entry = d.InstructionStartOfBuiltin(builtin_index);
1522 : Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1523 456690 : jmp(kScratchRegister);
1524 456690 : bind(&skip);
1525 456690 : return;
1526 : }
1527 : }
1528 48534 : j(cc, code_object, rmode);
1529 : }
1530 :
1531 31065684 : void MacroAssembler::JumpToInstructionStream(Address entry) {
1532 : Move(kOffHeapTrampolineRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1533 31065685 : jmp(kOffHeapTrampolineRegister);
1534 31065685 : }
1535 :
1536 0 : void TurboAssembler::Call(ExternalReference ext) {
1537 0 : LoadAddress(kScratchRegister, ext);
1538 0 : call(kScratchRegister);
1539 0 : }
1540 :
1541 2488 : void TurboAssembler::Call(Operand op) {
1542 2488 : if (!CpuFeatures::IsSupported(ATOM)) {
1543 2488 : call(op);
1544 : } else {
1545 0 : movq(kScratchRegister, op);
1546 0 : call(kScratchRegister);
1547 : }
1548 2488 : }
1549 :
1550 0 : void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1551 : Move(kScratchRegister, destination, rmode);
1552 0 : call(kScratchRegister);
1553 0 : }
1554 :
1555 5087468 : void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1556 : DCHECK_IMPLIES(options().isolate_independent_code,
1557 : Builtins::IsIsolateIndependentBuiltin(*code_object));
1558 5087468 : if (options().inline_offheap_trampolines) {
1559 4217322 : int builtin_index = Builtins::kNoBuiltinId;
1560 4217322 : if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1561 : Builtins::IsIsolateIndependent(builtin_index)) {
1562 : // Inline the trampoline.
1563 4201106 : RecordCommentForOffHeapTrampoline(builtin_index);
1564 4201106 : CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1565 4201103 : EmbeddedData d = EmbeddedData::FromBlob();
1566 4201103 : Address entry = d.InstructionStartOfBuiltin(builtin_index);
1567 : Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1568 4201108 : call(kScratchRegister);
1569 5087482 : return;
1570 : }
1571 : }
1572 : DCHECK(RelocInfo::IsCodeTarget(rmode));
1573 886370 : call(code_object, rmode);
1574 : }
1575 :
1576 2488 : void TurboAssembler::CallBuiltinPointer(Register builtin_pointer) {
1577 : #if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1578 : STATIC_ASSERT(kSmiShiftSize == 0);
1579 : STATIC_ASSERT(kSmiTagSize == 1);
1580 : STATIC_ASSERT(kSmiTag == 0);
1581 :
1582 : // The builtin_pointer register contains the builtin index as a Smi.
1583 : // Untagging is folded into the indexing operand below (we use times_4 instead
1584 : // of times_8 since smis are already shifted by one).
1585 : Call(Operand(kRootRegister, builtin_pointer, times_4,
1586 : IsolateData::builtin_entry_table_offset()));
1587 : #else // defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1588 : STATIC_ASSERT(kSmiShiftSize == 31);
1589 : STATIC_ASSERT(kSmiTagSize == 1);
1590 : STATIC_ASSERT(kSmiTag == 0);
1591 :
1592 : // The builtin_pointer register contains the builtin index as a Smi.
1593 2488 : SmiUntag(builtin_pointer, builtin_pointer);
1594 : Call(Operand(kRootRegister, builtin_pointer, times_8,
1595 2488 : IsolateData::builtin_entry_table_offset()));
1596 : #endif // defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1597 2488 : }
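// A minimal compile-time sketch of the scale-folding above, assuming the
// 31-bit Smi encoding (tag 0, shift 1); names are illustrative, not V8 API.
// A Smi equals index * 2, so scale 4 on the Smi addresses the same 8-byte
// table entry as scale 8 on the raw index:
namespace smi_scale_sketch {
constexpr long long SmiFromInt(long long i) { return i << 1; }
static_assert(SmiFromInt(7) * 4 == 7 * 8,
              "the untag shift folds into the addressing-mode scale");
}  // namespace smi_scale_sketch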
1598 :
1599 38679 : void TurboAssembler::LoadCodeObjectEntry(Register destination,
1600 : Register code_object) {
1601 : // Code objects are called differently depending on whether we are generating
1602 : // builtin code (which will later be embedded into the binary) or compiling
1603 : // user JS code at runtime.
1604 : // * Builtin code runs in --jitless mode and thus must not call into on-heap
1605 : // Code targets. Instead, we dispatch through the builtins entry table.
1606 : // * Codegen at runtime does not have this restriction and we can use the
1607 : // shorter, branchless instruction sequence. The assumption here is that
1608 : // targets are usually generated code and not builtin Code objects.
1609 :
1610 38679 : if (options().isolate_independent_code) {
1611 : DCHECK(root_array_available());
1612 8904 : Label if_code_is_builtin, out;
1613 :
1614 : // Check whether the Code object is a builtin. If so, call its (off-heap)
1615 : // entry point directly without going through the (on-heap) trampoline.
1616 : // Otherwise, just call the Code object as always.
1617 : cmpl(FieldOperand(code_object, Code::kBuiltinIndexOffset),
1618 8904 : Immediate(Builtins::kNoBuiltinId));
1619 8904 : j(not_equal, &if_code_is_builtin);
1620 :
1621 : // A non-builtin Code object, the entry point is at
1622 : // Code::raw_instruction_start().
1623 8904 : Move(destination, code_object);
1624 8904 : addq(destination, Immediate(Code::kHeaderSize - kHeapObjectTag));
1625 8904 : jmp(&out);
1626 :
1627 : // A builtin Code object, the entry point is loaded from the builtin entry
1628 : // table.
1629 8904 : bind(&if_code_is_builtin);
1630 : movl(destination, FieldOperand(code_object, Code::kBuiltinIndexOffset));
1631 : movq(destination,
1632 : Operand(kRootRegister, destination, times_system_pointer_size,
1633 17808 : IsolateData::builtin_entry_table_offset()));
1634 :
1635 8904 : bind(&out);
1636 : } else {
1637 29775 : Move(destination, code_object);
1638 29776 : addq(destination, Immediate(Code::kHeaderSize - kHeapObjectTag));
1639 : }
1640 38681 : }
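// A hedged sketch of the isolate-independent dispatch above (names are
// illustrative; the "- 1" compensates for the heap-object tag, assumed to be
// 1, mirroring kHeaderSize - kHeapObjectTag):
namespace code_entry_sketch {
constexpr int kNoId = -1;
inline unsigned long long CodeEntry(int builtin_index,
                                    unsigned long long code_object,
                                    const unsigned long long* entry_table,
                                    unsigned long long header_size) {
  // Builtins dispatch through the off-heap entry table; everything else
  // enters at Code::raw_instruction_start(), i.e. just past the header.
  if (builtin_index != kNoId) return entry_table[builtin_index];
  return code_object + header_size - 1;
}
}  // namespace code_entry_sketch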
1641 :
1642 24064 : void TurboAssembler::CallCodeObject(Register code_object) {
1643 24064 : LoadCodeObjectEntry(code_object, code_object);
1644 24065 : call(code_object);
1645 24064 : }
1646 :
1647 728 : void TurboAssembler::JumpCodeObject(Register code_object) {
1648 728 : LoadCodeObjectEntry(code_object, code_object);
1649 728 : jmp(code_object);
1650 728 : }
1651 :
1652 0 : void TurboAssembler::RetpolineCall(Register reg) {
1653 0 : Label setup_return, setup_target, inner_indirect_branch, capture_spec;
1654 :
1655 0 : jmp(&setup_return); // Jump past the entire retpoline below.
1656 :
1657 0 : bind(&inner_indirect_branch);
1658 0 : call(&setup_target);
1659 :
1660 0 : bind(&capture_spec);
1661 0 : pause(); // Trap speculative execution of the predicted return here.
1662 0 : jmp(&capture_spec);
1663 :
1664 0 : bind(&setup_target);
1665 0 : movq(Operand(rsp, 0), reg); // Replace the pushed return address
1666 0 : ret(0); // with the real target and "return" to it.
1667 :
1668 0 : bind(&setup_return);
1669 0 : call(&inner_indirect_branch); // Callee will return after this instruction.
1670 0 : }
1671 :
1672 0 : void TurboAssembler::RetpolineCall(Address destination, RelocInfo::Mode rmode) {
1673 : Move(kScratchRegister, destination, rmode);
1674 0 : RetpolineCall(kScratchRegister);
1675 0 : }
1676 :
1677 0 : void TurboAssembler::RetpolineJump(Register reg) {
1678 0 : Label setup_target, capture_spec;
1679 :
1680 0 : call(&setup_target);
1681 :
1682 0 : bind(&capture_spec);
1683 0 : pause();
1684 0 : jmp(&capture_spec);
1685 :
1686 0 : bind(&setup_target);
1687 0 : movq(Operand(rsp, 0), reg);
1688 0 : ret(0);
1689 0 : }
1690 :
1691 38276 : void TurboAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
1692 38276 : if (imm8 == 0) {
1693 : Movd(dst, src);
1694 : return;
1695 : }
1696 37260 : if (CpuFeatures::IsSupported(SSE4_1)) {
1697 : CpuFeatureScope sse_scope(this, SSE4_1);
1698 37050 : pextrd(dst, src, imm8);
1699 : return;
1700 : }
1701 : DCHECK_EQ(1, imm8);
1702 210 : movq(dst, src);
1703 : shrq(dst, Immediate(32));
1704 : }
1705 :
1706 416 : void TurboAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
1707 416 : if (CpuFeatures::IsSupported(SSE4_1)) {
1708 : CpuFeatureScope sse_scope(this, SSE4_1);
1709 414 : pinsrd(dst, src, imm8);
1710 416 : return;
1711 : }
1712 : Movd(kScratchDoubleReg, src);
1713 2 : if (imm8 == 1) {
1714 : punpckldq(dst, kScratchDoubleReg);
1715 : } else {
1716 : DCHECK_EQ(0, imm8);
1717 : Movss(dst, kScratchDoubleReg);
1718 : }
1719 : }
1720 :
1721 1488 : void TurboAssembler::Pinsrd(XMMRegister dst, Operand src, int8_t imm8) {
1722 1488 : if (CpuFeatures::IsSupported(SSE4_1)) {
1723 : CpuFeatureScope sse_scope(this, SSE4_1);
1724 1488 : pinsrd(dst, src, imm8);
1725 1488 : return;
1726 : }
1727 : Movd(kScratchDoubleReg, src);
1728 0 : if (imm8 == 1) {
1729 : punpckldq(dst, kScratchDoubleReg);
1730 : } else {
1731 : DCHECK_EQ(0, imm8);
1732 : Movss(dst, kScratchDoubleReg);
1733 : }
1734 : }
1735 :
1736 446 : void TurboAssembler::Lzcntl(Register dst, Register src) {
1737 446 : if (CpuFeatures::IsSupported(LZCNT)) {
1738 : CpuFeatureScope scope(this, LZCNT);
1739 444 : lzcntl(dst, src);
1740 446 : return;
1741 : }
1742 2 : Label not_zero_src;
1743 2 : bsrl(dst, src);
1744 2 : j(not_zero, &not_zero_src, Label::kNear);
1745 2 : Set(dst, 63); // 63^31 == 32
1746 2 : bind(&not_zero_src);
1747 2 : xorl(dst, Immediate(31)); // for x in [0..31], 31^x == 31 - x
1748 : }
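// The BSR fallback above, restated as portable C++ (a software bit scan
// stands in for bsrl; compile-time checked, illustrative only):
namespace lzcnt_sketch {
constexpr int Lzcnt32(unsigned x) {
  int dst = 63;  // Preload 63 so that 63 ^ 31 == 32, lzcnt's result for 0.
  for (int i = 31; i >= 0; --i) {  // bsrl: index of the highest set bit.
    if (x & (1u << i)) { dst = i; break; }
  }
  return dst ^ 31;  // For dst in [0..31], 31 ^ dst == 31 - dst.
}
static_assert(Lzcnt32(0) == 32 && Lzcnt32(1) == 31 && Lzcnt32(0x80000000u) == 0,
              "matches lzcnt semantics");
}  // namespace lzcnt_sketch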
1749 :
1750 0 : void TurboAssembler::Lzcntl(Register dst, Operand src) {
1751 0 : if (CpuFeatures::IsSupported(LZCNT)) {
1752 : CpuFeatureScope scope(this, LZCNT);
1753 0 : lzcntl(dst, src);
1754 0 : return;
1755 : }
1756 0 : Label not_zero_src;
1757 0 : bsrl(dst, src);
1758 0 : j(not_zero, &not_zero_src, Label::kNear);
1759 0 : Set(dst, 63); // 63^31 == 32
1760 0 : bind(&not_zero_src);
1761 0 : xorl(dst, Immediate(31)); // for x in [0..31], 31^x == 31 - x
1762 : }
1763 :
1764 36 : void TurboAssembler::Lzcntq(Register dst, Register src) {
1765 36 : if (CpuFeatures::IsSupported(LZCNT)) {
1766 : CpuFeatureScope scope(this, LZCNT);
1767 36 : lzcntq(dst, src);
1768 36 : return;
1769 : }
1770 0 : Label not_zero_src;
1771 0 : bsrq(dst, src);
1772 0 : j(not_zero, &not_zero_src, Label::kNear);
1773 0 : Set(dst, 127); // 127^63 == 64
1774 0 : bind(&not_zero_src);
1775 0 : xorl(dst, Immediate(63)); // for x in [0..63], 63^x == 63 - x
1776 : }
1777 :
1778 0 : void TurboAssembler::Lzcntq(Register dst, Operand src) {
1779 0 : if (CpuFeatures::IsSupported(LZCNT)) {
1780 : CpuFeatureScope scope(this, LZCNT);
1781 0 : lzcntq(dst, src);
1782 0 : return;
1783 : }
1784 0 : Label not_zero_src;
1785 0 : bsrq(dst, src);
1786 0 : j(not_zero, &not_zero_src, Label::kNear);
1787 0 : Set(dst, 127); // 127^63 == 64
1788 0 : bind(&not_zero_src);
1789 0 : xorl(dst, Immediate(63)); // for x in [0..63], 63^x == 63 - x
1790 : }
1791 :
1792 44 : void TurboAssembler::Tzcntq(Register dst, Register src) {
1793 44 : if (CpuFeatures::IsSupported(BMI1)) {
1794 : CpuFeatureScope scope(this, BMI1);
1795 44 : tzcntq(dst, src);
1796 44 : return;
1797 : }
1798 0 : Label not_zero_src;
1799 0 : bsfq(dst, src);
1800 0 : j(not_zero, &not_zero_src, Label::kNear);
1801 : // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
1802 0 : Set(dst, 64);
1803 0 : bind(&not_zero_src);
1804 : }
1805 :
1806 0 : void TurboAssembler::Tzcntq(Register dst, Operand src) {
1807 0 : if (CpuFeatures::IsSupported(BMI1)) {
1808 : CpuFeatureScope scope(this, BMI1);
1809 0 : tzcntq(dst, src);
1810 0 : return;
1811 : }
1812 0 : Label not_zero_src;
1813 0 : bsfq(dst, src);
1814 0 : j(not_zero, &not_zero_src, Label::kNear);
1815 : // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
1816 0 : Set(dst, 64);
1817 0 : bind(&not_zero_src);
1818 : }
1819 :
1820 332 : void TurboAssembler::Tzcntl(Register dst, Register src) {
1821 332 : if (CpuFeatures::IsSupported(BMI1)) {
1822 : CpuFeatureScope scope(this, BMI1);
1823 332 : tzcntl(dst, src);
1824 332 : return;
1825 : }
1826 0 : Label not_zero_src;
1827 0 : bsfl(dst, src);
1828 0 : j(not_zero, &not_zero_src, Label::kNear);
1829 0 : Set(dst, 32); // The result of tzcnt is 32 if src = 0.
1830 0 : bind(&not_zero_src);
1831 : }
1832 :
1833 0 : void TurboAssembler::Tzcntl(Register dst, Operand src) {
1834 0 : if (CpuFeatures::IsSupported(BMI1)) {
1835 : CpuFeatureScope scope(this, BMI1);
1836 0 : tzcntl(dst, src);
1837 0 : return;
1838 : }
1839 0 : Label not_zero_src;
1840 0 : bsfl(dst, src);
1841 0 : j(not_zero, &not_zero_src, Label::kNear);
1842 0 : Set(dst, 32); // The result of tzcnt is 32 if src = 0.
1843 0 : bind(&not_zero_src);
1844 : }
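// The matching BSF fallback, as a compile-time-checked sketch (a software
// scan stands in for bsfl; illustrative only):
namespace tzcnt_sketch {
constexpr int Tzcnt32(unsigned x) {
  if (x == 0) return 32;  // bsf(0) is undefined; tzcnt(0) is defined as 32.
  int i = 0;
  while (!(x & 1u)) { x >>= 1; ++i; }  // bsfl: index of the lowest set bit.
  return i;
}
static_assert(Tzcnt32(0) == 32 && Tzcnt32(8) == 3 && Tzcnt32(1) == 0,
              "matches tzcnt semantics");
}  // namespace tzcnt_sketch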
1845 :
1846 64 : void TurboAssembler::Popcntl(Register dst, Register src) {
1847 64 : if (CpuFeatures::IsSupported(POPCNT)) {
1848 : CpuFeatureScope scope(this, POPCNT);
1849 64 : popcntl(dst, src);
1850 64 : return;
1851 : }
1852 0 : UNREACHABLE();
1853 : }
1854 :
1855 0 : void TurboAssembler::Popcntl(Register dst, Operand src) {
1856 0 : if (CpuFeatures::IsSupported(POPCNT)) {
1857 : CpuFeatureScope scope(this, POPCNT);
1858 0 : popcntl(dst, src);
1859 0 : return;
1860 : }
1861 0 : UNREACHABLE();
1862 : }
1863 :
1864 44 : void TurboAssembler::Popcntq(Register dst, Register src) {
1865 44 : if (CpuFeatures::IsSupported(POPCNT)) {
1866 : CpuFeatureScope scope(this, POPCNT);
1867 44 : popcntq(dst, src);
1868 44 : return;
1869 : }
1870 0 : UNREACHABLE();
1871 : }
1872 :
1873 0 : void TurboAssembler::Popcntq(Register dst, Operand src) {
1874 0 : if (CpuFeatures::IsSupported(POPCNT)) {
1875 : CpuFeatureScope scope(this, POPCNT);
1876 0 : popcntq(dst, src);
1877 0 : return;
1878 : }
1879 0 : UNREACHABLE();
1880 : }
1881 :
1882 :
1883 0 : void MacroAssembler::Pushad() {
1884 : Push(rax);
1885 : Push(rcx);
1886 : Push(rdx);
1887 : Push(rbx);
1888 : // Not pushing rsp or rbp.
1889 : Push(rsi);
1890 : Push(rdi);
1891 : Push(r8);
1892 : Push(r9);
1893 : // r10 is kScratchRegister.
1894 : Push(r11);
1895 : Push(r12);
1896 : // r13 is kRootRegister.
1897 : Push(r14);
1898 : Push(r15);
1899 : STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
1900 : // Use lea for symmetry with Popad.
1901 : int sp_delta = (kNumSafepointRegisters - kNumSafepointSavedRegisters) *
1902 : kSystemPointerSize;
1903 0 : leaq(rsp, Operand(rsp, -sp_delta));
1904 0 : }
1905 :
1906 :
1907 0 : void MacroAssembler::Popad() {
1908 : // Popad must not change the flags, so use lea instead of addq.
1909 : int sp_delta = (kNumSafepointRegisters - kNumSafepointSavedRegisters) *
1910 : kSystemPointerSize;
1911 0 : leaq(rsp, Operand(rsp, sp_delta));
1912 : Pop(r15);
1913 : Pop(r14);
1914 : Pop(r12);
1915 : Pop(r11);
1916 : Pop(r9);
1917 : Pop(r8);
1918 : Pop(rdi);
1919 : Pop(rsi);
1920 : Pop(rbx);
1921 : Pop(rdx);
1922 : Pop(rcx);
1923 : Pop(rax);
1924 0 : }
1925 :
1926 :
1927 : // Order in which general registers are pushed by Pushad:
1928 : // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
1929 : const int
1930 : MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
1931 : 0, // rax
1932 : 1, // rcx
1933 : 2, // rdx
1934 : 3, // rbx
1935 : -1, // rsp is not pushed.
1936 : -1, // rbp is not pushed.
1937 : 4, // rsi
1938 : 5, // rdi
1939 : 6, // r8
1940 : 7, // r9
1941 : -1, // r10 is kScratchRegister.
1942 : 8, // r11
1943 : 9, // r12
1944 : -1, // r13 is kRootRegister.
1945 : 10, // r14
1946 : 11 // r15
1947 : };
1948 :
1949 168 : void MacroAssembler::PushStackHandler() {
1950 : // Adjust this code if not the case.
1951 : STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kSystemPointerSize);
1952 : STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1953 :
1954 : Push(Immediate(0)); // Padding.
1955 :
1956 : // Link the current handler as the next handler.
1957 : ExternalReference handler_address =
1958 168 : ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
1959 168 : Push(ExternalReferenceAsOperand(handler_address));
1960 :
1961 : // Set this new handler as the current one.
1962 168 : movq(ExternalReferenceAsOperand(handler_address), rsp);
1963 168 : }
1964 :
1965 :
1966 168 : void MacroAssembler::PopStackHandler() {
1967 : STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1968 : ExternalReference handler_address =
1969 168 : ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
1970 168 : Pop(ExternalReferenceAsOperand(handler_address));
1971 168 : addq(rsp, Immediate(StackHandlerConstants::kSize - kSystemPointerSize));
1972 168 : }
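// Push/PopStackHandler maintain an intrusive singly linked list of handlers
// threaded through the stack, with the list head kept in an isolate slot
// (kHandlerAddress). A minimal sketch of that discipline (illustrative
// types; V8's handler additionally carries a padding word):
namespace handler_sketch {
struct Handler { Handler* next; };
inline void PushHandler(Handler** head, Handler* h) {
  h->next = *head;  // Link the current handler as the next handler.
  *head = h;        // Set the new handler as the current one.
}
inline void PopHandler(Handler** head) { *head = (*head)->next; }
}  // namespace handler_sketch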
1973 :
1974 1422 : void TurboAssembler::Ret() { ret(0); }
1975 :
1976 2246803 : void TurboAssembler::Ret(int bytes_dropped, Register scratch) {
1977 2246803 : if (is_uint16(bytes_dropped)) {
1978 2246799 : ret(bytes_dropped);
1979 : } else {
1980 : PopReturnAddressTo(scratch);
1981 4 : addq(rsp, Immediate(bytes_dropped));
1982 : PushReturnAddressFrom(scratch);
1983 4 : ret(0);
1984 : }
1985 2246943 : }
1986 :
1987 560 : void MacroAssembler::CmpObjectType(Register heap_object,
1988 : InstanceType type,
1989 : Register map) {
1990 : LoadTaggedPointerField(map,
1991 560 : FieldOperand(heap_object, HeapObject::kMapOffset));
1992 560 : CmpInstanceType(map, type);
1993 560 : }
1994 :
1995 :
1996 1064 : void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1997 2128 : cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
1998 1064 : }
1999 :
2000 0 : void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
2001 : XMMRegister scratch, Label* lost_precision,
2002 : Label* is_nan, Label::Distance dst) {
2003 0 : Cvttsd2si(result_reg, input_reg);
2004 0 : Cvtlsi2sd(kScratchDoubleReg, result_reg);
2005 : Ucomisd(kScratchDoubleReg, input_reg);
2006 0 : j(not_equal, lost_precision, dst);
2007 0 : j(parity_even, is_nan, dst); // NaN.
2008 0 : }
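// DoubleToI truncates, converts back, and compares: inequality means the
// truncation lost precision, and an unordered compare (parity flag set)
// means NaN. A portable sketch; note the machine code relies on cvttsd2si's
// integer-indefinite sentinel, while plain C++ needs an explicit range check
// to avoid undefined behavior:
namespace double_to_i_sketch {
inline bool DoubleToInt32(double input, int* result) {
  if (input != input) return false;  // NaN: j(parity_even, is_nan).
  if (input < -2147483648.0 || input >= 2147483648.0) return false;
  int truncated = static_cast<int>(input);                    // Cvttsd2si.
  if (static_cast<double>(truncated) != input) return false;  // lost_precision
  *result = truncated;
  return true;
}
}  // namespace double_to_i_sketch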
2009 :
2010 :
2011 117 : void MacroAssembler::AssertNotSmi(Register object) {
2012 117 : if (emit_debug_code()) {
2013 : Condition is_smi = CheckSmi(object);
2014 0 : Check(NegateCondition(is_smi), AbortReason::kOperandIsASmi);
2015 : }
2016 117 : }
2017 :
2018 :
2019 559 : void MacroAssembler::AssertSmi(Register object) {
2020 559 : if (emit_debug_code()) {
2021 : Condition is_smi = CheckSmi(object);
2022 0 : Check(is_smi, AbortReason::kOperandIsNotASmi);
2023 : }
2024 559 : }
2025 :
2026 0 : void MacroAssembler::AssertSmi(Operand object) {
2027 0 : if (emit_debug_code()) {
2028 : Condition is_smi = CheckSmi(object);
2029 0 : Check(is_smi, AbortReason::kOperandIsNotASmi);
2030 : }
2031 0 : }
2032 :
2033 1358422 : void TurboAssembler::AssertZeroExtended(Register int32_register) {
2034 1358422 : if (emit_debug_code()) {
2035 : DCHECK_NE(int32_register, kScratchRegister);
2036 66 : movq(kScratchRegister, int64_t{0x0000000100000000});
2037 66 : cmpq(kScratchRegister, int32_register);
2038 66 : Check(above_equal, AbortReason::k32BitValueInRegisterIsNotZeroExtended);
2039 : }
2040 1358422 : }
2041 :
2042 112 : void MacroAssembler::AssertConstructor(Register object) {
2043 112 : if (emit_debug_code()) {
2044 0 : testb(object, Immediate(kSmiTagMask));
2045 0 : Check(not_equal, AbortReason::kOperandIsASmiAndNotAConstructor);
2046 : Push(object);
2047 : LoadTaggedPointerField(object,
2048 0 : FieldOperand(object, HeapObject::kMapOffset));
2049 : testb(FieldOperand(object, Map::kBitFieldOffset),
2050 0 : Immediate(Map::IsConstructorBit::kMask));
2051 : Pop(object);
2052 0 : Check(not_zero, AbortReason::kOperandIsNotAConstructor);
2053 : }
2054 112 : }
2055 :
2056 280 : void MacroAssembler::AssertFunction(Register object) {
2057 280 : if (emit_debug_code()) {
2058 0 : testb(object, Immediate(kSmiTagMask));
2059 0 : Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
2060 : Push(object);
2061 0 : CmpObjectType(object, JS_FUNCTION_TYPE, object);
2062 : Pop(object);
2063 0 : Check(equal, AbortReason::kOperandIsNotAFunction);
2064 : }
2065 280 : }
2066 :
2067 :
2068 112 : void MacroAssembler::AssertBoundFunction(Register object) {
2069 112 : if (emit_debug_code()) {
2070 0 : testb(object, Immediate(kSmiTagMask));
2071 0 : Check(not_equal, AbortReason::kOperandIsASmiAndNotABoundFunction);
2072 : Push(object);
2073 0 : CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
2074 : Pop(object);
2075 0 : Check(equal, AbortReason::kOperandIsNotABoundFunction);
2076 : }
2077 112 : }
2078 :
2079 56 : void MacroAssembler::AssertGeneratorObject(Register object) {
2080 112 : if (!emit_debug_code()) return;
2081 0 : testb(object, Immediate(kSmiTagMask));
2082 0 : Check(not_equal, AbortReason::kOperandIsASmiAndNotAGeneratorObject);
2083 :
2084 : // Load map
2085 0 : Register map = object;
2086 : Push(object);
2087 0 : LoadTaggedPointerField(map, FieldOperand(object, HeapObject::kMapOffset));
2088 :
2089 0 : Label do_check;
2090 : // Check if JSGeneratorObject
2091 0 : CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
2092 0 : j(equal, &do_check);
2093 :
2094 : // Check if JSAsyncFunctionObject
2095 0 : CmpInstanceType(map, JS_ASYNC_FUNCTION_OBJECT_TYPE);
2096 0 : j(equal, &do_check);
2097 :
2098 : // Check if JSAsyncGeneratorObject
2099 0 : CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);
2100 :
2101 0 : bind(&do_check);
2102 : // Restore generator object to register and perform assertion
2103 : Pop(object);
2104 0 : Check(equal, AbortReason::kOperandIsNotAGeneratorObject);
2105 : }
2106 :
2107 112 : void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
2108 112 : if (emit_debug_code()) {
2109 0 : Label done_checking;
2110 0 : AssertNotSmi(object);
2111 0 : Cmp(object, isolate()->factory()->undefined_value());
2112 0 : j(equal, &done_checking);
2113 0 : Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
2114 0 : Assert(equal, AbortReason::kExpectedUndefinedOrCell);
2115 0 : bind(&done_checking);
2116 : }
2117 112 : }
2118 :
2119 56 : void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
2120 56 : cmpl(in_out, Immediate(kClearedWeakHeapObjectLower32));
2121 56 : j(equal, target_if_cleared);
2122 :
2123 56 : andq(in_out, Immediate(~static_cast<int32_t>(kWeakHeapObjectMask)));
2124 56 : }
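// LoadWeakValue strips the weak tag bit so the value can be used as a strong
// reference. A sketch assuming the weak bit is bit 1 of the tagged pointer
// (the mask value below is an assumption, not the V8 constant):
namespace weak_ref_sketch {
constexpr unsigned long long kWeakMaskSketch = 2;
constexpr unsigned long long ToStrong(unsigned long long weak_ref) {
  return weak_ref & ~kWeakMaskSketch;  // andq(in_out, ~kWeakHeapObjectMask)
}
static_assert(ToStrong(0x1003) == 0x1001, "weak tag 3 becomes strong tag 1");
}  // namespace weak_ref_sketch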
2125 :
2126 168 : void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2127 : DCHECK_GT(value, 0);
2128 168 : if (FLAG_native_code_counters && counter->Enabled()) {
2129 : Operand counter_operand =
2130 0 : ExternalReferenceAsOperand(ExternalReference::Create(counter));
2131 0 : if (value == 1) {
2132 0 : incl(counter_operand);
2133 : } else {
2134 0 : addl(counter_operand, Immediate(value));
2135 : }
2136 : }
2137 168 : }
2138 :
2139 :
2140 0 : void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2141 : DCHECK_GT(value, 0);
2142 0 : if (FLAG_native_code_counters && counter->Enabled()) {
2143 : Operand counter_operand =
2144 0 : ExternalReferenceAsOperand(ExternalReference::Create(counter));
2145 0 : if (value == 1) {
2146 0 : decl(counter_operand);
2147 : } else {
2148 0 : subl(counter_operand, Immediate(value));
2149 : }
2150 : }
2151 0 : }
2152 :
2153 56 : void MacroAssembler::MaybeDropFrames() {
2154 : // Check whether we need to drop frames to restart a function on the stack.
2155 : ExternalReference restart_fp =
2156 112 : ExternalReference::debug_restart_fp_address(isolate());
2157 56 : Load(rbx, restart_fp);
2158 56 : testq(rbx, rbx);
2159 :
2160 56 : Label dont_drop;
2161 56 : j(zero, &dont_drop, Label::kNear);
2162 56 : Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET);
2163 :
2164 56 : bind(&dont_drop);
2165 56 : }
2166 :
2167 1344 : void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
2168 : Register caller_args_count_reg,
2169 : Register scratch0, Register scratch1) {
2170 : #if DEBUG
2171 : if (callee_args_count.is_reg()) {
2172 : DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
2173 : scratch1));
2174 : } else {
2175 : DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
2176 : }
2177 : #endif
2178 :
2179 : // Calculate the destination address where we will put the return address
2180 : // after we drop the current frame.
2181 1344 : Register new_sp_reg = scratch0;
2182 1344 : if (callee_args_count.is_reg()) {
2183 1344 : subq(caller_args_count_reg, callee_args_count.reg());
2184 : leaq(new_sp_reg,
2185 : Operand(rbp, caller_args_count_reg, times_system_pointer_size,
2186 2688 : StandardFrameConstants::kCallerPCOffset));
2187 : } else {
2188 : leaq(new_sp_reg,
2189 : Operand(rbp, caller_args_count_reg, times_system_pointer_size,
2190 : StandardFrameConstants::kCallerPCOffset -
2191 0 : callee_args_count.immediate() * kSystemPointerSize));
2192 : }
2193 :
2194 1344 : if (FLAG_debug_code) {
2195 0 : cmpq(rsp, new_sp_reg);
2196 0 : Check(below, AbortReason::kStackAccessBelowStackPointer);
2197 : }
2198 :
2199 : // Copy the return address from the caller's frame to the current frame's
2200 : // return-address slot so that it is not trashed, and let the following
2201 : // loop copy it to the right place.
2202 1344 : Register tmp_reg = scratch1;
2203 2688 : movq(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
2204 2688 : movq(Operand(rsp, 0), tmp_reg);
2205 :
2206 : // Restore caller's frame pointer now as it could be overwritten by
2207 : // the copying loop.
2208 2688 : movq(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2209 :
2210 : // +2 here is to copy both receiver and return address.
2211 1344 : Register count_reg = caller_args_count_reg;
2212 1344 : if (callee_args_count.is_reg()) {
2213 2688 : leaq(count_reg, Operand(callee_args_count.reg(), 2));
2214 : } else {
2215 0 : movq(count_reg, Immediate(callee_args_count.immediate() + 2));
2216 : // TODO(ishell): Unroll copying loop for small immediate values.
2217 : }
2218 :
2219 : // Now copy callee arguments to the caller frame going backwards to avoid
2220 : // callee arguments corruption (source and destination areas could overlap).
2221 1344 : Label loop, entry;
2222 1344 : jmp(&entry, Label::kNear);
2223 1344 : bind(&loop);
2224 : decq(count_reg);
2225 2688 : movq(tmp_reg, Operand(rsp, count_reg, times_system_pointer_size, 0));
2226 2688 : movq(Operand(new_sp_reg, count_reg, times_system_pointer_size, 0), tmp_reg);
2227 1344 : bind(&entry);
2228 1344 : cmpq(count_reg, Immediate(0));
2229 1344 : j(not_equal, &loop, Label::kNear);
2230 :
2231 : // Leave current frame.
2232 : movq(rsp, new_sp_reg);
2233 1344 : }
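// The loop above is effectively a memmove: the destination (the caller's
// frame) overlaps and sits above the source, so copying from the highest
// slot down avoids clobbering slots that are still to be copied. Sketch:
namespace tail_call_sketch {
inline void CopyBackwards(long long* dst, const long long* src, int count) {
  for (int i = count - 1; i >= 0; --i) dst[i] = src[i];  // Highest slot first.
}
}  // namespace tail_call_sketch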
2234 :
2235 112 : void MacroAssembler::InvokeFunction(Register function, Register new_target,
2236 : const ParameterCount& actual,
2237 : InvokeFlag flag) {
2238 : LoadTaggedPointerField(
2239 112 : rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2240 : movzxwq(rbx,
2241 112 : FieldOperand(rbx, SharedFunctionInfo::kFormalParameterCountOffset));
2242 :
2243 : ParameterCount expected(rbx);
2244 112 : InvokeFunction(function, new_target, expected, actual, flag);
2245 112 : }
2246 :
2247 168 : void MacroAssembler::InvokeFunction(Register function, Register new_target,
2248 : const ParameterCount& expected,
2249 : const ParameterCount& actual,
2250 : InvokeFlag flag) {
2251 : DCHECK(function == rdi);
2252 : LoadTaggedPointerField(rsi,
2253 168 : FieldOperand(function, JSFunction::kContextOffset));
2254 168 : InvokeFunctionCode(rdi, new_target, expected, actual, flag);
2255 168 : }
2256 :
2257 336 : void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
2258 : const ParameterCount& expected,
2259 : const ParameterCount& actual,
2260 : InvokeFlag flag) {
2261 : // You can't call a function without a valid frame.
2262 : DCHECK(flag == JUMP_FUNCTION || has_frame());
2263 : DCHECK(function == rdi);
2264 : DCHECK_IMPLIES(new_target.is_valid(), new_target == rdx);
2265 :
2266 : // On function call, call into the debugger if necessary.
2267 336 : CheckDebugHook(function, new_target, expected, actual);
2268 :
2269 : // Clear the new.target register if not given.
2270 336 : if (!new_target.is_valid()) {
2271 224 : LoadRoot(rdx, RootIndex::kUndefinedValue);
2272 : }
2273 :
2274 336 : Label done;
2275 336 : bool definitely_mismatches = false;
2276 : InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
2277 336 : Label::kNear);
2278 336 : if (!definitely_mismatches) {
2279 : // We call indirectly through the code field in the function to
2280 : // allow recompilation to take effect without changing any of the
2281 : // call sites.
2282 : static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
2283 : LoadTaggedPointerField(rcx,
2284 336 : FieldOperand(function, JSFunction::kCodeOffset));
2285 336 : if (flag == CALL_FUNCTION) {
2286 112 : CallCodeObject(rcx);
2287 : } else {
2288 : DCHECK(flag == JUMP_FUNCTION);
2289 224 : JumpCodeObject(rcx);
2290 : }
2291 336 : bind(&done);
2292 : }
2293 336 : }
2294 :
2295 336 : void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2296 0 : const ParameterCount& actual, Label* done,
2297 : bool* definitely_mismatches,
2298 : InvokeFlag flag,
2299 : Label::Distance near_jump) {
2300 : bool definitely_matches = false;
2301 336 : *definitely_mismatches = false;
2302 336 : Label invoke;
2303 336 : if (expected.is_immediate()) {
2304 : DCHECK(actual.is_immediate());
2305 0 : Set(rax, actual.immediate());
2306 0 : if (expected.immediate() == actual.immediate()) {
2307 : definitely_matches = true;
2308 : } else {
2309 0 : if (expected.immediate() ==
2310 : SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
2311 : // Don't worry about adapting arguments for built-ins that
2312 : // don't want that done. Skip adaptation code by making it look
2313 : // like we have a match between expected and actual number of
2314 : // arguments.
2315 : definitely_matches = true;
2316 : } else {
2317 0 : *definitely_mismatches = true;
2318 0 : Set(rbx, expected.immediate());
2319 : }
2320 : }
2321 : } else {
2322 336 : if (actual.is_immediate()) {
2323 : // Expected is in register, actual is immediate. This is the
2324 : // case when we invoke function values without going through the
2325 : // IC mechanism.
2326 0 : Set(rax, actual.immediate());
2327 0 : cmpq(expected.reg(), Immediate(actual.immediate()));
2328 0 : j(equal, &invoke, Label::kNear);
2329 : DCHECK(expected.reg() == rbx);
2330 336 : } else if (expected.reg() != actual.reg()) {
2331 : // Both expected and actual are in (different) registers. This
2332 : // is the case when we invoke functions using call and apply.
2333 280 : cmpq(expected.reg(), actual.reg());
2334 280 : j(equal, &invoke, Label::kNear);
2335 : DCHECK(actual.reg() == rax);
2336 : DCHECK(expected.reg() == rbx);
2337 : } else {
2338 : definitely_matches = true;
2339 56 : Move(rax, actual.reg());
2340 : }
2341 : }
2342 :
2343 336 : if (!definitely_matches) {
2344 280 : Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
2345 280 : if (flag == CALL_FUNCTION) {
2346 112 : Call(adaptor, RelocInfo::CODE_TARGET);
2347 112 : if (!*definitely_mismatches) {
2348 112 : jmp(done, near_jump);
2349 : }
2350 : } else {
2351 168 : Jump(adaptor, RelocInfo::CODE_TARGET);
2352 : }
2353 280 : bind(&invoke);
2354 : }
2355 336 : }
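// The adaptation decision above, flattened into a sketch (kDontAdapt stands
// in for SharedFunctionInfo::kDontAdaptArgumentsSentinel):
namespace invoke_sketch {
constexpr bool NeedsAdaptor(int expected, int actual, int kDontAdapt) {
  // Exact match, or a builtin that opted out of adaptation: invoke directly.
  // Anything else is routed through the ArgumentsAdaptorTrampoline.
  return expected != actual && expected != kDontAdapt;
}
}  // namespace invoke_sketch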
2356 :
2357 336 : void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
2358 : const ParameterCount& expected,
2359 : const ParameterCount& actual) {
2360 336 : Label skip_hook;
2361 : ExternalReference debug_hook_active =
2362 672 : ExternalReference::debug_hook_on_function_call_address(isolate());
2363 : Operand debug_hook_active_operand =
2364 336 : ExternalReferenceAsOperand(debug_hook_active);
2365 336 : cmpb(debug_hook_active_operand, Immediate(0));
2366 336 : j(equal, &skip_hook);
2367 :
2368 : {
2369 : FrameScope frame(this,
2370 336 : has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
2371 336 : if (expected.is_reg()) {
2372 336 : SmiTag(expected.reg(), expected.reg());
2373 : Push(expected.reg());
2374 : }
2375 336 : if (actual.is_reg()) {
2376 336 : SmiTag(actual.reg(), actual.reg());
2377 : Push(actual.reg());
2378 336 : SmiUntag(actual.reg(), actual.reg());
2379 : }
2380 336 : if (new_target.is_valid()) {
2381 : Push(new_target);
2382 : }
2383 : Push(fun);
2384 : Push(fun);
2385 336 : Push(StackArgumentsAccessor(rbp, actual).GetReceiverOperand());
2386 336 : CallRuntime(Runtime::kDebugOnFunctionCall);
2387 : Pop(fun);
2388 336 : if (new_target.is_valid()) {
2389 : Pop(new_target);
2390 : }
2391 336 : if (actual.is_reg()) {
2392 : Pop(actual.reg());
2393 336 : SmiUntag(actual.reg(), actual.reg());
2394 : }
2395 336 : if (expected.is_reg()) {
2396 : Pop(expected.reg());
2397 336 : SmiUntag(expected.reg(), expected.reg());
2398 336 : }
2399 : }
2400 336 : bind(&skip_hook);
2401 336 : }
2402 :
2403 610961 : void TurboAssembler::StubPrologue(StackFrame::Type type) {
2404 610961 : pushq(rbp); // Caller's frame pointer.
2405 : movq(rbp, rsp);
2406 : Push(Immediate(StackFrame::TypeToMarker(type)));
2407 611127 : }
2408 :
2409 636032 : void TurboAssembler::Prologue() {
2410 636032 : pushq(rbp); // Caller's frame pointer.
2411 : movq(rbp, rsp);
2412 : Push(rsi); // Callee's context.
2413 : Push(rdi); // Callee's JS function.
2414 636035 : }
2415 :
2416 595603 : void TurboAssembler::EnterFrame(StackFrame::Type type) {
2417 595603 : pushq(rbp);
2418 : movq(rbp, rsp);
2419 : Push(Immediate(StackFrame::TypeToMarker(type)));
2420 595594 : }
2421 :
2422 948159 : void TurboAssembler::LeaveFrame(StackFrame::Type type) {
2423 948159 : if (emit_debug_code()) {
2424 : cmpq(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
2425 0 : Immediate(StackFrame::TypeToMarker(type)));
2426 0 : Check(equal, AbortReason::kStackFrameTypesMustMatch);
2427 : }
2428 948159 : movq(rsp, rbp);
2429 948121 : popq(rbp);
2430 948133 : }
2431 :
2432 672 : void MacroAssembler::EnterExitFramePrologue(bool save_rax,
2433 : StackFrame::Type frame_type) {
2434 : DCHECK(frame_type == StackFrame::EXIT ||
2435 : frame_type == StackFrame::BUILTIN_EXIT);
2436 :
2437 : // Set up the frame structure on the stack.
2438 : // All constants are relative to the frame pointer of the exit frame.
2439 : DCHECK_EQ(kFPOnStackSize + kPCOnStackSize,
2440 : ExitFrameConstants::kCallerSPDisplacement);
2441 : DCHECK_EQ(kFPOnStackSize, ExitFrameConstants::kCallerPCOffset);
2442 : DCHECK_EQ(0 * kSystemPointerSize, ExitFrameConstants::kCallerFPOffset);
2443 672 : pushq(rbp);
2444 : movq(rbp, rsp);
2445 :
2446 : // Reserve room for entry stack pointer and push the code object.
2447 : Push(Immediate(StackFrame::TypeToMarker(frame_type)));
2448 : DCHECK_EQ(-2 * kSystemPointerSize, ExitFrameConstants::kSPOffset);
2449 : Push(Immediate(0)); // Saved entry sp, patched before call.
2450 2688 : Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2451 : Push(kScratchRegister); // Accessed from ExitFrame::code_slot.
2452 :
2453 : // Save the frame pointer and the context in top.
2454 672 : if (save_rax) {
2455 : movq(r14, rax); // Backup rax in callee-save register.
2456 : }
2457 :
2458 : Store(
2459 : ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()),
2460 672 : rbp);
2461 : Store(ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()),
2462 672 : rsi);
2463 : Store(
2464 : ExternalReference::Create(IsolateAddressId::kCFunctionAddress, isolate()),
2465 672 : rbx);
2466 672 : }
2467 :
2468 :
2469 672 : void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
2470 : bool save_doubles) {
2471 : #ifdef _WIN64
2472 : const int kShadowSpace = 4;
2473 : arg_stack_space += kShadowSpace;
2474 : #endif
2475 : // Optionally save all XMM registers.
2476 672 : if (save_doubles) {
2477 224 : int space = XMMRegister::kNumRegisters * kDoubleSize +
2478 224 : arg_stack_space * kSystemPointerSize;
2479 224 : subq(rsp, Immediate(space));
2480 : int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
2481 7168 : const RegisterConfiguration* config = RegisterConfiguration::Default();
2482 7168 : for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
2483 : DoubleRegister reg =
2484 : DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
2485 6720 : Movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
2486 : }
2487 448 : } else if (arg_stack_space > 0) {
2488 224 : subq(rsp, Immediate(arg_stack_space * kSystemPointerSize));
2489 : }
2490 :
2491 : // Get the required frame alignment for the OS.
2492 672 : const int kFrameAlignment = base::OS::ActivationFrameAlignment();
2493 672 : if (kFrameAlignment > 0) {
2494 : DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
2495 : DCHECK(is_int8(kFrameAlignment));
2496 1344 : andq(rsp, Immediate(-kFrameAlignment));
2497 : }
2498 :
2499 : // Patch the saved entry sp.
2500 1344 : movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2501 672 : }
2502 :
2503 448 : void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles,
2504 : StackFrame::Type frame_type) {
2505 448 : EnterExitFramePrologue(true, frame_type);
2506 :
2507 : // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
2508 : // so it must be retained across the C-call.
2509 : int offset = StandardFrameConstants::kCallerSPOffset - kSystemPointerSize;
2510 896 : leaq(r15, Operand(rbp, r14, times_system_pointer_size, offset));
2511 :
2512 448 : EnterExitFrameEpilogue(arg_stack_space, save_doubles);
2513 448 : }
2514 :
2515 :
2516 224 : void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
2517 224 : EnterExitFramePrologue(false, StackFrame::EXIT);
2518 224 : EnterExitFrameEpilogue(arg_stack_space, false);
2519 224 : }
2520 :
2521 :
2522 560 : void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
2523 : // Registers:
2524 : // r15 : argv
2525 560 : if (save_doubles) {
2526 : int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
2527 7168 : const RegisterConfiguration* config = RegisterConfiguration::Default();
2528 7168 : for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
2529 : DoubleRegister reg =
2530 : DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
2531 6720 : Movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
2532 : }
2533 : }
2534 :
2535 560 : if (pop_arguments) {
2536 : // Get the return address from the stack and restore the frame pointer.
2537 896 : movq(rcx, Operand(rbp, kFPOnStackSize));
2538 896 : movq(rbp, Operand(rbp, 0 * kSystemPointerSize));
2539 :
2540 : // Drop everything up to and including the arguments and the receiver
2541 : // from the caller stack.
2542 896 : leaq(rsp, Operand(r15, 1 * kSystemPointerSize));
2543 :
2544 : PushReturnAddressFrom(rcx);
2545 : } else {
2546 : // Otherwise just leave the exit frame.
2547 112 : leave();
2548 : }
2549 :
2550 560 : LeaveExitFrameEpilogue();
2551 560 : }
2552 :
2553 112 : void MacroAssembler::LeaveApiExitFrame() {
2554 112 : movq(rsp, rbp);
2555 112 : popq(rbp);
2556 :
2557 112 : LeaveExitFrameEpilogue();
2558 112 : }
2559 :
2560 672 : void MacroAssembler::LeaveExitFrameEpilogue() {
2561 : // Restore current context from top and clear it in debug mode.
2562 : ExternalReference context_address =
2563 1344 : ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
2564 672 : Operand context_operand = ExternalReferenceAsOperand(context_address);
2565 672 : movq(rsi, context_operand);
2566 : #ifdef DEBUG
2567 : movq(context_operand, Immediate(Context::kInvalidContext));
2568 : #endif
2569 :
2570 : // Clear the top frame.
2571 : ExternalReference c_entry_fp_address =
2572 672 : ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
2573 672 : Operand c_entry_fp_operand = ExternalReferenceAsOperand(c_entry_fp_address);
2574 : movq(c_entry_fp_operand, Immediate(0));
2575 672 : }
2576 :
2577 :
2578 : #ifdef _WIN64
2579 : static const int kRegisterPassedArguments = 4;
2580 : #else
2581 : static const int kRegisterPassedArguments = 6;
2582 : #endif
2583 :
2584 :
2585 336 : void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
2586 336 : LoadTaggedPointerField(dst, NativeContextOperand());
2587 336 : LoadTaggedPointerField(dst, ContextOperand(dst, index));
2588 336 : }
2589 :
2590 :
2591 0 : int TurboAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2592 : // On Windows 64 stack slots are reserved by the caller for all arguments
2593 : // including the ones passed in registers, and space is always allocated for
2594 : // the four register arguments even if the function takes fewer than four
2595 : // arguments.
2596 : // On the AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2597 : // and the caller does not reserve stack slots for them.
2598 : DCHECK_GE(num_arguments, 0);
2599 : #ifdef _WIN64
2600 : const int kMinimumStackSlots = kRegisterPassedArguments;
2601 : if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2602 : return num_arguments;
2603 : #else
2604 1523788 : if (num_arguments < kRegisterPassedArguments) return 0;
2605 92810 : return num_arguments - kRegisterPassedArguments;
2606 : #endif
2607 : }
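// Worked instances of the two conventions, compile-time checked:
namespace cfunc_slots_sketch {
constexpr int SlotsWin64(int n) { return n < 4 ? 4 : n; }
constexpr int SlotsSysV(int n) { return n < 6 ? 0 : n - 6; }
static_assert(SlotsWin64(2) == 4 && SlotsSysV(2) == 0,
              "register-only calls still reserve Win64 shadow space");
static_assert(SlotsWin64(8) == 8 && SlotsSysV(8) == 2,
              "only the 7th and 8th SysV arguments spill to the stack");
}  // namespace cfunc_slots_sketch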
2608 :
2609 761899 : void TurboAssembler::PrepareCallCFunction(int num_arguments) {
2610 761899 : int frame_alignment = base::OS::ActivationFrameAlignment();
2611 : DCHECK_NE(frame_alignment, 0);
2612 : DCHECK_GE(num_arguments, 0);
2613 :
2614 : // Make stack end at alignment and allocate space for arguments and old rsp.
2615 761899 : movq(kScratchRegister, rsp);
2616 : DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
2617 : int argument_slots_on_stack =
2618 : ArgumentStackSlotsForCFunctionCall(num_arguments);
2619 1523798 : subq(rsp, Immediate((argument_slots_on_stack + 1) * kSystemPointerSize));
2620 1523798 : andq(rsp, Immediate(-frame_alignment));
2621 : movq(Operand(rsp, argument_slots_on_stack * kSystemPointerSize),
2622 1523798 : kScratchRegister);
2623 761899 : }
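// PrepareCallCFunction rounds rsp down with a negative-immediate mask and
// stashes the old rsp in the slot just above the outgoing arguments, where
// CallCFunction reloads it after the call. The address arithmetic, checked
// at compile time (illustrative values):
namespace align_sketch {
constexpr unsigned long long AlignedSp(unsigned long long rsp, int arg_slots,
                                       unsigned long long alignment) {
  // Space for the arguments plus one slot for the saved rsp, then align:
  // x & -alignment == x & ~(alignment - 1) for power-of-two alignments.
  return (rsp - (arg_slots + 1) * 8) & ~(alignment - 1);
}
static_assert(AlignedSp(0x1008, 1, 16) == 0xFF0, "aligned below the old rsp");
}  // namespace align_sketch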
2624 :
2625 760749 : void TurboAssembler::CallCFunction(ExternalReference function,
2626 : int num_arguments) {
2627 760749 : LoadAddress(rax, function);
2628 760749 : CallCFunction(rax, num_arguments);
2629 760749 : }
2630 :
2631 761889 : void TurboAssembler::CallCFunction(Register function, int num_arguments) {
2632 : DCHECK_LE(num_arguments, kMaxCParameters);
2633 : DCHECK(has_frame());
2634 : // Check stack alignment.
2635 761889 : if (emit_debug_code()) {
2636 0 : CheckStackAlignment();
2637 : }
2638 :
2639 : // Save the frame pointer and PC so that the stack layout remains iterable,
2640 : // even without an ExitFrame which normally exists between JS and C frames.
2641 2250972 : if (isolate() != nullptr) {
2642 363597 : Label get_pc;
2643 : DCHECK(!AreAliased(kScratchRegister, function));
2644 727194 : leaq(kScratchRegister, Operand(&get_pc, 0));
2645 363597 : bind(&get_pc);
2646 : movq(ExternalReferenceAsOperand(
2647 : ExternalReference::fast_c_call_caller_pc_address(isolate())),
2648 363597 : kScratchRegister);
2649 : movq(ExternalReferenceAsOperand(
2650 : ExternalReference::fast_c_call_caller_fp_address(isolate())),
2651 363597 : rbp);
2652 : }
2653 :
2654 761889 : call(function);
2655 :
2656 761889 : if (isolate() != nullptr) {
2657 : // We don't unset the PC; the FP is the source of truth.
2658 : movq(ExternalReferenceAsOperand(
2659 : ExternalReference::fast_c_call_caller_fp_address(isolate())),
2660 363597 : Immediate(0));
2661 : }
2662 :
2663 : DCHECK_NE(base::OS::ActivationFrameAlignment(), 0);
2664 : DCHECK_GE(num_arguments, 0);
2665 : int argument_slots_on_stack =
2666 : ArgumentStackSlotsForCFunctionCall(num_arguments);
2667 1523778 : movq(rsp, Operand(rsp, argument_slots_on_stack * kSystemPointerSize));
2668 761889 : }
2669 :
2670 651490 : void TurboAssembler::CheckPageFlag(Register object, Register scratch, int mask,
2671 : Condition cc, Label* condition_met,
2672 : Label::Distance condition_met_distance) {
2673 : DCHECK(cc == zero || cc == not_zero);
2674 651490 : if (scratch == object) {
2675 112 : andq(scratch, Immediate(~kPageAlignmentMask));
2676 : } else {
2677 651378 : movq(scratch, Immediate(~kPageAlignmentMask));
2678 651378 : andq(scratch, object);
2679 : }
2680 651490 : if (mask < (1 << kBitsPerByte)) {
2681 : testb(Operand(scratch, MemoryChunk::kFlagsOffset),
2682 1302980 : Immediate(static_cast<uint8_t>(mask)));
2683 : } else {
2684 0 : testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
2685 : }
2686 651490 : j(cc, condition_met, condition_met_distance);
2687 651490 : }
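// MemoryChunk headers live at page-aligned addresses, so masking any object
// address with ~kPageAlignmentMask yields the header of its chunk, whose
// flags word can then be tested directly. A sketch with a hypothetical
// 512 KB page size (the real constant is defined elsewhere):
namespace page_flag_sketch {
constexpr unsigned long long kPageAlignmentMaskSketch = (1ull << 19) - 1;
constexpr unsigned long long ChunkOf(unsigned long long addr) {
  return addr & ~kPageAlignmentMaskSketch;
}
static_assert(ChunkOf(0x80012345ull) == 0x80000000ull,
              "all addresses within a page share one chunk header");
}  // namespace page_flag_sketch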
2688 :
2689 110 : void TurboAssembler::ComputeCodeStartAddress(Register dst) {
2690 110 : Label current;
2691 110 : bind(&current);
2692 110 : int pc = pc_offset();
2693 : // The address of the label minus its offset yields the code start.
2694 220 : leaq(dst, Operand(&current, -pc));
2695 110 : }
2696 :
2697 560 : void TurboAssembler::ResetSpeculationPoisonRegister() {
2698 : // TODO(tebbi): Perhaps we want to put an lfence here.
2699 560 : Set(kSpeculationPoisonRegister, -1);
2700 560 : }
2701 :
2702 3306864 : void TurboAssembler::CallForDeoptimization(Address target, int deopt_id) {
2703 : NoRootArrayScope no_root_array(this);
2704 : // Save the deopt id in r13 (we don't need the roots array from now on).
2705 3306864 : movq(r13, Immediate(deopt_id));
2706 3306864 : call(target, RelocInfo::RUNTIME_ENTRY);
2707 3306865 : }
2708 :
2709 : } // namespace internal
2710 178779 : } // namespace v8
2711 :
2712 : #endif // V8_TARGET_ARCH_X64
|