Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #if V8_TARGET_ARCH_X64
6 :
7 : #include "src/base/bits.h"
8 : #include "src/base/division-by-constant.h"
9 : #include "src/base/utils/random-number-generator.h"
10 : #include "src/bootstrapper.h"
11 : #include "src/callable.h"
12 : #include "src/code-factory.h"
13 : #include "src/counters.h"
14 : #include "src/debug/debug.h"
15 : #include "src/external-reference-table.h"
16 : #include "src/frames-inl.h"
17 : #include "src/globals.h"
18 : #include "src/heap/heap-inl.h" // For MemoryChunk.
19 : #include "src/macro-assembler.h"
20 : #include "src/objects-inl.h"
21 : #include "src/objects/smi.h"
22 : #include "src/register-configuration.h"
23 : #include "src/snapshot/embedded-data.h"
24 : #include "src/snapshot/snapshot.h"
25 : #include "src/string-constants.h"
26 : #include "src/x64/assembler-x64.h"
27 :
28 : // Satisfy cpplint check, but don't include platform-specific header. It is
29 : // included recursively via macro-assembler.h.
30 : #if 0
31 : #include "src/x64/macro-assembler-x64.h"
32 : #endif
33 :
34 : namespace v8 {
35 : namespace internal {
36 :
// Returns an Operand addressing stack argument |index| (index 0 is the
// argument furthest from the return address). Handles both an immediate
// argument count and a count held in argument_count_reg_, and optionally
// accounts for a receiver slot and a saved-FP slot (when base_reg_ != rsp).
 37 1792 : Operand StackArgumentsAccessor::GetArgumentOperand(int index) {
 38 : DCHECK_GE(index, 0);
 39 1792 : int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
// When based off rsp only the return address sits below the arguments;
// when based off a frame pointer the saved FP is there as well.
 40 : int displacement_to_last_argument =
 41 1792 : base_reg_ == rsp ? kPCOnStackSize : kFPOnStackSize + kPCOnStackSize;
 42 1792 : displacement_to_last_argument += extra_displacement_to_last_argument_;
 43 1792 : if (argument_count_reg_ == no_reg) {
 44 : // argument[0] is at base_reg_ + displacement_to_last_argument +
 45 : // (argument_count_immediate_ + receiver - 1) * kSystemPointerSize.
 46 : DCHECK_GT(argument_count_immediate_ + receiver, 0);
 47 : return Operand(base_reg_,
 48 : displacement_to_last_argument +
 49 0 : (argument_count_immediate_ + receiver - 1 - index) *
 50 0 : kSystemPointerSize);
 51 : } else {
 52 : // argument[0] is at base_reg_ + displacement_to_last_argument +
 53 : // argument_count_reg_ * times_system_pointer_size + (receiver - 1) *
 54 : // kSystemPointerSize.
 55 : return Operand(base_reg_, argument_count_reg_, times_system_pointer_size,
 56 : displacement_to_last_argument +
 57 1792 : (receiver - 1 - index) * kSystemPointerSize);
 58 : }
 59 : }
 60 :
// Captures either the register or the immediate form of the parameter count;
// exactly one of argument_count_reg_ / argument_count_immediate_ is
// meaningful afterwards.
 61 0 : StackArgumentsAccessor::StackArgumentsAccessor(
 62 : Register base_reg, const ParameterCount& parameter_count,
 63 : StackArgumentsAccessorReceiverMode receiver_mode,
 64 : int extra_displacement_to_last_argument)
 65 : : base_reg_(base_reg),
 66 : argument_count_reg_(parameter_count.is_reg() ? parameter_count.reg()
 67 : : no_reg),
 68 : argument_count_immediate_(
 69 : parameter_count.is_immediate() ? parameter_count.immediate() : 0),
 70 : receiver_mode_(receiver_mode),
 71 : extra_displacement_to_last_argument_(
 72 672 : extra_displacement_to_last_argument) {}
73 :
// Loads the value stored at |source| into |destination|. Prefers a single
// root-register-relative load when the delta from kRootRegister fits in
// 32 bits; otherwise falls back to the generic external-reference operand
// (with a shortcut via load_rax when the destination is rax).
 74 392 : void MacroAssembler::Load(Register destination, ExternalReference source) {
 75 392 : if (root_array_available_ && options().enable_root_array_delta_access) {
 76 0 : intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
 77 0 : if (is_int32(delta)) {
 78 0 : movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
 79 0 : return;
 80 : }
 81 : }
 82 : // Safe code.
 83 392 : if (destination == rax && !options().isolate_independent_code) {
 84 0 : load_rax(source);
 85 : } else {
 86 392 : movq(destination, ExternalReferenceAsOperand(source));
 87 : }
 88 : }
 89 :
 90 :
// Stores |source| to the memory addressed by |destination|; mirror image of
// Load() above, including the rax/store_rax fast path.
 91 47127 : void MacroAssembler::Store(ExternalReference destination, Register source) {
 92 47127 : if (root_array_available_ && options().enable_root_array_delta_access) {
 93 : intptr_t delta =
 94 0 : RootRegisterOffsetForExternalReference(isolate(), destination);
 95 0 : if (is_int32(delta)) {
 96 0 : movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
 97 0 : return;
 98 : }
 99 : }
 100 : // Safe code.
 101 47127 : if (source == rax && !options().isolate_independent_code) {
 102 0 : store_rax(destination);
 103 : } else {
 104 47127 : movq(ExternalReferenceAsOperand(destination), source);
 105 : }
 106 : }
107 :
// Loads element |constant_index| of the builtins constants table (a
// FixedArray reachable from the roots) into |destination|. |destination|
// doubles as the scratch holding the table pointer.
 108 40544 : void TurboAssembler::LoadFromConstantsTable(Register destination,
 109 : int constant_index) {
 110 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable));
 111 40544 : LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
 112 : LoadTaggedPointerField(
 113 : destination,
 114 : FieldOperand(destination, FixedArray::OffsetOfElementAt(constant_index)));
 115 40544 : }
 116 :
// Materializes kRootRegister + |offset| into |destination|; a plain register
// move when the offset is zero, otherwise a single leaq.
 117 19264 : void TurboAssembler::LoadRootRegisterOffset(Register destination,
 118 : intptr_t offset) {
 119 : DCHECK(is_int32(offset));
 120 19264 : if (offset == 0) {
 121 : Move(destination, kRootRegister);
 122 : } else {
 123 36736 : leaq(destination, Operand(kRootRegister, static_cast<int32_t>(offset)));
 124 : }
 125 19264 : }
 126 :
// Loads the value at kRootRegister + |offset| (a dereference, unlike
// LoadRootRegisterOffset which computes an address).
 127 769216 : void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
 128 1538432 : movq(destination, Operand(kRootRegister, offset));
 129 769216 : }
130 :
// Puts the address of |source| into |destination|. Tries, in order:
// a root-register-relative leaq, an indirect load for isolate-independent
// (embedded-builtin) code, and finally a full 64-bit immediate Move.
 131 953843 : void TurboAssembler::LoadAddress(Register destination,
 132 : ExternalReference source) {
 133 953843 : if (root_array_available_ && options().enable_root_array_delta_access) {
 134 1170 : intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
 135 1170 : if (is_int32(delta)) {
 136 2340 : leaq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
 137 1170 : return;
 138 : }
 139 : }
 140 : // Safe code.
 141 : if (FLAG_embedded_builtins) {
 142 952673 : if (root_array_available_ && options().isolate_independent_code) {
 143 45528 : IndirectLoadExternalReference(destination, source);
 144 45528 : return;
 145 : }
 146 : }
 147 907145 : Move(destination, source);
 148 : }
 149 :
// Returns an Operand through which |reference| can be accessed. May emit a
// load into |scratch|, in which case the returned Operand dereferences
// |scratch| — callers must not rely on scratch's previous contents.
 150 1148500 : Operand TurboAssembler::ExternalReferenceAsOperand(ExternalReference reference,
 151 : Register scratch) {
 152 1148500 : if (root_array_available_ && options().enable_root_array_delta_access) {
 153 : int64_t delta =
 154 3590 : RootRegisterOffsetForExternalReference(isolate(), reference);
 155 3590 : if (is_int32(delta)) {
 156 3590 : return Operand(kRootRegister, static_cast<int32_t>(delta));
 157 : }
 158 : }
 159 1144910 : if (root_array_available_ && options().isolate_independent_code) {
 160 91504 : if (IsAddressableThroughRootRegister(isolate(), reference)) {
 161 : // Some external references can be efficiently loaded as an offset from
 162 : // kRootRegister.
 163 : intptr_t offset =
 164 90888 : RootRegisterOffsetForExternalReference(isolate(), reference);
 165 90888 : CHECK(is_int32(offset));
 166 90888 : return Operand(kRootRegister, static_cast<int32_t>(offset));
 167 : } else {
 168 : // Otherwise, do a memory load from the external reference table.
 169 1232 : movq(scratch, Operand(kRootRegister,
 170 : RootRegisterOffsetForExternalReferenceTableEntry(
 171 : isolate(), reference)));
 172 616 : return Operand(scratch, 0);
 173 : }
 174 : }
 175 1053406 : Move(scratch, reference);
 176 1053406 : return Operand(scratch, 0);
 177 : }
 178 :
// Pushes the address of |source| onto the stack, clobbering kScratchRegister.
 179 112 : void MacroAssembler::PushAddress(ExternalReference source) {
 180 112 : LoadAddress(kScratchRegister, source);
 181 : Push(kScratchRegister);
 182 112 : }
183 :
// Loads the root-list entry |index| into |destination| via kRootRegister.
 184 1944413 : void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
 185 : DCHECK(root_array_available_);
 186 3888834 : movq(destination,
 187 : Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 188 1944421 : }
 189 :
// Pushes the root-list entry |index| directly from memory (no scratch).
 190 672 : void MacroAssembler::PushRoot(RootIndex index) {
 191 : DCHECK(root_array_available_);
 192 1344 : Push(Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 193 672 : }
 194 :
// Compares |with| against the root |index|. Tagged-width compare for
// strong/read-only roots; full-width cmpq otherwise, because some smi roots
// hold system-pointer-sized values (e.g. stack limits).
 195 563864 : void TurboAssembler::CompareRoot(Register with, RootIndex index) {
 196 : DCHECK(root_array_available_);
 197 563864 : if (IsInRange(index, RootIndex::kFirstStrongOrReadOnlyRoot,
 198 : RootIndex::kLastStrongOrReadOnlyRoot)) {
 199 2016 : cmp_tagged(with,
 200 1008 : Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 201 : } else {
 202 : // Some smi roots contain system pointer size values like stack limits.
 203 562856 : cmpq(with, Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 204 : }
 205 563870 : }
 206 :
// Memory-operand overload of CompareRoot; loads the root value into
// kScratchRegister first, so |with| must not use that register.
 207 0 : void TurboAssembler::CompareRoot(Operand with, RootIndex index) {
 208 : DCHECK(root_array_available_);
 209 : DCHECK(!with.AddressUsesRegister(kScratchRegister));
 210 0 : LoadRoot(kScratchRegister, index);
 211 0 : if (IsInRange(index, RootIndex::kFirstStrongOrReadOnlyRoot,
 212 : RootIndex::kLastStrongOrReadOnlyRoot)) {
 213 0 : cmp_tagged(with, kScratchRegister);
 214 : } else {
 215 : // Some smi roots contain system pointer size values like stack limits.
 216 0 : cmpq(with, kScratchRegister);
 217 : }
 218 0 : }
219 :
// Loads a tagged pointer field; with pointer compression enabled this
// decompresses the 32-bit on-heap value, otherwise it is a plain
// tagged-width load.
 220 592640 : void TurboAssembler::LoadTaggedPointerField(Register destination,
 221 : Operand field_operand) {
 222 : #ifdef V8_COMPRESS_POINTERS
 223 : DecompressTaggedPointer(destination, field_operand);
 224 : #else
 225 634696 : mov_tagged(destination, field_operand);
 226 : #endif
 227 592644 : }
 228 :
// Like LoadTaggedPointerField, but the field may hold either a smi or a
// heap object; |scratch| is only needed on the compressed-pointer path.
 229 336 : void TurboAssembler::LoadAnyTaggedField(Register destination,
 230 : Operand field_operand,
 231 : Register scratch) {
 232 : #ifdef V8_COMPRESS_POINTERS
 233 : DecompressAnyTagged(destination, field_operand, scratch);
 234 : #else
 235 336 : mov_tagged(destination, field_operand);
 236 : #endif
 237 336 : }
 238 :
// Pushes a tagged-pointer field onto the stack; with compressed pointers it
// must decompress via |scratch| first.
 239 112 : void TurboAssembler::PushTaggedPointerField(Operand field_operand,
 240 : Register scratch) {
 241 : #ifdef V8_COMPRESS_POINTERS
 242 : DCHECK(!field_operand.AddressUsesRegister(scratch));
 243 : DecompressTaggedPointer(scratch, field_operand);
 244 : Push(scratch);
 245 : #else
 246 : Push(field_operand);
 247 : #endif
 248 : }
 249 :
// Pushes a field that may hold any tagged value; the compressed-pointer
// path needs two distinct scratch registers for DecompressAnyTagged.
 250 112 : void TurboAssembler::PushTaggedAnyField(Operand field_operand,
 251 : Register scratch1, Register scratch2) {
 252 : #ifdef V8_COMPRESS_POINTERS
 253 : DCHECK(!AreAliased(scratch1, scratch2));
 254 : DCHECK(!field_operand.AddressUsesRegister(scratch1));
 255 : DCHECK(!field_operand.AddressUsesRegister(scratch2));
 256 : DecompressAnyTagged(scratch1, field_operand, scratch2);
 257 : Push(scratch1);
 258 : #else
 259 : Push(field_operand);
 260 : #endif
 261 : }
 262 :
// Loads a smi field from memory and untags it into |dst|.
 263 280 : void TurboAssembler::SmiUntagField(Register dst, Operand src) {
 264 280 : SmiUntag(dst, src);
 265 280 : }
266 :
// Stores an immediate into a tagged field: a 32-bit store when pointers are
// compressed, a full movq otherwise.
 267 0 : void TurboAssembler::StoreTaggedField(Operand dst_field_operand,
 268 : Immediate value) {
 269 : #ifdef V8_COMPRESS_POINTERS
 270 : RecordComment("[ StoreTagged");
 271 : movl(dst_field_operand, value);
 272 : RecordComment("]");
 273 : #else
 274 0 : movq(dst_field_operand, value);
 275 : #endif
 276 0 : }
 277 :
// Register overload of StoreTaggedField; same width selection as above.
 278 319911 : void TurboAssembler::StoreTaggedField(Operand dst_field_operand,
 279 : Register value) {
 280 : #ifdef V8_COMPRESS_POINTERS
 281 : RecordComment("[ StoreTagged");
 282 : movl(dst_field_operand, value);
 283 : RecordComment("]");
 284 : #else
 285 319911 : movq(dst_field_operand, value);
 286 : #endif
 287 319911 : }
 288 :
// Decompresses a field known to hold a smi: a simple sign-extending 32->64
// load (no root-register add is needed for smis).
 289 0 : void TurboAssembler::DecompressTaggedSigned(Register destination,
 290 : Operand field_operand) {
 291 0 : RecordComment("[ DecompressTaggedSigned");
 292 0 : movsxlq(destination, field_operand);
 293 0 : RecordComment("]");
 294 0 : }
 295 :
// Decompresses a compressed tagged pointer held in |source|: sign-extend,
// then rebase on the isolate root in kRootRegister.
 296 0 : void TurboAssembler::DecompressTaggedPointer(Register destination,
 297 : Register source) {
 298 0 : RecordComment("[ DecompressTaggedPointer");
 299 0 : movsxlq(destination, source);
 300 : addq(destination, kRootRegister);
 301 0 : RecordComment("]");
 302 0 : }
 303 :
// Memory-operand overload: decompress a tagged pointer loaded from
// |field_operand|.
 304 0 : void TurboAssembler::DecompressTaggedPointer(Register destination,
 305 : Operand field_operand) {
 306 0 : RecordComment("[ DecompressTaggedPointer");
 307 0 : movsxlq(destination, field_operand);
 308 : addq(destination, kRootRegister);
 309 0 : RecordComment("]");
 310 0 : }
 311 :
// Decompresses a field that may hold either a smi or a heap object: smis
// stay as-is, heap objects get the isolate root added. The branchless form
// builds a mask that is 0 for smis and kRootRegister for heap objects.
 312 0 : void TurboAssembler::DecompressAnyTagged(Register destination,
 313 : Operand field_operand,
 314 : Register scratch) {
 315 : DCHECK(!AreAliased(destination, scratch));
 316 0 : RecordComment("[ DecompressAnyTagged");
 317 0 : movsxlq(destination, field_operand);
 318 : if (kUseBranchlessPtrDecompression) {
 319 : // Branchlessly compute |masked_root|:
 320 : // masked_root = HAS_SMI_TAG(destination) ? 0 : kRootRegister;
 321 : STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag < 32));
 322 0 : Register masked_root = scratch;
 323 : movl(masked_root, destination);
 324 : andl(masked_root, Immediate(kSmiTagMask));
 325 : negq(masked_root);
 326 : andq(masked_root, kRootRegister);
 327 : // Now this add operation will either leave the value unchanged if it is
 328 : // a smi or add the isolate root if it is a heap object.
 329 : addq(destination, masked_root);
 330 : } else {
 331 : Label done;
 332 : JumpIfSmi(destination, &done);
 333 : addq(destination, kRootRegister);
 334 : bind(&done);
 335 : }
 336 0 : RecordComment("]");
 337 0 : }
338 :
// Emits a write barrier for a store of |value| into the field at
// |object|+|offset|. Computes the slot address into |dst| and delegates to
// RecordWrite; may skip the barrier entirely for smi stores. |value| and
// |dst| are clobbered (zapped under --debug-code).
 339 112 : void MacroAssembler::RecordWriteField(Register object, int offset,
 340 : Register value, Register dst,
 341 : SaveFPRegsMode save_fp,
 342 : RememberedSetAction remembered_set_action,
 343 : SmiCheck smi_check) {
 344 : // First, check if a write barrier is even needed. The tests below
 345 : // catch stores of Smis.
 346 112 : Label done;
 347 :
 348 : // Skip barrier if writing a smi.
 349 112 : if (smi_check == INLINE_SMI_CHECK) {
 350 56 : JumpIfSmi(value, &done);
 351 : }
 352 :
 353 : // Although the object register is tagged, the offset is relative to the start
 354 : // of the object, so the offset must be a multiple of kTaggedSize.
 355 : DCHECK(IsAligned(offset, kTaggedSize));
 356 :
 357 112 : leaq(dst, FieldOperand(object, offset));
 358 112 : if (emit_debug_code()) {
// Debug-only runtime check that the slot address is tagged-size aligned.
 359 0 : Label ok;
 360 0 : testb(dst, Immediate(kTaggedSize - 1));
 361 0 : j(zero, &ok, Label::kNear);
 362 0 : int3();
 363 0 : bind(&ok);
 364 : }
 365 :
 366 : RecordWrite(object, dst, value, save_fp, remembered_set_action,
 367 112 : OMIT_SMI_CHECK);
 368 :
 369 112 : bind(&done);
 370 :
 371 : // Clobber clobbered input registers when running with the debug-code flag
 372 : // turned on to provoke errors.
 373 112 : if (emit_debug_code()) {
 374 : Move(value, kZapValue, RelocInfo::NONE);
 375 : Move(dst, kZapValue, RelocInfo::NONE);
 376 : }
 377 112 : }
 378 :
// Pushes every register whose bit is set in |registers|, in ascending code
// order (RestoreRegisters pops in the exact reverse order).
 379 319910 : void TurboAssembler::SaveRegisters(RegList registers) {
 380 : DCHECK_GT(NumRegs(registers), 0);
 381 10557024 : for (int i = 0; i < Register::kNumRegisters; ++i) {
 382 5118557 : if ((registers >> i) & 1u) {
 383 1599552 : pushq(Register::from_code(i));
 384 : }
 385 : }
 386 319910 : }
 387 :
// Pops the registers pushed by SaveRegisters (descending code order).
 388 319910 : void TurboAssembler::RestoreRegisters(RegList registers) {
 389 : DCHECK_GT(NumRegs(registers), 0);
 390 10557038 : for (int i = Register::kNumRegisters - 1; i >= 0; --i) {
 391 5118564 : if ((registers >> i) & 1u) {
 392 1599553 : popq(Register::from_code(i));
 393 : }
 394 : }
 395 319910 : }
396 :
// Calls the EphemeronKeyBarrier builtin for a store into an ephemeron
// (weak-table) slot. Saves/restores the descriptor's allocatable registers
// around the call and marshals (object, slot, fp_mode) into the
// descriptor-assigned parameter registers.
 397 112 : void TurboAssembler::CallEphemeronKeyBarrier(Register object, Register address,
 398 : SaveFPRegsMode fp_mode) {
 399 : EphemeronKeyBarrierDescriptor descriptor;
 400 : RegList registers = descriptor.allocatable_registers();
 401 :
 402 112 : SaveRegisters(registers);
 403 :
 404 : Register object_parameter(
 405 112 : descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kObject));
 406 : Register slot_parameter(descriptor.GetRegisterParameter(
 407 112 : EphemeronKeyBarrierDescriptor::kSlotAddress));
 408 : Register fp_mode_parameter(
 409 112 : descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kFPMode));
 410 :
// MovePair handles the case where the destination registers overlap the
// incoming (object, address) pair.
 411 112 : MovePair(slot_parameter, address, object_parameter, object);
 412 112 : Smi smi_fm = Smi::FromEnum(fp_mode);
 413 112 : Move(fp_mode_parameter, smi_fm);
 414 112 : Call(isolate()->builtins()->builtin_handle(Builtins::kEphemeronKeyBarrier),
 415 112 : RelocInfo::CODE_TARGET);
 416 :
 417 112 : RestoreRegisters(registers);
 418 112 : }
 419 :
// Convenience overload: record-write via the RecordWrite builtin (non-wasm).
 420 319513 : void TurboAssembler::CallRecordWriteStub(
 421 : Register object, Register address,
 422 : RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
 423 319513 : CallRecordWriteStub(
 424 : object, address, remembered_set_action, fp_mode,
 425 : isolate()->builtins()->builtin_handle(Builtins::kRecordWrite),
 426 319513 : kNullAddress);
 427 319513 : }
 428 :
// Convenience overload: record-write via a wasm stub address instead of a
// code object.
 429 286 : void TurboAssembler::CallRecordWriteStub(
 430 : Register object, Register address,
 431 : RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
 432 : Address wasm_target) {
 433 : CallRecordWriteStub(object, address, remembered_set_action, fp_mode,
 434 286 : Handle<Code>::null(), wasm_target);
 435 286 : }
 436 :
// Common implementation: exactly one of |code_target| / |wasm_target| is
// set. Saves the descriptor's allocatable registers, loads the four
// RecordWrite parameters, then calls either the code object or the wasm
// stub (the latter via near_call so the call stays within the module).
 437 319799 : void TurboAssembler::CallRecordWriteStub(
 438 : Register object, Register address,
 439 : RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
 440 : Handle<Code> code_target, Address wasm_target) {
 441 : DCHECK_NE(code_target.is_null(), wasm_target == kNullAddress);
 442 :
 443 : RecordWriteDescriptor descriptor;
 444 : RegList registers = descriptor.allocatable_registers();
 445 :
 446 319799 : SaveRegisters(registers);
 447 :
 448 : Register object_parameter(
 449 319798 : descriptor.GetRegisterParameter(RecordWriteDescriptor::kObject));
 450 : Register slot_parameter(
 451 319798 : descriptor.GetRegisterParameter(RecordWriteDescriptor::kSlot));
 452 : Register remembered_set_parameter(
 453 : descriptor.GetRegisterParameter(RecordWriteDescriptor::kRememberedSet));
 454 : Register fp_mode_parameter(
 455 : descriptor.GetRegisterParameter(RecordWriteDescriptor::kFPMode));
 456 :
 457 : // Prepare argument registers for calling RecordWrite
 458 : // slot_parameter <= address
 459 : // object_parameter <= object
 460 319798 : MovePair(slot_parameter, address, object_parameter, object);
 461 :
 462 : Smi smi_rsa = Smi::FromEnum(remembered_set_action);
 463 : Smi smi_fm = Smi::FromEnum(fp_mode);
 464 319799 : Move(remembered_set_parameter, smi_rsa);
// If both smi parameters are equal, reuse the already-loaded register
// instead of materializing the same immediate twice.
 465 319799 : if (smi_rsa != smi_fm) {
 466 276354 : Move(fp_mode_parameter, smi_fm);
 467 : } else {
 468 43445 : movq(fp_mode_parameter, remembered_set_parameter);
 469 : }
 470 319798 : if (code_target.is_null()) {
 471 : // Use {near_call} for direct Wasm call within a module.
 472 285 : near_call(wasm_target, RelocInfo::WASM_STUB_CALL);
 473 : } else {
 474 319513 : Call(code_target, RelocInfo::CODE_TARGET);
 475 : }
 476 :
 477 319798 : RestoreRegisters(registers);
 478 319798 : }
479 :
// Full write barrier for a store of |value| to the slot at |address| inside
// |object|. Skips the barrier for smi stores (optional inline check) and
// when page flags show neither the stored value nor the host object is
// interesting to the GC; otherwise calls the RecordWrite stub. |address|
// and |value| are clobbered (zapped under --debug-code).
 480 112 : void MacroAssembler::RecordWrite(Register object, Register address,
 481 : Register value, SaveFPRegsMode fp_mode,
 482 : RememberedSetAction remembered_set_action,
 483 : SmiCheck smi_check) {
 484 : DCHECK(object != value);
 485 : DCHECK(object != address);
 486 : DCHECK(value != address);
 487 112 : AssertNotSmi(object);
 488 :
 489 168 : if (remembered_set_action == OMIT_REMEMBERED_SET &&
 490 56 : !FLAG_incremental_marking) {
 491 0 : return;
 492 : }
 493 :
 494 112 : if (emit_debug_code()) {
// Debug-only check that the slot really contains |value|.
 495 0 : Label ok;
 496 0 : cmp_tagged(value, Operand(address, 0));
 497 0 : j(equal, &ok, Label::kNear);
 498 0 : int3();
 499 0 : bind(&ok);
 500 : }
 501 :
 502 : // First, check if a write barrier is even needed. The tests below
 503 : // catch stores of smis and stores into the young generation.
 504 112 : Label done;
 505 :
 506 112 : if (smi_check == INLINE_SMI_CHECK) {
 507 : // Skip barrier if writing a smi.
 508 0 : JumpIfSmi(value, &done);
 509 : }
 510 :
 511 112 : CheckPageFlag(value,
 512 : value, // Used as scratch.
 513 : MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
 514 112 : Label::kNear);
 515 :
 516 : CheckPageFlag(object,
 517 : value, // Used as scratch.
 518 : MemoryChunk::kPointersFromHereAreInterestingMask,
 519 : zero,
 520 : &done,
 521 112 : Label::kNear);
 522 :
 523 112 : CallRecordWriteStub(object, address, remembered_set_action, fp_mode);
 524 :
 525 112 : bind(&done);
 526 :
 527 : // Clobber clobbered registers when running with the debug-code flag
 528 : // turned on to provoke errors.
 529 112 : if (emit_debug_code()) {
 530 : Move(address, kZapValue, RelocInfo::NONE);
 531 : Move(value, kZapValue, RelocInfo::NONE);
 532 : }
 533 : }
534 :
// Debug-code-only assertion: emits a Check only when --debug-code is on.
 535 122 : void TurboAssembler::Assert(Condition cc, AbortReason reason) {
 536 122 : if (emit_debug_code()) Check(cc, reason);
 537 122 : }
 538 :
// Emits an unconditional Abort when --debug-code is on; used for code paths
// that should never be reached.
 539 303870 : void TurboAssembler::AssertUnreachable(AbortReason reason) {
 540 303870 : if (emit_debug_code()) Abort(reason);
 541 303870 : }
 542 :
// Unconditional runtime check: falls through when |cc| holds, otherwise
// aborts with |reason|.
 543 188 : void TurboAssembler::Check(Condition cc, AbortReason reason) {
 544 188 : Label L;
 545 188 : j(cc, &L, Label::kNear);
 546 188 : Abort(reason);
 547 : // Control will not return here.
 548 188 : bind(&L);
 549 188 : }
 550 :
// Emits a runtime check that rsp is aligned to the platform's activation
// frame alignment; traps with int3 on misalignment. No-op when the
// required alignment is not larger than a system pointer.
 551 0 : void TurboAssembler::CheckStackAlignment() {
 552 0 : int frame_alignment = base::OS::ActivationFrameAlignment();
 553 0 : int frame_alignment_mask = frame_alignment - 1;
 554 0 : if (frame_alignment > kSystemPointerSize) {
 555 : DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
 556 0 : Label alignment_as_expected;
 557 0 : testq(rsp, Immediate(frame_alignment_mask));
 558 0 : j(zero, &alignment_as_expected, Label::kNear);
 559 : // Abort if stack is not aligned.
 560 0 : int3();
 561 0 : bind(&alignment_as_expected);
 562 : }
 563 0 : }
564 :
// Emits code that aborts execution with |reason|. Depending on assembler
// options this is: a bare int3 trap (trap_on_abort), a direct C call to
// abort_with_reason (should_abort_hard), or a call to the Abort builtin.
// Generated code never returns past this point.
 565 4874 : void TurboAssembler::Abort(AbortReason reason) {
 566 : #ifdef DEBUG
 567 : const char* msg = GetAbortReason(reason);
 568 : RecordComment("Abort message: ");
 569 : RecordComment(msg);
 570 : #endif
 571 :
 572 : // Avoid emitting call to builtin if requested.
 573 4874 : if (trap_on_abort()) {
 574 0 : int3();
 575 0 : return;
 576 : }
 577 :
 578 4874 : if (should_abort_hard()) {
 579 : // We don't care if we constructed a frame. Just pretend we did.
 580 10 : FrameScope assume_frame(this, StackFrame::NONE);
 581 10 : movl(arg_reg_1, Immediate(static_cast<int>(reason)));
 582 10 : PrepareCallCFunction(1);
 583 10 : LoadAddress(rax, ExternalReference::abort_with_reason());
 584 10 : call(rax);
 585 : return;
 586 : }
 587 :
// The Abort builtin expects the reason as a smi in rdx.
 588 4864 : Move(rdx, Smi::FromInt(static_cast<int>(reason)));
 589 :
 590 4864 : if (!has_frame()) {
 591 : // We don't actually want to generate a pile of code for this, so just
 592 : // claim there is a stack frame, without generating one.
 593 56 : FrameScope scope(this, StackFrame::NONE);
 594 56 : Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
 595 : } else {
 596 4808 : Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
 597 : }
 598 : // Control will not return here.
 599 4864 : int3();
 600 : }
601 :
// Calls runtime function |fid| through an explicitly provided CEntry code
// object in |centry| (must be rcx). rax receives the argument count and rbx
// the runtime entry address, per the CEntry calling convention.
 602 75 : void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
 603 : Register centry) {
 604 75 : const Runtime::Function* f = Runtime::FunctionForId(fid);
 605 : // TODO(1236192): Most runtime routines don't need the number of
 606 : // arguments passed in because it is constant. At some point we
 607 : // should remove this need and make the runtime routine entry code
 608 : // smarter.
 609 76 : Set(rax, f->nargs);
 610 76 : LoadAddress(rbx, ExternalReference::Create(f));
 611 : DCHECK(!AreAliased(centry, rax, rbx));
 612 : DCHECK(centry == rcx);
 613 76 : CallCodeObject(centry);
 614 75 : }
 615 :
// Calls runtime function |f| with |num_arguments| arguments via a freshly
// selected CEntry stub; verifies the count against the function's declared
// arity (negative arity means variadic).
 616 1960 : void MacroAssembler::CallRuntime(const Runtime::Function* f,
 617 : int num_arguments,
 618 : SaveFPRegsMode save_doubles) {
 619 : // If the expected number of arguments of the runtime function is
 620 : // constant, we check that the actual number of arguments match the
 621 : // expectation.
 622 1960 : CHECK(f->nargs < 0 || f->nargs == num_arguments);
 623 :
 624 : // TODO(1236192): Most runtime routines don't need the number of
 625 : // arguments passed in because it is constant. At some point we
 626 : // should remove this need and make the runtime routine entry code
 627 : // smarter.
 628 1960 : Set(rax, num_arguments);
 629 1960 : LoadAddress(rbx, ExternalReference::Create(f));
 630 : Handle<Code> code =
 631 1960 : CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
 632 1960 : Call(code, RelocInfo::CODE_TARGET);
 633 1960 : }
 634 :
// Tail-calls runtime function |fid|; arguments are already on the stack.
// For fixed-arity functions the count is loaded into rax here, otherwise
// the caller must have set rax.
 635 840 : void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
 636 : // ----------- S t a t e -------------
 637 : // -- rsp[0] : return address
 638 : // -- rsp[8] : argument num_arguments - 1
 639 : // ...
 640 : // -- rsp[8 * num_arguments] : argument 0 (receiver)
 641 : //
 642 : // For runtime functions with variable arguments:
 643 : // -- rax : number of arguments
 644 : // -----------------------------------
 645 :
 646 840 : const Runtime::Function* function = Runtime::FunctionForId(fid);
 647 : DCHECK_EQ(1, function->result_size);
 648 840 : if (function->nargs >= 0) {
 649 840 : Set(rax, function->nargs);
 650 : }
 651 840 : JumpToExternalReference(ExternalReference::Create(fid));
 652 840 : }
 653 :
// Jumps (tail-call) to the C entry runtime stub with rbx holding |ext|.
 654 840 : void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
 655 : bool builtin_exit_frame) {
 656 : // Set the entry point and jump to the C entry runtime stub.
 657 840 : LoadAddress(rbx, ext);
 658 : Handle<Code> code = CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs,
 659 840 : kArgvOnStack, builtin_exit_frame);
 660 840 : Jump(code, RelocInfo::CODE_TARGET);
 661 840 : }
662 :
// Caller-saved GP registers on x64 (r12-r15 and rsp are callee-saved;
// kRootRegister and kScratchRegister overlap this set by design of the
// exclusion parameters below).
 663 : static constexpr Register saved_regs[] = {rax, rcx, rdx, rbx, rbp, rsi,
 664 : rdi, r8, r9, r10, r11};
 665 :
 666 : static constexpr int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
 667 :
// Computes (without emitting code) how many bytes PushCallerSaved would
// push for the same arguments — GP registers minus exclusions, plus all XMM
// registers when FP saving is requested.
 668 900 : int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
 669 : Register exclusion1,
 670 : Register exclusion2,
 671 : Register exclusion3) const {
 672 : int bytes = 0;
 673 20700 : for (int i = 0; i < kNumberOfSavedRegs; i++) {
 674 9900 : Register reg = saved_regs[i];
 675 9900 : if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
 676 9000 : bytes += kSystemPointerSize;
 677 : }
 678 : }
 679 :
 680 : // R12 to r15 are callee save on all platforms.
 681 900 : if (fp_mode == kSaveFPRegs) {
 682 452 : bytes += kDoubleSize * XMMRegister::kNumRegisters;
 683 : }
 684 :
 685 900 : return bytes;
 686 : }
 687 :
// Pushes all caller-saved GP registers except the (up to three) exclusions,
// then optionally spills every XMM register into a stack block. Returns the
// number of bytes pushed; PopCallerSaved must be called with the same
// arguments to unwind.
 688 900 : int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
 689 : Register exclusion2, Register exclusion3) {
 690 : // We don't allow a GC during a store buffer overflow so there is no need to
 691 : // store the registers in any particular way, but we do have to store and
 692 : // restore them.
 693 : int bytes = 0;
 694 20700 : for (int i = 0; i < kNumberOfSavedRegs; i++) {
 695 9900 : Register reg = saved_regs[i];
 696 9900 : if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
 697 9000 : pushq(reg);
 698 9000 : bytes += kSystemPointerSize;
 699 : }
 700 : }
 701 :
 702 : // R12 to r15 are callee save on all platforms.
 703 900 : if (fp_mode == kSaveFPRegs) {
 704 : int delta = kDoubleSize * XMMRegister::kNumRegisters;
 705 452 : subq(rsp, Immediate(delta));
 706 14916 : for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
 707 : XMMRegister reg = XMMRegister::from_code(i);
 708 14464 : Movsd(Operand(rsp, i * kDoubleSize), reg);
 709 : }
 710 452 : bytes += delta;
 711 : }
 712 :
 713 900 : return bytes;
 714 : }
 715 :
// Exact inverse of PushCallerSaved: reloads the XMM block first (if saved),
// then pops the GP registers in reverse order. Returns bytes popped.
 716 900 : int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
 717 : Register exclusion2, Register exclusion3) {
 718 : int bytes = 0;
 719 900 : if (fp_mode == kSaveFPRegs) {
 720 14916 : for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
 721 : XMMRegister reg = XMMRegister::from_code(i);
 722 14464 : Movsd(reg, Operand(rsp, i * kDoubleSize));
 723 : }
 724 : int delta = kDoubleSize * XMMRegister::kNumRegisters;
 725 452 : addq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
 726 : bytes += delta;
 727 : }
 728 :
 729 20700 : for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
 730 9900 : Register reg = saved_regs[i];
 731 9900 : if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
 732 9000 : popq(reg);
 733 9000 : bytes += kSystemPointerSize;
 734 : }
 735 : }
 736 :
 737 900 : return bytes;
 738 : }
739 :
// The Cvt* wrappers below pick the AVX (VEX-encoded, three-operand) form of
// each scalar conversion when AVX is available, falling back to the legacy
// SSE encoding otherwise. The int->float variants first zero the
// destination to break the false dependency on its previous upper bits.

// float32 -> float64, register source.
 740 9140 : void TurboAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
 741 9140 : if (CpuFeatures::IsSupported(AVX)) {
 742 : CpuFeatureScope scope(this, AVX);
 743 9124 : vcvtss2sd(dst, src, src);
 744 : } else {
 745 16 : cvtss2sd(dst, src);
 746 : }
 747 9139 : }
 748 :
// float32 -> float64, memory source.
 749 11373 : void TurboAssembler::Cvtss2sd(XMMRegister dst, Operand src) {
 750 11373 : if (CpuFeatures::IsSupported(AVX)) {
 751 : CpuFeatureScope scope(this, AVX);
 752 11373 : vcvtss2sd(dst, dst, src);
 753 : } else {
 754 0 : cvtss2sd(dst, src);
 755 : }
 756 11373 : }
 757 :
// float64 -> float32, register source.
 758 6160 : void TurboAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
 759 6160 : if (CpuFeatures::IsSupported(AVX)) {
 760 : CpuFeatureScope scope(this, AVX);
 761 6141 : vcvtsd2ss(dst, src, src);
 762 : } else {
 763 19 : cvtsd2ss(dst, src);
 764 : }
 765 6160 : }
 766 :
// float64 -> float32, memory source.
 767 11874 : void TurboAssembler::Cvtsd2ss(XMMRegister dst, Operand src) {
 768 11874 : if (CpuFeatures::IsSupported(AVX)) {
 769 : CpuFeatureScope scope(this, AVX);
 770 11874 : vcvtsd2ss(dst, dst, src);
 771 : } else {
 772 0 : cvtsd2ss(dst, src);
 773 : }
 774 11874 : }
 775 :
// int32 -> float64, register source (dst zeroed first, see note above).
 776 375013 : void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
 777 375013 : if (CpuFeatures::IsSupported(AVX)) {
 778 : CpuFeatureScope scope(this, AVX);
 779 372983 : vxorpd(dst, dst, dst);
 780 : vcvtlsi2sd(dst, dst, src);
 781 : } else {
 782 2030 : xorpd(dst, dst);
 783 2030 : cvtlsi2sd(dst, src);
 784 : }
 785 375015 : }
 786 :
// int32 -> float64, memory source.
 787 3884 : void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Operand src) {
 788 3884 : if (CpuFeatures::IsSupported(AVX)) {
 789 : CpuFeatureScope scope(this, AVX);
 790 3882 : vxorpd(dst, dst, dst);
 791 : vcvtlsi2sd(dst, dst, src);
 792 : } else {
 793 2 : xorpd(dst, dst);
 794 2 : cvtlsi2sd(dst, src);
 795 : }
 796 3885 : }
 797 :
// int32 -> float32, register source.
 798 1095 : void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
 799 1095 : if (CpuFeatures::IsSupported(AVX)) {
 800 : CpuFeatureScope scope(this, AVX);
 801 1081 : vxorps(dst, dst, dst);
 802 : vcvtlsi2ss(dst, dst, src);
 803 : } else {
 804 14 : xorps(dst, dst);
 805 14 : cvtlsi2ss(dst, src);
 806 : }
 807 1095 : }
 808 :
// int32 -> float32, memory source.
 809 8 : void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Operand src) {
 810 8 : if (CpuFeatures::IsSupported(AVX)) {
 811 : CpuFeatureScope scope(this, AVX);
 812 8 : vxorps(dst, dst, dst);
 813 : vcvtlsi2ss(dst, dst, src);
 814 : } else {
 815 0 : xorps(dst, dst);
 816 0 : cvtlsi2ss(dst, src);
 817 : }
 818 8 : }
819 :
// int64 -> float32, register source (AVX/SSE selection and destination
// zeroing as in the Cvtlsi2* wrappers above).
 820 291 : void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
 821 291 : if (CpuFeatures::IsSupported(AVX)) {
 822 : CpuFeatureScope scope(this, AVX);
 823 291 : vxorps(dst, dst, dst);
 824 : vcvtqsi2ss(dst, dst, src);
 825 : } else {
 826 0 : xorps(dst, dst);
 827 0 : cvtqsi2ss(dst, src);
 828 : }
 829 291 : }
 830 :
// int64 -> float32, memory source.
 831 0 : void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Operand src) {
 832 0 : if (CpuFeatures::IsSupported(AVX)) {
 833 : CpuFeatureScope scope(this, AVX);
 834 0 : vxorps(dst, dst, dst);
 835 : vcvtqsi2ss(dst, dst, src);
 836 : } else {
 837 0 : xorps(dst, dst);
 838 0 : cvtqsi2ss(dst, src);
 839 : }
 840 0 : }
 841 :
// int64 -> float64, register source.
 842 19140 : void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
 843 19140 : if (CpuFeatures::IsSupported(AVX)) {
 844 : CpuFeatureScope scope(this, AVX);
 845 18848 : vxorpd(dst, dst, dst);
 846 : vcvtqsi2sd(dst, dst, src);
 847 : } else {
 848 292 : xorpd(dst, dst);
 849 292 : cvtqsi2sd(dst, src);
 850 : }
 851 19153 : }
 852 :
// int64 -> float64, memory source.
 853 2000 : void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Operand src) {
 854 2000 : if (CpuFeatures::IsSupported(AVX)) {
 855 : CpuFeatureScope scope(this, AVX);
 856 1966 : vxorpd(dst, dst, dst);
 857 : vcvtqsi2sd(dst, dst, src);
 858 : } else {
 859 34 : xorpd(dst, dst);
 860 34 : cvtqsi2sd(dst, src);
 861 : }
 862 2000 : }
 863 :
// uint32 -> float32: zero-extend into kScratchRegister so the value is a
// non-negative int64, then use the signed 64-bit conversion.
 864 88 : void TurboAssembler::Cvtlui2ss(XMMRegister dst, Register src) {
 865 : // Zero-extend the 32 bit value to 64 bit.
 866 88 : movl(kScratchRegister, src);
 867 88 : Cvtqsi2ss(dst, kScratchRegister);
 868 88 : }
 869 :
// uint32 -> float32, memory source (same zero-extension trick).
 870 0 : void TurboAssembler::Cvtlui2ss(XMMRegister dst, Operand src) {
 871 : // Zero-extend the 32 bit value to 64 bit.
 872 0 : movl(kScratchRegister, src);
 873 0 : Cvtqsi2ss(dst, kScratchRegister);
 874 0 : }
 875 :
// uint32 -> float64, register source.
 876 392 : void TurboAssembler::Cvtlui2sd(XMMRegister dst, Register src) {
 877 : // Zero-extend the 32 bit value to 64 bit.
 878 392 : movl(kScratchRegister, src);
 879 392 : Cvtqsi2sd(dst, kScratchRegister);
 880 392 : }
 881 :
// uint32 -> float64, memory source.
 882 9578 : void TurboAssembler::Cvtlui2sd(XMMRegister dst, Operand src) {
 883 : // Zero-extend the 32 bit value to 64 bit.
 884 9578 : movl(kScratchRegister, src);
 885 9579 : Cvtqsi2sd(dst, kScratchRegister);
 886 9578 : }
887 :
        : // Unsigned 64-bit -> float/double. x64 has no native uint64 conversion,
        : // so values with the MSB set are halved (keeping the discarded LSB ORed
        : // back in so the final rounding is unaffected), converted as a signed
        : // value, and then doubled. Clobbers kScratchRegister when src is
        : // negative when interpreted as signed.
888 45 : void TurboAssembler::Cvtqui2ss(XMMRegister dst, Register src) {
889 45 : Label done;
890 45 : Cvtqsi2ss(dst, src);
891 45 : testq(src, src);
892 45 : j(positive, &done, Label::kNear);
893 :
894 : // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
895 45 : if (src != kScratchRegister) movq(kScratchRegister, src);
896 : shrq(kScratchRegister, Immediate(1));
897 : // The LSB is shifted into CF. If it is set, set the LSB in {tmp}.
898 45 : Label msb_not_set;
899 45 : j(not_carry, &msb_not_set, Label::kNear);
900 : orq(kScratchRegister, Immediate(1));
901 45 : bind(&msb_not_set);
902 45 : Cvtqsi2ss(dst, kScratchRegister);
903 45 : addss(dst, dst);
904 45 : bind(&done);
905 45 : }
906 :
907 0 : void TurboAssembler::Cvtqui2ss(XMMRegister dst, Operand src) {
908 0 : movq(kScratchRegister, src);
909 0 : Cvtqui2ss(dst, kScratchRegister);
910 0 : }
911 :
        : // Double-precision variant of Cvtqui2ss above; same halving trick.
912 3659 : void TurboAssembler::Cvtqui2sd(XMMRegister dst, Register src) {
913 3659 : Label done;
914 3659 : Cvtqsi2sd(dst, src);
915 3660 : testq(src, src);
916 3660 : j(positive, &done, Label::kNear);
917 :
918 : // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
919 3660 : if (src != kScratchRegister) movq(kScratchRegister, src);
920 : shrq(kScratchRegister, Immediate(1));
921 : // The LSB is shifted into CF. If it is set, set the LSB in {tmp}.
922 3660 : Label msb_not_set;
923 3660 : j(not_carry, &msb_not_set, Label::kNear);
924 : orq(kScratchRegister, Immediate(1));
925 3660 : bind(&msb_not_set);
926 3660 : Cvtqsi2sd(dst, kScratchRegister);
927 3660 : addsd(dst, dst);
928 3660 : bind(&done);
929 3660 : }
930 :
931 1232 : void TurboAssembler::Cvtqui2sd(XMMRegister dst, Operand src) {
932 1232 : movq(kScratchRegister, src);
933 1232 : Cvtqui2sd(dst, kScratchRegister);
934 1232 : }
935 :
935 :
        : // Truncating float -> signed integer conversions (32- and 64-bit
        : // results). These are thin dispatchers that prefer the AVX (VEX)
        : // encodings when available and otherwise fall back to the SSE forms.
936 435 : void TurboAssembler::Cvttss2si(Register dst, XMMRegister src) {
937 435 : if (CpuFeatures::IsSupported(AVX)) {
938 : CpuFeatureScope scope(this, AVX);
939 435 : vcvttss2si(dst, src);
940 : } else {
941 0 : cvttss2si(dst, src);
942 : }
943 435 : }
944 :
945 0 : void TurboAssembler::Cvttss2si(Register dst, Operand src) {
946 0 : if (CpuFeatures::IsSupported(AVX)) {
947 : CpuFeatureScope scope(this, AVX);
948 0 : vcvttss2si(dst, src);
949 : } else {
950 0 : cvttss2si(dst, src);
951 : }
952 0 : }
953 :
954 107904 : void TurboAssembler::Cvttsd2si(Register dst, XMMRegister src) {
955 107904 : if (CpuFeatures::IsSupported(AVX)) {
956 : CpuFeatureScope scope(this, AVX);
957 107382 : vcvttsd2si(dst, src);
958 : } else {
959 522 : cvttsd2si(dst, src);
960 : }
961 107904 : }
962 :
963 20268 : void TurboAssembler::Cvttsd2si(Register dst, Operand src) {
964 20268 : if (CpuFeatures::IsSupported(AVX)) {
965 : CpuFeatureScope scope(this, AVX);
966 20268 : vcvttsd2si(dst, src);
967 : } else {
968 0 : cvttsd2si(dst, src);
969 : }
970 20268 : }
971 :
972 278 : void TurboAssembler::Cvttss2siq(Register dst, XMMRegister src) {
973 278 : if (CpuFeatures::IsSupported(AVX)) {
974 : CpuFeatureScope scope(this, AVX);
975 278 : vcvttss2siq(dst, src);
976 : } else {
977 0 : cvttss2siq(dst, src);
978 : }
979 278 : }
980 :
981 0 : void TurboAssembler::Cvttss2siq(Register dst, Operand src) {
982 0 : if (CpuFeatures::IsSupported(AVX)) {
983 : CpuFeatureScope scope(this, AVX);
984 0 : vcvttss2siq(dst, src);
985 : } else {
986 0 : cvttss2siq(dst, src);
987 : }
988 0 : }
989 :
990 61513 : void TurboAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
991 61513 : if (CpuFeatures::IsSupported(AVX)) {
992 : CpuFeatureScope scope(this, AVX);
993 61102 : vcvttsd2siq(dst, src);
994 : } else {
995 411 : cvttsd2siq(dst, src);
996 : }
997 61517 : }
998 :
999 1 : void TurboAssembler::Cvttsd2siq(Register dst, Operand src) {
1000 1 : if (CpuFeatures::IsSupported(AVX)) {
1001 : CpuFeatureScope scope(this, AVX);
1002 1 : vcvttsd2siq(dst, src);
1003 : } else {
1004 0 : cvttsd2siq(dst, src);
1005 : }
1006 1 : }
1007 :
1007 :
1008 : namespace {
        : // Truncating float/double -> uint64 conversion. {fail} (may be null) is
        : // jumped to when the input is outside the uint64 range; with a null
        : // {fail}, control falls through to the end with the x64 overflow
        : // sentinel in {dst}. Clobbers kScratchRegister and kScratchDoubleReg.
1009 : template <typename OperandOrXMMRegister, bool is_double>
1010 3059 : void ConvertFloatToUint64(TurboAssembler* tasm, Register dst,
1011 : OperandOrXMMRegister src, Label* fail) {
1012 3059 : Label success;
1013 : // There does not exist a native float-to-uint instruction, so we have to use
1014 : // a float-to-int, and postprocess the result.
1015 : if (is_double) {
1016 2991 : tasm->Cvttsd2siq(dst, src);
1017 : } else {
1018 68 : tasm->Cvttss2siq(dst, src);
1019 : }
1020 : // If the result of the conversion is positive, we are already done.
1021 3059 : tasm->testq(dst, dst);
1022 3059 : tasm->j(positive, &success);
1023 : // The result of the first conversion was negative, which means that the
1024 : // input value was not within the positive int64 range. We subtract 2^63
1025 : // and convert it again to see if it is within the uint64 range.
1026 : if (is_double) {
1027 : tasm->Move(kScratchDoubleReg, -9223372036854775808.0);
1028 2991 : tasm->addsd(kScratchDoubleReg, src);
1029 2991 : tasm->Cvttsd2siq(dst, kScratchDoubleReg);
1030 : } else {
1031 : tasm->Move(kScratchDoubleReg, -9223372036854775808.0f);
1032 68 : tasm->addss(kScratchDoubleReg, src);
1033 68 : tasm->Cvttss2siq(dst, kScratchDoubleReg);
1034 : }
1035 : tasm->testq(dst, dst);
1036 : // The only possible negative value here is 0x8000000000000000, which is
1037 : // used on x64 to indicate an integer overflow.
1038 3059 : tasm->j(negative, fail ? fail : &success);
1039 : // The input value is within uint64 range and the second conversion worked
1040 : // successfully, but we still have to undo the subtraction we did
1041 : // earlier.
1042 3059 : tasm->Set(kScratchRegister, 0x8000000000000000);
1043 : tasm->orq(dst, kScratchRegister);
1044 3059 : tasm->bind(&success);
1045 3059 : }
1046 : } // namespace
1047 :
1047 :
        : // Public wrappers over ConvertFloatToUint64 for the four
        : // (double|float) x (Operand|XMMRegister) combinations. {success} is
        : // actually the failure label jumped to on out-of-range input (see the
        : // helper above); it may be null.
1048 0 : void TurboAssembler::Cvttsd2uiq(Register dst, Operand src, Label* success) {
1049 0 : ConvertFloatToUint64<Operand, true>(this, dst, src, success);
1050 0 : }
1051 :
1052 2991 : void TurboAssembler::Cvttsd2uiq(Register dst, XMMRegister src, Label* success) {
1053 2991 : ConvertFloatToUint64<XMMRegister, true>(this, dst, src, success);
1054 2991 : }
1055 :
1056 0 : void TurboAssembler::Cvttss2uiq(Register dst, Operand src, Label* success) {
1057 0 : ConvertFloatToUint64<Operand, false>(this, dst, src, success);
1058 0 : }
1059 :
1060 68 : void TurboAssembler::Cvttss2uiq(Register dst, XMMRegister src, Label* success) {
1061 68 : ConvertFloatToUint64<XMMRegister, false>(this, dst, src, success);
1062 68 : }
1063 :
1063 :
        : // Loads the 64-bit immediate {x} into {dst}, picking the shortest
        : // encoding: xor for 0, 32-bit move (implicitly zero-extended) for
        : // uint32 values, sign-extended 32-bit immediate for int32 values, and
        : // a full 10-byte movabs otherwise.
1064 2722224 : void TurboAssembler::Set(Register dst, int64_t x) {
1065 2722224 : if (x == 0) {
1066 868325 : xorl(dst, dst);
1067 1853899 : } else if (is_uint32(x)) {
1068 414671 : movl(dst, Immediate(static_cast<uint32_t>(x)));
1069 1439228 : } else if (is_int32(x)) {
1070 171437 : movq(dst, Immediate(static_cast<int32_t>(x)));
1071 : } else {
1072 1267791 : movq(dst, x);
1073 : }
1074 2722247 : }
1075 :
        : // Memory-destination variant: values outside the int32 range go through
        : // kScratchRegister because movq to memory only takes a 32-bit immediate.
1076 12984 : void TurboAssembler::Set(Operand dst, intptr_t x) {
1077 12984 : if (is_int32(x)) {
1078 8540 : movq(dst, Immediate(static_cast<int32_t>(x)));
1079 : } else {
1080 4444 : Set(kScratchRegister, x);
1081 4444 : movq(dst, kScratchRegister);
1082 : }
1083 12984 : }
1084 :
1084 :
1085 :
1086 : // ----------------------------------------------------------------------------
1087 : // Smi tagging, untagging and tag detection.
1088 :
        : // Materializes the Smi {source} in kScratchRegister and returns it.
        : // Callers must not rely on kScratchRegister surviving other macro ops.
1089 168 : Register TurboAssembler::GetSmiConstant(Smi source) {
1090 : STATIC_ASSERT(kSmiTag == 0);
1091 : int value = source->value();
1092 168 : if (value == 0) {
1093 0 : xorl(kScratchRegister, kScratchRegister);
1094 0 : return kScratchRegister;
1095 : }
1096 168 : Move(kScratchRegister, source);
1097 168 : return kScratchRegister;
1098 : }
1099 :
        : // Loads the Smi {source} into {dst}; zero gets the short xor encoding.
1100 2170698 : void TurboAssembler::Move(Register dst, Smi source) {
1101 : STATIC_ASSERT(kSmiTag == 0);
1102 : int value = source->value();
1103 2170698 : if (value == 0) {
1104 768629 : xorl(dst, dst);
1105 : } else {
1106 : Move(dst, source.ptr(), RelocInfo::NONE);
1107 : }
1108 2170700 : }
1109 :
        : // Loads an external reference; isolate-independent (embedded-builtin)
        : // code must load it indirectly through the root array instead of
        : // embedding the raw address.
1110 5643775 : void TurboAssembler::Move(Register dst, ExternalReference ext) {
1111 : if (FLAG_embedded_builtins) {
1112 5643775 : if (root_array_available_ && options().isolate_independent_code) {
1113 733880 : IndirectLoadExternalReference(dst, ext);
1114 733880 : return;
1115 : }
1116 : }
1117 4909895 : movq(dst, Immediate64(ext.address(), RelocInfo::EXTERNAL_REFERENCE));
1118 : }
1119 :
1119 :
        : // Tags the integer in {src} as a Smi into {dst} by shifting left.
1120 1246 : void MacroAssembler::SmiTag(Register dst, Register src) {
1121 : STATIC_ASSERT(kSmiTag == 0);
1122 1246 : if (dst != src) {
1123 305 : movq(dst, src);
1124 : }
1125 : DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
1126 1246 : shlq(dst, Immediate(kSmiShift));
1127 1246 : }
1128 :
        : // Untags the Smi in {src} into {dst} with an arithmetic right shift
        : // (preserves the sign of negative Smis).
1129 5176 : void TurboAssembler::SmiUntag(Register dst, Register src) {
1130 : STATIC_ASSERT(kSmiTag == 0);
1131 5176 : if (dst != src) {
1132 0 : movq(dst, src);
1133 : }
1134 : DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
1135 5176 : sarq(dst, Immediate(kSmiShift));
1136 5176 : }
1137 :
        : // Memory variant: with 32-bit Smi payloads the value lives entirely in
        : // the upper half of the word, so load just those 4 bytes and
        : // sign-extend instead of shifting.
1138 1848 : void TurboAssembler::SmiUntag(Register dst, Operand src) {
1139 : if (SmiValuesAre32Bits()) {
1140 3696 : movl(dst, Operand(src, kSmiShift / kBitsPerByte));
1141 : // Sign extend to 64-bit.
1142 1848 : movsxlq(dst, dst);
1143 : } else {
1144 : DCHECK(SmiValuesAre31Bits());
1145 : #ifdef V8_COMPRESS_POINTERS
1146 : movsxlq(dst, src);
1147 : #else
1148 : movq(dst, src);
1149 : #endif
1150 : sarq(dst, Immediate(kSmiShift));
1151 : }
1152 1153 : }
1153 :
1153 :
        : // Smi comparison helpers. All variants assert (in debug builds) that
        : // their inputs are Smis and compare with tagged-width instructions.
1154 165 : void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
1155 165 : AssertSmi(smi1);
1156 165 : AssertSmi(smi2);
1157 165 : cmp_tagged(smi1, smi2);
1158 165 : }
1159 :
1160 224 : void MacroAssembler::SmiCompare(Register dst, Smi src) {
1161 224 : AssertSmi(dst);
1162 224 : Cmp(dst, src);
1163 224 : }
1164 :
        : // Compares {dst} against the Smi constant {src}; zero uses a short
        : // test, other values are materialized in kScratchRegister first.
1165 224 : void MacroAssembler::Cmp(Register dst, Smi src) {
1166 : DCHECK_NE(dst, kScratchRegister);
1167 224 : if (src->value() == 0) {
1168 56 : test_tagged(dst, dst);
1169 : } else {
1170 168 : Register constant_reg = GetSmiConstant(src);
1171 168 : cmp_tagged(dst, constant_reg);
1172 : }
1173 224 : }
1174 :
1175 0 : void MacroAssembler::SmiCompare(Register dst, Operand src) {
1176 0 : AssertSmi(dst);
1177 0 : AssertSmi(src);
1178 0 : cmp_tagged(dst, src);
1179 0 : }
1180 :
1181 0 : void MacroAssembler::SmiCompare(Operand dst, Register src) {
1182 0 : AssertSmi(dst);
1183 0 : AssertSmi(src);
1184 0 : cmp_tagged(dst, src);
1185 0 : }
1186 :
        : // With 32-bit Smi payloads, compare only the upper half-word that
        : // holds the value; with 31-bit Smis the whole tagged word fits in 32
        : // bits and is compared directly.
1187 0 : void MacroAssembler::SmiCompare(Operand dst, Smi src) {
1188 0 : AssertSmi(dst);
1189 : if (SmiValuesAre32Bits()) {
1190 0 : cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
1191 : } else {
1192 : DCHECK(SmiValuesAre31Bits());
1193 : cmpl(dst, Immediate(src));
1194 : }
1195 0 : }
1196 :
1197 0 : void MacroAssembler::Cmp(Operand dst, Smi src) {
1198 : // The Operand cannot use the smi register.
1199 0 : Register smi_reg = GetSmiConstant(src);
1200 : DCHECK(!dst.AddressUsesRegister(smi_reg));
1201 0 : cmp_tagged(dst, smi_reg);
1202 0 : }
1203 :
1203 :
1204 :
        : // Sets the flags for a Smi check on {src} and returns the condition
        : // (zero) that holds when {src} is a Smi.
1205 40 : Condition TurboAssembler::CheckSmi(Register src) {
1206 : STATIC_ASSERT(kSmiTag == 0);
1207 298408 : testb(src, Immediate(kSmiTagMask));
1208 40 : return zero;
1209 : }
1210 :
1211 0 : Condition TurboAssembler::CheckSmi(Operand src) {
1212 : STATIC_ASSERT(kSmiTag == 0);
1213 0 : testb(src, Immediate(kSmiTagMask));
1214 0 : return zero;
1215 : }
1216 :
        : // Branches to {on_smi} when {src} is a Smi.
1217 298312 : void TurboAssembler::JumpIfSmi(Register src, Label* on_smi,
1218 : Label::Distance near_jump) {
1219 : Condition smi = CheckSmi(src);
1220 298313 : j(smi, on_smi, near_jump);
1221 298313 : }
1222 :
        : // Branches to {on_not_smi} when {src} is not a Smi.
1223 56 : void MacroAssembler::JumpIfNotSmi(Register src,
1224 : Label* on_not_smi,
1225 : Label::Distance near_jump) {
1226 : Condition smi = CheckSmi(src);
1227 56 : j(NegateCondition(smi), on_not_smi, near_jump);
1228 56 : }
1229 :
1230 0 : void MacroAssembler::JumpIfNotSmi(Operand src, Label* on_not_smi,
1231 : Label::Distance near_jump) {
1232 : Condition smi = CheckSmi(src);
1233 0 : j(NegateCondition(smi), on_not_smi, near_jump);
1234 0 : }
1235 :
1235 :
        : // Adds the Smi {constant} to the Smi stored at {dst} in memory. With
        : // 32-bit payloads only the value half-word is touched; with 31-bit
        : // Smis on 64-bit tagged words the result must be re-sign-extended.
1236 0 : void MacroAssembler::SmiAddConstant(Operand dst, Smi constant) {
1237 0 : if (constant->value() != 0) {
1238 : if (SmiValuesAre32Bits()) {
1239 0 : addl(Operand(dst, kSmiShift / kBitsPerByte),
1240 0 : Immediate(constant->value()));
1241 : } else {
1242 : DCHECK(SmiValuesAre31Bits());
1243 : if (kTaggedSize == kInt64Size) {
1244 : // Sign-extend value after addition
1245 : movl(kScratchRegister, dst);
1246 : addl(kScratchRegister, Immediate(constant));
1247 : movsxlq(kScratchRegister, kScratchRegister);
1248 : movq(dst, kScratchRegister);
1249 : } else {
1250 : DCHECK_EQ(kTaggedSize, kInt32Size);
1251 : addl(dst, Immediate(constant));
1252 : }
1253 : }
1254 : }
1255 0 : }
1256 :
1256 :
        : // Converts the Smi in {src} into an (index register, scale) pair such
        : // that index * scale == value << shift, for use in memory operands.
        : // Folds as much of the untag shift as possible into the returned scale.
1257 568 : SmiIndex MacroAssembler::SmiToIndex(Register dst,
1258 : Register src,
1259 : int shift) {
1260 : if (SmiValuesAre32Bits()) {
1261 : DCHECK(is_uint6(shift));
1262 : // There is a possible optimization if shift is in the range 60-63, but that
1263 : // will (and must) never happen.
1264 568 : if (dst != src) {
1265 200 : movq(dst, src);
1266 : }
1267 568 : if (shift < kSmiShift) {
1268 568 : sarq(dst, Immediate(kSmiShift - shift));
1269 : } else {
1270 0 : shlq(dst, Immediate(shift - kSmiShift));
1271 : }
1272 568 : return SmiIndex(dst, times_1);
1273 : } else {
1274 : DCHECK(SmiValuesAre31Bits());
1275 : if (dst != src) {
1276 : mov_tagged(dst, src);
1277 : }
1278 : // We have to sign extend the index register to 64-bit as the SMI might
1279 : // be negative.
1280 : movsxlq(dst, dst);
1281 : if (shift < kSmiShift) {
1282 : sarq(dst, Immediate(kSmiShift - shift));
1283 : } else if (shift != kSmiShift) {
1284 : if (shift - kSmiShift <= static_cast<int>(times_8)) {
1285 : return SmiIndex(dst, static_cast<ScaleFactor>(shift - kSmiShift));
1286 : }
1287 : shlq(dst, Immediate(shift - kSmiShift));
1288 : }
1289 : return SmiIndex(dst, times_1);
1290 : }
1291 : }
1292 :
1292 :
        : // Pushes the Smi constant {source}, choosing the shortest encoding:
        : // a 32-bit immediate push when possible, a push + single byte store
        : // when only one byte of the value is non-zero, else a full register
        : // materialization via kScratchRegister.
1293 56 : void TurboAssembler::Push(Smi source) {
1294 56 : intptr_t smi = static_cast<intptr_t>(source.ptr());
1295 56 : if (is_int32(smi)) {
1296 56 : Push(Immediate(static_cast<int32_t>(smi)));
1297 56 : return;
1298 : }
1299 0 : int first_byte_set = base::bits::CountTrailingZeros64(smi) / 8;
1300 0 : int last_byte_set = (63 - base::bits::CountLeadingZeros64(smi)) / 8;
1301 0 : if (first_byte_set == last_byte_set) {
1302 : // This sequence has only 7 bytes, compared to the 12 bytes below.
1303 : Push(Immediate(0));
1304 0 : movb(Operand(rsp, first_byte_set),
1305 0 : Immediate(static_cast<int8_t>(smi >> (8 * first_byte_set))));
1306 0 : return;
1307 : }
1308 0 : Register constant = GetSmiConstant(source);
1309 : Push(constant);
1310 : }
1311 :
1311 :
1312 : // ----------------------------------------------------------------------------
1313 :
        : // Register move that elides self-moves.
1314 728 : void TurboAssembler::Move(Register dst, Register src) {
1315 1048440 : if (dst != src) {
1316 631175 : movq(dst, src);
1317 : }
1318 728 : }
1319 :
        : // Performs the parallel moves dst0 <- src0 and dst1 <- src1, ordering
        : // the individual moves so neither source is clobbered before it is
        : // read; a full cycle degenerates to a register exchange.
1320 319911 : void TurboAssembler::MovePair(Register dst0, Register src0, Register dst1,
1321 : Register src1) {
1322 319911 : if (dst0 != src1) {
1323 : // Normal case: Writing to dst0 does not destroy src1.
1324 : Move(dst0, src0);
1325 : Move(dst1, src1);
1326 1756 : } else if (dst1 != src0) {
1327 : // Only dst0 and src1 are the same register,
1328 : // but writing to dst1 does not destroy src0.
1329 : Move(dst1, src1);
1330 : Move(dst0, src0);
1331 : } else {
1332 : // dst0 == src1, and dst1 == src0, a swap is required:
1333 : // dst0 \/ src0
1334 : // dst1 /\ src1
1335 14 : xchgq(dst0, dst1);
1336 : }
1337 319911 : }
1338 :
1338 :
        : // Loads the JS number {value} into {dst}: as a Smi when it is a
        : // Smi-representable integer, otherwise as a heap number.
1339 1523037 : void TurboAssembler::MoveNumber(Register dst, double value) {
1340 : int32_t smi;
1341 1523037 : if (DoubleToSmiInteger(value, &smi)) {
1342 1483446 : Move(dst, Smi::FromInt(smi));
1343 : } else {
1344 39591 : movq_heap_number(dst, value);
1345 : }
1346 1523036 : }
1347 :
        : // Materializes a 32-bit float bit pattern in {dst}. A pattern that is a
        : // single contiguous run of ones (pop + ntz + nlz == 32) is built
        : // without a GPR: all-ones via pcmpeqd, then shifted into place.
1348 170010 : void TurboAssembler::Move(XMMRegister dst, uint32_t src) {
1349 170010 : if (src == 0) {
1350 : Xorps(dst, dst);
1351 : } else {
1352 : unsigned nlz = base::bits::CountLeadingZeros(src);
1353 : unsigned ntz = base::bits::CountTrailingZeros(src);
1354 : unsigned pop = base::bits::CountPopulation(src);
1355 : DCHECK_NE(0u, pop);
1356 158849 : if (pop + ntz + nlz == 32) {
1357 : Pcmpeqd(dst, dst);
1358 70836 : if (ntz) Pslld(dst, static_cast<byte>(ntz + nlz));
1359 70835 : if (nlz) Psrld(dst, static_cast<byte>(nlz));
1360 : } else {
1361 88015 : movl(kScratchRegister, Immediate(src));
1362 : Movd(dst, kScratchRegister);
1363 : }
1364 : }
1365 170008 : }
1366 :
        : // 64-bit analogue of the above, for double bit patterns; clobbers
        : // kScratchRegister only on the general path.
1367 464665 : void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
1368 464665 : if (src == 0) {
1369 : Xorpd(dst, dst);
1370 : } else {
1371 : unsigned nlz = base::bits::CountLeadingZeros(src);
1372 : unsigned ntz = base::bits::CountTrailingZeros(src);
1373 : unsigned pop = base::bits::CountPopulation(src);
1374 : DCHECK_NE(0u, pop);
1375 358945 : if (pop + ntz + nlz == 64) {
1376 : Pcmpeqd(dst, dst);
1377 230432 : if (ntz) Psllq(dst, static_cast<byte>(ntz + nlz));
1378 230436 : if (nlz) Psrlq(dst, static_cast<byte>(nlz));
1379 : } else {
1380 128513 : uint32_t lower = static_cast<uint32_t>(src);
1381 128513 : uint32_t upper = static_cast<uint32_t>(src >> 32);
1382 128513 : if (upper == 0) {
1383 40 : Move(dst, lower);
1384 : } else {
1385 128473 : movq(kScratchRegister, src);
1386 : Movq(dst, kScratchRegister);
1387 : }
1388 : }
1389 : }
1390 464687 : }
1391 :
1391 :
1392 : // ----------------------------------------------------------------------------
1393 :
        : // Float abs/negate via bitwise masks loaded from external-reference
        : // constants (clear the sign bits for abs, flip them for negate).
        : // NOTE(review): the double variants also use the ps forms — presumably
        : // because the bitwise and/xor are identical for either lane width and
        : // the ps encodings are shorter; confirm before "fixing" to pd.
1394 5 : void MacroAssembler::Absps(XMMRegister dst) {
1395 5 : Andps(dst, ExternalReferenceAsOperand(
1396 : ExternalReference::address_of_float_abs_constant()));
1397 5 : }
1398 :
1399 5 : void MacroAssembler::Negps(XMMRegister dst) {
1400 5 : Xorps(dst, ExternalReferenceAsOperand(
1401 : ExternalReference::address_of_float_neg_constant()));
1402 5 : }
1403 :
1404 5 : void MacroAssembler::Abspd(XMMRegister dst) {
1405 5 : Andps(dst, ExternalReferenceAsOperand(
1406 : ExternalReference::address_of_double_abs_constant()));
1407 5 : }
1408 :
1409 5 : void MacroAssembler::Negpd(XMMRegister dst) {
1410 5 : Xorps(dst, ExternalReferenceAsOperand(
1411 : ExternalReference::address_of_double_neg_constant()));
1412 5 : }
1413 :
1413 :
        : // Compares {dst} against the object referred to by {source}: Smis are
        : // compared as immediates, heap objects via kScratchRegister.
1414 0 : void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
1415 : AllowDeferredHandleDereference smi_check;
1416 0 : if (source->IsSmi()) {
1417 0 : Cmp(dst, Smi::cast(*source));
1418 : } else {
1419 0 : Move(kScratchRegister, Handle<HeapObject>::cast(source));
1420 0 : cmp_tagged(dst, kScratchRegister);
1421 : }
1422 0 : }
1423 :
1424 112 : void MacroAssembler::Cmp(Operand dst, Handle<Object> source) {
1425 : AllowDeferredHandleDereference smi_check;
1426 112 : if (source->IsSmi()) {
1427 : Cmp(dst, Smi::cast(*source));
1428 : } else {
1429 112 : Move(kScratchRegister, Handle<HeapObject>::cast(source));
1430 112 : cmp_tagged(dst, kScratchRegister);
1431 : }
1432 112 : }
1433 :
        : // Branches to {on_in_range} when lower_limit <= value <= higher_limit.
        : // A non-zero lower limit is subtracted first (lea does not clobber
        : // {value}), reducing the range test to one unsigned compare.
1434 56 : void MacroAssembler::JumpIfIsInRange(Register value, unsigned lower_limit,
1435 : unsigned higher_limit, Label* on_in_range,
1436 : Label::Distance near_jump) {
1437 56 : if (lower_limit != 0) {
1438 112 : leal(kScratchRegister, Operand(value, 0u - lower_limit));
1439 56 : cmpl(kScratchRegister, Immediate(higher_limit - lower_limit));
1440 : } else {
1441 0 : cmpl(value, Immediate(higher_limit));
1442 : }
1443 56 : j(below_equal, on_in_range, near_jump);
1444 56 : }
1445 :
1445 :
        : // Pushes a heap object constant via kScratchRegister.
1446 21 : void TurboAssembler::Push(Handle<HeapObject> source) {
1447 21 : Move(kScratchRegister, source);
1448 : Push(kScratchRegister);
1449 21 : }
1450 :
        : // Loads a heap object constant. Isolate-independent (embedded-builtin)
        : // code loads it from the builtins constants table instead of embedding
        : // the address.
1451 6299252 : void TurboAssembler::Move(Register result, Handle<HeapObject> object,
1452 : RelocInfo::Mode rmode) {
1453 : if (FLAG_embedded_builtins) {
1454 6299252 : if (root_array_available_ && options().isolate_independent_code) {
1455 49728 : IndirectLoadConstant(result, object);
1456 49728 : return;
1457 : }
1458 : }
1459 6249524 : movq(result, Immediate64(object.address(), rmode));
1460 : }
1461 :
1462 0 : void TurboAssembler::Move(Operand dst, Handle<HeapObject> object,
1463 : RelocInfo::Mode rmode) {
1464 0 : Move(kScratchRegister, object, rmode);
1465 0 : movq(dst, kScratchRegister);
1466 0 : }
1467 :
        : // Loads a deferred string constant; {rmode} is unused here.
1468 2120 : void TurboAssembler::MoveStringConstant(Register result,
1469 : const StringConstantBase* string,
1470 : RelocInfo::Mode rmode) {
1471 2120 : movq_string(result, string);
1472 2120 : }
1473 :
1473 :
        : // Pops {stack_elements} words off the stack without reading them.
1474 288 : void MacroAssembler::Drop(int stack_elements) {
1475 288 : if (stack_elements > 0) {
1476 288 : addq(rsp, Immediate(stack_elements * kSystemPointerSize));
1477 : }
1478 288 : }
1479 :
1480 :
        : // Removes {stack_elements} words located below the return address.
        : // The single-element case pops the return address directly over the
        : // dropped slot; otherwise the return address is parked in {scratch}.
1481 56 : void MacroAssembler::DropUnderReturnAddress(int stack_elements,
1482 : Register scratch) {
1483 : DCHECK_GT(stack_elements, 0);
1484 56 : if (stack_elements == 1) {
1485 56 : popq(MemOperand(rsp, 0));
1486 56 : return;
1487 : }
1488 :
1489 : PopReturnAddressTo(scratch);
1490 0 : Drop(stack_elements);
1491 : PushReturnAddressFrom(scratch);
1492 : }
1493 :
        : // Thin push/pop wrappers over the raw 64-bit stack instructions.
1494 673439 : void TurboAssembler::Push(Register src) { pushq(src); }
1495 :
1496 60627 : void TurboAssembler::Push(Operand src) { pushq(src); }
1497 :
1498 805927 : void MacroAssembler::PushQuad(Operand src) { pushq(src); }
1499 :
1500 1726258 : void TurboAssembler::Push(Immediate value) { pushq(value); }
1501 :
1502 0 : void MacroAssembler::PushImm32(int32_t imm32) { pushq_imm32(imm32); }
1503 :
1504 3104 : void MacroAssembler::Pop(Register dst) { popq(dst); }
1505 :
1506 45111 : void MacroAssembler::Pop(Operand dst) { popq(dst); }
1507 :
1508 716393 : void MacroAssembler::PopQuad(Operand dst) { popq(dst); }
1509 :
1509 :
        : // Unconditional jumps to various target kinds; indirect targets go
        : // through kScratchRegister.
1510 0 : void TurboAssembler::Jump(ExternalReference ext) {
1511 0 : LoadAddress(kScratchRegister, ext);
1512 0 : jmp(kScratchRegister);
1513 0 : }
1514 :
1515 0 : void TurboAssembler::Jump(Operand op) { jmp(op); }
1516 :
1517 1024 : void TurboAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1518 : Move(kScratchRegister, destination, rmode);
1519 1024 : jmp(kScratchRegister);
1520 1024 : }
1521 :
        : // Conditional jump to a Code target. For isolate-independent builtins
        : // the on-heap trampoline is bypassed: the condition is inverted into a
        : // skip-branch and an absolute jump to the embedded-blob entry point is
        : // emitted inline.
1522 514044 : void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
1523 : Condition cc) {
1524 : DCHECK_IMPLIES(options().isolate_independent_code,
1525 : Builtins::IsIsolateIndependentBuiltin(*code_object));
1526 514044 : if (options().inline_offheap_trampolines) {
1527 464095 : int builtin_index = Builtins::kNoBuiltinId;
1528 464095 : if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1529 : Builtins::IsIsolateIndependent(builtin_index)) {
1530 463892 : Label skip;
1531 463892 : if (cc != always) {
1532 463886 : if (cc == never) return;
1533 463885 : j(NegateCondition(cc), &skip, Label::kNear);
1534 : }
1535 : // Inline the trampoline.
1536 463881 : RecordCommentForOffHeapTrampoline(builtin_index);
1537 463883 : CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1538 463890 : EmbeddedData d = EmbeddedData::FromBlob();
1539 463890 : Address entry = d.InstructionStartOfBuiltin(builtin_index);
1540 : Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1541 463890 : jmp(kScratchRegister);
1542 463891 : bind(&skip);
1543 463890 : return;
1544 : }
1545 : }
1546 50157 : j(cc, code_object, rmode);
1547 : }
1548 :
        : // Tail-calls into the embedded instruction stream at {entry}.
1549 36091339 : void MacroAssembler::JumpToInstructionStream(Address entry) {
1550 : Move(kOffHeapTrampolineRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1551 36091337 : jmp(kOffHeapTrampolineRegister);
1552 36091337 : }
1553 :
1553 :
        : // Call counterparts of the Jump helpers above.
1554 0 : void TurboAssembler::Call(ExternalReference ext) {
1555 0 : LoadAddress(kScratchRegister, ext);
1556 0 : call(kScratchRegister);
1557 0 : }
1558 :
        : // On Atom, a register-indirect call is preferred over a
        : // memory-indirect one, so the target is loaded first.
1559 3608 : void TurboAssembler::Call(Operand op) {
1560 3608 : if (!CpuFeatures::IsSupported(ATOM)) {
1561 3608 : call(op);
1562 : } else {
1563 0 : movq(kScratchRegister, op);
1564 0 : call(kScratchRegister);
1565 : }
1566 3608 : }
1567 :
1568 0 : void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1569 : Move(kScratchRegister, destination, rmode);
1570 0 : call(kScratchRegister);
1571 0 : }
1572 :
        : // Call to a Code target; isolate-independent builtins are called
        : // directly at their embedded-blob entry point (trampoline inlined).
1573 4830112 : void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1574 : DCHECK_IMPLIES(options().isolate_independent_code,
1575 : Builtins::IsIsolateIndependentBuiltin(*code_object));
1576 4830112 : if (options().inline_offheap_trampolines) {
1577 3997610 : int builtin_index = Builtins::kNoBuiltinId;
1578 3997610 : if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1579 : Builtins::IsIsolateIndependent(builtin_index)) {
1580 : // Inline the trampoline.
1581 3981388 : RecordCommentForOffHeapTrampoline(builtin_index);
1582 3981392 : CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1583 3981394 : EmbeddedData d = EmbeddedData::FromBlob();
1584 3981394 : Address entry = d.InstructionStartOfBuiltin(builtin_index);
1585 : Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1586 3981403 : call(kScratchRegister);
1587 : return;
1588 : }
1589 : }
1590 : DCHECK(RelocInfo::IsCodeTarget(rmode));
1591 848726 : call(code_object, rmode);
1592 : }
1593 :
1593 :
        : // Calls a builtin whose index is held (as a Smi) in {builtin_pointer},
        : // dispatching through the isolate's builtin entry table.
1594 3608 : void TurboAssembler::CallBuiltinPointer(Register builtin_pointer) {
1595 : #if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1596 : STATIC_ASSERT(kSmiShiftSize == 0);
1597 : STATIC_ASSERT(kSmiTagSize == 1);
1598 : STATIC_ASSERT(kSmiTag == 0);
1599 :
1600 : // The builtin_pointer register contains the builtin index as a Smi.
1601 : // Untagging is folded into the indexing operand below (we use times_4 instead
1602 : // of times_8 since smis are already shifted by one).
1603 : Call(Operand(kRootRegister, builtin_pointer, times_4,
1604 : IsolateData::builtin_entry_table_offset()));
1605 : #else // defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1606 : STATIC_ASSERT(kSmiShiftSize == 31);
1607 : STATIC_ASSERT(kSmiTagSize == 1);
1608 : STATIC_ASSERT(kSmiTag == 0);
1609 :
1610 : // The builtin_pointer register contains the builtin index as a Smi.
1611 3608 : SmiUntag(builtin_pointer, builtin_pointer);
1612 7216 : Call(Operand(kRootRegister, builtin_pointer, times_8,
1613 3608 : IsolateData::builtin_entry_table_offset()));
1614 : #endif // defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1615 3608 : }
1616 :
1616 :
        : // Computes the entry point of {code_object} into {destination}.
        : // {destination} and {code_object} may alias.
1617 406966 : void TurboAssembler::LoadCodeObjectEntry(Register destination,
1618 : Register code_object) {
1619 : // Code objects are called differently depending on whether we are generating
1620 : // builtin code (which will later be embedded into the binary) or compiling
1621 : // user JS code at runtime.
1622 : // * Builtin code runs in --jitless mode and thus must not call into on-heap
1623 : // Code targets. Instead, we dispatch through the builtins entry table.
1624 : // * Codegen at runtime does not have this restriction and we can use the
1625 : // shorter, branchless instruction sequence. The assumption here is that
1626 : // targets are usually generated code and not builtin Code objects.
1627 :
1628 406966 : if (options().isolate_independent_code) {
1629 : DCHECK(root_array_available());
1630 9688 : Label if_code_is_off_heap, out;
1631 :
1632 : // Check whether the Code object is an off-heap trampoline. If so, call its
1633 : // (off-heap) entry point directly without going through the (on-heap)
1634 : // trampoline. Otherwise, just call the Code object as always.
1635 9688 : testl(FieldOperand(code_object, Code::kFlagsOffset),
1636 : Immediate(Code::IsOffHeapTrampoline::kMask));
1637 9688 : j(not_equal, &if_code_is_off_heap);
1638 :
1639 : // Not an off-heap trampoline, the entry point is at
1640 : // Code::raw_instruction_start().
1641 : Move(destination, code_object);
1642 : addq(destination, Immediate(Code::kHeaderSize - kHeapObjectTag));
1643 9688 : jmp(&out);
1644 :
1645 : // An off-heap trampoline, the entry point is loaded from the builtin entry
1646 : // table.
1647 9688 : bind(&if_code_is_off_heap);
1648 : movl(destination, FieldOperand(code_object, Code::kBuiltinIndexOffset));
1649 19376 : movq(destination,
1650 : Operand(kRootRegister, destination, times_system_pointer_size,
1651 : IsolateData::builtin_entry_table_offset()));
1652 :
1653 9688 : bind(&out);
1654 : } else {
1655 : Move(destination, code_object);
1656 397278 : addq(destination, Immediate(Code::kHeaderSize - kHeapObjectTag));
1657 : }
1658 406966 : }
1659 :
        : // Calls / jumps to {code_object}'s entry point; clobbers {code_object}.
1660 24106 : void TurboAssembler::CallCodeObject(Register code_object) {
1661 24106 : LoadCodeObjectEntry(code_object, code_object);
1662 24107 : call(code_object);
1663 24106 : }
1664 :
1665 728 : void TurboAssembler::JumpCodeObject(Register code_object) {
1666 728 : LoadCodeObjectEntry(code_object, code_object);
1667 728 : jmp(code_object);
1668 728 : }
1669 :
1669 :
        : // Indirect call through a retpoline: the real target is smuggled onto
        : // the stack over a dummy return address, so the CPU's speculative
        : // return prediction is trapped in the pause/jmp loop instead of
        : // following an attacker-trained indirect branch (Spectre v2).
1670 0 : void TurboAssembler::RetpolineCall(Register reg) {
1671 0 : Label setup_return, setup_target, inner_indirect_branch, capture_spec;
1672 :
1673 0 : jmp(&setup_return); // Jump past the entire retpoline below.
1674 :
1675 0 : bind(&inner_indirect_branch);
1676 0 : call(&setup_target);
1677 :
1678 0 : bind(&capture_spec);
1679 0 : pause();
1680 0 : jmp(&capture_spec);
1681 :
1682 0 : bind(&setup_target);
1683 0 : movq(Operand(rsp, 0), reg);
1684 0 : ret(0);
1685 :
1686 0 : bind(&setup_return);
1687 0 : call(&inner_indirect_branch); // Callee will return after this instruction.
1688 0 : }
1689 :
1690 0 : void TurboAssembler::RetpolineCall(Address destination, RelocInfo::Mode rmode) {
1691 : Move(kScratchRegister, destination, rmode);
1692 0 : RetpolineCall(kScratchRegister);
1693 0 : }
1694 :
        : // Jump analogue of RetpolineCall: the call pushes a return address that
        : // is immediately overwritten with the real target, then "returned" to.
1695 0 : void TurboAssembler::RetpolineJump(Register reg) {
1696 0 : Label setup_target, capture_spec;
1697 :
1698 0 : call(&setup_target);
1699 :
1700 0 : bind(&capture_spec);
1701 0 : pause();
1702 0 : jmp(&capture_spec);
1703 :
1704 0 : bind(&setup_target);
1705 0 : movq(Operand(rsp, 0), reg);
1706 0 : ret(0);
1707 0 : }
1708 :
1708 :
        : // Extracts the 32-bit lane {imm8} of {src} into {dst}. Without SSE4.1
        : // only lanes 0 and 1 are supported (movd / movq+shift fallback).
1709 43395 : void TurboAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
1710 43395 : if (imm8 == 0) {
1711 : Movd(dst, src);
1712 : return;
1713 : }
1714 42902 : if (CpuFeatures::IsSupported(SSE4_1)) {
1715 : CpuFeatureScope sse_scope(this, SSE4_1);
1716 42566 : pextrd(dst, src, imm8);
1717 : return;
1718 : }
1719 : DCHECK_EQ(1, imm8);
1720 336 : movq(dst, src);
1721 : shrq(dst, Immediate(32));
1722 : }
1723 :
        : // Inserts {src} into 32-bit lane {imm8} of {dst}. Without SSE4.1 only
        : // lanes 0 and 1 are supported; clobbers kScratchDoubleReg on that path.
1724 416 : void TurboAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
1725 416 : if (CpuFeatures::IsSupported(SSE4_1)) {
1726 : CpuFeatureScope sse_scope(this, SSE4_1);
1727 414 : pinsrd(dst, src, imm8);
1728 : return;
1729 : }
1730 : Movd(kScratchDoubleReg, src);
1731 2 : if (imm8 == 1) {
1732 : punpckldq(dst, kScratchDoubleReg);
1733 : } else {
1734 : DCHECK_EQ(0, imm8);
1735 : Movss(dst, kScratchDoubleReg);
1736 : }
1737 : }
1738 :
1739 1488 : void TurboAssembler::Pinsrd(XMMRegister dst, Operand src, int8_t imm8) {
1740 1488 : if (CpuFeatures::IsSupported(SSE4_1)) {
1741 : CpuFeatureScope sse_scope(this, SSE4_1);
1742 1488 : pinsrd(dst, src, imm8);
1743 : return;
1744 : }
1745 : Movd(kScratchDoubleReg, src);
1746 0 : if (imm8 == 1) {
1747 : punpckldq(dst, kScratchDoubleReg);
1748 : } else {
1749 : DCHECK_EQ(0, imm8);
1750 : Movss(dst, kScratchDoubleReg);
1751 : }
1752 : }
1753 :
1753 :
1754 626 : void TurboAssembler::Lzcntl(Register dst, Register src) {
1755 626 : if (CpuFeatures::IsSupported(LZCNT)) {
1756 : CpuFeatureScope scope(this, LZCNT);
1757 0 : lzcntl(dst, src);
1758 : return;
1759 : }
1760 626 : Label not_zero_src;
1761 626 : bsrl(dst, src);
1762 626 : j(not_zero, ¬_zero_src, Label::kNear);
1763 626 : Set(dst, 63); // 63^31 == 32
1764 626 : bind(¬_zero_src);
1765 : xorl(dst, Immediate(31)); // for x in [0..31], 31^x == 31 - x
1766 : }
1767 :
1768 40 : void TurboAssembler::Lzcntl(Register dst, Operand src) {
1769 40 : if (CpuFeatures::IsSupported(LZCNT)) {
1770 : CpuFeatureScope scope(this, LZCNT);
1771 0 : lzcntl(dst, src);
1772 : return;
1773 : }
1774 40 : Label not_zero_src;
1775 40 : bsrl(dst, src);
1776 40 : j(not_zero, ¬_zero_src, Label::kNear);
1777 40 : Set(dst, 63); // 63^31 == 32
1778 40 : bind(¬_zero_src);
1779 : xorl(dst, Immediate(31)); // for x in [0..31], 31^x == 31 - x
1780 : }
1781 :
1782 34 : void TurboAssembler::Lzcntq(Register dst, Register src) {
1783 34 : if (CpuFeatures::IsSupported(LZCNT)) {
1784 : CpuFeatureScope scope(this, LZCNT);
1785 0 : lzcntq(dst, src);
1786 : return;
1787 : }
1788 34 : Label not_zero_src;
1789 34 : bsrq(dst, src);
1790 34 : j(not_zero, ¬_zero_src, Label::kNear);
1791 34 : Set(dst, 127); // 127^63 == 64
1792 34 : bind(¬_zero_src);
1793 : xorl(dst, Immediate(63)); // for x in [0..63], 63^x == 63 - x
1794 : }
1795 :
1796 0 : void TurboAssembler::Lzcntq(Register dst, Operand src) {
1797 0 : if (CpuFeatures::IsSupported(LZCNT)) {
1798 : CpuFeatureScope scope(this, LZCNT);
1799 0 : lzcntq(dst, src);
1800 : return;
1801 : }
1802 0 : Label not_zero_src;
1803 0 : bsrq(dst, src);
1804 0 : j(not_zero, ¬_zero_src, Label::kNear);
1805 0 : Set(dst, 127); // 127^63 == 64
1806 0 : bind(¬_zero_src);
1807 : xorl(dst, Immediate(63)); // for x in [0..63], 63^x == 63 - x
1808 : }
1809 :
1810 34 : void TurboAssembler::Tzcntq(Register dst, Register src) {
1811 34 : if (CpuFeatures::IsSupported(BMI1)) {
1812 : CpuFeatureScope scope(this, BMI1);
1813 0 : tzcntq(dst, src);
1814 : return;
1815 : }
1816 34 : Label not_zero_src;
1817 34 : bsfq(dst, src);
1818 34 : j(not_zero, ¬_zero_src, Label::kNear);
1819 : // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
1820 34 : Set(dst, 64);
1821 34 : bind(¬_zero_src);
1822 : }
1823 :
1824 0 : void TurboAssembler::Tzcntq(Register dst, Operand src) {
1825 0 : if (CpuFeatures::IsSupported(BMI1)) {
1826 : CpuFeatureScope scope(this, BMI1);
1827 0 : tzcntq(dst, src);
1828 : return;
1829 : }
1830 0 : Label not_zero_src;
1831 0 : bsfq(dst, src);
1832 0 : j(not_zero, ¬_zero_src, Label::kNear);
1833 : // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
1834 0 : Set(dst, 64);
1835 0 : bind(¬_zero_src);
1836 : }
1837 :
1838 332 : void TurboAssembler::Tzcntl(Register dst, Register src) {
1839 332 : if (CpuFeatures::IsSupported(BMI1)) {
1840 : CpuFeatureScope scope(this, BMI1);
1841 0 : tzcntl(dst, src);
1842 : return;
1843 : }
1844 332 : Label not_zero_src;
1845 332 : bsfl(dst, src);
1846 332 : j(not_zero, ¬_zero_src, Label::kNear);
1847 332 : Set(dst, 32); // The result of tzcnt is 32 if src = 0.
1848 332 : bind(¬_zero_src);
1849 : }
1850 :
1851 0 : void TurboAssembler::Tzcntl(Register dst, Operand src) {
1852 0 : if (CpuFeatures::IsSupported(BMI1)) {
1853 : CpuFeatureScope scope(this, BMI1);
1854 0 : tzcntl(dst, src);
1855 : return;
1856 : }
1857 0 : Label not_zero_src;
1858 0 : bsfl(dst, src);
1859 0 : j(not_zero, ¬_zero_src, Label::kNear);
1860 0 : Set(dst, 32); // The result of tzcnt is 32 if src = 0.
1861 0 : bind(¬_zero_src);
1862 : }
1863 :
1864 80 : void TurboAssembler::Popcntl(Register dst, Register src) {
1865 80 : if (CpuFeatures::IsSupported(POPCNT)) {
1866 : CpuFeatureScope scope(this, POPCNT);
1867 80 : popcntl(dst, src);
1868 80 : return;
1869 : }
1870 0 : UNREACHABLE();
1871 : }
1872 :
1873 0 : void TurboAssembler::Popcntl(Register dst, Operand src) {
1874 0 : if (CpuFeatures::IsSupported(POPCNT)) {
1875 : CpuFeatureScope scope(this, POPCNT);
1876 0 : popcntl(dst, src);
1877 0 : return;
1878 : }
1879 0 : UNREACHABLE();
1880 : }
1881 :
1882 42 : void TurboAssembler::Popcntq(Register dst, Register src) {
1883 42 : if (CpuFeatures::IsSupported(POPCNT)) {
1884 : CpuFeatureScope scope(this, POPCNT);
1885 42 : popcntq(dst, src);
1886 42 : return;
1887 : }
1888 0 : UNREACHABLE();
1889 : }
1890 :
1891 0 : void TurboAssembler::Popcntq(Register dst, Operand src) {
1892 0 : if (CpuFeatures::IsSupported(POPCNT)) {
1893 : CpuFeatureScope scope(this, POPCNT);
1894 0 : popcntq(dst, src);
1895 0 : return;
1896 : }
1897 0 : UNREACHABLE();
1898 : }
1899 :
1900 :
// Pushes the general-purpose registers that must be preserved across a
// safepoint. rsp/rbp (frame registers), r10 (kScratchRegister) and r13
// (kRootRegister) are deliberately skipped; rsp is then lowered so the
// saved area still spans kNumSafepointRegisters slots.
void MacroAssembler::Pushad() {
  Push(rax);
  Push(rcx);
  Push(rdx);
  Push(rbx);
  // Not pushing rsp or rbp.
  Push(rsi);
  Push(rdi);
  Push(r8);
  Push(r9);
  // r10 is kScratchRegister.
  Push(r11);
  Push(r12);
  // r13 is kRootRegister.
  Push(r14);
  Push(r15);
  STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta = (kNumSafepointRegisters - kNumSafepointSavedRegisters) *
                 kSystemPointerSize;
  leaq(rsp, Operand(rsp, -sp_delta));
}


// Restores the registers saved by Pushad, in exact reverse order.
void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta = (kNumSafepointRegisters - kNumSafepointSavedRegisters) *
                 kSystemPointerSize;
  leaq(rsp, Operand(rsp, sp_delta));
  Pop(r15);
  Pop(r14);
  Pop(r12);
  Pop(r11);
  Pop(r9);
  Pop(r8);
  Pop(rdi);
  Pop(rsi);
  Pop(rbx);
  Pop(rdx);
  Pop(rcx);
  Pop(rax);
}


// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
// Maps each register code to its index in that push sequence, or -1 for
// registers Pushad does not save.
const int
    MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
        0,   // rax
        1,   // rcx
        2,   // rdx
        3,   // rbx
        -1,  // rsp (not saved)
        -1,  // rbp (not saved)
        4,   // rsi
        5,   // rdi
        6,   // r8
        7,   // r9
        -1,  // r10 (kScratchRegister)
        8,   // r11
        9,   // r12
        -1,  // r13 (kRootRegister)
        10,  // r14
        11   // r15
};
1966 :
// Pushes a new stack handler (padding word + link to the previous handler)
// and installs it as the current handler in the isolate's handler address.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kSystemPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  Push(Immediate(0));  // Padding.

  // Link the current handler as the next handler.
  ExternalReference handler_address =
      ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
  Push(ExternalReferenceAsOperand(handler_address));

  // Set this new handler as the current one.
  movq(ExternalReferenceAsOperand(handler_address), rsp);
}


// Unlinks the topmost stack handler: restores the previous handler pointer
// into the isolate and drops the remainder of the handler from the stack.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address =
      ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
  Pop(ExternalReferenceAsOperand(handler_address));
  addq(rsp, Immediate(StackHandlerConstants::kSize - kSystemPointerSize));
}
1991 :
// Plain return with no argument adjustment.
void TurboAssembler::Ret() { ret(0); }

// Returns and removes |bytes_dropped| bytes of arguments from the caller's
// stack. ret only encodes a 16-bit immediate, so larger amounts are handled
// by popping the return address into |scratch|, adjusting rsp explicitly,
// and pushing the return address back before returning.
void TurboAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    PopReturnAddressTo(scratch);
    addq(rsp, Immediate(bytes_dropped));
    PushReturnAddressFrom(scratch);
    ret(0);
  }
}
2004 :
// Loads |heap_object|'s map into |map| and compares its instance type with
// |type|, setting the flags for a following conditional jump.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  LoadTaggedPointerField(map,
                         FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


// Compares the 16-bit instance type stored in |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
}
2017 :
// Truncates the double in |input_reg| to an int32 in |result_reg|. Jumps to
// |lost_precision| if the value does not round-trip exactly through int32,
// and to |is_nan| if the input is NaN (ucomisd sets parity for unordered).
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch, Label* lost_precision,
                               Label* is_nan, Label::Distance dst) {
  Cvttsd2si(result_reg, input_reg);
  // Convert back and compare with the original to detect truncation loss.
  Cvtlsi2sd(kScratchDoubleReg, result_reg);
  Ucomisd(kScratchDoubleReg, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // NaN.
}
2027 :
2028 :
// Aborts (debug code only) if |object| is a Smi.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(NegateCondition(is_smi), AbortReason::kOperandIsASmi);
  }
}


// Aborts (debug code only) if |object| is not a Smi.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, AbortReason::kOperandIsNotASmi);
  }
}

// Memory-operand variant of AssertSmi.
void MacroAssembler::AssertSmi(Operand object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, AbortReason::kOperandIsNotASmi);
  }
}

// Aborts (debug code only) if the upper 32 bits of |int32_register| are not
// zero, i.e. the register does not hold a properly zero-extended value.
void TurboAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    DCHECK_NE(int32_register, kScratchRegister);
    movq(kScratchRegister, int64_t{0x0000000100000000});  // 2^32.
    cmpq(kScratchRegister, int32_register);
    Check(above_equal, AbortReason::k32BitValueInRegisterIsNotZeroExtended);
  }
}
2059 :
// Aborts (debug code only) if |object| is a Smi or its map does not have the
// constructor bit set. |object| is preserved via push/pop around the check.
void MacroAssembler::AssertConstructor(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmiAndNotAConstructor);
    Push(object);
    LoadTaggedPointerField(object,
                           FieldOperand(object, HeapObject::kMapOffset));
    testb(FieldOperand(object, Map::kBitFieldOffset),
          Immediate(Map::IsConstructorBit::kMask));
    Pop(object);
    Check(not_zero, AbortReason::kOperandIsNotAConstructor);
  }
}

// Aborts (debug code only) if |object| is not a JSFunction.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, AbortReason::kOperandIsNotAFunction);
  }
}


// Aborts (debug code only) if |object| is not a JSBoundFunction.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, AbortReason::kOperandIsNotABoundFunction);
  }
}
2096 :
// Aborts (debug code only) if |object| is not one of JSGeneratorObject,
// JSAsyncFunctionObject or JSAsyncGeneratorObject. |object| is preserved
// via push/pop; its register is reused to hold the map during the checks.
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (!emit_debug_code()) return;
  testb(object, Immediate(kSmiTagMask));
  Check(not_equal, AbortReason::kOperandIsASmiAndNotAGeneratorObject);

  // Load map
  Register map = object;
  Push(object);
  LoadTaggedPointerField(map, FieldOperand(object, HeapObject::kMapOffset));

  Label do_check;
  // Check if JSGeneratorObject
  CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
  j(equal, &do_check);

  // Check if JSAsyncFunctionObject
  CmpInstanceType(map, JS_ASYNC_FUNCTION_OBJECT_TYPE);
  j(equal, &do_check);

  // Check if JSAsyncGeneratorObject
  CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);

  bind(&do_check);
  // Restore generator object to register and perform assertion
  Pop(object);
  Check(equal, AbortReason::kOperandIsNotAGeneratorObject);
}
2124 :
// Aborts (debug code only) if |object| is neither the undefined value nor an
// object whose map is the allocation-site map.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    Cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    Assert(equal, AbortReason::kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}

// Checks the weak reference in |in_out|: jumps to |target_if_cleared| if it
// has been cleared; otherwise strips the weak tag so |in_out| holds a plain
// object pointer.
void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
  cmpl(in_out, Immediate(kClearedWeakHeapObjectLower32));
  j(equal, target_if_cleared);

  andq(in_out, Immediate(~static_cast<int32_t>(kWeakHeapObjectMask)));
}
2143 :
// Increments the native-code stats counter by |value| (must be positive),
// but only when native code counters are enabled.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK_GT(value, 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand =
        ExternalReferenceAsOperand(ExternalReference::Create(counter));
    // This operation has to be exactly 32-bit wide in case the external
    // reference table redirects the counter to a uint32_t dummy_stats_counter_
    // field.
    if (value == 1) {
      incl(counter_operand);
    } else {
      addl(counter_operand, Immediate(value));
    }
  }
}


// Decrements the native-code stats counter by |value| (must be positive),
// mirroring IncrementCounter above.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK_GT(value, 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand =
        ExternalReferenceAsOperand(ExternalReference::Create(counter));
    // This operation has to be exactly 32-bit wide in case the external
    // reference table redirects the counter to a uint32_t dummy_stats_counter_
    // field.
    if (value == 1) {
      decl(counter_operand);
    } else {
      subl(counter_operand, Immediate(value));
    }
  }
}
2176 :
// Emits a check of the debugger's restart-frame-pointer slot; if it is
// non-zero, jumps to the FrameDropperTrampoline builtin to restart the
// function whose frame is being dropped. Clobbers rbx.
void MacroAssembler::MaybeDropFrames() {
  // Check whether we need to drop frames to restart a function on the stack.
  ExternalReference restart_fp =
      ExternalReference::debug_restart_fp_address(isolate());
  Load(rbx, restart_fp);
  testq(rbx, rbx);

  Label dont_drop;
  j(zero, &dont_drop, Label::kNear);
  Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET);

  bind(&dont_drop);
}
2190 :
// Removes the current frame and slides the callee's arguments (plus receiver
// and return address) down over the caller's arguments so a tail call can
// reuse the caller's stack space. |caller_args_count_reg| holds the caller's
// argument count on entry and is clobbered; |scratch0|/|scratch1| are
// clobbered as well. The statement order below is load-bearing: the return
// address and rbp must be rescued before the copy loop may overwrite them.
void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    subq(caller_args_count_reg, callee_args_count.reg());
    leaq(new_sp_reg,
         Operand(rbp, caller_args_count_reg, times_system_pointer_size,
                 StandardFrameConstants::kCallerPCOffset));
  } else {
    leaq(new_sp_reg,
         Operand(rbp, caller_args_count_reg, times_system_pointer_size,
                 StandardFrameConstants::kCallerPCOffset -
                     callee_args_count.immediate() * kSystemPointerSize));
  }

  if (FLAG_debug_code) {
    cmpq(rsp, new_sp_reg);
    Check(below, AbortReason::kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  movq(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
  movq(Operand(rsp, 0), tmp_reg);

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  movq(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    leaq(count_reg, Operand(callee_args_count.reg(), 2));
  } else {
    movq(count_reg, Immediate(callee_args_count.immediate() + 2));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  decq(count_reg);
  movq(tmp_reg, Operand(rsp, count_reg, times_system_pointer_size, 0));
  movq(Operand(new_sp_reg, count_reg, times_system_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmpq(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  movq(rsp, new_sp_reg);
}
2258 :
// Invokes |function| with |actual| arguments, reading the expected argument
// count from the function's SharedFunctionInfo into rbx. Clobbers rbx.
void MacroAssembler::InvokeFunction(Register function, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  LoadTaggedPointerField(
      rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movzxwq(rbx,
          FieldOperand(rbx, SharedFunctionInfo::kFormalParameterCountOffset));

  ParameterCount expected(rbx);
  InvokeFunction(function, new_target, expected, actual, flag);
}

// Invokes |function| (which must be in rdi): loads the function's context
// into rsi, then dispatches to InvokeFunctionCode.
void MacroAssembler::InvokeFunction(Register function, Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  DCHECK(function == rdi);
  LoadTaggedPointerField(rsi,
                         FieldOperand(function, JSFunction::kContextOffset));
  InvokeFunctionCode(rdi, new_target, expected, actual, flag);
}

// Calls or tail-calls |function|'s code object: runs the debug hook if
// active, clears new.target if absent, reconciles expected vs. actual
// argument counts via InvokePrologue, then calls/jumps through the
// function's code field (in rcx, per kJavaScriptCallCodeStartRegister).
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function == rdi);
  DCHECK_IMPLIES(new_target.is_valid(), new_target == rdx);

  // On function call, call into the debugger if necessary.
  CheckDebugHook(function, new_target, expected, actual);

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(rdx, RootIndex::kUndefinedValue);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    LoadTaggedPointerField(rcx,
                           FieldOperand(function, JSFunction::kCodeOffset));
    if (flag == CALL_FUNCTION) {
      CallCodeObject(rcx);
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      JumpCodeObject(rcx);
    }
    bind(&done);
  }
}
2318 :
// Reconciles the expected and actual argument counts before a call. When the
// counts match (or adaptation is disabled via the don't-adapt sentinel),
// falls through; otherwise calls/jumps to the ArgumentsAdaptorTrampoline.
// Sets *definitely_mismatches when the mismatch is known at compile time, in
// which case the caller must not emit the normal call. Uses rax for the
// actual count and rbx for the expected count, per the adaptor's ABI.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual, Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    Set(rax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      Set(rax, actual.immediate());
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      DCHECK(expected.reg() == rbx);
    } else if (expected.reg() != actual.reg()) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      DCHECK(actual.reg() == rax);
      DCHECK(expected.reg() == rbx);
    } else {
      definitely_matches = true;
      Move(rax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      if (!*definitely_mismatches) {
        jmp(done, near_jump);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2380 :
// If the debugger's on-function-call hook is active, calls
// Runtime::kDebugOnFunctionCall. All live registers (expected/actual counts,
// new.target, the function) are saved around the runtime call; counts are
// Smi-tagged while pushed and untagged again afterwards.
void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual) {
  Label skip_hook;
  ExternalReference debug_hook_active =
      ExternalReference::debug_hook_on_function_call_address(isolate());
  Operand debug_hook_active_operand =
      ExternalReferenceAsOperand(debug_hook_active);
  cmpb(debug_hook_active_operand, Immediate(0));
  j(equal, &skip_hook);

  {
    // Only open a new frame if we are not already inside one.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg(), expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg(), actual.reg());
      Push(actual.reg());
      SmiUntag(actual.reg(), actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    Push(fun);
    Push(fun);
    // Pass the receiver to the runtime call as well.
    Push(StackArgumentsAccessor(rbp, actual).GetReceiverOperand());
    CallRuntime(Runtime::kDebugOnFunctionCall);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg(), actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg(), expected.reg());
    }
  }
  bind(&skip_hook);
}
2426 :
// Sets up a stub frame: saved rbp, new rbp, and a frame-type marker slot.
void TurboAssembler::StubPrologue(StackFrame::Type type) {
  pushq(rbp);  // Caller's frame pointer.
  movq(rbp, rsp);
  Push(Immediate(StackFrame::TypeToMarker(type)));
}

// Sets up a standard JavaScript frame: saved rbp, new rbp, context and
// function slots.
void TurboAssembler::Prologue() {
  pushq(rbp);  // Caller's frame pointer.
  movq(rbp, rsp);
  Push(rsi);  // Callee's context.
  Push(rdi);  // Callee's JS function.
}

// Sets up a typed frame with the marker for |type|.
void TurboAssembler::EnterFrame(StackFrame::Type type) {
  pushq(rbp);
  movq(rbp, rsp);
  Push(Immediate(StackFrame::TypeToMarker(type)));
}

// Tears down the current frame; in debug code, first verifies that the
// frame's type marker matches |type|.
void TurboAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmpq(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(StackFrame::TypeToMarker(type)));
    Check(equal, AbortReason::kStackFrameTypesMustMatch);
  }
  movq(rsp, rbp);
  popq(rbp);
}
2455 :
// Builds the fixed part of an exit frame (frame used when calling from JS
// into C++): saved rbp, frame marker, saved-sp slot, and records rbp, rsi
// and rbx in the isolate's thread-local top. If |save_rax| is set, rax is
// preserved in callee-save r14 for later use by EnterExitFrame.
void MacroAssembler::EnterExitFramePrologue(bool save_rax,
                                            StackFrame::Type frame_type) {
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT);

  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  DCHECK_EQ(kFPOnStackSize + kPCOnStackSize,
            ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(kFPOnStackSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kSystemPointerSize, ExitFrameConstants::kCallerFPOffset);
  pushq(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer.
  Push(Immediate(StackFrame::TypeToMarker(frame_type)));
  DCHECK_EQ(-2 * kSystemPointerSize, ExitFrameConstants::kSPOffset);
  Push(Immediate(0));  // Saved entry sp, patched before call.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movq(r14, rax);  // Backup rax in callee-save register.
  }

  Store(
      ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()),
      rbp);
  Store(ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()),
        rsi);
  Store(
      ExternalReference::Create(IsolateAddressId::kCFunctionAddress, isolate()),
      rbx);
}


// Finishes exit-frame setup: optionally spills all allocatable XMM
// registers, reserves |arg_stack_space| argument slots (plus Windows shadow
// space), aligns rsp to the OS frame alignment and patches the saved-sp
// slot with the final stack pointer.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kNumRegisters * kDoubleSize +
                arg_stack_space * kSystemPointerSize;
    subq(rsp, Immediate(space));
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config = RegisterConfiguration::Default();
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kSystemPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
    DCHECK(is_int8(kFrameAlignment));
    andq(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}

// Enters a full exit frame. The argument count (backed up in r14 by the
// prologue) is used to compute argv, which is kept in callee-saved r15 so
// LeaveExitFrame can find it after the C call.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles,
                                    StackFrame::Type frame_type) {
  EnterExitFramePrologue(true, frame_type);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kSystemPointerSize;
  leaq(r15, Operand(rbp, r14, times_system_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}


// Enters an exit frame for an API call: no rax backup, no double saving.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false, StackFrame::EXIT);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
2542 :
2543 :
// Tears down an exit frame created by EnterExitFrame: optionally reloads
// the spilled XMM registers, then either drops the JS arguments (using the
// argv pointer kept in r15) or simply unwinds with leave, and finally
// clears the isolate's thread-local top via LeaveExitFrameEpilogue.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config = RegisterConfiguration::Default();
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    movq(rcx, Operand(rbp, kFPOnStackSize));
    movq(rbp, Operand(rbp, 0 * kSystemPointerSize));

    // Drop everything up to and including the arguments and the receiver
    // from the caller stack.
    leaq(rsp, Operand(r15, 1 * kSystemPointerSize));

    PushReturnAddressFrom(rcx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue();
}

// Tears down an API exit frame (no doubles, no argument dropping).
void MacroAssembler::LeaveApiExitFrame() {
  movq(rsp, rbp);
  popq(rbp);

  LeaveExitFrameEpilogue();
}

// Restores the context from the isolate's thread-local top (poisoning the
// slot in debug builds) and clears the recorded C entry frame pointer.
void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address =
      ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
  Operand context_operand = ExternalReferenceAsOperand(context_address);
  movq(rsi, context_operand);
#ifdef DEBUG
  movq(context_operand, Immediate(Context::kInvalidContext));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address =
      ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
  Operand c_entry_fp_operand = ExternalReferenceAsOperand(c_entry_fp_address);
  movq(c_entry_fp_operand, Immediate(0));
}
2598 :
2599 :
#ifdef _WIN64
// The Windows x64 calling convention passes the first four arguments in
// registers.
static const int kRegisterPassedArguments = 4;
#else
// The AMD64 System V ABI passes the first six integer arguments in
// registers.
static const int kRegisterPassedArguments = 6;
#endif
2605 :
2606 :
2607 336 : void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
2608 336 : LoadTaggedPointerField(dst, NativeContextOperand());
2609 : LoadTaggedPointerField(dst, ContextOperand(dst, index));
2610 336 : }
2611 :
2612 :
2613 0 : int TurboAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2614 : // On Windows 64 stack slots are reserved by the caller for all arguments
2615 : // including the ones passed in registers, and space is always allocated for
2616 : // the four register arguments even if the function takes fewer than four
2617 : // arguments.
2618 : // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2619 : // and the caller does not reserve stack slots for them.
2620 : DCHECK_GE(num_arguments, 0);
2621 : #ifdef _WIN64
2622 : const int kMinimumStackSlots = kRegisterPassedArguments;
2623 : if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2624 : return num_arguments;
2625 : #else
2626 1528636 : if (num_arguments < kRegisterPassedArguments) return 0;
2627 94502 : return num_arguments - kRegisterPassedArguments;
2628 : #endif
2629 : }
2630 :
// Aligns rsp for a C call with |num_arguments| arguments: reserves the
// required argument slots plus one slot in which the original rsp is saved
// (CallCFunction restores it from there), then aligns to the OS frame
// alignment. Clobbers kScratchRegister.
void TurboAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  DCHECK_NE(frame_alignment, 0);
  DCHECK_GE(num_arguments, 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kSystemPointerSize));
  andq(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kSystemPointerSize),
       kScratchRegister);
}
2646 :
// Convenience overload: materializes |function|'s address in rax and
// dispatches to the register-based CallCFunction below.
void TurboAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // rax is not used to pass arguments in either x64 C calling convention,
  // so it is free to hold the callee address here.
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}
2652 :
// Calls the C function whose address is in |function|. Expects that
// PrepareCallCFunction(num_arguments) ran first; restores rsp from the slot
// PrepareCallCFunction saved it in.
void TurboAssembler::CallCFunction(Register function, int num_arguments) {
  DCHECK_LE(num_arguments, kMaxCParameters);
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  // Save the frame pointer and PC so that the stack layout remains iterable,
  // even without an ExitFrame which normally exists between JS and C frames.
  if (isolate() != nullptr) {
    Label get_pc;
    DCHECK(!AreAliased(kScratchRegister, function));
    // lea on a bound-right-after label yields the address of the next
    // instruction, i.e. the PC of this fast C call site.
    leaq(kScratchRegister, Operand(&get_pc, 0));
    bind(&get_pc);
    movq(ExternalReferenceAsOperand(
             ExternalReference::fast_c_call_caller_pc_address(isolate())),
         kScratchRegister);
    movq(ExternalReferenceAsOperand(
             ExternalReference::fast_c_call_caller_fp_address(isolate())),
         rbp);
  }

  call(function);

  // Clear the saved FP again so stack iteration does not treat a stale
  // record as an active fast C call.
  if (isolate() != nullptr) {
    // We don't unset the PC; the FP is the source of truth.
    movq(ExternalReferenceAsOperand(
             ExternalReference::fast_c_call_caller_fp_address(isolate())),
         Immediate(0));
  }

  DCHECK_NE(base::OS::ActivationFrameAlignment(), 0);
  DCHECK_GE(num_arguments, 0);
  // Undo PrepareCallCFunction: reload the original rsp from the slot just
  // above the argument area.
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kSystemPointerSize));
}
2691 :
2692 639821 : void TurboAssembler::CheckPageFlag(Register object, Register scratch, int mask,
2693 : Condition cc, Label* condition_met,
2694 : Label::Distance condition_met_distance) {
2695 : DCHECK(cc == zero || cc == not_zero);
2696 639821 : if (scratch == object) {
2697 112 : andq(scratch, Immediate(~kPageAlignmentMask));
2698 : } else {
2699 639709 : movq(scratch, Immediate(~kPageAlignmentMask));
2700 : andq(scratch, object);
2701 : }
2702 639822 : if (mask < (1 << kBitsPerByte)) {
2703 1919466 : testb(Operand(scratch, MemoryChunk::kFlagsOffset),
2704 639822 : Immediate(static_cast<uint8_t>(mask)));
2705 : } else {
2706 0 : testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
2707 : }
2708 639822 : j(cc, condition_met, condition_met_distance);
2709 639822 : }
2710 :
2711 114 : void TurboAssembler::ComputeCodeStartAddress(Register dst) {
2712 114 : Label current;
2713 114 : bind(¤t);
2714 : int pc = pc_offset();
2715 : // Load effective address to get the address of the current instruction.
2716 228 : leaq(dst, Operand(¤t, -pc));
2717 114 : }
2718 :
// Re-arms the speculation poison register with the all-ones (-1) pattern,
// i.e. the "no poison" mask.
void TurboAssembler::ResetSpeculationPoisonRegister() {
  // TODO(tebbi): Perhaps, we want to put an lfence here.
  Set(kSpeculationPoisonRegister, -1);
}
2723 :
// Emits a call to the deoptimization runtime entry at |target|, passing
// |deopt_id| to the deoptimizer in r13.
void TurboAssembler::CallForDeoptimization(Address target, int deopt_id) {
  // r13 is about to be clobbered below, so stop treating it as holding the
  // roots array for the remainder of this emission.
  NoRootArrayScope no_root_array(this);
  // Save the deopt id in r13 (we don't need the roots array from now on).
  movq(r13, Immediate(deopt_id));
  call(target, RelocInfo::RUNTIME_ENTRY);
}
2730 :
2731 : } // namespace internal
2732 122036 : } // namespace v8
2733 :
2734 : #endif // V8_TARGET_ARCH_X64
|