Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #if V8_TARGET_ARCH_X64
6 :
7 : #include "src/base/bits.h"
8 : #include "src/base/division-by-constant.h"
9 : #include "src/base/utils/random-number-generator.h"
10 : #include "src/bootstrapper.h"
11 : #include "src/callable.h"
12 : #include "src/code-factory.h"
13 : #include "src/counters.h"
14 : #include "src/debug/debug.h"
15 : #include "src/external-reference-table.h"
16 : #include "src/frames-inl.h"
17 : #include "src/globals.h"
18 : #include "src/macro-assembler.h"
19 : #include "src/objects-inl.h"
20 : #include "src/objects/smi.h"
21 : #include "src/register-configuration.h"
22 : #include "src/snapshot/embedded-data.h"
23 : #include "src/snapshot/snapshot.h"
24 : #include "src/string-constants.h"
25 : #include "src/x64/assembler-x64.h"
26 :
27 : // Satisfy cpplint check, but don't include platform-specific header. It is
28 : // included recursively via macro-assembler.h.
29 : #if 0
30 : #include "src/x64/macro-assembler-x64.h"
31 : #endif
32 :
33 : namespace v8 {
34 : namespace internal {
35 :
// Returns a stack Operand addressing argument |index| (index 0 is the first
// argument). Works with either a constant argument count or one held in a
// register, and skips the return address (plus the saved frame pointer when
// base_reg_ is not rsp) that lies between base_reg_ and the arguments.
Operand StackArgumentsAccessor::GetArgumentOperand(int index) {
  DCHECK_GE(index, 0);
  // One extra slot when the receiver is counted among the arguments.
  int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
  // rsp-based frames only skip the pushed return address; frames addressed
  // through another base register additionally skip the saved frame pointer.
  int displacement_to_last_argument =
      base_reg_ == rsp ? kPCOnStackSize : kFPOnStackSize + kPCOnStackSize;
  displacement_to_last_argument += extra_displacement_to_last_argument_;
  if (argument_count_reg_ == no_reg) {
    // argument[0] is at base_reg_ + displacement_to_last_argument +
    // (argument_count_immediate_ + receiver - 1) * kSystemPointerSize.
    DCHECK_GT(argument_count_immediate_ + receiver, 0);
    return Operand(base_reg_,
                   displacement_to_last_argument +
                       (argument_count_immediate_ + receiver - 1 - index) *
                           kSystemPointerSize);
  } else {
    // argument[0] is at base_reg_ + displacement_to_last_argument +
    // argument_count_reg_ * times_pointer_size + (receiver - 1) *
    // kSystemPointerSize.
    return Operand(base_reg_, argument_count_reg_, times_pointer_size,
                   displacement_to_last_argument +
                       (receiver - 1 - index) * kSystemPointerSize);
  }
}
59 :
// Builds an accessor from a ParameterCount, which carries the argument count
// either as an immediate or in a register; the unused representation is
// stored as 0 / no_reg respectively and distinguished in GetArgumentOperand.
StackArgumentsAccessor::StackArgumentsAccessor(
    Register base_reg, const ParameterCount& parameter_count,
    StackArgumentsAccessorReceiverMode receiver_mode,
    int extra_displacement_to_last_argument)
    : base_reg_(base_reg),
      argument_count_reg_(parameter_count.is_reg() ? parameter_count.reg()
                                                   : no_reg),
      argument_count_immediate_(
          parameter_count.is_immediate() ? parameter_count.immediate() : 0),
      receiver_mode_(receiver_mode),
      extra_displacement_to_last_argument_(
          extra_displacement_to_last_argument) {}
72 :
// Loads the value stored at external reference |source| into |destination|.
// Fast path: address the reference as a 32-bit offset from kRootRegister
// when the options allow it. Otherwise fall back to the short load_rax
// encoding (when legal) or a generic operand load.
void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && options().enable_root_array_delta_access) {
    intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
    if (is_int32(delta)) {
      movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination == rax && !options().isolate_independent_code) {
    load_rax(source);
  } else {
    movp(destination, ExternalReferenceAsOperand(source));
  }
}
88 :
89 :
// Stores |source| at the address denoted by external reference
// |destination|. Mirrors Load(): prefers a root-register-relative store,
// then the short store_rax encoding, then a generic operand store.
void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && options().enable_root_array_delta_access) {
    intptr_t delta =
        RootRegisterOffsetForExternalReference(isolate(), destination);
    if (is_int32(delta)) {
      movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source == rax && !options().isolate_independent_code) {
    store_rax(destination);
  } else {
    movp(ExternalReferenceAsOperand(destination), source);
  }
}
106 :
// Loads entry |constant_index| of the builtins constants table into
// |destination|. The table is reached through the root list (the root entry
// is immortal and immovable, per the DCHECK), so this is usable from
// isolate-independent code.
void TurboAssembler::LoadFromConstantsTable(Register destination,
                                            int constant_index) {
  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable));
  LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
  LoadTaggedPointerField(
      destination,
      FieldOperand(destination, FixedArray::OffsetOfElementAt(constant_index)));
}
115 :
116 17136 : void TurboAssembler::LoadRootRegisterOffset(Register destination,
117 : intptr_t offset) {
118 : DCHECK(is_int32(offset));
119 17136 : if (offset == 0) {
120 896 : Move(destination, kRootRegister);
121 : } else {
122 32480 : leap(destination, Operand(kRootRegister, static_cast<int32_t>(offset)));
123 : }
124 17136 : }
125 :
// Loads the value stored at kRootRegister + |offset| into |destination|.
void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
  movp(destination, Operand(kRootRegister, offset));
}
129 :
// Materializes the address of external reference |source| in |destination|.
// Tries, in order: a lea off kRootRegister (when delta access is enabled
// and fits in 32 bits), an indirect load via the external reference table
// for isolate-independent embedded code, and finally a full immediate move.
void TurboAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && options().enable_root_array_delta_access) {
    intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
    if (is_int32(delta)) {
      leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (FLAG_embedded_builtins) {
    if (root_array_available_ && options().isolate_independent_code) {
      IndirectLoadExternalReference(destination, source);
      return;
    }
  }
  Move(destination, source);
}
148 :
// Returns an Operand through which |reference| can be accessed. May emit
// code: when the reference is neither root-register-addressable nor usable
// directly, its address is first loaded into |scratch| and the returned
// Operand dereferences that register.
Operand TurboAssembler::ExternalReferenceAsOperand(ExternalReference reference,
                                                   Register scratch) {
  if (root_array_available_ && options().enable_root_array_delta_access) {
    int64_t delta =
        RootRegisterOffsetForExternalReference(isolate(), reference);
    if (is_int32(delta)) {
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  if (root_array_available_ && options().isolate_independent_code) {
    if (IsAddressableThroughRootRegister(isolate(), reference)) {
      // Some external references can be efficiently loaded as an offset from
      // kRootRegister.
      intptr_t offset =
          RootRegisterOffsetForExternalReference(isolate(), reference);
      CHECK(is_int32(offset));
      return Operand(kRootRegister, static_cast<int32_t>(offset));
    } else {
      // Otherwise, do a memory load from the external reference table.
      movp(scratch, Operand(kRootRegister,
                            RootRegisterOffsetForExternalReferenceTableEntry(
                                isolate(), reference)));
      return Operand(scratch, 0);
    }
  }
  Move(scratch, reference);
  return Operand(scratch, 0);
}
177 :
// Pushes the address of |source| onto the stack; clobbers kScratchRegister.
void MacroAssembler::PushAddress(ExternalReference source) {
  LoadAddress(kScratchRegister, source);
  Push(kScratchRegister);
}
182 :
// Loads the root-list entry |index| into |destination| via kRootRegister.
void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
  DCHECK(root_array_available_);
  movp(destination,
       Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
}
188 :
// Pushes the root-list entry |index| onto the stack.
void MacroAssembler::PushRoot(RootIndex index) {
  DCHECK(root_array_available_);
  Push(Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
}
193 :
// Compares |with| against the root-list entry |index|. Strong or read-only
// roots are compared with the tagged-width cmp_tagged; other roots may hold
// full system-pointer-sized values (see comment below) and need cmpp.
void TurboAssembler::CompareRoot(Register with, RootIndex index) {
  DCHECK(root_array_available_);
  if (IsInRange(index, RootIndex::kFirstStrongOrReadOnlyRoot,
                RootIndex::kLastStrongOrReadOnlyRoot)) {
    cmp_tagged(with,
               Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
  } else {
    // Some smi roots contain system pointer size values like stack limits.
    cmpp(with, Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
  }
}
205 :
// Memory-operand variant of CompareRoot. The root value is first loaded
// into kScratchRegister (hence the alias DCHECK on |with|), then compared
// with tagged or pointer width depending on the root kind.
void TurboAssembler::CompareRoot(Operand with, RootIndex index) {
  DCHECK(root_array_available_);
  DCHECK(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  if (IsInRange(index, RootIndex::kFirstStrongOrReadOnlyRoot,
                RootIndex::kLastStrongOrReadOnlyRoot)) {
    cmp_tagged(with, kScratchRegister);
  } else {
    // Some smi roots contain system pointer size values like stack limits.
    cmpp(with, kScratchRegister);
  }
}
218 :
// Loads a tagged heap-object pointer field. With pointer compression this
// decompresses the 32-bit field value; otherwise it is a plain movp.
// |scratch_for_debug| is only used by the debug-mode decompression check.
void TurboAssembler::LoadTaggedPointerField(Register destination,
                                            Operand field_operand,
                                            Register scratch_for_debug) {
#ifdef V8_COMPRESS_POINTERS
  DecompressTaggedPointer(destination, field_operand, scratch_for_debug);
#else
  movp(destination, field_operand);
#endif
}

// Loads a field that may hold any tagged value (smi or heap object),
// decompressing through |scratch| when pointer compression is enabled.
void TurboAssembler::LoadAnyTaggedField(Register destination,
                                        Operand field_operand, Register scratch,
                                        Register scratch_for_debug) {
#ifdef V8_COMPRESS_POINTERS
  DecompressAnyTagged(destination, field_operand, scratch, scratch_for_debug);
#else
  movp(destination, field_operand);
#endif
}

// Pushes a tagged heap-object pointer field onto the stack, decompressing
// via |scratch| when pointer compression is enabled.
void TurboAssembler::PushTaggedPointerField(Operand field_operand,
                                            Register scratch,
                                            Register scratch_for_debug) {
#ifdef V8_COMPRESS_POINTERS
  DCHECK(!AreAliased(scratch, scratch_for_debug));
  DCHECK(!field_operand.AddressUsesRegister(scratch));
  DCHECK(!field_operand.AddressUsesRegister(scratch_for_debug));
  DecompressTaggedPointer(scratch, field_operand, scratch_for_debug);
  Push(scratch);
#else
  Push(field_operand);
#endif
}

// Pushes a field holding any tagged value onto the stack, decompressing via
// |scratch1|/|scratch2| when pointer compression is enabled.
void TurboAssembler::PushTaggedAnyField(Operand field_operand,
                                        Register scratch1, Register scratch2,
                                        Register scratch_for_debug) {
#ifdef V8_COMPRESS_POINTERS
  DCHECK(!AreAliased(scratch1, scratch2, scratch_for_debug));
  DCHECK(!field_operand.AddressUsesRegister(scratch1));
  DCHECK(!field_operand.AddressUsesRegister(scratch2));
  DCHECK(!field_operand.AddressUsesRegister(scratch_for_debug));
  DecompressAnyTagged(scratch1, field_operand, scratch2, scratch_for_debug);
  Push(scratch1);
#else
  Push(field_operand);
#endif
}
267 :
// Loads the smi field at |src| and untags it into |dst|.
void TurboAssembler::SmiUntagField(Register dst, Operand src) {
  SmiUntag(dst, src);
}

// Stores an immediate tagged value into a field.
void TurboAssembler::StoreTaggedField(Operand dst_field_operand,
                                      Immediate value) {
  movp(dst_field_operand, value);
}

// Stores a tagged register value into a field. No write barrier is emitted
// here; callers needing one must use RecordWrite/RecordWriteField.
void TurboAssembler::StoreTaggedField(Operand dst_field_operand,
                                      Register value) {
  movp(dst_field_operand, value);
}
281 :
// Decompresses a tagged smi field: sign-extends the 32-bit compressed value
// to 64 bits. In debug builds, when a valid |scratch_for_debug| is given,
// the full 64-bit field is also loaded and compared against the
// decompression result; a mismatch traps with int3.
void TurboAssembler::DecompressTaggedSigned(Register destination,
                                            Operand field_operand,
                                            Register scratch_for_debug) {
  DCHECK(!AreAliased(destination, scratch_for_debug));
  RecordComment("[ DecompressTaggedSigned");
  if (DEBUG_BOOL && scratch_for_debug.is_valid()) {
    Register expected_value = scratch_for_debug;
    movq(expected_value, field_operand);
    movsxlq(destination, expected_value);
    Label check_passed;
    cmpq(destination, expected_value);
    j(equal, &check_passed);
    RecordComment("DecompressTaggedSigned failed");
    int3();
    bind(&check_passed);
  } else {
    movsxlq(destination, field_operand);
  }
  RecordComment("]");
}
302 :
// Decompresses a tagged heap-object pointer field: sign-extends the 32-bit
// compressed value and adds the isolate root held in kRootRegister. Debug
// builds verify the result against the full 64-bit field value when
// |scratch_for_debug| is valid, trapping with int3 on mismatch.
void TurboAssembler::DecompressTaggedPointer(Register destination,
                                             Operand field_operand,
                                             Register scratch_for_debug) {
  DCHECK(!AreAliased(destination, scratch_for_debug));
  RecordComment("[ DecompressTaggedPointer");
  if (DEBUG_BOOL && scratch_for_debug.is_valid()) {
    Register expected_value = scratch_for_debug;
    movq(expected_value, field_operand);
    movsxlq(destination, expected_value);
    addq(destination, kRootRegister);
    Label check_passed;
    cmpq(destination, expected_value);
    j(equal, &check_passed);
    RecordComment("DecompressTaggedPointer failed");
    int3();
    bind(&check_passed);
  } else {
    movsxlq(destination, field_operand);
    addq(destination, kRootRegister);
  }
  RecordComment("]");
}
325 :
// Decompresses a field that may hold either a smi or a heap-object pointer.
// The isolate root must be added only for heap objects, so it is selected
// branchlessly with a mask derived from the smi tag bit.
void TurboAssembler::DecompressAnyTagged(Register destination,
                                         Operand field_operand,
                                         Register scratch,
                                         Register scratch_for_debug) {
  DCHECK(!AreAliased(destination, scratch, scratch_for_debug));
  RecordComment("[ DecompressAnyTagged");
  Register expected_value = scratch_for_debug;
  if (DEBUG_BOOL && expected_value.is_valid()) {
    // Keep the full 64-bit field value around for the debug check below.
    movq(expected_value, field_operand);
    movsxlq(destination, expected_value);
  } else {
    movsxlq(destination, field_operand);
  }
  // Branchlessly compute |masked_root|:
  // masked_root = HAS_SMI_TAG(destination) ? 0 : kRootRegister;
  STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag < 32));
  Register masked_root = scratch;
  movl(masked_root, destination);
  andl(masked_root, Immediate(kSmiTagMask));
  // masked_root is now the tag bit (0 or 1); negq turns 1 into an all-ones
  // mask, so the andq yields kRootRegister for heap objects and 0 for smis.
  negq(masked_root);
  andq(masked_root, kRootRegister);
  // Now this add operation will either leave the value unchanged if it is a smi
  // or add the isolate root if it is a heap object.
  addq(destination, masked_root);
  if (DEBUG_BOOL && expected_value.is_valid()) {
    Label check_passed;
    cmpq(destination, expected_value);
    j(equal, &check_passed);
    RecordComment("Decompression failed: Tagged");
    int3();
    bind(&check_passed);
  }
  RecordComment("]");
}
360 :
// Write-barrier helper for a store of |value| into the field at
// |object| + |offset|. Computes the slot address into |dst| (clobbered),
// then delegates to RecordWrite. |offset| must be kTaggedSize-aligned.
// With the debug-code flag on, |value| and |dst| are zapped afterwards so
// stale uses surface quickly.
void MacroAssembler::RecordWriteField(Register object, int offset,
                                      Register value, Register dst,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the start
  // of the object, so the offset must be a multiple of kTaggedSize.
  DCHECK(IsAligned(offset, kTaggedSize));

  leap(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Debug check: the computed slot address must be kTaggedSize-aligned.
    Label ok;
    testb(dst, Immediate(kTaggedSize - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, RelocInfo::NONE);
    Move(dst, kZapValue, RelocInfo::NONE);
  }
}
400 :
401 313662 : void TurboAssembler::SaveRegisters(RegList registers) {
402 : DCHECK_GT(NumRegs(registers), 0);
403 5332254 : for (int i = 0; i < Register::kNumRegisters; ++i) {
404 5018592 : if ((registers >> i) & 1u) {
405 1568310 : pushq(Register::from_code(i));
406 : }
407 : }
408 313662 : }
409 :
410 313662 : void TurboAssembler::RestoreRegisters(RegList registers) {
411 : DCHECK_GT(NumRegs(registers), 0);
412 5332254 : for (int i = Register::kNumRegisters - 1; i >= 0; --i) {
413 5018592 : if ((registers >> i) & 1u) {
414 1568310 : popq(Register::from_code(i));
415 : }
416 : }
417 313662 : }
418 :
// Calls the RecordWrite builtin through its code object.
void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
  CallRecordWriteStub(
      object, address, remembered_set_action, fp_mode,
      isolate()->builtins()->builtin_handle(Builtins::kRecordWrite),
      kNullAddress);
}

// Calls the RecordWrite stub via a wasm stub-call target address instead of
// a code object (used for calls from within a wasm module).
void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
    Address wasm_target) {
  CallRecordWriteStub(object, address, remembered_set_action, fp_mode,
                      Handle<Code>::null(), wasm_target);
}
435 :
// Common implementation of the RecordWrite stub call: spills the stub's
// allocatable registers, shuffles |object|/|address| into the descriptor's
// parameter registers (carefully handling aliasing between sources and
// destinations), passes the two modes as smis, and calls exactly one of
// |code_target| or |wasm_target| (the DCHECK enforces the exclusivity).
void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
    Handle<Code> code_target, Address wasm_target) {
  DCHECK_NE(code_target.is_null(), wasm_target == kNullAddress);

  RecordWriteDescriptor descriptor;
  RegList registers = descriptor.allocatable_registers();

  SaveRegisters(registers);

  Register object_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kObject));
  Register slot_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kSlot));
  Register remembered_set_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kRememberedSet));
  Register fp_mode_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kFPMode));

  // Prepare argument registers for calling RecordWrite
  // slot_parameter <= address
  // object_parameter <= object
  if (slot_parameter != object) {
    // Normal case
    Move(slot_parameter, address);
    Move(object_parameter, object);
  } else if (object_parameter != address) {
    // Only slot_parameter and object are the same register
    // object_parameter <= object
    // slot_parameter <= address
    Move(object_parameter, object);
    Move(slot_parameter, address);
  } else {
    // slot_parameter \/ address
    // object_parameter /\ object
    xchgq(slot_parameter, object_parameter);
  }

  Smi smi_rsa = Smi::FromEnum(remembered_set_action);
  Smi smi_fm = Smi::FromEnum(fp_mode);
  Move(remembered_set_parameter, smi_rsa);
  if (smi_rsa != smi_fm) {
    Move(fp_mode_parameter, smi_fm);
  } else {
    // Both parameters hold the same smi value; copy the register instead of
    // re-materializing the immediate.
    movq(fp_mode_parameter, remembered_set_parameter);
  }
  if (code_target.is_null()) {
    // Use {near_call} for direct Wasm call within a module.
    near_call(wasm_target, RelocInfo::WASM_STUB_CALL);
  } else {
    Call(code_target, RelocInfo::CODE_TARGET);
  }

  RestoreRegisters(registers);
}
492 :
// Emits the full write-barrier sequence for a store of |value| at |address|
// inside |object|: optional inline smi check, two page-flag checks that
// skip the barrier when neither the value's nor the object's page is
// interesting, then a call to the RecordWrite stub. |value| is used as a
// scratch register by the page-flag checks after they no longer need it.
void MacroAssembler::RecordWrite(Register object, Register address,
                                 Register value, SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  DCHECK(object != value);
  DCHECK(object != address);
  DCHECK(value != address);
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Debug check: the slot at |address| must currently hold |value|.
    Label ok;
    cmp_tagged(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  CallRecordWriteStub(object, address, remembered_set_action, fp_mode);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(address, kZapValue, RelocInfo::NONE);
    Move(value, kZapValue, RelocInfo::NONE);
  }
}
551 :
552 127 : void TurboAssembler::Assert(Condition cc, AbortReason reason) {
553 127 : if (emit_debug_code()) Check(cc, reason);
554 127 : }
555 :
556 400342 : void TurboAssembler::AssertUnreachable(AbortReason reason) {
557 400342 : if (emit_debug_code()) Abort(reason);
558 400342 : }
559 :
560 185 : void TurboAssembler::Check(Condition cc, AbortReason reason) {
561 185 : Label L;
562 185 : j(cc, &L, Label::kNear);
563 185 : Abort(reason);
564 : // Control will not return here.
565 185 : bind(&L);
566 185 : }
567 :
// Emits a runtime check that rsp is aligned to the platform's activation
// frame alignment, trapping with int3 on misalignment. Emits nothing when
// the required alignment does not exceed one system pointer.
void TurboAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kSystemPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
    Label alignment_as_expected;
    testp(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
581 :
// Emits an abort sequence for |reason|. Depending on assembler options this
// is: a bare int3 trap (trap_on_abort), a direct C call to
// abort_with_reason (should_abort_hard), or a call to the Abort builtin,
// wrapped in a pretend frame when none exists. Control does not return.
void TurboAssembler::Abort(AbortReason reason) {
#ifdef DEBUG
  const char* msg = GetAbortReason(reason);
  RecordComment("Abort message: ");
  RecordComment(msg);
#endif

  // Avoid emitting call to builtin if requested.
  if (trap_on_abort()) {
    int3();
    return;
  }

  if (should_abort_hard()) {
    // We don't care if we constructed a frame. Just pretend we did.
    FrameScope assume_frame(this, StackFrame::NONE);
    movl(arg_reg_1, Immediate(static_cast<int>(reason)));
    PrepareCallCFunction(1);
    LoadAddress(rax, ExternalReference::abort_with_reason());
    call(rax);
    return;
  }

  // Pass the abort reason to the Abort builtin as a smi in rdx.
  Move(rdx, Smi::FromInt(static_cast<int>(reason)));

  if (!has_frame()) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  } else {
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  }
  // Control will not return here.
  int3();
}
618 :
// Calls runtime function |fid| through the given CEntry code object:
// argument count goes in rax, the runtime entry address in rbx, then the
// CEntry code held in |centry| (which must be rcx) is invoked.
void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
                                           Register centry) {
  const Runtime::Function* f = Runtime::FunctionForId(fid);
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, f->nargs);
  LoadAddress(rbx, ExternalReference::Create(f));
  DCHECK(!AreAliased(centry, rax, rbx));
  DCHECK(centry == rcx);
  CallCodeObject(centry);
}
632 :
// Calls runtime function |f| with |num_arguments| arguments via a CEntry
// stub built for f's result size and the requested FP-register saving.
// Argument count goes in rax, the runtime entry address in rbx.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference::Create(f));
  Handle<Code> code =
      CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
  Call(code, RelocInfo::CODE_TARGET);
}
651 :
// Tail-calls runtime function |fid|; arguments are expected to already be
// on the stack in the layout described below. For fixed-arity functions
// the count is placed in rax; variable-arity callers set rax themselves.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- rax                    : number of  arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    Set(rax, function->nargs);
  }
  JumpToExternalReference(ExternalReference::Create(fid));
}
670 :
// Jumps (tail-calls) to external reference |ext| through a CEntry stub,
// with the runtime entry address placed in rbx first.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             bool builtin_exit_frame) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  Handle<Code> code = CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs,
                                          kArgvOnStack, builtin_exit_frame);
  Jump(code, RelocInfo::CODE_TARGET);
}
679 :
// General-purpose registers spilled/restored by PushCallerSaved and
// PopCallerSaved. r12-r15 are deliberately omitted (callee-saved on all
// platforms), as are rsp and the scratch register.
static constexpr Register saved_regs[] = {rax, rcx, rdx, rbx, rbp, rsi,
                                          rdi, r8, r9, r10, r11};

static constexpr int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
684 :
// Computes, without emitting any code, the number of stack bytes that
// PushCallerSaved would occupy for the same arguments: one pointer slot per
// non-excluded register in saved_regs, plus the XMM spill area when FP
// registers are to be saved.
int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
                                                    Register exclusion1,
                                                    Register exclusion2,
                                                    Register exclusion3) const {
  int bytes = 0;
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
      bytes += kSystemPointerSize;
    }
  }

  // R12 to r15 are callee save on all platforms (and hence not counted
  // above). When FP registers are saved, account for all XMM registers.
  if (fp_mode == kSaveFPRegs) {
    bytes += kDoubleSize * XMMRegister::kNumRegisters;
  }

  return bytes;
}
704 :
// Pushes every register in saved_regs not matching an exclusion, then
// (for kSaveFPRegs) spills all XMM registers into a block below rsp.
// Returns the number of bytes pushed, matching the value computed by
// RequiredStackSizeForCallerSaved for the same arguments.
int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                    Register exclusion2, Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  int bytes = 0;
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
      pushq(reg);
      bytes += kSystemPointerSize;
    }
  }

  // R12 to r15 are callee save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    int delta = kDoubleSize * XMMRegister::kNumRegisters;
    subp(rsp, Immediate(delta));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      Movsd(Operand(rsp, i * kDoubleSize), reg);
    }
    bytes += delta;
  }

  return bytes;
}
732 :
733 677 : int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
734 : Register exclusion2, Register exclusion3) {
735 : int bytes = 0;
736 677 : if (fp_mode == kSaveFPRegs) {
737 5456 : for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
738 : XMMRegister reg = XMMRegister::from_code(i);
739 10912 : Movsd(reg, Operand(rsp, i * kDoubleSize));
740 : }
741 : int delta = kDoubleSize * XMMRegister::kNumRegisters;
742 341 : addp(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
743 : bytes += delta;
744 : }
745 :
746 7447 : for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
747 7447 : Register reg = saved_regs[i];
748 7447 : if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
749 6770 : popq(reg);
750 6770 : bytes += kSystemPointerSize;
751 : }
752 : }
753 :
754 677 : return bytes;
755 : }
756 :
// Scalar float32 -> float64 conversion; AVX encoding when available,
// SSE2 fallback otherwise.
void TurboAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtss2sd(dst, src, src);
  } else {
    cvtss2sd(dst, src);
  }
}

// As above, with a memory source.
void TurboAssembler::Cvtss2sd(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtss2sd(dst, dst, src);
  } else {
    cvtss2sd(dst, src);
  }
}

// Scalar float64 -> float32 conversion; AVX encoding when available,
// SSE2 fallback otherwise.
void TurboAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtsd2ss(dst, src, src);
  } else {
    cvtsd2ss(dst, src);
  }
}

// As above, with a memory source.
void TurboAssembler::Cvtsd2ss(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtsd2ss(dst, dst, src);
  } else {
    cvtsd2ss(dst, src);
  }
}
792 :
// int32 -> float64 conversion. The destination is zeroed first: the
// cvtsi2sd family writes only the low lanes, so xoring dst breaks the
// dependence on its previous contents.
void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtlsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtlsi2sd(dst, src);
  }
}

// As above, with a memory source.
void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtlsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtlsi2sd(dst, src);
  }
}

// int32 -> float32 conversion, with the same dependence-breaking zeroing.
void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtlsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtlsi2ss(dst, src);
  }
}

// As above, with a memory source.
void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtlsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtlsi2ss(dst, src);
  }
}
836 :
837 488 : void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
838 488 : if (CpuFeatures::IsSupported(AVX)) {
839 : CpuFeatureScope scope(this, AVX);
840 488 : vxorps(dst, dst, dst);
841 : vcvtqsi2ss(dst, dst, src);
842 : } else {
843 0 : xorps(dst, dst);
844 0 : cvtqsi2ss(dst, src);
845 : }
846 488 : }
847 :
848 0 : void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Operand src) {
849 0 : if (CpuFeatures::IsSupported(AVX)) {
850 : CpuFeatureScope scope(this, AVX);
851 0 : vxorps(dst, dst, dst);
852 : vcvtqsi2ss(dst, dst, src);
853 : } else {
854 0 : xorps(dst, dst);
855 0 : cvtqsi2ss(dst, src);
856 : }
857 0 : }
858 :
859 20507 : void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
860 20507 : if (CpuFeatures::IsSupported(AVX)) {
861 : CpuFeatureScope scope(this, AVX);
862 20199 : vxorpd(dst, dst, dst);
863 : vcvtqsi2sd(dst, dst, src);
864 : } else {
865 308 : xorpd(dst, dst);
866 308 : cvtqsi2sd(dst, src);
867 : }
868 20531 : }
869 :
870 2127 : void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Operand src) {
871 2127 : if (CpuFeatures::IsSupported(AVX)) {
872 : CpuFeatureScope scope(this, AVX);
873 2091 : vxorpd(dst, dst, dst);
874 : vcvtqsi2sd(dst, dst, src);
875 : } else {
876 36 : xorpd(dst, dst);
877 36 : cvtqsi2sd(dst, src);
878 : }
879 2127 : }
880 :
881 112 : void TurboAssembler::Cvtlui2ss(XMMRegister dst, Register src) {
882 : // Zero-extend the 32 bit value to 64 bit.
883 112 : movl(kScratchRegister, src);
884 112 : Cvtqsi2ss(dst, kScratchRegister);
885 112 : }
886 :
887 0 : void TurboAssembler::Cvtlui2ss(XMMRegister dst, Operand src) {
888 : // Zero-extend the 32 bit value to 64 bit.
889 0 : movl(kScratchRegister, src);
890 0 : Cvtqsi2ss(dst, kScratchRegister);
891 0 : }
892 :
893 445 : void TurboAssembler::Cvtlui2sd(XMMRegister dst, Register src) {
894 : // Zero-extend the 32 bit value to 64 bit.
895 445 : movl(kScratchRegister, src);
896 445 : Cvtqsi2sd(dst, kScratchRegister);
897 445 : }
898 :
899 10352 : void TurboAssembler::Cvtlui2sd(XMMRegister dst, Operand src) {
900 : // Zero-extend the 32 bit value to 64 bit.
901 10352 : movl(kScratchRegister, src);
902 10352 : Cvtqsi2sd(dst, kScratchRegister);
903 10352 : }
904 :
905 74 : void TurboAssembler::Cvtqui2ss(XMMRegister dst, Register src) {
906 74 : Label done;
907 74 : Cvtqsi2ss(dst, src);
908 74 : testq(src, src);
909 74 : j(positive, &done, Label::kNear);
910 :
911 : // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
912 74 : if (src != kScratchRegister) movq(kScratchRegister, src);
913 : shrq(kScratchRegister, Immediate(1));
914 : // The LSB is shifted into CF. If it is set, set the LSB in {tmp}.
915 74 : Label msb_not_set;
916 74 : j(not_carry, &msb_not_set, Label::kNear);
917 74 : orq(kScratchRegister, Immediate(1));
918 74 : bind(&msb_not_set);
919 74 : Cvtqsi2ss(dst, kScratchRegister);
920 74 : addss(dst, dst);
921 74 : bind(&done);
922 74 : }
923 :
924 0 : void TurboAssembler::Cvtqui2ss(XMMRegister dst, Operand src) {
925 0 : movq(kScratchRegister, src);
926 0 : Cvtqui2ss(dst, kScratchRegister);
927 0 : }
928 :
929 3780 : void TurboAssembler::Cvtqui2sd(XMMRegister dst, Register src) {
930 3780 : Label done;
931 3780 : Cvtqsi2sd(dst, src);
932 3780 : testq(src, src);
933 3780 : j(positive, &done, Label::kNear);
934 :
935 : // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
936 3780 : if (src != kScratchRegister) movq(kScratchRegister, src);
937 : shrq(kScratchRegister, Immediate(1));
938 : // The LSB is shifted into CF. If it is set, set the LSB in {tmp}.
939 3779 : Label msb_not_set;
940 3779 : j(not_carry, &msb_not_set, Label::kNear);
941 3780 : orq(kScratchRegister, Immediate(1));
942 3780 : bind(&msb_not_set);
943 3780 : Cvtqsi2sd(dst, kScratchRegister);
944 3780 : addsd(dst, dst);
945 3780 : bind(&done);
946 3780 : }
947 :
948 1232 : void TurboAssembler::Cvtqui2sd(XMMRegister dst, Operand src) {
949 1232 : movq(kScratchRegister, src);
950 1232 : Cvtqui2sd(dst, kScratchRegister);
951 1232 : }
952 :
953 660 : void TurboAssembler::Cvttss2si(Register dst, XMMRegister src) {
954 660 : if (CpuFeatures::IsSupported(AVX)) {
955 : CpuFeatureScope scope(this, AVX);
956 660 : vcvttss2si(dst, src);
957 : } else {
958 0 : cvttss2si(dst, src);
959 : }
960 660 : }
961 :
962 0 : void TurboAssembler::Cvttss2si(Register dst, Operand src) {
963 0 : if (CpuFeatures::IsSupported(AVX)) {
964 : CpuFeatureScope scope(this, AVX);
965 0 : vcvttss2si(dst, src);
966 : } else {
967 0 : cvttss2si(dst, src);
968 : }
969 0 : }
970 :
971 108706 : void TurboAssembler::Cvttsd2si(Register dst, XMMRegister src) {
972 108706 : if (CpuFeatures::IsSupported(AVX)) {
973 : CpuFeatureScope scope(this, AVX);
974 108224 : vcvttsd2si(dst, src);
975 : } else {
976 482 : cvttsd2si(dst, src);
977 : }
978 108706 : }
979 :
980 24049 : void TurboAssembler::Cvttsd2si(Register dst, Operand src) {
981 24049 : if (CpuFeatures::IsSupported(AVX)) {
982 : CpuFeatureScope scope(this, AVX);
983 24049 : vcvttsd2si(dst, src);
984 : } else {
985 0 : cvttsd2si(dst, src);
986 : }
987 24049 : }
988 :
989 461 : void TurboAssembler::Cvttss2siq(Register dst, XMMRegister src) {
990 461 : if (CpuFeatures::IsSupported(AVX)) {
991 : CpuFeatureScope scope(this, AVX);
992 461 : vcvttss2siq(dst, src);
993 : } else {
994 0 : cvttss2siq(dst, src);
995 : }
996 461 : }
997 :
998 0 : void TurboAssembler::Cvttss2siq(Register dst, Operand src) {
999 0 : if (CpuFeatures::IsSupported(AVX)) {
1000 : CpuFeatureScope scope(this, AVX);
1001 0 : vcvttss2siq(dst, src);
1002 : } else {
1003 0 : cvttss2siq(dst, src);
1004 : }
1005 0 : }
1006 :
1007 66264 : void TurboAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
1008 66264 : if (CpuFeatures::IsSupported(AVX)) {
1009 : CpuFeatureScope scope(this, AVX);
1010 65821 : vcvttsd2siq(dst, src);
1011 : } else {
1012 443 : cvttsd2siq(dst, src);
1013 : }
1014 66293 : }
1015 :
1016 1 : void TurboAssembler::Cvttsd2siq(Register dst, Operand src) {
1017 1 : if (CpuFeatures::IsSupported(AVX)) {
1018 : CpuFeatureScope scope(this, AVX);
1019 1 : vcvttsd2siq(dst, src);
1020 : } else {
1021 0 : cvttsd2siq(dst, src);
1022 : }
1023 1 : }
1024 :
1025 : namespace {
1026 : template <typename OperandOrXMMRegister, bool is_double>
1027 4050 : void ConvertFloatToUint64(TurboAssembler* tasm, Register dst,
1028 : OperandOrXMMRegister src, Label* fail) {
1029 4050 : Label success;
1030 : // There does not exist a native float-to-uint instruction, so we have to use
1031 : // a float-to-int, and postprocess the result.
1032 : if (is_double) {
1033 3938 : tasm->Cvttsd2siq(dst, src);
1034 : } else {
1035 112 : tasm->Cvttss2siq(dst, src);
1036 : }
1037 : // If the result of the conversion is positive, we are already done.
1038 4050 : tasm->testq(dst, dst);
1039 4050 : tasm->j(positive, &success);
1040 : // The result of the first conversion was negative, which means that the
1041 : // input value was not within the positive int64 range. We subtract 2^63
1042 : // and convert it again to see if it is within the uint64 range.
1043 : if (is_double) {
1044 : tasm->Move(kScratchDoubleReg, -9223372036854775808.0);
1045 3938 : tasm->addsd(kScratchDoubleReg, src);
1046 3938 : tasm->Cvttsd2siq(dst, kScratchDoubleReg);
1047 : } else {
1048 : tasm->Move(kScratchDoubleReg, -9223372036854775808.0f);
1049 112 : tasm->addss(kScratchDoubleReg, src);
1050 112 : tasm->Cvttss2siq(dst, kScratchDoubleReg);
1051 : }
1052 : tasm->testq(dst, dst);
1053 : // The only possible negative value here is 0x80000000000000000, which is
1054 : // used on x64 to indicate an integer overflow.
1055 4050 : tasm->j(negative, fail ? fail : &success);
1056 : // The input value is within uint64 range and the second conversion worked
1057 : // successfully, but we still have to undo the subtraction we did
1058 : // earlier.
1059 4050 : tasm->Set(kScratchRegister, 0x8000000000000000);
1060 4050 : tasm->orq(dst, kScratchRegister);
1061 4050 : tasm->bind(&success);
1062 4050 : }
1063 : } // namespace
1064 :
1065 0 : void TurboAssembler::Cvttsd2uiq(Register dst, Operand src, Label* success) {
1066 0 : ConvertFloatToUint64<Operand, true>(this, dst, src, success);
1067 0 : }
1068 :
1069 3938 : void TurboAssembler::Cvttsd2uiq(Register dst, XMMRegister src, Label* success) {
1070 3938 : ConvertFloatToUint64<XMMRegister, true>(this, dst, src, success);
1071 3938 : }
1072 :
1073 0 : void TurboAssembler::Cvttss2uiq(Register dst, Operand src, Label* success) {
1074 0 : ConvertFloatToUint64<Operand, false>(this, dst, src, success);
1075 0 : }
1076 :
1077 112 : void TurboAssembler::Cvttss2uiq(Register dst, XMMRegister src, Label* success) {
1078 112 : ConvertFloatToUint64<XMMRegister, false>(this, dst, src, success);
1079 112 : }
1080 :
1081 45 : void MacroAssembler::Load(Register dst, Operand src, Representation r) {
1082 : DCHECK(!r.IsDouble());
1083 45 : if (r.IsInteger8()) {
1084 5 : movsxbq(dst, src);
1085 40 : } else if (r.IsUInteger8()) {
1086 5 : movzxbl(dst, src);
1087 35 : } else if (r.IsInteger16()) {
1088 5 : movsxwq(dst, src);
1089 30 : } else if (r.IsUInteger16()) {
1090 5 : movzxwl(dst, src);
1091 25 : } else if (r.IsInteger32()) {
1092 5 : movl(dst, src);
1093 : } else {
1094 20 : movp(dst, src);
1095 : }
1096 45 : }
1097 :
1098 45 : void MacroAssembler::Store(Operand dst, Register src, Representation r) {
1099 : DCHECK(!r.IsDouble());
1100 45 : if (r.IsInteger8() || r.IsUInteger8()) {
1101 10 : movb(dst, src);
1102 35 : } else if (r.IsInteger16() || r.IsUInteger16()) {
1103 10 : movw(dst, src);
1104 25 : } else if (r.IsInteger32()) {
1105 5 : movl(dst, src);
1106 : } else {
1107 20 : if (r.IsHeapObject()) {
1108 5 : AssertNotSmi(src);
1109 15 : } else if (r.IsSmi()) {
1110 5 : AssertSmi(src);
1111 : }
1112 20 : movp(dst, src);
1113 : }
1114 45 : }
1115 :
1116 2608895 : void TurboAssembler::Set(Register dst, int64_t x) {
1117 2608895 : if (x == 0) {
1118 508501 : xorl(dst, dst);
1119 2100394 : } else if (is_uint32(x)) {
1120 459288 : movl(dst, Immediate(static_cast<uint32_t>(x)));
1121 1641106 : } else if (is_int32(x)) {
1122 178905 : movq(dst, Immediate(static_cast<int32_t>(x)));
1123 : } else {
1124 1462201 : movq(dst, x);
1125 : }
1126 2608925 : }
1127 :
1128 13355 : void TurboAssembler::Set(Operand dst, intptr_t x) {
1129 : if (kSystemPointerSize == kInt64Size) {
1130 13355 : if (is_int32(x)) {
1131 9069 : movp(dst, Immediate(static_cast<int32_t>(x)));
1132 : } else {
1133 4286 : Set(kScratchRegister, x);
1134 4286 : movp(dst, kScratchRegister);
1135 : }
1136 : } else {
1137 : movp(dst, Immediate(static_cast<int32_t>(x)));
1138 : }
1139 13355 : }
1140 :
1141 :
1142 : // ----------------------------------------------------------------------------
1143 : // Smi tagging, untagging and tag detection.
1144 :
1145 168 : Register TurboAssembler::GetSmiConstant(Smi source) {
1146 : STATIC_ASSERT(kSmiTag == 0);
1147 : int value = source->value();
1148 168 : if (value == 0) {
1149 0 : xorl(kScratchRegister, kScratchRegister);
1150 0 : return kScratchRegister;
1151 : }
1152 168 : Move(kScratchRegister, source);
1153 168 : return kScratchRegister;
1154 : }
1155 :
1156 1923052 : void TurboAssembler::Move(Register dst, Smi source) {
1157 : STATIC_ASSERT(kSmiTag == 0);
1158 : int value = source->value();
1159 1923052 : if (value == 0) {
1160 603370 : xorl(dst, dst);
1161 : } else {
1162 : Move(dst, source.ptr(), RelocInfo::NONE);
1163 : }
1164 1923052 : }
1165 :
1166 6031866 : void TurboAssembler::Move(Register dst, ExternalReference ext) {
1167 : if (FLAG_embedded_builtins) {
1168 6031866 : if (root_array_available_ && options().isolate_independent_code) {
1169 768888 : IndirectLoadExternalReference(dst, ext);
1170 6800775 : return;
1171 : }
1172 : }
1173 5262978 : movp(dst, ext.address(), RelocInfo::EXTERNAL_REFERENCE);
1174 : }
1175 :
1176 1582 : void MacroAssembler::SmiTag(Register dst, Register src) {
1177 : STATIC_ASSERT(kSmiTag == 0);
1178 1582 : if (dst != src) {
1179 361 : movp(dst, src);
1180 : }
1181 : DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
1182 1582 : shlp(dst, Immediate(kSmiShift));
1183 1582 : }
1184 :
1185 13638 : void TurboAssembler::SmiUntag(Register dst, Register src) {
1186 : STATIC_ASSERT(kSmiTag == 0);
1187 13638 : if (dst != src) {
1188 0 : movp(dst, src);
1189 : }
1190 : DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
1191 13638 : sarp(dst, Immediate(kSmiShift));
1192 13638 : }
1193 :
1194 1904 : void TurboAssembler::SmiUntag(Register dst, Operand src) {
1195 : if (SmiValuesAre32Bits()) {
1196 3808 : movl(dst, Operand(src, kSmiShift / kBitsPerByte));
1197 : // Sign extend to 64-bit.
1198 1904 : movsxlq(dst, dst);
1199 : } else {
1200 : DCHECK(SmiValuesAre31Bits());
1201 : movp(dst, src);
1202 : sarp(dst, Immediate(kSmiShift));
1203 : }
1204 1904 : }
1205 :
1206 165 : void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
1207 165 : AssertSmi(smi1);
1208 165 : AssertSmi(smi2);
1209 165 : cmpp(smi1, smi2);
1210 165 : }
1211 :
1212 224 : void MacroAssembler::SmiCompare(Register dst, Smi src) {
1213 224 : AssertSmi(dst);
1214 224 : Cmp(dst, src);
1215 224 : }
1216 :
1217 224 : void MacroAssembler::Cmp(Register dst, Smi src) {
1218 : DCHECK_NE(dst, kScratchRegister);
1219 224 : if (src->value() == 0) {
1220 56 : test_tagged(dst, dst);
1221 : } else {
1222 168 : Register constant_reg = GetSmiConstant(src);
1223 168 : cmp_tagged(dst, constant_reg);
1224 : }
1225 224 : }
1226 :
1227 0 : void MacroAssembler::SmiCompare(Register dst, Operand src) {
1228 0 : AssertSmi(dst);
1229 0 : AssertSmi(src);
1230 0 : cmp_tagged(dst, src);
1231 0 : }
1232 :
1233 0 : void MacroAssembler::SmiCompare(Operand dst, Register src) {
1234 0 : AssertSmi(dst);
1235 0 : AssertSmi(src);
1236 0 : cmp_tagged(dst, src);
1237 0 : }
1238 :
1239 0 : void MacroAssembler::SmiCompare(Operand dst, Smi src) {
1240 0 : AssertSmi(dst);
1241 : if (SmiValuesAre32Bits()) {
1242 0 : cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
1243 : } else {
1244 : DCHECK(SmiValuesAre31Bits());
1245 : cmpl(dst, Immediate(src));
1246 : }
1247 0 : }
1248 :
1249 0 : void MacroAssembler::Cmp(Operand dst, Smi src) {
1250 : // The Operand cannot use the smi register.
1251 0 : Register smi_reg = GetSmiConstant(src);
1252 : DCHECK(!dst.AddressUsesRegister(smi_reg));
1253 0 : cmp_tagged(dst, smi_reg);
1254 0 : }
1255 :
1256 :
1257 40 : Condition TurboAssembler::CheckSmi(Register src) {
1258 : STATIC_ASSERT(kSmiTag == 0);
1259 294110 : testb(src, Immediate(kSmiTagMask));
1260 40 : return zero;
1261 : }
1262 :
1263 0 : Condition TurboAssembler::CheckSmi(Operand src) {
1264 : STATIC_ASSERT(kSmiTag == 0);
1265 0 : testb(src, Immediate(kSmiTagMask));
1266 0 : return zero;
1267 : }
1268 :
1269 294014 : void TurboAssembler::JumpIfSmi(Register src, Label* on_smi,
1270 : Label::Distance near_jump) {
1271 : Condition smi = CheckSmi(src);
1272 294015 : j(smi, on_smi, near_jump);
1273 294015 : }
1274 :
1275 56 : void MacroAssembler::JumpIfNotSmi(Register src,
1276 : Label* on_not_smi,
1277 : Label::Distance near_jump) {
1278 : Condition smi = CheckSmi(src);
1279 56 : j(NegateCondition(smi), on_not_smi, near_jump);
1280 56 : }
1281 :
1282 0 : void MacroAssembler::JumpIfNotSmi(Operand src, Label* on_not_smi,
1283 : Label::Distance near_jump) {
1284 : Condition smi = CheckSmi(src);
1285 0 : j(NegateCondition(smi), on_not_smi, near_jump);
1286 0 : }
1287 :
1288 0 : void MacroAssembler::SmiAddConstant(Operand dst, Smi constant) {
1289 0 : if (constant->value() != 0) {
1290 : if (SmiValuesAre32Bits()) {
1291 : addl(Operand(dst, kSmiShift / kBitsPerByte),
1292 0 : Immediate(constant->value()));
1293 : } else {
1294 : DCHECK(SmiValuesAre31Bits());
1295 : if (kSystemPointerSize == kInt64Size) {
1296 : // Sign-extend value after addition
1297 : movl(kScratchRegister, dst);
1298 : addl(kScratchRegister, Immediate(constant));
1299 : movsxlq(kScratchRegister, kScratchRegister);
1300 : movq(dst, kScratchRegister);
1301 : } else {
1302 : DCHECK_EQ(kSmiShiftSize, 32);
1303 : addp(dst, Immediate(constant));
1304 : }
1305 : }
1306 : }
1307 0 : }
1308 :
1309 568 : SmiIndex MacroAssembler::SmiToIndex(Register dst,
1310 : Register src,
1311 : int shift) {
1312 : if (SmiValuesAre32Bits()) {
1313 : DCHECK(is_uint6(shift));
1314 : // There is a possible optimization if shift is in the range 60-63, but that
1315 : // will (and must) never happen.
1316 568 : if (dst != src) {
1317 200 : movp(dst, src);
1318 : }
1319 568 : if (shift < kSmiShift) {
1320 568 : sarp(dst, Immediate(kSmiShift - shift));
1321 : } else {
1322 0 : shlp(dst, Immediate(shift - kSmiShift));
1323 : }
1324 568 : return SmiIndex(dst, times_1);
1325 : } else {
1326 : DCHECK(SmiValuesAre31Bits());
1327 : if (dst != src) {
1328 : mov_tagged(dst, src);
1329 : }
1330 : // We have to sign extend the index register to 64-bit as the SMI might
1331 : // be negative.
1332 : movsxlq(dst, dst);
1333 : if (shift < kSmiShift) {
1334 : sarq(dst, Immediate(kSmiShift - shift));
1335 : } else if (shift != kSmiShift) {
1336 : if (shift - kSmiShift <= static_cast<int>(times_8)) {
1337 : return SmiIndex(dst, static_cast<ScaleFactor>(shift - kSmiShift));
1338 : }
1339 : shlq(dst, Immediate(shift - kSmiShift));
1340 : }
1341 : return SmiIndex(dst, times_1);
1342 : }
1343 : }
1344 :
1345 56 : void TurboAssembler::Push(Smi source) {
1346 56 : intptr_t smi = static_cast<intptr_t>(source.ptr());
1347 56 : if (is_int32(smi)) {
1348 56 : Push(Immediate(static_cast<int32_t>(smi)));
1349 56 : return;
1350 : }
1351 0 : int first_byte_set = base::bits::CountTrailingZeros64(smi) / 8;
1352 0 : int last_byte_set = (63 - base::bits::CountLeadingZeros64(smi)) / 8;
1353 0 : if (first_byte_set == last_byte_set && kSystemPointerSize == kInt64Size) {
1354 : // This sequence has only 7 bytes, compared to the 12 bytes below.
1355 : Push(Immediate(0));
1356 : movb(Operand(rsp, first_byte_set),
1357 0 : Immediate(static_cast<int8_t>(smi >> (8 * first_byte_set))));
1358 0 : return;
1359 : }
1360 0 : Register constant = GetSmiConstant(source);
1361 : Push(constant);
1362 : }
1363 :
1364 : // ----------------------------------------------------------------------------
1365 :
1366 748407 : void TurboAssembler::Move(Register dst, Register src) {
1367 748407 : if (dst != src) {
1368 618953 : movp(dst, src);
1369 : }
1370 748407 : }
1371 :
1372 1289286 : void TurboAssembler::MoveNumber(Register dst, double value) {
1373 : int32_t smi;
1374 1289286 : if (DoubleToSmiInteger(value, &smi)) {
1375 1245078 : Move(dst, Smi::FromInt(smi));
1376 : } else {
1377 44208 : movp_heap_number(dst, value);
1378 : }
1379 1289286 : }
1380 :
1381 148946 : void TurboAssembler::Move(XMMRegister dst, uint32_t src) {
1382 148946 : if (src == 0) {
1383 : Xorps(dst, dst);
1384 : } else {
1385 : unsigned nlz = base::bits::CountLeadingZeros(src);
1386 : unsigned ntz = base::bits::CountTrailingZeros(src);
1387 : unsigned pop = base::bits::CountPopulation(src);
1388 : DCHECK_NE(0u, pop);
1389 138008 : if (pop + ntz + nlz == 32) {
1390 : Pcmpeqd(dst, dst);
1391 61215 : if (ntz) Pslld(dst, static_cast<byte>(ntz + nlz));
1392 61215 : if (nlz) Psrld(dst, static_cast<byte>(nlz));
1393 : } else {
1394 76794 : movl(kScratchRegister, Immediate(src));
1395 : Movd(dst, kScratchRegister);
1396 : }
1397 : }
1398 148947 : }
1399 :
1400 455157 : void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
1401 455157 : if (src == 0) {
1402 : Xorpd(dst, dst);
1403 : } else {
1404 : unsigned nlz = base::bits::CountLeadingZeros(src);
1405 : unsigned ntz = base::bits::CountTrailingZeros(src);
1406 : unsigned pop = base::bits::CountPopulation(src);
1407 : DCHECK_NE(0u, pop);
1408 351438 : if (pop + ntz + nlz == 64) {
1409 : Pcmpeqd(dst, dst);
1410 231483 : if (ntz) Psllq(dst, static_cast<byte>(ntz + nlz));
1411 231481 : if (nlz) Psrlq(dst, static_cast<byte>(nlz));
1412 : } else {
1413 119961 : uint32_t lower = static_cast<uint32_t>(src);
1414 119961 : uint32_t upper = static_cast<uint32_t>(src >> 32);
1415 119961 : if (upper == 0) {
1416 34 : Move(dst, lower);
1417 : } else {
1418 119927 : movq(kScratchRegister, src);
1419 : Movq(dst, kScratchRegister);
1420 : }
1421 : }
1422 : }
1423 455177 : }
1424 :
1425 : // ----------------------------------------------------------------------------
1426 :
1427 5 : void MacroAssembler::Absps(XMMRegister dst) {
1428 : Andps(dst, ExternalReferenceAsOperand(
1429 5 : ExternalReference::address_of_float_abs_constant()));
1430 5 : }
1431 :
1432 5 : void MacroAssembler::Negps(XMMRegister dst) {
1433 : Xorps(dst, ExternalReferenceAsOperand(
1434 5 : ExternalReference::address_of_float_neg_constant()));
1435 5 : }
1436 :
1437 5 : void MacroAssembler::Abspd(XMMRegister dst) {
1438 : Andps(dst, ExternalReferenceAsOperand(
1439 5 : ExternalReference::address_of_double_abs_constant()));
1440 5 : }
1441 :
1442 5 : void MacroAssembler::Negpd(XMMRegister dst) {
1443 : Xorps(dst, ExternalReferenceAsOperand(
1444 5 : ExternalReference::address_of_double_neg_constant()));
1445 5 : }
1446 :
1447 0 : void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
1448 : AllowDeferredHandleDereference smi_check;
1449 0 : if (source->IsSmi()) {
1450 0 : Cmp(dst, Smi::cast(*source));
1451 : } else {
1452 0 : Move(kScratchRegister, Handle<HeapObject>::cast(source));
1453 0 : cmp_tagged(dst, kScratchRegister);
1454 : }
1455 0 : }
1456 :
1457 112 : void MacroAssembler::Cmp(Operand dst, Handle<Object> source) {
1458 : AllowDeferredHandleDereference smi_check;
1459 224 : if (source->IsSmi()) {
1460 0 : Cmp(dst, Smi::cast(*source));
1461 : } else {
1462 112 : Move(kScratchRegister, Handle<HeapObject>::cast(source));
1463 112 : cmp_tagged(dst, kScratchRegister);
1464 : }
1465 112 : }
1466 :
1467 21 : void TurboAssembler::Push(Handle<HeapObject> source) {
1468 21 : Move(kScratchRegister, source);
1469 : Push(kScratchRegister);
1470 21 : }
1471 :
1472 6547893 : void TurboAssembler::Move(Register result, Handle<HeapObject> object,
1473 : RelocInfo::Mode rmode) {
1474 : if (FLAG_embedded_builtins) {
1475 6547893 : if (root_array_available_ && options().isolate_independent_code) {
1476 49280 : IndirectLoadConstant(result, object);
1477 6597172 : return;
1478 : }
1479 : }
1480 12997226 : movp(result, object.address(), rmode);
1481 : }
1482 :
1483 0 : void TurboAssembler::Move(Operand dst, Handle<HeapObject> object,
1484 : RelocInfo::Mode rmode) {
1485 0 : Move(kScratchRegister, object, rmode);
1486 0 : movp(dst, kScratchRegister);
1487 0 : }
1488 :
1489 2227 : void TurboAssembler::MoveStringConstant(Register result,
1490 : const StringConstantBase* string,
1491 : RelocInfo::Mode rmode) {
1492 2227 : movp_string(result, string);
1493 2227 : }
1494 :
1495 288 : void MacroAssembler::Drop(int stack_elements) {
1496 288 : if (stack_elements > 0) {
1497 576 : addp(rsp, Immediate(stack_elements * kSystemPointerSize));
1498 : }
1499 288 : }
1500 :
1501 :
1502 56 : void MacroAssembler::DropUnderReturnAddress(int stack_elements,
1503 : Register scratch) {
1504 : DCHECK_GT(stack_elements, 0);
1505 56 : if (kSystemPointerSize == kInt64Size && stack_elements == 1) {
1506 56 : popq(MemOperand(rsp, 0));
1507 112 : return;
1508 : }
1509 :
1510 : PopReturnAddressTo(scratch);
1511 0 : Drop(stack_elements);
1512 : PushReturnAddressFrom(scratch);
1513 : }
1514 :
1515 33130 : void TurboAssembler::Push(Register src) {
1516 : if (kSystemPointerSize == kInt64Size) {
1517 687510 : pushq(src);
1518 : } else {
1519 : // x32 uses 64-bit push for rbp in the prologue.
1520 : DCHECK(src.code() != rbp.code());
1521 : leal(rsp, Operand(rsp, -4));
1522 : movp(Operand(rsp, 0), src);
1523 : }
1524 33130 : }
1525 :
1526 58096 : void TurboAssembler::Push(Operand src) {
1527 : if (kSystemPointerSize == kInt64Size) {
1528 59496 : pushq(src);
1529 : } else {
1530 : movp(kScratchRegister, src);
1531 : leal(rsp, Operand(rsp, -4));
1532 : movp(Operand(rsp, 0), kScratchRegister);
1533 : }
1534 58098 : }
1535 :
1536 812314 : void MacroAssembler::PushQuad(Operand src) {
1537 : if (kSystemPointerSize == kInt64Size) {
1538 812314 : pushq(src);
1539 : } else {
1540 : movp(kScratchRegister, src);
1541 : pushq(kScratchRegister);
1542 : }
1543 812317 : }
1544 :
1545 171562 : void TurboAssembler::Push(Immediate value) {
1546 : if (kSystemPointerSize == kInt64Size) {
1547 2063923 : pushq(value);
1548 : } else {
1549 : leal(rsp, Operand(rsp, -4));
1550 : movp(Operand(rsp, 0), value);
1551 : }
1552 171562 : }
1553 :
1554 :
1555 0 : void MacroAssembler::PushImm32(int32_t imm32) {
1556 : if (kSystemPointerSize == kInt64Size) {
1557 0 : pushq_imm32(imm32);
1558 : } else {
1559 : leal(rsp, Operand(rsp, -4));
1560 : movp(Operand(rsp, 0), Immediate(imm32));
1561 : }
1562 0 : }
1563 :
1564 :
1565 2264 : void MacroAssembler::Pop(Register dst) {
1566 : if (kSystemPointerSize == kInt64Size) {
1567 3384 : popq(dst);
1568 : } else {
1569 : // x32 uses 64-bit pop for rbp in the epilogue.
1570 : DCHECK(dst.code() != rbp.code());
1571 : movp(dst, Operand(rsp, 0));
1572 : leal(rsp, Operand(rsp, 4));
1573 : }
1574 2264 : }
1575 :
1576 45296 : void MacroAssembler::Pop(Operand dst) {
1577 : if (kSystemPointerSize == kInt64Size) {
1578 45464 : popq(dst);
1579 : } else {
1580 : Register scratch = dst.AddressUsesRegister(kScratchRegister)
1581 : ? kRootRegister : kScratchRegister;
1582 : movp(scratch, Operand(rsp, 0));
1583 : movp(dst, scratch);
1584 : leal(rsp, Operand(rsp, 4));
1585 : if (scratch == kRootRegister) {
1586 : // Restore kRootRegister.
1587 : InitializeRootRegister();
1588 : }
1589 : }
1590 45297 : }
1591 :
1592 722052 : void MacroAssembler::PopQuad(Operand dst) {
1593 : if (kSystemPointerSize == kInt64Size) {
1594 722052 : popq(dst);
1595 : } else {
1596 : popq(kScratchRegister);
1597 : movp(dst, kScratchRegister);
1598 : }
1599 722057 : }
1600 :
1601 0 : void TurboAssembler::Jump(ExternalReference ext) {
1602 0 : LoadAddress(kScratchRegister, ext);
1603 0 : jmp(kScratchRegister);
1604 0 : }
1605 :
1606 0 : void TurboAssembler::Jump(Operand op) {
1607 : if (kSystemPointerSize == kInt64Size) {
1608 0 : jmp(op);
1609 : } else {
1610 : movp(kScratchRegister, op);
1611 : jmp(kScratchRegister);
1612 : }
1613 0 : }
1614 :
1615 1280 : void TurboAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1616 : Move(kScratchRegister, destination, rmode);
1617 1280 : jmp(kScratchRegister);
1618 1280 : }
1619 :
1620 504779 : void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
1621 : Condition cc) {
1622 : DCHECK_IMPLIES(options().isolate_independent_code,
1623 : Builtins::IsIsolateIndependentBuiltin(*code_object));
1624 504779 : if (options().inline_offheap_trampolines) {
1625 456355 : int builtin_index = Builtins::kNoBuiltinId;
1626 456355 : if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1627 : Builtins::IsIsolateIndependent(builtin_index)) {
1628 456110 : Label skip;
1629 456110 : if (cc != always) {
1630 456104 : if (cc == never) return;
1631 456103 : j(NegateCondition(cc), &skip, Label::kNear);
1632 : }
1633 : // Inline the trampoline.
1634 456085 : RecordCommentForOffHeapTrampoline(builtin_index);
1635 456082 : CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1636 456086 : EmbeddedData d = EmbeddedData::FromBlob();
1637 456086 : Address entry = d.InstructionStartOfBuiltin(builtin_index);
1638 : Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1639 456110 : jmp(kScratchRegister);
1640 456105 : bind(&skip);
1641 456115 : return;
1642 : }
1643 : }
1644 48684 : j(cc, code_object, rmode);
1645 : }
1646 :
1647 84672 : void MacroAssembler::JumpToInstructionStream(Address entry) {
1648 : Move(kOffHeapTrampolineRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1649 84672 : jmp(kOffHeapTrampolineRegister);
1650 84672 : }
1651 :
1652 0 : void TurboAssembler::Call(ExternalReference ext) {
1653 0 : LoadAddress(kScratchRegister, ext);
1654 0 : call(kScratchRegister);
1655 0 : }
1656 :
1657 11790 : void TurboAssembler::Call(Operand op) {
1658 11790 : if (kSystemPointerSize == kInt64Size && !CpuFeatures::IsSupported(ATOM)) {
1659 11790 : call(op);
1660 : } else {
1661 0 : movp(kScratchRegister, op);
1662 0 : call(kScratchRegister);
1663 : }
1664 11790 : }
1665 :
1666 0 : void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1667 : Move(kScratchRegister, destination, rmode);
1668 0 : call(kScratchRegister);
1669 0 : }
1670 :
1671 4408887 : void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1672 : DCHECK_IMPLIES(options().isolate_independent_code,
1673 : Builtins::IsIsolateIndependentBuiltin(*code_object));
1674 4408887 : if (options().inline_offheap_trampolines) {
1675 3523629 : int builtin_index = Builtins::kNoBuiltinId;
1676 3523629 : if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1677 : Builtins::IsIsolateIndependent(builtin_index)) {
1678 : // Inline the trampoline.
1679 3503362 : RecordCommentForOffHeapTrampoline(builtin_index);
1680 3503367 : CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1681 3503369 : EmbeddedData d = EmbeddedData::FromBlob();
1682 3503369 : Address entry = d.InstructionStartOfBuiltin(builtin_index);
1683 : Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1684 3503368 : call(kScratchRegister);
1685 4408915 : return;
1686 : }
1687 : }
1688 : DCHECK(RelocInfo::IsCodeTarget(rmode));
1689 905538 : call(code_object, rmode);
1690 : }
1691 :
1692 11790 : void TurboAssembler::CallBuiltinPointer(Register builtin_pointer) {
1693 : #if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1694 : STATIC_ASSERT(kSystemPointerSize == 8);
1695 : STATIC_ASSERT(kSmiShiftSize == 0);
1696 : STATIC_ASSERT(kSmiTagSize == 1);
1697 : STATIC_ASSERT(kSmiTag == 0);
1698 :
1699 : // The builtin_pointer register contains the builtin index as a Smi.
1700 : // Untagging is folded into the indexing operand below (we use times_4 instead
1701 : // of times_8 since smis are already shifted by one).
1702 : Call(Operand(kRootRegister, builtin_pointer, times_4,
1703 : IsolateData::builtin_entry_table_offset()));
1704 : #else // V8_COMPRESS_POINTERS
1705 : STATIC_ASSERT(kSystemPointerSize == 8);
1706 : STATIC_ASSERT(kSmiShiftSize == 31);
1707 : STATIC_ASSERT(kSmiTagSize == 1);
1708 : STATIC_ASSERT(kSmiTag == 0);
1709 :
1710 : // The builtin_pointer register contains the builtin index as a Smi.
1711 11790 : SmiUntag(builtin_pointer, builtin_pointer);
1712 : Call(Operand(kRootRegister, builtin_pointer, times_8,
1713 11790 : IsolateData::builtin_entry_table_offset()));
1714 : #endif // defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1715 11790 : }
1716 :
1717 119577 : void TurboAssembler::LoadCodeObjectEntry(Register destination,
1718 : Register code_object) {
1719 : // Code objects are called differently depending on whether we are generating
1720 : // builtin code (which will later be embedded into the binary) or compiling
1721 : // user JS code at runtime.
1722 : // * Builtin code runs in --jitless mode and thus must not call into on-heap
1723 : // Code targets. Instead, we dispatch through the builtins entry table.
1724 : // * Codegen at runtime does not have this restriction and we can use the
1725 : // shorter, branchless instruction sequence. The assumption here is that
1726 : // targets are usually generated code and not builtin Code objects.
1727 :
1728 119577 : if (options().isolate_independent_code) {
1729 : DCHECK(root_array_available());
1730 8904 : Label if_code_is_builtin, out;
1731 :
1732 : // Check whether the Code object is a builtin. If so, call its (off-heap)
1733 : // entry point directly without going through the (on-heap) trampoline.
1734 : // Otherwise, just call the Code object as always.
1735 : cmpl(FieldOperand(code_object, Code::kBuiltinIndexOffset),
1736 8904 : Immediate(Builtins::kNoBuiltinId));
1737 8904 : j(not_equal, &if_code_is_builtin);
1738 :
1739 : // A non-builtin Code object, the entry point is at
1740 : // Code::raw_instruction_start().
1741 8904 : Move(destination, code_object);
1742 8904 : addp(destination, Immediate(Code::kHeaderSize - kHeapObjectTag));
1743 8904 : jmp(&out);
1744 :
1745 : // A builtin Code object, the entry point is loaded from the builtin entry
1746 : // table.
1747 8904 : bind(&if_code_is_builtin);
1748 : movl(destination, FieldOperand(code_object, Code::kBuiltinIndexOffset));
1749 : movp(destination, Operand(kRootRegister, destination, times_pointer_size,
1750 17808 : IsolateData::builtin_entry_table_offset()));
1751 :
1752 8904 : bind(&out);
1753 : } else {
1754 110673 : Move(destination, code_object);
1755 110674 : addp(destination, Immediate(Code::kHeaderSize - kHeapObjectTag));
1756 : }
1757 119579 : }
1758 :
1759 106070 : void TurboAssembler::CallCodeObject(Register code_object) {
1760 106070 : LoadCodeObjectEntry(code_object, code_object);
1761 106071 : call(code_object);
1762 106071 : }
1763 :
1764 728 : void TurboAssembler::JumpCodeObject(Register code_object) {
1765 728 : LoadCodeObjectEntry(code_object, code_object);
1766 728 : jmp(code_object);
1767 728 : }
1768 :
// Emits an indirect call to |reg| via a retpoline ("return trampoline")
// sequence: the actual transfer happens through a ret whose return address
// has been overwritten with the target, while the CPU's speculative return
// path is trapped in the pause/jmp loop below (capture_spec).
void TurboAssembler::RetpolineCall(Register reg) {
  Label setup_return, setup_target, inner_indirect_branch, capture_spec;

  jmp(&setup_return);  // Jump past the entire retpoline below.

  bind(&inner_indirect_branch);
  call(&setup_target);

  // Speculation lands here after the inner call; spin harmlessly.
  bind(&capture_spec);
  pause();
  jmp(&capture_spec);

  // Replace the return address pushed by the inner call with the real
  // target, then "return" into it.
  bind(&setup_target);
  movq(Operand(rsp, 0), reg);
  ret(0);

  bind(&setup_return);
  call(&inner_indirect_branch);  // Callee will return after this instruction.
}
1788 :
// Retpoline call to an absolute address: materialize the destination in the
// scratch register, then reuse the register variant above.
void TurboAssembler::RetpolineCall(Address destination, RelocInfo::Mode rmode) {
  Move(kScratchRegister, destination, rmode);
  RetpolineCall(kScratchRegister);
}
1793 :
// Emits an indirect jump to |reg| via a retpoline sequence (see
// RetpolineCall). The call pushes a return address that is immediately
// overwritten with the real target; speculation is parked in the pause loop.
void TurboAssembler::RetpolineJump(Register reg) {
  Label setup_target, capture_spec;

  call(&setup_target);

  // Speculative return path spins here.
  bind(&capture_spec);
  pause();
  jmp(&capture_spec);

  bind(&setup_target);
  movq(Operand(rsp, 0), reg);
  ret(0);
}
1807 :
1808 44414 : void TurboAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
1809 44414 : if (imm8 == 0) {
1810 : Movd(dst, src);
1811 : return;
1812 : }
1813 42456 : if (CpuFeatures::IsSupported(SSE4_1)) {
1814 : CpuFeatureScope sse_scope(this, SSE4_1);
1815 42262 : pextrd(dst, src, imm8);
1816 : return;
1817 : }
1818 : DCHECK_EQ(1, imm8);
1819 194 : movq(dst, src);
1820 : shrq(dst, Immediate(32));
1821 : }
1822 :
1823 542 : void TurboAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
1824 542 : if (CpuFeatures::IsSupported(SSE4_1)) {
1825 : CpuFeatureScope sse_scope(this, SSE4_1);
1826 540 : pinsrd(dst, src, imm8);
1827 542 : return;
1828 : }
1829 : Movd(kScratchDoubleReg, src);
1830 2 : if (imm8 == 1) {
1831 : punpckldq(dst, kScratchDoubleReg);
1832 : } else {
1833 : DCHECK_EQ(0, imm8);
1834 : Movss(dst, kScratchDoubleReg);
1835 : }
1836 : }
1837 :
1838 1860 : void TurboAssembler::Pinsrd(XMMRegister dst, Operand src, int8_t imm8) {
1839 1860 : if (CpuFeatures::IsSupported(SSE4_1)) {
1840 : CpuFeatureScope sse_scope(this, SSE4_1);
1841 1860 : pinsrd(dst, src, imm8);
1842 1860 : return;
1843 : }
1844 : Movd(kScratchDoubleReg, src);
1845 0 : if (imm8 == 1) {
1846 : punpckldq(dst, kScratchDoubleReg);
1847 : } else {
1848 : DCHECK_EQ(0, imm8);
1849 : Movss(dst, kScratchDoubleReg);
1850 : }
1851 : }
1852 :
1853 847 : void TurboAssembler::Lzcntl(Register dst, Register src) {
1854 847 : if (CpuFeatures::IsSupported(LZCNT)) {
1855 : CpuFeatureScope scope(this, LZCNT);
1856 845 : lzcntl(dst, src);
1857 847 : return;
1858 : }
1859 2 : Label not_zero_src;
1860 2 : bsrl(dst, src);
1861 2 : j(not_zero, ¬_zero_src, Label::kNear);
1862 2 : Set(dst, 63); // 63^31 == 32
1863 2 : bind(¬_zero_src);
1864 2 : xorl(dst, Immediate(31)); // for x in [0..31], 31^x == 31 - x
1865 : }
1866 :
1867 0 : void TurboAssembler::Lzcntl(Register dst, Operand src) {
1868 0 : if (CpuFeatures::IsSupported(LZCNT)) {
1869 : CpuFeatureScope scope(this, LZCNT);
1870 0 : lzcntl(dst, src);
1871 0 : return;
1872 : }
1873 0 : Label not_zero_src;
1874 0 : bsrl(dst, src);
1875 0 : j(not_zero, ¬_zero_src, Label::kNear);
1876 0 : Set(dst, 63); // 63^31 == 32
1877 0 : bind(¬_zero_src);
1878 0 : xorl(dst, Immediate(31)); // for x in [0..31], 31^x == 31 - x
1879 : }
1880 :
1881 47 : void TurboAssembler::Lzcntq(Register dst, Register src) {
1882 47 : if (CpuFeatures::IsSupported(LZCNT)) {
1883 : CpuFeatureScope scope(this, LZCNT);
1884 47 : lzcntq(dst, src);
1885 47 : return;
1886 : }
1887 0 : Label not_zero_src;
1888 0 : bsrq(dst, src);
1889 0 : j(not_zero, ¬_zero_src, Label::kNear);
1890 0 : Set(dst, 127); // 127^63 == 64
1891 0 : bind(¬_zero_src);
1892 0 : xorl(dst, Immediate(63)); // for x in [0..63], 63^x == 63 - x
1893 : }
1894 :
1895 0 : void TurboAssembler::Lzcntq(Register dst, Operand src) {
1896 0 : if (CpuFeatures::IsSupported(LZCNT)) {
1897 : CpuFeatureScope scope(this, LZCNT);
1898 0 : lzcntq(dst, src);
1899 0 : return;
1900 : }
1901 0 : Label not_zero_src;
1902 0 : bsrq(dst, src);
1903 0 : j(not_zero, ¬_zero_src, Label::kNear);
1904 0 : Set(dst, 127); // 127^63 == 64
1905 0 : bind(¬_zero_src);
1906 0 : xorl(dst, Immediate(63)); // for x in [0..63], 63^x == 63 - x
1907 : }
1908 :
1909 56 : void TurboAssembler::Tzcntq(Register dst, Register src) {
1910 56 : if (CpuFeatures::IsSupported(BMI1)) {
1911 : CpuFeatureScope scope(this, BMI1);
1912 56 : tzcntq(dst, src);
1913 56 : return;
1914 : }
1915 0 : Label not_zero_src;
1916 0 : bsfq(dst, src);
1917 0 : j(not_zero, ¬_zero_src, Label::kNear);
1918 : // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
1919 0 : Set(dst, 64);
1920 0 : bind(¬_zero_src);
1921 : }
1922 :
1923 0 : void TurboAssembler::Tzcntq(Register dst, Operand src) {
1924 0 : if (CpuFeatures::IsSupported(BMI1)) {
1925 : CpuFeatureScope scope(this, BMI1);
1926 0 : tzcntq(dst, src);
1927 0 : return;
1928 : }
1929 0 : Label not_zero_src;
1930 0 : bsfq(dst, src);
1931 0 : j(not_zero, ¬_zero_src, Label::kNear);
1932 : // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
1933 0 : Set(dst, 64);
1934 0 : bind(¬_zero_src);
1935 : }
1936 :
1937 737 : void TurboAssembler::Tzcntl(Register dst, Register src) {
1938 737 : if (CpuFeatures::IsSupported(BMI1)) {
1939 : CpuFeatureScope scope(this, BMI1);
1940 737 : tzcntl(dst, src);
1941 737 : return;
1942 : }
1943 0 : Label not_zero_src;
1944 0 : bsfl(dst, src);
1945 0 : j(not_zero, ¬_zero_src, Label::kNear);
1946 0 : Set(dst, 32); // The result of tzcnt is 32 if src = 0.
1947 0 : bind(¬_zero_src);
1948 : }
1949 :
1950 0 : void TurboAssembler::Tzcntl(Register dst, Operand src) {
1951 0 : if (CpuFeatures::IsSupported(BMI1)) {
1952 : CpuFeatureScope scope(this, BMI1);
1953 0 : tzcntl(dst, src);
1954 0 : return;
1955 : }
1956 0 : Label not_zero_src;
1957 0 : bsfl(dst, src);
1958 0 : j(not_zero, ¬_zero_src, Label::kNear);
1959 0 : Set(dst, 32); // The result of tzcnt is 32 if src = 0.
1960 0 : bind(¬_zero_src);
1961 : }
1962 :
1963 133 : void TurboAssembler::Popcntl(Register dst, Register src) {
1964 133 : if (CpuFeatures::IsSupported(POPCNT)) {
1965 : CpuFeatureScope scope(this, POPCNT);
1966 133 : popcntl(dst, src);
1967 133 : return;
1968 : }
1969 0 : UNREACHABLE();
1970 : }
1971 :
1972 0 : void TurboAssembler::Popcntl(Register dst, Operand src) {
1973 0 : if (CpuFeatures::IsSupported(POPCNT)) {
1974 : CpuFeatureScope scope(this, POPCNT);
1975 0 : popcntl(dst, src);
1976 0 : return;
1977 : }
1978 0 : UNREACHABLE();
1979 : }
1980 :
1981 62 : void TurboAssembler::Popcntq(Register dst, Register src) {
1982 62 : if (CpuFeatures::IsSupported(POPCNT)) {
1983 : CpuFeatureScope scope(this, POPCNT);
1984 62 : popcntq(dst, src);
1985 62 : return;
1986 : }
1987 0 : UNREACHABLE();
1988 : }
1989 :
1990 0 : void TurboAssembler::Popcntq(Register dst, Operand src) {
1991 0 : if (CpuFeatures::IsSupported(POPCNT)) {
1992 : CpuFeatureScope scope(this, POPCNT);
1993 0 : popcntq(dst, src);
1994 0 : return;
1995 : }
1996 0 : UNREACHABLE();
1997 : }
1998 :
1999 :
// Pushes the 12 safepoint-saved general registers in a fixed order (see
// kSafepointPushRegisterIndices) and then reserves stack slots for the
// registers that are deliberately not pushed, so the frame layout always
// covers kNumSafepointRegisters slots.
void MacroAssembler::Pushad() {
  Push(rax);
  Push(rcx);
  Push(rdx);
  Push(rbx);
  // Not pushing rsp or rbp.
  Push(rsi);
  Push(rdi);
  Push(r8);
  Push(r9);
  // r10 is kScratchRegister.
  Push(r11);
  Push(r12);
  // r13 is kRootRegister.
  Push(r14);
  Push(r15);
  STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta = (kNumSafepointRegisters - kNumSafepointSavedRegisters) *
                 kSystemPointerSize;
  leap(rsp, Operand(rsp, -sp_delta));
}
2022 :
2023 :
// Restores the registers saved by Pushad, in the exact reverse order, after
// first dropping the padding slots for the non-pushed registers.
void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta = (kNumSafepointRegisters - kNumSafepointSavedRegisters) *
                 kSystemPointerSize;
  leap(rsp, Operand(rsp, sp_delta));
  Pop(r15);
  Pop(r14);
  Pop(r12);
  Pop(r11);
  Pop(r9);
  Pop(r8);
  Pop(rdi);
  Pop(rsi);
  Pop(rbx);
  Pop(rdx);
  Pop(rcx);
  Pop(rax);
}
2042 :
2043 :
2044 : // Order general registers are pushed by Pushad:
2045 : // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
2046 : const int
2047 : MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
2048 : 0,
2049 : 1,
2050 : 2,
2051 : 3,
2052 : -1,
2053 : -1,
2054 : 4,
2055 : 5,
2056 : 6,
2057 : 7,
2058 : -1,
2059 : 8,
2060 : 9,
2061 : -1,
2062 : 10,
2063 : 11
2064 : };
2065 :
// Pushes a new stack handler (padding word + link to the previous handler)
// and installs it as the current handler in the isolate's handler slot.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kSystemPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  Push(Immediate(0));  // Padding.

  // Link the current handler as the next handler.
  ExternalReference handler_address =
      ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
  Push(ExternalReferenceAsOperand(handler_address));

  // Set this new handler as the current one.
  movp(ExternalReferenceAsOperand(handler_address), rsp);
}
2081 :
2082 :
// Unlinks the current stack handler: restores the previous handler from the
// top of the handler frame and drops the remaining handler slots.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address =
      ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
  Pop(ExternalReferenceAsOperand(handler_address));
  // One pointer was popped above; drop the rest of the handler frame.
  addp(rsp, Immediate(StackHandlerConstants::kSize - kSystemPointerSize));
}
2090 :
// Plain return; drops no stack arguments.
void TurboAssembler::Ret() { ret(0); }
2092 :
2093 3284222 : void TurboAssembler::Ret(int bytes_dropped, Register scratch) {
2094 3284222 : if (is_uint16(bytes_dropped)) {
2095 3284218 : ret(bytes_dropped);
2096 : } else {
2097 : PopReturnAddressTo(scratch);
2098 4 : addp(rsp, Immediate(bytes_dropped));
2099 : PushReturnAddressFrom(scratch);
2100 4 : ret(0);
2101 : }
2102 3284415 : }
2103 :
// Loads |heap_object|'s map into |map| and compares its instance type with
// |type|. Condition flags are set for the caller; |map| is clobbered.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  LoadTaggedPointerField(map,
                         FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
2111 :
2112 :
// Compares the instance type stored in |map| with |type| (16-bit compare);
// leaves the result in the condition flags.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
}
2116 :
// Converts the double in |input_reg| to an int32 in |result_reg|. Jumps to
// |lost_precision| if the round-trip (double -> int -> double) does not
// reproduce the input, and to |is_nan| if the input is NaN.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch, Label* lost_precision,
                               Label* is_nan, Label::Distance dst) {
  Cvttsd2si(result_reg, input_reg);
  // Re-convert and compare to detect truncation or out-of-range inputs.
  Cvtlsi2sd(kScratchDoubleReg, result_reg);
  Ucomisd(kScratchDoubleReg, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // NaN.
}
2126 :
2127 :
2128 117 : void MacroAssembler::AssertNotSmi(Register object) {
2129 117 : if (emit_debug_code()) {
2130 : Condition is_smi = CheckSmi(object);
2131 0 : Check(NegateCondition(is_smi), AbortReason::kOperandIsASmi);
2132 : }
2133 117 : }
2134 :
2135 :
2136 559 : void MacroAssembler::AssertSmi(Register object) {
2137 559 : if (emit_debug_code()) {
2138 : Condition is_smi = CheckSmi(object);
2139 0 : Check(is_smi, AbortReason::kOperandIsNotASmi);
2140 : }
2141 559 : }
2142 :
2143 0 : void MacroAssembler::AssertSmi(Operand object) {
2144 0 : if (emit_debug_code()) {
2145 : Condition is_smi = CheckSmi(object);
2146 0 : Check(is_smi, AbortReason::kOperandIsNotASmi);
2147 : }
2148 0 : }
2149 :
// In debug code, aborts unless the upper 32 bits of |int32_register| are
// zero (i.e. the register holds a properly zero-extended 32-bit value).
void TurboAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    DCHECK_NE(int32_register, kScratchRegister);
    // 2^32: any zero-extended 32-bit value is strictly below it.
    movq(kScratchRegister, int64_t{0x0000000100000000});
    cmpq(kScratchRegister, int32_register);
    Check(above_equal, AbortReason::k32BitValueInRegisterIsNotZeroExtended);
  }
}
2158 :
// In debug code, aborts unless |object| is a heap object whose map has the
// IsConstructor bit set. |object| is preserved via push/pop around the map
// load.
void MacroAssembler::AssertConstructor(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmiAndNotAConstructor);
    Push(object);
    LoadTaggedPointerField(object,
                           FieldOperand(object, HeapObject::kMapOffset));
    testb(FieldOperand(object, Map::kBitFieldOffset),
          Immediate(Map::IsConstructorBit::kMask));
    Pop(object);
    Check(not_zero, AbortReason::kOperandIsNotAConstructor);
  }
}
2172 :
// In debug code, aborts unless |object| is a JSFunction. |object| is
// preserved via push/pop (CmpObjectType clobbers the map register).
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, AbortReason::kOperandIsNotAFunction);
  }
}
2183 :
2184 :
// In debug code, aborts unless |object| is a JSBoundFunction. |object| is
// preserved via push/pop.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, AbortReason::kOperandIsNotABoundFunction);
  }
}
2195 :
// In debug code, aborts unless |object| is one of JSGeneratorObject,
// JSAsyncFunctionObject or JSAsyncGeneratorObject. |object| is preserved.
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (!emit_debug_code()) return;
  testb(object, Immediate(kSmiTagMask));
  Check(not_equal, AbortReason::kOperandIsASmiAndNotAGeneratorObject);

  // Load map; the map load clobbers |object|, so save it first.
  Register map = object;
  Push(object);
  LoadTaggedPointerField(map, FieldOperand(object, HeapObject::kMapOffset));

  Label do_check;
  // Check if JSGeneratorObject
  CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
  j(equal, &do_check);

  // Check if JSAsyncFunctionObject
  CmpInstanceType(map, JS_ASYNC_FUNCTION_OBJECT_TYPE);
  j(equal, &do_check);

  // Check if JSAsyncGeneratorObject; the flags from this last compare feed
  // the Check below when neither earlier type matched.
  CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);

  bind(&do_check);
  // Restore generator object to register and perform assertion
  Pop(object);
  Check(equal, AbortReason::kOperandIsNotAGeneratorObject);
}
2223 :
// In debug code, aborts unless |object| is the undefined value or an
// AllocationSite (identified by its map, read from offset 0 of the object).
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    Cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    Assert(equal, AbortReason::kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
2235 :
// Unwraps a weak reference in |in_out| in place: jumps to |target_if_cleared|
// when the reference has been cleared, otherwise strips the weak-heap-object
// tag bits to yield a strong pointer.
void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
  cmpl(in_out, Immediate(kClearedWeakHeapObjectLower32));
  j(equal, target_if_cleared);

  andp(in_out, Immediate(~static_cast<int32_t>(kWeakHeapObjectMask)));
}
2242 :
2243 168 : void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2244 : DCHECK_GT(value, 0);
2245 168 : if (FLAG_native_code_counters && counter->Enabled()) {
2246 : Operand counter_operand =
2247 0 : ExternalReferenceAsOperand(ExternalReference::Create(counter));
2248 0 : if (value == 1) {
2249 0 : incl(counter_operand);
2250 : } else {
2251 0 : addl(counter_operand, Immediate(value));
2252 : }
2253 : }
2254 168 : }
2255 :
2256 :
2257 0 : void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2258 : DCHECK_GT(value, 0);
2259 0 : if (FLAG_native_code_counters && counter->Enabled()) {
2260 : Operand counter_operand =
2261 0 : ExternalReferenceAsOperand(ExternalReference::Create(counter));
2262 0 : if (value == 1) {
2263 0 : decl(counter_operand);
2264 : } else {
2265 0 : subl(counter_operand, Immediate(value));
2266 : }
2267 : }
2268 0 : }
2269 :
// Checks the debugger's restart-FP slot; if it is non-zero, jumps to the
// FrameDropperTrampoline builtin to drop frames and restart the function.
// Clobbers rbx.
void MacroAssembler::MaybeDropFrames() {
  // Check whether we need to drop frames to restart a function on the stack.
  ExternalReference restart_fp =
      ExternalReference::debug_restart_fp_address(isolate());
  Load(rbx, restart_fp);
  testp(rbx, rbx);

  Label dont_drop;
  j(zero, &dont_drop, Label::kNear);
  Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET);

  bind(&dont_drop);
}
2283 :
// Removes the current frame and moves this function's arguments over the
// caller's, so a subsequent tail call reuses the caller's frame slot.
// |caller_args_count_reg| holds the caller's argument count on entry and is
// clobbered; |scratch0|/|scratch1| are clobbered as well. On exit rsp points
// at the relocated return address and rbp is the caller's frame pointer.
void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    subp(caller_args_count_reg, callee_args_count.reg());
    leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
                             StandardFrameConstants::kCallerPCOffset));
  } else {
    leap(new_sp_reg,
         Operand(rbp, caller_args_count_reg, times_pointer_size,
                 StandardFrameConstants::kCallerPCOffset -
                     callee_args_count.immediate() * kSystemPointerSize));
  }

  if (FLAG_debug_code) {
    cmpp(rsp, new_sp_reg);
    Check(below, AbortReason::kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  movp(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
  movp(Operand(rsp, 0), tmp_reg);

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    leap(count_reg, Operand(callee_args_count.reg(), 2));
  } else {
    movp(count_reg, Immediate(callee_args_count.immediate() + 2));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  decp(count_reg);
  movp(tmp_reg, Operand(rsp, count_reg, times_pointer_size, 0));
  movp(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmpp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  movp(rsp, new_sp_reg);
}
2350 :
// Invokes |function| with |actual| arguments, deriving the expected argument
// count from the function's SharedFunctionInfo. Clobbers rbx (holds the
// expected count for the full InvokeFunction below).
void MacroAssembler::InvokeFunction(Register function, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  LoadTaggedPointerField(
      rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movzxwq(rbx,
          FieldOperand(rbx, SharedFunctionInfo::kFormalParameterCountOffset));

  ParameterCount expected(rbx);
  InvokeFunction(function, new_target, expected, actual, flag);
}
2362 :
// Invokes |function| (which must be in rdi) with the given expected/actual
// argument counts, first loading the function's context into rsi.
void MacroAssembler::InvokeFunction(Register function, Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  DCHECK(function == rdi);
  LoadTaggedPointerField(rsi,
                         FieldOperand(function, JSFunction::kContextOffset));
  InvokeFunctionCode(rdi, new_target, expected, actual, flag);
}
2372 :
// Core of JS function invocation: runs the debug hook, clears new.target if
// absent, fixes up argument-count mismatches via InvokePrologue, and then
// calls or jumps to the function's Code object. |function| must be rdi and
// |new_target| (if valid) rdx, matching the JS calling convention.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function == rdi);
  DCHECK_IMPLIES(new_target.is_valid(), new_target == rdx);

  // On function call, call into the debugger if necessary.
  CheckDebugHook(function, new_target, expected, actual);

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(rdx, RootIndex::kUndefinedValue);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    LoadTaggedPointerField(rcx,
                           FieldOperand(function, JSFunction::kCodeOffset));
    if (flag == CALL_FUNCTION) {
      CallCodeObject(rcx);
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      JumpCodeObject(rcx);
    }
    bind(&done);
  }
}
2410 :
// Compares expected vs. actual argument counts ahead of a JS call. When they
// can be proven equal (or adaptation is explicitly disabled via the
// kDontAdaptArgumentsSentinel), falls through to the call site; otherwise
// routes through the ArgumentsAdaptorTrampoline. Sets *definitely_mismatches
// when the mismatch is known at compile time (the caller then skips the
// direct call entirely). Expects/produces the JS convention: actual count in
// rax, expected count in rbx.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual, Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    Set(rax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      Set(rax, actual.immediate());
      cmpp(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      DCHECK(expected.reg() == rbx);
    } else if (expected.reg() != actual.reg()) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpp(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      DCHECK(actual.reg() == rax);
      DCHECK(expected.reg() == rbx);
    } else {
      definitely_matches = true;
      Move(rax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      if (!*definitely_mismatches) {
        jmp(done, near_jump);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2472 :
// Emits a runtime check of the isolate's debug-hook-on-function-call flag;
// when set, saves the invocation registers (Smi-tagging the counts), calls
// Runtime::kDebugOnFunctionCall, and restores everything so the actual
// invocation can proceed unchanged.
void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual) {
  Label skip_hook;
  ExternalReference debug_hook_active =
      ExternalReference::debug_hook_on_function_call_address(isolate());
  Operand debug_hook_active_operand =
      ExternalReferenceAsOperand(debug_hook_active);
  cmpb(debug_hook_active_operand, Immediate(0));
  j(equal, &skip_hook);

  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      // Counts are saved as Smis so the GC can walk the stack safely.
      SmiTag(expected.reg(), expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg(), actual.reg());
      Push(actual.reg());
      // Untag again: the live value is still needed for GetReceiverOperand.
      SmiUntag(actual.reg(), actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // fun is pushed twice: once as a runtime argument, once for restoring.
    Push(fun);
    Push(fun);
    Push(StackArgumentsAccessor(rbp, actual).GetReceiverOperand());
    CallRuntime(Runtime::kDebugOnFunctionCall);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg(), actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg(), expected.reg());
    }
  }
  bind(&skip_hook);
}
2518 :
// Builds a stub frame: saved rbp, new frame pointer, and a frame-type marker
// in the slot where a JS frame would hold the context.
void TurboAssembler::StubPrologue(StackFrame::Type type) {
  pushq(rbp);  // Caller's frame pointer.
  movp(rbp, rsp);
  Push(Immediate(StackFrame::TypeToMarker(type)));
}
2524 :
// Builds a standard JS frame: saved rbp, new frame pointer, then the context
// (rsi) and the JS function (rdi) per the JS calling convention.
void TurboAssembler::Prologue() {
  pushq(rbp);  // Caller's frame pointer.
  movp(rbp, rsp);
  Push(rsi);  // Callee's context.
  Push(rdi);  // Callee's JS function.
}
2531 :
// Enters a frame of the given type: saved rbp, new frame pointer, and the
// type marker. Paired with LeaveFrame below.
void TurboAssembler::EnterFrame(StackFrame::Type type) {
  pushq(rbp);
  movp(rbp, rsp);
  Push(Immediate(StackFrame::TypeToMarker(type)));
}
2537 :
// Tears down a frame previously built by EnterFrame/StubPrologue. In debug
// code, first verifies that the frame's type marker matches |type|.
void TurboAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmpp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(StackFrame::TypeToMarker(type)));
    Check(equal, AbortReason::kStackFrameTypesMustMatch);
  }
  movp(rsp, rbp);
  popq(rbp);
}
2547 :
// First half of exit-frame setup: builds the fixed part of an EXIT or
// BUILTIN_EXIT frame (saved rbp, type marker, saved-sp slot, code object)
// and records rbp/rsi/rbx in the isolate's c_entry_fp, context and
// c_function slots. When |save_rax| is set, rax is preserved in r14 for the
// epilogue.
void MacroAssembler::EnterExitFramePrologue(bool save_rax,
                                            StackFrame::Type frame_type) {
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT);

  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  DCHECK_EQ(kFPOnStackSize + kPCOnStackSize,
            ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(kFPOnStackSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kSystemPointerSize, ExitFrameConstants::kCallerFPOffset);
  pushq(rbp);
  movp(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  Push(Immediate(StackFrame::TypeToMarker(frame_type)));
  DCHECK_EQ(-2 * kSystemPointerSize, ExitFrameConstants::kSPOffset);
  Push(Immediate(0));  // Saved entry sp, patched before call.
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movp(r14, rax);  // Backup rax in callee-save register.
  }

  Store(
      ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()),
      rbp);
  Store(ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()),
        rsi);
  Store(
      ExternalReference::Create(IsolateAddressId::kCFunctionAddress, isolate()),
      rbx);
}
2583 :
2584 :
// Second half of exit-frame setup: reserves C argument stack space (plus the
// Windows shadow space), optionally spills all allocatable XMM registers,
// aligns rsp to the OS frame alignment, and patches the saved-entry-sp slot
// reserved by the prologue.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kNumRegisters * kDoubleSize +
                arg_stack_space * kRegisterSize;
    subp(rsp, Immediate(space));
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config = RegisterConfiguration::Default();
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subp(rsp, Immediate(arg_stack_space * kRegisterSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
    DCHECK(is_int8(kFrameAlignment));
    andp(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
2618 :
// Builds a full exit frame for calling out to C++. r14 holds the (saved)
// argument count from the prologue; r15 is set to point at the last JS
// argument and must survive until the matching LeaveExitFrame.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles,
                                    StackFrame::Type frame_type) {
  EnterExitFramePrologue(true, frame_type);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kSystemPointerSize;
  leap(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
2630 :
2631 :
// Builds an exit frame for API callbacks: no rax backup, no double saves.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false, StackFrame::EXIT);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
2636 :
2637 :
// Tears down an exit frame built by EnterExitFrame: optionally restores the
// spilled XMM registers, then either drops the JS arguments (using the argv
// pointer preserved in r15) or simply unwinds the frame, and finally clears
// the isolate's exit-frame bookkeeping.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config = RegisterConfiguration::Default();
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    movp(rcx, Operand(rbp, kFPOnStackSize));
    movp(rbp, Operand(rbp, 0 * kSystemPointerSize));

    // Drop everything up to and including the arguments and the receiver
    // from the caller stack.
    leap(rsp, Operand(r15, 1 * kSystemPointerSize));

    PushReturnAddressFrom(rcx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue();
}
2668 :
// Tears down an API exit frame (no argument dropping, no double restores).
void MacroAssembler::LeaveApiExitFrame() {
  movp(rsp, rbp);
  popq(rbp);

  LeaveExitFrameEpilogue();
}
2675 :
void MacroAssembler::LeaveExitFrameEpilogue() {
  // Shared tail of LeaveExitFrame / LeaveApiExitFrame: reload the context
  // register from the isolate and clear the saved C-entry frame pointer.

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address =
      ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
  Operand context_operand = ExternalReferenceAsOperand(context_address);
  movp(rsi, context_operand);
#ifdef DEBUG
  // Poison the saved context slot so stale reads are caught in debug builds.
  movp(context_operand, Immediate(Context::kInvalidContext));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address =
      ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
  Operand c_entry_fp_operand = ExternalReferenceAsOperand(c_entry_fp_address);
  movp(c_entry_fp_operand, Immediate(0));
}
2692 :
2693 :
#ifdef _WIN64
// Windows x64 calling convention: the first four arguments are passed in
// registers (and the caller still reserves stack slots for them, see
// ArgumentStackSlotsForCFunctionCall below).
static const int kRegisterPassedArguments = 4;
#else
// System V AMD64 ABI (Linux/Mac): the first six arguments are passed in
// registers with no stack slots reserved for them.
static const int kRegisterPassedArguments = 6;
#endif
2699 :
2700 :
void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  // Load the native context into |dst|, then overwrite |dst| with the
  // tagged pointer stored at slot |index| of that context.
  LoadTaggedPointerField(dst, NativeContextOperand());
  LoadTaggedPointerField(dst, ContextOperand(dst, index));
}
2705 :
2706 :
2707 0 : int TurboAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2708 : // On Windows 64 stack slots are reserved by the caller for all arguments
2709 : // including the ones passed in registers, and space is always allocated for
2710 : // the four register arguments even if the function takes fewer than four
2711 : // arguments.
2712 : // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2713 : // and the caller does not reserve stack slots for them.
2714 : DCHECK_GE(num_arguments, 0);
2715 : #ifdef _WIN64
2716 : const int kMinimumStackSlots = kRegisterPassedArguments;
2717 : if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2718 : return num_arguments;
2719 : #else
2720 2124464 : if (num_arguments < kRegisterPassedArguments) return 0;
2721 95215 : return num_arguments - kRegisterPassedArguments;
2722 : #endif
2723 : }
2724 :
void TurboAssembler::PrepareCallCFunction(int num_arguments) {
  // Reserves and aligns stack space for a call to a C function taking
  // |num_arguments| arguments, stashing the pre-call rsp so CallCFunction
  // (which must be passed the same |num_arguments|) can restore it.
  int frame_alignment = base::OS::ActivationFrameAlignment();
  DCHECK_NE(frame_alignment, 0);
  DCHECK_GE(num_arguments, 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movp(kScratchRegister, rsp);
  DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // Reserve argument slots plus one extra slot for the saved rsp, align
  // downwards, then store the old rsp just above the argument area.
  subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
  andp(rsp, Immediate(-frame_alignment));
  movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
}
2739 :
void TurboAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Materialize the C function's address in rax, then dispatch to the
  // register-based overload.
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}
2745 :
void TurboAssembler::CallCFunction(Register function, int num_arguments) {
  // Calls the C function whose address is in |function|. The stack must
  // already have been set up by PrepareCallCFunction with the same
  // |num_arguments|; the saved rsp stored there is restored afterwards.
  DCHECK_LE(num_arguments, kMaxCParameters);
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  // Save the frame pointer and PC so that the stack layout remains iterable,
  // even without an ExitFrame which normally exists between JS and C frames.
  if (isolate() != nullptr) {
    Label get_pc;
    DCHECK(!AreAliased(kScratchRegister, function));
    // lea on a label bound at the very next instruction yields the current
    // program counter.
    leaq(kScratchRegister, Operand(&get_pc, 0));
    bind(&get_pc);
    movp(ExternalReferenceAsOperand(
             ExternalReference::fast_c_call_caller_pc_address(isolate())),
         kScratchRegister);
    movp(ExternalReferenceAsOperand(
             ExternalReference::fast_c_call_caller_fp_address(isolate())),
         rbp);
  }

  call(function);

  if (isolate() != nullptr) {
    // We don't unset the PC; the FP is the source of truth.
    movp(ExternalReferenceAsOperand(
             ExternalReference::fast_c_call_caller_fp_address(isolate())),
         Immediate(0));
  }

  DCHECK_NE(base::OS::ActivationFrameAlignment(), 0);
  DCHECK_GE(num_arguments, 0);
  // Restore the pre-call rsp that PrepareCallCFunction stashed just above
  // the argument area.
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
}
2784 :
void TurboAssembler::CheckPageFlag(Register object, Register scratch, int mask,
                                   Condition cc, Label* condition_met,
                                   Label::Distance condition_met_distance) {
  // Tests the MemoryChunk flags word of the page containing |object| against
  // |mask|, and jumps to |condition_met| when the test satisfies |cc|.
  // |scratch| is clobbered; it may alias |object|.
  DCHECK(cc == zero || cc == not_zero);
  // Compute the page start by masking off the in-page address bits.
  if (scratch == object) {
    andp(scratch, Immediate(~kPageAlignmentMask));
  } else {
    movp(scratch, Immediate(~kPageAlignmentMask));
    andp(scratch, object);
  }
  // Prefer a single-byte test when the mask fits in one byte (smaller
  // instruction encoding).
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2803 :
2804 108 : void TurboAssembler::ComputeCodeStartAddress(Register dst) {
2805 108 : Label current;
2806 108 : bind(¤t);
2807 108 : int pc = pc_offset();
2808 : // Load effective address to get the address of the current instruction.
2809 216 : leaq(dst, Operand(¤t, -pc));
2810 108 : }
2811 :
void TurboAssembler::ResetSpeculationPoisonRegister() {
  // Set the poison register to all ones (-1), i.e. a mask that poisons
  // nothing.
  // TODO(tebbi): Perhaps, we want to put an lfence here.
  Set(kSpeculationPoisonRegister, -1);
}
2816 :
void TurboAssembler::CallForDeoptimization(Address target, int deopt_id) {
  // Calls the deoptimizer runtime entry at |target|, passing |deopt_id| in
  // r13. NOTE(review): r13 is presumably the root-array register, freed up
  // by the scope below — confirm against the register definitions.
  NoRootArrayScope no_root_array(this);
  // Save the deopt id in r13 (we don't need the roots array from now on).
  movp(r13, Immediate(deopt_id));
  call(target, RelocInfo::RUNTIME_ENTRY);
}
2823 :
2824 : } // namespace internal
2825 183867 : } // namespace v8
2826 :
2827 : #endif // V8_TARGET_ARCH_X64
|