Line data Source code
1 : // Copyright 2013 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #if V8_TARGET_ARCH_X64
6 :
7 : #include "src/api-arguments.h"
8 : #include "src/bootstrapper.h"
9 : #include "src/code-stubs.h"
10 : #include "src/codegen.h"
11 : #include "src/counters.h"
12 : #include "src/double.h"
13 : #include "src/heap/heap-inl.h"
14 : #include "src/ic/handler-compiler.h"
15 : #include "src/ic/ic.h"
16 : #include "src/ic/stub-cache.h"
17 : #include "src/isolate.h"
18 : #include "src/objects-inl.h"
19 : #include "src/objects/regexp-match-info.h"
20 : #include "src/regexp/jsregexp.h"
21 : #include "src/regexp/regexp-macro-assembler.h"
22 : #include "src/runtime/runtime.h"
23 :
24 : #include "src/x64/code-stubs-x64.h" // Cannot be the first include.
25 :
26 : namespace v8 {
27 : namespace internal {
28 :
29 : #define __ ACCESS_MASM(masm)
30 :
31 43 : void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
32 43 : __ popq(rcx);
33 86 : __ movq(MemOperand(rsp, rax, times_8, 0), rdi);
34 43 : __ pushq(rdi);
35 43 : __ pushq(rbx);
36 43 : __ pushq(rcx);
37 43 : __ addq(rax, Immediate(3));
38 43 : __ TailCallRuntime(Runtime::kNewArray);
39 43 : }
40 :
41 8102 : void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
42 : ExternalReference miss) {
43 : // Update the static counter each time a new code stub is generated.
44 8102 : isolate()->counters()->code_stubs()->Increment();
45 :
46 8102 : CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
47 : int param_count = descriptor.GetRegisterParameterCount();
48 : {
49 : // Call the runtime system in a fresh internal frame.
50 8102 : FrameScope scope(masm, StackFrame::INTERNAL);
51 : DCHECK(param_count == 0 ||
52 : rax.is(descriptor.GetRegisterParameter(param_count - 1)));
53 : // Push arguments
54 16720 : for (int i = 0; i < param_count; ++i) {
55 8618 : __ Push(descriptor.GetRegisterParameter(i));
56 : }
57 8102 : __ CallExternalReference(miss, param_count);
58 : }
59 :
60 8102 : __ Ret();
61 8102 : }
62 :
63 :
64 172 : void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
65 86 : __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
66 : const int argument_count = 1;
67 86 : __ PrepareCallCFunction(argument_count);
68 : __ LoadAddress(arg_reg_1,
69 172 : ExternalReference::isolate_address(isolate()));
70 :
71 : AllowExternalCallThatCantCauseGC scope(masm);
72 : __ CallCFunction(
73 : ExternalReference::store_buffer_overflow_function(isolate()),
74 86 : argument_count);
75 86 : __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
76 86 : __ ret(0);
77 86 : }
78 :
79 :
80 : class FloatingPointHelper : public AllStatic {
81 : public:
82 : enum ConvertUndefined {
83 : CONVERT_UNDEFINED_TO_ZERO,
84 : BAILOUT_ON_UNDEFINED
85 : };
86 : // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
87 : // If the operands are not both numbers, jump to not_numbers.
88 : // Leaves rdx and rax unchanged. Smis are converted via the scratch
89 : // register; heap numbers are loaded from their value fields.
90 : static void LoadSSE2UnknownOperands(MacroAssembler* masm,
91 : Label* not_numbers);
92 : };
93 :
94 :
95 9054 : void DoubleToIStub::Generate(MacroAssembler* masm) {
96 : Register input_reg = this->source();
97 9054 : Register final_result_reg = this->destination();
98 : DCHECK(is_truncating());
99 :
100 : Label check_negative, process_64_bits, done;
101 :
102 : int double_offset = offset();
103 :
104 : // Account for return address and saved regs if input is rsp.
105 9054 : if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;
106 :
107 9054 : MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
108 : MemOperand exponent_operand(MemOperand(input_reg,
109 9054 : double_offset + kDoubleSize / 2));
110 :
111 : Register scratch1;
112 9054 : Register scratch_candidates[3] = { rbx, rdx, rdi };
113 10299 : for (int i = 0; i < 3; i++) {
114 10299 : scratch1 = scratch_candidates[i];
115 10299 : if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
116 : }
117 :
118 : // Since we must use rcx for shifts below, use some other register (rax)
119 : // to calculate the result if rcx is the requested return register.
120 9054 : Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
121 : // Save rcx if it isn't the return register (it is clobbered by the
122 : // shifts); if it is the return register, save the temp register (rax)
123 : // we use in its stead for the result.
124 9054 : Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
125 9054 : __ pushq(scratch1);
126 9054 : __ pushq(save_reg);
127 :
128 9054 : bool stash_exponent_copy = !input_reg.is(rsp);
129 : __ movl(scratch1, mantissa_operand);
130 9054 : __ Movsd(kScratchDoubleReg, mantissa_operand);
131 : __ movl(rcx, exponent_operand);
132 9054 : if (stash_exponent_copy) __ pushq(rcx);
133 :
134 9054 : __ andl(rcx, Immediate(HeapNumber::kExponentMask));
135 : __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
136 18108 : __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
137 9054 : __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
138 9054 : __ j(below, &process_64_bits);
139 :
140 : // Result is entirely in lower 32-bits of mantissa
141 : int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
142 9054 : __ subl(rcx, Immediate(delta));
143 : __ xorl(result_reg, result_reg);
144 9054 : __ cmpl(rcx, Immediate(31));
145 9054 : __ j(above, &done);
146 : __ shll_cl(scratch1);
147 9054 : __ jmp(&check_negative);
148 :
149 9054 : __ bind(&process_64_bits);
150 9054 : __ Cvttsd2siq(result_reg, kScratchDoubleReg);
151 9054 : __ jmp(&done, Label::kNear);
152 :
153 : // If the double was negative, negate the integer result.
154 9054 : __ bind(&check_negative);
155 : __ movl(result_reg, scratch1);
156 : __ negl(result_reg);
157 9054 : if (stash_exponent_copy) {
158 896 : __ cmpl(MemOperand(rsp, 0), Immediate(0));
159 : } else {
160 : __ cmpl(exponent_operand, Immediate(0));
161 : }
162 9054 : __ cmovl(greater, result_reg, scratch1);
163 :
164 : // Restore registers
165 9054 : __ bind(&done);
166 9054 : if (stash_exponent_copy) {
167 448 : __ addp(rsp, Immediate(kDoubleSize));
168 : }
169 9054 : if (!final_result_reg.is(result_reg)) {
170 : DCHECK(final_result_reg.is(rcx));
171 : __ movl(final_result_reg, result_reg);
172 : }
173 9054 : __ popq(save_reg);
174 9054 : __ popq(scratch1);
175 9054 : __ ret(0);
176 9054 : }
177 :
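DoubleToIStub hand-decodes the IEEE-754 bit pattern: exponents below the mantissa width take the process_64_bits path (a plain Cvttsd2siq is exact there), larger exponents shift the mantissa so only its low 32 bits survive, and the sign is applied last via cmovl. A minimal portable sketch of the same split, assuming nothing beyond standard C++ (the function name and the NaN/infinity handling are illustrative, not V8's):

#include <cstdint>
#include <cstring>

// Truncate a double to int32 modulo 2^32, mirroring the stub's two paths.
int32_t TruncateToInt32(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);
  int32_t biased = static_cast<int32_t>((bits >> 52) & 0x7FF);
  int32_t exponent = biased - 1023;  // HeapNumber::kExponentBias.
  if (biased != 0x7FF && exponent < 52) {
    // process_64_bits: |value| < 2^52, so 64-bit truncation is exact.
    return static_cast<int32_t>(static_cast<int64_t>(value));
  }
  // Large exponent: only mantissa bits shifted into the low 32 survive.
  int32_t shift = exponent - 52;  // What rcx holds after the subl(delta).
  if (shift > 31) return 0;       // Also covers NaN and infinity.
  uint64_t mantissa = (bits & ((uint64_t{1} << 52) - 1)) | (uint64_t{1} << 52);
  uint32_t result = static_cast<uint32_t>(mantissa << shift);
  return static_cast<int32_t>((bits >> 63) ? 0u - result : result);
}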
178 :
179 13198 : void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
180 : Label* not_numbers) {
181 : Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
182 : // Load operand in rdx into xmm0, or branch to not_numbers.
183 13198 : __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
184 13198 : __ JumpIfSmi(rdx, &load_smi_rdx);
185 13198 : __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
186 13198 : __ j(not_equal, not_numbers); // Argument in rdx is not a number.
187 13198 : __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
188 : // Load operand in rax into xmm1, or branch to not_numbers.
189 13198 : __ JumpIfSmi(rax, &load_smi_rax);
190 :
191 13198 : __ bind(&load_nonsmi_rax);
192 : __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
193 13198 : __ j(not_equal, not_numbers);
194 13198 : __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
195 13198 : __ jmp(&done);
196 :
197 13198 : __ bind(&load_smi_rdx);
198 13198 : __ SmiToInteger32(kScratchRegister, rdx);
199 13198 : __ Cvtlsi2sd(xmm0, kScratchRegister);
200 13198 : __ JumpIfNotSmi(rax, &load_nonsmi_rax);
201 :
202 13198 : __ bind(&load_smi_rax);
203 13198 : __ SmiToInteger32(kScratchRegister, rax);
204 13198 : __ Cvtlsi2sd(xmm1, kScratchRegister);
205 13198 : __ bind(&done);
206 13198 : }
207 :
208 :
209 336 : void MathPowStub::Generate(MacroAssembler* masm) {
210 112 : const Register exponent = MathPowTaggedDescriptor::exponent();
211 : DCHECK(exponent.is(rdx));
212 : const Register scratch = rcx;
213 112 : const XMMRegister double_result = xmm3;
214 112 : const XMMRegister double_base = xmm2;
215 : const XMMRegister double_exponent = xmm1;
216 112 : const XMMRegister double_scratch = xmm4;
217 :
218 : Label call_runtime, done, exponent_not_smi, int_exponent;
219 :
220 : // Save 1 in double_result - we need this several times later on.
221 112 : __ movp(scratch, Immediate(1));
222 112 : __ Cvtlsi2sd(double_result, scratch);
223 :
224 112 : if (exponent_type() == TAGGED) {
225 19 : __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
226 19 : __ SmiToInteger32(exponent, exponent);
227 19 : __ jmp(&int_exponent);
228 :
229 19 : __ bind(&exponent_not_smi);
230 19 : __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
231 : }
232 :
233 112 : if (exponent_type() != INTEGER) {
234 : Label fast_power, try_arithmetic_simplification;
235 : // Detect integer exponents stored as double.
236 : __ DoubleToI(exponent, double_exponent, double_scratch,
237 : TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
238 : &try_arithmetic_simplification,
239 62 : &try_arithmetic_simplification);
240 62 : __ jmp(&int_exponent);
241 :
242 62 : __ bind(&try_arithmetic_simplification);
243 62 : __ Cvttsd2si(exponent, double_exponent);
244 : // Skip to runtime if possibly NaN (indicated by the indefinite integer).
245 62 : __ cmpl(exponent, Immediate(0x1));
246 62 : __ j(overflow, &call_runtime);
247 :
248 : // Using FPU instructions to calculate power.
249 : Label fast_power_failed;
250 62 : __ bind(&fast_power);
251 62 : __ fnclex(); // Clear flags to catch exceptions later.
252 : // Transfer (B)ase and (E)xponent onto the FPU register stack.
253 62 : __ subp(rsp, Immediate(kDoubleSize));
254 62 : __ Movsd(Operand(rsp, 0), double_exponent);
255 62 : __ fld_d(Operand(rsp, 0)); // E
256 62 : __ Movsd(Operand(rsp, 0), double_base);
257 62 : __ fld_d(Operand(rsp, 0)); // B, E
258 :
259 : // Exponent is in st(1) and base is in st(0)
260 : // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
261 : // FYL2X calculates st(1) * log2(st(0))
262 62 : __ fyl2x(); // X
263 62 : __ fld(0); // X, X
264 62 : __ frndint(); // rnd(X), X
265 62 : __ fsub(1); // rnd(X), X-rnd(X)
266 62 : __ fxch(1); // X - rnd(X), rnd(X)
267 : // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
268 62 : __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X)
269 62 : __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X)
270 62 : __ faddp(1); // 2^(X-rnd(X)), rnd(X)
271 : // FSCALE calculates st(0) * 2^st(1)
272 62 : __ fscale(); // 2^X, rnd(X)
273 62 : __ fstp(1);
274 : // Bail out to runtime in case of exceptions in the status word.
275 62 : __ fnstsw_ax();
276 62 : __ testb(rax, Immediate(0x5F)); // Check for all but precision exception.
277 62 : __ j(not_zero, &fast_power_failed, Label::kNear);
278 62 : __ fstp_d(Operand(rsp, 0));
279 62 : __ Movsd(double_result, Operand(rsp, 0));
280 62 : __ addp(rsp, Immediate(kDoubleSize));
281 62 : __ jmp(&done);
282 :
283 62 : __ bind(&fast_power_failed);
284 62 : __ fninit();
285 62 : __ addp(rsp, Immediate(kDoubleSize));
286 62 : __ jmp(&call_runtime);
287 : }
288 :
289 : // Calculate power with integer exponent.
290 112 : __ bind(&int_exponent);
291 112 : const XMMRegister double_scratch2 = double_exponent;
292 : // Back up exponent as we need to check if exponent is negative later.
293 : __ movp(scratch, exponent); // Back up exponent.
294 112 : __ Movsd(double_scratch, double_base); // Back up base.
295 112 : __ Movsd(double_scratch2, double_result); // Load double_exponent with 1.
296 :
297 : // Get absolute value of exponent.
298 : Label no_neg, while_true, while_false;
299 : __ testl(scratch, scratch);
300 112 : __ j(positive, &no_neg, Label::kNear);
301 : __ negl(scratch);
302 112 : __ bind(&no_neg);
303 :
304 112 : __ j(zero, &while_false, Label::kNear);
305 : __ shrl(scratch, Immediate(1));
306 : // Above condition means CF==0 && ZF==0. This means that the
307 : // bit that has been shifted out is 0 and the result is not 0.
308 112 : __ j(above, &while_true, Label::kNear);
309 112 : __ Movsd(double_result, double_scratch);
310 112 : __ j(zero, &while_false, Label::kNear);
311 :
312 112 : __ bind(&while_true);
313 : __ shrl(scratch, Immediate(1));
314 112 : __ Mulsd(double_scratch, double_scratch);
315 112 : __ j(above, &while_true, Label::kNear);
316 112 : __ Mulsd(double_result, double_scratch);
317 112 : __ j(not_zero, &while_true);
318 :
319 112 : __ bind(&while_false);
320 : // If the exponent is negative, return 1/result.
321 : __ testl(exponent, exponent);
322 112 : __ j(greater, &done);
323 112 : __ Divsd(double_scratch2, double_result);
324 112 : __ Movsd(double_result, double_scratch2);
325 : // Test whether result is zero. Bail out to check for subnormal result.
326 : // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
327 112 : __ Xorpd(double_scratch2, double_scratch2);
328 112 : __ Ucomisd(double_scratch2, double_result);
329 : // double_exponent aliased as double_scratch2 has already been overwritten
330 : // and may not have contained the exponent value in the first place when the
331 : // input was a smi. We reset it with exponent value before bailing out.
332 112 : __ j(not_equal, &done);
333 112 : __ Cvtlsi2sd(double_exponent, exponent);
334 :
335 : // Returning or bailing out.
336 112 : __ bind(&call_runtime);
337 : // Move base to the correct argument register. Exponent is already in xmm1.
338 112 : __ Movsd(xmm0, double_base);
339 : DCHECK(double_exponent.is(xmm1));
340 : {
341 : AllowExternalCallThatCantCauseGC scope(masm);
342 112 : __ PrepareCallCFunction(2);
343 : __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
344 112 : 2);
345 : }
346 : // Return value is in xmm0.
347 112 : __ Movsd(double_result, xmm0);
348 :
349 112 : __ bind(&done);
350 112 : __ ret(0);
351 112 : }
352 :
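MathPowStub's integer-exponent loop is square-and-multiply over the bits of the absolute exponent, with a single division at the end for negative exponents; that final 1/result is why the stub re-checks for a zero result, since with subnormals x^-y == (1/x)^y does not always hold. A hedged C++ sketch of the loop's semantics (the function name is made up for illustration):

// Square-and-multiply, as in the while_true loop above.
double PowIntExponent(double base, int exponent) {
  double result = 1.0;    // double_result starts at 1.
  double scratch = base;  // double_scratch holds the running square.
  unsigned e = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                            : static_cast<unsigned>(exponent);
  while (e != 0) {
    if (e & 1) result *= scratch;  // Shifted-out bit set: fold in the square.
    scratch *= scratch;            // Now base^(2^(k+1)).
    e >>= 1;
  }
  return exponent < 0 ? 1.0 / result : result;
}

The preceding fast_power block handles the non-integer case through the identity B^E = 2^(E * log2(B)), splitting E * log2(B) into integer and fractional parts so that F2XM1 stays inside its (-1, 1) domain.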
353 : static int NegativeComparisonResult(Condition cc) {
354 : DCHECK(cc != equal);
355 : DCHECK((cc == less) || (cc == less_equal)
356 : || (cc == greater) || (cc == greater_equal));
357 2325 : return (cc == greater || cc == greater_equal) ? LESS : GREATER;
358 : }
359 :
360 :
361 26644 : static void CheckInputType(MacroAssembler* masm, Register input,
362 : CompareICState::State expected, Label* fail) {
363 : Label ok;
364 26396 : if (expected == CompareICState::SMI) {
365 6596 : __ JumpIfNotSmi(input, fail);
366 19800 : } else if (expected == CompareICState::NUMBER) {
367 248 : __ JumpIfSmi(input, &ok);
368 248 : __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
369 248 : __ j(not_equal, fail);
370 : }
371 : // We could be strict about internalized/non-internalized here, but as long as
372 : // hydrogen doesn't care, the stub doesn't have to care either.
373 26396 : __ bind(&ok);
374 26396 : }
375 :
376 :
377 24172 : static void BranchIfNotInternalizedString(MacroAssembler* masm,
378 : Label* label,
379 : Register object,
380 : Register scratch) {
381 24172 : __ JumpIfSmi(object, label);
382 24172 : __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
383 : __ movzxbp(scratch,
384 : FieldOperand(scratch, Map::kInstanceTypeOffset));
385 : STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
386 24172 : __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
387 24172 : __ j(not_zero, label);
388 24172 : }
389 :
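BranchIfNotInternalizedString leans on the instance-type encoding asserted above: both kStringTag and kInternalizedTag are zero, so a single testb against the combined masks answers "is this an internalized string". Restated in C++ (the mask values here are illustrative placeholders, not copied from V8's headers):

#include <cstdint>

// Per the STATIC_ASSERT above, a type is an internalized string iff
// neither the "not a string" nor the "not internalized" bit is set.
constexpr uint8_t kIsNotStringMask = 0x80;        // Placeholder value.
constexpr uint8_t kIsNotInternalizedMask = 0x40;  // Placeholder value.

bool IsInternalizedString(uint8_t instance_type) {
  return (instance_type & (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
}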
390 :
391 39594 : void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
392 : Label runtime_call, check_unequal_objects, done;
393 13198 : Condition cc = GetCondition();
394 25284 : Factory* factory = isolate()->factory();
395 :
396 : Label miss;
397 13198 : CheckInputType(masm, rdx, left(), &miss);
398 13198 : CheckInputType(masm, rax, right(), &miss);
399 :
400 : // Compare two smis.
401 : Label non_smi, smi_done;
402 13198 : __ JumpIfNotBothSmi(rax, rdx, &non_smi);
403 13198 : __ subp(rdx, rax);
404 13198 : __ j(no_overflow, &smi_done);
405 : __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
406 13198 : __ bind(&smi_done);
407 : __ movp(rax, rdx);
408 13198 : __ ret(0);
409 13198 : __ bind(&non_smi);
410 :
411 : // The compare stub returns a positive, negative, or zero 64-bit integer
412 : // value in rax, corresponding to the result of comparing the two inputs.
413 : // NOTICE! This code is only reached after a smi-fast-case check, so
414 : // it is certain that at least one operand isn't a smi.
415 :
416 : // Two identical objects are equal unless they are both NaN or undefined.
417 : {
418 : Label not_identical;
419 13198 : __ cmpp(rax, rdx);
420 13198 : __ j(not_equal, ¬_identical, Label::kNear);
421 :
422 13198 : if (cc != equal) {
423 : // Check for undefined. undefined OP undefined is false even though
424 : // undefined == undefined.
425 1112 : __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
426 : Label check_for_nan;
427 1112 : __ j(not_equal, &check_for_nan, Label::kNear);
428 1112 : __ Set(rax, NegativeComparisonResult(cc));
429 1112 : __ ret(0);
430 1112 : __ bind(&check_for_nan);
431 : }
432 :
433 : // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
434 : // so we do the second-best thing: test it ourselves.
435 : Label heap_number;
436 : // If it's not a heap number, then return equal for the (in)equality operator.
437 : __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
438 13198 : factory->heap_number_map());
439 13198 : __ j(equal, &heap_number, Label::kNear);
440 13198 : if (cc != equal) {
441 : __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
442 : __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
443 : // Call runtime on identical objects. Otherwise return equal.
444 : __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE)));
445 1112 : __ j(above_equal, &runtime_call, Label::kFar);
446 : // Call runtime on identical symbols since we need to throw a TypeError.
447 : __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
448 1112 : __ j(equal, &runtime_call, Label::kFar);
449 : }
450 13198 : __ Set(rax, EQUAL);
451 13198 : __ ret(0);
452 :
453 13198 : __ bind(&heap_number);
454 : // It is a heap number, so return equal if it's not NaN.
455 : // For NaN, return 1 for every condition except greater and
456 : // greater-equal. Return -1 for them, so the comparison yields
457 : // false for all conditions except not-equal.
458 13198 : __ Set(rax, EQUAL);
459 13198 : __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
460 13198 : __ Ucomisd(xmm0, xmm0);
461 13198 : __ setcc(parity_even, rax);
462 : // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
463 13198 : if (cc == greater_equal || cc == greater) {
464 : __ negp(rax);
465 : }
466 13198 : __ ret(0);
467 :
468 13198 : __ bind(¬_identical);
469 : }
470 :
471 13198 : if (cc == equal) { // Both strict and non-strict.
472 : Label slow; // Fallthrough label.
473 :
474 : // If we're doing a strict equality comparison, we don't have to do
475 : // type conversion, so we generate code to do fast comparison for objects
476 : // and oddballs. Non-smi numbers and strings still go through the usual
477 : // slow-case code.
478 12086 : if (strict()) {
479 : // If either is a Smi (we know that not both are), then they can only
480 : // be equal if the other is a HeapNumber. If so, use the slow case.
481 : {
482 : Label not_smis;
483 10599 : __ SelectNonSmi(rbx, rax, rdx, ¬_smis);
484 :
485 : // Check if the non-smi operand is a heap number.
486 : __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
487 10599 : factory->heap_number_map());
488 : // If heap number, handle it in the slow case.
489 10599 : __ j(equal, &slow);
490 : // Return non-equal. ebx (the lower half of rbx) is not zero.
491 : __ movp(rax, rbx);
492 10599 : __ ret(0);
493 :
494 10599 : __ bind(¬_smis);
495 : }
496 :
497 : // If either operand is a JSObject or an oddball value, then they are not
498 : // equal since their pointers are different.
499 : // There is no test for undetectability in strict equality.
500 :
501 : // If the first object is a JS object, we have done pointer comparison.
502 : STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
503 : Label first_non_object;
504 10599 : __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
505 10599 : __ j(below, &first_non_object, Label::kNear);
506 : // Return non-zero (rax, a tagged heap object pointer, is not zero).
507 : Label return_not_equal;
508 : STATIC_ASSERT(kHeapObjectTag != 0);
509 10599 : __ bind(&return_not_equal);
510 10599 : __ ret(0);
511 :
512 10599 : __ bind(&first_non_object);
513 : // Check for oddballs: true, false, null, undefined.
514 10599 : __ CmpInstanceType(rcx, ODDBALL_TYPE);
515 10599 : __ j(equal, &return_not_equal);
516 :
517 10599 : __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
518 10599 : __ j(above_equal, &return_not_equal);
519 :
520 : // Check for oddballs: true, false, null, undefined.
521 10599 : __ CmpInstanceType(rcx, ODDBALL_TYPE);
522 10599 : __ j(equal, &return_not_equal);
523 :
524 : // Fall through to the general case.
525 : }
526 12086 : __ bind(&slow);
527 : }
528 :
529 : // Generate the number comparison code.
530 : Label non_number_comparison;
531 : Label unordered;
532 13198 : FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
533 : __ xorl(rax, rax);
534 : __ xorl(rcx, rcx);
535 13198 : __ Ucomisd(xmm0, xmm1);
536 :
537 : // Don't base result on EFLAGS when a NaN is involved.
538 13198 : __ j(parity_even, &unordered, Label::kNear);
539 : // Return a result of -1, 0, or 1, based on EFLAGS.
540 13198 : __ setcc(above, rax);
541 13198 : __ setcc(below, rcx);
542 13198 : __ subp(rax, rcx);
543 13198 : __ ret(0);
544 :
545 : // If one of the numbers was NaN, then the result is always false.
546 : // The cc is never not-equal.
547 13198 : __ bind(&unordered);
548 : DCHECK(cc != not_equal);
549 13198 : if (cc == less || cc == less_equal) {
550 576 : __ Set(rax, 1);
551 : } else {
552 12622 : __ Set(rax, -1);
553 : }
554 13198 : __ ret(0);
555 :
556 : // The number comparison code did not provide a valid result.
557 13198 : __ bind(&non_number_comparison);
558 :
559 : // Fast negative check for internalized-to-internalized equality.
560 : Label check_for_strings;
561 13198 : if (cc == equal) {
562 : BranchIfNotInternalizedString(
563 12086 : masm, &check_for_strings, rax, kScratchRegister);
564 : BranchIfNotInternalizedString(
565 12086 : masm, &check_for_strings, rdx, kScratchRegister);
566 :
567 : // We've already checked for object identity, so if both operands are
568 : // internalized strings they aren't equal. Register rax already
569 : // holds a non-zero value, which indicates not equal, so just return.
570 12086 : __ ret(0);
571 : }
572 :
573 13198 : __ bind(&check_for_strings);
574 :
575 : __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
576 13198 : &check_unequal_objects);
577 :
578 : // Inline comparison of one-byte strings.
579 13198 : if (cc == equal) {
580 12086 : StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
581 : } else {
582 : StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
583 1112 : rdi, r8);
584 : }
585 :
586 : #ifdef DEBUG
587 : __ Abort(kUnexpectedFallThroughFromStringComparison);
588 : #endif
589 :
590 13198 : __ bind(&check_unequal_objects);
591 25284 : if (cc == equal && !strict()) {
592 : // Not strict equality. Objects are unequal if
593 : // they are both JSObjects and not undetectable,
594 : // and their pointers are different.
595 : Label return_equal, return_unequal, undetectable;
596 : // At most one is a smi, so we can test for smi by adding the two.
597 : // A smi plus a heap object has the low bit set, a heap object plus
598 : // a heap object has the low bit clear.
599 : STATIC_ASSERT(kSmiTag == 0);
600 : STATIC_ASSERT(kSmiTagMask == 1);
601 2974 : __ leap(rcx, Operand(rax, rdx, times_1, 0));
602 1487 : __ testb(rcx, Immediate(kSmiTagMask));
603 1487 : __ j(not_zero, &runtime_call, Label::kNear);
604 :
605 : __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
606 : __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
607 : __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
608 1487 : Immediate(1 << Map::kIsUndetectable));
609 1487 : __ j(not_zero, &undetectable, Label::kNear);
610 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
611 1487 : Immediate(1 << Map::kIsUndetectable));
612 1487 : __ j(not_zero, &return_unequal, Label::kNear);
613 :
614 1487 : __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE);
615 1487 : __ j(below, &runtime_call, Label::kNear);
616 1487 : __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE);
617 1487 : __ j(below, &runtime_call, Label::kNear);
618 :
619 1487 : __ bind(&return_unequal);
620 : // Return non-equal by returning the non-zero object pointer in rax.
621 1487 : __ ret(0);
622 :
623 1487 : __ bind(&undetectable);
624 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
625 1487 : Immediate(1 << Map::kIsUndetectable));
626 1487 : __ j(zero, &return_unequal, Label::kNear);
627 :
628 : // If both sides are JSReceivers, then the result is false according to
629 : // the HTML specification, which says that only comparisons with null or
630 : // undefined are affected by special casing for document.all.
631 1487 : __ CmpInstanceType(rbx, ODDBALL_TYPE);
632 1487 : __ j(zero, &return_equal, Label::kNear);
633 1487 : __ CmpInstanceType(rcx, ODDBALL_TYPE);
634 1487 : __ j(not_zero, &return_unequal, Label::kNear);
635 :
636 1487 : __ bind(&return_equal);
637 1487 : __ Set(rax, EQUAL);
638 1487 : __ ret(0);
639 : }
640 13198 : __ bind(&runtime_call);
641 :
642 13198 : if (cc == equal) {
643 : {
644 12086 : FrameScope scope(masm, StackFrame::INTERNAL);
645 12086 : __ Push(rsi);
646 10599 : __ Call(strict() ? isolate()->builtins()->StrictEqual()
647 1487 : : isolate()->builtins()->Equal(),
648 24172 : RelocInfo::CODE_TARGET);
649 12086 : __ Pop(rsi);
650 : }
651 : // Turn true into 0 and false into some non-zero value.
652 : STATIC_ASSERT(EQUAL == 0);
653 12086 : __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
654 12086 : __ subp(rax, rdx);
655 12086 : __ Ret();
656 : } else {
657 : // Push arguments below the return address to prepare jump to builtin.
658 : __ PopReturnAddressTo(rcx);
659 1112 : __ Push(rdx);
660 1112 : __ Push(rax);
661 1112 : __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
662 : __ PushReturnAddressFrom(rcx);
663 1112 : __ TailCallRuntime(Runtime::kCompare);
664 : }
665 :
666 13198 : __ bind(&miss);
667 13198 : GenerateMiss(masm);
668 13198 : }
669 :
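The smi fast path at the top of GenerateGeneric compares by subtraction: when subp overflows, the computed difference has the wrong sign, and notp (which can never yield zero there, since the operands differ) restores it. A small sketch of why that works, assuming GCC/Clang's __builtin_sub_overflow:

#include <cstdint>

// left - right as a three-way comparison result. On signed overflow the
// difference wraps to the wrong sign; ~diff (= -diff - 1) flips it back.
int64_t SmiCompare(int64_t left, int64_t right) {
  int64_t diff;
  if (!__builtin_sub_overflow(left, right, &diff)) return diff;
  return ~diff;  // Cannot be zero: overflow implies left != right.
}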
670 :
671 9220 : static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
672 : // rax : number of arguments to the construct function
673 : // rbx : feedback vector
674 : // rdx : slot in feedback vector (Smi)
675 : // rdi : the function to call
676 9220 : FrameScope scope(masm, StackFrame::INTERNAL);
677 :
678 : // Number-of-arguments register must be smi-tagged to call out.
679 9220 : __ Integer32ToSmi(rax, rax);
680 9220 : __ Push(rax);
681 9220 : __ Push(rdi);
682 9220 : __ Integer32ToSmi(rdx, rdx);
683 9220 : __ Push(rdx);
684 9220 : __ Push(rbx);
685 9220 : __ Push(rsi);
686 :
687 9220 : __ CallStub(stub);
688 :
689 9220 : __ Pop(rsi);
690 9220 : __ Pop(rbx);
691 9220 : __ Pop(rdx);
692 9220 : __ Pop(rdi);
693 9220 : __ Pop(rax);
694 9220 : __ SmiToInteger32(rdx, rdx);
695 9220 : __ SmiToInteger32(rax, rax);
696 9220 : }
697 :
698 :
699 4610 : static void GenerateRecordCallTarget(MacroAssembler* masm) {
700 : // Cache the called function in a feedback vector slot. Cache states
701 : // are uninitialized, monomorphic (indicated by a JSFunction), and
702 : // megamorphic.
703 : // rax : number of arguments to the construct function
704 : // rbx : feedback vector
705 : // rdx : slot in feedback vector (Smi)
706 : // rdi : the function to call
707 : Isolate* isolate = masm->isolate();
708 : Label initialize, done, miss, megamorphic, not_array_function;
709 :
710 : // Load the cache state into r11.
711 4610 : __ SmiToInteger32(rdx, rdx);
712 : __ movp(r11,
713 4610 : FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
714 :
715 : // A monomorphic cache hit or an already megamorphic state: invoke the
716 : // function without changing the state.
717 : // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read
718 : // at this position in a symbol (see static asserts in feedback-vector.h).
719 : Label check_allocation_site;
720 : __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
721 4610 : __ j(equal, &done, Label::kFar);
722 4610 : __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
723 4610 : __ j(equal, &done, Label::kFar);
724 : __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
725 4610 : Heap::kWeakCellMapRootIndex);
726 4610 : __ j(not_equal, &check_allocation_site);
727 :
728 : // If the weak cell is cleared, we have a new chance to become monomorphic.
729 4610 : __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
730 4610 : __ j(equal, &initialize);
731 4610 : __ jmp(&megamorphic);
732 :
733 4610 : __ bind(&check_allocation_site);
734 : // If we came here, we need to see if we are the array function.
735 : // If we didn't have a matching function, and we didn't find the megamorphic
736 : // sentinel, then we have in the slot either some other function or an
737 : // AllocationSite.
738 4610 : __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
739 4610 : __ j(not_equal, &miss);
740 :
741 : // Make sure the function is the Array() function
742 4610 : __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
743 4610 : __ cmpp(rdi, r11);
744 4610 : __ j(not_equal, &megamorphic);
745 4610 : __ jmp(&done);
746 :
747 4610 : __ bind(&miss);
748 :
749 : // A monomorphic miss (i.e., here the cache is not uninitialized) goes
750 : // megamorphic.
751 4610 : __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex);
752 4610 : __ j(equal, &initialize);
753 : // MegamorphicSentinel is an immortal immovable object (undefined) so no
754 : // write-barrier is needed.
755 4610 : __ bind(&megamorphic);
756 : __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
757 4610 : FeedbackVector::MegamorphicSentinel(isolate));
758 4610 : __ jmp(&done);
759 :
760 : // An uninitialized cache is patched with the function or sentinel to
761 : // indicate the ElementsKind if function is the Array constructor.
762 4610 : __ bind(&initialize);
763 :
764 : // Make sure the function is the Array() function
765 4610 : __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
766 4610 : __ cmpp(rdi, r11);
767 4610 : __ j(not_equal, ¬_array_function);
768 :
769 : CreateAllocationSiteStub create_stub(isolate);
770 4610 : CallStubInRecordCallTarget(masm, &create_stub);
771 4610 : __ jmp(&done);
772 :
773 4610 : __ bind(¬_array_function);
774 : CreateWeakCellStub weak_cell_stub(isolate);
775 4610 : CallStubInRecordCallTarget(masm, &weak_cell_stub);
776 :
777 4610 : __ bind(&done);
778 : // Increment the call count for all function calls.
779 : __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
780 : FixedArray::kHeaderSize + kPointerSize),
781 4610 : Smi::FromInt(1));
782 4610 : }
783 :
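GenerateRecordCallTarget is a small state machine over the feedback slot: an uninitialized slot becomes monomorphic (a WeakCell for the target, or an AllocationSite when the target is the Array function), a monomorphic miss goes megamorphic, and megamorphic is terminal. The transitions, as a hedged sketch (type and function names are illustrative, not V8's):

enum class SlotState { kUninitialized, kMonomorphic, kMegamorphic };

// Transition taken each time a call is recorded against a slot. A cleared
// WeakCell counts as "same or cleared" and re-initializes the slot.
SlotState NextState(SlotState state, bool same_target_or_cleared) {
  switch (state) {
    case SlotState::kUninitialized:
      return SlotState::kMonomorphic;  // Patch in a WeakCell/AllocationSite.
    case SlotState::kMonomorphic:
      return same_target_or_cleared ? SlotState::kMonomorphic
                                    : SlotState::kMegamorphic;
    case SlotState::kMegamorphic:
      return SlotState::kMegamorphic;  // Sentinel stays put.
  }
  return state;  // Unreachable.
}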
784 :
785 4610 : void CallConstructStub::Generate(MacroAssembler* masm) {
786 : // rax : number of arguments
787 : // rbx : feedback vector
788 : // rdx : slot in feedback vector (Smi)
789 : // rdi : constructor function
790 :
791 : Label non_function;
792 : // Check that the constructor is not a smi.
793 4610 : __ JumpIfSmi(rdi, &non_function);
794 : // Check that constructor is a JSFunction.
795 4610 : __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
796 4610 : __ j(not_equal, &non_function);
797 :
798 4610 : GenerateRecordCallTarget(masm);
799 :
800 : Label feedback_register_initialized;
801 : // Put the AllocationSite from the feedback vector into rbx, or undefined.
802 : __ movp(rbx,
803 : FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
804 4610 : __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
805 4610 : __ j(equal, &feedback_register_initialized, Label::kNear);
806 4610 : __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
807 4610 : __ bind(&feedback_register_initialized);
808 :
809 4610 : __ AssertUndefinedOrAllocationSite(rbx);
810 :
811 : // Pass new target to construct stub.
812 : __ movp(rdx, rdi);
813 :
814 : // Tail call to the function-specific construct stub (still in the caller
815 : // context at this point).
816 : __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
817 : __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
818 : __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
819 4610 : __ jmp(rcx);
820 :
821 4610 : __ bind(&non_function);
822 : __ movp(rdx, rdi);
823 4610 : __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
824 4610 : }
825 :
826 15651 : bool CEntryStub::NeedsImmovableCode() {
827 15651 : return false;
828 : }
829 :
830 :
831 43 : void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
832 43 : CEntryStub::GenerateAheadOfTime(isolate);
833 43 : StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
834 43 : StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
835 : // It is important that the store buffer overflow stubs are generated first.
836 43 : CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
837 43 : CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
838 43 : CreateWeakCellStub::GenerateAheadOfTime(isolate);
839 43 : BinaryOpICStub::GenerateAheadOfTime(isolate);
840 43 : BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
841 43 : StoreFastElementStub::GenerateAheadOfTime(isolate);
842 43 : }
843 :
844 :
845 60643 : void CodeStub::GenerateFPStubs(Isolate* isolate) {
846 60643 : }
847 :
848 :
849 43 : void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
850 : CEntryStub stub(isolate, 1, kDontSaveFPRegs);
851 43 : stub.GetCode();
852 : CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
853 43 : save_doubles.GetCode();
854 43 : }
855 :
856 :
857 78334 : void CEntryStub::Generate(MacroAssembler* masm) {
858 : // rax: number of arguments including receiver
859 : // rbx: pointer to C function (C callee-saved)
860 : // rbp: frame pointer of calling JS frame (restored after C call)
861 : // rsp: stack pointer (restored after C call)
862 : // rsi: current context (restored)
863 : //
864 : // If argv_in_register():
865 : // r15: pointer to the first argument
866 :
867 15657 : ProfileEntryHookStub::MaybeCallEntryHook(masm);
868 :
869 : #ifdef _WIN64
870 : // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
871 : // stack to be aligned to 16 bytes. It only allows a single-word to be
872 : // returned in register rax. Larger return sizes must be written to an address
873 : // passed as a hidden first argument.
874 : const Register kCCallArg0 = rcx;
875 : const Register kCCallArg1 = rdx;
876 : const Register kCCallArg2 = r8;
877 : const Register kCCallArg3 = r9;
878 : const int kArgExtraStackSpace = 2;
879 : const int kMaxRegisterResultSize = 1;
880 : #else
881 : // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
882 : // are returned in rax, and a struct of two pointers is returned in rax+rdx.
883 : // Larger return sizes must be written to an address passed as a hidden first
884 : // argument.
885 : const Register kCCallArg0 = rdi;
886 : const Register kCCallArg1 = rsi;
887 : const Register kCCallArg2 = rdx;
888 : const Register kCCallArg3 = rcx;
889 : const int kArgExtraStackSpace = 0;
890 : const int kMaxRegisterResultSize = 2;
891 : #endif // _WIN64
892 :
893 : // Enter the exit frame that transitions from JavaScript to C++.
894 : int arg_stack_space =
895 15657 : kArgExtraStackSpace +
896 : (result_size() <= kMaxRegisterResultSize ? 0 : result_size());
897 15657 : if (argv_in_register()) {
898 : DCHECK(!save_doubles());
899 : DCHECK(!is_builtin_exit());
900 86 : __ EnterApiExitFrame(arg_stack_space);
901 : // Move argc into r14 (argv is already in r15).
902 86 : __ movp(r14, rax);
903 : } else {
904 : __ EnterExitFrame(
905 : arg_stack_space, save_doubles(),
906 31142 : is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
907 : }
908 :
909 : // rbx: pointer to builtin function (C callee-saved).
910 : // rbp: frame pointer of exit frame (restored after C call).
911 : // rsp: stack pointer (restored after C call).
912 : // r14: number of arguments including receiver (C callee-saved).
913 : // r15: argv pointer (C callee-saved).
914 :
915 : // Check stack alignment.
916 15657 : if (FLAG_debug_code) {
917 31 : __ CheckStackAlignment();
918 : }
919 :
920 : // Call C function. The arguments object will be created by stubs declared by
921 : // DECLARE_RUNTIME_FUNCTION().
922 15657 : if (result_size() <= kMaxRegisterResultSize) {
923 : // Pass a pointer to the Arguments object as the first argument.
924 : // Return result in single register (rax), or a register pair (rax, rdx).
925 15608 : __ movp(kCCallArg0, r14); // argc.
926 : __ movp(kCCallArg1, r15); // argv.
927 125287 : __ Move(kCCallArg2, ExternalReference::isolate_address(isolate()));
928 : } else {
929 : DCHECK_LE(result_size(), 3);
930 : // Pass a pointer to the result location as the first argument.
931 49 : __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
932 : // Pass a pointer to the Arguments object as the second argument.
933 : __ movp(kCCallArg1, r14); // argc.
934 : __ movp(kCCallArg2, r15); // argv.
935 49 : __ Move(kCCallArg3, ExternalReference::isolate_address(isolate()));
936 : }
937 15657 : __ call(rbx);
938 :
939 15657 : if (result_size() > kMaxRegisterResultSize) {
940 : // Read result values stored on stack. Result is stored
941 : // above the two Arguments object slots on Win64.
942 : DCHECK_LE(result_size(), 3);
943 : __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
944 : __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
945 49 : if (result_size() > 2) {
946 : __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2));
947 : }
948 : }
949 : // Result is in rax, rdx:rax or r8:rdx:rax - do not destroy these registers!
950 :
951 : // Check result for exception sentinel.
952 : Label exception_returned;
953 15657 : __ CompareRoot(rax, Heap::kExceptionRootIndex);
954 15657 : __ j(equal, &exception_returned);
955 :
956 : // Check that there is no pending exception, otherwise we
957 : // should have returned the exception sentinel.
958 15657 : if (FLAG_debug_code) {
959 : Label okay;
960 31 : __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
961 : ExternalReference pending_exception_address(
962 31 : Isolate::kPendingExceptionAddress, isolate());
963 : Operand pending_exception_operand =
964 31 : masm->ExternalOperand(pending_exception_address);
965 : __ cmpp(r14, pending_exception_operand);
966 31 : __ j(equal, &okay, Label::kNear);
967 31 : __ int3();
968 31 : __ bind(&okay);
969 : }
970 :
971 : // Exit the JavaScript to C++ exit frame.
972 31314 : __ LeaveExitFrame(save_doubles(), !argv_in_register());
973 15657 : __ ret(0);
974 :
975 : // Handling of exception.
976 15657 : __ bind(&exception_returned);
977 :
978 : ExternalReference pending_handler_context_address(
979 15657 : Isolate::kPendingHandlerContextAddress, isolate());
980 : ExternalReference pending_handler_code_address(
981 15657 : Isolate::kPendingHandlerCodeAddress, isolate());
982 : ExternalReference pending_handler_offset_address(
983 15657 : Isolate::kPendingHandlerOffsetAddress, isolate());
984 : ExternalReference pending_handler_fp_address(
985 15657 : Isolate::kPendingHandlerFPAddress, isolate());
986 : ExternalReference pending_handler_sp_address(
987 15657 : Isolate::kPendingHandlerSPAddress, isolate());
988 :
989 : // Ask the runtime for help to determine the handler. This will set rax to
990 : // contain the current pending exception, don't clobber it.
991 : ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
992 15657 : isolate());
993 : {
994 15657 : FrameScope scope(masm, StackFrame::MANUAL);
995 : __ movp(arg_reg_1, Immediate(0)); // argc.
996 : __ movp(arg_reg_2, Immediate(0)); // argv.
997 15657 : __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
998 15657 : __ PrepareCallCFunction(3);
999 15657 : __ CallCFunction(find_handler, 3);
1000 : }
1001 :
1002 : // Retrieve the handler context, SP and FP.
1003 15657 : __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
1004 15657 : __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
1005 15657 : __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));
1006 :
1007 : // If the handler is a JS frame, restore the context to the frame. Note that
1008 : // the context will be set to (rsi == 0) for non-JS frames.
1009 : Label skip;
1010 : __ testp(rsi, rsi);
1011 15657 : __ j(zero, &skip, Label::kNear);
1012 31314 : __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
1013 15657 : __ bind(&skip);
1014 :
1015 : // Compute the handler entry address and jump to it.
1016 15657 : __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
1017 15657 : __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
1018 : __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
1019 15657 : __ jmp(rdi);
1020 15657 : }
1021 :
1022 :
1023 258 : void JSEntryStub::Generate(MacroAssembler* masm) {
1024 : Label invoke, handler_entry, exit;
1025 : Label not_outermost_js, not_outermost_js_2;
1026 :
1027 86 : ProfileEntryHookStub::MaybeCallEntryHook(masm);
1028 :
1029 : { // NOLINT. Scope block confuses linter.
1030 : MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
1031 : // Set up frame.
1032 86 : __ pushq(rbp);
1033 : __ movp(rbp, rsp);
1034 :
1035 : // Push the stack frame type.
1036 86 : __ Push(Immediate(StackFrame::TypeToMarker(type()))); // context slot
1037 430 : ExternalReference context_address(Isolate::kContextAddress, isolate());
1038 86 : __ Load(kScratchRegister, context_address);
1039 86 : __ Push(kScratchRegister); // context
1040 : // Save callee-saved registers (X64/X32/Win64 calling conventions).
1041 86 : __ pushq(r12);
1042 86 : __ pushq(r13);
1043 86 : __ pushq(r14);
1044 86 : __ pushq(r15);
1045 : #ifdef _WIN64
1046 : __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
1047 : __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
1048 : #endif
1049 86 : __ pushq(rbx);
1050 :
1051 : #ifdef _WIN64
1052 : // On Win64 XMM6-XMM15 are callee-save
1053 : __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
1054 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
1055 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
1056 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
1057 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
1058 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
1059 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
1060 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
1061 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
1062 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
1063 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
1064 : #endif
1065 :
1066 : // Set up the roots and smi constant registers.
1067 : // Needs to be done before any further smi loads.
1068 86 : __ InitializeRootRegister();
1069 : }
1070 :
1071 : // Save copies of the top frame descriptor on the stack.
1072 86 : ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
1073 : {
1074 86 : Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
1075 86 : __ Push(c_entry_fp_operand);
1076 : }
1077 :
1078 : // If this is the outermost JS call, set js_entry_sp value.
1079 86 : ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
1080 86 : __ Load(rax, js_entry_sp);
1081 : __ testp(rax, rax);
1082 86 : __ j(not_zero, ¬_outermost_js);
1083 86 : __ Push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
1084 : __ movp(rax, rbp);
1085 86 : __ Store(js_entry_sp, rax);
1086 : Label cont;
1087 86 : __ jmp(&cont);
1088 86 : __ bind(¬_outermost_js);
1089 86 : __ Push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
1090 86 : __ bind(&cont);
1091 :
1092 : // Jump to a faked try block that does the invoke, with a faked catch
1093 : // block that sets the pending exception.
1094 86 : __ jmp(&invoke);
1095 86 : __ bind(&handler_entry);
1096 86 : handler_offset_ = handler_entry.pos();
1097 : // Caught exception: Store result (exception) in the pending exception
1098 : // field in the JSEnv and return a failure sentinel.
1099 : ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
1100 86 : isolate());
1101 86 : __ Store(pending_exception, rax);
1102 86 : __ LoadRoot(rax, Heap::kExceptionRootIndex);
1103 86 : __ jmp(&exit);
1104 :
1105 : // Invoke: Link this frame into the handler chain.
1106 86 : __ bind(&invoke);
1107 86 : __ PushStackHandler();
1108 :
1109 : // Fake a receiver (NULL).
1110 86 : __ Push(Immediate(0)); // receiver
1111 :
1112 : // Invoke the function by calling through JS entry trampoline builtin and
1113 : // pop the faked function when we return. We load the address from an
1114 : // external reference instead of inlining the call target address directly
1115 : // in the code, because the builtin stubs may not have been generated yet
1116 : // at the time this code is generated.
1117 86 : if (type() == StackFrame::ENTRY_CONSTRUCT) {
1118 : ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
1119 43 : isolate());
1120 43 : __ Load(rax, construct_entry);
1121 : } else {
1122 43 : ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
1123 43 : __ Load(rax, entry);
1124 : }
1125 : __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
1126 86 : __ call(kScratchRegister);
1127 :
1128 : // Unlink this frame from the handler chain.
1129 86 : __ PopStackHandler();
1130 :
1131 86 : __ bind(&exit);
1132 : // Check if the current stack frame is marked as the outermost JS frame.
1133 86 : __ Pop(rbx);
1134 86 : __ cmpp(rbx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
1135 86 : __ j(not_equal, ¬_outermost_js_2);
1136 : __ Move(kScratchRegister, js_entry_sp);
1137 172 : __ movp(Operand(kScratchRegister, 0), Immediate(0));
1138 86 : __ bind(¬_outermost_js_2);
1139 :
1140 : // Restore the top frame descriptor from the stack.
1141 86 : { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
1142 86 : __ Pop(c_entry_fp_operand);
1143 : }
1144 :
1145 : // Restore callee-saved registers (X64 conventions).
1146 : #ifdef _WIN64
1147 : // On Win64 XMM6-XMM15 are callee-save
1148 : __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
1149 : __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
1150 : __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
1151 : __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
1152 : __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
1153 : __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
1154 : __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
1155 : __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
1156 : __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
1157 : __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
1158 : __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
1159 : #endif
1160 :
1161 86 : __ popq(rbx);
1162 : #ifdef _WIN64
1163 : // Callee-saved in the Win64 ABI, arguments/volatile in the AMD64 ABI.
1164 : __ popq(rsi);
1165 : __ popq(rdi);
1166 : #endif
1167 86 : __ popq(r15);
1168 86 : __ popq(r14);
1169 86 : __ popq(r13);
1170 86 : __ popq(r12);
1171 86 : __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers
1172 :
1173 : // Restore frame pointer and return.
1174 86 : __ popq(rbp);
1175 86 : __ ret(0);
1176 86 : }
1177 :
1178 :
1179 : // -------------------------------------------------------------------------
1180 : // StringCharCodeAtGenerator
1181 :
1182 64 : void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
1183 : // If the receiver is a smi trigger the non-string case.
1184 64 : if (check_mode_ == RECEIVER_IS_UNKNOWN) {
1185 64 : __ JumpIfSmi(object_, receiver_not_string_);
1186 :
1187 : // Fetch the instance type of the receiver into result register.
1188 64 : __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
1189 : __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1190 : // If the receiver is not a string trigger the non-string case.
1191 64 : __ testb(result_, Immediate(kIsNotStringMask));
1192 64 : __ j(not_zero, receiver_not_string_);
1193 : }
1194 :
1195 : // If the index is non-smi trigger the non-smi case.
1196 64 : __ JumpIfNotSmi(index_, &index_not_smi_);
1197 64 : __ bind(&got_smi_index_);
1198 :
1199 : // Check for index out of range.
1200 64 : __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
1201 64 : __ j(above_equal, index_out_of_range_);
1202 :
1203 64 : __ SmiToInteger32(index_, index_);
1204 :
1205 : StringCharLoadGenerator::Generate(
1206 64 : masm, object_, index_, result_, &call_runtime_);
1207 :
1208 64 : __ Integer32ToSmi(result_, result_);
1209 64 : __ bind(&exit_);
1210 64 : }
1211 :
1212 :
1213 64 : void StringCharCodeAtGenerator::GenerateSlow(
1214 64 : MacroAssembler* masm, EmbedMode embed_mode,
1215 : const RuntimeCallHelper& call_helper) {
1216 64 : __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
1217 :
1218 : Factory* factory = masm->isolate()->factory();
1219 : // Index is not a smi.
1220 64 : __ bind(&index_not_smi_);
1221 : // If index is a heap number, try converting it to an integer.
1222 : __ CheckMap(index_,
1223 : factory->heap_number_map(),
1224 : index_not_number_,
1225 128 : DONT_DO_SMI_CHECK);
1226 64 : call_helper.BeforeCall(masm);
1227 64 : if (embed_mode == PART_OF_IC_HANDLER) {
1228 0 : __ Push(LoadWithVectorDescriptor::VectorRegister());
1229 0 : __ Push(LoadDescriptor::SlotRegister());
1230 : }
1231 64 : __ Push(object_);
1232 64 : __ Push(index_); // Consumed by runtime conversion function.
1233 64 : __ CallRuntime(Runtime::kNumberToSmi);
1234 64 : if (!index_.is(rax)) {
1235 : // Save the conversion result before the pop instructions below
1236 : // have a chance to overwrite it.
1237 : __ movp(index_, rax);
1238 : }
1239 64 : __ Pop(object_);
1240 64 : if (embed_mode == PART_OF_IC_HANDLER) {
1241 0 : __ Pop(LoadDescriptor::SlotRegister());
1242 0 : __ Pop(LoadWithVectorDescriptor::VectorRegister());
1243 : }
1244 : // Reload the instance type.
1245 : __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
1246 : __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1247 64 : call_helper.AfterCall(masm);
1248 : // If index is still not a smi, it must be out of range.
1249 64 : __ JumpIfNotSmi(index_, index_out_of_range_);
1250 : // Otherwise, return to the fast path.
1251 64 : __ jmp(&got_smi_index_);
1252 :
1253 : // Call runtime. We get here when the receiver is a string and the
1254 : // index is a number, but the code of getting the actual character
1255 : // is too complex (e.g., when the string needs to be flattened).
1256 64 : __ bind(&call_runtime_);
1257 64 : call_helper.BeforeCall(masm);
1258 64 : __ Push(object_);
1259 64 : __ Integer32ToSmi(index_, index_);
1260 64 : __ Push(index_);
1261 64 : __ CallRuntime(Runtime::kStringCharCodeAtRT);
1262 64 : if (!result_.is(rax)) {
1263 : __ movp(result_, rax);
1264 : }
1265 64 : call_helper.AfterCall(masm);
1266 64 : __ jmp(&exit_);
1267 :
1268 64 : __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
1269 64 : }
1270 :
1271 15271 : void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
1272 : Register left,
1273 : Register right,
1274 : Register scratch1,
1275 : Register scratch2) {
1276 15271 : Register length = scratch1;
1277 :
1278 : // Compare lengths.
1279 : Label check_zero_length;
1280 15271 : __ movp(length, FieldOperand(left, String::kLengthOffset));
1281 15271 : __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
1282 15271 : __ j(equal, &check_zero_length, Label::kNear);
1283 : __ Move(rax, Smi::FromInt(NOT_EQUAL));
1284 15271 : __ ret(0);
1285 :
1286 : // Check if the length is zero.
1287 : Label compare_chars;
1288 15271 : __ bind(&check_zero_length);
1289 : STATIC_ASSERT(kSmiTag == 0);
1290 15271 : __ SmiTest(length);
1291 15271 : __ j(not_zero, &compare_chars, Label::kNear);
1292 : __ Move(rax, Smi::FromInt(EQUAL));
1293 15271 : __ ret(0);
1294 :
1295 : // Compare characters.
1296 15271 : __ bind(&compare_chars);
1297 : Label strings_not_equal;
1298 : GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
1299 15271 : &strings_not_equal, Label::kNear);
1300 :
1301 : // Characters are equal.
1302 : __ Move(rax, Smi::FromInt(EQUAL));
1303 15271 : __ ret(0);
1304 :
1305 : // Characters are not equal.
1306 15271 : __ bind(&strings_not_equal);
1307 : __ Move(rax, Smi::FromInt(NOT_EQUAL));
1308 15271 : __ ret(0);
1309 15271 : }
1310 :
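GenerateFlatOneByteStringEquals is: compare the length smis, short-circuit when both strings are empty, then run the shared character loop. Its shape in portable C++ (a sketch over raw bytes, not V8's String API):

#include <cstdint>
#include <cstring>

// Equality of two flat one-byte strings: lengths first, then bytes.
bool FlatOneByteEquals(const uint8_t* left, intptr_t left_length,
                       const uint8_t* right, intptr_t right_length) {
  if (left_length != right_length) return false;  // NOT_EQUAL.
  if (left_length == 0) return true;              // EQUAL: both empty.
  return std::memcmp(left, right, static_cast<size_t>(left_length)) == 0;
}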
1311 :
1312 1472 : void StringHelper::GenerateCompareFlatOneByteStrings(
1313 : MacroAssembler* masm, Register left, Register right, Register scratch1,
1314 : Register scratch2, Register scratch3, Register scratch4) {
1315 : // Ensure that you can always subtract a string length from a non-negative
1316 : // number (e.g. another length).
1317 : STATIC_ASSERT(String::kMaxLength < 0x7fffffff);
1318 :
1319 : // Find minimum length and length difference.
1320 1472 : __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
1321 : __ movp(scratch4, scratch1);
1322 : __ SmiSub(scratch4,
1323 : scratch4,
1324 1472 : FieldOperand(right, String::kLengthOffset));
1325 : // Register scratch4 now holds left.length - right.length.
1326 1472 : const Register length_difference = scratch4;
1327 : Label left_shorter;
1328 1472 : __ j(less, &left_shorter, Label::kNear);
1329 : // The right string isn't longer than the left one.
1330 : // Get the right string's length by subtracting the (non-negative) difference
1331 : // from the left string's length.
1332 1472 : __ SmiSub(scratch1, scratch1, length_difference);
1333 1472 : __ bind(&left_shorter);
1334 : // Register scratch1 now holds Min(left.length, right.length).
1335 1472 : const Register min_length = scratch1;
1336 :
1337 : Label compare_lengths;
1338 : // If min-length is zero, go directly to comparing lengths.
1339 1472 : __ SmiTest(min_length);
1340 1472 : __ j(zero, &compare_lengths, Label::kNear);
1341 :
1342 : // Compare loop.
1343 : Label result_not_equal;
1344 : GenerateOneByteCharsCompareLoop(
1345 : masm, left, right, min_length, scratch2, &result_not_equal,
1346 : // In debug-code mode, SmiTest below might push
1347 : // the target label outside the near range.
1348 1472 : Label::kFar);
1349 :
1350 : // Completed loop without finding different characters.
1351 : // Compare lengths (precomputed).
1352 1472 : __ bind(&compare_lengths);
1353 1472 : __ SmiTest(length_difference);
1354 : Label length_not_equal;
1355 1472 : __ j(not_zero, &length_not_equal, Label::kNear);
1356 :
1357 : // Result is EQUAL.
1358 : __ Move(rax, Smi::FromInt(EQUAL));
1359 1472 : __ ret(0);
1360 :
1361 : Label result_greater;
1362 : Label result_less;
1363 1472 : __ bind(&length_not_equal);
1364 1472 : __ j(greater, &result_greater, Label::kNear);
1365 1472 : __ jmp(&result_less, Label::kNear);
1366 1472 : __ bind(&result_not_equal);
1367 : // Unequal comparison of left to right, either character or length.
1368 1472 : __ j(above, &result_greater, Label::kNear);
1369 1472 : __ bind(&result_less);
1370 :
1371 : // Result is LESS.
1372 : __ Move(rax, Smi::FromInt(LESS));
1373 1472 : __ ret(0);
1374 :
1375 : // Result is GREATER.
1376 1472 : __ bind(&result_greater);
1377 : __ Move(rax, Smi::FromInt(GREATER));
1378 1472 : __ ret(0);
1379 1472 : }
1380 :
1381 :
1382 16743 : void StringHelper::GenerateOneByteCharsCompareLoop(
1383 : MacroAssembler* masm, Register left, Register right, Register length,
1384 : Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
1385 : // Change index to run from -length to -1 by adding length to string
1386 : // start. This means that the loop ends when index reaches zero, which
1387 : // doesn't need an additional compare.
1388 16743 : __ SmiToInteger32(length, length);
1389 : __ leap(left,
1390 16743 : FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
1391 : __ leap(right,
1392 : FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
1393 : __ negq(length);
1394 16743 : Register index = length; // index = -length;
1395 :
1396 : // Compare loop.
1397 : Label loop;
1398 16743 : __ bind(&loop);
1399 16743 : __ movb(scratch, Operand(left, index, times_1, 0));
1400 33486 : __ cmpb(scratch, Operand(right, index, times_1, 0));
1401 16743 : __ j(not_equal, chars_not_equal, near_jump);
1402 : __ incq(index);
1403 16743 : __ j(not_zero, &loop);
1404 16743 : }
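
// Illustrative sketch (not V8 code): the negative-index idiom above in plain
// C++. Biasing both pointers one-past-the-end and running an index from
// -length up to 0 lets the loop's termination test reuse the increment's
// zero flag, so no separate bounds compare is needed per iteration.
#include <cstddef>

static bool OneByteCharsEqualSketch(const char* left, const char* right,
                                    std::ptrdiff_t length) {
  const char* left_end = left + length;    // bias past the data, like leap
  const char* right_end = right + length;
  for (std::ptrdiff_t index = -length; index != 0; ++index) {
    if (left_end[index] != right_end[index]) return false;
  }
  return true;
}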
1405 :
1406 :
1407 3867 : void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
1408 : // ----------- S t a t e -------------
1409 : // -- rdx : left
1410 : // -- rax : right
1411 : // -- rsp[0] : return address
1412 : // -----------------------------------
1413 :
1414 : // Load rcx with the allocation site. We stick an undefined dummy value here
1415 : // and replace it with the real allocation site later when we instantiate this
1416 : // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
1417 11601 : __ Move(rcx, isolate()->factory()->undefined_value());
1418 :
1419 : // Make sure that we actually patched the allocation site.
1420 3867 : if (FLAG_debug_code) {
1421 0 : __ testb(rcx, Immediate(kSmiTagMask));
1422 0 : __ Assert(not_equal, kExpectedAllocationSite);
1423 : __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
1424 0 : isolate()->factory()->allocation_site_map());
1425 0 : __ Assert(equal, kExpectedAllocationSite);
1426 : }
1427 :
1428 : // Tail call into the stub that handles binary operations with allocation
1429 : // sites.
1430 3867 : BinaryOpWithAllocationSiteStub stub(isolate(), state());
1431 3867 : __ TailCallStub(&stub);
1432 3867 : }
1433 :
1434 :
1435 4202 : void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
1436 : DCHECK_EQ(CompareICState::BOOLEAN, state());
1437 : Label miss;
1438 : Label::Distance const miss_distance =
1439 2101 : masm->emit_debug_code() ? Label::kFar : Label::kNear;
1440 :
1441 2101 : __ JumpIfSmi(rdx, &miss, miss_distance);
1442 2101 : __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1443 2101 : __ JumpIfSmi(rax, &miss, miss_distance);
1444 : __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
1445 : __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
1446 : __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
1447 2101 : if (!Token::IsEqualityOp(op())) {
1448 : __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
1449 18 : __ AssertSmi(rax);
1450 : __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset));
1451 18 : __ AssertSmi(rdx);
1452 18 : __ pushq(rax);
1453 : __ movq(rax, rdx);
1454 18 : __ popq(rdx);
1455 : }
1456 2101 : __ subp(rax, rdx);
1457 2101 : __ Ret();
1458 :
1459 2101 : __ bind(&miss);
1460 2101 : GenerateMiss(masm);
1461 2101 : }
1462 :
1463 :
1464 15291 : void CompareICStub::GenerateSmis(MacroAssembler* masm) {
1465 : DCHECK(state() == CompareICState::SMI);
1466 : Label miss;
1467 15291 : __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
1468 :
1469 15291 : if (GetCondition() == equal) {
1470 : // For equality we do not care about the sign of the result.
1471 6538 : __ subp(rax, rdx);
1472 : } else {
1473 : Label done;
1474 8753 : __ subp(rdx, rax);
1475 8753 : __ j(no_overflow, &done, Label::kNear);
1476 : // Correct sign of result in case of overflow.
1477 : __ notp(rdx);
1478 8753 : __ bind(&done);
1479 : __ movp(rax, rdx);
1480 : }
1481 15291 : __ ret(0);
1482 :
1483 15291 : __ bind(&miss);
1484 15291 : GenerateMiss(masm);
1485 15291 : }
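
// Illustrative sketch (not V8 code): the overflow correction above in C++,
// using the GCC/Clang __builtin_sub_overflow intrinsic to stand in for the
// j(no_overflow) check. The relational result is left - right, but the
// subtraction can overflow and flip the sign; flipping all bits (the notp)
// restores a value with the correct sign, since ~x == -x - 1.
#include <cstdint>

static std::intptr_t CompareSmisSketch(std::intptr_t left,
                                       std::intptr_t right) {
  std::intptr_t diff;
  if (__builtin_sub_overflow(left, right, &diff)) {
    diff = ~diff;  // overflow flipped the sign; ~diff has the true sign
  }
  return diff;  // negative, zero, or positive, like rax in the stub
}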
1486 :
1487 :
1488 20544 : void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
1489 : DCHECK(state() == CompareICState::NUMBER);
1490 :
1491 : Label generic_stub;
1492 : Label unordered, maybe_undefined1, maybe_undefined2;
1493 : Label miss;
1494 :
1495 3424 : if (left() == CompareICState::SMI) {
1496 420 : __ JumpIfNotSmi(rdx, &miss);
1497 : }
1498 3424 : if (right() == CompareICState::SMI) {
1499 1076 : __ JumpIfNotSmi(rax, &miss);
1500 : }
1501 :
1502 : // Load left and right operand.
1503 : Label done, left, left_smi, right_smi;
1504 3424 : __ JumpIfSmi(rax, &right_smi, Label::kNear);
1505 15570 : __ CompareMap(rax, isolate()->factory()->heap_number_map());
1506 3424 : __ j(not_equal, &maybe_undefined1, Label::kNear);
1507 3424 : __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
1508 3424 : __ jmp(&left, Label::kNear);
1509 3424 : __ bind(&right_smi);
1510 3424 : __ SmiToInteger32(rcx, rax); // Can't clobber rax yet.
1511 3424 : __ Cvtlsi2sd(xmm1, rcx);
1512 :
1513 3424 : __ bind(&left);
1514 3424 : __ JumpIfSmi(rdx, &left_smi, Label::kNear);
1515 3424 : __ CompareMap(rdx, isolate()->factory()->heap_number_map());
1516 3424 : __ j(not_equal, &maybe_undefined2, Label::kNear);
1517 3424 : __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1518 3424 : __ jmp(&done);
1519 3424 : __ bind(&left_smi);
1520 3424 : __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet.
1521 3424 : __ Cvtlsi2sd(xmm0, rcx);
1522 :
1523 3424 : __ bind(&done);
1524 : // Compare operands
1525 3424 : __ Ucomisd(xmm0, xmm1);
1526 :
1527 : // Don't base result on EFLAGS when a NaN is involved.
1528 3424 : __ j(parity_even, &unordered, Label::kNear);
1529 :
1530 : // Return a result of -1, 0, or 1, based on EFLAGS.
 1531             :   // Use a mov, because a xor would clobber the flags set by Ucomisd.
1532 : __ movl(rax, Immediate(0));
1533 : __ movl(rcx, Immediate(0));
1534 3424 : __ setcc(above, rax); // Add one to zero if carry clear and not equal.
1535 3424 : __ sbbp(rax, rcx); // Subtract one if below (aka. carry set).
1536 3424 : __ ret(0);
1537 :
1538 3424 : __ bind(&unordered);
1539 3424 : __ bind(&generic_stub);
1540 : CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
1541 : CompareICState::GENERIC, CompareICState::GENERIC);
1542 3424 : __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
1543 :
1544 3424 : __ bind(&maybe_undefined1);
1545 3424 : if (Token::IsOrderedRelationalCompareOp(op())) {
1546 937 : __ Cmp(rax, isolate()->factory()->undefined_value());
1547 937 : __ j(not_equal, &miss);
1548 937 : __ JumpIfSmi(rdx, &unordered);
1549 937 : __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
1550 937 : __ j(not_equal, &maybe_undefined2, Label::kNear);
1551 937 : __ jmp(&unordered);
1552 : }
1553 :
1554 3424 : __ bind(&maybe_undefined2);
1555 3424 : if (Token::IsOrderedRelationalCompareOp(op())) {
1556 937 : __ Cmp(rdx, isolate()->factory()->undefined_value());
1557 937 : __ j(equal, &unordered);
1558 : }
1559 :
1560 3424 : __ bind(&miss);
1561 3424 : GenerateMiss(masm);
1562 3424 : }
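
// Illustrative sketch (not V8 code): the setcc/sbbp pair above is a
// branchless three-way compare on the ucomisd flags. The C++ equivalent,
// with the same NaN policy (unordered operands take the generic path):
#include <cmath>

static int CompareDoublesSketch(double left, double right, bool* unordered) {
  *unordered = std::isnan(left) || std::isnan(right);
  if (*unordered) return 0;  // caller must bail out to the generic stub
  return (left > right) - (left < right);  // GREATER, EQUAL, or LESS
}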
1563 :
1564 :
1565 3376 : void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
1566 : DCHECK(state() == CompareICState::INTERNALIZED_STRING);
1567 : DCHECK(GetCondition() == equal);
1568 :
1569 : // Registers containing left and right operands respectively.
1570 : Register left = rdx;
1571 : Register right = rax;
1572 : Register tmp1 = rcx;
1573 : Register tmp2 = rbx;
1574 :
1575 : // Check that both operands are heap objects.
1576 : Label miss;
1577 3376 : Condition cond = masm->CheckEitherSmi(left, right, tmp1);
1578 3376 : __ j(cond, &miss, Label::kNear);
1579 :
1580 : // Check that both operands are internalized strings.
1581 : __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
1582 : __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
1583 : __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
1584 : __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
1585 : STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
1586 3376 : __ orp(tmp1, tmp2);
1587 3376 : __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
1588 3376 : __ j(not_zero, &miss, Label::kNear);
1589 :
1590 : // Internalized strings are compared by identity.
1591 : Label done;
1592 3376 : __ cmpp(left, right);
1593 : // Make sure rax is non-zero. At this point input operands are
1594 : // guaranteed to be non-zero.
1595 : DCHECK(right.is(rax));
1596 3376 : __ j(not_equal, &done, Label::kNear);
1597 : STATIC_ASSERT(EQUAL == 0);
1598 : STATIC_ASSERT(kSmiTag == 0);
1599 : __ Move(rax, Smi::FromInt(EQUAL));
1600 3376 : __ bind(&done);
1601 3376 : __ ret(0);
1602 :
1603 3376 : __ bind(&miss);
1604 3376 : GenerateMiss(masm);
1605 3376 : }
1606 :
1607 :
1608 42 : void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
1609 : DCHECK(state() == CompareICState::UNIQUE_NAME);
1610 : DCHECK(GetCondition() == equal);
1611 :
1612 : // Registers containing left and right operands respectively.
1613 : Register left = rdx;
1614 : Register right = rax;
1615 : Register tmp1 = rcx;
1616 : Register tmp2 = rbx;
1617 :
1618 : // Check that both operands are heap objects.
1619 : Label miss;
1620 42 : Condition cond = masm->CheckEitherSmi(left, right, tmp1);
1621 42 : __ j(cond, &miss, Label::kNear);
1622 :
1623 : // Check that both operands are unique names. This leaves the instance
1624 : // types loaded in tmp1 and tmp2.
1625 : __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
1626 : __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
1627 : __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
1628 : __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
1629 :
1630 42 : __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
1631 42 : __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
1632 :
1633 : // Unique names are compared by identity.
1634 : Label done;
1635 42 : __ cmpp(left, right);
1636 : // Make sure rax is non-zero. At this point input operands are
1637 : // guaranteed to be non-zero.
1638 : DCHECK(right.is(rax));
1639 42 : __ j(not_equal, &done, Label::kNear);
1640 : STATIC_ASSERT(EQUAL == 0);
1641 : STATIC_ASSERT(kSmiTag == 0);
1642 : __ Move(rax, Smi::FromInt(EQUAL));
1643 42 : __ bind(&done);
1644 42 : __ ret(0);
1645 :
1646 42 : __ bind(&miss);
1647 42 : GenerateMiss(masm);
1648 42 : }
1649 :
1650 :
1651 7090 : void CompareICStub::GenerateStrings(MacroAssembler* masm) {
1652 : DCHECK(state() == CompareICState::STRING);
1653 : Label miss;
1654 :
1655 : bool equality = Token::IsEqualityOp(op());
1656 :
1657 : // Registers containing left and right operands respectively.
1658 : Register left = rdx;
1659 : Register right = rax;
1660 : Register tmp1 = rcx;
1661 : Register tmp2 = rbx;
1662 : Register tmp3 = rdi;
1663 :
1664 : // Check that both operands are heap objects.
1665 3545 : Condition cond = masm->CheckEitherSmi(left, right, tmp1);
1666 3545 : __ j(cond, &miss);
1667 :
1668 : // Check that both operands are strings. This leaves the instance
1669 : // types loaded in tmp1 and tmp2.
1670 : __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
1671 : __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
1672 : __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
1673 : __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
1674 : __ movp(tmp3, tmp1);
1675 : STATIC_ASSERT(kNotStringTag != 0);
1676 3545 : __ orp(tmp3, tmp2);
1677 3545 : __ testb(tmp3, Immediate(kIsNotStringMask));
1678 3545 : __ j(not_zero, &miss);
1679 :
1680 : // Fast check for identical strings.
1681 : Label not_same;
1682 3545 : __ cmpp(left, right);
 1683        3545 :   __ j(not_equal, &not_same, Label::kNear);
1684 : STATIC_ASSERT(EQUAL == 0);
1685 : STATIC_ASSERT(kSmiTag == 0);
1686 : __ Move(rax, Smi::FromInt(EQUAL));
1687 3545 : __ ret(0);
1688 :
1689 : // Handle not identical strings.
 1690        3545 :   __ bind(&not_same);
1691 :
1692 : // Check that both strings are internalized strings. If they are, we're done
1693 : // because we already know they are not identical. We also know they are both
1694 : // strings.
1695 3545 : if (equality) {
1696 : Label do_compare;
1697 : STATIC_ASSERT(kInternalizedTag == 0);
1698 3185 : __ orp(tmp1, tmp2);
1699 3185 : __ testb(tmp1, Immediate(kIsNotInternalizedMask));
1700 3185 : __ j(not_zero, &do_compare, Label::kNear);
1701 : // Make sure rax is non-zero. At this point input operands are
1702 : // guaranteed to be non-zero.
1703 : DCHECK(right.is(rax));
1704 3185 : __ ret(0);
1705 3185 : __ bind(&do_compare);
1706 : }
1707 :
1708 : // Check that both strings are sequential one-byte.
1709 : Label runtime;
1710 3545 : __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
1711 :
1712 : // Compare flat one-byte strings. Returns when done.
1713 3545 : if (equality) {
1714 : StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
1715 3185 : tmp2);
1716 : } else {
1717 : StringHelper::GenerateCompareFlatOneByteStrings(
1718 360 : masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
1719 : }
1720 :
1721 : // Handle more complex cases in runtime.
1722 3545 : __ bind(&runtime);
1723 3545 : if (equality) {
1724 : {
1725 3185 : FrameScope scope(masm, StackFrame::INTERNAL);
1726 3185 : __ Push(left);
1727 3185 : __ Push(right);
1728 3185 : __ CallRuntime(Runtime::kStringEqual);
1729 : }
1730 3185 : __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
1731 3185 : __ subp(rax, rdx);
1732 3185 : __ Ret();
1733 : } else {
1734 : __ PopReturnAddressTo(tmp1);
1735 360 : __ Push(left);
1736 360 : __ Push(right);
1737 : __ PushReturnAddressFrom(tmp1);
1738 360 : __ TailCallRuntime(Runtime::kStringCompare);
1739 : }
1740 :
1741 3545 : __ bind(&miss);
1742 3545 : GenerateMiss(masm);
1743 3545 : }
1744 :
1745 :
1746 597 : void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
1747 : DCHECK_EQ(CompareICState::RECEIVER, state());
1748 : Label miss;
1749 597 : Condition either_smi = masm->CheckEitherSmi(rdx, rax);
1750 597 : __ j(either_smi, &miss, Label::kNear);
1751 :
1752 : STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
1753 597 : __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
1754 597 : __ j(below, &miss, Label::kNear);
1755 597 : __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
1756 597 : __ j(below, &miss, Label::kNear);
1757 :
1758 : DCHECK_EQ(equal, GetCondition());
1759 597 : __ subp(rax, rdx);
1760 597 : __ ret(0);
1761 :
1762 597 : __ bind(&miss);
1763 597 : GenerateMiss(masm);
1764 597 : }
1765 :
1766 :
1767 4318 : void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
1768 : Label miss;
1769 2159 : Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
1770 2159 : Condition either_smi = masm->CheckEitherSmi(rdx, rax);
1771 2159 : __ j(either_smi, &miss, Label::kNear);
1772 :
1773 2159 : __ GetWeakValue(rdi, cell);
1774 : __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi);
1775 2159 : __ j(not_equal, &miss, Label::kNear);
1776 : __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi);
1777 2159 : __ j(not_equal, &miss, Label::kNear);
1778 :
1779 2159 : if (Token::IsEqualityOp(op())) {
1780 2058 : __ subp(rax, rdx);
1781 2058 : __ ret(0);
1782 : } else {
1783 : __ PopReturnAddressTo(rcx);
1784 101 : __ Push(rdx);
1785 101 : __ Push(rax);
1786 202 : __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition())));
1787 : __ PushReturnAddressFrom(rcx);
1788 101 : __ TailCallRuntime(Runtime::kCompare);
1789 : }
1790 :
1791 2159 : __ bind(&miss);
1792 2159 : GenerateMiss(masm);
1793 2159 : }
1794 :
1795 :
1796 133708 : void CompareICStub::GenerateMiss(MacroAssembler* masm) {
1797 : {
1798 : // Call the runtime system in a fresh internal frame.
1799 66854 : FrameScope scope(masm, StackFrame::INTERNAL);
1800 66854 : __ Push(rdx);
1801 66854 : __ Push(rax);
1802 66854 : __ Push(rdx);
1803 66854 : __ Push(rax);
1804 66854 : __ Push(Smi::FromInt(op()));
1805 66854 : __ CallRuntime(Runtime::kCompareIC_Miss);
1806 :
1807 : // Compute the entry point of the rewritten stub.
1808 66854 : __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
1809 66854 : __ Pop(rax);
1810 66854 : __ Pop(rdx);
1811 : }
1812 :
1813 : // Do a tail call to the rewritten stub.
1814 66854 : __ jmp(rdi);
1815 66854 : }
1816 :
1817 :
1818 7650 : void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
1819 : Label* miss,
1820 : Label* done,
1821 : Register properties,
1822 : Handle<Name> name,
1823 : Register r0) {
1824 : DCHECK(name->IsUniqueName());
 1825             :   // If the names of the slots probed for the hash value (probes 1 to
 1826             :   // kProbes - 1) are not equal to the name, and the kProbes-th slot is
 1827             :   // unused (its name is the undefined value), the hash table is guaranteed
 1828             :   // not to contain the property. This holds even if some slots hold deleted
 1829             :   // properties (their names are the hole value).
1830 6375 : for (int i = 0; i < kInlinedProbes; i++) {
1831 : // r0 points to properties hash.
1832 : // Compute the masked index: (hash + i + i * i) & mask.
1833 5100 : Register index = r0;
1834 : // Capacity is smi 2^n.
1835 5100 : __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
1836 5100 : __ decl(index);
1837 : __ andp(index,
1838 15300 : Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));
1839 :
1840 : // Scale the index by multiplying by the entry size.
1841 : STATIC_ASSERT(NameDictionary::kEntrySize == 3);
1842 10200 : __ leap(index, Operand(index, index, times_2, 0)); // index *= 3.
1843 :
1844 5100 : Register entity_name = r0;
1845 : // Having undefined at this place means the name is not contained.
1846 : STATIC_ASSERT(kSmiTagSize == 1);
1847 : __ movp(entity_name, Operand(properties,
1848 : index,
1849 : times_pointer_size,
1850 10200 : kElementsStartOffset - kHeapObjectTag));
1851 5100 : __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
1852 5100 : __ j(equal, done);
1853 :
1854 : // Stop if found the property.
1855 5100 : __ Cmp(entity_name, Handle<Name>(name));
1856 5100 : __ j(equal, miss);
1857 :
1858 : Label good;
1859 : // Check for the hole and skip.
1860 5100 : __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
1861 5100 : __ j(equal, &good, Label::kNear);
1862 :
1863 : // Check if the entry name is not a unique name.
1864 : __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
1865 : __ JumpIfNotUniqueNameInstanceType(
1866 5100 : FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
1867 5100 : __ bind(&good);
1868 : }
1869 :
1870 : NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
1871 : NEGATIVE_LOOKUP);
1872 1275 : __ Push(Handle<Object>(name));
1873 2550 : __ Push(Immediate(name->Hash()));
1874 1275 : __ CallStub(&stub);
1875 1275 : __ testp(r0, r0);
1876 1275 : __ j(not_zero, miss);
1877 1275 : __ jmp(done);
1878 1275 : }
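
// Illustrative sketch (not V8 code): the inlined probing scheme in C++.
// Capacity is a power of two, so "& mask" wraps the quadratic probe
// sequence, and every index is scaled by the 3-word entry size just like
// the leap above. The sentinel value and flat table layout are hypothetical
// stand-ins for the real dictionary representation.
#include <cstdint>
#include <vector>

static const std::uintptr_t kUndefinedSentinel = 0;  // slot never used
static const int kEntrySizeSketch = 3;               // key, value, details

// Returns false only when the key is provably absent; true means "found or
// inconclusive", i.e. fall back to the out-of-line stub as the code does.
static bool MightContainSketch(const std::vector<std::uintptr_t>& table,
                               std::uintptr_t key, std::uint32_t hash,
                               int probes) {
  std::uint32_t mask =
      static_cast<std::uint32_t>(table.size() / kEntrySizeSketch) - 1;
  for (int i = 0; i < probes; i++) {
    std::uint32_t index = (hash + i + i * i) & mask;  // masked probe index
    std::uintptr_t slot_key = table[index * kEntrySizeSketch];
    if (slot_key == kUndefinedSentinel) return false;  // provably absent
    if (slot_key == key) return true;                  // found the property
  }
  return true;  // inconclusive after the inlined probes
}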
1879 :
1880 1700 : void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
1881 : // This stub overrides SometimesSetsUpAFrame() to return false. That means
1882 : // we cannot call anything that could cause a GC from this stub.
1883 : // Stack frame on entry:
1884 : // rsp[0 * kPointerSize] : return address.
1885 : // rsp[1 * kPointerSize] : key's hash.
1886 : // rsp[2 * kPointerSize] : key.
1887 : // Registers:
1888 : // dictionary_: NameDictionary to probe.
1889 : // result_: used as scratch.
 1890             :   //   index_: will hold the index of the entry if the lookup succeeds;
 1891             :   //           it might alias with result_.
1892 : // Returns:
 1893             :   //  result_ is zero if the lookup failed, non-zero otherwise.
1894 :
1895 : Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
1896 :
1897 : Register scratch = result();
1898 :
1899 34 : __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
1900 34 : __ decl(scratch);
1901 34 : __ Push(scratch);
1902 :
 1903             :   // If the names of the slots probed for the hash value (probes 1 to
 1904             :   // kProbes - 1) are not equal to the name, and the kProbes-th slot is
 1905             :   // unused (its name is the undefined value), the hash table is guaranteed
 1906             :   // not to contain the property. This holds even if some slots hold deleted
 1907             :   // properties (their names are the hole value).
1908 : StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
1909 : kPointerSize);
1910 578 : for (int i = kInlinedProbes; i < kTotalProbes; i++) {
1911 : // Compute the masked index: (hash + i + i * i) & mask.
1912 544 : __ movp(scratch, args.GetArgumentOperand(1));
1913 544 : if (i > 0) {
1914 1632 : __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
1915 : }
1916 1088 : __ andp(scratch, Operand(rsp, 0));
1917 :
1918 : // Scale the index by multiplying by the entry size.
1919 : STATIC_ASSERT(NameDictionary::kEntrySize == 3);
1920 1088 : __ leap(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3.
1921 :
1922 : // Having undefined at this place means the name is not contained.
1923 : __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
1924 1088 : kElementsStartOffset - kHeapObjectTag));
1925 :
1926 1088 : __ Cmp(scratch, isolate()->factory()->undefined_value());
 1927         544 :     __ j(equal, &not_in_dictionary);
1928 :
1929 : // Stop if found the property.
1930 544 : __ cmpp(scratch, args.GetArgumentOperand(0));
1931 544 : __ j(equal, &in_dictionary);
1932 :
1933 1054 : if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
 1934             :       // lookup, we have to bail out, as this key might be equal to the
1935 : // lookup we have to bailout as this key might be equal to the
1936 : // key we are looking for.
1937 :
1938 : // Check if the entry name is not a unique name.
1939 : __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
1940 : __ JumpIfNotUniqueNameInstanceType(
1941 : FieldOperand(scratch, Map::kInstanceTypeOffset),
1942 510 : &maybe_in_dictionary);
1943 : }
1944 : }
1945 :
1946 34 : __ bind(&maybe_in_dictionary);
 1947             :   // If we are doing a negative lookup, then probing failure should be
 1948             :   // treated as a lookup success. For a positive lookup, probing failure
 1949             :   // should be treated as a lookup failure.
1950 34 : if (mode() == POSITIVE_LOOKUP) {
1951 : __ movp(scratch, Immediate(0));
1952 0 : __ Drop(1);
1953 0 : __ ret(2 * kPointerSize);
1954 : }
1955 :
1956 34 : __ bind(&in_dictionary);
1957 : __ movp(scratch, Immediate(1));
1958 34 : __ Drop(1);
1959 34 : __ ret(2 * kPointerSize);
1960 :
 1961          34 :   __ bind(&not_in_dictionary);
1962 : __ movp(scratch, Immediate(0));
1963 34 : __ Drop(1);
1964 34 : __ ret(2 * kPointerSize);
1965 34 : }
1966 :
1967 :
1968 60686 : void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
1969 : Isolate* isolate) {
1970 : StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
1971 60686 : stub1.GetCode();
1972 : StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
1973 60686 : stub2.GetCode();
1974 60686 : }
1975 :
1976 :
 1977             : // Takes the input in 3 registers: address_, value_, and object_. A pointer to
1978 : // the value has just been written into the object, now this stub makes sure
1979 : // we keep the GC informed. The word in the object where the value has been
1980 : // written is in the address register.
1981 45606 : void RecordWriteStub::Generate(MacroAssembler* masm) {
1982 : Label skip_to_incremental_noncompacting;
1983 : Label skip_to_incremental_compacting;
1984 :
1985 : // The first two instructions are generated with labels so as to get the
1986 : // offset fixed up correctly by the bind(Label*) call. We patch it back and
 1987             :   // forth between compare instructions (nops in these positions) and the
1988 : // real branch when we start and stop incremental heap marking.
1989 : // See RecordWriteStub::Patch for details.
1990 68409 : __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
1991 22803 : __ jmp(&skip_to_incremental_compacting, Label::kFar);
1992 :
1993 22803 : if (remembered_set_action() == EMIT_REMEMBERED_SET) {
1994 : __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
1995 19007 : MacroAssembler::kReturnAtEnd);
1996 : } else {
1997 3796 : __ ret(0);
1998 : }
1999 :
2000 22803 : __ bind(&skip_to_incremental_noncompacting);
2001 22803 : GenerateIncremental(masm, INCREMENTAL);
2002 :
2003 22803 : __ bind(&skip_to_incremental_compacting);
2004 22803 : GenerateIncremental(masm, INCREMENTAL_COMPACTION);
2005 :
2006 : // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
2007 : // Will be checked in IncrementalMarking::ActivateGeneratedStub.
2008 : masm->set_byte_at(0, kTwoByteNopInstruction);
2009 : masm->set_byte_at(2, kFiveByteNopInstruction);
2010 22803 : }
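
// Illustrative sketch (not V8 code): how the mode patching works. The stub
// begins with a 2-byte slot at offset 0 and a 5-byte slot at offset 2; each
// holds either a jump or a same-sized "nop" (a cmp instruction whose
// immediate swallows the jump displacement). 0xEB/0xE9 are the x86 short and
// near jump opcodes; the cmp opcodes below are our reading of the
// kTwoByteNopInstruction/kFiveByteNopInstruction constants.
#include <cstdint>

enum RecordWriteModeSketch {
  STORE_BUFFER_ONLY_SKETCH,
  INCREMENTAL_SKETCH,
  INCREMENTAL_COMPACTION_SKETCH
};

static void PatchRecordWriteSketch(std::uint8_t* code,
                                   RecordWriteModeSketch mode) {
  const std::uint8_t kJmpRel8 = 0xEB;   // take the non-compacting branch
  const std::uint8_t kJmpRel32 = 0xE9;  // take the compacting branch
  const std::uint8_t kNop2 = 0x3C;      // cmpb al, imm8: 2-byte no-op
  const std::uint8_t kNop5 = 0x3D;      // cmpl eax, imm32: 5-byte no-op
  code[0] = (mode == INCREMENTAL_SKETCH) ? kJmpRel8 : kNop2;
  code[2] = (mode == INCREMENTAL_COMPACTION_SKETCH) ? kJmpRel32 : kNop5;
}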
2011 :
2012 :
2013 129226 : void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
2014 45606 : regs_.Save(masm);
2015 :
2016 45606 : if (remembered_set_action() == EMIT_REMEMBERED_SET) {
2017 : Label dont_need_remembered_set;
2018 :
2019 76028 : __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
2020 : __ JumpIfNotInNewSpace(regs_.scratch0(),
2021 : regs_.scratch0(),
2022 : &dont_need_remembered_set);
2023 :
2024 : __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
2025 : &dont_need_remembered_set);
2026 :
2027 : // First notify the incremental marker if necessary, then update the
2028 : // remembered set.
2029 : CheckNeedsToInformIncrementalMarker(
2030 38014 : masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
2031 38014 : InformIncrementalMarker(masm);
2032 38014 : regs_.Restore(masm);
2033 : __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2034 38014 : MacroAssembler::kReturnAtEnd);
2035 :
2036 38014 : __ bind(&dont_need_remembered_set);
2037 : }
2038 :
2039 : CheckNeedsToInformIncrementalMarker(
2040 45606 : masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
2041 45606 : InformIncrementalMarker(masm);
2042 45606 : regs_.Restore(masm);
2043 45606 : __ ret(0);
2044 45606 : }
2045 :
2046 :
2047 167240 : void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
2048 : regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
2049 : Register address =
2050 83620 : arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
2051 : DCHECK(!address.is(regs_.object()));
2052 : DCHECK(!address.is(arg_reg_1));
2053 83620 : __ Move(address, regs_.address());
2054 83620 : __ Move(arg_reg_1, regs_.object());
2055 : // TODO(gc) Can we just set address arg2 in the beginning?
2056 83620 : __ Move(arg_reg_2, address);
2057 : __ LoadAddress(arg_reg_3,
2058 167240 : ExternalReference::isolate_address(isolate()));
2059 : int argument_count = 3;
2060 :
2061 : AllowExternalCallThatCantCauseGC scope(masm);
2062 83620 : __ PrepareCallCFunction(argument_count);
2063 : __ CallCFunction(
2064 : ExternalReference::incremental_marking_record_write_function(isolate()),
2065 83620 : argument_count);
2066 : regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
2067 83620 : }
2068 :
2069 22803 : void RecordWriteStub::Activate(Code* code) {
2070 22803 : code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
2071 22803 : }
2072 :
2073 83620 : void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
2074 : MacroAssembler* masm,
2075 : OnNoNeedToInformIncrementalMarker on_no_need,
2076 76028 : Mode mode) {
2077 : Label on_black;
2078 : Label need_incremental;
2079 : Label need_incremental_pop_object;
2080 :
2081 : // Let's look at the color of the object: If it is not black we don't have
 2082             :   // Let's look at the color of the object: if it is not black, we don't have
2083 : __ JumpIfBlack(regs_.object(),
2084 : regs_.scratch0(),
2085 : regs_.scratch1(),
2086 : &on_black,
2087 83620 : Label::kNear);
2088 :
2089 83620 : regs_.Restore(masm);
2090 83620 : if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
2091 : __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2092 38014 : MacroAssembler::kReturnAtEnd);
2093 : } else {
2094 45606 : __ ret(0);
2095 : }
2096 :
2097 83620 : __ bind(&on_black);
2098 :
2099 : // Get the value from the slot.
2100 167240 : __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
2101 :
2102 83620 : if (mode == INCREMENTAL_COMPACTION) {
2103 : Label ensure_not_white;
2104 :
2105 : __ CheckPageFlag(regs_.scratch0(), // Contains value.
2106 : regs_.scratch1(), // Scratch.
2107 : MemoryChunk::kEvacuationCandidateMask,
2108 : zero,
2109 : &ensure_not_white,
2110 41810 : Label::kNear);
2111 :
2112 : __ CheckPageFlag(regs_.object(),
2113 : regs_.scratch1(), // Scratch.
2114 : MemoryChunk::kSkipEvacuationSlotsRecordingMask,
2115 : zero,
2116 41810 : &need_incremental);
2117 :
2118 41810 : __ bind(&ensure_not_white);
2119 : }
2120 :
2121 : // We need an extra register for this, so we push the object register
2122 : // temporarily.
2123 83620 : __ Push(regs_.object());
2124 : __ JumpIfWhite(regs_.scratch0(), // The value.
2125 : regs_.scratch1(), // Scratch.
2126 : regs_.object(), // Scratch.
2127 83620 : &need_incremental_pop_object, Label::kNear);
2128 83620 : __ Pop(regs_.object());
2129 :
2130 83620 : regs_.Restore(masm);
2131 83620 : if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
2132 : __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2133 38014 : MacroAssembler::kReturnAtEnd);
2134 : } else {
2135 45606 : __ ret(0);
2136 : }
2137 :
2138 83620 : __ bind(&need_incremental_pop_object);
2139 83620 : __ Pop(regs_.object());
2140 :
2141 83620 : __ bind(&need_incremental);
2142 :
2143 : // Fall through when we need to inform the incremental marker.
2144 83620 : }
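
// Illustrative sketch (not V8 code): the decision the code above encodes,
// simplified to ignore the evacuation-candidate checks. Only a store of a
// still-white value into a black (fully scanned) object can break the
// tri-color invariant; every other combination returns without informing
// the marker.
enum MarkColorSketch { WHITE_SKETCH, GREY_SKETCH, BLACK_SKETCH };

static bool NeedsToInformMarkerSketch(MarkColorSketch object_color,
                                      MarkColorSketch value_color) {
  if (object_color != BLACK_SKETCH) return false;  // not black: nothing to do
  return value_color == WHITE_SKETCH;  // white under black: inform marker
}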
2145 :
2146 :
2147 172 : void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
2148 86 : CEntryStub ces(isolate(), 1, kSaveFPRegs);
2149 86 : __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
2150 : int parameter_count_offset =
2151 : StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
2152 172 : __ movp(rbx, MemOperand(rbp, parameter_count_offset));
2153 86 : masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
2154 : __ PopReturnAddressTo(rcx);
2155 : int additional_offset =
2156 86 : function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
2157 172 : __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
2158 86 : __ jmp(rcx); // Return to IC Miss stub, continuation still on stack.
2159 86 : }
2160 :
2161 :
2162 1781034 : void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
2163 1781034 : if (masm->isolate()->function_entry_hook() != NULL) {
2164 : ProfileEntryHookStub stub(masm->isolate());
2165 0 : masm->CallStub(&stub);
2166 : }
2167 1781034 : }
2168 :
2169 :
2170 0 : void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
2171 : // This stub can be called from essentially anywhere, so it needs to save
2172 : // all volatile and callee-save registers.
2173 : const size_t kNumSavedRegisters = 2;
2174 0 : __ pushq(arg_reg_1);
2175 0 : __ pushq(arg_reg_2);
2176 :
2177 : // Calculate the original stack pointer and store it in the second arg.
2178 : __ leap(arg_reg_2,
2179 0 : Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));
2180 :
 2181             :   // Calculate the function address into the first arg.
2182 0 : __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
2183 0 : __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
2184 :
2185 : // Save the remainder of the volatile registers.
2186 0 : masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
2187 :
2188 : // Call the entry hook function.
2189 0 : __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
2190 : Assembler::RelocInfoNone());
2191 :
2192 : AllowExternalCallThatCantCauseGC scope(masm);
2193 :
2194 : const int kArgumentCount = 2;
2195 0 : __ PrepareCallCFunction(kArgumentCount);
2196 0 : __ CallCFunction(rax, kArgumentCount);
2197 :
2198 : // Restore volatile regs.
2199 0 : masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
2200 0 : __ popq(arg_reg_2);
2201 0 : __ popq(arg_reg_1);
2202 :
2203 0 : __ Ret();
2204 0 : }
2205 :
2206 :
2207 : template<class T>
2208 387 : static void CreateArrayDispatch(MacroAssembler* masm,
2209 : AllocationSiteOverrideMode mode) {
2210 86 : if (mode == DISABLE_ALLOCATION_SITES) {
2211 : T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
2212 43 : __ TailCallStub(&stub);
2213 43 : } else if (mode == DONT_OVERRIDE) {
2214 : int last_index = GetSequenceIndexFromFastElementsKind(
2215 43 : TERMINAL_FAST_ELEMENTS_KIND);
2216 301 : for (int i = 0; i <= last_index; ++i) {
2217 : Label next;
2218 258 : ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
2219 516 : __ cmpl(rdx, Immediate(kind));
2220 258 : __ j(not_equal, &next);
2221 : T stub(masm->isolate(), kind);
2222 258 : __ TailCallStub(&stub);
2223 258 : __ bind(&next);
2224 : }
2225 :
2226 : // If we reached this point there is a problem.
2227 43 : __ Abort(kUnexpectedElementsKindInArrayConstructor);
2228 : } else {
2229 0 : UNREACHABLE();
2230 : }
2231 86 : }
2232 :
2233 :
2234 430 : static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
2235 : AllocationSiteOverrideMode mode) {
2236 : // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
2237 : // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
2238 : // rax - number of arguments
2239 : // rdi - constructor?
2240 : // rsp[0] - return address
2241 : // rsp[8] - last argument
2242 :
2243 : Label normal_sequence;
2244 86 : if (mode == DONT_OVERRIDE) {
2245 : STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2246 : STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2247 : STATIC_ASSERT(FAST_ELEMENTS == 2);
2248 : STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
2249 : STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
2250 : STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
2251 :
 2252             :     // Is the low bit set? If so, we are holey and that is good.
2253 43 : __ testb(rdx, Immediate(1));
2254 43 : __ j(not_zero, &normal_sequence);
2255 : }
2256 :
 2257             :   // Look at the first argument.
2258 : StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
2259 86 : __ movp(rcx, args.GetArgumentOperand(0));
2260 : __ testp(rcx, rcx);
2261 86 : __ j(zero, &normal_sequence);
2262 :
2263 86 : if (mode == DISABLE_ALLOCATION_SITES) {
2264 : ElementsKind initial = GetInitialFastElementsKind();
2265 : ElementsKind holey_initial = GetHoleyElementsKind(initial);
2266 :
2267 : ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
2268 : holey_initial,
2269 : DISABLE_ALLOCATION_SITES);
2270 43 : __ TailCallStub(&stub_holey);
2271 :
2272 43 : __ bind(&normal_sequence);
2273 : ArraySingleArgumentConstructorStub stub(masm->isolate(),
2274 : initial,
2275 : DISABLE_ALLOCATION_SITES);
2276 43 : __ TailCallStub(&stub);
2277 43 : } else if (mode == DONT_OVERRIDE) {
2278 : // We are going to create a holey array, but our kind is non-holey.
2279 : // Fix kind and retry (only if we have an allocation site in the slot).
2280 : __ incl(rdx);
2281 :
2282 43 : if (FLAG_debug_code) {
2283 : Handle<Map> allocation_site_map =
2284 : masm->isolate()->factory()->allocation_site_map();
2285 0 : __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
2286 0 : __ Assert(equal, kExpectedAllocationSite);
2287 : }
2288 :
 2289             :     // Save the resulting elements kind in type info. We can't just store rdx
2290 : // in the AllocationSite::transition_info field because elements kind is
2291 : // restricted to a portion of the field...upper bits need to be left alone.
2292 : STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
2293 : __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
2294 43 : Smi::FromInt(kFastElementsKindPackedToHoley));
2295 :
2296 43 : __ bind(&normal_sequence);
2297 : int last_index = GetSequenceIndexFromFastElementsKind(
2298 43 : TERMINAL_FAST_ELEMENTS_KIND);
2299 301 : for (int i = 0; i <= last_index; ++i) {
2300 : Label next;
2301 258 : ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
2302 516 : __ cmpl(rdx, Immediate(kind));
2303 258 : __ j(not_equal, &next);
2304 : ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
2305 258 : __ TailCallStub(&stub);
2306 258 : __ bind(&next);
2307 : }
2308 :
2309 : // If we reached this point there is a problem.
2310 43 : __ Abort(kUnexpectedElementsKindInArrayConstructor);
2311 : } else {
2312 0 : UNREACHABLE();
2313 : }
2314 86 : }
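
// Illustrative sketch (not V8 code): why "__ incl(rdx)" is the whole
// transition. Per the STATIC_ASSERTs above, each packed fast kind is even
// and the matching holey kind is the next odd value, so the low bit tests
// "already holey" and +1 converts packed to holey.
enum FastKindSketch {
  SMI_SKETCH = 0, HOLEY_SMI_SKETCH = 1,
  OBJECT_SKETCH = 2, HOLEY_OBJECT_SKETCH = 3,
  DOUBLE_SKETCH = 4, HOLEY_DOUBLE_SKETCH = 5
};

static FastKindSketch ToHoleySketch(FastKindSketch kind) {
  if (kind & 1) return kind;  // low bit set: already holey
  return static_cast<FastKindSketch>(kind + 1);  // bump to the holey variant
}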
2315 :
2316 :
2317 : template<class T>
2318 86 : static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
2319 : int to_index = GetSequenceIndexFromFastElementsKind(
2320 86 : TERMINAL_FAST_ELEMENTS_KIND);
2321 602 : for (int i = 0; i <= to_index; ++i) {
2322 516 : ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
2323 : T stub(isolate, kind);
2324 516 : stub.GetCode();
2325 516 : if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
2326 : T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
2327 172 : stub1.GetCode();
2328 : }
2329 : }
2330 86 : }
2331 :
2332 43 : void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2333 : ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
2334 43 : isolate);
2335 : ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
2336 43 : isolate);
2337 : ArrayNArgumentsConstructorStub stub(isolate);
2338 43 : stub.GetCode();
2339 :
2340 43 : ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
2341 129 : for (int i = 0; i < 2; i++) {
2342 : // For internal arrays we only need a few things
2343 86 : InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
2344 86 : stubh1.GetCode();
2345 : InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
2346 86 : stubh2.GetCode();
2347 : }
2348 43 : }
2349 :
2350 86 : void ArrayConstructorStub::GenerateDispatchToArrayStub(
2351 86 : MacroAssembler* masm, AllocationSiteOverrideMode mode) {
2352 : Label not_zero_case, not_one_case;
2353 86 : __ testp(rax, rax);
 2354          86 :   __ j(not_zero, &not_zero_case);
2355 86 : CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
2356 :
 2357          86 :   __ bind(&not_zero_case);
2358 86 : __ cmpl(rax, Immediate(1));
 2359          86 :   __ j(greater, &not_one_case);
2360 86 : CreateArrayDispatchOneArgument(masm, mode);
2361 :
 2362          86 :   __ bind(&not_one_case);
2363 : ArrayNArgumentsConstructorStub stub(masm->isolate());
2364 86 : __ TailCallStub(&stub);
2365 86 : }
2366 :
2367 43 : void ArrayConstructorStub::Generate(MacroAssembler* masm) {
2368 : // ----------- S t a t e -------------
2369 : // -- rax : argc
2370 : // -- rbx : AllocationSite or undefined
2371 : // -- rdi : constructor
2372 : // -- rdx : new target
2373 : // -- rsp[0] : return address
2374 : // -- rsp[8] : last argument
2375 : // -----------------------------------
2376 43 : if (FLAG_debug_code) {
2377 : // The array construct code is only set for the global and natives
 2378             :     // builtin Array functions, which always have maps.
2379 :
2380 : // Initial map for the builtin Array function should be a map.
2381 0 : __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
 2382             :     // This check will catch both a NULL and a Smi.
2383 : STATIC_ASSERT(kSmiTag == 0);
2384 0 : Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
2385 0 : __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
2386 0 : __ CmpObjectType(rcx, MAP_TYPE, rcx);
2387 0 : __ Check(equal, kUnexpectedInitialMapForArrayFunction);
2388 :
 2389             :     // We should have either undefined in rbx or a valid AllocationSite.
2390 0 : __ AssertUndefinedOrAllocationSite(rbx);
2391 : }
2392 :
2393 : // Enter the context of the Array function.
2394 43 : __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2395 :
2396 : Label subclassing;
2397 43 : __ cmpp(rdi, rdx);
2398 43 : __ j(not_equal, &subclassing);
2399 :
2400 : Label no_info;
 2401             :   // If the feedback vector is the undefined value, call an array constructor
2402 : // that doesn't use AllocationSites.
2403 43 : __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
2404 43 : __ j(equal, &no_info);
2405 :
2406 : // Only look at the lower 16 bits of the transition info.
2407 : __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
2408 43 : __ SmiToInteger32(rdx, rdx);
2409 : STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
2410 43 : __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
2411 43 : GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
2412 :
2413 43 : __ bind(&no_info);
2414 43 : GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
2415 :
2416 : // Subclassing
2417 43 : __ bind(&subclassing);
2418 : StackArgumentsAccessor args(rsp, rax);
2419 : __ movp(args.GetReceiverOperand(), rdi);
2420 43 : __ addp(rax, Immediate(3));
2421 : __ PopReturnAddressTo(rcx);
2422 43 : __ Push(rdx);
2423 43 : __ Push(rbx);
2424 : __ PushReturnAddressFrom(rcx);
2425 43 : __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
2426 43 : }
2427 :
2428 :
2429 86 : void InternalArrayConstructorStub::GenerateCase(
2430 : MacroAssembler* masm, ElementsKind kind) {
2431 : Label not_zero_case, not_one_case;
2432 : Label normal_sequence;
2433 :
2434 86 : __ testp(rax, rax);
 2435          86 :   __ j(not_zero, &not_zero_case);
2436 301 : InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
2437 86 : __ TailCallStub(&stub0);
2438 :
 2439          86 :   __ bind(&not_zero_case);
2440 86 : __ cmpl(rax, Immediate(1));
 2441          86 :   __ j(greater, &not_one_case);
2442 :
2443 86 : if (IsFastPackedElementsKind(kind)) {
 2444             :     // We might need to create a holey array;
 2445             :     // look at the first argument.
2446 : StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
2447 43 : __ movp(rcx, args.GetArgumentOperand(0));
2448 : __ testp(rcx, rcx);
2449 43 : __ j(zero, &normal_sequence);
2450 :
2451 : InternalArraySingleArgumentConstructorStub
2452 : stub1_holey(isolate(), GetHoleyElementsKind(kind));
2453 43 : __ TailCallStub(&stub1_holey);
2454 : }
2455 :
2456 86 : __ bind(&normal_sequence);
2457 : InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
2458 86 : __ TailCallStub(&stub1);
2459 :
 2460          86 :   __ bind(&not_one_case);
2461 : ArrayNArgumentsConstructorStub stubN(isolate());
2462 86 : __ TailCallStub(&stubN);
2463 86 : }
2464 :
2465 :
2466 43 : void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
2467 : // ----------- S t a t e -------------
2468 : // -- rax : argc
2469 : // -- rdi : constructor
2470 : // -- rsp[0] : return address
2471 : // -- rsp[8] : last argument
2472 : // -----------------------------------
2473 :
2474 43 : if (FLAG_debug_code) {
2475 : // The array construct code is only set for the global and natives
2476 : // builtin Array functions which always have maps.
 2477             :     // builtin Array functions, which always have maps.
2478 : // Initial map for the builtin Array function should be a map.
2479 0 : __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
2480 : // Will both indicate a NULL and a Smi.
 2481             :     // This check will catch both a NULL and a Smi.
2482 0 : Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
2483 0 : __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
2484 0 : __ CmpObjectType(rcx, MAP_TYPE, rcx);
2485 0 : __ Check(equal, kUnexpectedInitialMapForArrayFunction);
2486 : }
2487 :
2488 : // Figure out the right elements kind
2489 43 : __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
2490 :
 2491             :   // Load the map's "bit field 2" into |rcx|. We only need the first byte,
2492 : // but the following masking takes care of that anyway.
2493 : __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
2494 : // Retrieve elements_kind from bit field 2.
2495 43 : __ DecodeField<Map::ElementsKindBits>(rcx);
2496 :
2497 43 : if (FLAG_debug_code) {
2498 : Label done;
2499 0 : __ cmpl(rcx, Immediate(FAST_ELEMENTS));
2500 0 : __ j(equal, &done);
2501 0 : __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
2502 : __ Assert(equal,
2503 0 : kInvalidElementsKindForInternalArrayOrInternalPackedArray);
2504 0 : __ bind(&done);
2505 : }
2506 :
2507 : Label fast_elements_case;
2508 43 : __ cmpl(rcx, Immediate(FAST_ELEMENTS));
2509 43 : __ j(equal, &fast_elements_case);
2510 43 : GenerateCase(masm, FAST_HOLEY_ELEMENTS);
2511 :
2512 43 : __ bind(&fast_elements_case);
2513 43 : GenerateCase(masm, FAST_ELEMENTS);
2514 43 : }
2515 :
2516 : static int Offset(ExternalReference ref0, ExternalReference ref1) {
2517 12348 : int64_t offset = (ref0.address() - ref1.address());
 2518             :   // Check that the offset fits into an int.
2519 : DCHECK(static_cast<int>(offset) == offset);
2520 12348 : return static_cast<int>(offset);
2521 : }
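
// Illustrative sketch (not V8 code): the point of Offset() is that the
// handle-scope fields live near one another, so the code keeps a single
// base register (base_reg below) and reaches the other fields through
// small fixed displacements. The struct layout here is hypothetical.
#include <cstdint>

struct HandleScopeDataSketch {  // hypothetical layout for illustration
  std::uintptr_t next;          // kNextOffset == 0
  std::uintptr_t limit;         // kLimitOffset: displacement of |limit|
  int level;                    // kLevelOffset: displacement of |level|
};

static int OffsetSketch(const void* field, const void* base) {
  std::int64_t offset = static_cast<const char*>(field) -
                        static_cast<const char*>(base);
  return static_cast<int>(offset);  // the real code asserts this fits
}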
2522 :
 2523             : // Prepares the stack for the arguments (alignment and so on). The WIN64
 2524             : // calling convention requires putting the pointer to the return value slot
 2525             : // into rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves
 2526             : // context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
 2527             : // inside the exit frame (not GCed), accessible via StackSpaceOperand.
2528 : static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
2529 6174 : __ EnterApiExitFrame(arg_stack_space);
2530 : }
2531 :
2532 :
 2533             : // Calls an API function. Allocates a HandleScope, extracts the returned
 2534             : // value from the handle and propagates exceptions. Clobbers r14, r15, rbx and
2535 : // caller-save registers. Restores context. On return removes
2536 : // stack_space * kPointerSize (GCed).
2537 12348 : static void CallApiFunctionAndReturn(MacroAssembler* masm,
2538 : Register function_address,
2539 : ExternalReference thunk_ref,
2540 : Register thunk_last_arg, int stack_space,
2541 : Operand* stack_space_operand,
2542 : Operand return_value_operand,
2543 : Operand* context_restore_operand) {
2544 : Label prologue;
2545 : Label promote_scheduled_exception;
2546 : Label delete_allocated_handles;
2547 : Label leave_exit_frame;
2548 : Label write_back;
2549 :
2550 : Isolate* isolate = masm->isolate();
2551 : Factory* factory = isolate->factory();
2552 : ExternalReference next_address =
2553 6174 : ExternalReference::handle_scope_next_address(isolate);
2554 : const int kNextOffset = 0;
2555 : const int kLimitOffset = Offset(
2556 6174 : ExternalReference::handle_scope_limit_address(isolate), next_address);
2557 : const int kLevelOffset = Offset(
2558 6174 : ExternalReference::handle_scope_level_address(isolate), next_address);
2559 : ExternalReference scheduled_exception_address =
2560 6174 : ExternalReference::scheduled_exception_address(isolate);
2561 :
2562 : DCHECK(rdx.is(function_address) || r8.is(function_address));
2563 : // Allocate HandleScope in callee-save registers.
2564 : Register prev_next_address_reg = r14;
2565 : Register prev_limit_reg = rbx;
2566 : Register base_reg = r15;
2567 : __ Move(base_reg, next_address);
2568 12348 : __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
2569 12348 : __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
2570 12348 : __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
2571 :
2572 6174 : if (FLAG_log_timer_events) {
2573 0 : FrameScope frame(masm, StackFrame::MANUAL);
2574 : __ PushSafepointRegisters();
2575 0 : __ PrepareCallCFunction(1);
2576 0 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
2577 : __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
2578 0 : 1);
2579 0 : __ PopSafepointRegisters();
2580 : }
2581 :
2582 : Label profiler_disabled;
2583 : Label end_profiler_check;
2584 6174 : __ Move(rax, ExternalReference::is_profiling_address(isolate));
2585 12348 : __ cmpb(Operand(rax, 0), Immediate(0));
2586 6174 : __ j(zero, &profiler_disabled);
2587 :
2588 : // Third parameter is the address of the actual getter function.
2589 6174 : __ Move(thunk_last_arg, function_address);
2590 : __ Move(rax, thunk_ref);
2591 6174 : __ jmp(&end_profiler_check);
2592 :
2593 6174 : __ bind(&profiler_disabled);
 2594             :   // The profiler is disabled; call the api function directly.
2595 6174 : __ Move(rax, function_address);
2596 :
2597 6174 : __ bind(&end_profiler_check);
2598 :
2599 : // Call the api function!
2600 6174 : __ call(rax);
2601 :
2602 6174 : if (FLAG_log_timer_events) {
2603 0 : FrameScope frame(masm, StackFrame::MANUAL);
2604 : __ PushSafepointRegisters();
2605 0 : __ PrepareCallCFunction(1);
2606 0 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
2607 : __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
2608 0 : 1);
2609 0 : __ PopSafepointRegisters();
2610 : }
2611 :
2612 : // Load the value from ReturnValue
2613 : __ movp(rax, return_value_operand);
2614 6174 : __ bind(&prologue);
2615 :
2616 : // No more valid handles (the result handle was the last one). Restore
2617 : // previous handle scope.
2618 12348 : __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
2619 12348 : __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
2620 12348 : __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
2621 6174 : __ j(not_equal, &delete_allocated_handles);
2622 :
2623 : // Leave the API exit frame.
2624 6174 : __ bind(&leave_exit_frame);
2625 6174 : bool restore_context = context_restore_operand != NULL;
2626 6174 : if (restore_context) {
2627 : __ movp(rsi, *context_restore_operand);
2628 : }
2629 6174 : if (stack_space_operand != nullptr) {
2630 : __ movp(rbx, *stack_space_operand);
2631 : }
2632 6174 : __ LeaveApiExitFrame(!restore_context);
2633 :
2634 : // Check if the function scheduled an exception.
2635 : __ Move(rdi, scheduled_exception_address);
2636 6174 : __ Cmp(Operand(rdi, 0), factory->the_hole_value());
2637 6174 : __ j(not_equal, &promote_scheduled_exception);
2638 :
2639 : #if DEBUG
2640 : // Check if the function returned a valid JavaScript value.
2641 : Label ok;
2642 : Register return_value = rax;
2643 : Register map = rcx;
2644 :
2645 : __ JumpIfSmi(return_value, &ok, Label::kNear);
2646 : __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));
2647 :
2648 : __ CmpInstanceType(map, LAST_NAME_TYPE);
2649 : __ j(below_equal, &ok, Label::kNear);
2650 :
2651 : __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
2652 : __ j(above_equal, &ok, Label::kNear);
2653 :
2654 : __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
2655 : __ j(equal, &ok, Label::kNear);
2656 :
2657 : __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
2658 : __ j(equal, &ok, Label::kNear);
2659 :
2660 : __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
2661 : __ j(equal, &ok, Label::kNear);
2662 :
2663 : __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
2664 : __ j(equal, &ok, Label::kNear);
2665 :
2666 : __ CompareRoot(return_value, Heap::kNullValueRootIndex);
2667 : __ j(equal, &ok, Label::kNear);
2668 :
2669 : __ Abort(kAPICallReturnedInvalidObject);
2670 :
2671 : __ bind(&ok);
2672 : #endif
2673 :
2674 6174 : if (stack_space_operand != nullptr) {
2675 : DCHECK_EQ(stack_space, 0);
2676 : __ PopReturnAddressTo(rcx);
2677 0 : __ addq(rsp, rbx);
2678 0 : __ jmp(rcx);
2679 : } else {
2680 6174 : __ ret(stack_space * kPointerSize);
2681 : }
2682 :
2683 : // Re-throw by promoting a scheduled exception.
2684 6174 : __ bind(&promote_scheduled_exception);
2685 6174 : __ TailCallRuntime(Runtime::kPromoteScheduledException);
2686 :
2687 : // HandleScope limit has changed. Delete allocated extensions.
2688 6174 : __ bind(&delete_allocated_handles);
2689 12348 : __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
2690 : __ movp(prev_limit_reg, rax);
2691 6174 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
2692 : __ LoadAddress(rax,
2693 6174 : ExternalReference::delete_handle_scope_extensions(isolate));
2694 6174 : __ call(rax);
2695 : __ movp(rax, prev_limit_reg);
2696 6174 : __ jmp(&leave_exit_frame);
2697 6174 : }
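
// Illustrative sketch (not V8 code): the HandleScope bookkeeping the stub
// performs around the callback, as C++. prev_next and prev_limit mirror
// prev_next_address_reg and prev_limit_reg; level mirrors the kLevelOffset
// increment/decrement. The scheduled-exception path is omitted and the
// data layout is hypothetical.
#include <cstdint>

struct ScopeDataSketch {
  std::uintptr_t next;
  std::uintptr_t limit;
  int level;
  // Stands in for ExternalReference::delete_handle_scope_extensions.
  void DeleteExtensionsSketch() {}
};

template <typename Callback>
std::uintptr_t CallWithHandleScopeSketch(ScopeDataSketch* data, Callback cb) {
  std::uintptr_t prev_next = data->next;    // save the allocation cursor
  std::uintptr_t prev_limit = data->limit;  // save the extension watermark
  data->level++;
  std::uintptr_t result = cb();             // the actual API call
  data->level--;
  data->next = prev_next;                   // drop handles made by the call
  if (data->limit != prev_limit) {          // scope grew during the call:
    data->limit = prev_limit;               // restore the watermark and
    data->DeleteExtensionsSketch();         // free the extra segments
  }
  return result;
}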
2698 :
2699 36786 : void CallApiCallbackStub::Generate(MacroAssembler* masm) {
2700 : // ----------- S t a t e -------------
2701 : // -- rdi : callee
2702 : // -- rbx : call_data
2703 : // -- rcx : holder
2704 : // -- rdx : api_function_address
2705 : // -- rsi : context
2706 : // -- rax : number of arguments if argc is a register
2707 : // -- rsp[0] : return address
2708 : // -- rsp[8] : last argument
2709 : // -- ...
2710 : // -- rsp[argc * 8] : first argument
2711 : // -- rsp[(argc + 1) * 8] : receiver
2712 : // -----------------------------------
2713 :
2714 : Register callee = rdi;
2715 : Register call_data = rbx;
2716 6131 : Register holder = rcx;
2717 6131 : Register api_function_address = rdx;
2718 : Register context = rsi;
2719 : Register return_address = r8;
2720 :
2721 : typedef FunctionCallbackArguments FCA;
2722 :
2723 : STATIC_ASSERT(FCA::kContextSaveIndex == 6);
2724 : STATIC_ASSERT(FCA::kCalleeIndex == 5);
2725 : STATIC_ASSERT(FCA::kDataIndex == 4);
2726 : STATIC_ASSERT(FCA::kReturnValueOffset == 3);
2727 : STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
2728 : STATIC_ASSERT(FCA::kIsolateIndex == 1);
2729 : STATIC_ASSERT(FCA::kHolderIndex == 0);
2730 : STATIC_ASSERT(FCA::kNewTargetIndex == 7);
2731 : STATIC_ASSERT(FCA::kArgsLength == 8);
2732 :
2733 : __ PopReturnAddressTo(return_address);
2734 :
2735 : // new target
2736 6131 : __ PushRoot(Heap::kUndefinedValueRootIndex);
2737 :
2738 : // context save
2739 6131 : __ Push(context);
2740 :
2741 : // callee
2742 6131 : __ Push(callee);
2743 :
2744 : // call data
2745 6131 : __ Push(call_data);
2746 :
2747 : // return value
2748 6131 : __ PushRoot(Heap::kUndefinedValueRootIndex);
2749 : // return value default
2750 6131 : __ PushRoot(Heap::kUndefinedValueRootIndex);
2751 : // isolate
2752 : Register scratch = call_data;
2753 6131 : __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
2754 6131 : __ Push(scratch);
2755 : // holder
2756 6131 : __ Push(holder);
2757 :
2758 : __ movp(scratch, rsp);
2759 : // Push return address back on stack.
2760 : __ PushReturnAddressFrom(return_address);
2761 :
2762 6131 : if (!this->is_lazy()) {
2763 : // load context from callee
2764 : __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
2765 : }
2766 :
2767 : // Allocate the v8::Arguments structure in the arguments' space since
2768 : // it's not controlled by GC.
2769 : const int kApiStackSpace = 3;
2770 :
2771 : PrepareCallApiFunction(masm, kApiStackSpace);
2772 :
2773 : // FunctionCallbackInfo::implicit_args_.
2774 : int argc = this->argc();
2775 : __ movp(StackSpaceOperand(0), scratch);
2776 12262 : __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
2777 : // FunctionCallbackInfo::values_.
2778 : __ movp(StackSpaceOperand(1), scratch);
2779 : // FunctionCallbackInfo::length_.
2780 12262 : __ Set(StackSpaceOperand(2), argc);
2781 :
2782 : #if defined(__MINGW64__) || defined(_WIN64)
2783 : Register arguments_arg = rcx;
2784 : Register callback_arg = rdx;
2785 : #else
2786 : Register arguments_arg = rdi;
2787 6131 : Register callback_arg = rsi;
2788 : #endif
2789 :
2790 : // It's okay if api_function_address == callback_arg
2791 : // but not arguments_arg
2792 : DCHECK(!api_function_address.is(arguments_arg));
2793 :
2794 : // v8::InvocationCallback's argument.
2795 : __ leap(arguments_arg, StackSpaceOperand(0));
2796 :
2797 : ExternalReference thunk_ref =
2798 6131 : ExternalReference::invoke_function_callback(masm->isolate());
2799 :
2800 : // Accessor for FunctionCallbackInfo and first js arg.
2801 : StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
2802 : ARGUMENTS_DONT_CONTAIN_RECEIVER);
2803 : Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
2804 6131 : FCA::kArgsLength - FCA::kContextSaveIndex);
2805 : Operand length_operand = StackSpaceOperand(2);
2806 : Operand return_value_operand = args_from_rbp.GetArgumentOperand(
2807 6131 : this->is_store() ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
2808 : int stack_space = 0;
2809 : Operand* stack_space_operand = &length_operand;
2810 6131 : stack_space = argc + FCA::kArgsLength + 1;
2811 : stack_space_operand = nullptr;
2812 : CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
2813 : stack_space, stack_space_operand,
2814 6131 : return_value_operand, &context_restore_operand);
2815 6131 : }
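
// Illustrative sketch (not V8 code): the three words written through
// StackSpaceOperand(0..2) above form the argument block handed to the C++
// callback. This struct is a simplified, hypothetical stand-in for the
// real FunctionCallbackInfo layout.
#include <cstdint>

struct FunctionCallbackInfoSketch {
  std::uintptr_t* implicit_args;  // StackSpaceOperand(0): FCA array base
  std::uintptr_t* values;         // StackSpaceOperand(1): the JS arguments
  int length;                     // StackSpaceOperand(2): argc
};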
2816 :
2817 :
2818 43 : void CallApiGetterStub::Generate(MacroAssembler* masm) {
2819 : #if defined(__MINGW64__) || defined(_WIN64)
2820 : Register getter_arg = r8;
2821 : Register accessor_info_arg = rdx;
2822 : Register name_arg = rcx;
2823 : #else
2824 43 : Register getter_arg = rdx;
2825 : Register accessor_info_arg = rsi;
2826 : Register name_arg = rdi;
2827 : #endif
2828 : Register api_function_address = r8;
2829 43 : Register receiver = ApiGetterDescriptor::ReceiverRegister();
2830 43 : Register holder = ApiGetterDescriptor::HolderRegister();
2831 43 : Register callback = ApiGetterDescriptor::CallbackRegister();
2832 : Register scratch = rax;
2833 : DCHECK(!AreAliased(receiver, holder, callback, scratch));
2834 :
 2835             :   // Build the v8::PropertyCallbackInfo::args_ array on the stack and push the
 2836             :   // property name below the exit frame to make the GC aware of them.
2837 : STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
2838 : STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
2839 : STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
2840 : STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
2841 : STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
2842 : STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
2843 : STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
2844 : STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
2845 :
 2846             :   // Insert additional parameters into the stack frame above the return address.
2847 : __ PopReturnAddressTo(scratch);
2848 43 : __ Push(receiver);
2849 43 : __ Push(FieldOperand(callback, AccessorInfo::kDataOffset));
2850 43 : __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
2851 43 : __ Push(kScratchRegister); // return value
2852 43 : __ Push(kScratchRegister); // return value default
2853 86 : __ PushAddress(ExternalReference::isolate_address(isolate()));
2854 43 : __ Push(holder);
2855 43 : __ Push(Smi::kZero); // should_throw_on_error -> false
2856 43 : __ Push(FieldOperand(callback, AccessorInfo::kNameOffset));
2857 : __ PushReturnAddressFrom(scratch);
2858 :
2859 : // v8::PropertyCallbackInfo::args_ array and name handle.
2860 : const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
2861 :
2862 : // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
2863 : const int kArgStackSpace = 1;
2864 :
 2865             :   // Load the address of the v8::PropertyCallbackInfo::args_ array.
2866 86 : __ leap(scratch, Operand(rsp, 2 * kPointerSize));
2867 :
2868 : PrepareCallApiFunction(masm, kArgStackSpace);
 2869             :   // Create a v8::PropertyCallbackInfo object on the stack and initialize
 2870             :   // its args_ field.
2871 43 : Operand info_object = StackSpaceOperand(0);
2872 : __ movp(info_object, scratch);
2873 :
2874 86 : __ leap(name_arg, Operand(scratch, -kPointerSize));
2875 : // The context register (rsi) has been saved in PrepareCallApiFunction and
2876 : // could be used to pass arguments.
2877 : __ leap(accessor_info_arg, info_object);
2878 :
2879 : ExternalReference thunk_ref =
2880 43 : ExternalReference::invoke_accessor_getter_callback(isolate());
2881 :
2882 : // It's okay if api_function_address == getter_arg
2883 : // but not accessor_info_arg or name_arg
2884 : DCHECK(!api_function_address.is(accessor_info_arg));
2885 : DCHECK(!api_function_address.is(name_arg));
2886 : __ movp(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
2887 : __ movp(api_function_address,
2888 : FieldOperand(scratch, Foreign::kForeignAddressOffset));
2889 :
2890 : // +3 is to skip prolog, return address and name handle.
2891 : Operand return_value_operand(
2892 43 : rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
2893 : CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
2894 : kStackUnwindSpace, nullptr, return_value_operand,
2895 43 : NULL);
2896 43 : }
2897 :
2898 : #undef __
2899 :
2900 : } // namespace internal
2901 : } // namespace v8
2902 :
2903 : #endif // V8_TARGET_ARCH_X64