// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/counters.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : target
  //  -- rdx                 : new.target
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------
  __ AssertFunction(rdi);

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ addp(rax, Immediate(num_extra_args + 1));

  // Unconditionally insert argc, target and new target as extra arguments.
  // They will be used by stack frame iterators when constructing the stack
  // trace.
  __ PopReturnAddressTo(kScratchRegister);
  __ Integer32ToSmi(rax, rax);
  __ Push(rax);
  __ SmiToInteger32(rax, rax);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(kScratchRegister);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}
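
// Worked example of the argc adjustment above (illustrative only, not part
// of the original source): the equivalent computation in C++ would be
//
//   int num_args_for_runtime(int argc) {
//     const int num_extra_args = 3;      // argc, target, new.target
//     return argc + num_extra_args + 1;  // +1 for the receiver
//   }
//
// so a call f(1, 2) arrives with rax == 2 and enters the runtime with
// rax == 6. The count is Smi-tagged only for the copy pushed onto the
// stack; rax itself is untagged again right after the push.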

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
    __ movp(rbx, rax);

    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool disallow_non_object_return) {
  Label post_instantiation_deopt_entry;

  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rsi: context
  //  -- rdi: constructor function
  //  -- rdx: new target
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ Integer32ToSmi(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(rdi);
      __ Push(rdx);
      __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
              RelocInfo::CODE_TARGET);
      __ movp(rbx, rax);
      __ Pop(rdx);
      __ Pop(rdi);

      // ----------- S t a t e -------------
      //  -- rdi: constructor function
      //  -- rbx: newly allocated object
      //  -- rdx: new target
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ SmiToInteger32(rax, Operand(rsp, 0 * kPointerSize));

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(rbx);
      __ Push(rbx);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Deoptimizer re-enters stub code here.
    __ bind(&post_instantiation_deopt_entry);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !disallow_non_object_return &&
        !is_api_function) {
      masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
          masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, return_value, do_throw;

      // If the result is undefined, we jump out to using the implicit
      // receiver; otherwise we do a smi check and fall through to
      // check if the return value is a valid receiver.
      if (disallow_non_object_return) {
        __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
        __ j(equal, &use_receiver);
        __ JumpIfSmi(rax, &do_throw, Label::kNear);
      } else {
        // If the result is a smi, it is *not* an object in the ECMA sense.
        __ JumpIfSmi(rax, &use_receiver, Label::kNear);
      }

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(above_equal, &return_value, Label::kNear);

      if (disallow_non_object_return) {
        __ bind(&do_throw);
        __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
      }

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ movp(rax, Operand(rsp, 0));

      // Restore the arguments count and leave the construct frame. The
      // arguments count is stored below the receiver.
      __ bind(&return_value);
      __ movp(rbx, Operand(rsp, 1 * kPointerSize));
    } else {
      __ movp(rbx, Operand(rsp, 0));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // For derived class constructors, throw a TypeError here if the result
  // is not a JSReceiver. For the base constructor, we've already checked
  // the result, so we omit the check.
  if (disallow_non_object_return && !create_implicit_receiver) {
    Label do_throw, dont_throw;
    __ JumpIfSmi(rax, &do_throw, Label::kNear);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
    __ j(above_equal, &dont_throw, Label::kNear);
    __ bind(&do_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  if (create_implicit_receiver) {
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->constructed_objects(), 1);
  }
  __ ret(0);

  // Store offset of trampoline address for deoptimizer. This is the bailout
  // point after the receiver instantiation but before the function invocation.
  // We need to restore some registers in order to continue the above code.
  if (create_implicit_receiver && !disallow_non_object_return &&
      !is_api_function) {
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());

    // ----------- S t a t e -------------
    //  -- rax    : newly allocated object
    //  -- rsp[0] : constructor function
    // -----------------------------------

    __ Pop(rdi);
    __ Push(rax);
    __ Push(rax);

    // Retrieve smi-tagged arguments count from the stack.
    __ SmiToInteger32(rax,
                      Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Retrieve the new target value from the stack. This was placed into the
    // frame description in place of the receiver by the optimizing compiler.
    __ movp(rdx, Operand(rbp, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset));

    // Continue with constructor function invocation.
    __ jmp(&post_instantiation_deopt_entry);
  }
}
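
// Illustrative note (not from the original source): the epilogue above drops
// the caller's arguments using the Smi-tagged count stored below the
// receiver. With argc == 2, SmiToIndex turns Smi(2) into a register/scale
// pair equivalent to 2 * kPointerSize, so the lea advances rsp by
// 2 * 8 + 8 bytes: two arguments plus the receiver.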

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForBase(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, true);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}
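
// Summary of the helper's flag combinations (added for readability; derived
// directly from the calls above):
//
//   stub                                is_api  implicit_recv  disallow_non_obj
//   JSConstructStubGeneric              false   true           false
//   JSConstructStubApi                  true    false          false
//   JSBuiltinsConstructStub             false   false          false
//   JSBuiltinsConstructStubForBase      false   true           true
//   JSBuiltinsConstructStubForDerived   false   false          true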

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };

// Clobbers rcx, r11, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        IsTagged rax_is_tagged) {
  // rax : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make r11 the space we need for the array when it is unrolled onto the
  // stack.
  if (rax_is_tagged == kRaxIsSmiTagged) {
    __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
  } else {
    DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
    __ movp(r11, rax);
    __ shlq(r11, Immediate(kPointerSizeLog2));
  }
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, r11);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
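
// Worked example (illustrative, not from the original source): with 4 items
// to push and kPointerSizeLog2 == 3, r11 = 4 << 3 = 32 bytes needed. If rsp
// is only 24 bytes above the real stack limit, rcx = 24, the signed
// comparison fails and Runtime::kThrowStackOverflow is called; with 40 bytes
// of headroom the check passes.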

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Object* new_target
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver; registers
    // rax and rbx hold the argument count and argument array, while rdi
    // holds the function pointer, rsi the context, and rdx the new.target.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : new_target
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the
    // isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and set up the pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C arguments on the stack.
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
    // Load the new.target into rdx.
    __ movp(rdx, rcx);
#else   // _WIN64
    // GCC parameters in:
    // rdi : new_target
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(r11, rdi);
    __ movp(rdi, rsi);
    // rdi : function
    // r11 : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the
    // isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and receiver onto the stack.
    __ Push(rdi);
    __ Push(rdx);

    // Load the number of arguments and set up the pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);

    // Load the new.target into rdx.
    __ movp(rdx, r11);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in rax. Clobbers rcx, r11.
    Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // Register rbx points to an array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}
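
// A minimal C++ sketch of the argv copy loop above (assumed names, for
// illustration only): argv is an array of handle locations, so each element
// is dereferenced once before being pushed.
//
//   void PushArguments(Object*** argv, int argc, void (*push)(Object*)) {
//     for (int i = 0; i < argc; i++) {
//       push(*argv[i]);  // one dereference: handle location -> Object*
//     }
//   }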

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : the value to pass to the generator
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rcx    : the SuspendFlags of the earlier suspend call (tagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  // Untag rcx.
  __ shrq(rcx, Immediate(kSmiTagSize + kSmiShiftSize));
  __ AssertGeneratorObject(rbx, rcx);

  // Store input value into generator object.
  Label async_await, done_store_input;

  __ andb(rcx, Immediate(static_cast<int>(SuspendFlags::kAsyncGeneratorAwait)));
  __ cmpb(rcx, Immediate(static_cast<int>(SuspendFlags::kAsyncGeneratorAwait)));
  __ j(equal, &async_await);

  __ movp(FieldOperand(rbx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
  __ RecordWriteField(rbx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
                      kDontSaveFPRegs);
  __ j(always, &done_store_input, Label::kNear);

  __ bind(&async_await);
  __ movp(
      FieldOperand(rbx, JSAsyncGeneratorObject::kAwaitInputOrDebugPosOffset),
      rax);
  __ RecordWriteField(rbx, JSAsyncGeneratorObject::kAwaitInputOrDebugPosOffset,
                      rax, rcx, kDontSaveFPRegs);

  __ bind(&done_store_input);
  // `rcx` no longer holds SuspendFlags.

  // Store resume mode into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kResumeModeOffset), rdx);

  // Load suspended function and context.
  __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  Operand debug_hook_operand = masm->ExternalOperand(debug_hook);
  __ cmpb(debug_hook_operand, Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended
  // generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  Operand debug_suspended_generator_operand =
      masm->ExternalOperand(debug_suspended_generator);
  __ cmpp(rbx, debug_suspended_generator_operand);
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Pop return address.
  __ PopReturnAddressTo(rax);

  // Push receiver.
  __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));

  // ----------- S t a t e -------------
  //  -- rax    : return address
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rdi    : generator function
  //  -- rsi    : generator context
  //  -- rsp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ subl(rcx, Immediate(1));
    __ j(carry, &done_loop, Label::kNear);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
    __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
    __ Assert(equal, kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(rax);
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ movp(rdx, rbx);
    __ jmp(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ Push(rdi);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);
}
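
// Note on the untagging above (illustrative, not from the original source):
// on x64, kSmiTagSize == 1 and kSmiShiftSize == 31, so a Smi keeps its
// payload in the upper 32 bits and `shrq rcx, 32` recovers the raw
// SuspendFlags bits. For example, the Smi encoding of 5 is
// 5 << 32 = 0x0000'0005'0000'0000.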

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count.
  __ movp(args_count,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movl(args_count,
          FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ PopReturnAddressTo(return_pc);
  __ addp(rsp, args_count);
  __ PushReturnAddressFrom(return_pc);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the new target
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.
  __ Push(rdx);  // Callee's new target.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  __ JumpIfNotSmi(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
                  &load_debug_bytecode_array);
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check whether we should continue to use the interpreter.
  // TODO(rmcilroy) Remove self healing once liveedit only has to deal with
  // Ignition bytecode.
  Label switch_to_different_code_kind;
  __ Move(rcx, masm->CodeObject());  // Self-reference to this code.
  __ cmpp(rcx, FieldOperand(rax, SharedFunctionInfo::kCodeOffset));
  __ j(not_equal, &switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kFeedbackVectorOffset));
  __ movp(rcx, FieldOperand(rcx, Cell::kValueOffset));
  __ SmiAddConstant(
      FieldOperand(rcx, FeedbackVector::kInvocationCountIndex * kPointerSize +
                            FeedbackVector::kHeaderSize),
      Smi::FromInt(1));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
                       BytecodeArray::kBytecodeAgeOffset),
          Immediate(BytecodeArray::kNoAgeBytecodeAge));

  // Load initial bytecode offset.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode offset.
  __ Push(kInterpreterBytecodeArrayRegister);
  __ Integer32ToSmi(rcx, kInterpreterBytecodeOffsetRegister);
  __ Push(rcx);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rdx, rsp);
    __ subp(rdx, rcx);
    __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rdx);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Dispatch to the first bytecode handler for the function.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ call(rbx);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in rax.
  LeaveInterpreterFrame(masm, rbx, rcx);
  __ ret(0);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  Register debug_info = kInterpreterBytecodeArrayRegister;
  __ movp(debug_info, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ jmp(&bytecode_array_loaded);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ leave();  // Leave the frame so we can tail call.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ movp(FieldOperand(rdi, JSFunction::kCodeEntryOffset), rcx);
  __ RecordWriteCodeEntryField(rdi, rcx, r15);
  __ jmp(rcx);
}
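
// A minimal sketch of the dispatch sequence above (assumed names, for
// illustration only): the next bytecode is used as an index into the
// dispatch table of handler entry points.
//
//   typedef void (*BytecodeHandler)();
//   void Dispatch(const uint8_t* bytecode_array, intptr_t offset,
//                 BytecodeHandler* dispatch_table) {
//     uint8_t bytecode = bytecode_array[offset];  // movzxbp
//     dispatch_table[bytecode]();                 // call rbx
//   }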

static void Generate_StackOverflowCheck(
    MacroAssembler* masm, Register num_args, Register scratch,
    Label* stack_overflow,
    Label::Distance stack_overflow_distance = Label::kFar) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(scratch, rsp);
  // Make scratch the space we have left. The stack might already be
  // overflowed here which will cause scratch to become negative.
  __ subp(scratch, kScratchRegister);
  __ sarp(scratch, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(scratch, num_args);
  // Signed comparison.
  __ j(less_equal, stack_overflow, stack_overflow_distance);
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args,
                                         Register start_address,
                                         Register scratch) {
  // Find the address of the last argument.
  __ Move(scratch, num_args);
  __ shlp(scratch, Immediate(kPointerSizeLog2));
  __ negp(scratch);
  __ addp(scratch, start_address);

  // Push the arguments.
  Label loop_header, loop_check;
  __ j(always, &loop_check);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ subp(start_address, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(start_address, scratch);
  __ j(greater, &loop_header, Label::kNear);
}
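
// Worked example (illustrative, not from the original source): with
// num_args == 3 and start_address pointing at the first argument,
// scratch = start_address - 3 * 8, one slot past the last argument. The
// loop then pushes the three slots at start_address, start_address - 8 and
// start_address - 16, i.e. the arguments in source order, walking down
// toward scratch.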

// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    TailCallMode tail_call_mode, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order
  //           as they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Number of values to be pushed.
  __ Move(rcx, rax);
  __ addp(rcx, Immediate(1));  // Add one for receiver.

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ subp(rcx, Immediate(1));  // Subtract one for receiver.
  }

  // rbx and rdx will be modified.
  Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.

  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(receiver_mode,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(receiver_mode, tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the allocation site feedback if available, undefined otherwise
  //  -- rcx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order
  //           as they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // rcx and r8 will be modified.
  Generate_InterpreterPushArgs(masm, rax, rcx, r8);

  // Push return address in preparation for the tail-call.
  __ PushReturnAddressFrom(kScratchRegister);

  __ AssertUndefinedOrAllocationSite(rbx);
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ AssertFunction(rdi);

    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
    __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
    // Jump to the constructor function (rax, rbx, rdx passed on).
    __ jmp(rcx);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

// static
void Builtins::Generate_InterpreterPushArgsThenConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the target to call, checked to be the Array function.
  //  -- rbx : the allocation site feedback
  //  -- rcx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order
  //           as they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;

  // Number of values to be pushed.
  __ Move(r8, rax);

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, r8, rdi, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // rcx and rdi will be modified.
  Generate_InterpreterPushArgs(masm, r8, rcx, rdi);

  // Push return address in preparation for the tail-call.
  __ PushReturnAddressFrom(kScratchRegister);

  // The Array constructor expects the constructor in rdi; it is the same as
  // rdx here.
  __ Move(rdi, rdx);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ Move(rbx, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ jmp(rbx);
}
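
// Note on the return-address setup above (illustrative): rbx is set to the
// InterpreterEntryTrampoline code object plus Code::kHeaderSize minus
// kHeapObjectTag plus interpreter_entry_return_pc_offset, i.e. the untagged
// address of the instruction in the trampoline that follows its dispatch
// call. Pushing it makes the dispatched bytecode handler "return" into the
// middle of the trampoline, exactly as if it had been called from there.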

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ movp(rbx, Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(rdx, Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(kInterpreterAccumulatorRegister);
    __ Push(rbx);  // First argument is the bytecode array.
    __ Push(rdx);  // Second argument is the bytecode offset.
    __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
    __ Move(rdx, rax);  // Result is the new bytecode offset.
    __ Pop(kInterpreterAccumulatorRegister);
  }
  __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rdx);

  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  // First look up code; maybe we don't need to compile!
  Label gotta_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = rdi;
  Register map = r8;
  Register index = r9;

  // Do we have a valid feedback vector?
  __ movp(rbx, FieldOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ movp(rbx, FieldOperand(rbx, Cell::kValueOffset));
  __ JumpIfRoot(rbx, Heap::kUndefinedValueRootIndex, &gotta_call_runtime);

  __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset));
  __ cmpl(index, Immediate(2));
  __ j(less, &try_shared);

  // r14 : native context
  // r9  : length / index
  // r8  : optimized code map
  // rdx : new target
  // rdi : closure
  Register native_context = r14;
  __ movp(native_context, NativeContextOperand());

  __ bind(&loop_top);
  // Native context match?
  Register temp = r11;
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousContext));
  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ cmpp(temp, native_context);
  __ j(not_equal, &loop_bottom);

  // Code available?
  Register entry = rcx;
  __ movp(entry, FieldOperand(map, index, times_pointer_size,
                              SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Found code. Get it into the closure and return.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, r15);

  // Link the closure into the optimized function list.
  // rcx : code entry (entry)
  // r14 : native context
  // rdx : new target
  // rdi : closure
  __ movp(rbx,
          ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
          closure);
  // Save closure before the write barrier.
  __ movp(rbx, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, r15,
                            kDontSaveFPRegs);
  __ movp(closure, rbx);
  __ jmp(entry);

  __ bind(&loop_bottom);
  __ subl(index, Immediate(SharedFunctionInfo::kEntryLength));
  __ cmpl(index, Immediate(1));
  __ j(greater, &loop_top);

  // We found no code.
  __ bind(&try_shared);
  __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Is the shared function marked for tier up?
  __ testb(FieldOperand(entry, SharedFunctionInfo::kMarkedForTierUpByteOffset),
           Immediate(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
  __ j(not_zero, &gotta_call_runtime);

  // If SFI points to anything other than CompileLazy, install that.
  __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ Move(rbx, masm->CodeObject());
  __ cmpp(entry, rbx);
  __ j(equal, &gotta_call_runtime);

  // Install the SFI's code entry.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, r15);
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
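
// A minimal C++ sketch of the cache walk above (assumed names, illustration
// only): the optimized code map is scanned from the end in steps of
// kEntryLength, looking for an entry whose context weak cell matches the
// current native context and whose code weak cell is still alive.
//
//   Code* FindCachedCode(FixedArray* map, Context* native_context,
//                        int entry_length) {
//     for (int i = map->length() - entry_length; i > 0; i -= entry_length) {
//       // Hypothetical accessors standing in for the offset arithmetic.
//       if (ContextCellAt(map, i) != native_context) continue;
//       Object* code = CodeCellAt(map, i);
//       // A cleared weak cell holds a Smi; treat it as "no code".
//       return code->IsSmi() ? nullptr : Code::cast(code);
//     }
//     return nullptr;
//   }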

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ movp(rcx, rax);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);

    // The function.
    __ Push(rdi);
    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpp(rcx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(
            rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(rax, &failed, Label::kNear);

    __ Drop(2);
    __ Pop(rcx);
    __ SmiToInteger32(rcx, rcx);
    scope.GenerateLeaveFrame();

    __ PopReturnAddressTo(rbx);
    __ incp(rcx);
    __ leap(rsp, Operand(rsp, rcx, times_pointer_size, 0));
    __ PushReturnAddressFrom(rbx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  // On failure, tail call back to regular js.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
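
// Note on the unrolled argument copy above (illustrative): the asm.js entry
// expects exactly (function, stdlib, foreign, heap). For rcx == 2 actual
// arguments the j == 2 branch runs: it pushes the two caller arguments in
// source order and then one undefined as the missing heap argument, so
// Runtime::kInstantiateAsmJs always receives four values.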

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}
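
// Note on the `subp(Operand(rsp, 0), Immediate(5))` above (illustrative):
// the patched code site reaches this stub via a 5-byte short call (compare
// Assembler::kShortCallInstructionLength below), so rewinding the return
// address on the stack by 5 bytes makes `ret` re-execute that call site once
// the code has been made young again.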
1281 :
1282 : #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1283 : void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \
1284 : GenerateMakeCodeYoungAgainCommon(masm); \
1285 : }
1286 215 : CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1287 : #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1288 :
1289 258 : void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1290 : // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1291 : // that make_code_young doesn't do any garbage collection which allows us to
1292 : // save/restore the registers without worrying about which of them contain
1293 : // pointers.
1294 86 : __ Pushad();
1295 86 : __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
1296 172 : __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
1297 86 : __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
1298 : { // NOLINT
1299 86 : FrameScope scope(masm, StackFrame::MANUAL);
1300 86 : __ PrepareCallCFunction(2);
1301 : __ CallCFunction(
1302 : ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1303 86 : 2);
1304 : }
1305 86 : __ Popad();
1306 :
1307 : // Perform prologue operations usually performed by the young code stub.
1308 : __ PopReturnAddressTo(kScratchRegister);
1309 86 : __ pushq(rbp); // Caller's frame pointer.
1310 : __ movp(rbp, rsp);
1311 86 : __ Push(rsi); // Callee's context.
1312 86 : __ Push(rdi); // Callee's JS Function.
1313 : __ PushReturnAddressFrom(kScratchRegister);
1314 :
1315 : // Jump to point after the code-age stub.
1316 86 : __ ret(0);
1317 86 : }
1318 :
1319 43 : void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1320 43 : GenerateMakeCodeYoungAgainCommon(masm);
1321 43 : }
1322 :
1323 43 : void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1324 43 : Generate_MarkCodeAsExecutedOnce(masm);
1325 43 : }
1326 :
1327 86 : static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1328 : SaveFPRegsMode save_doubles) {
1329 : // Enter an internal frame.
1330 : {
1331 86 : FrameScope scope(masm, StackFrame::INTERNAL);
1332 :
1333 : // Preserve registers across notification, this is important for compiled
1334 : // stubs that tail call the runtime on deopts passing their parameters in
1335 : // registers.
1336 86 : __ Pushad();
1337 86 : __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1338 86 : __ Popad();
1339 : // Tear down internal frame.
1340 : }
1341 :
1342 86 : __ DropUnderReturnAddress(1); // Ignore state offset
1343 86 : __ ret(0); // Return to IC Miss stub, continuation still on stack.
1344 86 : }
1345 :
1346 43 : void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1347 43 : Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1348 43 : }
1349 :
1350 43 : void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1351 43 : Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1352 43 : }
1353 :
1354 129 : static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1355 : Deoptimizer::BailoutType type) {
1356 : // Enter an internal frame.
1357 : {
1358 129 : FrameScope scope(masm, StackFrame::INTERNAL);
1359 :
1360 : // Pass the deoptimization type to the runtime system.
1361 258 : __ Push(Smi::FromInt(static_cast<int>(type)));
1362 :
1363 129 : __ CallRuntime(Runtime::kNotifyDeoptimized);
1364 : // Tear down internal frame.
1365 : }
1366 :
1367 : // Get the full codegen state from the stack and untag it.
1368 129 : __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
1369 :
1370 : // Switch on the state.
1371 : Label not_no_registers, not_tos_rax;
1372 : __ cmpp(kScratchRegister,
1373 129 : Immediate(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
1374 129 : __ j(not_equal, ¬_no_registers, Label::kNear);
1375 129 : __ ret(1 * kPointerSize); // Remove state.
1376 :
1377 129 : __ bind(¬_no_registers);
1378 : DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
1379 258 : __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
1380 : __ cmpp(kScratchRegister,
1381 129 : Immediate(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
1382 129 : __ j(not_equal, ¬_tos_rax, Label::kNear);
1383 129 : __ ret(2 * kPointerSize); // Remove state, rax.
1384 :
1385 129 : __ bind(¬_tos_rax);
1386 129 : __ Abort(kNoCasesLeft);
1387 129 : }
1388 :
1389 43 : void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1390 43 : Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1391 43 : }
1392 :
1393 43 : void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1394 43 : Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1395 43 : }
1396 :
1397 43 : void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1398 43 : Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1399 43 : }
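
// A minimal sketch of the state dispatch in the helper above, operating on
// plain ints instead of real frame slots; "stack_top" points at the slot
// just past the return address, and all names here are illustrative
// stand-ins, not V8 API.
enum class SketchBailoutState { NO_REGISTERS = 0, TOS_REGISTER = 1 };

static int SketchNotifyDeoptimizedPops(const int* stack_top,
                                       int* accumulator) {
  SketchBailoutState state = static_cast<SketchBailoutState>(stack_top[0]);
  if (state == SketchBailoutState::NO_REGISTERS) {
    return 1;  // Pop just the state slot, as in "ret(1 * kPointerSize)".
  }
  *accumulator = stack_top[1];  // Restore rax from the saved TOS value.
  return 2;  // Pop state and value, as in "ret(2 * kPointerSize)".
}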
1400 :
1401 : // static
1402 129 : void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1403 : // ----------- S t a t e -------------
1404 : // -- rax : argc
1405 : // -- rsp[0] : return address
1406 : // -- rsp[8] : argArray
1407 : // -- rsp[16] : thisArg
1408 : // -- rsp[24] : receiver
1409 : // -----------------------------------
1410 :
1411 : // 1. Load receiver into rdi, argArray into rax (if present), remove all
1412 : // arguments from the stack (including the receiver), and push thisArg (if
1413 : // present) instead.
1414 : {
1415 : Label no_arg_array, no_this_arg;
1416 : StackArgumentsAccessor args(rsp, rax);
1417 43 : __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
1418 43 : __ movp(rbx, rdx);
1419 : __ movp(rdi, args.GetReceiverOperand());
1420 : __ testp(rax, rax);
1421 43 : __ j(zero, &no_this_arg, Label::kNear);
1422 : {
1423 43 : __ movp(rdx, args.GetArgumentOperand(1));
1424 43 : __ cmpp(rax, Immediate(1));
1425 43 : __ j(equal, &no_arg_array, Label::kNear);
1426 43 : __ movp(rbx, args.GetArgumentOperand(2));
1427 43 : __ bind(&no_arg_array);
1428 : }
1429 43 : __ bind(&no_this_arg);
1430 : __ PopReturnAddressTo(rcx);
1431 86 : __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1432 43 : __ Push(rdx);
1433 : __ PushReturnAddressFrom(rcx);
1434 : __ movp(rax, rbx);
1435 : }
1436 :
1437 : // ----------- S t a t e -------------
1438 : // -- rax : argArray
1439 : // -- rdi : receiver
1440 : // -- rsp[0] : return address
1441 : // -- rsp[8] : thisArg
1442 : // -----------------------------------
1443 :
1444 : // 2. Make sure the receiver is actually callable.
1445 : Label receiver_not_callable;
1446 43 : __ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear);
1447 : __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
1448 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1449 43 : Immediate(1 << Map::kIsCallable));
1450 43 : __ j(zero, &receiver_not_callable, Label::kNear);
1451 :
1452 : // 3. Tail call with no arguments if argArray is null or undefined.
1453 : Label no_arguments;
1454 : __ JumpIfRoot(rax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
1455 : __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &no_arguments,
1456 : Label::kNear);
1457 :
1458 : // 4a. Apply the receiver to the given argArray (passing undefined for
1459 : // new.target).
1460 43 : __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
1461 43 : __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1462 :
1463 : // 4b. The argArray is either null or undefined, so we tail call without any
1464 : // arguments to the receiver. Since we did not create a frame for
1465 : // Function.prototype.apply() yet, we use a normal Call builtin here.
1466 43 : __ bind(&no_arguments);
1467 : {
1468 43 : __ Set(rax, 0);
1469 43 : __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1470 : }
1471 :
1472 : // 4c. The receiver is not callable, throw an appropriate TypeError.
1473 43 : __ bind(&receiver_not_callable);
1474 : {
1475 : StackArgumentsAccessor args(rsp, 0);
1476 : __ movp(args.GetReceiverOperand(), rdi);
1477 43 : __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1478 : }
1479 43 : }
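
// A minimal sketch of step 1 above: select thisArg and argArray with
// undefined defaults depending on argc. "SketchValue" and "undefined" are
// illustrative stand-ins for tagged values, not the real V8 types.
typedef void* SketchValue;

static void SketchSelectApplyArgs(SketchValue* args, int argc,
                                  SketchValue undefined,
                                  SketchValue* this_arg,
                                  SketchValue* arg_array) {
  *this_arg = undefined;   // rdx default
  *arg_array = undefined;  // rbx default
  if (argc >= 1) *this_arg = args[0];
  if (argc >= 2) *arg_array = args[1];
}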
1480 :
1481 : // static
1482 86 : void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1483 : // Stack Layout:
1484 : // rsp[0] : Return address
1485 : // rsp[8] : Argument n
1486 : // rsp[16] : Argument n-1
1487 : // ...
1488 : // rsp[8 * n] : Argument 1
1489 : // rsp[8 * (n + 1)] : Receiver (callable to call)
1490 : //
1491 : // rax contains the number of arguments, n, not counting the receiver.
1492 : //
1493 : // 1. Make sure we have at least one argument.
1494 : {
1495 : Label done;
1496 43 : __ testp(rax, rax);
1497 43 : __ j(not_zero, &done, Label::kNear);
1498 : __ PopReturnAddressTo(rbx);
1499 43 : __ PushRoot(Heap::kUndefinedValueRootIndex);
1500 : __ PushReturnAddressFrom(rbx);
1501 : __ incp(rax);
1502 43 : __ bind(&done);
1503 : }
1504 :
1505 : // 2. Get the callable to call (passed as receiver) from the stack.
1506 : {
1507 : StackArgumentsAccessor args(rsp, rax);
1508 : __ movp(rdi, args.GetReceiverOperand());
1509 : }
1510 :
1511 : // 3. Shift arguments and return address one slot down on the stack
1512 : // (overwriting the original receiver). Adjust argument count to make
1513 : // the original first argument the new receiver.
1514 : {
1515 : Label loop;
1516 : __ movp(rcx, rax);
1517 : StackArgumentsAccessor args(rsp, rcx);
1518 43 : __ bind(&loop);
1519 43 : __ movp(rbx, args.GetArgumentOperand(1));
1520 43 : __ movp(args.GetArgumentOperand(0), rbx);
1521 : __ decp(rcx);
1522 43 : __ j(not_zero, &loop); // While non-zero.
1523 43 : __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address.
1524 : __ decp(rax); // One fewer argument (first argument is new receiver).
1525 : }
1526 :
1527 : // 4. Call the callable.
1528 : // Since we did not create a frame for Function.prototype.call() yet,
1529 : // we use a normal Call builtin here.
1530 43 : __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1531 43 : }
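
// A minimal sketch of step 3 above: slide every argument one slot toward
// the receiver position so the first argument becomes the new receiver,
// then report the decremented argument count. A plain array models the
// stack slots (index 0 == receiver slot); illustrative only.
static int SketchShiftForCall(void** slots, int argc) {
  for (int i = 0; i < argc; i++) {
    slots[i] = slots[i + 1];  // Overwrites the original receiver.
  }
  return argc - 1;  // decp(rax): one fewer argument after the shift.
}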
1532 :
1533 86 : void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1534 : // ----------- S t a t e -------------
1535 : // -- rax : argc
1536 : // -- rsp[0] : return address
1537 : // -- rsp[8] : argumentsList
1538 : // -- rsp[16] : thisArgument
1539 : // -- rsp[24] : target
1540 : // -- rsp[32] : receiver
1541 : // -----------------------------------
1542 :
1543 : // 1. Load target into rdi (if present), argumentsList into rax (if present),
1544 : // remove all arguments from the stack (including the receiver), and push
1545 : // thisArgument (if present) instead.
1546 : {
1547 : Label done;
1548 : StackArgumentsAccessor args(rsp, rax);
1549 43 : __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
1550 43 : __ movp(rdx, rdi);
1551 : __ movp(rbx, rdi);
1552 43 : __ cmpp(rax, Immediate(1));
1553 43 : __ j(below, &done, Label::kNear);
1554 43 : __ movp(rdi, args.GetArgumentOperand(1)); // target
1555 43 : __ j(equal, &done, Label::kNear);
1556 43 : __ movp(rdx, args.GetArgumentOperand(2)); // thisArgument
1557 43 : __ cmpp(rax, Immediate(3));
1558 43 : __ j(below, &done, Label::kNear);
1559 43 : __ movp(rbx, args.GetArgumentOperand(3)); // argumentsList
1560 43 : __ bind(&done);
1561 : __ PopReturnAddressTo(rcx);
1562 86 : __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1563 43 : __ Push(rdx);
1564 : __ PushReturnAddressFrom(rcx);
1565 : __ movp(rax, rbx);
1566 : }
1567 :
1568 : // ----------- S t a t e -------------
1569 : // -- rax : argumentsList
1570 : // -- rdi : target
1571 : // -- rsp[0] : return address
1572 : // -- rsp[8] : thisArgument
1573 : // -----------------------------------
1574 :
1575 : // 2. Make sure the target is actually callable.
1576 : Label target_not_callable;
1577 43 : __ JumpIfSmi(rdi, &target_not_callable, Label::kNear);
1578 : __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
1579 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1580 43 : Immediate(1 << Map::kIsCallable));
1581 43 : __ j(zero, &target_not_callable, Label::kNear);
1582 :
1583 : // 3a. Apply the target to the given argumentsList (passing undefined for
1584 : // new.target).
1585 43 : __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
1586 43 : __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1587 :
1588 : // 3b. The target is not callable, throw an appropriate TypeError.
1589 43 : __ bind(&target_not_callable);
1590 : {
1591 : StackArgumentsAccessor args(rsp, 0);
1592 : __ movp(args.GetReceiverOperand(), rdi);
1593 43 : __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1594 : }
1595 43 : }
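
// A minimal sketch of the flag-reuse cascade in step 1 above: a single
// cmpp against 1 feeds both the "below" (argc == 0) and "equal"
// (argc == 1) branches. Names are illustrative stand-ins, not V8 API.
static void SketchSelectReflectApplyArgs(void** args, int argc,
                                         void* undefined, void** target,
                                         void** this_argument,
                                         void** arguments_list) {
  *target = undefined;
  *this_argument = undefined;
  *arguments_list = undefined;
  if (argc < 1) return;   // j(below, &done) after cmpp(rax, Immediate(1))
  *target = args[0];
  if (argc == 1) return;  // j(equal, &done) reuses the same flags
  *this_argument = args[1];
  if (argc < 3) return;   // cmpp(rax, Immediate(3)); j(below, &done)
  *arguments_list = args[2];
}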
1596 :
1597 86 : void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1598 : // ----------- S t a t e -------------
1599 : // -- rax : argc
1600 : // -- rsp[0] : return address
1601 : // -- rsp[8] : new.target (optional)
1602 : // -- rsp[16] : argumentsList
1603 : // -- rsp[24] : target
1604 : // -- rsp[32] : receiver
1605 : // -----------------------------------
1606 :
1607 : // 1. Load target into rdi (if present), argumentsList into rax (if present),
1608 : // new.target into rdx (if present, otherwise use target), remove all
1609 : // arguments from the stack (including the receiver), and push undefined
1610 : // as the receiver instead.
1611 : {
1612 : Label done;
1613 : StackArgumentsAccessor args(rsp, rax);
1614 43 : __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
1615 43 : __ movp(rdx, rdi);
1616 : __ movp(rbx, rdi);
1617 43 : __ cmpp(rax, Immediate(1));
1618 43 : __ j(below, &done, Label::kNear);
1619 43 : __ movp(rdi, args.GetArgumentOperand(1)); // target
1620 : __ movp(rdx, rdi); // new.target defaults to target
1621 43 : __ j(equal, &done, Label::kNear);
1622 43 : __ movp(rbx, args.GetArgumentOperand(2)); // argumentsList
1623 43 : __ cmpp(rax, Immediate(3));
1624 43 : __ j(below, &done, Label::kNear);
1625 43 : __ movp(rdx, args.GetArgumentOperand(3)); // new.target
1626 43 : __ bind(&done);
1627 : __ PopReturnAddressTo(rcx);
1628 86 : __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1629 43 : __ PushRoot(Heap::kUndefinedValueRootIndex);
1630 : __ PushReturnAddressFrom(rcx);
1631 : __ movp(rax, rbx);
1632 : }
1633 :
1634 : // ----------- S t a t e -------------
1635 : // -- rax : argumentsList
1636 : // -- rdx : new.target
1637 : // -- rdi : target
1638 : // -- rsp[0] : return address
1639 : // -- rsp[8] : receiver (undefined)
1640 : // -----------------------------------
1641 :
1642 : // 2. Make sure the target is actually a constructor.
1643 : Label target_not_constructor;
1644 43 : __ JumpIfSmi(rdi, &target_not_constructor, Label::kNear);
1645 : __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
1646 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1647 43 : Immediate(1 << Map::kIsConstructor));
1648 43 : __ j(zero, &target_not_constructor, Label::kNear);
1649 :
1650 : // 3. Make sure the new.target is actually a constructor.
1651 : Label new_target_not_constructor;
1652 43 : __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
1653 : __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1654 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1655 43 : Immediate(1 << Map::kIsConstructor));
1656 43 : __ j(zero, &new_target_not_constructor, Label::kNear);
1657 :
1658 : // 4a. Construct the target with the given new.target and argumentsList.
1659 43 : __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1660 :
1661 : // 4b. The target is not a constructor, throw an appropriate TypeError.
1662 43 : __ bind(&target_not_constructor);
1663 : {
1664 : StackArgumentsAccessor args(rsp, 0);
1665 : __ movp(args.GetReceiverOperand(), rdi);
1666 43 : __ TailCallRuntime(Runtime::kThrowNotConstructor);
1667 : }
1668 :
1669 : // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1670 43 : __ bind(&new_target_not_constructor);
1671 : {
1672 : StackArgumentsAccessor args(rsp, 0);
1673 : __ movp(args.GetReceiverOperand(), rdx);
1674 43 : __ TailCallRuntime(Runtime::kThrowNotConstructor);
1675 : }
1676 43 : }
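
// A minimal sketch of the constructor checks above: a Smi has no map, so
// it can never construct; otherwise a single bit in the map's bit field
// decides. The bit index is a parameter here because this sketch does not
// reproduce the real Map layout.
static bool SketchIsConstructor(bool is_smi, unsigned map_bit_field,
                                unsigned is_constructor_bit) {
  if (is_smi) return false;  // JumpIfSmi guard: Smis have no map.
  return (map_bit_field & (1u << is_constructor_bit)) != 0;
}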
1677 :
1678 86 : void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1679 : // ----------- S t a t e -------------
1680 : // -- rax : argc
1681 : // -- rsp[0] : return address
1682 : // -- rsp[8] : last argument
1683 : // -----------------------------------
1684 : Label generic_array_code;
1685 :
1686 : // Get the InternalArray function.
1687 43 : __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1688 :
1689 43 : if (FLAG_debug_code) {
1690 : // Initial map for the builtin InternalArray functions should be maps.
1691 0 : __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1692 : // kSmiTag == 0, so the Smi check below catches both a NULL and a Smi.
1693 : STATIC_ASSERT(kSmiTag == 0);
1694 0 : Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1695 0 : __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
1696 0 : __ CmpObjectType(rbx, MAP_TYPE, rcx);
1697 0 : __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
1698 : }
1699 :
1700 : // Run the native code for the InternalArray function called as a normal
1701 : // function.
1702 : // We do this by tail calling a stub.
1703 43 : InternalArrayConstructorStub stub(masm->isolate());
1704 43 : __ TailCallStub(&stub);
1705 43 : }
1706 :
1707 86 : void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1708 : // ----------- S t a t e -------------
1709 : // -- rax : argc
1710 : // -- rsp[0] : return address
1711 : // -- rsp[8] : last argument
1712 : // -----------------------------------
1713 : Label generic_array_code;
1714 :
1715 : // Get the Array function.
1716 43 : __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);
1717 :
1718 43 : if (FLAG_debug_code) {
1719 : // Initial map for the builtin Array functions should be maps.
1720 0 : __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1721 : // kSmiTag == 0, so the Smi check below catches both a NULL and a Smi.
1722 : STATIC_ASSERT(kSmiTag == 0);
1723 0 : Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1724 0 : __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
1725 0 : __ CmpObjectType(rbx, MAP_TYPE, rcx);
1726 0 : __ Check(equal, kUnexpectedInitialMapForArrayFunction);
1727 : }
1728 :
1729 43 : __ movp(rdx, rdi);
1730 : // Run the native code for the Array function called as a normal function.
1731 : // We do this by tail calling a stub.
1732 43 : __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1733 43 : ArrayConstructorStub stub(masm->isolate());
1734 43 : __ TailCallStub(&stub);
1735 43 : }
1736 :
1737 : // static
1738 86 : void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
1739 : // ----------- S t a t e -------------
1740 : // -- rax : number of arguments
1741 : // -- rdi : constructor function
1742 : // -- rsi : context
1743 : // -- rsp[0] : return address
1744 : // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1745 : // -- rsp[(argc + 1) * 8] : receiver
1746 : // -----------------------------------
1747 :
1748 : // 1. Load the first argument into rbx.
1749 : Label no_arguments;
1750 : {
1751 : StackArgumentsAccessor args(rsp, rax);
1752 43 : __ testp(rax, rax);
1753 43 : __ j(zero, &no_arguments, Label::kNear);
1754 43 : __ movp(rbx, args.GetArgumentOperand(1));
1755 : }
1756 :
1757 : // 2a. Convert the first argument to a number.
1758 : {
1759 43 : FrameScope scope(masm, StackFrame::MANUAL);
1760 43 : __ Integer32ToSmi(rax, rax);
1761 43 : __ EnterBuiltinFrame(rsi, rdi, rax);
1762 : __ movp(rax, rbx);
1763 43 : __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1764 43 : __ LeaveBuiltinFrame(rsi, rdi, rbx); // Argc popped to rbx.
1765 43 : __ SmiToInteger32(rbx, rbx);
1766 : }
1767 :
1768 : {
1769 : // Drop all arguments including the receiver.
1770 : __ PopReturnAddressTo(rcx);
1771 86 : __ leap(rsp, Operand(rsp, rbx, times_pointer_size, kPointerSize));
1772 : __ PushReturnAddressFrom(rcx);
1773 43 : __ Ret();
1774 : }
1775 :
1776 : // 2b. No arguments, return +0 (already in rax).
1777 43 : __ bind(&no_arguments);
1778 43 : __ ret(1 * kPointerSize);
1779 43 : }
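
// A minimal sketch of the smi round-trip used above to carry argc across
// the ToNumber call: on x64, V8 keeps the 32-bit payload in the upper
// half of the word, so tagging and untagging are plain shifts by 32 (this
// sketch assumes arithmetic right shift, as V8 itself does).
static long long SketchInteger32ToSmi(int value) {
  return static_cast<long long>(value) << 32;
}

static int SketchSmiToInteger32(long long smi) {
  return static_cast<int>(smi >> 32);
}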
1780 :
1781 : // static
1782 129 : void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
1783 : // ----------- S t a t e -------------
1784 : // -- rax : number of arguments
1785 : // -- rdi : constructor function
1786 : // -- rdx : new target
1787 : // -- rsi : context
1788 : // -- rsp[0] : return address
1789 : // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1790 : // -- rsp[(argc + 1) * 8] : receiver
1791 : // -----------------------------------
1792 :
1793 : // 1. Make sure we operate in the context of the called function.
1794 43 : __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1795 :
1796 : // Store argc in r8.
1797 43 : __ Integer32ToSmi(r8, rax);
1798 :
1799 : // 2. Load the first argument into rbx.
1800 : {
1801 : StackArgumentsAccessor args(rsp, rax);
1802 : Label no_arguments, done;
1803 : __ testp(rax, rax);
1804 43 : __ j(zero, &no_arguments, Label::kNear);
1805 43 : __ movp(rbx, args.GetArgumentOperand(1));
1806 43 : __ jmp(&done, Label::kNear);
1807 43 : __ bind(&no_arguments);
1808 : __ Move(rbx, Smi::kZero);
1809 43 : __ bind(&done);
1810 : }
1811 :
1812 : // 3. Make sure rbx is a number.
1813 : {
1814 : Label done_convert;
1815 43 : __ JumpIfSmi(rbx, &done_convert);
1816 : __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1817 43 : Heap::kHeapNumberMapRootIndex);
1818 43 : __ j(equal, &done_convert);
1819 : {
1820 43 : FrameScope scope(masm, StackFrame::MANUAL);
1821 43 : __ EnterBuiltinFrame(rsi, rdi, r8);
1822 43 : __ Push(rdx);
1823 43 : __ Move(rax, rbx);
1824 43 : __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1825 43 : __ Move(rbx, rax);
1826 43 : __ Pop(rdx);
1827 43 : __ LeaveBuiltinFrame(rsi, rdi, r8);
1828 : }
1829 43 : __ bind(&done_convert);
1830 : }
1831 :
1832 : // 4. Check if new target and constructor differ.
1833 : Label drop_frame_and_ret, new_object;
1834 43 : __ cmpp(rdx, rdi);
1835 43 : __ j(not_equal, &new_object);
1836 :
1837 : // 5. Allocate a JSValue wrapper for the number.
1838 43 : __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
1839 43 : __ jmp(&drop_frame_and_ret, Label::kNear);
1840 :
1841 : // 6. Fall back to the runtime to create a new object.
1842 43 : __ bind(&new_object);
1843 : {
1844 43 : FrameScope scope(masm, StackFrame::MANUAL);
1845 43 : __ EnterBuiltinFrame(rsi, rdi, r8);
1846 43 : __ Push(rbx); // the first argument
1847 : __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
1848 86 : RelocInfo::CODE_TARGET);
1849 43 : __ Pop(FieldOperand(rax, JSValue::kValueOffset));
1850 43 : __ LeaveBuiltinFrame(rsi, rdi, r8);
1851 : }
1852 :
1853 43 : __ bind(&drop_frame_and_ret);
1854 : {
1855 : // Drop all arguments including the receiver.
1856 : __ PopReturnAddressTo(rcx);
1857 43 : __ SmiToInteger32(r8, r8);
1858 86 : __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
1859 : __ PushReturnAddressFrom(rcx);
1860 43 : __ Ret();
1861 : }
1862 43 : }
1863 :
1864 : // static
1865 86 : void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
1866 : // ----------- S t a t e -------------
1867 : // -- rax : number of arguments
1868 : // -- rdi : constructor function
1869 : // -- rsi : context
1870 : // -- rsp[0] : return address
1871 : // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1872 : // -- rsp[(argc + 1) * 8] : receiver
1873 : // -----------------------------------
1874 :
1875 : // 1. Load the first argument into rax.
1876 : Label no_arguments;
1877 : {
1878 : StackArgumentsAccessor args(rsp, rax);
1879 43 : __ Integer32ToSmi(r8, rax); // Store argc in r8.
1880 43 : __ testp(rax, rax);
1881 43 : __ j(zero, &no_arguments, Label::kNear);
1882 43 : __ movp(rax, args.GetArgumentOperand(1));
1883 : }
1884 :
1885 : // 2a. At least one argument; return rax if it's a string, otherwise
1886 : // dispatch to appropriate conversion.
1887 : Label drop_frame_and_ret, to_string, symbol_descriptive_string;
1888 : {
1889 43 : __ JumpIfSmi(rax, &to_string, Label::kNear);
1890 : STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
1891 43 : __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
1892 43 : __ j(above, &to_string, Label::kNear);
1893 43 : __ j(equal, &symbol_descriptive_string, Label::kNear);
1894 43 : __ jmp(&drop_frame_and_ret, Label::kNear);
1895 : }
1896 :
1897 : // 2b. No arguments, return the empty string (and pop the receiver).
1898 43 : __ bind(&no_arguments);
1899 : {
1900 43 : __ LoadRoot(rax, Heap::kempty_stringRootIndex);
1901 43 : __ ret(1 * kPointerSize);
1902 : }
1903 :
1904 : // 3a. Convert rax to a string.
1905 43 : __ bind(&to_string);
1906 : {
1907 43 : FrameScope scope(masm, StackFrame::MANUAL);
1908 43 : __ EnterBuiltinFrame(rsi, rdi, r8);
1909 43 : __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
1910 43 : __ LeaveBuiltinFrame(rsi, rdi, r8);
1911 : }
1912 43 : __ jmp(&drop_frame_and_ret, Label::kNear);
1913 :
1914 : // 3b. Convert symbol in rax to a string.
1915 43 : __ bind(&symbol_descriptive_string);
1916 : {
1917 : __ PopReturnAddressTo(rcx);
1918 43 : __ SmiToInteger32(r8, r8);
1919 86 : __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
1920 43 : __ Push(rax);
1921 : __ PushReturnAddressFrom(rcx);
1922 43 : __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
1923 : }
1924 :
1925 43 : __ bind(&drop_frame_and_ret);
1926 : {
1927 : // Drop all arguments including the receiver.
1928 : __ PopReturnAddressTo(rcx);
1929 43 : __ SmiToInteger32(r8, r8);
1930 86 : __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
1931 : __ PushReturnAddressFrom(rcx);
1932 43 : __ Ret();
1933 : }
1934 43 : }
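
// A minimal sketch of the three-way dispatch above: because string
// instance types sort below FIRST_NONSTRING_TYPE (== SYMBOL_TYPE), one
// compare classifies the value. Enumerators are illustrative labels, not
// the real instance-type numbering.
enum SketchStringCtorPath { kReturnAsIs, kToString, kSymbolDescriptive };

static SketchStringCtorPath SketchClassify(bool is_smi, int instance_type,
                                           int first_nonstring_type) {
  if (is_smi) return kToString;  // Smis convert via ToString.
  if (instance_type > first_nonstring_type) return kToString;
  if (instance_type == first_nonstring_type) return kSymbolDescriptive;
  return kReturnAsIs;  // Below the boundary: already a string.
}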
1935 :
1936 : // static
1937 129 : void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
1938 : // ----------- S t a t e -------------
1939 : // -- rax : number of arguments
1940 : // -- rdi : constructor function
1941 : // -- rdx : new target
1942 : // -- rsi : context
1943 : // -- rsp[0] : return address
1944 : // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1945 : // -- rsp[(argc + 1) * 8] : receiver
1946 : // -----------------------------------
1947 :
1948 : // 1. Make sure we operate in the context of the called function.
1949 43 : __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1950 :
1951 : // Store argc in r8.
1952 43 : __ Integer32ToSmi(r8, rax);
1953 :
1954 : // 2. Load the first argument into rbx.
1955 : {
1956 : StackArgumentsAccessor args(rsp, rax);
1957 : Label no_arguments, done;
1958 : __ testp(rax, rax);
1959 43 : __ j(zero, &no_arguments, Label::kNear);
1960 43 : __ movp(rbx, args.GetArgumentOperand(1));
1961 43 : __ jmp(&done, Label::kNear);
1962 43 : __ bind(&no_arguments);
1963 43 : __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1964 43 : __ bind(&done);
1965 : }
1966 :
1967 : // 3. Make sure rbx is a string.
1968 : {
1969 : Label convert, done_convert;
1970 43 : __ JumpIfSmi(rbx, &convert, Label::kNear);
1971 43 : __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rcx);
1972 43 : __ j(below, &done_convert);
1973 43 : __ bind(&convert);
1974 : {
1975 43 : FrameScope scope(masm, StackFrame::MANUAL);
1976 43 : __ EnterBuiltinFrame(rsi, rdi, r8);
1977 43 : __ Push(rdx);
1978 43 : __ Move(rax, rbx);
1979 43 : __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
1980 43 : __ Move(rbx, rax);
1981 43 : __ Pop(rdx);
1982 43 : __ LeaveBuiltinFrame(rsi, rdi, r8);
1983 : }
1984 43 : __ bind(&done_convert);
1985 : }
1986 :
1987 : // 4. Check if new target and constructor differ.
1988 : Label drop_frame_and_ret, new_object;
1989 43 : __ cmpp(rdx, rdi);
1990 43 : __ j(not_equal, &new_object);
1991 :
1992 : // 5. Allocate a JSValue wrapper for the string.
1993 43 : __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
1994 43 : __ jmp(&drop_frame_and_ret, Label::kNear);
1995 :
1996 : // 6. Fall back to the runtime to create a new object.
1997 43 : __ bind(&new_object);
1998 : {
1999 43 : FrameScope scope(masm, StackFrame::MANUAL);
2000 43 : __ EnterBuiltinFrame(rsi, rdi, r8);
2001 43 : __ Push(rbx); // the first argument
2002 : __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
2003 86 : RelocInfo::CODE_TARGET);
2004 43 : __ Pop(FieldOperand(rax, JSValue::kValueOffset));
2005 43 : __ LeaveBuiltinFrame(rsi, rdi, r8);
2006 : }
2007 :
2008 43 : __ bind(&drop_frame_and_ret);
2009 : {
2010 : // Drop all arguments including the receiver.
2011 : __ PopReturnAddressTo(rcx);
2012 43 : __ SmiToInteger32(r8, r8);
2013 86 : __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
2014 : __ PushReturnAddressFrom(rcx);
2015 43 : __ Ret();
2016 : }
2017 43 : }
2018 :
2019 86 : static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2020 86 : __ pushq(rbp);
2021 : __ movp(rbp, rsp);
2022 :
2023 : // Store the arguments adaptor context sentinel.
2024 86 : __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2025 :
2026 : // Push the function on the stack.
2027 86 : __ Push(rdi);
2028 :
2029 : // Preserve the number of arguments on the stack. We must preserve rax,
2030 : // rbx, and rcx because these registers are used when copying the
2031 : // arguments and the receiver.
2032 86 : __ Integer32ToSmi(r8, rax);
2033 86 : __ Push(r8);
2034 86 : }
2035 :
2036 43 : static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2037 : // Retrieve the number of arguments from the stack. Number is a Smi.
2038 86 : __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2039 :
2040 : // Leave the frame.
2041 : __ movp(rsp, rbp);
2042 43 : __ popq(rbp);
2043 :
2044 : // Remove caller arguments from the stack.
2045 : __ PopReturnAddressTo(rcx);
2046 43 : SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
2047 86 : __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
2048 : __ PushReturnAddressFrom(rcx);
2049 43 : }
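
// A minimal sketch of the caller-argument removal above: SmiToIndex
// shifts the smi-encoded length right by (32 - kPointerSizeLog2), so the
// result is already argc * 8 bytes; one extra slot drops the receiver.
// Plain arithmetic mirror of the x64 smi layout, no real frames involved.
static long long SketchBytesToDrop(long long length_smi) {
  long long bytes = length_smi >> (32 - 3);  // argc * 8, in one shift
  return bytes + 8;                          // plus the receiver slot
}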
2050 :
2051 : // static
2052 43 : void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2053 : // ----------- S t a t e -------------
2054 : // -- rdx : requested object size (untagged)
2055 : // -- rsp[0] : return address
2056 : // -----------------------------------
2057 43 : __ Integer32ToSmi(rdx, rdx);
2058 : __ PopReturnAddressTo(rcx);
2059 43 : __ Push(rdx);
2060 : __ PushReturnAddressFrom(rcx);
2061 : __ Move(rsi, Smi::kZero);
2062 43 : __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2063 43 : }
2064 :
2065 : // static
2066 43 : void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2067 : // ----------- S t a t e -------------
2068 : // -- rdx : requested object size (untagged)
2069 : // -- rsp[0] : return address
2070 : // -----------------------------------
2071 43 : __ Integer32ToSmi(rdx, rdx);
2072 : __ PopReturnAddressTo(rcx);
2073 43 : __ Push(rdx);
2074 43 : __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2075 : __ PushReturnAddressFrom(rcx);
2076 : __ Move(rsi, Smi::kZero);
2077 43 : __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2078 43 : }
2079 :
2080 : // static
2081 43 : void Builtins::Generate_Abort(MacroAssembler* masm) {
2082 : // ----------- S t a t e -------------
2083 : // -- rdx : message_id as Smi
2084 : // -- rsp[0] : return address
2085 : // -----------------------------------
2086 : __ PopReturnAddressTo(rcx);
2087 43 : __ Push(rdx);
2088 : __ PushReturnAddressFrom(rcx);
2089 : __ Move(rsi, Smi::kZero);
2090 43 : __ TailCallRuntime(Runtime::kAbort);
2091 43 : }
2092 :
2093 129 : void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2094 : // ----------- S t a t e -------------
2095 : // -- rax : actual number of arguments
2096 : // -- rbx : expected number of arguments
2097 : // -- rdx : new target (passed through to callee)
2098 : // -- rdi : function (passed through to callee)
2099 : // -----------------------------------
2100 :
2101 : Label invoke, dont_adapt_arguments, stack_overflow;
2102 43 : Counters* counters = masm->isolate()->counters();
2103 43 : __ IncrementCounter(counters->arguments_adaptors(), 1);
2104 :
2105 : Label enough, too_few;
2106 43 : __ cmpp(rax, rbx);
2107 43 : __ j(less, &too_few);
2108 43 : __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2109 43 : __ j(equal, &dont_adapt_arguments);
2110 :
2111 : { // Enough parameters: Actual >= expected.
2112 43 : __ bind(&enough);
2113 43 : EnterArgumentsAdaptorFrame(masm);
2114 : // The registers rcx and r8 will be modified. The register rbx is only read.
2115 43 : Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
2116 :
2117 : // Copy receiver and all expected arguments.
2118 : const int offset = StandardFrameConstants::kCallerSPOffset;
2119 86 : __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
2120 43 : __ Set(r8, -1); // account for receiver
2121 :
2122 : Label copy;
2123 43 : __ bind(&copy);
2124 : __ incp(r8);
2125 43 : __ Push(Operand(rax, 0));
2126 43 : __ subp(rax, Immediate(kPointerSize));
2127 43 : __ cmpp(r8, rbx);
2128 43 : __ j(less, &copy);
2129 43 : __ jmp(&invoke);
2130 : }
2131 :
2132 : { // Too few parameters: Actual < expected.
2133 43 : __ bind(&too_few);
2134 :
2135 43 : EnterArgumentsAdaptorFrame(masm);
2136 : // The registers rcx and r8 will be modified. The register rbx is only read.
2137 43 : Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
2138 :
2139 : // Copy receiver and all actual arguments.
2140 : const int offset = StandardFrameConstants::kCallerSPOffset;
2141 86 : __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
2142 43 : __ Set(r8, -1); // account for receiver
2143 :
2144 : Label copy;
2145 43 : __ bind(&copy);
2146 : __ incp(r8);
2147 43 : __ Push(Operand(rdi, 0));
2148 43 : __ subp(rdi, Immediate(kPointerSize));
2149 43 : __ cmpp(r8, rax);
2150 43 : __ j(less, &copy);
2151 :
2152 : // Fill remaining expected arguments with undefined values.
2153 : Label fill;
2154 43 : __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
2155 43 : __ bind(&fill);
2156 : __ incp(r8);
2157 43 : __ Push(kScratchRegister);
2158 43 : __ cmpp(r8, rbx);
2159 43 : __ j(less, &fill);
2160 :
2161 : // Restore function pointer.
2162 86 : __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2163 : }
2164 :
2165 : // Call the entry point.
2166 43 : __ bind(&invoke);
2167 : __ movp(rax, rbx);
2168 : // rax : expected number of arguments
2169 : // rdx : new target (passed through to callee)
2170 : // rdi : function (passed through to callee)
2171 : __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2172 43 : __ call(rcx);
2173 :
2174 : // Store offset of return address for deoptimizer.
2175 43 : masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2176 :
2177 : // Leave frame and return.
2178 43 : LeaveArgumentsAdaptorFrame(masm);
2179 43 : __ ret(0);
2180 :
2181 : // -------------------------------------------
2182 : // Don't adapt arguments.
2183 : // -------------------------------------------
2184 43 : __ bind(&dont_adapt_arguments);
2185 : __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2186 43 : __ jmp(rcx);
2187 :
2188 43 : __ bind(&stack_overflow);
2189 : {
2190 43 : FrameScope frame(masm, StackFrame::MANUAL);
2191 43 : __ CallRuntime(Runtime::kThrowStackOverflow);
2192 43 : __ int3();
2193 : }
2194 43 : }
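
// A minimal sketch of the adaptor above: keep min(actual, expected)
// incoming arguments and pad the remainder with undefined, so the callee
// always sees exactly "expected" arguments. Arrays stand in for stack
// slots and the receiver/ordering details are elided; illustrative only.
static void SketchAdaptArguments(void** actual_args, int actual,
                                 void* undefined, void** adapted,
                                 int expected) {
  for (int i = 0; i < expected; i++) {
    adapted[i] = (i < actual) ? actual_args[i] : undefined;
  }
}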
2195 :
2196 : // static
2197 129 : void Builtins::Generate_Apply(MacroAssembler* masm) {
2198 : // ----------- S t a t e -------------
2199 : // -- rax : argumentsList
2200 : // -- rdi : target
2201 : // -- rdx : new.target (checked to be constructor or undefined)
2202 : // -- rsp[0] : return address.
2203 : // -- rsp[8] : thisArgument
2204 : // -----------------------------------
2205 :
2206 : // Create the list of arguments from the array-like argumentsList.
2207 : {
2208 : Label create_arguments, create_array, create_holey_array, create_runtime,
2209 : done_create;
2210 43 : __ JumpIfSmi(rax, &create_runtime);
2211 :
2212 : // Load the map of argumentsList into rcx.
2213 43 : __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
2214 :
2215 : // Load native context into rbx.
2216 : __ movp(rbx, NativeContextOperand());
2217 :
2218 : // Check if argumentsList is an (unmodified) arguments object.
2219 43 : __ cmpp(rcx, ContextOperand(rbx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2220 43 : __ j(equal, &create_arguments);
2221 43 : __ cmpp(rcx, ContextOperand(rbx, Context::STRICT_ARGUMENTS_MAP_INDEX));
2222 43 : __ j(equal, &create_arguments);
2223 :
2224 : // Check if argumentsList is a fast JSArray.
2225 43 : __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
2226 43 : __ j(equal, &create_array);
2227 :
2228 : // Ask the runtime to create the list (actually a FixedArray).
2229 43 : __ bind(&create_runtime);
2230 : {
2231 43 : FrameScope scope(masm, StackFrame::INTERNAL);
2232 43 : __ Push(rdi);
2233 43 : __ Push(rdx);
2234 43 : __ Push(rax);
2235 43 : __ CallRuntime(Runtime::kCreateListFromArrayLike);
2236 43 : __ Pop(rdx);
2237 43 : __ Pop(rdi);
2238 43 : __ SmiToInteger32(rbx, FieldOperand(rax, FixedArray::kLengthOffset));
2239 : }
2240 43 : __ jmp(&done_create);
2241 :
2242 : // Try to create the list from an arguments object.
2243 43 : __ bind(&create_arguments);
2244 : __ movp(rbx, FieldOperand(rax, JSArgumentsObject::kLengthOffset));
2245 : __ movp(rcx, FieldOperand(rax, JSObject::kElementsOffset));
2246 : __ cmpp(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2247 43 : __ j(not_equal, &create_runtime);
2248 43 : __ SmiToInteger32(rbx, rbx);
2249 : __ movp(rax, rcx);
2250 43 : __ jmp(&done_create);
2251 :
2252 43 : __ bind(&create_holey_array);
2253 : // For holey JSArrays we need to check that the array prototype chain
2254 : // protector is intact and that the prototype is actually the initial Array.prototype.
2255 : __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
2256 : __ movp(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
2257 43 : __ cmpp(rcx, ContextOperand(rbx, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2258 43 : __ j(not_equal, &create_runtime);
2259 43 : __ LoadRoot(rcx, Heap::kArrayProtectorRootIndex);
2260 : __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset),
2261 43 : Smi::FromInt(Isolate::kProtectorValid));
2262 43 : __ j(not_equal, &create_runtime);
2263 43 : __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset));
2264 : __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset));
2265 43 : __ jmp(&done_create);
2266 :
2267 : // Try to create the list from a JSArray object.
2268 43 : __ bind(&create_array);
2269 : __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
2270 43 : __ DecodeField<Map::ElementsKindBits>(rcx);
2271 : STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2272 : STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2273 : STATIC_ASSERT(FAST_ELEMENTS == 2);
2274 : STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
2275 43 : __ cmpl(rcx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
2276 43 : __ j(equal, &create_holey_array);
2277 43 : __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
2278 43 : __ j(equal, &create_holey_array);
2279 43 : __ j(above, &create_runtime);
2280 43 : __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset));
2281 : __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset));
2282 :
2283 43 : __ bind(&done_create);
2284 : }
2285 :
2286 : // Check for stack overflow.
2287 : {
2288 : // Check the stack for overflow. We are not trying to catch interruptions
2289 : // (i.e. debug break and preemption) here, so check the "real stack limit".
2290 : Label done;
2291 43 : __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
2292 : __ movp(rcx, rsp);
2293 : // Make rcx the space we have left. The stack might already be overflowed
2294 : // here which will cause rcx to become negative.
2295 43 : __ subp(rcx, kScratchRegister);
2296 : __ sarp(rcx, Immediate(kPointerSizeLog2));
2297 : // Check if the arguments will overflow the stack.
2298 43 : __ cmpp(rcx, rbx);
2299 43 : __ j(greater, &done, Label::kNear); // Signed comparison.
2300 43 : __ TailCallRuntime(Runtime::kThrowStackOverflow);
2301 43 : __ bind(&done);
2302 : }
2303 :
2304 : // ----------- S t a t e -------------
2305 : // -- rdi : target
2306 : // -- rax : args (a FixedArray built from argumentsList)
2307 : // -- rbx : len (number of elements to push from args)
2308 : // -- rdx : new.target (checked to be constructor or undefined)
2309 : // -- rsp[0] : return address.
2310 : // -- rsp[8] : thisArgument
2311 : // -----------------------------------
2312 :
2313 : // Push arguments onto the stack (thisArgument is already on the stack).
2314 : {
2315 : __ PopReturnAddressTo(r8);
2316 43 : __ Set(rcx, 0);
2317 : Label done, push, loop;
2318 43 : __ bind(&loop);
2319 43 : __ cmpl(rcx, rbx);
2320 43 : __ j(equal, &done, Label::kNear);
2321 : // Turn the hole into undefined as we go.
2322 : __ movp(r9, FieldOperand(rax, rcx, times_pointer_size,
2323 : FixedArray::kHeaderSize));
2324 43 : __ CompareRoot(r9, Heap::kTheHoleValueRootIndex);
2325 43 : __ j(not_equal, &push, Label::kNear);
2326 43 : __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
2327 43 : __ bind(&push);
2328 43 : __ Push(r9);
2329 : __ incl(rcx);
2330 43 : __ jmp(&loop);
2331 43 : __ bind(&done);
2332 : __ PushReturnAddressFrom(r8);
2333 43 : __ Move(rax, rcx);
2334 : }
2335 :
2336 : // Dispatch to Call or Construct depending on whether new.target is undefined.
2337 : {
2338 43 : __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
2339 43 : __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2340 43 : __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2341 : }
2342 43 : }
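
// A minimal sketch of the "real stack limit" check above: measure the
// free slots between rsp and the limit with signed arithmetic (an
// already-overflowed stack makes the difference negative) and compare
// against the number of elements still to be pushed. Plain integers
// stand in for register values; illustrative only.
static bool SketchWouldOverflowStack(long long rsp_value,
                                     long long real_stack_limit,
                                     long long elements_to_push) {
  long long free_slots = (rsp_value - real_stack_limit) >> 3;
  return free_slots <= elements_to_push;  // asm skips the throw on ">"
}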
2343 :
2344 : // static
2345 86 : void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
2346 : Handle<Code> code) {
2347 : // ----------- S t a t e -------------
2348 : // -- rdi : the target to call (can be any Object)
2349 : // -- rcx : start index (to support rest parameters)
2350 : // -- rsp[0] : return address.
2351 : // -- rsp[8] : thisArgument
2352 : // -----------------------------------
2353 :
2354 : // Check if we have an arguments adaptor frame below the function frame.
2355 : Label arguments_adaptor, arguments_done;
2356 172 : __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2357 : __ cmpp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
2358 172 : Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2359 86 : __ j(equal, &arguments_adaptor, Label::kNear);
2360 : {
2361 172 : __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2362 : __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
2363 : __ LoadSharedFunctionInfoSpecialField(
2364 86 : rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
2365 : __ movp(rbx, rbp);
2366 : }
2367 86 : __ jmp(&arguments_done, Label::kNear);
2368 86 : __ bind(&arguments_adaptor);
2369 : {
2370 : __ SmiToInteger32(
2371 86 : rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2372 : }
2373 86 : __ bind(&arguments_done);
2374 :
2375 : Label stack_empty, stack_done, stack_overflow;
2376 86 : __ subl(rax, rcx);
2377 86 : __ j(less_equal, &stack_empty);
2378 : {
2379 : // Check for stack overflow.
2380 86 : Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
2381 :
2382 : // Forward the arguments from the caller frame.
2383 : {
2384 : Label loop;
2385 : __ movl(rcx, rax);
2386 86 : __ Pop(r8);
2387 86 : __ bind(&loop);
2388 : {
2389 : StackArgumentsAccessor args(rbx, rcx, ARGUMENTS_DONT_CONTAIN_RECEIVER);
2390 86 : __ Push(args.GetArgumentOperand(0));
2391 : __ decl(rcx);
2392 86 : __ j(not_zero, &loop);
2393 : }
2394 86 : __ Push(r8);
2395 : }
2396 : }
2397 86 : __ jmp(&stack_done, Label::kNear);
2398 86 : __ bind(&stack_overflow);
2399 86 : __ TailCallRuntime(Runtime::kThrowStackOverflow);
2400 86 : __ bind(&stack_empty);
2401 : {
2402 : // We just pass the receiver, which is already on the stack.
2403 86 : __ Set(rax, 0);
2404 : }
2405 86 : __ bind(&stack_done);
2406 :
2407 86 : __ Jump(code, RelocInfo::CODE_TARGET);
2408 86 : }
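
// A minimal sketch of the count computation above: the number of
// arguments to forward is the caller's count minus the start index,
// clamped at zero (the "stack_empty" path passes only the receiver).
static int SketchForwardCount(int caller_argc, int start_index) {
  int count = caller_argc - start_index;  // subl(rax, rcx)
  return count > 0 ? count : 0;           // j(less_equal, &stack_empty)
}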
2409 :
2410 : namespace {
2411 :
2412 : // Drops the top JavaScript frame and the arguments adaptor frame below it
2413 : // (if present), preserving all the arguments prepared for the current call.
2414 : // Does nothing if the debugger is currently active.
2415 : // ES6 14.6.3. PrepareForTailCall
2416 : //
2417 : // Stack structure for the function g() tail calling f():
2418 : //
2419 : // ------- Caller frame: -------
2420 : // | ...
2421 : // | g()'s arg M
2422 : // | ...
2423 : // | g()'s arg 1
2424 : // | g()'s receiver arg
2425 : // | g()'s caller pc
2426 : // ------- g()'s frame: -------
2427 : // | g()'s caller fp <- fp
2428 : // | g()'s context
2429 : // | function pointer: g
2430 : // | -------------------------
2431 : // | ...
2432 : // | ...
2433 : // | f()'s arg N
2434 : // | ...
2435 : // | f()'s arg 1
2436 : // | f()'s receiver arg
2437 : // | f()'s caller pc <- sp
2438 : // ----------------------
2439 : //
2440 602 : void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2441 : Register scratch1, Register scratch2,
2442 : Register scratch3) {
2443 : DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2444 : Comment cmnt(masm, "[ PrepareForTailCall");
2445 :
2446 : // Prepare for tail call only if ES2015 tail call elimination is active.
2447 : Label done;
2448 : ExternalReference is_tail_call_elimination_enabled =
2449 : ExternalReference::is_tail_call_elimination_enabled_address(
2450 301 : masm->isolate());
2451 : __ Move(kScratchRegister, is_tail_call_elimination_enabled);
2452 602 : __ cmpb(Operand(kScratchRegister, 0), Immediate(0));
2453 301 : __ j(equal, &done);
2454 :
2455 : // Drop possible interpreter handler/stub frame.
2456 : {
2457 : Label no_interpreter_frame;
2458 : __ cmpp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
2459 602 : Immediate(StackFrame::TypeToMarker(StackFrame::STUB)));
2460 301 : __ j(not_equal, &no_interpreter_frame, Label::kNear);
2461 602 : __ movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2462 301 : __ bind(&no_interpreter_frame);
2463 : }
2464 :
2465 : // Check if next frame is an arguments adaptor frame.
2466 301 : Register caller_args_count_reg = scratch1;
2467 : Label no_arguments_adaptor, formal_parameter_count_loaded;
2468 602 : __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2469 : __ cmpp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
2470 602 : Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2471 301 : __ j(not_equal, &no_arguments_adaptor, Label::kNear);
2472 :
2473 : // Drop current frame and load arguments count from arguments adaptor frame.
2474 : __ movp(rbp, scratch2);
2475 : __ SmiToInteger32(
2476 : caller_args_count_reg,
2477 301 : Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2478 301 : __ jmp(&formal_parameter_count_loaded, Label::kNear);
2479 :
2480 301 : __ bind(&no_arguments_adaptor);
2481 : // Load the caller's formal parameter count.
2482 602 : __ movp(scratch1, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2483 : __ movp(scratch1,
2484 : FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2485 : __ LoadSharedFunctionInfoSpecialField(
2486 : caller_args_count_reg, scratch1,
2487 301 : SharedFunctionInfo::kFormalParameterCountOffset);
2488 :
2489 301 : __ bind(&formal_parameter_count_loaded);
2490 :
2491 : ParameterCount callee_args_count(args_reg);
2492 : __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2493 301 : scratch3, ReturnAddressState::kOnStack);
2494 301 : __ bind(&done);
2495 301 : }
2496 : } // namespace
2497 :
2498 : // static
2499 430 : void Builtins::Generate_CallFunction(MacroAssembler* masm,
2500 : ConvertReceiverMode mode,
2501 : TailCallMode tail_call_mode) {
2502 : // ----------- S t a t e -------------
2503 : // -- rax : the number of arguments (not including the receiver)
2504 : // -- rdi : the function to call (checked to be a JSFunction)
2505 : // -----------------------------------
2506 : StackArgumentsAccessor args(rsp, rax);
2507 258 : __ AssertFunction(rdi);
2508 :
2509 : // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2510 : // Check that the function is not a "classConstructor".
2511 : Label class_constructor;
2512 258 : __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2513 : __ testb(FieldOperand(rdx, SharedFunctionInfo::kFunctionKindByteOffset),
2514 258 : Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2515 258 : __ j(not_zero, &class_constructor);
2516 :
2517 : // ----------- S t a t e -------------
2518 : // -- rax : the number of arguments (not including the receiver)
2519 : // -- rdx : the shared function info.
2520 : // -- rdi : the function to call (checked to be a JSFunction)
2521 : // -----------------------------------
2522 :
2523 : // Enter the context of the function; ToObject has to run in the function
2524 : // context, and we also need to take the global proxy from the function
2525 : // context in case of conversion.
2526 : STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2527 : SharedFunctionInfo::kStrictModeByteOffset);
2528 : __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2529 : // We need to convert the receiver for non-native sloppy mode functions.
2530 : Label done_convert;
2531 : __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
2532 : Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2533 258 : (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2534 258 : __ j(not_zero, &done_convert);
2535 : {
2536 : // ----------- S t a t e -------------
2537 : // -- rax : the number of arguments (not including the receiver)
2538 : // -- rdx : the shared function info.
2539 : // -- rdi : the function to call (checked to be a JSFunction)
2540 : // -- rsi : the function context.
2541 : // -----------------------------------
2542 :
2543 258 : if (mode == ConvertReceiverMode::kNullOrUndefined) {
2544 : // Patch receiver to global proxy.
2545 : __ LoadGlobalProxy(rcx);
2546 : } else {
2547 : Label convert_to_object, convert_receiver;
2548 : __ movp(rcx, args.GetReceiverOperand());
2549 172 : __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
2550 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2551 172 : __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
2552 172 : __ j(above_equal, &done_convert);
2553 172 : if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2554 : Label convert_global_proxy;
2555 : __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
2556 : &convert_global_proxy, Label::kNear);
2557 : __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
2558 : Label::kNear);
2559 86 : __ bind(&convert_global_proxy);
2560 : {
2561 : // Patch receiver to global proxy.
2562 : __ LoadGlobalProxy(rcx);
2563 : }
2564 86 : __ jmp(&convert_receiver);
2565 : }
2566 172 : __ bind(&convert_to_object);
2567 : {
2568 : // Convert receiver using ToObject.
2569 : // TODO(bmeurer): Inline the allocation here to avoid building the frame
2570 : // in the fast case? (fall back to AllocateInNewSpace?)
2571 172 : FrameScope scope(masm, StackFrame::INTERNAL);
2572 172 : __ Integer32ToSmi(rax, rax);
2573 172 : __ Push(rax);
2574 172 : __ Push(rdi);
2575 : __ movp(rax, rcx);
2576 172 : __ Push(rsi);
2577 : __ Call(masm->isolate()->builtins()->ToObject(),
2578 172 : RelocInfo::CODE_TARGET);
2579 172 : __ Pop(rsi);
2580 : __ movp(rcx, rax);
2581 172 : __ Pop(rdi);
2582 172 : __ Pop(rax);
2583 172 : __ SmiToInteger32(rax, rax);
2584 : }
2585 : __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2586 172 : __ bind(&convert_receiver);
2587 : }
2588 : __ movp(args.GetReceiverOperand(), rcx);
2589 : }
2590 258 : __ bind(&done_convert);
2591 :
2592 : // ----------- S t a t e -------------
2593 : // -- rax : the number of arguments (not including the receiver)
2594 : // -- rdx : the shared function info.
2595 : // -- rdi : the function to call (checked to be a JSFunction)
2596 : // -- rsi : the function context.
2597 : // -----------------------------------
2598 :
2599 258 : if (tail_call_mode == TailCallMode::kAllow) {
2600 129 : PrepareForTailCall(masm, rax, rbx, rcx, r8);
2601 : }
2602 :
2603 : __ LoadSharedFunctionInfoSpecialField(
2604 258 : rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset);
2605 : ParameterCount actual(rax);
2606 : ParameterCount expected(rbx);
2607 :
2608 : __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION,
2609 516 : CheckDebugStepCallWrapper());
2610 :
2611 : // The function is a "classConstructor", need to raise an exception.
2612 258 : __ bind(&class_constructor);
2613 : {
2614 258 : FrameScope frame(masm, StackFrame::INTERNAL);
2615 258 : __ Push(rdi);
2616 258 : __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2617 : }
2618 258 : }
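
// A minimal sketch of the receiver-conversion policy above: strict-mode
// and native functions keep the receiver as-is, JSReceivers always pass
// through, null/undefined become the global proxy, and any other
// primitive goes through ToObject. Enumerators are illustrative labels.
enum SketchReceiverAction { kKeepReceiver, kUseGlobalProxy, kCallToObject };

static SketchReceiverAction SketchConvertReceiver(bool strict_or_native,
                                                  bool is_js_receiver,
                                                  bool is_null_or_undefined) {
  if (strict_or_native) return kKeepReceiver;  // No conversion needed.
  if (is_js_receiver) return kKeepReceiver;    // Already an object.
  if (is_null_or_undefined) return kUseGlobalProxy;
  return kCallToObject;  // e.g. numbers and strings get wrapped
}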
2619 :
2620 : namespace {
2621 :
2622 129 : void Generate_PushBoundArguments(MacroAssembler* masm) {
2623 : // ----------- S t a t e -------------
2624 : // -- rax : the number of arguments (not including the receiver)
2625 : // -- rdx : new.target (only in case of [[Construct]])
2626 : // -- rdi : target (checked to be a JSBoundFunction)
2627 : // -----------------------------------
2628 :
2629 : // Load [[BoundArguments]] into rcx and length of that into rbx.
2630 : Label no_bound_arguments;
2631 129 : __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2632 129 : __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2633 : __ testl(rbx, rbx);
2634 129 : __ j(zero, &no_bound_arguments);
2635 : {
2636 : // ----------- S t a t e -------------
2637 : // -- rax : the number of arguments (not including the receiver)
2638 : // -- rdx : new.target (only in case of [[Construct]])
2639 : // -- rdi : target (checked to be a JSBoundFunction)
2640 : // -- rcx : the [[BoundArguments]] (implemented as FixedArray)
2641 : // -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
2642 : // -----------------------------------
2643 :
2644 : // Reserve stack space for the [[BoundArguments]].
2645 : {
2646 : Label done;
2647 258 : __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
2648 129 : __ subp(rsp, kScratchRegister);
2649 : // Check the stack for overflow. We are not trying to catch interruptions
2650 : // (i.e. debug break and preemption) here, so check the "real stack
2651 : // limit".
2652 129 : __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
2653 129 : __ j(greater, &done, Label::kNear); // Signed comparison.
2654 : // Restore the stack pointer.
2655 258 : __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
2656 : {
2657 129 : FrameScope scope(masm, StackFrame::MANUAL);
2658 129 : __ EnterFrame(StackFrame::INTERNAL);
2659 129 : __ CallRuntime(Runtime::kThrowStackOverflow);
2660 : }
2661 129 : __ bind(&done);
2662 : }
2663 :
2664 : // Adjust effective number of arguments to include return address.
2665 : __ incl(rax);
2666 :
2667 : // Relocate arguments and return address down the stack.
2668 : {
2669 : Label loop;
2670 129 : __ Set(rcx, 0);
2671 258 : __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
2672 129 : __ bind(&loop);
2673 258 : __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
2674 258 : __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
2675 : __ incl(rcx);
2676 129 : __ cmpl(rcx, rax);
2677 129 : __ j(less, &loop);
2678 : }
2679 :
2680 : // Copy [[BoundArguments]] to the stack (below the arguments).
2681 : {
2682 : Label loop;
2683 : __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2684 129 : __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2685 129 : __ bind(&loop);
2686 : __ decl(rbx);
2687 : __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
2688 : FixedArray::kHeaderSize));
2689 258 : __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
2690 258 : __ leal(rax, Operand(rax, 1));
2691 129 : __ j(greater, &loop);
2692 : }
2693 :
2694 : // Adjust effective number of arguments (rax contains the number of
2695 : // arguments from the call plus return address plus the number of
2696 : // [[BoundArguments]]), so we need to subtract one for the return address.
2697 : __ decl(rax);
2698 : }
2699 129 : __ bind(&no_bound_arguments);
2700 129 : }
2701 :
2702 : } // namespace
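
// A minimal sketch of the relocation above: after making room, slide the
// return address and the existing arguments down, then fill the gap with
// the [[BoundArguments]] so they precede the call's own arguments. A
// plain array models the enlarged stack, index 0 == new top; this is an
// illustrative mirror of the loops, not the real frame layout.
static int SketchPushBoundArguments(void** stack, int argc_plus_ret,
                                    void** bound, int bound_count) {
  for (int i = 0; i < argc_plus_ret; i++) {
    stack[i] = stack[i + bound_count];  // Relocate down the stack.
  }
  for (int i = 0; i < bound_count; i++) {
    // Deepest vacated slot receives the first bound argument.
    stack[argc_plus_ret + i] = bound[bound_count - 1 - i];
  }
  return argc_plus_ret + bound_count - 1;  // New argc (drop the ret slot).
}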
2703 :
2704 : // static
2705 172 : void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2706 : TailCallMode tail_call_mode) {
2707 : // ----------- S t a t e -------------
2708 : // -- rax : the number of arguments (not including the receiver)
2709 : // -- rdi : the function to call (checked to be a JSBoundFunction)
2710 : // -----------------------------------
2711 86 : __ AssertBoundFunction(rdi);
2712 :
2713 86 : if (tail_call_mode == TailCallMode::kAllow) {
2714 43 : PrepareForTailCall(masm, rax, rbx, rcx, r8);
2715 : }
2716 :
2717 : // Patch the receiver to [[BoundThis]].
2718 : StackArgumentsAccessor args(rsp, rax);
2719 86 : __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
2720 : __ movp(args.GetReceiverOperand(), rbx);
2721 :
2722 : // Push the [[BoundArguments]] onto the stack.
2723 86 : Generate_PushBoundArguments(masm);
2724 :
2725 : // Call the [[BoundTargetFunction]] via the Call builtin.
2726 : __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2727 : __ Load(rcx,
2728 86 : ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
2729 : __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2730 86 : __ jmp(rcx);
2731 86 : }
2732 :
2733 : // static
2734 1290 : void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2735 : TailCallMode tail_call_mode) {
2736 : // ----------- S t a t e -------------
2737 : // -- rax : the number of arguments (not including the receiver)
2738 : // -- rdi : the target to call (can be any Object)
2739 : // -----------------------------------
2740 : StackArgumentsAccessor args(rsp, rax);
2741 :
2742 : Label non_callable, non_function, non_smi;
2743 258 : __ JumpIfSmi(rdi, &non_callable);
2744 258 : __ bind(&non_smi);
2745 258 : __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2746 : __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2747 258 : RelocInfo::CODE_TARGET);
2748 258 : __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2749 : __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2750 258 : RelocInfo::CODE_TARGET);
2751 :
2752 : // Check if target has a [[Call]] internal method.
2753 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2754 258 : Immediate(1 << Map::kIsCallable));
2755 258 : __ j(zero, &non_callable);
2756 :
2757 258 : __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2758 258 : __ j(not_equal, &non_function);
2759 :
2760 : // 0. Prepare for tail call if necessary.
2761 258 : if (tail_call_mode == TailCallMode::kAllow) {
2762 129 : PrepareForTailCall(masm, rax, rbx, rcx, r8);
2763 : }
2764 :
2765 : // 1. Runtime fallback for Proxy [[Call]].
2766 : __ PopReturnAddressTo(kScratchRegister);
2767 258 : __ Push(rdi);
2768 : __ PushReturnAddressFrom(kScratchRegister);
2769 : // Increase the argument count to include the pushed function and the
2770 : // existing receiver on the stack.
2771 258 : __ addp(rax, Immediate(2));
2772 : // Tail-call to the runtime.
2773 : __ JumpToExternalReference(
2774 258 : ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2775 :
2776 : // 2. Call to something else, which might have a [[Call]] internal method (if
2777 : // not we raise an exception).
2778 258 : __ bind(&non_function);
2779 : // Overwrite the original receiver with the (original) target.
2780 : __ movp(args.GetReceiverOperand(), rdi);
2781 : // Let the "call_as_function_delegate" take care of the rest.
2782 258 : __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
2783 : __ Jump(masm->isolate()->builtins()->CallFunction(
2784 : ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2785 258 : RelocInfo::CODE_TARGET);
2786 :
2787 : // 3. Call to something that is not callable.
2788 258 : __ bind(&non_callable);
2789 : {
2790 258 : FrameScope scope(masm, StackFrame::INTERNAL);
2791 258 : __ Push(rdi);
2792 258 : __ CallRuntime(Runtime::kThrowCalledNonCallable);
2793 : }
2794 258 : }
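
// A minimal sketch of the dispatch order above: JSFunctions and bound
// functions get dedicated builtins, proxies fall back to the runtime,
// any other callable goes through the call-as-function delegate, and the
// rest throws. Enumerators are illustrative labels only.
enum SketchCallPath { kCallFunctionPath, kCallBoundFunctionPath,
                      kProxyRuntimePath, kDelegatePath, kThrowPath };

static SketchCallPath SketchDispatchCall(bool is_smi, bool is_js_function,
                                         bool is_bound_function,
                                         bool is_callable, bool is_proxy) {
  if (is_smi) return kThrowPath;
  if (is_js_function) return kCallFunctionPath;
  if (is_bound_function) return kCallBoundFunctionPath;
  if (!is_callable) return kThrowPath;
  if (is_proxy) return kProxyRuntimePath;
  return kDelegatePath;
}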
2795 :
2796 86 : static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
2797 : Label runtime_call, push_args;
2798 : // Load the spread argument into rbx.
2799 172 : __ movp(rbx, Operand(rsp, kPointerSize));
2800 86 : __ JumpIfSmi(rbx, &runtime_call);
2801 : // Load the map of the spread into r15.
2802 : __ movp(r15, FieldOperand(rbx, HeapObject::kMapOffset));
2803 : // Load native context into r14.
2804 : __ movp(r14, NativeContextOperand());
2805 :
2806 : // Check that the spread is an array.
2807 86 : __ CmpInstanceType(r15, JS_ARRAY_TYPE);
2808 86 : __ j(not_equal, &runtime_call);
2809 :
2810 : // Check that we have the original ArrayPrototype.
2811 : __ movp(rcx, FieldOperand(r15, Map::kPrototypeOffset));
2812 86 : __ cmpp(rcx, ContextOperand(r14, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2813 86 : __ j(not_equal, &runtime_call);
2814 :
2815 : // Check that the ArrayPrototype hasn't been modified in a way that would
2816 : // affect iteration.
2817 86 : __ LoadRoot(rcx, Heap::kArrayIteratorProtectorRootIndex);
2818 : __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset),
2819 86 : Smi::FromInt(Isolate::kProtectorValid));
2820 86 : __ j(not_equal, &runtime_call);
2821 :
2822 : // Check that the map of the initial array iterator prototype hasn't changed.
2823 : __ movp(rcx,
2824 86 : ContextOperand(r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
2825 : __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2826 : __ cmpp(rcx, ContextOperand(
2827 86 : r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
2828 86 : __ j(not_equal, &runtime_call);
2829 :
2830 : // For FastPacked kinds, iteration will have the same effect as simply
2831 : // accessing each property in order.
2832 : Label no_protector_check;
2833 : __ movzxbp(rcx, FieldOperand(r15, Map::kBitField2Offset));
2834 86 : __ DecodeField<Map::ElementsKindBits>(rcx);
2835 86 : __ cmpp(rcx, Immediate(FAST_HOLEY_ELEMENTS));
2836 86 : __ j(above, &runtime_call);
2837 : // For non-FastHoley kinds, we can skip the protector check.
2838 86 : __ cmpp(rcx, Immediate(FAST_SMI_ELEMENTS));
2839 86 : __ j(equal, &no_protector_check);
2840 86 : __ cmpp(rcx, Immediate(FAST_ELEMENTS));
2841 86 : __ j(equal, &no_protector_check);
2842 : // Check the ArrayProtector cell.
2843 86 : __ LoadRoot(rcx, Heap::kArrayProtectorRootIndex);
2844 : __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset),
2845 86 : Smi::FromInt(Isolate::kProtectorValid));
2846 86 : __ j(not_equal, &runtime_call);
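       :
       : // The three comparisons above rely on the numeric ordering of the fast
       : // ElementsKind values (an assumption about this version's enum, where
       : // FAST_SMI_ELEMENTS=0, FAST_HOLEY_SMI_ELEMENTS=1, FAST_ELEMENTS=2,
       : // FAST_HOLEY_ELEMENTS=3). A sketch of the decision in plain C++:
       : //
       : //   enum class SpreadPath { kFast, kProtectorCheckThenFast, kRuntime };
       : //   SpreadPath Classify(int kind) {
       : //     if (kind > 3) return SpreadPath::kRuntime;     // doubles and slower
       : //     if (kind == 0 || kind == 2) return SpreadPath::kFast;  // packed
       : //     return SpreadPath::kProtectorCheckThenFast;    // holey kinds
       : //   }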
2847 :
2848 86 : __ bind(&no_protector_check);
2849 : // Load the FixedArray backing store, but use the length from the array.
2850 86 : __ SmiToInteger32(r9, FieldOperand(rbx, JSArray::kLengthOffset));
2851 : __ movp(rbx, FieldOperand(rbx, JSArray::kElementsOffset));
2852 86 : __ jmp(&push_args);
2853 :
2854 86 : __ bind(&runtime_call);
2855 : {
2856 : // Call into the runtime to get the result of the spread as a FixedArray.
2857 86 : FrameScope scope(masm, StackFrame::INTERNAL);
2858 86 : __ Push(rdi); // target
2859 86 : __ Push(rdx); // new target
2860 86 : __ Integer32ToSmi(rax, rax);
2861 86 : __ Push(rax); // nargs
2862 86 : __ Push(rbx);
2863 86 : __ CallRuntime(Runtime::kSpreadIterableFixed);
2864 : __ movp(rbx, rax);
2865 86 : __ Pop(rax); // nargs
2866 86 : __ SmiToInteger32(rax, rax);
2867 86 : __ Pop(rdx); // new target
2868 86 : __ Pop(rdi); // target
2869 : }
2870 :
2871 : {
2872 : // Calculate the new nargs including the result of the spread.
2873 86 : __ SmiToInteger32(r9, FieldOperand(rbx, FixedArray::kLengthOffset));
2874 :
2875 86 : __ bind(&push_args);
2876 : // rax += r9 - 1. Subtract 1 for the spread itself.
2877 172 : __ leap(rax, Operand(rax, r9, times_1, -1));
2878 : }
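       :
       : // A sketch of the argument-count arithmetic performed by the leap above
       : // (names are illustrative, not V8 identifiers):
       : //
       : //   int NewArgc(int argc, int spread_length) {
       : //     // The spread argument itself is consumed and replaced by its
       : //     // spread_length elements.
       : //     return argc + spread_length - 1;
       : //   }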
2879 :
2880 : // Check for stack overflow.
2881 : {
2882 : // Check the stack for overflow. We are not trying to catch interruptions
2883 : // (i.e. debug break and preemption) here, so check the "real stack limit".
2884 : Label done;
2885 86 : __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
2886 : __ movp(rcx, rsp);
2887 : // Make rcx the space we have left. The stack might already be overflowed
2888 : // here, in which case rcx will be negative.
2889 86 : __ subp(rcx, kScratchRegister);
2890 : __ sarp(rcx, Immediate(kPointerSizeLog2));
2891 : // Check if the arguments will overflow the stack.
2892 86 : __ cmpp(rcx, r9);
2893 86 : __ j(greater, &done, Label::kNear); // Signed comparison.
2894 86 : __ TailCallRuntime(Runtime::kThrowStackOverflow);
2895 86 : __ bind(&done);
2896 : }
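       :
       : // A sketch of the overflow test above, assuming 8-byte stack slots
       : // (kPointerSizeLog2 == 3 on x64):
       : //
       : //   bool EnoughStack(intptr_t rsp, intptr_t real_limit, intptr_t needed) {
       : //     // Signed shift: if rsp is already below the limit, free_slots is
       : //     // negative and the comparison fails, as intended.
       : //     intptr_t free_slots = (rsp - real_limit) >> 3;
       : //     return free_slots > needed;
       : //   }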
2897 :
2898 : // Put the evaluated spread onto the stack as additional arguments.
2899 : {
2900 : // Pop the return address and spread argument.
2901 : __ PopReturnAddressTo(r8);
2902 86 : __ Pop(rcx);
2903 :
2904 86 : __ Set(rcx, 0);
2905 : Label done, push, loop;
2906 86 : __ bind(&loop);
2907 86 : __ cmpl(rcx, r9);
2908 86 : __ j(equal, &done, Label::kNear);
2909 : __ movp(kScratchRegister, FieldOperand(rbx, rcx, times_pointer_size,
2910 : FixedArray::kHeaderSize));
2911 86 : __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
2912 86 : __ j(not_equal, &push, Label::kNear);
2913 86 : __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
2914 86 : __ bind(&push);
2915 86 : __ Push(kScratchRegister);
2916 : __ incl(rcx);
2917 86 : __ jmp(&loop);
2918 86 : __ bind(&done);
2919 : __ PushReturnAddressFrom(r8);
2920 : }
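       :
       : // A sketch of the push loop above (illustrative, not V8 source): the
       : // elements are pushed in index order, so the highest index lands nearest
       : // the return address, matching the argument layout used in this file;
       : // holes read from the backing store are passed on as undefined.
       : //
       : //   for (int i = 0; i < len; i++) {
       : //     Object* element = backing_store->get(i);  // FixedArray slot i
       : //     Push(element == the_hole ? undefined : element);
       : //   }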
2921 86 : }
2922 :
2923 : // static
2924 86 : void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
2925 : // ----------- S t a t e -------------
2926 : // -- rax : the number of arguments (not including the receiver)
2927 : // -- rdi : the target to call (can be any Object)
2928 : // -----------------------------------
2929 :
2930 : // CheckSpreadAndPushToStack will push rdx to save it.
2931 43 : __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
2932 43 : CheckSpreadAndPushToStack(masm);
2933 : __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2934 : TailCallMode::kDisallow),
2935 43 : RelocInfo::CODE_TARGET);
2936 43 : }
2937 :
2938 : // static
2939 43 : void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2940 : // ----------- S t a t e -------------
2941 : // -- rax : the number of arguments (not including the receiver)
2942 : // -- rdx : the new target (checked to be a constructor)
2943 : // -- rdi : the constructor to call (checked to be a JSFunction)
2944 : // -----------------------------------
2945 43 : __ AssertFunction(rdi);
2946 :
2947 : // The calling convention for function-specific ConstructStubs requires
2948 : // rbx to contain either an AllocationSite or undefined.
2949 43 : __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
2950 :
2951 : // Tail call to the function-specific construct stub (still in the caller
2952 : // context at this point).
2953 43 : __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2954 : __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
2955 : __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2956 43 : __ jmp(rcx);
2957 43 : }
2958 :
2959 : // static
2960 86 : void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2961 : // ----------- S t a t e -------------
2962 : // -- rax : the number of arguments (not including the receiver)
2963 : // -- rdx : the new target (checked to be a constructor)
2964 : // -- rdi : the constructor to call (checked to be a JSBoundFunction)
2965 : // -----------------------------------
2966 43 : __ AssertBoundFunction(rdi);
2967 :
2968 : // Push the [[BoundArguments]] onto the stack.
2969 43 : Generate_PushBoundArguments(masm);
2970 :
2971 : // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2972 : {
2973 : Label done;
2974 43 : __ cmpp(rdi, rdx);
2975 43 : __ j(not_equal, &done, Label::kNear);
2976 : __ movp(rdx,
2977 : FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2978 43 : __ bind(&done);
2979 : }
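       :
       : // This mirrors the bound-function [[Construct]] spec step: if
       : // SameValue(F, newTarget), newTarget becomes the bound target. Roughly:
       : //
       : //   if (new_target == bound_function) {
       : //     new_target = bound_function->bound_target_function();
       : //   }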
2980 :
2981 : // Construct the [[BoundTargetFunction]] via the Construct builtin.
2982 : __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2983 43 : __ Load(rcx, ExternalReference(Builtins::kConstruct, masm->isolate()));
2984 : __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2985 43 : __ jmp(rcx);
2986 43 : }
2987 :
2988 : // static
2989 86 : void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2990 : // ----------- S t a t e -------------
2991 : // -- rax : the number of arguments (not including the receiver)
2992 : // -- rdi : the constructor to call (checked to be a JSProxy)
2993 : // -- rdx : the new target (either the same as the constructor or
2994 : // the JSFunction on which new was invoked initially)
2995 : // -----------------------------------
2996 :
2997 : // Call into the Runtime for Proxy [[Construct]].
2998 : __ PopReturnAddressTo(kScratchRegister);
2999 43 : __ Push(rdi);
3000 43 : __ Push(rdx);
3001 : __ PushReturnAddressFrom(kScratchRegister);
3002 : // Include the pushed new_target, the constructor, and the receiver.
3003 43 : __ addp(rax, Immediate(3));
3004 : __ JumpToExternalReference(
3005 43 : ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
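       :
       : // Stack as seen by Runtime::kJSProxyConstruct above (a sketch):
       : //   rsp[0]              : return address
       : //   rsp[8]              : new target (pushed above)
       : //   rsp[16]             : constructor (pushed above)
       : //   rsp[24]             : last argument
       : //   ...
       : //   rsp[8 * (argc + 3)] : receiver
       : // hence the bump of rax by three.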
3006 43 : }
3007 :
3008 : // static
3009 258 : void Builtins::Generate_Construct(MacroAssembler* masm) {
3010 : // ----------- S t a t e -------------
3011 : // -- rax : the number of arguments (not including the receiver)
3012 : // -- rdx : the new target (either the same as the constructor or
3013 : // the JSFunction on which new was invoked initially)
3014 : // -- rdi : the constructor to call (can be any Object)
3015 : // -----------------------------------
3016 : StackArgumentsAccessor args(rsp, rax);
3017 :
3018 : // Check if target is a Smi.
3019 : Label non_constructor;
3020 43 : __ JumpIfSmi(rdi, &non_constructor, Label::kNear);
3021 :
3022 : // Dispatch based on instance type.
3023 43 : __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
3024 : __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
3025 43 : RelocInfo::CODE_TARGET);
3026 :
3027 : // Check if target has a [[Construct]] internal method.
3028 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
3029 43 : Immediate(1 << Map::kIsConstructor));
3030 43 : __ j(zero, &non_constructor, Label::kNear);
3031 :
3032 : // Only dispatch to bound functions after checking whether they are
3033 : // constructors.
3034 43 : __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
3035 : __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
3036 43 : RelocInfo::CODE_TARGET);
3037 :
3038 : // Only dispatch to proxies after checking whether they are constructors.
3039 43 : __ CmpInstanceType(rcx, JS_PROXY_TYPE);
3040 : __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
3041 43 : RelocInfo::CODE_TARGET);
3042 :
3043 : // Called Construct on an exotic Object with a [[Construct]] internal method.
3044 : {
3045 : // Overwrite the original receiver with the (original) target.
3046 : __ movp(args.GetReceiverOperand(), rdi);
3047 : // Let the "call_as_constructor_delegate" take care of the rest.
3048 43 : __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
3049 : __ Jump(masm->isolate()->builtins()->CallFunction(),
3050 43 : RelocInfo::CODE_TARGET);
3051 : }
3052 :
3053 : // Called Construct on an Object that doesn't have a [[Construct]] internal
3054 : // method.
3055 43 : __ bind(&non_constructor);
3056 : __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
3057 43 : RelocInfo::CODE_TARGET);
3058 43 : }
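       :
       : // In outline, the dispatch above:
       : //   Smi                      -> ConstructedNonConstructable
       : //   JSFunction               -> ConstructFunction
       : //   no [[Construct]] method  -> ConstructedNonConstructable
       : //   JSBoundFunction          -> ConstructBoundFunction
       : //   JSProxy                  -> ConstructProxy
       : //   other constructor        -> CALL_AS_CONSTRUCTOR_DELEGATE via CallFunction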
3059 :
3060 : // static
3061 86 : void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
3062 : // ----------- S t a t e -------------
3063 : // -- rax : the number of arguments (not including the receiver)
3064 : // -- rdx : the new target (either the same as the constructor or
3065 : // the JSFunction on which new was invoked initially)
3066 : // -- rdi : the constructor to call (can be any Object)
3067 : // -----------------------------------
3068 :
3069 43 : CheckSpreadAndPushToStack(masm);
3070 43 : __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
3071 43 : }
3072 :
3073 86 : static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
3074 : bool has_handler_frame) {
3075 : // Lookup the function in the JavaScript frame.
3076 86 : if (has_handler_frame) {
3077 86 : __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3078 86 : __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
3079 : } else {
3080 86 : __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
3081 : }
3082 :
3083 : {
3084 86 : FrameScope scope(masm, StackFrame::INTERNAL);
3085 : // Pass function as argument.
3086 86 : __ Push(rax);
3087 86 : __ CallRuntime(Runtime::kCompileForOnStackReplacement);
3088 : }
3089 :
3090 : Label skip;
3091 : // If the code object is null, just return to the caller.
3092 86 : __ cmpp(rax, Immediate(0));
3093 86 : __ j(not_equal, &skip, Label::kNear);
3094 86 : __ ret(0);
3095 :
3096 86 : __ bind(&skip);
3097 :
3098 : // Drop any potential handler frame that may be sitting on top of the
3099 : // actual JavaScript frame. This is the case when OSR is triggered from bytecode.
3100 86 : if (has_handler_frame) {
3101 43 : __ leave();
3102 : }
3103 :
3104 : // Load deoptimization data from the code object.
3105 172 : __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
3106 :
3107 : // Load the OSR entrypoint offset from the deoptimization data.
3108 : __ SmiToInteger32(
3109 : rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
3110 : DeoptimizationInputData::kOsrPcOffsetIndex) -
3111 86 : kHeapObjectTag));
3112 :
3113 : // Compute the target address = code_obj + header_size + osr_offset
3114 172 : __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
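       :
       : // In plain terms (a sketch; kHeapObjectTag is subtracted because rax
       : // holds a tagged pointer):
       : //
       : //   entry = code_object + Code::kHeaderSize + osr_pc_offset - kHeapObjectTag;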
3115 :
3116 : // Overwrite the return address on the stack.
3117 : __ movq(StackOperandForReturnAddress(0), rax);
3118 :
3119 : // And "return" to the OSR entry point of the function.
3120 86 : __ ret(0);
3121 86 : }
3122 :
3123 43 : void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
3124 43 : Generate_OnStackReplacementHelper(masm, false);
3125 43 : }
3126 :
3127 43 : void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
3128 43 : Generate_OnStackReplacementHelper(masm, true);
3129 43 : }
3130 :
3131 43 : void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
3132 : {
3133 43 : FrameScope scope(masm, StackFrame::INTERNAL);
3134 :
3135 : // Save all parameter registers (see wasm-linkage.cc). They might be
3136 : // overwritten in the runtime call below. We don't have any callee-saved
3137 : // registers in wasm, so no need to store anything else.
3138 43 : constexpr Register gp_regs[]{rax, rbx, rcx, rdx, rsi, rdi};
3139 43 : constexpr XMMRegister xmm_regs[]{xmm1, xmm2, xmm3, xmm4, xmm5, xmm6};
3140 :
3141 301 : for (auto reg : gp_regs) {
3142 258 : __ Push(reg);
3143 : }
3144 43 : __ subp(rsp, Immediate(16 * arraysize(xmm_regs)));
3145 301 : for (int i = 0, e = arraysize(xmm_regs); i < e; ++i) {
3146 258 : __ movdqu(Operand(rsp, 16 * i), xmm_regs[i]);
3147 : }
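       :
       : // Frame layout after the saves above (a sketch): the six GP registers
       : // are pushed first, then 16 bytes are reserved per XMM register, so
       : // slot rsp + 16*i holds xmm_regs[i]; the restore loops below walk the
       : // same layout in reverse.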
3148 :
3149 : // Initialize the rsi register with Smi::kZero; CEntryStub will use it to
3150 : // set the current context on the isolate.
3151 : __ Move(rsi, Smi::kZero);
3152 43 : __ CallRuntime(Runtime::kWasmCompileLazy);
3153 : // Store returned instruction start in r11.
3154 : __ leap(r11, FieldOperand(rax, Code::kHeaderSize));
3155 :
3156 : // Restore registers.
3157 301 : for (int i = arraysize(xmm_regs) - 1; i >= 0; --i) {
3158 258 : __ movdqu(xmm_regs[i], Operand(rsp, 16 * i));
3159 : }
3160 43 : __ addp(rsp, Immediate(16 * arraysize(xmm_regs)));
3161 301 : for (int i = arraysize(gp_regs) - 1; i >= 0; --i) {
3162 258 : __ Pop(gp_regs[i]);
3163 43 : }
3164 : }
3165 : // Now jump to the instructions of the returned code object.
3166 43 : __ jmp(r11);
3167 43 : }
3168 :
3169 : #undef __
3170 :
3171 : } // namespace internal
3172 : } // namespace v8
3173 :
3174 : #endif // V8_TARGET_ARCH_X64