Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #if V8_TARGET_ARCH_X64
6 :
7 : #include "src/api-arguments.h"
8 : #include "src/base/adapters.h"
9 : #include "src/code-factory.h"
10 : #include "src/counters.h"
11 : #include "src/deoptimizer.h"
12 : #include "src/frame-constants.h"
13 : #include "src/frames.h"
14 : // For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
15 : #include "src/heap/heap-inl.h"
16 : #include "src/macro-assembler-inl.h"
17 : #include "src/objects-inl.h"
18 : #include "src/objects/cell.h"
19 : #include "src/objects/debug-objects.h"
20 : #include "src/objects/foreign.h"
21 : #include "src/objects/heap-number.h"
22 : #include "src/objects/js-generator.h"
23 : #include "src/objects/smi.h"
24 : #include "src/register-configuration.h"
25 : #include "src/wasm/wasm-linkage.h"
26 : #include "src/wasm/wasm-objects.h"
27 :
28 : namespace v8 {
29 : namespace internal {
30 :
31 : #define __ ACCESS_MASM(masm)
32 :
33 15568 : void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
34 : ExitFrameType exit_frame_type) {
35 15568 : __ LoadAddress(kJavaScriptCallExtraArg1Register,
36 15568 : ExternalReference::Create(address));
37 15568 : if (exit_frame_type == BUILTIN_EXIT) {
38 15400 : __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
39 15400 : RelocInfo::CODE_TARGET);
40 : } else {
41 : DCHECK(exit_frame_type == EXIT);
42 168 : __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
43 168 : RelocInfo::CODE_TARGET);
44 : }
45 15568 : }
46 :
          : // Calls |function_id| (a runtime function that returns a Code object) and
          : // tail-calls the returned code. rdi (target function) and rdx (new target)
          : // are saved around the runtime call so the callee sees them unchanged.
47 280 : static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
48 : Runtime::FunctionId function_id) {
49 : // ----------- S t a t e -------------
50 : // -- rdx : new target (preserved for callee)
51 : // -- rdi : target function (preserved for callee)
52 : // -----------------------------------
53 : {
54 280 : FrameScope scope(masm, StackFrame::INTERNAL);
55 : // Push a copy of the target function and the new target.
56 280 : __ Push(rdi);
57 280 : __ Push(rdx);
58 : // Function is also the parameter to the runtime call.
59 280 : __ Push(rdi);
60 :
61 : __ CallRuntime(function_id, 1);
          : // The runtime call leaves the Code object to jump to in rax; move it to
          : // rcx before the Pops below clobber nothing else we need.
62 280 : __ movq(rcx, rax);
63 :
64 : // Restore target function and new target.
65 280 : __ Pop(rdx);
66 280 : __ Pop(rdi);
67 : }
68 : static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
69 280 : __ JumpCodeObject(rcx);
70 280 : }
71 :
72 : namespace {
73 :
          : // Construct stub used for builtin (non-JS) constructors: sets up a CONSTRUCT
          : // frame, pushes the-hole as receiver, re-pushes the caller's arguments, and
          : // invokes the constructor. On return, pops the construct frame and removes
          : // the caller's arguments (plus receiver) from the stack.
74 56 : void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
75 : // ----------- S t a t e -------------
76 : // -- rax: number of arguments
77 : // -- rdi: constructor function
78 : // -- rdx: new target
79 : // -- rsi: context
80 : // -----------------------------------
81 :
82 : // Enter a construct frame.
83 : {
84 56 : FrameScope scope(masm, StackFrame::CONSTRUCT);
85 :
86 : // Preserve the incoming parameters on the stack.
87 56 : __ SmiTag(rcx, rax);
88 56 : __ Push(rsi);
89 56 : __ Push(rcx);
90 :
91 : // The receiver for the builtin/api call.
92 56 : __ PushRoot(RootIndex::kTheHoleValue);
93 :
94 : // Set up pointer to last argument.
95 112 : __ leaq(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
96 :
97 : // Copy arguments and receiver to the expression stack.
98 56 : Label loop, entry;
99 : __ movq(rcx, rax);
100 : // ----------- S t a t e -------------
101 : // -- rax: number of arguments (untagged)
102 : // -- rdi: constructor function
103 : // -- rdx: new target
104 : // -- rbx: pointer to last argument
105 : // -- rcx: counter
106 : // -- sp[0*kSystemPointerSize]: the hole (receiver)
107 : // -- sp[1*kSystemPointerSize]: number of arguments (tagged)
108 : // -- sp[2*kSystemPointerSize]: context
109 : // -----------------------------------
          : // Count-down copy loop: jump into the loop at the decrement/test so a zero
          : // argument count pushes nothing.
110 56 : __ jmp(&entry);
111 56 : __ bind(&loop);
112 56 : __ Push(Operand(rbx, rcx, times_system_pointer_size, 0));
113 56 : __ bind(&entry);
114 : __ decq(rcx);
115 56 : __ j(greater_equal, &loop, Label::kNear);
116 :
117 : // Call the function.
118 : // rax: number of arguments (untagged)
119 : // rdi: constructor function
120 : // rdx: new target
121 : ParameterCount actual(rax);
122 56 : __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);
123 :
124 : // Restore context from the frame.
125 112 : __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
126 : // Restore smi-tagged arguments count from the frame.
127 112 : __ movq(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
128 :
129 : // Leave construct frame.
130 : }
131 :
132 : // Remove caller arguments from the stack and return.
133 : __ PopReturnAddressTo(rcx);
134 56 : SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
          : // The extra 1 * kSystemPointerSize drops the receiver slot in addition to
          : // the rbx (argument count) slots.
135 112 : __ leaq(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
136 : __ PushReturnAddressFrom(rcx);
137 :
138 56 : __ ret(0);
139 56 : }
140 :
          : // Emits a check that there is stack room for |num_args| pointer-sized slots;
          : // branches to |stack_overflow| if not. Clobbers |scratch| and
          : // kScratchRegister; does not modify num_args.
141 896 : void Generate_StackOverflowCheck(
142 : MacroAssembler* masm, Register num_args, Register scratch,
143 : Label* stack_overflow,
144 : Label::Distance stack_overflow_distance = Label::kFar) {
145 : // Check the stack for overflow. We are not trying to catch
146 : // interruptions (e.g. debug break and preemption) here, so the "real stack
147 : // limit" is checked.
148 896 : __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
149 896 : __ movq(scratch, rsp);
150 : // Make scratch the space we have left. The stack might already be overflowed
151 : // here which will cause scratch to become negative.
152 : __ subq(scratch, kScratchRegister);
          : // Convert remaining bytes to a slot count; arithmetic shift keeps the sign
          : // if the stack is already past the limit.
153 : __ sarq(scratch, Immediate(kSystemPointerSizeLog2));
154 : // Check if the arguments will overflow the stack.
155 : __ cmpq(scratch, num_args);
156 : // Signed comparison.
157 896 : __ j(less_equal, stack_overflow, stack_overflow_distance);
158 896 : }
159 :
160 : } // namespace
161 :
162 : // The construct stub for ES5 constructor functions and ES6 class constructors.
          : // Generic ES construct stub: allocates the implicit receiver via FastNewObject
          : // for base constructors (derived constructors get the-hole instead), copies
          : // the caller's arguments into the construct frame, invokes the constructor,
          : // and implements the ECMA-262 rule for choosing between the constructor's
          : // return value and the implicit receiver. Two deopt entry PCs are recorded:
          : // one after receiver creation and one after the constructor invocation.
163 56 : void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
164 : // ----------- S t a t e -------------
165 : // -- rax: number of arguments (untagged)
166 : // -- rdi: constructor function
167 : // -- rdx: new target
168 : // -- rsi: context
169 : // -- sp[...]: constructor arguments
170 : // -----------------------------------
171 :
172 : // Enter a construct frame.
173 : {
174 56 : FrameScope scope(masm, StackFrame::CONSTRUCT);
175 56 : Label post_instantiation_deopt_entry, not_create_implicit_receiver;
176 :
177 : // Preserve the incoming parameters on the stack.
178 56 : __ SmiTag(rcx, rax);
179 56 : __ Push(rsi);
180 56 : __ Push(rcx);
181 56 : __ Push(rdi);
          : // The hole here is frame padding (see the state comment below), not the
          : // receiver.
182 56 : __ PushRoot(RootIndex::kTheHoleValue);
183 56 : __ Push(rdx);
184 :
185 : // ----------- S t a t e -------------
186 : // -- sp[0*kSystemPointerSize]: new target
187 : // -- sp[1*kSystemPointerSize]: padding
188 : // -- rdi and sp[2*kSystemPointerSize]: constructor function
189 : // -- sp[3*kSystemPointerSize]: argument count
190 : // -- sp[4*kSystemPointerSize]: context
191 : // -----------------------------------
192 :
          : // Derived-class constructors must not create an implicit receiver; test
          : // the function-kind bits from the SharedFunctionInfo.
193 : __ LoadTaggedPointerField(
194 56 : rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
195 56 : __ movl(rbx, FieldOperand(rbx, SharedFunctionInfo::kFlagsOffset));
196 : __ DecodeField<SharedFunctionInfo::FunctionKindBits>(rbx);
197 : __ JumpIfIsInRange(rbx, kDefaultDerivedConstructor, kDerivedConstructor,
198 56 : &not_create_implicit_receiver, Label::kNear);
199 :
200 : // If not derived class constructor: Allocate the new receiver object.
201 56 : __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
202 56 : __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
203 56 : RelocInfo::CODE_TARGET);
204 56 : __ jmp(&post_instantiation_deopt_entry, Label::kNear);
205 :
206 : // Else: use TheHoleValue as receiver for constructor call
207 56 : __ bind(&not_create_implicit_receiver);
208 56 : __ LoadRoot(rax, RootIndex::kTheHoleValue);
209 :
210 : // ----------- S t a t e -------------
211 : // -- rax implicit receiver
212 : // -- Slot 4 / sp[0*kSystemPointerSize] new target
213 : // -- Slot 3 / sp[1*kSystemPointerSize] padding
214 : // -- Slot 2 / sp[2*kSystemPointerSize] constructor function
215 : // -- Slot 1 / sp[3*kSystemPointerSize] number of arguments (tagged)
216 : // -- Slot 0 / sp[4*kSystemPointerSize] context
217 : // -----------------------------------
218 : // Deoptimizer enters here.
219 : masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
220 56 : masm->pc_offset());
221 56 : __ bind(&post_instantiation_deopt_entry);
222 :
223 : // Restore new target.
224 56 : __ Pop(rdx);
225 :
226 : // Push the allocated receiver to the stack. We need two copies
227 : // because we may have to return the original one and the calling
228 : // conventions dictate that the called function pops the receiver.
229 56 : __ Push(rax);
230 56 : __ Push(rax);
231 :
232 : // ----------- S t a t e -------------
233 : // -- sp[0*kSystemPointerSize] implicit receiver
234 : // -- sp[1*kSystemPointerSize] implicit receiver
235 : // -- sp[2*kSystemPointerSize] padding
236 : // -- sp[3*kSystemPointerSize] constructor function
237 : // -- sp[4*kSystemPointerSize] number of arguments (tagged)
238 : // -- sp[5*kSystemPointerSize] context
239 : // -----------------------------------
240 :
241 : // Restore constructor function and argument count.
242 112 : __ movq(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
243 56 : __ SmiUntag(rax, Operand(rbp, ConstructFrameConstants::kLengthOffset));
244 :
245 : // Set up pointer to last argument.
246 112 : __ leaq(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
247 :
248 : // Check if we have enough stack space to push all arguments.
249 : // Argument count in rax. Clobbers rcx.
250 56 : Label enough_stack_space, stack_overflow;
251 56 : Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
252 56 : __ jmp(&enough_stack_space, Label::kNear);
253 :
254 56 : __ bind(&stack_overflow);
255 : // Restore context from the frame.
256 112 : __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
257 56 : __ CallRuntime(Runtime::kThrowStackOverflow);
258 : // This should be unreachable.
259 56 : __ int3();
260 :
261 56 : __ bind(&enough_stack_space);
262 :
263 : // Copy arguments and receiver to the expression stack.
264 56 : Label loop, entry;
265 : __ movq(rcx, rax);
266 : // ----------- S t a t e -------------
267 : // -- rax: number of arguments (untagged)
268 : // -- rdx: new target
269 : // -- rbx: pointer to last argument
270 : // -- rcx: counter (tagged)
271 : // -- sp[0*kSystemPointerSize]: implicit receiver
272 : // -- sp[1*kSystemPointerSize]: implicit receiver
273 : // -- sp[2*kSystemPointerSize]: padding
274 : // -- rdi and sp[3*kSystemPointerSize]: constructor function
275 : // -- sp[4*kSystemPointerSize]: number of arguments (tagged)
276 : // -- sp[5*kSystemPointerSize]: context
277 : // -----------------------------------
          : // Count-down copy loop; entering at the decrement/test handles argc == 0.
278 56 : __ jmp(&entry, Label::kNear);
279 56 : __ bind(&loop);
280 56 : __ Push(Operand(rbx, rcx, times_system_pointer_size, 0));
281 56 : __ bind(&entry);
282 : __ decq(rcx);
283 56 : __ j(greater_equal, &loop, Label::kNear);
284 :
285 : // Call the function.
286 : ParameterCount actual(rax);
287 56 : __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);
288 :
289 : // ----------- S t a t e -------------
290 : // -- rax constructor result
291 : // -- sp[0*kSystemPointerSize] implicit receiver
292 : // -- sp[1*kSystemPointerSize] padding
293 : // -- sp[2*kSystemPointerSize] constructor function
294 : // -- sp[3*kSystemPointerSize] number of arguments
295 : // -- sp[4*kSystemPointerSize] context
296 : // -----------------------------------
297 :
298 : // Store offset of return address for deoptimizer.
299 : masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
300 56 : masm->pc_offset());
301 :
302 : // Restore context from the frame.
303 112 : __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
304 :
305 : // If the result is an object (in the ECMA sense), we should get rid
306 : // of the receiver and use the result; see ECMA-262 section 13.2.2-7
307 : // on page 74.
308 56 : Label use_receiver, do_throw, leave_frame;
309 :
310 : // If the result is undefined, we jump out to using the implicit receiver.
311 : __ JumpIfRoot(rax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);
312 :
313 : // Otherwise we do a smi check and fall through to check if the return value
314 : // is a valid receiver.
315 :
316 : // If the result is a smi, it is *not* an object in the ECMA sense.
317 56 : __ JumpIfSmi(rax, &use_receiver, Label::kNear);
318 :
319 : // If the type of the result (stored in its map) is less than
320 : // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
321 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
322 56 : __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
323 56 : __ j(above_equal, &leave_frame, Label::kNear);
324 56 : __ jmp(&use_receiver, Label::kNear);
325 :
          : // do_throw is reached only via the JumpIfRoot below: a derived constructor
          : // returned a non-object and its receiver slot still holds the hole.
326 56 : __ bind(&do_throw);
327 56 : __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
328 :
329 : // Throw away the result of the constructor invocation and use the
330 : // on-stack receiver as the result.
331 56 : __ bind(&use_receiver);
332 112 : __ movq(rax, Operand(rsp, 0 * kSystemPointerSize));
333 : __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);
334 :
335 56 : __ bind(&leave_frame);
336 : // Restore the arguments count.
337 112 : __ movq(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
338 : // Leave construct frame.
339 : }
340 : // Remove caller arguments from the stack and return.
341 : __ PopReturnAddressTo(rcx);
342 56 : SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
          : // +1 slot drops the receiver in addition to the rbx argument slots.
343 112 : __ leaq(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
344 : __ PushReturnAddressFrom(rcx);
345 56 : __ ret(0);
346 56 : }
347 :
          : // Public entry point for the builtins construct stub; simply delegates to
          : // the file-local helper above.
348 56 : void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
349 56 : Generate_JSBuiltinsConstructStubHelper(masm);
350 56 : }
351 :
          : // Throws a TypeError for `new` applied to a non-constructable target (in
          : // rdi). The runtime call never returns, so no frame teardown is emitted.
352 56 : void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
353 56 : FrameScope scope(masm, StackFrame::INTERNAL);
354 56 : __ Push(rdi);
355 56 : __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
356 56 : }
357 :
358 : namespace {
359 :
360 : // Called with the native C calling convention. The corresponding function
361 : // signature is either:
362 : // using JSEntryFunction = GeneratedCode<Address(
363 : // Address root_register_value, Address new_target, Address target,
364 : // Address receiver, intptr_t argc, Address** argv)>;
365 : // or
366 : // using JSEntryFunction = GeneratedCode<Address(
367 : // Address root_register_value, MicrotaskQueue* microtask_queue)>;
          : // Common body for the C++-to-JS entry stubs (JSEntry, JSConstructEntry,
          : // RunMicrotasks entry). Builds an entry frame, saves all callee-saved
          : // registers of the host ABI, initializes the root register from the first
          : // C argument, links/unlinks the stack-handler chain and the C-entry FP, and
          : // calls |entry_trampoline|. A faked handler catches any JS exception and
          : // returns the Exception sentinel instead.
368 168 : void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
369 : Builtins::Name entry_trampoline) {
370 168 : Label invoke, handler_entry, exit;
371 168 : Label not_outermost_js, not_outermost_js_2;
372 :
373 : { // NOLINT. Scope block confuses linter.
          : // The root register is not set up yet, so nothing in this scope may use
          : // root-relative accesses.
374 : NoRootArrayScope uninitialized_root_register(masm);
375 : // Set up frame.
376 168 : __ pushq(rbp);
377 : __ movq(rbp, rsp);
378 :
379 : // Push the stack frame type.
380 168 : __ Push(Immediate(StackFrame::TypeToMarker(type)));
381 : // Reserve a slot for the context. It is filled after the root register has
382 : // been set up.
383 : __ subq(rsp, Immediate(kSystemPointerSize));
384 : // Save callee-saved registers (X64/X32/Win64 calling conventions).
385 168 : __ pushq(r12);
386 168 : __ pushq(r13);
387 168 : __ pushq(r14);
388 168 : __ pushq(r15);
389 : #ifdef _WIN64
390 : __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
391 : __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
392 : #endif
393 168 : __ pushq(rbx);
394 :
395 : #ifdef _WIN64
396 : // On Win64 XMM6-XMM15 are callee-save.
397 : __ subq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
398 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
399 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
400 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
401 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
402 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
403 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
404 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
405 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
406 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
407 : __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
408 : STATIC_ASSERT(EntryFrameConstants::kCalleeSaveXMMRegisters == 10);
409 : STATIC_ASSERT(EntryFrameConstants::kXMMRegistersBlockSize ==
410 : EntryFrameConstants::kXMMRegisterSize *
411 : EntryFrameConstants::kCalleeSaveXMMRegisters);
412 : #endif
413 :
414 : // Initialize the root register.
415 : // C calling convention. The first argument is passed in arg_reg_1.
416 : __ movq(kRootRegister, arg_reg_1);
417 : }
418 :
419 : // Save copies of the top frame descriptor on the stack.
420 : ExternalReference c_entry_fp = ExternalReference::Create(
421 168 : IsolateAddressId::kCEntryFPAddress, masm->isolate());
422 : {
423 168 : Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
424 168 : __ Push(c_entry_fp_operand);
425 : }
426 :
427 : // Store the context address in the previously-reserved slot.
428 : ExternalReference context_address = ExternalReference::Create(
429 168 : IsolateAddressId::kContextAddress, masm->isolate());
430 168 : __ Load(kScratchRegister, context_address);
431 : static constexpr int kOffsetToContextSlot = -2 * kSystemPointerSize;
432 336 : __ movq(Operand(rbp, kOffsetToContextSlot), kScratchRegister);
433 :
434 : // If this is the outermost JS call, set js_entry_sp value.
435 : ExternalReference js_entry_sp = ExternalReference::Create(
436 168 : IsolateAddressId::kJSEntrySPAddress, masm->isolate());
437 168 : __ Load(rax, js_entry_sp);
438 : __ testq(rax, rax);
439 168 : __ j(not_zero, &not_outermost_js);
440 168 : __ Push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
441 : __ movq(rax, rbp);
442 168 : __ Store(js_entry_sp, rax);
443 168 : Label cont;
444 168 : __ jmp(&cont);
445 168 : __ bind(&not_outermost_js);
446 168 : __ Push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
447 168 : __ bind(&cont);
448 :
449 : // Jump to a faked try block that does the invoke, with a faked catch
450 : // block that sets the pending exception.
451 168 : __ jmp(&invoke);
452 168 : __ bind(&handler_entry);
453 :
454 : // Store the current pc as the handler offset. It's used later to create the
455 : // handler table.
456 : masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
457 :
458 : // Caught exception: Store result (exception) in the pending exception
459 : // field in the JSEnv and return a failure sentinel.
460 : ExternalReference pending_exception = ExternalReference::Create(
461 168 : IsolateAddressId::kPendingExceptionAddress, masm->isolate());
462 168 : __ Store(pending_exception, rax);
463 168 : __ LoadRoot(rax, RootIndex::kException);
464 168 : __ jmp(&exit);
465 :
466 : // Invoke: Link this frame into the handler chain.
467 168 : __ bind(&invoke);
468 168 : __ PushStackHandler();
469 :
470 : // Invoke the function by calling through JS entry trampoline builtin and
471 : // pop the faked function when we return.
472 : Handle<Code> trampoline_code =
473 168 : masm->isolate()->builtins()->builtin_handle(entry_trampoline);
474 168 : __ Call(trampoline_code, RelocInfo::CODE_TARGET);
475 :
476 : // Unlink this frame from the handler chain.
477 168 : __ PopStackHandler();
478 :
479 168 : __ bind(&exit);
480 : // Check if the current stack frame is marked as the outermost JS frame.
481 168 : __ Pop(rbx);
482 : __ cmpq(rbx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
483 168 : __ j(not_equal, &not_outermost_js_2);
          : // Outermost frame is leaving: clear js_entry_sp so the next entry is again
          : // detected as outermost.
484 168 : __ Move(kScratchRegister, js_entry_sp);
485 336 : __ movq(Operand(kScratchRegister, 0), Immediate(0));
486 168 : __ bind(&not_outermost_js_2);
487 :
488 : // Restore the top frame descriptor from the stack.
489 : {
490 168 : Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
491 168 : __ Pop(c_entry_fp_operand);
492 : }
493 :
494 : // Restore callee-saved registers (X64 conventions).
495 : #ifdef _WIN64
496 : // On Win64 XMM6-XMM15 are callee-save
497 : __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
498 : __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
499 : __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
500 : __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
501 : __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
502 : __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
503 : __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
504 : __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
505 : __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
506 : __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
507 : __ addq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
508 : #endif
509 :
510 168 : __ popq(rbx);
511 : #ifdef _WIN64
512 : // Callee save on in Win64 ABI, arguments/volatile in AMD64 ABI.
513 : __ popq(rsi);
514 : __ popq(rdi);
515 : #endif
516 168 : __ popq(r15);
517 168 : __ popq(r14);
518 168 : __ popq(r13);
519 168 : __ popq(r12);
          : // Drops the frame-type marker and the context slot pushed above.
520 : __ addq(rsp, Immediate(2 * kSystemPointerSize)); // remove markers
521 :
522 : // Restore frame pointer and return.
523 168 : __ popq(rbp);
524 168 : __ ret(0);
525 168 : }
526 :
527 : } // namespace
528 :
          : // Entry stub for ordinary JS calls from C++.
529 56 : void Builtins::Generate_JSEntry(MacroAssembler* masm) {
530 : Generate_JSEntryVariant(masm, StackFrame::ENTRY,
531 56 : Builtins::kJSEntryTrampoline);
532 56 : }
533 :
          : // Entry stub for `new`-style calls from C++.
534 56 : void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
535 : Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
536 56 : Builtins::kJSConstructEntryTrampoline);
537 56 : }
538 :
          : // Entry stub for running microtasks from C++; uses a plain ENTRY frame.
539 56 : void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
540 : Generate_JSEntryVariant(masm, StackFrame::ENTRY,
541 56 : Builtins::kRunMicrotasksTrampoline);
542 56 : }
543 :
          : // Body shared by the JSEntry/JSConstructEntry trampolines: translates the
          : // host C ABI arguments into the JS calling convention, pushes the receiver
          : // and all arguments (dereferencing handles), and dispatches to either the
          : // Construct or the Call builtin depending on |is_construct|.
544 112 : static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
545 : bool is_construct) {
546 : // Expects six C++ function parameters.
547 : // - Address root_register_value
548 : // - Address new_target (tagged Object pointer)
549 : // - Address function (tagged JSFunction pointer)
550 : // - Address receiver (tagged Object pointer)
551 : // - intptr_t argc
552 : // - Address** argv (pointer to array of tagged Object pointers)
553 : // (see Handle::Invoke in execution.cc).
554 :
555 : // Open a C++ scope for the FrameScope.
556 : {
557 : // Platform specific argument handling. After this, the stack contains
558 : // an internal frame and the pushed function and receiver, and
559 : // register rax and rbx holds the argument count and argument array,
560 : // while rdi holds the function pointer, rsi the context, and rdx the
561 : // new.target.
562 :
563 : // MSVC parameters in:
564 : // rcx : root_register_value
565 : // rdx : new_target
566 : // r8 : function
567 : // r9 : receiver
568 : // [rsp+0x20] : argc
569 : // [rsp+0x28] : argv
570 : //
571 : // GCC parameters in:
572 : // rdi : root_register_value
573 : // rsi : new_target
574 : // rdx : function
575 : // rcx : receiver
576 : // r8 : argc
577 : // r9 : argv
578 :
579 112 : __ movq(rdi, arg_reg_3);
580 112 : __ Move(rdx, arg_reg_2);
581 : // rdi : function
582 : // rdx : new_target
583 :
584 : // Clear the context before we push it when entering the internal frame.
585 112 : __ Set(rsi, 0);
586 :
587 : // Enter an internal frame.
588 112 : FrameScope scope(masm, StackFrame::INTERNAL);
589 :
590 : // Setup the context (we need to use the caller context from the isolate).
591 : ExternalReference context_address = ExternalReference::Create(
592 112 : IsolateAddressId::kContextAddress, masm->isolate());
593 112 : __ movq(rsi, masm->ExternalReferenceAsOperand(context_address));
594 :
595 : // Push the function and the receiver onto the stack.
596 112 : __ Push(rdi);
597 112 : __ Push(arg_reg_4);
598 :
599 : #ifdef _WIN64
600 : // Load the previous frame pointer to access C arguments on stack
601 : __ movq(kScratchRegister, Operand(rbp, 0));
602 : // Load the number of arguments and setup pointer to the arguments.
603 : __ movq(rax, Operand(kScratchRegister, EntryFrameConstants::kArgcOffset));
604 : __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
605 : #else // _WIN64
606 : // Load the number of arguments and setup pointer to the arguments.
607 : __ movq(rax, r8);
608 : __ movq(rbx, r9);
609 : #endif // _WIN64
610 :
611 : // Current stack contents:
612 : // [rsp + 2 * kSystemPointerSize ... ] : Internal frame
613 : // [rsp + kSystemPointerSize] : function
614 : // [rsp] : receiver
615 : // Current register contents:
616 : // rax : argc
617 : // rbx : argv
618 : // rsi : context
619 : // rdi : function
620 : // rdx : new.target
621 :
622 : // Check if we have enough stack space to push all arguments.
623 : // Argument count in rax. Clobbers rcx.
624 112 : Label enough_stack_space, stack_overflow;
625 112 : Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
626 112 : __ jmp(&enough_stack_space, Label::kNear);
627 :
628 112 : __ bind(&stack_overflow);
629 112 : __ CallRuntime(Runtime::kThrowStackOverflow);
630 : // This should be unreachable.
631 112 : __ int3();
632 :
633 112 : __ bind(&enough_stack_space);
634 :
635 : // Copy arguments to the stack in a loop.
636 : // Register rbx points to array of pointers to handle locations.
637 : // Push the values of these handles.
638 112 : Label loop, entry;
639 112 : __ Set(rcx, 0); // Set loop variable to 0.
          : // Forward copy loop; entering at the compare handles argc == 0.
640 112 : __ jmp(&entry, Label::kNear);
641 112 : __ bind(&loop);
642 224 : __ movq(kScratchRegister, Operand(rbx, rcx, times_system_pointer_size, 0));
643 112 : __ Push(Operand(kScratchRegister, 0)); // dereference handle
644 : __ addq(rcx, Immediate(1));
645 112 : __ bind(&entry);
646 : __ cmpq(rcx, rax);
647 112 : __ j(not_equal, &loop, Label::kNear);
648 :
649 : // Invoke the builtin code.
650 : Handle<Code> builtin = is_construct
651 : ? BUILTIN_CODE(masm->isolate(), Construct)
652 224 : : masm->isolate()->builtins()->Call();
653 112 : __ Call(builtin, RelocInfo::CODE_TARGET);
654 :
655 : // Exit the internal frame. Notice that this also removes the empty
656 : // context and the function left on the stack by the code
657 : // invocation.
658 : }
659 :
660 112 : __ ret(0);
661 112 : }
662 :
          : // Trampoline for ordinary calls (dispatches to the Call builtin).
663 56 : void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
664 56 : Generate_JSEntryTrampolineHelper(masm, false);
665 56 : }
666 :
          : // Trampoline for construct calls (dispatches to the Construct builtin).
667 56 : void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
668 56 : Generate_JSEntryTrampolineHelper(masm, true);
669 56 : }
670 :
          : // Moves the C-ABI microtask-queue argument into the register expected by
          : // the RunMicrotasks descriptor, then tail-calls the RunMicrotasks builtin.
671 56 : void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
672 : // arg_reg_2: microtask_queue
673 56 : __ movq(RunMicrotasksDescriptor::MicrotaskQueueRegister(), arg_reg_2);
674 56 : __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
675 56 : }
676 :
          : // If |sfi_data| holds an InterpreterData object, replaces it in place with
          : // the BytecodeArray stored inside; otherwise |sfi_data| is left unchanged.
          : // |scratch1| is clobbered by the type check.
677 56 : static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
678 : Register sfi_data,
679 : Register scratch1) {
680 56 : Label done;
681 :
682 56 : __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
683 56 : __ j(not_equal, &done, Label::kNear);
684 :
685 56 : __ LoadTaggedPointerField(
686 56 : sfi_data, FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
687 :
688 56 : __ bind(&done);
689 56 : }
690 :
691 : // static
692 56 : void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
693 : // ----------- S t a t e -------------
694 : // -- rax : the value to pass to the generator
695 : // -- rdx : the JSGeneratorObject to resume
696 : // -- rsp[0] : return address
697 : // -----------------------------------
698 56 : __ AssertGeneratorObject(rdx);
699 :
700 : // Store input value into generator object.
701 56 : __ StoreTaggedField(
702 56 : FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
703 : __ RecordWriteField(rdx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
704 56 : kDontSaveFPRegs);
705 :
706 56 : Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
707 56 : Register decompr_scratch2 = COMPRESS_POINTERS_BOOL ? r12 : no_reg;
708 :
709 : // Load suspended function and context.
710 : __ LoadTaggedPointerField(
711 56 : rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
712 56 : __ LoadTaggedPointerField(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
713 :
714 : // Flood function if we are stepping.
715 56 : Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
716 56 : Label stepping_prepared;
717 : ExternalReference debug_hook =
718 56 : ExternalReference::debug_hook_on_function_call_address(masm->isolate());
719 56 : Operand debug_hook_operand = masm->ExternalReferenceAsOperand(debug_hook);
720 56 : __ cmpb(debug_hook_operand, Immediate(0));
721 56 : __ j(not_equal, &prepare_step_in_if_stepping);
722 :
723 : // Flood function if we need to continue stepping in the suspended generator.
724 : ExternalReference debug_suspended_generator =
725 56 : ExternalReference::debug_suspended_generator_address(masm->isolate());
726 : Operand debug_suspended_generator_operand =
727 56 : masm->ExternalReferenceAsOperand(debug_suspended_generator);
728 56 : __ cmpq(rdx, debug_suspended_generator_operand);
729 56 : __ j(equal, &prepare_step_in_suspended_generator);
730 56 : __ bind(&stepping_prepared);
731 :
732 : // Check the stack for overflow. We are not trying to catch interruptions
733 : // (i.e. debug break and preemption) here, so check the "real stack limit".
734 56 : Label stack_overflow;
735 56 : __ CompareRoot(rsp, RootIndex::kRealStackLimit);
736 56 : __ j(below, &stack_overflow);
737 :
738 : // Pop return address.
739 : __ PopReturnAddressTo(rax);
740 :
741 : // Push receiver.
742 : __ PushTaggedPointerField(
743 56 : FieldOperand(rdx, JSGeneratorObject::kReceiverOffset), decompr_scratch1);
744 :
745 : // ----------- S t a t e -------------
746 : // -- rax : return address
747 : // -- rdx : the JSGeneratorObject to resume
748 : // -- rdi : generator function
749 : // -- rsi : generator context
750 : // -- rsp[0] : generator receiver
751 : // -----------------------------------
752 :
753 : // Copy the function arguments from the generator object's register file.
754 : __ LoadTaggedPointerField(
755 56 : rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
756 : __ movzxwq(
757 : rcx, FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));
758 :
759 : __ LoadTaggedPointerField(
760 56 : rbx, FieldOperand(rdx, JSGeneratorObject::kParametersAndRegistersOffset));
761 :
762 : {
763 56 : Label done_loop, loop;
764 56 : __ Set(r9, 0);
765 :
766 56 : __ bind(&loop);
767 : __ cmpl(r9, rcx);
768 56 : __ j(greater_equal, &done_loop, Label::kNear);
769 : __ PushTaggedAnyField(
770 : FieldOperand(rbx, r9, times_tagged_size, FixedArray::kHeaderSize),
771 56 : decompr_scratch1, decompr_scratch2);
772 : __ addl(r9, Immediate(1));
773 56 : __ jmp(&loop);
774 :
775 56 : __ bind(&done_loop);
776 : }
777 :
778 : // Underlying function needs to have bytecode available.
779 56 : if (FLAG_debug_code) {
780 : __ LoadTaggedPointerField(
781 0 : rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
782 : __ LoadTaggedPointerField(
783 0 : rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
784 0 : GetSharedFunctionInfoBytecode(masm, rcx, kScratchRegister);
785 0 : __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
786 0 : __ Assert(equal, AbortReason::kMissingBytecodeArray);
787 : }
788 :
789 : // Resume (Ignition/TurboFan) generator object.
790 : {
791 : __ PushReturnAddressFrom(rax);
792 : __ LoadTaggedPointerField(
793 56 : rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
794 : __ movzxwq(rax, FieldOperand(
795 : rax, SharedFunctionInfo::kFormalParameterCountOffset));
796 : // We abuse new.target both to indicate that this is a resume call and to
797 : // pass in the generator object. In ordinary calls, new.target is always
798 : // undefined because generator functions are non-constructable.
799 : static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
800 56 : __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
801 56 : __ JumpCodeObject(rcx);
802 : }
803 :
804 56 : __ bind(&prepare_step_in_if_stepping);
805 : {
806 56 : FrameScope scope(masm, StackFrame::INTERNAL);
807 56 : __ Push(rdx);
808 56 : __ Push(rdi);
809 : // Push hole as receiver since we do not use it for stepping.
810 56 : __ PushRoot(RootIndex::kTheHoleValue);
811 56 : __ CallRuntime(Runtime::kDebugOnFunctionCall);
812 56 : __ Pop(rdx);
813 : __ LoadTaggedPointerField(
814 56 : rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
815 : }
816 56 : __ jmp(&stepping_prepared);
817 :
818 56 : __ bind(&prepare_step_in_suspended_generator);
819 : {
820 56 : FrameScope scope(masm, StackFrame::INTERNAL);
821 56 : __ Push(rdx);
822 56 : __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
823 56 : __ Pop(rdx);
824 : __ LoadTaggedPointerField(
825 56 : rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
826 : }
827 56 : __ jmp(&stepping_prepared);
828 :
829 56 : __ bind(&stack_overflow);
830 : {
831 56 : FrameScope scope(masm, StackFrame::INTERNAL);
832 56 : __ CallRuntime(Runtime::kThrowStackOverflow);
833 56 : __ int3(); // This should be unreachable.
834 : }
835 56 : }
836 :
837 : // TODO(juliana): if we remove the code below then we don't need all
838 : // the parameters.
     : // Installs |optimized_code| into |closure|'s code slot and emits the GC
     : // write barrier for that store. |scratch1| and |scratch2| are clobbered;
     : // |scratch3| is currently unused here (see TODO above about trimming the
     : // parameter list).
839 56 : static void ReplaceClosureCodeWithOptimizedCode(
840 : MacroAssembler* masm, Register optimized_code, Register closure,
841 : Register scratch1, Register scratch2, Register scratch3) {
842 :
843 : // Store the optimized code in the closure.
844 56 : __ StoreTaggedField(FieldOperand(closure, JSFunction::kCodeOffset),
845 56 : optimized_code);
846 56 : __ movq(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
847 : __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
848 56 : kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
849 56 : }
850 :
     : // Tears down the interpreter frame: reads the argument count from the
     : // on-frame BytecodeArray, leaves the frame, and drops the receiver plus
     : // arguments from the stack while preserving the return address.
     : // Both scratch registers are clobbered.
851 56 : static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
852 : Register scratch2) {
853 56 : Register args_count = scratch1;
854 56 : Register return_pc = scratch2;
855 :
     : // kParameterSizeOffset holds the argument area size in bytes
     : // (receiver included), so it can be added to rsp directly below.
856 : // Get the arguments + receiver count.
857 112 : __ movq(args_count,
858 : Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
859 : __ movl(args_count,
860 : FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
861 :
862 : // Leave the frame (also dropping the register file).
863 56 : __ leave();
864 :
865 : // Drop receiver + arguments.
866 : __ PopReturnAddressTo(return_pc)
867 : __ addq(rsp, args_count);
868 : __ PushReturnAddressFrom(return_pc);
869 56 : }
870 :
871 : // Tail-call |function_id| if |smi_entry| == |marker|
     : // Otherwise falls through to the code after the emitted sequence.
     : // The tail call preserves rdx (new target) and rdi (target function)
     : // for the callee, per GenerateTailCallToReturnedCode's contract.
872 168 : static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
873 : Register smi_entry,
874 : OptimizationMarker marker,
875 : Runtime::FunctionId function_id) {
876 168 : Label no_match;
877 168 : __ SmiCompare(smi_entry, Smi::FromEnum(marker));
878 168 : __ j(not_equal, &no_match);
879 168 : GenerateTailCallToReturnedCode(masm, function_id);
880 168 : __ bind(&no_match);
881 168 : }
882 :
     : // Inspects the feedback vector's optimized-code slot. Depending on its
     : // contents this either (a) tail-calls a runtime function for a pending
     : // optimization marker, (b) installs and tail-calls valid optimized code,
     : // (c) tail-calls the runtime to evict deoptimized code, or (d) falls
     : // through when the slot is clear so the caller continues in the
     : // interpreter. All three scratch registers may be clobbered.
883 56 : static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
884 : Register feedback_vector,
885 : Register scratch1, Register scratch2,
886 : Register scratch3) {
887 : // ----------- S t a t e -------------
888 : // -- rdx : new target (preserved for callee if needed, and caller)
889 : // -- rdi : target function (preserved for callee if needed, and caller)
890 : // -- feedback vector (preserved for caller if needed)
891 : // -----------------------------------
892 : DCHECK(!AreAliased(feedback_vector, rdx, rdi, scratch1, scratch2, scratch3));
893 :
894 56 : Label optimized_code_slot_is_weak_ref, fallthrough;
895 :
896 56 : Register closure = rdi;
897 56 : Register optimized_code_entry = scratch1;
898 56 : Register decompr_scratch = COMPRESS_POINTERS_BOOL ? scratch2 : no_reg;
899 :
900 56 : __ LoadAnyTaggedField(
901 : optimized_code_entry,
902 : FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset),
903 56 : decompr_scratch);
904 :
905 : // Check if the code entry is a Smi. If yes, we interpret it as an
906 : // optimisation marker. Otherwise, interpret it as a weak reference to a code
907 : // object.
908 56 : __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
909 :
910 : {
911 : // Optimized code slot is a Smi optimization marker.
912 :
913 : // Fall through if no optimization trigger.
914 : __ SmiCompare(optimized_code_entry,
915 56 : Smi::FromEnum(OptimizationMarker::kNone));
916 56 : __ j(equal, &fallthrough);
917 :
918 : // TODO(v8:8394): The logging of first execution will break if
919 : // feedback vectors are not allocated. We need to find a different way of
920 : // logging these events if required.
921 : TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
922 : OptimizationMarker::kLogFirstExecution,
923 56 : Runtime::kFunctionFirstExecution);
924 : TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
925 : OptimizationMarker::kCompileOptimized,
926 56 : Runtime::kCompileOptimized_NotConcurrent);
927 : TailCallRuntimeIfMarkerEquals(
928 : masm, optimized_code_entry,
929 : OptimizationMarker::kCompileOptimizedConcurrent,
930 56 : Runtime::kCompileOptimized_Concurrent);
931 :
932 : {
933 : // Otherwise, the marker is InOptimizationQueue, so fall through hoping
934 : // that an interrupt will eventually update the slot with optimized code.
935 56 : if (FLAG_debug_code) {
936 : __ SmiCompare(optimized_code_entry,
937 0 : Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
938 0 : __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
939 : }
940 56 : __ jmp(&fallthrough);
941 : }
942 : }
943 :
944 : {
945 : // Optimized code slot is a weak reference.
946 56 : __ bind(&optimized_code_slot_is_weak_ref);
947 :
     : // If the weak reference has been cleared (code was collected),
     : // continue in the interpreter.
948 56 : __ LoadWeakValue(optimized_code_entry, &fallthrough);
949 :
950 : // Check if the optimized code is marked for deopt. If it is, call the
951 : // runtime to clear it.
952 56 : Label found_deoptimized_code;
953 : __ LoadTaggedPointerField(
954 : scratch2,
955 56 : FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
956 : __ testl(
957 : FieldOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset),
958 : Immediate(1 << Code::kMarkedForDeoptimizationBit));
959 56 : __ j(not_zero, &found_deoptimized_code);
960 :
961 : // Optimized code is good, get it into the closure and link the closure into
962 : // the optimized functions list, then tail call the optimized code.
963 : // The feedback vector is no longer used, so re-use it as a scratch
964 : // register.
965 : ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
966 56 : scratch2, scratch3, feedback_vector);
967 : static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
968 56 : __ Move(rcx, optimized_code_entry);
969 56 : __ JumpCodeObject(rcx);
970 :
971 : // Optimized code slot contains deoptimized code, evict it and re-enter the
972 : // closure's code.
973 56 : __ bind(&found_deoptimized_code);
974 56 : GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
975 : }
976 :
977 : // Fall-through if the optimized code cell is clear and there is no
978 : // optimization marker.
979 56 : __ bind(&fallthrough);
980 56 : }
981 :
982 : // Advance the current bytecode offset. This simulates what all bytecode
983 : // handlers do upon completion of the underlying operation. Will bail out to a
984 : // label if the bytecode (without prefix) is a return bytecode.
     : // |bytecode_offset| and |bytecode| are updated in place (after a
     : // Wide/ExtraWide prefix, |bytecode| holds the following bytecode);
     : // |scratch1| is clobbered as the size-table pointer.
985 112 : static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
986 : Register bytecode_array,
987 : Register bytecode_offset,
988 : Register bytecode, Register scratch1,
989 : Label* if_return) {
990 112 : Register bytecode_size_table = scratch1;
991 : DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
992 : bytecode));
993 :
994 112 : __ Move(bytecode_size_table,
995 112 : ExternalReference::bytecode_size_table_address());
996 :
997 : // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
998 112 : Label process_bytecode, extra_wide;
999 : STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
1000 : STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
1001 : STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
1002 : STATIC_ASSERT(3 ==
1003 : static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
     : // Per the asserts above, values 0-3 are exactly the prefix bytecodes;
     : // odd values (1, 3) are the ExtraWide variants.
1004 112 : __ cmpb(bytecode, Immediate(0x3));
1005 112 : __ j(above, &process_bytecode, Label::kNear);
1006 112 : __ testb(bytecode, Immediate(0x1));
1007 112 : __ j(not_equal, &extra_wide, Label::kNear);
1008 :
1009 : // Load the next bytecode and update table to the wide scaled table.
1010 : __ incl(bytecode_offset);
1011 224 : __ movzxbq(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
1012 : __ addq(bytecode_size_table,
1013 : Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
1014 112 : __ jmp(&process_bytecode, Label::kNear);
1015 :
1016 112 : __ bind(&extra_wide);
1017 : // Load the next bytecode and update table to the extra wide scaled table.
1018 : __ incl(bytecode_offset);
1019 224 : __ movzxbq(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
1020 : __ addq(bytecode_size_table,
1021 : Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
1022 :
1023 112 : __ bind(&process_bytecode);
1024 :
1025 : // Bailout to the return label if this is a return bytecode.
1026 : #define JUMP_IF_EQUAL(NAME) \
1027 : __ cmpb(bytecode, \
1028 : Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
1029 : __ j(equal, if_return, Label::kFar);
1030 224 : RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
1031 : #undef JUMP_IF_EQUAL
1032 :
1033 : // Otherwise, load the size of the current bytecode and advance the offset.
1034 224 : __ addl(bytecode_offset,
1035 112 : Operand(bytecode_size_table, bytecode, times_int_size, 0));
1036 112 : }
1037 :
1038 : // Generate code for entering a JS function with the interpreter.
1039 : // On entry to the function the receiver and arguments have been pushed on the
1040 : // stack left to right. The actual argument count matches the formal parameter
1041 : // count expected by the function.
1042 : //
1043 : // The live registers are:
1044 : // o rdi: the JS function object being called
1045 : // o rdx: the incoming new target or generator object
1046 : // o rsi: our context
1047 : // o rbp: the caller's frame pointer
1048 : // o rsp: stack pointer (pointing to return address)
1049 : //
1050 : // The function builds an interpreter frame. See InterpreterFrameConstants in
1051 : // frames.h for its layout.
1052 56 : void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
1053 : Register closure = rdi;
1054 : Register feedback_vector = rbx;
1055 :
1056 : // Get the bytecode array from the function object and load it into
1057 : // kInterpreterBytecodeArrayRegister.
1058 56 : __ LoadTaggedPointerField(
1059 56 : rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1060 : __ LoadTaggedPointerField(
1061 : kInterpreterBytecodeArrayRegister,
1062 56 : FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
1063 : GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister,
1064 56 : kScratchRegister);
1065 :
1066 : // The bytecode array could have been flushed from the shared function info,
1067 : // if so, call into CompileLazy.
1068 56 : Label compile_lazy;
1069 56 : __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE, rax);
1070 56 : __ j(not_equal, &compile_lazy);
1071 :
1072 : // Load the feedback vector from the closure.
1073 : __ LoadTaggedPointerField(
1074 56 : feedback_vector, FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1075 : __ LoadTaggedPointerField(feedback_vector,
1076 56 : FieldOperand(feedback_vector, Cell::kValueOffset));
1077 :
1078 56 : Label push_stack_frame;
1079 : // Check if feedback vector is valid. If valid, check for optimized code
1080 : // and update invocation count. Otherwise, setup the stack frame.
1081 : __ LoadTaggedPointerField(
1082 56 : rcx, FieldOperand(feedback_vector, HeapObject::kMapOffset));
1083 56 : __ CmpInstanceType(rcx, FEEDBACK_VECTOR_TYPE);
1084 56 : __ j(not_equal, &push_stack_frame);
1085 :
1086 : // Read off the optimized code slot in the feedback vector, and if there
1087 : // is optimized code or an optimization marker, call that instead.
     : // May tail-call away and never return here; clobbers rcx, r11, r15.
1088 56 : MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r11, r15);
1089 :
1090 : // Increment invocation count for the function.
1091 : __ incl(
1092 : FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
1093 :
1094 : // Open a frame scope to indicate that there is a frame on the stack. The
1095 : // MANUAL indicates that the scope shouldn't actually generate code to set up
1096 : // the frame (that is done below).
1097 56 : __ bind(&push_stack_frame);
1098 56 : FrameScope frame_scope(masm, StackFrame::MANUAL);
1099 56 : __ pushq(rbp); // Caller's frame pointer.
1100 : __ movq(rbp, rsp);
1101 56 : __ Push(rsi); // Callee's context.
1102 56 : __ Push(rdi); // Callee's JS function.
1103 :
1104 : // Reset code age and the OSR arming. The OSR field and BytecodeAgeOffset are
1105 : // 8-bit fields next to each other, so we could just optimize by writing a
1106 : // 16-bit. These static asserts guard our assumption is valid.
1107 : STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
1108 : BytecodeArray::kOSRNestingLevelOffset + kCharSize);
1109 : STATIC_ASSERT(BytecodeArray::kNoAgeBytecodeAge == 0);
1110 56 : __ movw(FieldOperand(kInterpreterBytecodeArrayRegister,
1111 : BytecodeArray::kOSRNestingLevelOffset),
1112 56 : Immediate(0));
1113 :
1114 : // Load initial bytecode offset.
1115 : __ movq(kInterpreterBytecodeOffsetRegister,
1116 : Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
1117 :
1118 : // Push bytecode array and Smi tagged bytecode offset.
1119 56 : __ Push(kInterpreterBytecodeArrayRegister);
1120 56 : __ SmiTag(rcx, kInterpreterBytecodeOffsetRegister);
1121 56 : __ Push(rcx);
1122 :
1123 : // Allocate the local and temporary register file on the stack.
1124 : {
1125 : // Load frame size from the BytecodeArray object.
1126 : __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
1127 : BytecodeArray::kFrameSizeOffset));
1128 :
1129 : // Do a stack check to ensure we don't go over the limit.
1130 56 : Label ok;
1131 : __ movq(rax, rsp);
1132 : __ subq(rax, rcx);
1133 56 : __ CompareRoot(rax, RootIndex::kRealStackLimit);
1134 56 : __ j(above_equal, &ok, Label::kNear);
1135 56 : __ CallRuntime(Runtime::kThrowStackOverflow);
1136 56 : __ bind(&ok);
1137 :
1138 : // If ok, push undefined as the initial value for all register file entries.
1139 56 : Label loop_header;
1140 56 : Label loop_check;
1141 56 : __ LoadRoot(rax, RootIndex::kUndefinedValue);
1142 56 : __ j(always, &loop_check, Label::kNear);
1143 56 : __ bind(&loop_header);
1144 : // TODO(rmcilroy): Consider doing more than one push per loop iteration.
1145 56 : __ Push(rax);
1146 : // Continue loop if not done.
1147 56 : __ bind(&loop_check);
1148 : __ subq(rcx, Immediate(kSystemPointerSize));
1149 56 : __ j(greater_equal, &loop_header, Label::kNear);
1150 : }
1151 :
1152 : // If the bytecode array has a valid incoming new target or generator object
1153 : // register, initialize it with incoming value which was passed in rdx.
1154 56 : Label no_incoming_new_target_or_generator_register;
1155 : __ movsxlq(
1156 : rax,
1157 : FieldOperand(kInterpreterBytecodeArrayRegister,
1158 56 : BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
1159 : __ testl(rax, rax);
1160 56 : __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
1161 112 : __ movq(Operand(rbp, rax, times_system_pointer_size, 0), rdx);
1162 56 : __ bind(&no_incoming_new_target_or_generator_register);
1163 :
1164 : // Load accumulator with undefined.
1165 56 : __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1166 :
1167 : // Load the dispatch table into a register and dispatch to the bytecode
1168 : // handler at the current bytecode offset.
1169 56 : Label do_dispatch;
1170 56 : __ bind(&do_dispatch);
1171 56 : __ Move(
1172 : kInterpreterDispatchTableRegister,
1173 56 : ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1174 112 : __ movzxbq(r11, Operand(kInterpreterBytecodeArrayRegister,
1175 : kInterpreterBytecodeOffsetRegister, times_1, 0));
1176 112 : __ movq(kJavaScriptCallCodeStartRegister,
1177 : Operand(kInterpreterDispatchTableRegister, r11,
1178 : times_system_pointer_size, 0));
1179 56 : __ call(kJavaScriptCallCodeStartRegister);
     : // Record the pc right after the handler call so that
     : // Generate_InterpreterEnterBytecode can compute the re-entry address.
1180 56 : masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
1181 :
1182 : // Any returns to the entry trampoline are either due to the return bytecode
1183 : // or the interpreter tail calling a builtin and then a dispatch.
1184 :
1185 : // Get bytecode array and bytecode offset from the stack frame.
1186 112 : __ movq(kInterpreterBytecodeArrayRegister,
1187 : Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1188 112 : __ movq(kInterpreterBytecodeOffsetRegister,
1189 : Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1190 : __ SmiUntag(kInterpreterBytecodeOffsetRegister,
1191 56 : kInterpreterBytecodeOffsetRegister);
1192 :
1193 : // Either return, or advance to the next bytecode and dispatch.
1194 56 : Label do_return;
1195 112 : __ movzxbq(rbx, Operand(kInterpreterBytecodeArrayRegister,
1196 : kInterpreterBytecodeOffsetRegister, times_1, 0));
1197 : AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1198 : kInterpreterBytecodeOffsetRegister, rbx, rcx,
1199 56 : &do_return);
1200 56 : __ jmp(&do_dispatch);
1201 :
1202 56 : __ bind(&do_return);
1203 : // The return value is in rax.
1204 56 : LeaveInterpreterFrame(masm, rbx, rcx);
1205 56 : __ ret(0);
1206 :
1207 56 : __ bind(&compile_lazy);
1208 56 : GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1209 56 : __ int3(); // Should not return.
1210 56 : }
1211 :
     : // Pushes |num_args| arguments onto the stack, starting at |start_address|
     : // and walking downwards in memory (arguments are laid out consecutively
     : // above |start_address|). Clobbers |scratch| and |start_address|;
     : // |num_args| is preserved.
1212 336 : static void Generate_InterpreterPushArgs(MacroAssembler* masm,
1213 : Register num_args,
1214 : Register start_address,
1215 : Register scratch) {
1216 : // Find the address of the last argument.
1217 336 : __ Move(scratch, num_args);
1218 336 : __ shlq(scratch, Immediate(kSystemPointerSizeLog2));
1219 : __ negq(scratch);
1220 : __ addq(scratch, start_address);
1221 :
1222 : // Push the arguments.
1223 336 : Label loop_header, loop_check;
1224 336 : __ j(always, &loop_check, Label::kNear);
1225 336 : __ bind(&loop_header);
1226 336 : __ Push(Operand(start_address, 0));
1227 : __ subq(start_address, Immediate(kSystemPointerSize));
1228 336 : __ bind(&loop_check);
1229 : __ cmpq(start_address, scratch);
1230 336 : __ j(greater, &loop_header, Label::kNear);
1231 336 : }
1232 :
1233 : // static
1234 168 : void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1235 : MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1236 : InterpreterPushArgsMode mode) {
1237 : DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1238 : // ----------- S t a t e -------------
1239 : // -- rax : the number of arguments (not including the receiver)
1240 : // -- rbx : the address of the first argument to be pushed. Subsequent
1241 : // arguments should be consecutive above this, in the same order as
1242 : // they are to be pushed onto the stack.
1243 : // -- rdi : the target to call (can be any Object).
1244 : // -----------------------------------
1245 168 : Label stack_overflow;
1246 :
1247 : // Number of values to be pushed.
1248 336 : __ leal(rcx, Operand(rax, 1)); // Add one for receiver.
1249 :
1250 : // Add a stack check before pushing arguments.
1251 168 : Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);
1252 :
1253 : // Pop return address to allow tail-call after pushing arguments.
1254 : __ PopReturnAddressTo(kScratchRegister);
1255 :
1256 : // Push "undefined" as the receiver arg if we need to.
1257 168 : if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1258 56 : __ PushRoot(RootIndex::kUndefinedValue);
1259 : __ decl(rcx); // Subtract one for receiver.
1260 : }
1261 :
1262 : // rbx and rdx will be modified.
1263 168 : Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);
1264 :
     : // For a spread call the spread argument was pushed last; pop it back
     : // into rbx so CallWithSpread receives it in a register.
1265 168 : if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1266 56 : __ Pop(rbx); // Pass the spread in a register
1267 : __ decl(rax); // Subtract one for spread
1268 : }
1269 :
1270 : // Call the target.
1271 : __ PushReturnAddressFrom(kScratchRegister); // Re-push return address.
1272 :
1273 168 : if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1274 56 : __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1275 56 : RelocInfo::CODE_TARGET);
1276 : } else {
1277 112 : __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
1278 112 : RelocInfo::CODE_TARGET);
1279 : }
1280 :
1281 : // Throw stack overflow exception.
1282 168 : __ bind(&stack_overflow);
1283 : {
1284 168 : __ TailCallRuntime(Runtime::kThrowStackOverflow);
1285 : // This should be unreachable.
1286 168 : __ int3();
1287 : }
1288 168 : }
1289 :
1290 : // static
1291 168 : void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1292 : MacroAssembler* masm, InterpreterPushArgsMode mode) {
1293 : // ----------- S t a t e -------------
1294 : // -- rax : the number of arguments (not including the receiver)
1295 : // -- rdx : the new target (either the same as the constructor or
1296 : // the JSFunction on which new was invoked initially)
1297 : // -- rdi : the constructor to call (can be any Object)
1298 : // -- rbx : the allocation site feedback if available, undefined otherwise
1299 : // -- rcx : the address of the first argument to be pushed. Subsequent
1300 : // arguments should be consecutive above this, in the same order as
1301 : // they are to be pushed onto the stack.
1302 : // -----------------------------------
1303 168 : Label stack_overflow;
1304 :
1305 : // Add a stack check before pushing arguments.
1306 168 : Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);
1307 :
1308 : // Pop return address to allow tail-call after pushing arguments.
1309 : __ PopReturnAddressTo(kScratchRegister);
1310 :
1311 : // Push slot for the receiver to be constructed.
1312 168 : __ Push(Immediate(0));
1313 :
1314 : // rcx and r8 will be modified.
1315 168 : Generate_InterpreterPushArgs(masm, rax, rcx, r8);
1316 :
     : // For a spread construct the spread argument was pushed last; pop it
     : // back into rbx so ConstructWithSpread receives it in a register.
1317 168 : if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1318 56 : __ Pop(rbx); // Pass the spread in a register
1319 56 : __ decl(rax); // Subtract one for spread
1320 :
1321 : // Push return address in preparation for the tail-call.
1322 : __ PushReturnAddressFrom(kScratchRegister);
1323 : } else {
1324 : __ PushReturnAddressFrom(kScratchRegister);
1325 112 : __ AssertUndefinedOrAllocationSite(rbx);
1326 : }
1327 :
1328 168 : if (mode == InterpreterPushArgsMode::kArrayFunction) {
1329 : // Tail call to the array construct stub (still in the caller
1330 : // context at this point).
1331 56 : __ AssertFunction(rdi);
1332 : // Jump to the constructor function (rax, rbx, rdx passed on).
1333 56 : Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
1334 56 : __ Jump(code, RelocInfo::CODE_TARGET);
1335 112 : } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1336 : // Call the constructor (rax, rdx, rdi passed on).
1337 56 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1338 56 : RelocInfo::CODE_TARGET);
1339 : } else {
1340 : DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1341 : // Call the constructor (rax, rdx, rdi passed on).
1342 56 : __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1343 : }
1344 :
1345 : // Throw stack overflow exception.
1346 168 : __ bind(&stack_overflow);
1347 : {
1348 168 : __ TailCallRuntime(Runtime::kThrowStackOverflow);
1349 : // This should be unreachable.
1350 168 : __ int3();
1351 : }
1352 168 : }
1353 :
     : // Re-enters the interpreter mid-frame: pushes a return address pointing
     : // into the (possibly per-function) interpreter entry trampoline, reloads
     : // the dispatch table, bytecode array and offset from the current frame,
     : // and jumps to the handler for the bytecode at the current offset.
1354 112 : static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1355 : // Set the return address to the correct point in the interpreter entry
1356 : // trampoline.
1357 112 : Label builtin_trampoline, trampoline_loaded;
1358 : Smi interpreter_entry_return_pc_offset(
1359 : masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1360 : DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1361 :
1362 : // If the SFI function_data is an InterpreterData, the function will have a
1363 : // custom copy of the interpreter entry trampoline for profiling. If so,
1364 : // get the custom trampoline, otherwise grab the entry address of the global
1365 : // trampoline.
1366 224 : __ movq(rbx, Operand(rbp, StandardFrameConstants::kFunctionOffset));
1367 112 : __ LoadTaggedPointerField(
1368 112 : rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
1369 : __ LoadTaggedPointerField(
1370 112 : rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));
1371 112 : __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
1372 112 : __ j(not_equal, &builtin_trampoline, Label::kNear);
1373 :
1374 : __ movq(rbx,
1375 : FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
1376 : __ addq(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1377 112 : __ jmp(&trampoline_loaded, Label::kNear);
1378 :
1379 112 : __ bind(&builtin_trampoline);
1380 : // TODO(jgruber): Replace this by a lookup in the builtin entry table.
1381 112 : __ movq(rbx,
1382 : __ ExternalReferenceAsOperand(
1383 : ExternalReference::
1384 : address_of_interpreter_entry_trampoline_instruction_start(
1385 : masm->isolate()),
1386 : kScratchRegister));
1387 :
1388 112 : __ bind(&trampoline_loaded);
     : // The offset recorded by SetInterpreterEntryReturnPCOffset points just
     : // past the dispatch call inside the trampoline.
1389 : __ addq(rbx, Immediate(interpreter_entry_return_pc_offset->value()));
1390 112 : __ Push(rbx);
1391 :
1392 : // Initialize dispatch table register.
1393 112 : __ Move(
1394 : kInterpreterDispatchTableRegister,
1395 112 : ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1396 :
1397 : // Get the bytecode array pointer from the frame.
1398 224 : __ movq(kInterpreterBytecodeArrayRegister,
1399 : Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1400 :
1401 112 : if (FLAG_debug_code) {
1402 : // Check function data field is actually a BytecodeArray object.
1403 0 : __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
1404 : __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
1405 0 : rbx);
1406 : __ Assert(
1407 : equal,
1408 0 : AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1409 : }
1410 :
1411 : // Get the target bytecode offset from the frame.
1412 224 : __ movq(kInterpreterBytecodeOffsetRegister,
1413 : Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1414 : __ SmiUntag(kInterpreterBytecodeOffsetRegister,
1415 112 : kInterpreterBytecodeOffsetRegister);
1416 :
1417 : // Dispatch to the target bytecode.
1418 224 : __ movzxbq(r11, Operand(kInterpreterBytecodeArrayRegister,
1419 : kInterpreterBytecodeOffsetRegister, times_1, 0));
1420 224 : __ movq(kJavaScriptCallCodeStartRegister,
1421 : Operand(kInterpreterDispatchTableRegister, r11,
1422 : times_system_pointer_size, 0));
1423 112 : __ jmp(kJavaScriptCallCodeStartRegister);
1424 112 : }
1425 :
     : // Advances the on-frame bytecode offset past the current bytecode, then
     : // re-enters the interpreter at the new offset. Used after builtins called
     : // from bytecode handlers return. A return bytecode here is a bug (the
     : // handler should have returned instead), hence the Abort below.
1426 56 : void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1427 : // Get bytecode array and bytecode offset from the stack frame.
1428 112 : __ movq(kInterpreterBytecodeArrayRegister,
1429 : Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1430 112 : __ movq(kInterpreterBytecodeOffsetRegister,
1431 : Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1432 56 : __ SmiUntag(kInterpreterBytecodeOffsetRegister,
1433 56 : kInterpreterBytecodeOffsetRegister);
1434 :
1435 : // Load the current bytecode.
1436 112 : __ movzxbq(rbx, Operand(kInterpreterBytecodeArrayRegister,
1437 : kInterpreterBytecodeOffsetRegister, times_1, 0));
1438 :
1439 : // Advance to the next bytecode.
1440 56 : Label if_return;
1441 : AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1442 : kInterpreterBytecodeOffsetRegister, rbx, rcx,
1443 56 : &if_return);
1444 :
1445 : // Convert new bytecode offset to a Smi and save in the stackframe.
1446 56 : __ SmiTag(rbx, kInterpreterBytecodeOffsetRegister);
1447 112 : __ movq(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);
1448 :
1449 56 : Generate_InterpreterEnterBytecode(masm);
1450 :
1451 : // We should never take the if_return path.
1452 56 : __ bind(&if_return);
1453 56 : __ Abort(AbortReason::kInvalidBytecodeAdvance);
1454 56 : }
1455 :
     : // Re-enters the interpreter at the current on-frame bytecode offset,
     : // without advancing it first (contrast with ...EnterBytecodeAdvance).
1456 56 : void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1457 56 : Generate_InterpreterEnterBytecode(masm);
1458 56 : }
1459 :
     : // Attempts to instantiate an asm.js module via Runtime::kInstantiateAsmJs,
     : // padding/truncating the caller's arguments to the three asm.js slots
     : // (stdlib, foreign, heap). On success, returns the instantiated module and
     : // unwinds the caller's arguments; on failure, tail-calls the function's
     : // current code (reset to CompileLazy) to execute it as regular JS.
1460 56 : void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1461 : // ----------- S t a t e -------------
1462 : // -- rax : argument count (preserved for callee)
1463 : // -- rdx : new target (preserved for callee)
1464 : // -- rdi : target function (preserved for callee)
1465 : // -----------------------------------
1466 56 : Label failed;
1467 : {
1468 56 : FrameScope scope(masm, StackFrame::INTERNAL);
1469 : // Preserve argument count for later compare.
1470 56 : __ movq(rcx, rax);
1471 : // Push the number of arguments to the callee.
1472 56 : __ SmiTag(rax, rax);
1473 56 : __ Push(rax);
1474 : // Push a copy of the target function and the new target.
1475 56 : __ Push(rdi);
1476 56 : __ Push(rdx);
1477 :
1478 : // The function.
1479 56 : __ Push(rdi);
1480 : // Copy arguments from caller (stdlib, foreign, heap).
     : // This loop is unrolled at code-generation time: for each possible
     : // argument count j in [0, 3], push the j caller arguments followed by
     : // undefined for the remaining 3 - j slots. j == 3 is the fallthrough
     : // case (also taken for counts > 3, extra arguments are ignored).
1481 56 : Label args_done;
1482 504 : for (int j = 0; j < 4; ++j) {
1483 224 : Label over;
1484 224 : if (j < 3) {
1485 : __ cmpq(rcx, Immediate(j));
1486 168 : __ j(not_equal, &over, Label::kNear);
1487 : }
1488 560 : for (int i = j - 1; i >= 0; --i) {
1489 672 : __ Push(Operand(rbp, StandardFrameConstants::kCallerSPOffset +
1490 336 : i * kSystemPointerSize));
1491 : }
1492 896 : for (int i = 0; i < 3 - j; ++i) {
1493 336 : __ PushRoot(RootIndex::kUndefinedValue);
1494 : }
1495 224 : if (j < 3) {
1496 168 : __ jmp(&args_done, Label::kNear);
1497 168 : __ bind(&over);
1498 : }
1499 : }
1500 56 : __ bind(&args_done);
1501 :
1502 : // Call runtime, on success unwind frame, and parent frame.
1503 : __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1504 : // A smi 0 is returned on failure, an object on success.
1505 56 : __ JumpIfSmi(rax, &failed, Label::kNear);
1506 :
1507 56 : __ Drop(2);
1508 56 : __ Pop(rcx);
1509 56 : __ SmiUntag(rcx, rcx);
1510 : scope.GenerateLeaveFrame();
1511 :
     : // Drop the caller's arguments plus receiver (rcx + 1 slots) while
     : // keeping the return address on top.
1512 : __ PopReturnAddressTo(rbx);
1513 : __ incq(rcx);
1514 112 : __ leaq(rsp, Operand(rsp, rcx, times_system_pointer_size, 0));
1515 : __ PushReturnAddressFrom(rbx);
1516 56 : __ ret(0);
1517 :
1518 56 : __ bind(&failed);
1519 : // Restore target function and new target.
1520 56 : __ Pop(rdx);
1521 56 : __ Pop(rdi);
1522 56 : __ Pop(rax);
1523 56 : __ SmiUntag(rax, rax);
1524 : }
1525 : // On failure, tail call back to regular js by re-calling the function
1526 : // which has been reset to the compile lazy builtin.
1527 56 : __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
1528 56 : __ JumpCodeObject(rcx);
1529 56 : }
1530 :
1531 : namespace {
1532 224 : void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1533 : bool java_script_builtin,
1534 : bool with_result) {
1535 224 : const RegisterConfiguration* config(RegisterConfiguration::Default());
1536 : int allocatable_register_count = config->num_allocatable_general_registers();
1537 224 : if (with_result) {
1538 : // Overwrite the hole inserted by the deoptimizer with the return value from
1539 : // the LAZY deopt point.
1540 224 : __ movq(
1541 : Operand(rsp, config->num_allocatable_general_registers() *
1542 : kSystemPointerSize +
1543 : BuiltinContinuationFrameConstants::kFixedFrameSize),
1544 : rax);
1545 : }
1546 2912 : for (int i = allocatable_register_count - 1; i >= 0; --i) {
1547 : int code = config->GetAllocatableGeneralCode(i);
1548 2688 : __ popq(Register::from_code(code));
1549 2688 : if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1550 112 : __ SmiUntag(Register::from_code(code), Register::from_code(code));
1551 : }
1552 : }
1553 448 : __ movq(
1554 : rbp,
1555 : Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1556 : const int offsetToPC =
1557 : BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp -
1558 : kSystemPointerSize;
1559 224 : __ popq(Operand(rsp, offsetToPC));
1560 224 : __ Drop(offsetToPC / kSystemPointerSize);
1561 224 : __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
1562 224 : __ Ret();
1563 224 : }
1564 : } // namespace
1565 :
// Deoptimizer continuation into a code-stub builtin; no JS argument count to
// untag and no saved return value to restore.
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}
1569 :
// Deoptimizer continuation into a code-stub builtin that also forwards the
// return value of the LAZY deopt point (in rax) into the continuation frame.
void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}
1574 :
// Deoptimizer continuation into a JavaScript builtin (argument-count register
// is restored as a Smi and untagged); no saved return value.
void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}
1578 :
// Deoptimizer continuation into a JavaScript builtin that also forwards the
// return value of the LAZY deopt point (in rax) into the continuation frame.
void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}
1583 :
// Notifies the runtime that deoptimization happened, then reloads the
// interpreter accumulator (rax) from the stack slot the deoptimizer prepared
// and pops that slot before returning.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // The accumulator register doubles as the return-value register here.
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
  __ movq(rax, Operand(rsp, kPCOnStackSize));
  __ ret(1 * kSystemPointerSize);  // Remove rax.
}
1596 :
1597 : // static
// ES #sec-function.prototype.apply: repackages the stack so the receiver and
// thisArg (defaulting to undefined) become the new call frame, then tail-calls
// CallWithArrayLike with argArray, or Call with zero args when argArray is
// null/undefined.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argArray
  //  -- rsp[16] : thisArg
  //  -- rsp[24] : receiver
  // -----------------------------------

  // 1. Load receiver into rdi, argArray into rbx (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    // thisArg (rdx) and argArray (rbx) default to undefined.
    __ LoadRoot(rdx, RootIndex::kUndefinedValue);
    __ movq(rbx, rdx);
    __ movq(rdi, args.GetReceiverOperand());
    __ testq(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movq(rdx, args.GetArgumentOperand(1));
      __ cmpq(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movq(rbx, args.GetArgumentOperand(2));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    // Pop all arguments plus the receiver, then push thisArg as the new
    // receiver.
    __ PopReturnAddressTo(rcx);
    __ leaq(rsp,
            Operand(rsp, rax, times_system_pointer_size, kSystemPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx    : argArray
  //  -- rdi    : receiver
  //  -- rsp[0] : return address
  //  -- rsp[8] : thisArg
  // -----------------------------------

  // 2. We don't need to check explicitly for callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments, Label::kNear);
  __ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver. Since we did not create a frame for
  // Function.prototype.apply() yet, we use a normal Call builtin here.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}
1662 :
1663 : // static
// ES #sec-function.prototype.call: turns the original receiver into the
// callee and the first argument into the new receiver by shifting the whole
// argument block down one slot, then tail-calls the Call builtin.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]           : Return address
  //   rsp[8]           : Argument n
  //   rsp[16]          : Argument n-1
  //  ...
  //   rsp[8 * n]       : Argument 1
  //   rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    // No arguments: push undefined so the shift below has something to use
    // as the new receiver.
    __ PopReturnAddressTo(rbx);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ PushReturnAddressFrom(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movq(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ movq(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movq(rbx, args.GetArgumentOperand(1));
    __ movq(args.GetArgumentOperand(0), rbx);
    __ decq(rcx);
    __ j(not_zero, &loop);            // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  // Since we did not create a frame for Function.prototype.call() yet,
  // we use a normal Call builtin here.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1714 :
// ES #sec-reflect.apply: extracts target, thisArgument and argumentsList from
// the stack (missing ones default to undefined), rebuilds the frame with
// thisArgument as receiver, and tail-calls CallWithArrayLike.
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argumentsList
  //  -- rsp[16] : thisArgument
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    // All three operands default to undefined.
    __ LoadRoot(rdi, RootIndex::kUndefinedValue);
    __ movq(rdx, rdi);
    __ movq(rbx, rdi);
    __ cmpq(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movq(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);          // only one arg present
    __ movq(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpq(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movq(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    // Pop all arguments plus the receiver; push thisArgument as receiver.
    __ PopReturnAddressTo(rcx);
    __ leaq(rsp,
            Operand(rsp, rax, times_system_pointer_size, kSystemPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx    : argumentsList
  //  -- rdi    : target
  //  -- rsp[0] : return address
  //  -- rsp[8] : thisArgument
  // -----------------------------------

  // 2. We don't need to check explicitly for callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}
1765 :
// ES #sec-reflect.construct: extracts target, argumentsList and new.target
// (new.target defaults to target), pushes undefined as the receiver, and
// tail-calls ConstructWithArrayLike.
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : new.target (optional)
  //  -- rsp[16] : argumentsList
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
  // new.target into rdx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    // target, new.target and argumentsList all default to undefined.
    __ LoadRoot(rdi, RootIndex::kUndefinedValue);
    __ movq(rdx, rdi);
    __ movq(rbx, rdi);
    __ cmpq(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movq(rdi, args.GetArgumentOperand(1));  // target
    __ movq(rdx, rdi);  // new.target defaults to target
    __ j(equal, &done, Label::kNear);
    __ movq(rbx, args.GetArgumentOperand(2));  // argumentsList
    __ cmpq(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movq(rdx, args.GetArgumentOperand(3));  // new.target
    __ bind(&done);
    // Pop all arguments plus the receiver; push undefined as the receiver
    // (the constructor allocates the actual receiver object).
    __ PopReturnAddressTo(rcx);
    __ leaq(rsp,
            Operand(rsp, rax, times_system_pointer_size, kSystemPointerSize));
    __ PushRoot(RootIndex::kUndefinedValue);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx    : argumentsList
  //  -- rdx    : new.target
  //  -- rdi    : target
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver (undefined)
  // -----------------------------------

  // 2. We don't need to check explicitly for constructor target here,
  // since that's the first thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 3. We don't need to check explicitly for constructor new.target here,
  // since that's the second thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 4. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}
1823 :
// Entry point for the InternalArray constructor called as a normal function:
// optionally sanity-checks the initial map in debug builds, then delegates to
// the InternalArrayConstructorImpl builtin.
void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadTaggedPointerField(
        rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi,
             AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
          RelocInfo::CODE_TARGET);
}
1849 :
// Builds an arguments-adaptor frame: saved rbp, the ARGUMENTS_ADAPTOR frame
// marker, the function, the (smi-tagged) argument count, and a padding slot.
// LeaveArgumentsAdaptorFrame() below tears this layout down again.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ SmiTag(r8, rax);
  __ Push(r8);

  __ Push(Immediate(0));  // Padding.
}
1868 :
// Tears down the frame built by EnterArgumentsAdaptorFrame() and removes the
// caller's arguments (count read back from the frame) plus the receiver from
// the stack. Clobbers rbx and rcx.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack. The extra kSystemPointerSize
  // accounts for the receiver slot.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
  __ leaq(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
  __ PushReturnAddressFrom(rcx);
}
1883 :
// Bridges calls where the actual argument count differs from the callee's
// expected count. Three paths: (1) the callee doesn't care (sentinel) — jump
// straight to its code; (2) the callee can't observe the arguments object —
// massage the stack in place without building a frame; (3) otherwise build an
// adaptor frame and copy/pad the arguments to the expected count.
// NOTE(review): the original dump contained `©` where `&copy` was
// HTML-entity-mangled; restored below.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : new target (passed through to callee)
  //  -- rdi : function (passed through to callee)
  // -----------------------------------

  Label dont_adapt_arguments, stack_overflow, skip_adapt_arguments;
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);
  __ LoadTaggedPointerField(
      rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(
      FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
      Immediate(SharedFunctionInfo::IsSafeToSkipArgumentsAdaptorBit::kMask));
  __ j(not_zero, &skip_adapt_arguments);

  // -------------------------------------------
  // Adapt arguments.
  // -------------------------------------------
  {
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);

    Label under_application, over_application, invoke;
    __ cmpq(rax, rbx);
    __ j(less, &under_application, Label::kNear);

    // Enough parameters: Actual >= expected.
    __ bind(&over_application);
    {
      // Copy receiver and all expected arguments (walking downward from the
      // caller's stack via r8).
      const int offset = StandardFrameConstants::kCallerSPOffset;
      __ leaq(r8, Operand(rbp, rax, times_system_pointer_size, offset));
      __ Set(rax, -1);  // account for receiver

      Label copy;
      __ bind(&copy);
      __ incq(rax);
      __ Push(Operand(r8, 0));
      __ subq(r8, Immediate(kSystemPointerSize));
      __ cmpq(rax, rbx);
      __ j(less, &copy);
      __ jmp(&invoke, Label::kNear);
    }

    // Too few parameters: Actual < expected.
    __ bind(&under_application);
    {
      // Copy receiver and all actual arguments.
      const int offset = StandardFrameConstants::kCallerSPOffset;
      __ leaq(r9, Operand(rbp, rax, times_system_pointer_size, offset));
      __ Set(r8, -1);  // account for receiver

      Label copy;
      __ bind(&copy);
      __ incq(r8);
      __ Push(Operand(r9, 0));
      __ subq(r9, Immediate(kSystemPointerSize));
      __ cmpq(r8, rax);
      __ j(less, &copy);

      // Fill remaining expected arguments with undefined values.
      Label fill;
      __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
      __ bind(&fill);
      __ incq(rax);
      __ Push(kScratchRegister);
      __ cmpq(rax, rbx);
      __ j(less, &fill);
    }

    // Call the entry point.
    __ bind(&invoke);
    // rax : expected number of arguments
    // rdx : new target (passed through to callee)
    // rdi : function (passed through to callee)
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
    __ CallCodeObject(rcx);

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(
        masm->pc_offset());

    // Leave frame and return.
    LeaveArgumentsAdaptorFrame(masm);
    __ ret(0);
  }

  // -------------------------------------------
  // Skip adapt arguments.
  // -------------------------------------------
  __ bind(&skip_adapt_arguments);
  {
    // The callee cannot observe the actual arguments, so it's safe to just
    // pass the expected arguments by massaging the stack appropriately. See
    // http://bit.ly/v8-faster-calls-with-arguments-mismatch for details.
    Label under_application, over_application, invoke;
    __ PopReturnAddressTo(rcx);
    __ cmpq(rax, rbx);
    __ j(less, &under_application, Label::kNear);

    __ bind(&over_application);
    {
      // Remove superfluous parameters from the stack: after the xchg, rax
      // holds the expected count (what the callee will see) and rbx the
      // actual count; rbx - rax is the number of slots to drop.
      __ xchgq(rax, rbx);
      __ subq(rbx, rax);
      __ leaq(rsp, Operand(rsp, rbx, times_system_pointer_size, 0));
      __ jmp(&invoke, Label::kNear);
    }

    __ bind(&under_application);
    {
      // Fill remaining expected arguments with undefined values.
      Label fill;
      __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
      __ bind(&fill);
      __ incq(rax);
      __ Push(kScratchRegister);
      __ cmpq(rax, rbx);
      __ j(less, &fill);
    }

    __ bind(&invoke);
    __ PushReturnAddressFrom(rcx);
  }

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ JumpCodeObject(rcx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();
  }
}
2028 :
2029 : // static
// Spreads the elements of a FixedArray (rbx, rcx elements) onto the stack as
// additional call arguments — converting holes to undefined — then tail-calls
// the given Call or Construct builtin.
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- rdi    : target
  //  -- rax    : number of parameters on the stack (not including the receiver)
  //  -- rbx    : arguments list (a FixedArray)
  //  -- rcx    : len (number of elements to push from args)
  //  -- rdx    : new.target (for [[Construct]])
  //  -- rsp[0] : return address
  // -----------------------------------
  Register scratch = r11;
  Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r12 : no_reg;

  if (masm->emit_debug_code()) {
    // Allow rbx to be a FixedArray, or a FixedDoubleArray if rcx == 0.
    Label ok, fail;
    __ AssertNotSmi(rbx);
    Register map = r9;
    __ LoadTaggedPointerField(map, FieldOperand(rbx, HeapObject::kMapOffset));
    __ CmpInstanceType(map, FIXED_ARRAY_TYPE);
    __ j(equal, &ok);
    __ CmpInstanceType(map, FIXED_DOUBLE_ARRAY_TYPE);
    __ j(not_equal, &fail);
    __ cmpl(rcx, Immediate(0));
    __ j(equal, &ok);
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  Label stack_overflow;
  Generate_StackOverflowCheck(masm, rcx, r8, &stack_overflow, Label::kNear);

  // Push additional arguments onto the stack. r9 counts pushed elements;
  // the return address is held in r8 while the stack is rearranged.
  {
    Register value = scratch;
    __ PopReturnAddressTo(r8);
    __ Set(r9, 0);
    Label done, push, loop;
    __ bind(&loop);
    __ cmpl(r9, rcx);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ LoadAnyTaggedField(
        value,
        FieldOperand(rbx, r9, times_tagged_size, FixedArray::kHeaderSize),
        decompr_scratch);
    __ CompareRoot(value, RootIndex::kTheHoleValue);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(value, RootIndex::kUndefinedValue);
    __ bind(&push);
    __ Push(value);
    __ incl(r9);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(r8);
    // New total argument count = previous count + pushed elements.
    __ addq(rax, r9);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
2097 :
2098 : // static
// Forwards the caller's own arguments (starting at index rcx, to support rest
// parameters) onto the stack as arguments for a new call, then tail-calls the
// given Call or Construct builtin. For [[Construct]] it first verifies that
// new.target is a constructor.
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (for [[Construct]] calls)
  //  -- rdi : the target to call (can be any Object)
  //  -- rcx : start index (to support rest parameters)
  // -----------------------------------

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
    __ LoadTaggedPointerField(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(Map::IsConstructorBit::kMask));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(rdx);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  // After this, rbx points at the frame holding the caller's arguments and
  // r8 holds their count.
  Label arguments_adaptor, arguments_done;
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ cmpq(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
          Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    // No adaptor frame: take the formal parameter count from the function.
    __ movq(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ LoadTaggedPointerField(
        r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset));
    __ movzxwq(
        r8, FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
    __ movq(rbx, rbp);
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Adaptor frame: the actual argument count is stored in the frame.
    __ SmiUntag(r8,
                Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ subl(r8, rcx);
  __ j(less_equal, &stack_done);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ addl(rax, r8);
      __ PopReturnAddressTo(rcx);
      __ bind(&loop);
      {
        StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
        __ Push(args.GetArgumentOperand(0));
        __ decl(r8);
        __ j(not_zero, &loop);
      }
      __ PushReturnAddressFrom(rcx);
    }
  }
  __ jmp(&stack_done, Label::kNear);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}
2179 :
2180 : // static
// ES6 section 9.2.1 [[Call]] for a JSFunction: rejects class constructors,
// converts the receiver for non-native sloppy-mode functions per the given
// ConvertReceiverMode, and tail-calls the function through
// InvokeFunctionCode (which handles any arguments-count adaption).
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);

  // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadTaggedPointerField(
      rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ j(not_zero, &class_constructor);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadTaggedPointerField(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::IsNativeBit::kMask |
                     SharedFunctionInfo::IsStrictBit::kMask));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : the shared function info.
    //  -- rdi : the function to call (checked to be a JSFunction)
    //  -- rsi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(rcx);
    } else {
      Label convert_to_object, convert_receiver;
      __ movq(rcx, args.GetReceiverOperand());
      __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      // Already a JSReceiver: no conversion needed.
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
                      Label::kNear);
        __ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(rcx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        // Argument count (smi-tagged), function, and context are preserved
        // across the ToObject call.
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(rax, rax);
        __ Push(rax);
        __ Push(rdi);
        __ movq(rax, rcx);
        __ Push(rsi);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(rsi);
        __ movq(rcx, rax);
        __ Pop(rdi);
        __ Pop(rax);
        __ SmiUntag(rax, rax);
      }
      // Reload the shared function info; rdx was clobbered above.
      __ LoadTaggedPointerField(
          rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ movq(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  //  -- rsi : the function context.
  // -----------------------------------

  __ movzxwq(
      rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(rax);
  ParameterCount expected(rbx);

  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2296 :
2297 : namespace {
2298 :
     : // Pushes the [[BoundArguments]] of the JSBoundFunction in rdi onto the
     : // stack below the arguments that are already there, and increments rax
     : // by the number of bound arguments. Shared by the Call and Construct
     : // paths for bound functions. Clobbers rbx, rcx, r12, kScratchRegister
     : // (and r11 when pointer compression is enabled); throws a stack
     : // overflow if there is not enough room for the bound arguments.
2299 112 : void Generate_PushBoundArguments(MacroAssembler* masm) {
2300 : // ----------- S t a t e -------------
2301 : // -- rax : the number of arguments (not including the receiver)
2302 : // -- rdx : new.target (only in case of [[Construct]])
2303 : // -- rdi : target (checked to be a JSBoundFunction)
2304 : // -----------------------------------
2305 :
2306 112 : Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
2307 :
2308 : // Load [[BoundArguments]] into rcx and length of that into rbx.
2309 112 : Label no_bound_arguments;
2310 112 : __ LoadTaggedPointerField(
2311 112 : rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2312 112 : __ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2313 112 : __ testl(rbx, rbx);
2314 112 : __ j(zero, &no_bound_arguments);
2315 : {
2316 : // ----------- S t a t e -------------
2317 : // -- rax : the number of arguments (not including the receiver)
2318 : // -- rdx : new.target (only in case of [[Construct]])
2319 : // -- rdi : target (checked to be a JSBoundFunction)
2320 : // -- rcx : the [[BoundArguments]] (implemented as FixedArray)
2321 : // -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
2322 : // -----------------------------------
2323 :
2324 : // Reserve stack space for the [[BoundArguments]].
2325 : {
2326 112 : Label done;
2327 224 : __ leaq(kScratchRegister, Operand(rbx, times_system_pointer_size, 0));
2328 : __ subq(rsp, kScratchRegister);
2329 : // Check the stack for overflow. We are not trying to catch interruptions
2330 : // (i.e. debug break and preemption) here, so check the "real stack
2331 : // limit".
2332 112 : __ CompareRoot(rsp, RootIndex::kRealStackLimit);
2333 112 : __ j(above_equal, &done, Label::kNear);
2334 : // Restore the stack pointer.
2335 224 : __ leaq(rsp, Operand(rsp, rbx, times_system_pointer_size, 0));
2336 : {
2337 112 : FrameScope scope(masm, StackFrame::MANUAL);
2338 112 : __ EnterFrame(StackFrame::INTERNAL);
2339 112 : __ CallRuntime(Runtime::kThrowStackOverflow);
2340 : }
2341 112 : __ bind(&done);
2342 : }
2343 :
2344 : // Adjust effective number of arguments to include return address.
2345 : __ incl(rax);
2346 :
2347 : // Relocate arguments and return address down the stack.
2348 : {
2349 112 : Label loop;
2350 112 : __ Set(rcx, 0);
2351 224 : __ leaq(rbx, Operand(rsp, rbx, times_system_pointer_size, 0));
2352 112 : __ bind(&loop);
2353 224 : __ movq(kScratchRegister,
2354 : Operand(rbx, rcx, times_system_pointer_size, 0));
2355 224 : __ movq(Operand(rsp, rcx, times_system_pointer_size, 0),
2356 : kScratchRegister);
2357 : __ incl(rcx);
2358 : __ cmpl(rcx, rax);
2359 112 : __ j(less, &loop);
2360 : }
2361 :
2362 : // Copy [[BoundArguments]] to the stack (below the arguments).
2363 : {
2364 112 : Label loop;
2365 : __ LoadTaggedPointerField(
2366 112 : rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2367 112 : __ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2368 112 : __ bind(&loop);
2369 : // Instead of doing decl(rbx) here subtract kTaggedSize from the header
2370 : // offset in order to be able to move decl(rbx) right before the loop
2371 : // condition. This is necessary in order to avoid flags corruption by
2372 : // pointer decompression code.
2373 : __ LoadAnyTaggedField(r12,
2374 : FieldOperand(rcx, rbx, times_tagged_size,
2375 : FixedArray::kHeaderSize - kTaggedSize),
2376 112 : decompr_scratch);
2377 224 : __ movq(Operand(rsp, rax, times_system_pointer_size, 0), r12);
2378 224 : __ leal(rax, Operand(rax, 1));
2379 : __ decl(rbx);
2380 112 : __ j(greater, &loop);
2381 : }
2382 :
2383 : // Adjust effective number of arguments (rax contains the number of
2384 : // arguments from the call plus return address plus the number of
2385 : // [[BoundArguments]]), so we need to subtract one for the return address.
2386 : __ decl(rax);
2387 : }
2388 112 : __ bind(&no_bound_arguments);
2389 112 : }
2390 :
2391 : } // namespace
2392 :
2393 : // static
2394 56 : void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2395 : // ----------- S t a t e -------------
2396 : // -- rax : the number of arguments (not including the receiver)
2397 : // -- rdi : the function to call (checked to be a JSBoundFunction)
2398 : // -----------------------------------
2399 56 : __ AssertBoundFunction(rdi);
2400 :
2401 56 : Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
2402 :
2403 : // Patch the receiver to [[BoundThis]].
2404 : StackArgumentsAccessor args(rsp, rax);
2405 56 : __ LoadAnyTaggedField(rbx,
2406 : FieldOperand(rdi, JSBoundFunction::kBoundThisOffset),
2407 56 : decompr_scratch);
2408 56 : __ movq(args.GetReceiverOperand(), rbx);
2409 :
2410 : // Push the [[BoundArguments]] onto the stack.
2411 56 : Generate_PushBoundArguments(masm);
2412 :
2413 : // Call the [[BoundTargetFunction]] via the Call builtin.
2414 : __ LoadTaggedPointerField(
2415 56 : rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2416 56 : __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2417 56 : RelocInfo::CODE_TARGET);
2418 56 : }
2419 :
2420 : // static
2421 168 : void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2422 : // ----------- S t a t e -------------
2423 : // -- rax : the number of arguments (not including the receiver)
2424 : // -- rdi : the target to call (can be any Object)
2425 : // -----------------------------------
2426 : StackArgumentsAccessor args(rsp, rax);
2427 :
2428 168 : Label non_callable;
2429 168 : __ JumpIfSmi(rdi, &non_callable);
2430 168 : __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2431 168 : __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2432 168 : RelocInfo::CODE_TARGET, equal);
2433 :
2434 168 : __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2435 168 : __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2436 168 : RelocInfo::CODE_TARGET, equal);
2437 :
2438 : // Check if target has a [[Call]] internal method.
2439 336 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2440 168 : Immediate(Map::IsCallableBit::kMask));
2441 168 : __ j(zero, &non_callable, Label::kNear);
2442 :
2443 : // Check if target is a proxy and call CallProxy external builtin
2444 168 : __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2445 168 : __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET,
2446 168 : equal);
2447 :
2448 : // 2. Call to something else, which might have a [[Call]] internal method (if
2449 : // not we raise an exception).
2450 :
2451 : // Overwrite the original receiver with the (original) target.
2452 : __ movq(args.GetReceiverOperand(), rdi);
2453 : // Let the "call_as_function_delegate" take care of the rest.
2454 168 : __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
2455 168 : __ Jump(masm->isolate()->builtins()->CallFunction(
2456 : ConvertReceiverMode::kNotNullOrUndefined),
2457 168 : RelocInfo::CODE_TARGET);
2458 :
2459 : // 3. Call to something that is not callable.
2460 168 : __ bind(&non_callable);
2461 : {
2462 168 : FrameScope scope(masm, StackFrame::INTERNAL);
2463 168 : __ Push(rdi);
2464 168 : __ CallRuntime(Runtime::kThrowCalledNonCallable);
2465 : }
2466 168 : }
2467 :
2468 : // static
2469 56 : void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2470 : // ----------- S t a t e -------------
2471 : // -- rax : the number of arguments (not including the receiver)
2472 : // -- rdx : the new target (checked to be a constructor)
2473 : // -- rdi : the constructor to call (checked to be a JSFunction)
2474 : // -----------------------------------
2475 56 : __ AssertConstructor(rdi);
2476 56 : __ AssertFunction(rdi);
2477 :
2478 : // Calling convention for function specific ConstructStubs require
2479 : // rbx to contain either an AllocationSite or undefined.
2480 56 : __ LoadRoot(rbx, RootIndex::kUndefinedValue);
2481 :
2482 : // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2483 : __ LoadTaggedPointerField(
2484 56 : rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2485 56 : __ testl(FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
2486 : Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2487 56 : __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2488 56 : RelocInfo::CODE_TARGET, not_zero);
2489 :
2490 56 : __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2491 56 : RelocInfo::CODE_TARGET);
2492 56 : }
2493 :
2494 : // static
2495 56 : void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2496 : // ----------- S t a t e -------------
2497 : // -- rax : the number of arguments (not including the receiver)
2498 : // -- rdx : the new target (checked to be a constructor)
2499 : // -- rdi : the constructor to call (checked to be a JSBoundFunction)
2500 : // -----------------------------------
2501 56 : __ AssertConstructor(rdi);
2502 56 : __ AssertBoundFunction(rdi);
2503 :
2504 : // Push the [[BoundArguments]] onto the stack.
2505 56 : Generate_PushBoundArguments(masm);
2506 :
2507 : // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2508 : {
2509 56 : Label done;
2510 56 : __ cmpq(rdi, rdx);
2511 56 : __ j(not_equal, &done, Label::kNear);
2512 56 : __ LoadTaggedPointerField(
2513 56 : rdx, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2514 56 : __ bind(&done);
2515 : }
2516 :
2517 : // Construct the [[BoundTargetFunction]] via the Construct builtin.
2518 : __ LoadTaggedPointerField(
2519 56 : rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2520 56 : __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2521 56 : }
2522 :
2523 : // static
2524 56 : void Builtins::Generate_Construct(MacroAssembler* masm) {
2525 : // ----------- S t a t e -------------
2526 : // -- rax : the number of arguments (not including the receiver)
2527 : // -- rdx : the new target (either the same as the constructor or
2528 : // the JSFunction on which new was invoked initially)
2529 : // -- rdi : the constructor to call (can be any Object)
2530 : // -----------------------------------
2531 : StackArgumentsAccessor args(rsp, rax);
2532 :
2533 : // Check if target is a Smi.
2534 56 : Label non_constructor;
2535 56 : __ JumpIfSmi(rdi, &non_constructor);
2536 :
2537 : // Check if target has a [[Construct]] internal method.
2538 56 : __ LoadTaggedPointerField(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
2539 112 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2540 56 : Immediate(Map::IsConstructorBit::kMask));
2541 56 : __ j(zero, &non_constructor);
2542 :
2543 : // Dispatch based on instance type.
2544 56 : __ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
2545 56 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2546 56 : RelocInfo::CODE_TARGET, equal);
2547 :
2548 : // Only dispatch to bound functions after checking whether they are
2549 : // constructors.
2550 56 : __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2551 56 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2552 56 : RelocInfo::CODE_TARGET, equal);
2553 :
2554 : // Only dispatch to proxies after checking whether they are constructors.
2555 56 : __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2556 56 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy), RelocInfo::CODE_TARGET,
2557 56 : equal);
2558 :
2559 : // Called Construct on an exotic Object with a [[Construct]] internal method.
2560 : {
2561 : // Overwrite the original receiver with the (original) target.
2562 : __ movq(args.GetReceiverOperand(), rdi);
2563 : // Let the "call_as_constructor_delegate" take care of the rest.
2564 56 : __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
2565 56 : __ Jump(masm->isolate()->builtins()->CallFunction(),
2566 56 : RelocInfo::CODE_TARGET);
2567 : }
2568 :
2569 : // Called Construct on an Object that doesn't have a [[Construct]] internal
2570 : // method.
2571 56 : __ bind(&non_constructor);
2572 56 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2573 56 : RelocInfo::CODE_TARGET);
2574 56 : }
2575 :
     : // On-stack replacement trampoline: asks the runtime to compile an
     : // optimized code object for the function in the current JavaScript
     : // frame, then redirects the return address to that code's OSR entry
     : // point (or simply returns if no code was produced).
2576 56 : void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
2577 : // Lookup the function in the JavaScript frame.
2578 112 : __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2579 112 : __ movq(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
2580 :
2581 : {
2582 56 : FrameScope scope(masm, StackFrame::INTERNAL);
2583 : // Pass function as argument.
2584 56 : __ Push(rax);
2585 56 : __ CallRuntime(Runtime::kCompileForOnStackReplacement);
2586 : }
2587 :
2588 56 : Label skip;
2589 : // If the code object is null, just return to the caller.
2590 : __ testq(rax, rax);
2591 56 : __ j(not_equal, &skip, Label::kNear);
2592 56 : __ ret(0);
2593 :
2594 56 : __ bind(&skip);
2595 :
2596 : // Drop the handler frame that is sitting on top of the actual
2597 : // JavaScript frame. This is the case when OSR is triggered from bytecode.
2598 56 : __ leave();
2599 :
2600 : // Load deoptimization data from the code object.
2601 : __ LoadTaggedPointerField(rbx,
2602 56 : FieldOperand(rax, Code::kDeoptimizationDataOffset));
2603 :
2604 : // Load the OSR entrypoint offset from the deoptimization data.
2605 : __ SmiUntagField(
2606 : rbx, FieldOperand(rbx, FixedArray::OffsetOfElementAt(
2607 56 : DeoptimizationData::kOsrPcOffsetIndex)));
2608 :
2609 : // Compute the target address = code_obj + header_size + osr_offset
2610 : __ leaq(rax, FieldOperand(rax, rbx, times_1, Code::kHeaderSize));
2611 :
2612 : // Overwrite the return address on the stack.
2613 : __ movq(StackOperandForReturnAddress(0), rax);
2614 :
2615 : // And "return" to the OSR entry point of the function.
2616 56 : __ ret(0);
2617 56 : }
2618 :
     : // Lazy-compilation trampoline for wasm functions: saves all wasm
     : // parameter registers, calls Runtime::kWasmCompileLazy to compile the
     : // function identified by the index on the stack, restores the
     : // registers, and jumps to the freshly compiled code's entrypoint.
2619 56 : void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2620 : // The function index was pushed to the stack by the caller as int32.
2621 56 : __ Pop(r11);
2622 : // Convert to Smi for the runtime call.
2623 56 : __ SmiTag(r11, r11);
2624 : {
2625 : HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2626 56 : FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2627 :
2628 : // Save all parameter registers (see wasm-linkage.cc). They might be
2629 : // overwritten in the runtime call below. We don't have any callee-saved
2630 : // registers in wasm, so no need to store anything else.
2631 : static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
2632 : arraysize(wasm::kGpParamRegisters),
2633 : "frame size mismatch");
2634 728 : for (Register reg : wasm::kGpParamRegisters) {
2635 336 : __ Push(reg);
2636 : }
2637 : static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
2638 : arraysize(wasm::kFpParamRegisters),
2639 : "frame size mismatch");
     : // FP param registers are saved as full 128-bit values in manually
     : // reserved stack space, since there is no push for XMM registers.
2640 56 : __ subq(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
2641 : int offset = 0;
2642 728 : for (DoubleRegister reg : wasm::kFpParamRegisters) {
2643 336 : __ movdqu(Operand(rsp, offset), reg);
2644 336 : offset += kSimd128Size;
2645 : }
2646 :
2647 : // Push the WASM instance as an explicit argument to WasmCompileLazy.
2648 56 : __ Push(kWasmInstanceRegister);
2649 : // Push the function index as second argument.
2650 56 : __ Push(r11);
2651 : // Load the correct CEntry builtin from the instance object.
2652 : __ LoadTaggedPointerField(
2653 : rcx, FieldOperand(kWasmInstanceRegister,
2654 56 : WasmInstanceObject::kCEntryStubOffset));
2655 : // Initialize the JavaScript context with 0. CEntry will use it to
2656 : // set the current context on the isolate.
2657 56 : __ Move(kContextRegister, Smi::zero());
2658 56 : __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, rcx);
2659 : // The entrypoint address is the return value.
2660 : __ movq(r11, kReturnRegister0);
2661 :
2662 : // Restore registers.
2663 392 : for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
2664 336 : offset -= kSimd128Size;
2665 336 : __ movdqu(reg, Operand(rsp, offset));
2666 : }
2667 : DCHECK_EQ(0, offset);
2668 : __ addq(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
2669 392 : for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
2670 336 : __ Pop(reg);
2671 : }
2672 : }
2673 : // Finally, jump to the entrypoint.
2674 56 : __ jmp(r11);
2675 56 : }
2676 :
     : // The CEntry stub: transitions from JavaScript to C++, calls the C
     : // function in rbx, and either returns the result to the JS caller or,
     : // if the C function returned the exception sentinel, unwinds to the
     : // pending exception handler. Handles both the System V and Windows x64
     : // calling conventions and result sizes of up to two machine words.
2677 560 : void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2678 : SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2679 : bool builtin_exit_frame) {
2680 : // rax: number of arguments including receiver
2681 : // rbx: pointer to C function (C callee-saved)
2682 : // rbp: frame pointer of calling JS frame (restored after C call)
2683 : // rsp: stack pointer (restored after C call)
2684 : // rsi: current context (restored)
2685 : //
2686 : // If argv_mode == kArgvInRegister:
2687 : // r15: pointer to the first argument
2688 :
2689 : #ifdef _WIN64
2690 : // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
2691 : // stack to be aligned to 16 bytes. It only allows a single-word to be
2692 : // returned in register rax. Larger return sizes must be written to an address
2693 : // passed as a hidden first argument.
2694 : const Register kCCallArg0 = rcx;
2695 : const Register kCCallArg1 = rdx;
2696 : const Register kCCallArg2 = r8;
2697 : const Register kCCallArg3 = r9;
2698 : const int kArgExtraStackSpace = 2;
2699 : const int kMaxRegisterResultSize = 1;
2700 : #else
2701 : // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
2702 : // are returned in rax, and a struct of two pointers are returned in rax+rdx.
2703 : // Larger return sizes must be written to an address passed as a hidden first
2704 : // argument.
2705 : const Register kCCallArg0 = rdi;
2706 : const Register kCCallArg1 = rsi;
2707 : const Register kCCallArg2 = rdx;
2708 560 : const Register kCCallArg3 = rcx;
2709 : const int kArgExtraStackSpace = 0;
2710 : const int kMaxRegisterResultSize = 2;
2711 : #endif // _WIN64
2712 :
2713 : // Enter the exit frame that transitions from JavaScript to C++.
2714 : int arg_stack_space =
2715 : kArgExtraStackSpace +
2716 560 : (result_size <= kMaxRegisterResultSize ? 0 : result_size);
2717 560 : if (argv_mode == kArgvInRegister) {
2718 : DCHECK(save_doubles == kDontSaveFPRegs);
2719 : DCHECK(!builtin_exit_frame);
2720 112 : __ EnterApiExitFrame(arg_stack_space);
2721 : // Move argc into r14 (argv is already in r15).
2722 112 : __ movq(r14, rax);
2723 : } else {
2724 448 : __ EnterExitFrame(
2725 : arg_stack_space, save_doubles == kSaveFPRegs,
2726 448 : builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2727 : }
2728 :
2729 : // rbx: pointer to builtin function (C callee-saved).
2730 : // rbp: frame pointer of exit frame (restored after C call).
2731 : // rsp: stack pointer (restored after C call).
2732 : // r14: number of arguments including receiver (C callee-saved).
2733 : // r15: argv pointer (C callee-saved).
2734 :
2735 : // Check stack alignment.
2736 560 : if (FLAG_debug_code) {
2737 0 : __ CheckStackAlignment();
2738 : }
2739 :
2740 : // Call C function. The arguments object will be created by stubs declared by
2741 : // DECLARE_RUNTIME_FUNCTION().
2742 560 : if (result_size <= kMaxRegisterResultSize) {
2743 : // Pass a pointer to the Arguments object as the first argument.
2744 : // Return result in single register (rax), or a register pair (rax, rdx).
2745 560 : __ movq(kCCallArg0, r14); // argc.
2746 : __ movq(kCCallArg1, r15); // argv.
2747 560 : __ Move(kCCallArg2, ExternalReference::isolate_address(masm->isolate()));
2748 : } else {
2749 : DCHECK_LE(result_size, 2);
2750 : // Pass a pointer to the result location as the first argument.
2751 0 : __ leaq(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
2752 : // Pass a pointer to the Arguments object as the second argument.
2753 : __ movq(kCCallArg1, r14); // argc.
2754 : __ movq(kCCallArg2, r15); // argv.
2755 0 : __ Move(kCCallArg3, ExternalReference::isolate_address(masm->isolate()));
2756 : }
2757 560 : __ call(rbx);
2758 :
2759 560 : if (result_size > kMaxRegisterResultSize) {
2760 : // Read result values stored on stack. Result is stored
2761 : // above the two Arguments object slots on Win64.
2762 : DCHECK_LE(result_size, 2);
2763 : __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
2764 : __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
2765 : }
2766 : // Result is in rax or rdx:rax - do not destroy these registers!
2767 :
2768 : // Check result for exception sentinel.
2769 560 : Label exception_returned;
2770 560 : __ CompareRoot(rax, RootIndex::kException);
2771 560 : __ j(equal, &exception_returned);
2772 :
2773 : // Check that there is no pending exception, otherwise we
2774 : // should have returned the exception sentinel.
2775 560 : if (FLAG_debug_code) {
2776 0 : Label okay;
2777 0 : __ LoadRoot(r14, RootIndex::kTheHoleValue);
2778 : ExternalReference pending_exception_address = ExternalReference::Create(
2779 0 : IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2780 : Operand pending_exception_operand =
2781 0 : masm->ExternalReferenceAsOperand(pending_exception_address);
2782 0 : __ cmpq(r14, pending_exception_operand);
2783 0 : __ j(equal, &okay, Label::kNear);
2784 0 : __ int3();
2785 0 : __ bind(&okay);
2786 : }
2787 :
2788 : // Exit the JavaScript to C++ exit frame.
2789 560 : __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
2790 560 : __ ret(0);
2791 :
2792 : // Handling of exception.
2793 560 : __ bind(&exception_returned);
2794 :
2795 : ExternalReference pending_handler_context_address = ExternalReference::Create(
2796 560 : IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2797 : ExternalReference pending_handler_entrypoint_address =
2798 : ExternalReference::Create(
2799 560 : IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2800 : ExternalReference pending_handler_fp_address = ExternalReference::Create(
2801 560 : IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2802 : ExternalReference pending_handler_sp_address = ExternalReference::Create(
2803 560 : IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2804 :
2805 : // Ask the runtime for help to determine the handler. This will set rax to
2806 : // contain the current pending exception, don't clobber it.
2807 : ExternalReference find_handler =
2808 560 : ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2809 : {
2810 560 : FrameScope scope(masm, StackFrame::MANUAL);
2811 : __ movq(arg_reg_1, Immediate(0)); // argc.
2812 : __ movq(arg_reg_2, Immediate(0)); // argv.
2813 560 : __ Move(arg_reg_3, ExternalReference::isolate_address(masm->isolate()));
2814 560 : __ PrepareCallCFunction(3);
2815 560 : __ CallCFunction(find_handler, 3);
2816 : }
2817 : // Retrieve the handler context, SP and FP.
2818 560 : __ movq(rsi,
2819 : masm->ExternalReferenceAsOperand(pending_handler_context_address));
2820 560 : __ movq(rsp, masm->ExternalReferenceAsOperand(pending_handler_sp_address));
2821 560 : __ movq(rbp, masm->ExternalReferenceAsOperand(pending_handler_fp_address));
2822 :
2823 : // If the handler is a JS frame, restore the context to the frame. Note that
2824 : // the context will be set to (rsi == 0) for non-JS frames.
2825 560 : Label skip;
2826 : __ testq(rsi, rsi);
2827 560 : __ j(zero, &skip, Label::kNear);
2828 1120 : __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
2829 560 : __ bind(&skip);
2830 :
2831 : // Reset the masking register. This is done independent of the underlying
2832 : // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
2833 : // with both configurations. It is safe to always do this, because the
2834 : // underlying register is caller-saved and can be arbitrarily clobbered.
2835 560 : __ ResetSpeculationPoisonRegister();
2836 :
2837 : // Compute the handler entry address and jump to it.
2838 560 : __ movq(rdi,
2839 : masm->ExternalReferenceAsOperand(pending_handler_entrypoint_address));
2840 560 : __ jmp(rdi);
2841 560 : }
2842 :
     : // Truncates the double stored at rsp[kArgumentOffset] to a 32-bit
     : // integer and writes the result back into the same stack slot. The
     : // slow path handles exponents too large for cvttsd2si by shifting the
     : // mantissa bits directly, so the result matches JS ToInt32-style
     : // modulo-2^32 truncation even for out-of-range doubles.
2843 56 : void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2844 56 : Label check_negative, process_64_bits, done;
2845 :
2846 : // Account for return address and saved regs.
2847 : const int kArgumentOffset = 4 * kSystemPointerSize;
2848 :
2849 56 : MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
2850 : MemOperand exponent_operand(
2851 56 : MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));
2852 :
2853 : // The result is returned on the stack.
2854 56 : MemOperand return_operand = mantissa_operand;
2855 :
2856 : Register scratch1 = rbx;
2857 :
2858 : // Since we must use rcx for shifts below, use some other register (rax)
2859 : // to calculate the result if ecx is the requested return register.
2860 : Register result_reg = rax;
2861 : // Save ecx if it isn't the return register and therefore volatile, or if it
2862 : // is the return register, then save the temp register we use in its stead
2863 : // for the result.
2864 56 : Register save_reg = rax;
2865 56 : __ pushq(rcx);
2866 56 : __ pushq(scratch1);
2867 56 : __ pushq(save_reg);
2868 :
2869 : __ movl(scratch1, mantissa_operand);
2870 : __ Movsd(kScratchDoubleReg, mantissa_operand);
2871 : __ movl(rcx, exponent_operand);
2872 :
     : // Extract the unbiased exponent into result_reg.
2873 : __ andl(rcx, Immediate(HeapNumber::kExponentMask));
2874 : __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
2875 112 : __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
2876 : __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
2877 56 : __ j(below, &process_64_bits, Label::kNear);
2878 :
2879 : // Result is entirely in lower 32-bits of mantissa
2880 : int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
2881 : __ subl(rcx, Immediate(delta));
2882 : __ xorl(result_reg, result_reg);
2883 : __ cmpl(rcx, Immediate(31));
     : // Exponent so large that all result bits are shifted out: answer is 0.
2884 56 : __ j(above, &done, Label::kNear);
2885 : __ shll_cl(scratch1);
2886 56 : __ jmp(&check_negative, Label::kNear);
2887 :
     : // Exponent small enough that hardware truncation is exact.
2888 56 : __ bind(&process_64_bits);
2889 56 : __ Cvttsd2siq(result_reg, kScratchDoubleReg);
2890 56 : __ jmp(&done, Label::kNear);
2891 :
2892 : // If the double was negative, negate the integer result.
2893 56 : __ bind(&check_negative);
2894 : __ movl(result_reg, scratch1);
2895 : __ negl(result_reg);
2896 56 : __ cmpl(exponent_operand, Immediate(0));
2897 56 : __ cmovl(greater, result_reg, scratch1);
2898 :
2899 : // Restore registers
2900 56 : __ bind(&done);
2901 : __ movl(return_operand, result_reg);
2902 56 : __ popq(save_reg);
2903 56 : __ popq(scratch1);
2904 56 : __ popq(rcx);
2905 56 : __ ret(0);
2906 56 : }
2907 :
     : // Constructor entry for internal (hidden) arrays: after debug-only
     : // sanity checks on the constructor's initial map, elements kind and
     : // argument count, tail-calls the zero-argument packed-elements
     : // internal array constructor stub.
2908 56 : void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
2909 : // ----------- S t a t e -------------
2910 : // -- rax : argc
2911 : // -- rdi : constructor
2912 : // -- rsp[0] : return address
2913 : // -- rsp[8] : last argument
2914 : // -----------------------------------
2915 :
2916 56 : if (FLAG_debug_code) {
2917 : // The array construct code is only set for the global and natives
2918 : // builtin Array functions which always have maps.
2919 :
2920 : // Initial map for the builtin Array function should be a map.
2921 0 : __ LoadTaggedPointerField(
2922 0 : rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
2923 : // Will both indicate a nullptr and a Smi.
2924 : STATIC_ASSERT(kSmiTag == 0);
2925 0 : Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
2926 0 : __ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
2927 0 : __ CmpObjectType(rcx, MAP_TYPE, rcx);
2928 0 : __ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
2929 :
2930 : // Figure out the right elements kind
2931 : __ LoadTaggedPointerField(
2932 0 : rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
2933 :
2934 : // Load the map's "bit field 2" into |result|. We only need the first byte,
2935 : // but the following masking takes care of that anyway.
2936 0 : __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset));
2937 : // Retrieve elements_kind from bit field 2.
2938 0 : __ DecodeField<Map::ElementsKindBits>(rcx);
2939 :
2940 : // Initial elements kind should be packed elements.
2941 : __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
2942 0 : __ Assert(equal, AbortReason::kInvalidElementsKindForInternalPackedArray);
2943 :
2944 : // No arguments should be passed.
2945 : __ testq(rax, rax);
2946 0 : __ Assert(zero, AbortReason::kWrongNumberOfArgumentsForInternalPackedArray);
2947 : }
2948 :
2949 56 : __ Jump(
2950 : BUILTIN_CODE(masm->isolate(), InternalArrayNoArgumentConstructor_Packed),
2951 56 : RelocInfo::CODE_TARGET);
2952 56 : }
2953 :
2954 : namespace {
2955 :
2956 : int Offset(ExternalReference ref0, ExternalReference ref1) {
2957 224 : int64_t offset = (ref0.address() - ref1.address());
2958 : // Check that fits into int.
2959 : DCHECK(static_cast<int>(offset) == offset);
2960 224 : return static_cast<int>(offset);
2961 : }
2962 :
2963 : // Calls an API function. Allocates HandleScope, extracts returned value
2964 : // from handle and propagates exceptions. Clobbers r14, r15, rbx and
2965 : // caller-save registers. Restores context. On return removes
2966 : // stack_space * kSystemPointerSize (GCed).
2967 112 : void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address,
2968 : ExternalReference thunk_ref,
2969 : Register thunk_last_arg, int stack_space,
2970 : Operand* stack_space_operand,
2971 : Operand return_value_operand) {
2972 112 : Label prologue;
2973 112 : Label promote_scheduled_exception;
2974 112 : Label delete_allocated_handles;
2975 112 : Label leave_exit_frame;
2976 :
2977 : Isolate* isolate = masm->isolate();
2978 : Factory* factory = isolate->factory();
2979 : ExternalReference next_address =
2980 112 : ExternalReference::handle_scope_next_address(isolate);
2981 : const int kNextOffset = 0;
2982 112 : const int kLimitOffset = Offset(
2983 : ExternalReference::handle_scope_limit_address(isolate), next_address);
2984 112 : const int kLevelOffset = Offset(
2985 : ExternalReference::handle_scope_level_address(isolate), next_address);
2986 : ExternalReference scheduled_exception_address =
2987 112 : ExternalReference::scheduled_exception_address(isolate);
2988 :
2989 : DCHECK(rdx == function_address || r8 == function_address);
2990 : // Allocate HandleScope in callee-save registers.
2991 : Register prev_next_address_reg = r14;
2992 : Register prev_limit_reg = rbx;
2993 112 : Register base_reg = r15;
2994 112 : __ Move(base_reg, next_address);
2995 224 : __ movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
2996 224 : __ movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
2997 112 : __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
2998 :
2999 112 : if (FLAG_log_timer_events) {
3000 0 : FrameScope frame(masm, StackFrame::MANUAL);
3001 : __ PushSafepointRegisters();
3002 0 : __ PrepareCallCFunction(1);
3003 0 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
3004 0 : __ CallCFunction(ExternalReference::log_enter_external_function(), 1);
3005 : __ PopSafepointRegisters();
3006 : }
3007 :
3008 112 : Label profiler_disabled;
3009 112 : Label end_profiler_check;
3010 112 : __ Move(rax, ExternalReference::is_profiling_address(isolate));
3011 224 : __ cmpb(Operand(rax, 0), Immediate(0));
3012 112 : __ j(zero, &profiler_disabled);
3013 :
3014 : // Third parameter is the address of the actual getter function.
3015 112 : __ Move(thunk_last_arg, function_address);
3016 112 : __ Move(rax, thunk_ref);
3017 112 : __ jmp(&end_profiler_check);
3018 :
3019 112 : __ bind(&profiler_disabled);
3020 : // Call the api function!
3021 112 : __ Move(rax, function_address);
3022 :
3023 112 : __ bind(&end_profiler_check);
3024 :
3025 : // Call the api function!
3026 112 : __ call(rax);
3027 :
3028 112 : if (FLAG_log_timer_events) {
3029 0 : FrameScope frame(masm, StackFrame::MANUAL);
3030 : __ PushSafepointRegisters();
3031 0 : __ PrepareCallCFunction(1);
3032 0 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
3033 0 : __ CallCFunction(ExternalReference::log_leave_external_function(), 1);
3034 : __ PopSafepointRegisters();
3035 : }
3036 :
3037 : // Load the value from ReturnValue
3038 : __ movq(rax, return_value_operand);
3039 112 : __ bind(&prologue);
3040 :
3041 : // No more valid handles (the result handle was the last one). Restore
3042 : // previous handle scope.
3043 112 : __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
3044 224 : __ movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
3045 112 : __ cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
3046 112 : __ j(not_equal, &delete_allocated_handles);
3047 :
3048 : // Leave the API exit frame.
3049 112 : __ bind(&leave_exit_frame);
3050 112 : if (stack_space_operand != nullptr) {
3051 : DCHECK_EQ(stack_space, 0);
3052 : __ movq(rbx, *stack_space_operand);
3053 : }
3054 112 : __ LeaveApiExitFrame();
3055 :
3056 : // Check if the function scheduled an exception.
3057 112 : __ Move(rdi, scheduled_exception_address);
3058 112 : __ Cmp(Operand(rdi, 0), factory->the_hole_value());
3059 112 : __ j(not_equal, &promote_scheduled_exception);
3060 :
3061 : #if DEBUG
3062 : // Check if the function returned a valid JavaScript value.
3063 : Label ok;
3064 : Register return_value = rax;
3065 : Register map = rcx;
3066 :
3067 : __ JumpIfSmi(return_value, &ok, Label::kNear);
3068 : __ LoadTaggedPointerField(map,
3069 : FieldOperand(return_value, HeapObject::kMapOffset));
3070 :
3071 : __ CmpInstanceType(map, LAST_NAME_TYPE);
3072 : __ j(below_equal, &ok, Label::kNear);
3073 :
3074 : __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
3075 : __ j(above_equal, &ok, Label::kNear);
3076 :
3077 : __ CompareRoot(map, RootIndex::kHeapNumberMap);
3078 : __ j(equal, &ok, Label::kNear);
3079 :
3080 : __ CompareRoot(return_value, RootIndex::kUndefinedValue);
3081 : __ j(equal, &ok, Label::kNear);
3082 :
3083 : __ CompareRoot(return_value, RootIndex::kTrueValue);
3084 : __ j(equal, &ok, Label::kNear);
3085 :
3086 : __ CompareRoot(return_value, RootIndex::kFalseValue);
3087 : __ j(equal, &ok, Label::kNear);
3088 :
3089 : __ CompareRoot(return_value, RootIndex::kNullValue);
3090 : __ j(equal, &ok, Label::kNear);
3091 :
3092 : __ Abort(AbortReason::kAPICallReturnedInvalidObject);
3093 :
3094 : __ bind(&ok);
3095 : #endif
3096 :
3097 112 : if (stack_space_operand == nullptr) {
3098 : DCHECK_NE(stack_space, 0);
3099 56 : __ ret(stack_space * kSystemPointerSize);
3100 : } else {
3101 : DCHECK_EQ(stack_space, 0);
3102 : __ PopReturnAddressTo(rcx);
3103 : __ addq(rsp, rbx);
3104 56 : __ jmp(rcx);
3105 : }
3106 :
3107 : // Re-throw by promoting a scheduled exception.
3108 112 : __ bind(&promote_scheduled_exception);
3109 112 : __ TailCallRuntime(Runtime::kPromoteScheduledException);
3110 :
3111 : // HandleScope limit has changed. Delete allocated extensions.
3112 112 : __ bind(&delete_allocated_handles);
3113 224 : __ movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
3114 : __ movq(prev_limit_reg, rax);
3115 112 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
3116 112 : __ LoadAddress(rax, ExternalReference::delete_handle_scope_extensions());
3117 112 : __ call(rax);
3118 : __ movq(rax, prev_limit_reg);
3119 112 : __ jmp(&leave_exit_frame);
3120 112 : }
3121 :
3122 : } // namespace
3123 :
3124 : // TODO(jgruber): Instead of explicitly setting up implicit_args_ on the stack
3125 : // in CallApiCallback, we could use the calling convention to set up the stack
3126 : // correctly in the first place.
3127 : //
3128 : // TODO(jgruber): I suspect that most of CallApiCallback could be implemented
3129 : // as a C++ trampoline, vastly simplifying the assembly implementation.
3130 :
3131      56 : void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
3132         :   // ----------- S t a t e -------------
3133         :   //  -- rsi                 : context
3134         :   //  -- rdx                 : api function address
3135         :   //  -- rcx                 : arguments count (not including the receiver)
3136         :   //  -- rbx                 : call data
3137         :   //  -- rdi                 : holder
3138         :   //  -- rsp[0]              : return address
3139         :   //  -- rsp[8]              : last argument
3140         :   //  -- ...
3141         :   //  -- rsp[argc * 8]       : first argument
3142         :   //  -- rsp[(argc + 1) * 8] : receiver
3143         :   // -----------------------------------
3144         : 
3145      56 :   Register api_function_address = rdx;
3146         :   Register argc = rcx;
3147      56 :   Register call_data = rbx;
3148      56 :   Register holder = rdi;
3149         : 
3150         :   DCHECK(!AreAliased(api_function_address, argc, holder, call_data,
3151         :                      kScratchRegister));
3152         : 
3153         :   typedef FunctionCallbackArguments FCA;  // Index layout must match api-arguments.h.
3154         : 
3155         :   STATIC_ASSERT(FCA::kArgsLength == 6);
3156         :   STATIC_ASSERT(FCA::kNewTargetIndex == 5);
3157         :   STATIC_ASSERT(FCA::kDataIndex == 4);
3158         :   STATIC_ASSERT(FCA::kReturnValueOffset == 3);
3159         :   STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
3160         :   STATIC_ASSERT(FCA::kIsolateIndex == 1);
3161         :   STATIC_ASSERT(FCA::kHolderIndex == 0);
3162         : 
3163         :   // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
3164         :   //
3165         :   // Current state:
3166         :   //   rsp[0]: return address
3167         :   //
3168         :   // Target state:
3169         :   //   rsp[0 * kSystemPointerSize]: return address
3170         :   //   rsp[1 * kSystemPointerSize]: kHolder
3171         :   //   rsp[2 * kSystemPointerSize]: kIsolate
3172         :   //   rsp[3 * kSystemPointerSize]: undefined (kReturnValueDefaultValue)
3173         :   //   rsp[4 * kSystemPointerSize]: undefined (kReturnValue)
3174         :   //   rsp[5 * kSystemPointerSize]: kData
3175         :   //   rsp[6 * kSystemPointerSize]: undefined (kNewTarget)
3176         : 
3177         :   // Pop the return address so the implicit args can be pushed beneath it,
3178         :   // then pushed back on top once the frame slots are in place.
3179         :   __ PopReturnAddressTo(rax);
3180      56 :   __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
3181      56 :   __ Push(kScratchRegister);  // kNewTarget
3182      56 :   __ Push(call_data);         // kData
3183      56 :   __ Push(kScratchRegister);  // kReturnValue
3184      56 :   __ Push(kScratchRegister);  // kReturnValueDefaultValue
3185      56 :   __ PushAddress(ExternalReference::isolate_address(masm->isolate()));
3186      56 :   __ Push(holder);
3187         :   __ PushReturnAddressFrom(rax);  // Return address back on top of kHolder.
3188         : 
3189         :   // Keep a pointer to kHolder (= implicit_args) in a scratch register.
3190         :   // We use it below to set up the FunctionCallbackInfo object.
3191         :   Register scratch = rbx;  // call_data is dead after the pushes above.
3192         :   __ leaq(scratch, Operand(rsp, 1 * kSystemPointerSize));
3193         : 
3194         :   // Allocate the v8::Arguments structure in the arguments' space since
3195         :   // it's not controlled by GC.
3196         :   // Four non-GCed slots: implicit_args_, values_, length_, bytes-to-drop.
3197         :   static constexpr int kApiStackSpace = 4;
3198      56 :   __ EnterApiExitFrame(kApiStackSpace);
3199         : 
3200         :   // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
3201         :   __ movq(StackSpaceOperand(0), scratch);
3202         : 
3203         :   // FunctionCallbackInfo::values_ (points at the first varargs argument passed
3204         :   // on the stack).
3205         :   __ leaq(scratch, Operand(scratch, argc, times_system_pointer_size,
3206         :                            (FCA::kArgsLength - 1) * kSystemPointerSize));
3207         :   __ movq(StackSpaceOperand(1), scratch);
3208         : 
3209         :   // FunctionCallbackInfo::length_.
3210         :   __ movq(StackSpaceOperand(2), argc);
3211         : 
3212         :   // We also store the number of bytes to drop from the stack after returning
3213         :   // from the API function here.
3214         :   // Covers the implicit args, the varargs, and the receiver slot.
3215         :   __ leaq(kScratchRegister,
3216         :           Operand(argc, times_system_pointer_size,
3217         :                   (FCA::kArgsLength + 1 /* receiver */) * kSystemPointerSize));
3218         :   __ movq(StackSpaceOperand(3), kScratchRegister);
3219         : 
3220         :   Register arguments_arg = arg_reg_1;
3221      56 :   Register callback_arg = arg_reg_2;
3222         : 
3223         :   // It's okay if api_function_address == callback_arg
3224         :   // but not arguments_arg
3225         :   DCHECK(api_function_address != arguments_arg);
3226         : 
3227         :   // v8::InvocationCallback's argument.
3228         :   __ leaq(arguments_arg, StackSpaceOperand(0));
3229         : 
3230      56 :   ExternalReference thunk_ref = ExternalReference::invoke_function_callback();
3231         : 
3232         :   // There are two stack slots above the arguments we constructed on the stack:
3233         :   // the stored ebp (pushed by EnterApiExitFrame), and the return address.
3234         :   static constexpr int kStackSlotsAboveFCA = 2;
3235         :   Operand return_value_operand(
3236         :       rbp,
3237      56 :       (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kSystemPointerSize);
3238         : 
3239         :   // The stack size to drop is computed at runtime (slot 3 above), so the
3240         :   // constant stack-space argument is zero.
3241         :   static constexpr int kUseStackSpaceOperand = 0;
3242      56 :   Operand stack_space_operand = StackSpaceOperand(3);
3243         :   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
3244         :                            kUseStackSpaceOperand, &stack_space_operand,
3245      56 :                            return_value_operand);
3246      56 : }
3241 :
3242      56 : void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
3243         :   Register name_arg = arg_reg_1;
3244         :   Register accessor_info_arg = arg_reg_2;
3245      56 :   Register getter_arg = arg_reg_3;
3246         :   Register api_function_address = r8;
3247      56 :   Register receiver = ApiGetterDescriptor::ReceiverRegister();
3248      56 :   Register holder = ApiGetterDescriptor::HolderRegister();
3249      56 :   Register callback = ApiGetterDescriptor::CallbackRegister();
3250         :   Register scratch = rax;
3251      56 :   Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
3252      56 :   Register decompr_scratch2 = COMPRESS_POINTERS_BOOL ? r12 : no_reg;
3253         : 
3254         :   DCHECK(!AreAliased(receiver, holder, callback, scratch, decompr_scratch1,
3255         :                      decompr_scratch2));
3256         : 
3257         :   // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
3258         :   // name below the exit frame to make GC aware of them.
3259         :   STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
3260         :   STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
3261         :   STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
3262         :   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
3263         :   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
3264         :   STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
3265         :   STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
3266         :   STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
3267         : 
3268         :   // Insert additional parameters into the stack frame above return address.
3269         :   // Pushes happen highest index first, so args_[0] ends up closest to rsp.
3270         :   __ PopReturnAddressTo(scratch);
3271      56 :   __ Push(receiver);                  // kThisIndex
3272         :   __ PushTaggedAnyField(FieldOperand(callback, AccessorInfo::kDataOffset),
3273      56 :                         decompr_scratch1, decompr_scratch2);
3274      56 :   __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
3275      56 :   __ Push(kScratchRegister);  // return value
3276      56 :   __ Push(kScratchRegister);  // return value default
3277      56 :   __ PushAddress(ExternalReference::isolate_address(masm->isolate()));
3278      56 :   __ Push(holder);
3279      56 :   __ Push(Smi::zero());  // should_throw_on_error -> false
3280         :   // The property name handle sits below args_, outside the args_ array.
3281         :   __ PushTaggedPointerField(FieldOperand(callback, AccessorInfo::kNameOffset),
3282      56 :                             decompr_scratch1);
3283         :   __ PushReturnAddressFrom(scratch);
3284         : 
3285         :   // v8::PropertyCallbackInfo::args_ array and name handle.
3286         :   const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
3287         : 
3288         :   // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
3289         :   const int kArgStackSpace = 1;
3290         : 
3291         :   // Load address of v8::PropertyCallbackInfo::args_ array.
3292         :   // Skips the return address and the name handle pushed above.
3293         :   __ leaq(scratch, Operand(rsp, 2 * kSystemPointerSize));
3294         : 
3295      56 :   __ EnterApiExitFrame(kArgStackSpace);
3296         : 
3297         :   // Create v8::PropertyCallbackInfo object on the stack and initialize
3298         :   // its args_ field.
3299      56 :   Operand info_object = StackSpaceOperand(0);
3300         :   __ movq(info_object, scratch);
3301         : 
3302         :   // The name handle is the slot directly below args_.
3303         :   __ leaq(name_arg, Operand(scratch, -kSystemPointerSize));
3304         :   // The context register (rsi) has been saved in EnterApiExitFrame and
3305         :   // could be used to pass arguments.
3306         :   __ leaq(accessor_info_arg, info_object);
3307         : 
3308         :   ExternalReference thunk_ref =
3309      56 :       ExternalReference::invoke_accessor_getter_callback();
3310         : 
3311         :   // It's okay if api_function_address == getter_arg
3312         :   // but not accessor_info_arg or name_arg
3313         :   DCHECK(api_function_address != accessor_info_arg);
3314         :   DCHECK(api_function_address != name_arg);
3315         :   // The getter is stored as a Foreign; unwrap it to the raw C function address.
3316         :   __ LoadTaggedPointerField(
3317      56 :       scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
3318         :   __ movq(api_function_address,
3319         :           FieldOperand(scratch, Foreign::kForeignAddressOffset));
3320         : 
3321         :   // +3 is to skip prolog, return address and name handle.
3322         :   Operand return_value_operand(
3323         :       rbp,
3324      56 :       (PropertyCallbackArguments::kReturnValueOffset + 3) * kSystemPointerSize);
3325         :   Operand* const kUseStackSpaceConstant = nullptr;
3326         :   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
3327         :                            kStackUnwindSpace, kUseStackSpaceConstant,
3328      56 :                            return_value_operand);
3329      56 : }
3325 :
3326      56 : void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
3327      56 :   __ int3();  // Unused on this architecture; int3 traps if ever entered.
3328      56 : }
3329 :
3330 : #undef __
3331 :
3332 : } // namespace internal
3333 59480 : } // namespace v8
3334 :
3335 : #endif // V8_TARGET_ARCH_X64
|