Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #if V8_TARGET_ARCH_X64
6 :
7 : #include "src/api-arguments.h"
8 : #include "src/base/adapters.h"
9 : #include "src/code-factory.h"
10 : #include "src/counters.h"
11 : #include "src/deoptimizer.h"
12 : #include "src/frame-constants.h"
13 : #include "src/frames.h"
14 : // For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
15 : #include "src/heap/heap-inl.h"
16 : #include "src/macro-assembler-inl.h"
17 : #include "src/objects-inl.h"
18 : #include "src/objects/cell.h"
19 : #include "src/objects/debug-objects.h"
20 : #include "src/objects/foreign.h"
21 : #include "src/objects/heap-number.h"
22 : #include "src/objects/js-generator.h"
23 : #include "src/objects/smi.h"
24 : #include "src/register-configuration.h"
25 : #include "src/wasm/wasm-linkage.h"
26 : #include "src/wasm/wasm-objects.h"
27 :
28 : namespace v8 {
29 : namespace internal {
30 :
31 : #define __ ACCESS_MASM(masm)
32 :
// Tail-calls a C++ builtin at |address| through one of the two adaptor
// builtins. The adaptor sets up a BuiltinExitFrame or a plain ExitFrame
// (per |exit_frame_type|) before transferring control to the C++ code.
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // Hand the C++ entry point to the adaptor in the designated extra-argument
  // register; the adaptor reads it from there when building the exit frame.
  __ LoadAddress(kJavaScriptCallExtraArg1Register,
                 ExternalReference::Create(address));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}
46 :
// Calls the runtime function |function_id| (with the target function as its
// single argument) and tail-calls the Code object it returns. Used for paths
// like lazy compilation, where the runtime produces the code to run next.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
    // The runtime call returns the Code object in rax; stash it in rcx so
    // that the pops below do not clobber it.
    __ movq(rcx, rax);

    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
  }
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ JumpCodeObject(rcx);
}
71 :
72 : namespace {
73 :
// Construct stub for builtin/API constructor functions: pushes the incoming
// arguments and invokes the constructor with TheHole as receiver (no implicit
// receiver object is allocated for these constructors).
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);

    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);

    // Set up pointer to last argument.
    __ leaq(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack, last to first.
    Label loop, entry;
    __ movq(rcx, rax);
    // ----------- S t a t e -------------
    //  -- rax: number of arguments (untagged)
    //  -- rdi: constructor function
    //  -- rdx: new target
    //  -- rbx: pointer to last argument
    //  -- rcx: counter
    //  -- sp[0*kSystemPointerSize]: the hole (receiver)
    //  -- sp[1*kSystemPointerSize]: number of arguments (tagged)
    //  -- sp[2*kSystemPointerSize]: context
    // -----------------------------------
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_system_pointer_size, 0));
    __ bind(&entry);
    __ decq(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    // rax: number of arguments (untagged)
    // rdi: constructor function
    // rdx: new target
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ movq(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return. The extra
  // kSystemPointerSize in the lea skips the receiver slot.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
  __ leaq(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
  __ PushReturnAddressFrom(rcx);

  __ ret(0);
}
140 :
// Jumps to |stack_overflow| if pushing |num_args| more pointers would exceed
// the real stack limit. Clobbers |scratch| and kScratchRegister; |num_args|
// is left untouched.
void Generate_StackOverflowCheck(
    MacroAssembler* masm, Register num_args, Register scratch,
    Label* stack_overflow,
    Label::Distance stack_overflow_distance = Label::kFar) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
  __ movq(scratch, rsp);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ subq(scratch, kScratchRegister);
  // Convert the byte headroom into a slot count for comparison with num_args.
  __ sarq(scratch, Immediate(kSystemPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpq(scratch, num_args);
  // Signed comparison.
  __ j(less_equal, stack_overflow, stack_overflow_distance);
}
159 :
160 : } // namespace
161 :
// The construct stub for ES5 constructor functions and ES6 class constructors.
// Allocates the implicit receiver (unless this is a derived-class
// constructor), invokes the constructor, and applies the ECMA-262 rule for
// choosing between the constructor's return value and the receiver.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments (untagged)
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);
    __ Push(rdi);
    __ PushRoot(RootIndex::kTheHoleValue);  // Padding slot (see state below).
    __ Push(rdx);

    // ----------- S t a t e -------------
    //  -- sp[0*kSystemPointerSize]: new target
    //  -- sp[1*kSystemPointerSize]: padding
    //  -- rdi and sp[2*kSystemPointerSize]: constructor function
    //  -- sp[3*kSystemPointerSize]: argument count
    //  -- sp[4*kSystemPointerSize]: context
    // -----------------------------------

    // Decode the function kind to decide whether an implicit receiver must be
    // allocated; derived class constructors receive TheHole instead.
    __ LoadTaggedPointerField(
        rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movl(rbx, FieldOperand(rbx, SharedFunctionInfo::kFlagsOffset));
    __ DecodeField<SharedFunctionInfo::FunctionKindBits>(rbx);
    __ JumpIfIsInRange(rbx, kDefaultDerivedConstructor, kDerivedConstructor,
                       &not_create_implicit_receiver, Label::kNear);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ jmp(&post_instantiation_deopt_entry, Label::kNear);

    // Else: use TheHoleValue as receiver for constructor call
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(rax, RootIndex::kTheHoleValue);

    // ----------- S t a t e -------------
    //  -- rax                                implicit receiver
    //  -- Slot 4 / sp[0*kSystemPointerSize]  new target
    //  -- Slot 3 / sp[1*kSystemPointerSize]  padding
    //  -- Slot 2 / sp[2*kSystemPointerSize]  constructor function
    //  -- Slot 1 / sp[3*kSystemPointerSize]  number of arguments (tagged)
    //  -- Slot 0 / sp[4*kSystemPointerSize]  context
    // -----------------------------------
    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(rdx);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rax);
    __ Push(rax);

    // ----------- S t a t e -------------
    //  -- sp[0*kSystemPointerSize]  implicit receiver
    //  -- sp[1*kSystemPointerSize]  implicit receiver
    //  -- sp[2*kSystemPointerSize]  padding
    //  -- sp[3*kSystemPointerSize]  constructor function
    //  -- sp[4*kSystemPointerSize]  number of arguments (tagged)
    //  -- sp[5*kSystemPointerSize]  context
    // -----------------------------------

    // Restore constructor function and argument count.
    __ movq(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
    __ SmiUntag(rax, Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Set up pointer to last argument.
    __ leaq(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Check if we have enough stack space to push all arguments.
    // Argument count in rax. Clobbers rcx.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
    __ jmp(&enough_stack_space, Label::kNear);

    __ bind(&stack_overflow);
    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack, last to first.
    Label loop, entry;
    __ movq(rcx, rax);
    // ----------- S t a t e -------------
    //  -- rax: number of arguments (untagged)
    //  -- rdx: new target
    //  -- rbx: pointer to last argument
    //  -- rcx: counter (tagged)
    //  -- sp[0*kSystemPointerSize]: implicit receiver
    //  -- sp[1*kSystemPointerSize]: implicit receiver
    //  -- sp[2*kSystemPointerSize]: padding
    //  -- rdi and sp[3*kSystemPointerSize]: constructor function
    //  -- sp[4*kSystemPointerSize]: number of arguments (tagged)
    //  -- sp[5*kSystemPointerSize]: context
    // -----------------------------------
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_system_pointer_size, 0));
    __ bind(&entry);
    __ decq(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // ----------- S t a t e -------------
    //  -- rax                       constructor result
    //  -- sp[0*kSystemPointerSize]  implicit receiver
    //  -- sp[1*kSystemPointerSize]  padding
    //  -- sp[2*kSystemPointerSize]  constructor function
    //  -- sp[3*kSystemPointerSize]  number of arguments
    //  -- sp[4*kSystemPointerSize]  context
    // -----------------------------------

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, we jump out to using the implicit receiver.
    __ JumpIfRoot(rax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);

    // Otherwise we do a smi check and fall through to check if the return
    // value is a valid receiver.

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver, Label::kNear);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
    __ j(above_equal, &leave_frame, Label::kNear);
    __ jmp(&use_receiver, Label::kNear);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result. A hole here means a derived
    // constructor returned a non-object and never called super() -> throw.
    __ bind(&use_receiver);
    __ movq(rax, Operand(rsp, 0 * kSystemPointerSize));
    __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);

    __ bind(&leave_frame);
    // Restore the arguments count.
    __ movq(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return. The extra
  // kSystemPointerSize in the lea skips the receiver slot.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
  __ leaq(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}
347 :
// Entry point for the builtins construct stub; delegates to the shared
// helper above (arguments pushed, TheHole receiver, no object allocation).
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}
351 :
// Called when something that is not a constructor is used with `new`.
// Throws a TypeError via the runtime; the runtime call does not return.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // The target is the sole argument to the runtime function.
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
357 :
358 : namespace {
359 :
// Generates the JSEntry stub: the frame that transitions from native C++
// code into generated JS code. Saves all callee-saved registers, links a
// faked try/catch handler for exceptions thrown from JS, and calls
// |entry_trampoline| to perform the actual invocation.
//
// Called with the native C calling convention. The corresponding function
// signature is either:
// using JSEntryFunction = GeneratedCode<Address(
//     Address root_register_value, Address new_target, Address target,
//     Address receiver, intptr_t argc, Address** argv)>;
// or
// using JSEntryFunction = GeneratedCode<Address(
//     Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
                             Builtins::Name entry_trampoline) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  {  // NOLINT. Scope block confuses linter.
    // The root register is not usable until initialized below.
    NoRootArrayScope uninitialized_root_register(masm);
    // Set up frame.
    __ pushq(rbp);
    __ movq(rbp, rsp);

    // Push the stack frame type.
    __ Push(Immediate(StackFrame::TypeToMarker(type)));
    // Reserve a slot for the context. It is filled after the root register has
    // been set up.
    __ subq(rsp, Immediate(kSystemPointerSize));
    // Save callee-saved registers (X64/X32/Win64 calling conventions).
    __ pushq(r12);
    __ pushq(r13);
    __ pushq(r14);
    __ pushq(r15);
#ifdef _WIN64
    __ pushq(rdi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
    __ pushq(rsi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
#endif
    __ pushq(rbx);

#ifdef _WIN64
    // On Win64 XMM6-XMM15 are callee-save.
    __ subq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
    STATIC_ASSERT(EntryFrameConstants::kCalleeSaveXMMRegisters == 10);
    STATIC_ASSERT(EntryFrameConstants::kXMMRegistersBlockSize ==
                  EntryFrameConstants::kXMMRegisterSize *
                      EntryFrameConstants::kCalleeSaveXMMRegisters);
#endif

    // Initialize the root register.
    // C calling convention. The first argument is passed in arg_reg_1.
    __ movq(kRootRegister, arg_reg_1);
  }

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp = ExternalReference::Create(
      IsolateAddressId::kCEntryFPAddress, masm->isolate());
  {
    Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
    __ Push(c_entry_fp_operand);
  }

  // Store the context address in the previously-reserved slot.
  ExternalReference context_address = ExternalReference::Create(
      IsolateAddressId::kContextAddress, masm->isolate());
  __ Load(kScratchRegister, context_address);
  static constexpr int kOffsetToContextSlot = -2 * kSystemPointerSize;
  __ movq(Operand(rbp, kOffsetToContextSlot), kScratchRegister);

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp = ExternalReference::Create(
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ Load(rax, js_entry_sp);
  __ testq(rax, rax);
  __ j(not_zero, &not_outermost_js);
  __ Push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ movq(rax, rbp);
  __ Store(js_entry_sp, rax);
  Label cont;
  __ jmp(&cont);
  __ bind(&not_outermost_js);
  __ Push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
  __ bind(&cont);

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);

  // Store the current pc as the handler offset. It's used later to create the
  // handler table.
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());

  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception = ExternalReference::Create(
      IsolateAddressId::kPendingExceptionAddress, masm->isolate());
  __ Store(pending_exception, rax);
  __ LoadRoot(rax, RootIndex::kException);
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return.
  Handle<Code> trampoline_code =
      masm->isolate()->builtins()->builtin_handle(entry_trampoline);
  __ Call(trampoline_code, RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ Pop(rbx);
  __ cmpq(rbx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  // Leaving the outermost frame: clear js_entry_sp.
  __ Move(kScratchRegister, js_entry_sp);
  __ movq(Operand(kScratchRegister, 0), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  {
    Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
    __ Pop(c_entry_fp_operand);
  }

  // Restore callee-saved registers (X64 conventions).
#ifdef _WIN64
  // On Win64 XMM6-XMM15 are callee-save
  __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
  __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
  __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
  __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
  __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
  __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
  __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
  __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
  __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
  __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
  __ addq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
#endif

  __ popq(rbx);
#ifdef _WIN64
  // Callee save on in Win64 ABI, arguments/volatile in AMD64 ABI.
  __ popq(rsi);
  __ popq(rdi);
#endif
  __ popq(r15);
  __ popq(r14);
  __ popq(r13);
  __ popq(r12);
  __ addq(rsp, Immediate(2 * kSystemPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ popq(rbp);
  __ ret(0);
}
526 :
527 : } // namespace
528 :
// JSEntry for ordinary calls: ENTRY frame + the JSEntryTrampoline builtin.
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kJSEntryTrampoline);
}
533 :
// JSEntry for construct calls: CONSTRUCT_ENTRY frame + construct trampoline.
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtins::kJSConstructEntryTrampoline);
}
538 :
// JSEntry variant used to run the microtask queue from C++.
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kRunMicrotasksTrampoline);
}
543 :
544 112 : static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
545 : bool is_construct) {
546 : // Expects six C++ function parameters.
547 : // - Address root_register_value
548 : // - Address new_target (tagged Object pointer)
549 : // - Address function (tagged JSFunction pointer)
550 : // - Address receiver (tagged Object pointer)
551 : // - intptr_t argc
552 : // - Address** argv (pointer to array of tagged Object pointers)
553 : // (see Handle::Invoke in execution.cc).
554 :
555 : // Open a C++ scope for the FrameScope.
556 : {
557 : // Platform specific argument handling. After this, the stack contains
558 : // an internal frame and the pushed function and receiver, and
559 : // register rax and rbx holds the argument count and argument array,
560 : // while rdi holds the function pointer, rsi the context, and rdx the
561 : // new.target.
562 :
563 : // MSVC parameters in:
564 : // rcx : root_register_value
565 : // rdx : new_target
566 : // r8 : function
567 : // r9 : receiver
568 : // [rsp+0x20] : argc
569 : // [rsp+0x28] : argv
570 : //
571 : // GCC parameters in:
572 : // rdi : root_register_value
573 : // rsi : new_target
574 : // rdx : function
575 : // rcx : receiver
576 : // r8 : argc
577 : // r9 : argv
578 :
579 112 : __ movq(rdi, arg_reg_3);
580 112 : __ Move(rdx, arg_reg_2);
581 : // rdi : function
582 : // rdx : new_target
583 :
584 : // Clear the context before we push it when entering the internal frame.
585 112 : __ Set(rsi, 0);
586 :
587 : // Enter an internal frame.
588 224 : FrameScope scope(masm, StackFrame::INTERNAL);
589 :
590 : // Setup the context (we need to use the caller context from the isolate).
591 : ExternalReference context_address = ExternalReference::Create(
592 112 : IsolateAddressId::kContextAddress, masm->isolate());
593 112 : __ movq(rsi, masm->ExternalReferenceAsOperand(context_address));
594 :
595 : // Push the function and the receiver onto the stack.
596 112 : __ Push(rdi);
597 112 : __ Push(arg_reg_4);
598 :
599 : #ifdef _WIN64
600 : // Load the previous frame pointer to access C arguments on stack
601 : __ movq(kScratchRegister, Operand(rbp, 0));
602 : // Load the number of arguments and setup pointer to the arguments.
603 : __ movq(rax, Operand(kScratchRegister, EntryFrameConstants::kArgcOffset));
604 : __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
605 : #else // _WIN64
606 : // Load the number of arguments and setup pointer to the arguments.
607 112 : __ movq(rax, r8);
608 112 : __ movq(rbx, r9);
609 : #endif // _WIN64
610 :
611 : // Current stack contents:
612 : // [rsp + 2 * kSystemPointerSize ... ] : Internal frame
613 : // [rsp + kSystemPointerSize] : function
614 : // [rsp] : receiver
615 : // Current register contents:
616 : // rax : argc
617 : // rbx : argv
618 : // rsi : context
619 : // rdi : function
620 : // rdx : new.target
621 :
622 : // Check if we have enough stack space to push all arguments.
623 : // Argument count in rax. Clobbers rcx.
624 112 : Label enough_stack_space, stack_overflow;
625 112 : Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
626 112 : __ jmp(&enough_stack_space, Label::kNear);
627 :
628 112 : __ bind(&stack_overflow);
629 112 : __ CallRuntime(Runtime::kThrowStackOverflow);
630 : // This should be unreachable.
631 112 : __ int3();
632 :
633 112 : __ bind(&enough_stack_space);
634 :
635 : // Copy arguments to the stack in a loop.
636 : // Register rbx points to array of pointers to handle locations.
637 : // Push the values of these handles.
638 112 : Label loop, entry;
639 112 : __ Set(rcx, 0); // Set loop variable to 0.
640 112 : __ jmp(&entry, Label::kNear);
641 112 : __ bind(&loop);
642 112 : __ movq(kScratchRegister, Operand(rbx, rcx, times_system_pointer_size, 0));
643 112 : __ Push(Operand(kScratchRegister, 0)); // dereference handle
644 112 : __ addq(rcx, Immediate(1));
645 112 : __ bind(&entry);
646 112 : __ cmpq(rcx, rax);
647 112 : __ j(not_equal, &loop, Label::kNear);
648 :
649 : // Invoke the builtin code.
650 : Handle<Code> builtin = is_construct
651 56 : ? BUILTIN_CODE(masm->isolate(), Construct)
652 168 : : masm->isolate()->builtins()->Call();
653 112 : __ Call(builtin, RelocInfo::CODE_TARGET);
654 :
655 : // Exit the internal frame. Notice that this also removes the empty
656 : // context and the function left on the stack by the code
657 : // invocation.
658 : }
659 :
660 112 : __ ret(0);
661 112 : }
662 :
// Trampoline for ordinary (non-construct) JS entry calls.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
666 :
// Trampoline for construct JS entry calls (dispatches to Construct).
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
670 :
// Moves the microtask queue pointer from the C ABI register into the
// RunMicrotasks descriptor register and tail-calls the RunMicrotasks builtin.
void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  // arg_reg_2: microtask_queue
  __ movq(RunMicrotasksDescriptor::MicrotaskQueueRegister(), arg_reg_2);
  __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
}
676 :
// If |sfi_data| holds an InterpreterData object, replaces it in place with
// the BytecodeArray it wraps; otherwise |sfi_data| is left unchanged.
// Clobbers |scratch1|.
static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
  __ j(not_equal, &done, Label::kNear);

  __ LoadTaggedPointerField(
      sfi_data, FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}
690 :
691 : // static
692 56 : void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
693 : // ----------- S t a t e -------------
694 : // -- rax : the value to pass to the generator
695 : // -- rdx : the JSGeneratorObject to resume
696 : // -- rsp[0] : return address
697 : // -----------------------------------
698 56 : __ AssertGeneratorObject(rdx);
699 :
700 : // Store input value into generator object.
701 56 : __ StoreTaggedField(
702 56 : FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
703 : __ RecordWriteField(rdx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
704 56 : kDontSaveFPRegs);
705 :
706 56 : Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
707 56 : Register decompr_scratch2 = COMPRESS_POINTERS_BOOL ? r12 : no_reg;
708 :
709 : // Load suspended function and context.
710 56 : __ LoadTaggedPointerField(
711 56 : rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
712 56 : __ LoadTaggedPointerField(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
713 :
714 : // Flood function if we are stepping.
715 56 : Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
716 56 : Label stepping_prepared;
717 : ExternalReference debug_hook =
718 56 : ExternalReference::debug_hook_on_function_call_address(masm->isolate());
719 56 : Operand debug_hook_operand = masm->ExternalReferenceAsOperand(debug_hook);
720 56 : __ cmpb(debug_hook_operand, Immediate(0));
721 56 : __ j(not_equal, &prepare_step_in_if_stepping);
722 :
723 : // Flood function if we need to continue stepping in the suspended generator.
724 : ExternalReference debug_suspended_generator =
725 56 : ExternalReference::debug_suspended_generator_address(masm->isolate());
726 : Operand debug_suspended_generator_operand =
727 56 : masm->ExternalReferenceAsOperand(debug_suspended_generator);
728 56 : __ cmpq(rdx, debug_suspended_generator_operand);
729 56 : __ j(equal, &prepare_step_in_suspended_generator);
730 56 : __ bind(&stepping_prepared);
731 :
732 : // Check the stack for overflow. We are not trying to catch interruptions
733 : // (i.e. debug break and preemption) here, so check the "real stack limit".
734 56 : Label stack_overflow;
735 56 : __ CompareRoot(rsp, RootIndex::kRealStackLimit);
736 56 : __ j(below, &stack_overflow);
737 :
738 : // Pop return address.
739 56 : __ PopReturnAddressTo(rax);
740 :
741 : // Push receiver.
742 56 : __ PushTaggedPointerField(
743 56 : FieldOperand(rdx, JSGeneratorObject::kReceiverOffset), decompr_scratch1);
744 :
745 : // ----------- S t a t e -------------
746 : // -- rax : return address
747 : // -- rdx : the JSGeneratorObject to resume
748 : // -- rdi : generator function
749 : // -- rsi : generator context
750 : // -- rsp[0] : generator receiver
751 : // -----------------------------------
752 :
753 : // Copy the function arguments from the generator object's register file.
754 56 : __ LoadTaggedPointerField(
755 56 : rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
756 56 : __ movzxwq(
757 56 : rcx, FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));
758 :
759 56 : __ LoadTaggedPointerField(
760 56 : rbx, FieldOperand(rdx, JSGeneratorObject::kParametersAndRegistersOffset));
761 :
762 : {
763 56 : Label done_loop, loop;
764 56 : __ Set(r9, 0);
765 :
766 56 : __ bind(&loop);
767 56 : __ cmpl(r9, rcx);
768 56 : __ j(greater_equal, &done_loop, Label::kNear);
769 56 : __ PushTaggedAnyField(
770 : FieldOperand(rbx, r9, times_tagged_size, FixedArray::kHeaderSize),
771 56 : decompr_scratch1, decompr_scratch2);
772 56 : __ addl(r9, Immediate(1));
773 56 : __ jmp(&loop);
774 :
775 56 : __ bind(&done_loop);
776 : }
777 :
778 : // Underlying function needs to have bytecode available.
779 56 : if (FLAG_debug_code) {
780 0 : __ LoadTaggedPointerField(
781 0 : rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
782 0 : __ LoadTaggedPointerField(
783 0 : rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
784 0 : GetSharedFunctionInfoBytecode(masm, rcx, kScratchRegister);
785 0 : __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
786 0 : __ Assert(equal, AbortReason::kMissingBytecodeArray);
787 : }
788 :
789 : // Resume (Ignition/TurboFan) generator object.
790 : {
791 56 : __ PushReturnAddressFrom(rax);
792 56 : __ LoadTaggedPointerField(
793 56 : rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
794 56 : __ movzxwq(rax, FieldOperand(
795 56 : rax, SharedFunctionInfo::kFormalParameterCountOffset));
796 : // We abuse new.target both to indicate that this is a resume call and to
797 : // pass in the generator object. In ordinary calls, new.target is always
798 : // undefined because generator functions are non-constructable.
799 : static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
800 56 : __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
801 56 : __ JumpCodeObject(rcx);
802 : }
803 :
804 56 : __ bind(&prepare_step_in_if_stepping);
805 : {
806 112 : FrameScope scope(masm, StackFrame::INTERNAL);
807 56 : __ Push(rdx);
808 56 : __ Push(rdi);
809 : // Push hole as receiver since we do not use it for stepping.
810 56 : __ PushRoot(RootIndex::kTheHoleValue);
811 56 : __ CallRuntime(Runtime::kDebugOnFunctionCall);
812 56 : __ Pop(rdx);
813 56 : __ LoadTaggedPointerField(
814 56 : rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
815 : }
816 56 : __ jmp(&stepping_prepared);
817 :
818 56 : __ bind(&prepare_step_in_suspended_generator);
819 : {
820 112 : FrameScope scope(masm, StackFrame::INTERNAL);
821 56 : __ Push(rdx);
822 56 : __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
823 56 : __ Pop(rdx);
824 56 : __ LoadTaggedPointerField(
825 56 : rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
826 : }
827 56 : __ jmp(&stepping_prepared);
828 :
829 56 : __ bind(&stack_overflow);
830 : {
831 112 : FrameScope scope(masm, StackFrame::INTERNAL);
832 56 : __ CallRuntime(Runtime::kThrowStackOverflow);
833 56 : __ int3(); // This should be unreachable.
834 : }
835 56 : }
836 :
 837 : // TODO(juliana): if we remove the code below then we don't need all
 838 : // the parameters.
     : //
     : // Emits code that installs |optimized_code| as |closure|'s Code object
     : // and records the GC write barrier for that field. |scratch1| is
     : // clobbered by the write barrier; |scratch2| and |scratch3| are passed
     : // through as barrier scratch registers (scratch3 is currently unused
     : // here — see the TODO above).
 839 56 : static void ReplaceClosureCodeWithOptimizedCode(
 840 : MacroAssembler* masm, Register optimized_code, Register closure,
 841 : Register scratch1, Register scratch2, Register scratch3) {
 842 :
 843 : // Store the optimized code in the closure.
 844 56 : __ StoreTaggedField(FieldOperand(closure, JSFunction::kCodeOffset),
 845 56 : optimized_code);
 846 56 : __ movq(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
 847 : __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
 848 56 : kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
 849 56 : }
850 :
     : // Emits code that tears down the current interpreter frame and drops the
     : // receiver plus all arguments; the count is read from the on-frame
     : // BytecodeArray's parameter-size field. Clobbers |scratch1| (argument
     : // count) and |scratch2| (saved return PC).
 851 56 : static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
 852 : Register scratch2) {
 853 56 : Register args_count = scratch1;
 854 56 : Register return_pc = scratch2;
 855 :
 856 : // Get the arguments + receiver count.
 857 112 : __ movq(args_count,
 858 56 : Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
 859 56 : __ movl(args_count,
 860 56 : FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
 861 :
 862 : // Leave the frame (also dropping the register file).
 863 56 : __ leave();
 864 :
 865 : // Drop receiver + arguments.
     : // Pop the return address first, bump rsp past the args, then re-push it
     : // so a plain ret returns to the caller with the args gone.
 866 56 : __ PopReturnAddressTo(return_pc);
 867 56 : __ addq(rsp, args_count);
 868 56 : __ PushReturnAddressFrom(return_pc);
 869 56 : }
870 :
 871 : // Tail-call |function_id| if |smi_entry| == |marker|
     : // Otherwise falls through to the code following the emitted sequence.
     : // |smi_entry| must hold a Smi-tagged OptimizationMarker value.
 872 168 : static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
 873 : Register smi_entry,
 874 : OptimizationMarker marker,
 875 : Runtime::FunctionId function_id) {
 876 168 : Label no_match;
 877 168 : __ SmiCompare(smi_entry, Smi::FromEnum(marker));
 878 168 : __ j(not_equal, &no_match);
 879 168 : GenerateTailCallToReturnedCode(masm, function_id);
 880 168 : __ bind(&no_match);
 881 168 : }
882 :
     : // Inspects the feedback vector's optimized-code slot. A Smi in the slot
     : // is an optimization marker and may trigger a tail call into the runtime
     : // (compile / log-first-execution); a weak reference to live, non-deopted
     : // Code is installed into the closure and tail-called. If the slot is
     : // empty/cleared or the marker is kNone, control falls through so the
     : // caller can enter the interpreter instead.
 883 56 : static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
 884 : Register feedback_vector,
 885 : Register scratch1, Register scratch2,
 886 : Register scratch3) {
 887 : // ----------- S t a t e -------------
 888 : // -- rdx : new target (preserved for callee if needed, and caller)
 889 : // -- rdi : target function (preserved for callee if needed, and caller)
 890 : // -- feedback vector (preserved for caller if needed)
 891 : // -----------------------------------
 892 : DCHECK(!AreAliased(feedback_vector, rdx, rdi, scratch1, scratch2, scratch3));
 893 :
 894 56 : Label optimized_code_slot_is_weak_ref, fallthrough;
 895 :
 896 56 : Register closure = rdi;
 897 56 : Register optimized_code_entry = scratch1;
 898 56 : Register decompr_scratch = COMPRESS_POINTERS_BOOL ? scratch2 : no_reg;
 899 :
 900 56 : __ LoadAnyTaggedField(
 901 : optimized_code_entry,
 902 : FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset),
 903 56 : decompr_scratch);
 904 :
 905 : // Check if the code entry is a Smi. If yes, we interpret it as an
 906 : // optimisation marker. Otherwise, interpret it as a weak reference to a code
 907 : // object.
 908 56 : __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
 909 :
 910 : {
 911 : // Optimized code slot is a Smi optimization marker.
 912 :
 913 : // Fall through if no optimization trigger.
 914 56 : __ SmiCompare(optimized_code_entry,
 915 56 : Smi::FromEnum(OptimizationMarker::kNone));
 916 56 : __ j(equal, &fallthrough);
 917 :
 918 : // TODO(v8:8394): The logging of first execution will break if
 919 : // feedback vectors are not allocated. We need to find a different way of
 920 : // logging these events if required.
 921 : TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
 922 : OptimizationMarker::kLogFirstExecution,
 923 56 : Runtime::kFunctionFirstExecution);
 924 : TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
 925 : OptimizationMarker::kCompileOptimized,
 926 56 : Runtime::kCompileOptimized_NotConcurrent);
 927 : TailCallRuntimeIfMarkerEquals(
 928 : masm, optimized_code_entry,
 929 : OptimizationMarker::kCompileOptimizedConcurrent,
 930 56 : Runtime::kCompileOptimized_Concurrent);
 931 :
 932 : {
 933 : // Otherwise, the marker is InOptimizationQueue, so fall through hoping
 934 : // that an interrupt will eventually update the slot with optimized code.
 935 56 : if (FLAG_debug_code) {
 936 0 : __ SmiCompare(optimized_code_entry,
 937 0 : Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
 938 0 : __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
 939 : }
 940 56 : __ jmp(&fallthrough);
 941 : }
 942 : }
 943 :
 944 : {
 945 : // Optimized code slot is a weak reference.
 946 56 : __ bind(&optimized_code_slot_is_weak_ref);
 947 :
     : // If the weak ref has been cleared by GC, fall through to the caller.
 948 56 : __ LoadWeakValue(optimized_code_entry, &fallthrough);
 949 :
 950 : // Check if the optimized code is marked for deopt. If it is, call the
 951 : // runtime to clear it.
 952 56 : Label found_deoptimized_code;
 953 56 : __ LoadTaggedPointerField(
 954 : scratch2,
 955 56 : FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
 956 112 : __ testl(
 957 : FieldOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset),
 958 56 : Immediate(1 << Code::kMarkedForDeoptimizationBit));
 959 56 : __ j(not_zero, &found_deoptimized_code);
 960 :
 961 : // Optimized code is good, get it into the closure and link the closure into
 962 : // the optimized functions list, then tail call the optimized code.
 963 : // The feedback vector is no longer used, so re-use it as a scratch
 964 : // register.
 965 : ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
 966 56 : scratch2, scratch3, feedback_vector);
 967 : static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
 968 56 : __ Move(rcx, optimized_code_entry);
 969 56 : __ JumpCodeObject(rcx);
 970 :
 971 : // Optimized code slot contains deoptimized code, evict it and re-enter the
 972 : // closure's code.
 973 56 : __ bind(&found_deoptimized_code);
 974 56 : GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
 975 : }
 976 :
 977 : // Fall-through if the optimized code cell is clear and there is no
 978 : // optimization marker.
 979 56 : __ bind(&fallthrough);
 980 56 : }
981 :
 982 : // Advance the current bytecode offset. This simulates what all bytecode
 983 : // handlers do upon completion of the underlying operation. Will bail out to a
 984 : // label if the bytecode (without prefix) is a return bytecode.
     : //
     : // On entry |bytecode| holds the current (possibly prefix) bytecode and
     : // |bytecode_offset| its offset in |bytecode_array|. Wide/ExtraWide
     : // prefixes are consumed here: the offset is bumped past the prefix, the
     : // real bytecode is reloaded, and |scratch1| is pointed at the matching
     : // scaled size table before the size lookup at the end.
 985 112 : static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
 986 : Register bytecode_array,
 987 : Register bytecode_offset,
 988 : Register bytecode, Register scratch1,
 989 : Label* if_return) {
 990 112 : Register bytecode_size_table = scratch1;
 991 : DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
 992 : bytecode));
 993 :
 994 112 : __ Move(bytecode_size_table,
 995 112 : ExternalReference::bytecode_size_table_address());
 996 :
 997 : // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
     : // Prefix opcodes are 0..3; odd values (1, 3) are the ExtraWide variants.
 998 112 : Label process_bytecode, extra_wide;
 999 : STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
 1000 : STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
 1001 : STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
 1002 : STATIC_ASSERT(3 ==
 1003 : static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
 1004 112 : __ cmpb(bytecode, Immediate(0x3));
 1005 112 : __ j(above, &process_bytecode, Label::kNear);
 1006 112 : __ testb(bytecode, Immediate(0x1));
 1007 112 : __ j(not_equal, &extra_wide, Label::kNear);
 1008 :
 1009 : // Load the next bytecode and update table to the wide scaled table.
 1010 112 : __ incl(bytecode_offset);
 1011 112 : __ movzxbq(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
 1012 224 : __ addq(bytecode_size_table,
 1013 112 : Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
 1014 112 : __ jmp(&process_bytecode, Label::kNear);
 1015 :
 1016 112 : __ bind(&extra_wide);
 1017 : // Load the next bytecode and update table to the extra wide scaled table.
 1018 112 : __ incl(bytecode_offset);
 1019 112 : __ movzxbq(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
 1020 224 : __ addq(bytecode_size_table,
 1021 112 : Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
 1022 :
 1023 112 : __ bind(&process_bytecode);
 1024 :
 1025 : // Bailout to the return label if this is a return bytecode.
 1026 : #define JUMP_IF_EQUAL(NAME) \
 1027 : __ cmpb(bytecode, \
 1028 : Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
 1029 : __ j(equal, if_return, Label::kFar);
 1030 112 : RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
 1031 : #undef JUMP_IF_EQUAL
 1032 :
 1033 : // Otherwise, load the size of the current bytecode and advance the offset.
 1034 224 : __ addl(bytecode_offset,
 1035 112 : Operand(bytecode_size_table, bytecode, times_int_size, 0));
 1036 112 : }
1037 :
 1038 : // Generate code for entering a JS function with the interpreter.
 1039 : // On entry to the function the receiver and arguments have been pushed on the
 1040 : // stack left to right. The actual argument count matches the formal parameter
 1041 : // count expected by the function.
 1042 : //
 1043 : // The live registers are:
 1044 : // o rdi: the JS function object being called
 1045 : // o rdx: the incoming new target or generator object
 1046 : // o rsi: our context
 1047 : // o rbp: the caller's frame pointer
 1048 : // o rsp: stack pointer (pointing to return address)
 1049 : //
 1050 : // The function builds an interpreter frame. See InterpreterFrameConstants in
 1051 : // frames.h for its layout.
 1052 56 : void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
 1053 56 : Register closure = rdi;
 1054 56 : Register feedback_vector = rbx;
 1055 :
 1056 : // Get the bytecode array from the function object and load it into
 1057 : // kInterpreterBytecodeArrayRegister.
 1058 56 : __ LoadTaggedPointerField(
 1059 56 : rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
 1060 56 : __ LoadTaggedPointerField(
 1061 : kInterpreterBytecodeArrayRegister,
 1062 56 : FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
 1063 : GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister,
 1064 56 : kScratchRegister);
 1065 :
 1066 : // The bytecode array could have been flushed from the shared function info,
 1067 : // if so, call into CompileLazy.
 1068 56 : Label compile_lazy;
 1069 56 : __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE, rax);
 1070 56 : __ j(not_equal, &compile_lazy);
 1071 :
 1072 : // Load the feedback vector from the closure.
 1073 56 : __ LoadTaggedPointerField(
 1074 56 : feedback_vector, FieldOperand(closure, JSFunction::kFeedbackCellOffset));
 1075 56 : __ LoadTaggedPointerField(feedback_vector,
 1076 56 : FieldOperand(feedback_vector, Cell::kValueOffset));
 1077 :
 1078 56 : Label push_stack_frame;
 1079 : // Check if feedback vector is valid. If valid, check for optimized code
 1080 : // and update invocation count. Otherwise, setup the stack frame.
 1081 56 : __ LoadTaggedPointerField(
 1082 56 : rcx, FieldOperand(feedback_vector, HeapObject::kMapOffset));
 1083 56 : __ CmpInstanceType(rcx, FEEDBACK_VECTOR_TYPE);
 1084 56 : __ j(not_equal, &push_stack_frame);
 1085 :
 1086 : // Read off the optimized code slot in the feedback vector, and if there
 1087 : // is optimized code or an optimization marker, call that instead.
     : // (Only falls through here when there is nothing better to run.)
 1088 56 : MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r11, r15);
 1089 :
 1090 : // Increment invocation count for the function.
 1091 56 : __ incl(
 1092 56 : FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
 1093 :
 1094 : // Open a frame scope to indicate that there is a frame on the stack. The
 1095 : // MANUAL indicates that the scope shouldn't actually generate code to set up
 1096 : // the frame (that is done below).
 1097 56 : __ bind(&push_stack_frame);
 1098 112 : FrameScope frame_scope(masm, StackFrame::MANUAL);
 1099 56 : __ pushq(rbp); // Caller's frame pointer.
 1100 56 : __ movq(rbp, rsp);
 1101 56 : __ Push(rsi); // Callee's context.
 1102 56 : __ Push(rdi); // Callee's JS function.
 1103 :
 1104 : // Reset code age.
 1105 112 : __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
 1106 : BytecodeArray::kBytecodeAgeOffset),
 1107 56 : Immediate(BytecodeArray::kNoAgeBytecodeAge));
 1108 :
 1109 : // Load initial bytecode offset.
 1110 112 : __ movq(kInterpreterBytecodeOffsetRegister,
 1111 56 : Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
 1112 :
 1113 : // Push bytecode array and Smi tagged bytecode offset.
 1114 56 : __ Push(kInterpreterBytecodeArrayRegister);
 1115 56 : __ SmiTag(rcx, kInterpreterBytecodeOffsetRegister);
 1116 56 : __ Push(rcx);
 1117 :
 1118 : // Allocate the local and temporary register file on the stack.
 1119 : {
 1120 : // Load frame size from the BytecodeArray object.
 1121 56 : __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
 1122 56 : BytecodeArray::kFrameSizeOffset));
 1123 :
 1124 : // Do a stack check to ensure we don't go over the limit.
 1125 56 : Label ok;
 1126 56 : __ movq(rax, rsp);
 1127 56 : __ subq(rax, rcx);
 1128 56 : __ CompareRoot(rax, RootIndex::kRealStackLimit);
 1129 56 : __ j(above_equal, &ok, Label::kNear);
 1130 56 : __ CallRuntime(Runtime::kThrowStackOverflow);
 1131 56 : __ bind(&ok);
 1132 :
 1133 : // If ok, push undefined as the initial value for all register file entries.
 1134 56 : Label loop_header;
 1135 56 : Label loop_check;
 1136 56 : __ LoadRoot(rax, RootIndex::kUndefinedValue);
 1137 56 : __ j(always, &loop_check, Label::kNear);
 1138 56 : __ bind(&loop_header);
 1139 : // TODO(rmcilroy): Consider doing more than one push per loop iteration.
 1140 56 : __ Push(rax);
 1141 : // Continue loop if not done.
 1142 56 : __ bind(&loop_check);
 1143 56 : __ subq(rcx, Immediate(kSystemPointerSize));
 1144 56 : __ j(greater_equal, &loop_header, Label::kNear);
 1145 : }
 1146 :
 1147 : // If the bytecode array has a valid incoming new target or generator object
 1148 : // register, initialize it with incoming value which was passed in rdx.
 1149 56 : Label no_incoming_new_target_or_generator_register;
 1150 56 : __ movsxlq(
 1151 : rax,
 1152 : FieldOperand(kInterpreterBytecodeArrayRegister,
 1153 56 : BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
 1154 56 : __ testl(rax, rax);
 1155 56 : __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
 1156 56 : __ movq(Operand(rbp, rax, times_system_pointer_size, 0), rdx);
 1157 56 : __ bind(&no_incoming_new_target_or_generator_register);
 1158 :
 1159 : // Load accumulator with undefined.
 1160 56 : __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
 1161 :
 1162 : // Load the dispatch table into a register and dispatch to the bytecode
 1163 : // handler at the current bytecode offset.
 1164 56 : Label do_dispatch;
 1165 56 : __ bind(&do_dispatch);
 1166 56 : __ Move(
 1167 : kInterpreterDispatchTableRegister,
 1168 56 : ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
 1169 112 : __ movzxbq(r11, Operand(kInterpreterBytecodeArrayRegister,
 1170 56 : kInterpreterBytecodeOffsetRegister, times_1, 0));
 1171 112 : __ movq(kJavaScriptCallCodeStartRegister,
 1172 : Operand(kInterpreterDispatchTableRegister, r11,
 1173 56 : times_system_pointer_size, 0));
 1174 56 : __ call(kJavaScriptCallCodeStartRegister);
     : // Record where handlers return to, so the deoptimizer / profiler can
     : // recognize re-entry into this trampoline.
 1175 56 : masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
 1176 :
 1177 : // Any returns to the entry trampoline are either due to the return bytecode
 1178 : // or the interpreter tail calling a builtin and then a dispatch.
 1179 :
 1180 : // Get bytecode array and bytecode offset from the stack frame.
 1181 112 : __ movq(kInterpreterBytecodeArrayRegister,
 1182 56 : Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
 1183 112 : __ movq(kInterpreterBytecodeOffsetRegister,
 1184 56 : Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
 1185 56 : __ SmiUntag(kInterpreterBytecodeOffsetRegister,
 1186 56 : kInterpreterBytecodeOffsetRegister);
 1187 :
 1188 : // Either return, or advance to the next bytecode and dispatch.
 1189 56 : Label do_return;
 1190 112 : __ movzxbq(rbx, Operand(kInterpreterBytecodeArrayRegister,
 1191 56 : kInterpreterBytecodeOffsetRegister, times_1, 0));
 1192 : AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
 1193 : kInterpreterBytecodeOffsetRegister, rbx, rcx,
 1194 56 : &do_return);
 1195 56 : __ jmp(&do_dispatch);
 1196 :
 1197 56 : __ bind(&do_return);
 1198 : // The return value is in rax.
 1199 56 : LeaveInterpreterFrame(masm, rbx, rcx);
 1200 56 : __ ret(0);
 1201 :
 1202 56 : __ bind(&compile_lazy);
 1203 56 : GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
 1204 56 : __ int3(); // Should not return.
 1205 56 : }
1206 :
     : // Emits a loop that pushes |num_args| values onto the stack, reading
     : // them downwards in memory starting at |start_address|. Clobbers
     : // |scratch| (end-of-range sentinel) and |start_address| (loop cursor);
     : // |num_args| itself is preserved.
 1207 336 : static void Generate_InterpreterPushArgs(MacroAssembler* masm,
 1208 : Register num_args,
 1209 : Register start_address,
 1210 : Register scratch) {
 1211 : // Find the address of the last argument.
 1212 336 : __ Move(scratch, num_args);
 1213 336 : __ shlq(scratch, Immediate(kSystemPointerSizeLog2));
 1214 336 : __ negq(scratch);
 1215 336 : __ addq(scratch, start_address);
 1216 :
 1217 : // Push the arguments.
 1218 336 : Label loop_header, loop_check;
 1219 336 : __ j(always, &loop_check, Label::kNear);
 1220 336 : __ bind(&loop_header);
 1221 336 : __ Push(Operand(start_address, 0));
 1222 336 : __ subq(start_address, Immediate(kSystemPointerSize));
 1223 336 : __ bind(&loop_check);
 1224 336 : __ cmpq(start_address, scratch);
 1225 336 : __ j(greater, &loop_header, Label::kNear);
 1226 336 : }
1227 :
 1228 : // static
     : // Pushes the interpreter-supplied arguments (and, depending on
     : // |receiver_mode|, an implicit undefined receiver) onto the machine
     : // stack, then tail-calls the appropriate Call builtin. With
     : // kWithFinalSpread the last pushed argument is popped back into rbx as
     : // the spread.
 1229 168 : void Builtins::Generate_InterpreterPushArgsThenCallImpl(
 1230 : MacroAssembler* masm, ConvertReceiverMode receiver_mode,
 1231 : InterpreterPushArgsMode mode) {
 1232 : DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
 1233 : // ----------- S t a t e -------------
 1234 : // -- rax : the number of arguments (not including the receiver)
 1235 : // -- rbx : the address of the first argument to be pushed. Subsequent
 1236 : // arguments should be consecutive above this, in the same order as
 1237 : // they are to be pushed onto the stack.
 1238 : // -- rdi : the target to call (can be any Object).
 1239 : // -----------------------------------
 1240 168 : Label stack_overflow;
 1241 :
 1242 : // Number of values to be pushed.
 1243 168 : __ leal(rcx, Operand(rax, 1)); // Add one for receiver.
 1244 :
 1245 : // Add a stack check before pushing arguments.
 1246 168 : Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);
 1247 :
 1248 : // Pop return address to allow tail-call after pushing arguments.
 1249 168 : __ PopReturnAddressTo(kScratchRegister);
 1250 :
 1251 : // Push "undefined" as the receiver arg if we need to.
 1252 168 : if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
 1253 56 : __ PushRoot(RootIndex::kUndefinedValue);
 1254 56 : __ decl(rcx); // Subtract one for receiver.
 1255 : }
 1256 :
 1257 : // rbx and rdx will be modified.
 1258 168 : Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);
 1259 :
 1260 168 : if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
 1261 56 : __ Pop(rbx); // Pass the spread in a register
 1262 56 : __ decl(rax); // Subtract one for spread
 1263 : }
 1264 :
 1265 : // Call the target.
 1266 168 : __ PushReturnAddressFrom(kScratchRegister); // Re-push return address.
 1267 :
 1268 168 : if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
 1269 56 : __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
 1270 56 : RelocInfo::CODE_TARGET);
 1271 : } else {
 1272 112 : __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
 1273 112 : RelocInfo::CODE_TARGET);
 1274 : }
 1275 :
 1276 : // Throw stack overflow exception.
 1277 168 : __ bind(&stack_overflow);
 1278 : {
 1279 168 : __ TailCallRuntime(Runtime::kThrowStackOverflow);
 1280 : // This should be unreachable.
 1281 168 : __ int3();
 1282 : }
 1283 168 : }
1284 :
 1285 : // static
     : // Pushes the interpreter-supplied arguments plus a zero-filled receiver
     : // slot, then tail-calls the Construct path selected by |mode|
     : // (ArrayConstructorImpl, ConstructWithSpread, or generic Construct).
 1286 168 : void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
 1287 : MacroAssembler* masm, InterpreterPushArgsMode mode) {
 1288 : // ----------- S t a t e -------------
 1289 : // -- rax : the number of arguments (not including the receiver)
 1290 : // -- rdx : the new target (either the same as the constructor or
 1291 : // the JSFunction on which new was invoked initially)
 1292 : // -- rdi : the constructor to call (can be any Object)
 1293 : // -- rbx : the allocation site feedback if available, undefined otherwise
 1294 : // -- rcx : the address of the first argument to be pushed. Subsequent
 1295 : // arguments should be consecutive above this, in the same order as
 1296 : // they are to be pushed onto the stack.
 1297 : // -----------------------------------
 1298 168 : Label stack_overflow;
 1299 :
 1300 : // Add a stack check before pushing arguments.
 1301 168 : Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);
 1302 :
 1303 : // Pop return address to allow tail-call after pushing arguments.
 1304 168 : __ PopReturnAddressTo(kScratchRegister);
 1305 :
 1306 : // Push slot for the receiver to be constructed.
 1307 168 : __ Push(Immediate(0));
 1308 :
 1309 : // rcx and r8 will be modified.
 1310 168 : Generate_InterpreterPushArgs(masm, rax, rcx, r8);
 1311 :
 1312 168 : if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
 1313 56 : __ Pop(rbx); // Pass the spread in a register
 1314 56 : __ decl(rax); // Subtract one for spread
 1315 :
 1316 : // Push return address in preparation for the tail-call.
 1317 56 : __ PushReturnAddressFrom(kScratchRegister);
 1318 : } else {
 1319 112 : __ PushReturnAddressFrom(kScratchRegister);
 1320 112 : __ AssertUndefinedOrAllocationSite(rbx);
 1321 : }
 1322 :
 1323 168 : if (mode == InterpreterPushArgsMode::kArrayFunction) {
 1324 : // Tail call to the array construct stub (still in the caller
 1325 : // context at this point).
 1326 56 : __ AssertFunction(rdi);
 1327 : // Jump to the constructor function (rax, rbx, rdx passed on).
 1328 56 : Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
 1329 56 : __ Jump(code, RelocInfo::CODE_TARGET);
 1330 112 : } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
 1331 : // Call the constructor (rax, rdx, rdi passed on).
 1332 56 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
 1333 56 : RelocInfo::CODE_TARGET);
 1334 : } else {
 1335 : DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
 1336 : // Call the constructor (rax, rdx, rdi passed on).
 1337 56 : __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
 1338 : }
 1339 :
 1340 : // Throw stack overflow exception.
 1341 168 : __ bind(&stack_overflow);
 1342 : {
 1343 168 : __ TailCallRuntime(Runtime::kThrowStackOverflow);
 1344 : // This should be unreachable.
 1345 168 : __ int3();
 1346 : }
 1347 168 : }
1348 :
     : // Pushes the interpreter entry trampoline's return address (either the
     : // global trampoline or a per-function InterpreterData copy used for
     : // profiling) and dispatches to the bytecode handler for the offset
     : // currently stored in the interpreter frame.
 1349 112 : static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
 1350 : // Set the return address to the correct point in the interpreter entry
 1351 : // trampoline.
 1352 112 : Label builtin_trampoline, trampoline_loaded;
 1353 : Smi interpreter_entry_return_pc_offset(
 1354 112 : masm->isolate()->heap()->interpreter_entry_return_pc_offset());
 1355 : DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
 1356 :
 1357 : // If the SFI function_data is an InterpreterData, the function will have a
 1358 : // custom copy of the interpreter entry trampoline for profiling. If so,
 1359 : // get the custom trampoline, otherwise grab the entry address of the global
 1360 : // trampoline.
 1361 112 : __ movq(rbx, Operand(rbp, StandardFrameConstants::kFunctionOffset));
 1362 112 : __ LoadTaggedPointerField(
 1363 112 : rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
 1364 112 : __ LoadTaggedPointerField(
 1365 112 : rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));
 1366 112 : __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
 1367 112 : __ j(not_equal, &builtin_trampoline, Label::kNear);
 1368 :
 1369 112 : __ movq(rbx,
 1370 112 : FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
 1371 112 : __ addq(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
 1372 112 : __ jmp(&trampoline_loaded, Label::kNear);
 1373 :
 1374 112 : __ bind(&builtin_trampoline);
 1375 : // TODO(jgruber): Replace this by a lookup in the builtin entry table.
 1376 112 : __ movq(rbx,
 1377 : __ ExternalReferenceAsOperand(
 1378 : ExternalReference::
 1379 : address_of_interpreter_entry_trampoline_instruction_start(
 1380 : masm->isolate()),
 1381 112 : kScratchRegister));
 1382 :
 1383 112 : __ bind(&trampoline_loaded);
 1384 112 : __ addq(rbx, Immediate(interpreter_entry_return_pc_offset->value()));
 1385 112 : __ Push(rbx);
 1386 :
 1387 : // Initialize dispatch table register.
 1388 112 : __ Move(
 1389 : kInterpreterDispatchTableRegister,
 1390 112 : ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
 1391 :
 1392 : // Get the bytecode array pointer from the frame.
 1393 224 : __ movq(kInterpreterBytecodeArrayRegister,
 1394 112 : Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
 1395 :
 1396 112 : if (FLAG_debug_code) {
 1397 : // Check function data field is actually a BytecodeArray object.
 1398 0 : __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
 1399 : __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
 1400 0 : rbx);
 1401 0 : __ Assert(
 1402 : equal,
 1403 0 : AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
 1404 : }
 1405 :
 1406 : // Get the target bytecode offset from the frame.
 1407 224 : __ movq(kInterpreterBytecodeOffsetRegister,
 1408 112 : Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
 1409 112 : __ SmiUntag(kInterpreterBytecodeOffsetRegister,
 1410 112 : kInterpreterBytecodeOffsetRegister);
 1411 :
 1412 : // Dispatch to the target bytecode.
 1413 224 : __ movzxbq(r11, Operand(kInterpreterBytecodeArrayRegister,
 1414 112 : kInterpreterBytecodeOffsetRegister, times_1, 0));
 1415 224 : __ movq(kJavaScriptCallCodeStartRegister,
 1416 : Operand(kInterpreterDispatchTableRegister, r11,
 1417 112 : times_system_pointer_size, 0));
 1418 112 : __ jmp(kJavaScriptCallCodeStartRegister);
 1419 112 : }
1420 :
     : // Advances the bytecode offset stored in the interpreter frame past the
     : // current bytecode, then re-enters dispatch at the new offset. Aborts if
     : // the current bytecode is a return bytecode (advancing past a return is
     : : // invalid here).
 1421 56 : void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
 1422 : // Get bytecode array and bytecode offset from the stack frame.
 1423 112 : __ movq(kInterpreterBytecodeArrayRegister,
 1424 56 : Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
 1425 112 : __ movq(kInterpreterBytecodeOffsetRegister,
 1426 56 : Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
 1427 56 : __ SmiUntag(kInterpreterBytecodeOffsetRegister,
 1428 56 : kInterpreterBytecodeOffsetRegister);
 1429 :
 1430 : // Load the current bytecode.
 1431 112 : __ movzxbq(rbx, Operand(kInterpreterBytecodeArrayRegister,
 1432 56 : kInterpreterBytecodeOffsetRegister, times_1, 0));
 1433 :
 1434 : // Advance to the next bytecode.
 1435 56 : Label if_return;
 1436 : AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
 1437 : kInterpreterBytecodeOffsetRegister, rbx, rcx,
 1438 56 : &if_return);
 1439 :
 1440 : // Convert new bytecode offset to a Smi and save in the stackframe.
 1441 56 : __ SmiTag(rbx, kInterpreterBytecodeOffsetRegister);
 1442 56 : __ movq(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);
 1443 :
 1444 56 : Generate_InterpreterEnterBytecode(masm);
 1445 :
 1446 : // We should never take the if_return path.
 1447 56 : __ bind(&if_return);
 1448 56 : __ Abort(AbortReason::kInvalidBytecodeAdvance);
 1449 56 : }
1450 :
     : // Dispatches directly at the bytecode offset already stored in the frame
     : // (no offset advance), by delegating to Generate_InterpreterEnterBytecode.
 1451 56 : void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
 1452 56 : Generate_InterpreterEnterBytecode(masm);
 1453 56 : }
1454 :
     : // Attempts asm.js instantiation via Runtime::kInstantiateAsmJs, passing
     : // up to three caller arguments (stdlib, foreign, heap) padded with
     : // undefined. On success the frames are unwound and the runtime result is
     : // returned; on failure (runtime returns Smi 0) the original function is
     : // re-entered through its (reset-to-CompileLazy) code object.
 1455 56 : void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
 1456 : // ----------- S t a t e -------------
 1457 : // -- rax : argument count (preserved for callee)
 1458 : // -- rdx : new target (preserved for callee)
 1459 : // -- rdi : target function (preserved for callee)
 1460 : // -----------------------------------
 1461 56 : Label failed;
 1462 : {
 1463 112 : FrameScope scope(masm, StackFrame::INTERNAL);
 1464 : // Preserve argument count for later compare.
 1465 56 : __ movq(rcx, rax);
 1466 : // Push the number of arguments to the callee.
 1467 56 : __ SmiTag(rax, rax);
 1468 56 : __ Push(rax);
 1469 : // Push a copy of the target function and the new target.
 1470 56 : __ Push(rdi);
 1471 56 : __ Push(rdx);
 1472 :
 1473 : // The function.
 1474 56 : __ Push(rdi);
 1475 : // Copy arguments from caller (stdlib, foreign, heap).
     : // The unrolled cases below emit one path per possible argument count
     : // (0..3); case j pushes the j caller args then pads to 3 with undefined.
 1476 56 : Label args_done;
 1477 280 : for (int j = 0; j < 4; ++j) {
 1478 224 : Label over;
 1479 224 : if (j < 3) {
 1480 168 : __ cmpq(rcx, Immediate(j));
 1481 168 : __ j(not_equal, &over, Label::kNear);
 1482 : }
 1483 560 : for (int i = j - 1; i >= 0; --i) {
 1484 672 : __ Push(Operand(rbp, StandardFrameConstants::kCallerSPOffset +
 1485 336 : i * kSystemPointerSize));
 1486 : }
 1487 560 : for (int i = 0; i < 3 - j; ++i) {
 1488 336 : __ PushRoot(RootIndex::kUndefinedValue);
 1489 : }
 1490 224 : if (j < 3) {
 1491 168 : __ jmp(&args_done, Label::kNear);
 1492 168 : __ bind(&over);
 1493 : }
 1494 : }
 1495 56 : __ bind(&args_done);
 1496 :
 1497 : // Call runtime, on success unwind frame, and parent frame.
 1498 56 : __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
 1499 : // A smi 0 is returned on failure, an object on success.
 1500 56 : __ JumpIfSmi(rax, &failed, Label::kNear);
 1501 :
 1502 56 : __ Drop(2);
 1503 56 : __ Pop(rcx);
 1504 56 : __ SmiUntag(rcx, rcx);
 1505 56 : scope.GenerateLeaveFrame();
 1506 :
     : // Drop the caller's arguments (rcx args + receiver) before returning.
 1507 56 : __ PopReturnAddressTo(rbx);
 1508 56 : __ incq(rcx);
 1509 56 : __ leaq(rsp, Operand(rsp, rcx, times_system_pointer_size, 0));
 1510 56 : __ PushReturnAddressFrom(rbx);
 1511 56 : __ ret(0);
 1512 :
 1513 56 : __ bind(&failed);
 1514 : // Restore target function and new target.
 1515 56 : __ Pop(rdx);
 1516 56 : __ Pop(rdi);
 1517 56 : __ Pop(rax);
 1518 56 : __ SmiUntag(rax, rax);
 1519 : }
 1520 : // On failure, tail call back to regular js by re-calling the function
 1521 : // which has be reset to the compile lazy builtin.
 1522 56 : __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
 1523 56 : __ JumpCodeObject(rcx);
 1524 56 : }
1525 :
      1526             : namespace {
                       : // Continuation trampoline entered after a lazy deoptimization into a
                       : // builtin: the deoptimizer materialized a BuiltinContinuationFrame with
                       : // all allocatable general registers spilled on the stack. This restores
                       : // those registers and returns into the builtin whose Code object was
                       : // placed in the frame's PC slot.
      1527         224 : void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
      1528             :                                       bool java_script_builtin,
      1529             :                                       bool with_result) {
      1530         224 :   const RegisterConfiguration* config(RegisterConfiguration::Default());
      1531         224 :   int allocatable_register_count = config->num_allocatable_general_registers();
      1532         224 :   if (with_result) {
      1533             :     // Overwrite the hole inserted by the deoptimizer with the return value from
      1534             :     // the LAZY deopt point.
      1535         224 :     __ movq(
      1536         112 :         Operand(rsp, config->num_allocatable_general_registers() *
      1537             :                              kSystemPointerSize +
      1538             :                          BuiltinContinuationFrameConstants::kFixedFrameSize),
      1539         112 :         rax);
      1540             :   }
                       :   // Pop the spilled registers back, iterating in reverse allocation order
                       :   // (the inverse of the order in which they were pushed).
      1541        2912 :   for (int i = allocatable_register_count - 1; i >= 0; --i) {
      1542        2688 :     int code = config->GetAllocatableGeneralCode(i);
      1543        2688 :     __ popq(Register::from_code(code));
      1544        2688 :     if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
                       :       // The argument count was spilled as a Smi; JS builtins expect it raw.
      1545         112 :       __ SmiUntag(Register::from_code(code), Register::from_code(code));
      1546             :     }
      1547             :   }
                       :   // Re-establish the caller's frame pointer from the continuation frame.
      1548         448 :   __ movq(
      1549             :       rbp,
      1550         224 :       Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
      1551             :   const int offsetToPC =
      1552             :       BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp -
      1553         224 :       kSystemPointerSize;
                       :   // Move the saved "PC" (a Code object pointer) down into the return-address
                       :   // slot, drop the remainder of the fixed frame, then bias the Code pointer
                       :   // past the header (untagging it at the same time) and return into it.
      1554         224 :   __ popq(Operand(rsp, offsetToPC));
      1555         224 :   __ Drop(offsetToPC / kSystemPointerSize);
      1556         224 :   __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
      1557         224 :   __ Ret();
      1558         224 : }
      1559             : }  // namespace
1560 :
      1561          56 : void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
                       :   // Non-JavaScript builtin continuation, no return value to plug in.
      1562          56 :   Generate_ContinueToBuiltinHelper(masm, false, false);
      1563          56 : }
1564 :
      1565          56 : void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
      1566             :     MacroAssembler* masm) {
                       :   // Non-JavaScript builtin continuation; rax carries the lazy-deopt result.
      1567          56 :   Generate_ContinueToBuiltinHelper(masm, false, true);
      1568          56 : }
1569 :
      1570          56 : void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
                       :   // JavaScript builtin continuation (argc register gets Smi-untagged),
                       :   // no return value to plug in.
      1571          56 :   Generate_ContinueToBuiltinHelper(masm, true, false);
      1572          56 : }
1573 :
      1574          56 : void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
      1575             :     MacroAssembler* masm) {
                       :   // JavaScript builtin continuation; rax carries the lazy-deopt result.
      1576          56 :   Generate_ContinueToBuiltinHelper(masm, true, true);
      1577          56 : }
1578 :
      1579          56 : void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
      1580             :   // Enter an internal frame.
      1581             :   {
      1582         112 :     FrameScope scope(masm, StackFrame::INTERNAL);
      1583          56 :     __ CallRuntime(Runtime::kNotifyDeoptimized);
      1584             :     // Tear down internal frame.
      1585             :   }
      1586             :
      1587             :   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
                       :   // Reload the accumulator value that was left on the stack (just above the
                       :   // return address) into rax, then pop that slot while returning.
      1588          56 :   __ movq(rax, Operand(rsp, kPCOnStackSize));
      1589          56 :   __ ret(1 * kSystemPointerSize);  // Remove rax.
      1590          56 : }
1591 :
      1592             : // static
      1593          56 : void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
      1594             :   // ----------- S t a t e -------------
      1595             :   //  -- rax     : argc
      1596             :   //  -- rsp[0]  : return address
      1597             :   //  -- rsp[8]  : argArray
      1598             :   //  -- rsp[16] : thisArg
      1599             :   //  -- rsp[24] : receiver
      1600             :   // -----------------------------------
      1601             :
      1602             :   // 1. Load receiver into rdi, argArray into rbx (if present), remove all
      1603             :   // arguments from the stack (including the receiver), and push thisArg (if
      1604             :   // present) instead.
      1605             :   {
      1606          56 :     Label no_arg_array, no_this_arg;
      1607          56 :     StackArgumentsAccessor args(rsp, rax);
                       :     // Default both thisArg (rdx) and argArray (rbx) to undefined; they are
                       :     // only overwritten below when enough arguments were actually passed.
      1608          56 :     __ LoadRoot(rdx, RootIndex::kUndefinedValue);
      1609          56 :     __ movq(rbx, rdx);
      1610          56 :     __ movq(rdi, args.GetReceiverOperand());
      1611          56 :     __ testq(rax, rax);
      1612          56 :     __ j(zero, &no_this_arg, Label::kNear);
      1613             :     {
      1614          56 :       __ movq(rdx, args.GetArgumentOperand(1));
      1615          56 :       __ cmpq(rax, Immediate(1));
      1616          56 :       __ j(equal, &no_arg_array, Label::kNear);
      1617          56 :       __ movq(rbx, args.GetArgumentOperand(2));
      1618          56 :       __ bind(&no_arg_array);
      1619             :     }
      1620          56 :     __ bind(&no_this_arg);
                       :     // Drop argc arguments plus the receiver (+kSystemPointerSize), then push
                       :     // thisArg as the sole receiver, keeping the return address on top.
      1621          56 :     __ PopReturnAddressTo(rcx);
      1622         112 :     __ leaq(rsp,
      1623          56 :             Operand(rsp, rax, times_system_pointer_size, kSystemPointerSize));
      1624          56 :     __ Push(rdx);
      1625          56 :     __ PushReturnAddressFrom(rcx);
      1626             :   }
      1627             :
      1628             :   // ----------- S t a t e -------------
      1629             :   //  -- rbx     : argArray
      1630             :   //  -- rdi     : receiver
      1631             :   //  -- rsp[0]  : return address
      1632             :   //  -- rsp[8]  : thisArg
      1633             :   // -----------------------------------
      1634             :
      1635             :   // 2. We don't need to check explicitly for callable receiver here,
      1636             :   // since that's the first thing the Call/CallWithArrayLike builtins
      1637             :   // will do.
      1638             :
      1639             :   // 3. Tail call with no arguments if argArray is null or undefined.
      1640          56 :   Label no_arguments;
      1641          56 :   __ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments, Label::kNear);
      1642          56 :   __ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
      1643             :
      1644             :   // 4a. Apply the receiver to the given argArray.
      1645          56 :   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
      1646          56 :           RelocInfo::CODE_TARGET);
      1647             :
      1648             :   // 4b. The argArray is either null or undefined, so we tail call without any
      1649             :   // arguments to the receiver. Since we did not create a frame for
      1650             :   // Function.prototype.apply() yet, we use a normal Call builtin here.
      1651          56 :   __ bind(&no_arguments);
      1652             :   {
      1653          56 :     __ Set(rax, 0);
      1654          56 :     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
      1655             :   }
      1656          56 : }
1657 :
      1658             : // static
      1659          56 : void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
      1660             :   // Stack Layout:
      1661             :   //   rsp[0]           : Return address
      1662             :   //   rsp[8]           : Argument n
      1663             :   //   rsp[16]          : Argument n-1
      1664             :   //  ...
      1665             :   //   rsp[8 * n]       : Argument 1
      1666             :   //   rsp[8 * (n + 1)] : Receiver (callable to call)
      1667             :   //
      1668             :   // rax contains the number of arguments, n, not counting the receiver.
      1669             :   //
      1670             :   // 1. Make sure we have at least one argument.
      1671             :   {
      1672          56 :     Label done;
      1673          56 :     __ testq(rax, rax);
      1674          56 :     __ j(not_zero, &done, Label::kNear);
                       :     // No arguments: push undefined as argument 1 so the shift loop below
                       :     // always has something to move into the receiver slot.
      1675          56 :     __ PopReturnAddressTo(rbx);
      1676          56 :     __ PushRoot(RootIndex::kUndefinedValue);
      1677          56 :     __ PushReturnAddressFrom(rbx);
      1678          56 :     __ incq(rax);
      1679          56 :     __ bind(&done);
      1680             :   }
      1681             :
      1682             :   // 2. Get the callable to call (passed as receiver) from the stack.
      1683             :   {
      1684          56 :     StackArgumentsAccessor args(rsp, rax);
      1685          56 :     __ movq(rdi, args.GetReceiverOperand());
      1686             :   }
      1687             :
      1688             :   // 3. Shift arguments and return address one slot down on the stack
      1689             :   //    (overwriting the original receiver).  Adjust argument count to make
      1690             :   //    the original first argument the new receiver.
      1691             :   {
      1692          56 :     Label loop;
                       :     // Walk from the first argument toward the last, copying each one into the
                       :     // slot above it; step 1 guarantees rcx >= 1 so the loop body runs at
                       :     // least once before the decq/jnz test.
      1693          56 :     __ movq(rcx, rax);
      1694          56 :     StackArgumentsAccessor args(rsp, rcx);
      1695          56 :     __ bind(&loop);
      1696          56 :     __ movq(rbx, args.GetArgumentOperand(1));
      1697          56 :     __ movq(args.GetArgumentOperand(0), rbx);
      1698          56 :     __ decq(rcx);
      1699          56 :     __ j(not_zero, &loop);              // While non-zero.
      1700          56 :     __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
      1701          56 :     __ decq(rax);  // One fewer argument (first argument is new receiver).
      1702             :   }
      1703             :
      1704             :   // 4. Call the callable.
      1705             :   // Since we did not create a frame for Function.prototype.call() yet,
      1706             :   // we use a normal Call builtin here.
      1707          56 :   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
      1708          56 : }
1709 :
      1710          56 : void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
      1711             :   // ----------- S t a t e -------------
      1712             :   //  -- rax     : argc
      1713             :   //  -- rsp[0]  : return address
      1714             :   //  -- rsp[8]  : argumentsList
      1715             :   //  -- rsp[16] : thisArgument
      1716             :   //  -- rsp[24] : target
      1717             :   //  -- rsp[32] : receiver
      1718             :   // -----------------------------------
      1719             :
      1720             :   // 1. Load target into rdi (if present), argumentsList into rbx (if present),
      1721             :   // remove all arguments from the stack (including the receiver), and push
      1722             :   // thisArgument (if present) instead.
      1723             :   {
      1724          56 :     Label done;
      1725          56 :     StackArgumentsAccessor args(rsp, rax);
                       :     // Default target, thisArgument, and argumentsList to undefined.
      1726          56 :     __ LoadRoot(rdi, RootIndex::kUndefinedValue);
      1727          56 :     __ movq(rdx, rdi);
      1728          56 :     __ movq(rbx, rdi);
      1729          56 :     __ cmpq(rax, Immediate(1));
      1730          56 :     __ j(below, &done, Label::kNear);
      1731          56 :     __ movq(rdi, args.GetArgumentOperand(1));  // target
                       :     // movq does not modify flags, so 'equal' still tests argc == 1 from the
                       :     // cmpq above.
      1732          56 :     __ j(equal, &done, Label::kNear);
      1733          56 :     __ movq(rdx, args.GetArgumentOperand(2));  // thisArgument
      1734          56 :     __ cmpq(rax, Immediate(3));
      1735          56 :     __ j(below, &done, Label::kNear);
      1736          56 :     __ movq(rbx, args.GetArgumentOperand(3));  // argumentsList
      1737          56 :     __ bind(&done);
                       :     // Drop argc arguments plus the receiver and push thisArgument as the new
                       :     // receiver, preserving the return address in rcx.
      1738          56 :     __ PopReturnAddressTo(rcx);
      1739         112 :     __ leaq(rsp,
      1740          56 :             Operand(rsp, rax, times_system_pointer_size, kSystemPointerSize));
      1741          56 :     __ Push(rdx);
      1742          56 :     __ PushReturnAddressFrom(rcx);
      1743             :   }
      1744             :
      1745             :   // ----------- S t a t e -------------
      1746             :   //  -- rbx     : argumentsList
      1747             :   //  -- rdi     : target
      1748             :   //  -- rsp[0]  : return address
      1749             :   //  -- rsp[8]  : thisArgument
      1750             :   // -----------------------------------
      1751             :
      1752             :   // 2. We don't need to check explicitly for callable target here,
      1753             :   // since that's the first thing the Call/CallWithArrayLike builtins
      1754             :   // will do.
      1755             :
      1756             :   // 3. Apply the target to the given argumentsList.
      1757          56 :   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
      1758          56 :           RelocInfo::CODE_TARGET);
      1759          56 : }
1760 :
      1761          56 : void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
      1762             :   // ----------- S t a t e -------------
      1763             :   //  -- rax     : argc
      1764             :   //  -- rsp[0]  : return address
      1765             :   //  -- rsp[8]  : new.target (optional)
      1766             :   //  -- rsp[16] : argumentsList
      1767             :   //  -- rsp[24] : target
      1768             :   //  -- rsp[32] : receiver
      1769             :   // -----------------------------------
      1770             :
      1771             :   // 1. Load target into rdi (if present), argumentsList into rbx (if present),
      1772             :   // new.target into rdx (if present, otherwise use target), remove all
      1773             :   // arguments from the stack (including the receiver), and push thisArgument
      1774             :   // (if present) instead.
      1775             :   {
      1776          56 :     Label done;
      1777          56 :     StackArgumentsAccessor args(rsp, rax);
                       :     // Default target, new.target, and argumentsList to undefined.
      1778          56 :     __ LoadRoot(rdi, RootIndex::kUndefinedValue);
      1779          56 :     __ movq(rdx, rdi);
      1780          56 :     __ movq(rbx, rdi);
      1781          56 :     __ cmpq(rax, Immediate(1));
      1782          56 :     __ j(below, &done, Label::kNear);
      1783          56 :     __ movq(rdi, args.GetArgumentOperand(1));  // target
      1784          56 :     __ movq(rdx, rdi);  // new.target defaults to target
                       :     // movq does not modify flags, so 'equal' still tests argc == 1 from the
                       :     // cmpq above.
      1785          56 :     __ j(equal, &done, Label::kNear);
      1786          56 :     __ movq(rbx, args.GetArgumentOperand(2));  // argumentsList
      1787          56 :     __ cmpq(rax, Immediate(3));
      1788          56 :     __ j(below, &done, Label::kNear);
      1789          56 :     __ movq(rdx, args.GetArgumentOperand(3));  // new.target
      1790          56 :     __ bind(&done);
                       :     // Drop argc arguments plus the receiver; constructors get an undefined
                       :     // receiver, so push undefined rather than a thisArgument.
      1791          56 :     __ PopReturnAddressTo(rcx);
      1792         112 :     __ leaq(rsp,
      1793          56 :             Operand(rsp, rax, times_system_pointer_size, kSystemPointerSize));
      1794          56 :     __ PushRoot(RootIndex::kUndefinedValue);
      1795          56 :     __ PushReturnAddressFrom(rcx);
      1796             :   }
      1797             :
      1798             :   // ----------- S t a t e -------------
      1799             :   //  -- rbx     : argumentsList
      1800             :   //  -- rdx     : new.target
      1801             :   //  -- rdi     : target
      1802             :   //  -- rsp[0]  : return address
      1803             :   //  -- rsp[8]  : receiver (undefined)
      1804             :   // -----------------------------------
      1805             :
      1806             :   // 2. We don't need to check explicitly for constructor target here,
      1807             :   // since that's the first thing the Construct/ConstructWithArrayLike
      1808             :   // builtins will do.
      1809             :
      1810             :   // 3. We don't need to check explicitly for constructor new.target here,
      1811             :   // since that's the second thing the Construct/ConstructWithArrayLike
      1812             :   // builtins will do.
      1813             :
      1814             :   // 4. Construct the target with the given new.target and argumentsList.
      1815          56 :   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
      1816          56 :           RelocInfo::CODE_TARGET);
      1817          56 : }
1818 :
      1819          56 : void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
      1820             :   // ----------- S t a t e -------------
      1821             :   //  -- rax    : argc
      1822             :   //  -- rsp[0] : return address
      1823             :   //  -- rsp[8] : last argument
      1824             :   // -----------------------------------
                       :   // NOTE(review): rdi is assumed to hold the InternalArray constructor
                       :   // function here — the debug check below reads its initial map from rdi.
      1825             :
      1826          56 :   if (FLAG_debug_code) {
      1827             :     // Initial map for the builtin InternalArray functions should be maps.
      1828           0 :     __ LoadTaggedPointerField(
      1829           0 :         rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      1830             :     // Will both indicate a nullptr and a Smi.
      1831             :     STATIC_ASSERT(kSmiTag == 0);
      1832           0 :     Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
      1833           0 :     __ Check(not_smi,
      1834           0 :              AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
      1835           0 :     __ CmpObjectType(rbx, MAP_TYPE, rcx);
      1836           0 :     __ Check(equal, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
      1837             :   }
      1838             :
      1839             :   // Run the native code for the InternalArray function called as a normal
      1840             :   // function.
      1841          56 :   __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
      1842          56 :           RelocInfo::CODE_TARGET);
      1843          56 : }
1844 :
                       : // Builds an arguments adaptor frame: saved rbp, frame-type marker, the
                       : // function, the (Smi-tagged) actual argument count, and a padding slot.
      1845          56 : static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
      1846          56 :   __ pushq(rbp);
      1847          56 :   __ movq(rbp, rsp);
      1848             :
      1849             :   // Store the arguments adaptor context sentinel.
      1850          56 :   __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
      1851             :
      1852             :   // Push the function on the stack.
      1853          56 :   __ Push(rdi);
      1854             :
      1855             :   // Preserve the number of arguments on the stack. Must preserve rax,
      1856             :   // rbx and rcx because these registers are used when copying the
      1857             :   // arguments and the receiver.
      1858          56 :   __ SmiTag(r8, rax);
      1859          56 :   __ Push(r8);
      1860             :
                       :   // NOTE(review): extra slot presumably keeps the frame layout/alignment
                       :   // consistent with ArgumentsAdaptorFrameConstants — confirm there.
      1861          56 :   __ Push(Immediate(0));  // Padding.
      1862          56 : }
1863 :
                       : // Tears down an arguments adaptor frame and removes the caller-pushed
                       : // arguments (count read back from the frame's length slot). Clobbers rbx
                       : // and rcx.
      1864          56 : static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
      1865             :   // Retrieve the number of arguments from the stack. Number is a Smi.
      1866          56 :   __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
      1867             :
      1868             :   // Leave the frame.
      1869          56 :   __ movq(rsp, rbp);
      1870          56 :   __ popq(rbp);
      1871             :
      1872             :   // Remove caller arguments from the stack.
      1873          56 :   __ PopReturnAddressTo(rcx);
                       :   // Convert the Smi count into a scaled index; the extra pointer-size in the
                       :   // displacement also drops the receiver slot.
      1874          56 :   SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
      1875          56 :   __ leaq(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
      1876          56 :   __ PushReturnAddressFrom(rcx);
      1877          56 : }
1878 :
      1879          56 : void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
      1880             :   // ----------- S t a t e -------------
      1881             :   //  -- rax : actual number of arguments
      1882             :   //  -- rbx : expected number of arguments
      1883             :   //  -- rdx : new target (passed through to callee)
      1884             :   //  -- rdi : function (passed through to callee)
      1885             :   // -----------------------------------
      1886             :
                       :   // Fast-path selection: a sentinel expected-count means no adaption at all;
                       :   // functions flagged safe-to-skip get the stack massaged in place instead
                       :   // of building an adaptor frame.
      1887          56 :   Label dont_adapt_arguments, stack_overflow, skip_adapt_arguments;
      1888          56 :   __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
      1889          56 :   __ j(equal, &dont_adapt_arguments);
      1890          56 :   __ LoadTaggedPointerField(
      1891          56 :       rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      1892         112 :   __ testl(
      1893             :       FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
      1894          56 :       Immediate(SharedFunctionInfo::IsSafeToSkipArgumentsAdaptorBit::kMask));
      1895          56 :   __ j(not_zero, &skip_adapt_arguments);
      1896             :
      1897             :   // -------------------------------------------
      1898             :   // Adapt arguments.
      1899             :   // -------------------------------------------
      1900             :   {
      1901          56 :     EnterArgumentsAdaptorFrame(masm);
      1902          56 :     Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
      1903             :
      1904          56 :     Label under_application, over_application, invoke;
      1905          56 :     __ cmpq(rax, rbx);
      1906          56 :     __ j(less, &under_application, Label::kNear);
      1907             :
      1908             :     // Enough parameters: Actual >= expected.
      1909          56 :     __ bind(&over_application);
      1910             :     {
      1911             :       // Copy receiver and all expected arguments.
      1912          56 :       const int offset = StandardFrameConstants::kCallerSPOffset;
                       :       // r8 walks the caller's arguments top-down; rax starts at -1 so the
                       :       // loop copies expected-count arguments plus the receiver.
      1913          56 :       __ leaq(r8, Operand(rbp, rax, times_system_pointer_size, offset));
      1914          56 :       __ Set(rax, -1);  // account for receiver
      1915             :
      1916          56 :       Label copy;
      1917          56 :       __ bind(&copy);
      1918          56 :       __ incq(rax);
      1919          56 :       __ Push(Operand(r8, 0));
      1920          56 :       __ subq(r8, Immediate(kSystemPointerSize));
      1921          56 :       __ cmpq(rax, rbx);
      1922          56 :       __ j(less, &copy);
      1923          56 :       __ jmp(&invoke, Label::kNear);
      1924             :     }
      1925             :
      1926             :     // Too few parameters: Actual < expected.
      1927          56 :     __ bind(&under_application);
      1928             :     {
      1929             :       // Copy receiver and all actual arguments.
      1930          56 :       const int offset = StandardFrameConstants::kCallerSPOffset;
      1931          56 :       __ leaq(r9, Operand(rbp, rax, times_system_pointer_size, offset));
      1932          56 :       __ Set(r8, -1);  // account for receiver
      1933             :
      1934          56 :       Label copy;
      1935          56 :       __ bind(&copy);
      1936          56 :       __ incq(r8);
      1937          56 :       __ Push(Operand(r9, 0));
      1938          56 :       __ subq(r9, Immediate(kSystemPointerSize));
      1939          56 :       __ cmpq(r8, rax);
      1940          56 :       __ j(less, &copy);
      1941             :
                       :       // Pad up to the expected count with undefined; rax continues counting
                       :       // so it ends up equal to the expected argument count (rbx).
      1942             :       // Fill remaining expected arguments with undefined values.
      1943          56 :       Label fill;
      1944          56 :       __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
      1945          56 :       __ bind(&fill);
      1946          56 :       __ incq(rax);
      1947          56 :       __ Push(kScratchRegister);
      1948          56 :       __ cmpq(rax, rbx);
      1949          56 :       __ j(less, &fill);
      1950             :     }
      1951             :
      1952             :     // Call the entry point.
      1953          56 :     __ bind(&invoke);
      1954             :     // rax : expected number of arguments
      1955             :     // rdx : new target (passed through to callee)
      1956             :     // rdi : function (passed through to callee)
      1957             :     static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
      1958          56 :     __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
      1959          56 :     __ CallCodeObject(rcx);
      1960             :
      1961             :     // Store offset of return address for deoptimizer.
      1962          56 :     masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(
      1963          56 :         masm->pc_offset());
      1964             :
      1965             :     // Leave frame and return.
      1966          56 :     LeaveArgumentsAdaptorFrame(masm);
      1967          56 :     __ ret(0);
      1968             :   }
      1969             :
      1970             :   // -------------------------------------------
      1971             :   // Skip adapt arguments.
      1972             :   // -------------------------------------------
      1973          56 :   __ bind(&skip_adapt_arguments);
      1974             :   {
      1975             :     // The callee cannot observe the actual arguments, so it's safe to just
      1976             :     // pass the expected arguments by massaging the stack appropriately. See
      1977             :     // http://bit.ly/v8-faster-calls-with-arguments-mismatch for details.
      1978          56 :     Label under_application, over_application, invoke;
      1979          56 :     __ PopReturnAddressTo(rcx);
      1980          56 :     __ cmpq(rax, rbx);
      1981          56 :     __ j(less, &under_application, Label::kNear);
      1982             :
      1983          56 :     __ bind(&over_application);
      1984             :     {
      1985             :       // Remove superfluous parameters from the stack.
                       :       // After the xchg, rax holds the expected count (what the callee will
                       :       // see) and rbx the actual count; rbx - rax is the number of extra
                       :       // slots to pop off the top of the stack.
      1986          56 :       __ xchgq(rax, rbx);
      1987          56 :       __ subq(rbx, rax);
      1988          56 :       __ leaq(rsp, Operand(rsp, rbx, times_system_pointer_size, 0));
      1989          56 :       __ jmp(&invoke, Label::kNear);
      1990             :     }
      1991             :
      1992          56 :     __ bind(&under_application);
      1993             :     {
      1994             :       // Fill remaining expected arguments with undefined values.
      1995          56 :       Label fill;
      1996          56 :       __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
      1997          56 :       __ bind(&fill);
      1998          56 :       __ incq(rax);
      1999          56 :       __ Push(kScratchRegister);
      2000          56 :       __ cmpq(rax, rbx);
      2001          56 :       __ j(less, &fill);
      2002             :     }
      2003             :
      2004          56 :     __ bind(&invoke);
      2005          56 :     __ PushReturnAddressFrom(rcx);
      2006             :   }
                       :   // Falls through into the don't-adapt path, which tail-calls the callee.
      2007             :
      2008             :   // -------------------------------------------
      2009             :   // Don't adapt arguments.
      2010             :   // -------------------------------------------
      2011          56 :   __ bind(&dont_adapt_arguments);
      2012             :   static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
      2013          56 :   __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
      2014          56 :   __ JumpCodeObject(rcx);
      2015             :
      2016          56 :   __ bind(&stack_overflow);
      2017             :   {
      2018         112 :     FrameScope frame(masm, StackFrame::MANUAL);
      2019          56 :     __ CallRuntime(Runtime::kThrowStackOverflow);
      2020          56 :     __ int3();
      2021             :   }
      2022          56 : }
2023 :
      2024             : // static
      2025         112 : void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
      2026             :                                                Handle<Code> code) {
      2027             :   // ----------- S t a t e -------------
      2028             :   //  -- rdi    : target
      2029             :   //  -- rax    : number of parameters on the stack (not including the receiver)
      2030             :   //  -- rbx    : arguments list (a FixedArray)
      2031             :   //  -- rcx    : len (number of elements to push from args)
      2032             :   //  -- rdx    : new.target (for [[Construct]])
      2033             :   //  -- rsp[0] : return address
      2034             :   // -----------------------------------
      2035         112 :   Register scratch = r11;
      2036         112 :   Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r12 : no_reg;
      2037             :
      2038         112 :   if (masm->emit_debug_code()) {
      2039             :     // Allow rbx to be a FixedArray, or a FixedDoubleArray if rcx == 0.
      2040           0 :     Label ok, fail;
      2041           0 :     __ AssertNotSmi(rbx);
      2042           0 :     Register map = r9;
      2043           0 :     __ LoadTaggedPointerField(map, FieldOperand(rbx, HeapObject::kMapOffset));
      2044           0 :     __ CmpInstanceType(map, FIXED_ARRAY_TYPE);
      2045           0 :     __ j(equal, &ok);
      2046           0 :     __ CmpInstanceType(map, FIXED_DOUBLE_ARRAY_TYPE);
      2047           0 :     __ j(not_equal, &fail);
      2048           0 :     __ cmpl(rcx, Immediate(0));
      2049           0 :     __ j(equal, &ok);
      2050             :     // Fall through.
      2051           0 :     __ bind(&fail);
      2052           0 :     __ Abort(AbortReason::kOperandIsNotAFixedArray);
      2053             :
      2054           0 :     __ bind(&ok);
      2055             :   }
      2056             :
      2057         112 :   Label stack_overflow;
      2058         112 :   Generate_StackOverflowCheck(masm, rcx, r8, &stack_overflow, Label::kNear);
      2059             :
      2060             :   // Push additional arguments onto the stack.
      2061             :   {
      2062         112 :     Register value = scratch;
                       :     // r8 holds the return address across the pushes; r9 counts how many
                       :     // elements have been pushed so far.
      2063         112 :     __ PopReturnAddressTo(r8);
      2064         112 :     __ Set(r9, 0);
      2065         112 :     Label done, push, loop;
      2066         112 :     __ bind(&loop);
      2067         112 :     __ cmpl(r9, rcx);
      2068         112 :     __ j(equal, &done, Label::kNear);
      2069             :     // Turn the hole into undefined as we go.
      2070         112 :     __ LoadAnyTaggedField(
      2071             :         value,
      2072             :         FieldOperand(rbx, r9, times_tagged_size, FixedArray::kHeaderSize),
      2073         112 :         decompr_scratch);
      2074         112 :     __ CompareRoot(value, RootIndex::kTheHoleValue);
      2075         112 :     __ j(not_equal, &push, Label::kNear);
      2076         112 :     __ LoadRoot(value, RootIndex::kUndefinedValue);
      2077         112 :     __ bind(&push);
      2078         112 :     __ Push(value);
      2079         112 :     __ incl(r9);
      2080         112 :     __ jmp(&loop);
      2081         112 :     __ bind(&done);
      2082         112 :     __ PushReturnAddressFrom(r8);
                       :     // Grow the argument count by the number of elements just pushed.
      2083         112 :     __ addq(rax, r9);
      2084             :   }
      2085             :
      2086             :   // Tail-call to the actual Call or Construct builtin.
      2087         112 :   __ Jump(code, RelocInfo::CODE_TARGET);
      2088             :
      2089         112 :   __ bind(&stack_overflow);
      2090         112 :   __ TailCallRuntime(Runtime::kThrowStackOverflow);
      2091         112 : }
2092 :
      2093             : // static
      2094         224 : void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
      2095             :                                                       CallOrConstructMode mode,
      2096             :                                                       Handle<Code> code) {
      2097             :   // ----------- S t a t e -------------
      2098             :   //  -- rax : the number of arguments (not including the receiver)
      2099             :   //  -- rdx : the new target (for [[Construct]] calls)
      2100             :   //  -- rdi : the target to call (can be any Object)
      2101             :   //  -- rcx : start index (to support rest parameters)
      2102             :   // -----------------------------------
      2103             :
      2104             :   // Check if new.target has a [[Construct]] internal method.
      2105         224 :   if (mode == CallOrConstructMode::kConstruct) {
      2106         112 :     Label new_target_constructor, new_target_not_constructor;
      2107         112 :     __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
      2108         112 :     __ LoadTaggedPointerField(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
      2109         224 :     __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
      2110         112 :              Immediate(Map::IsConstructorBit::kMask));
      2111         112 :     __ j(not_zero, &new_target_constructor, Label::kNear);
      2112         112 :     __ bind(&new_target_not_constructor);
      2113             :     {
      2114         224 :       FrameScope scope(masm, StackFrame::MANUAL);
      2115         112 :       __ EnterFrame(StackFrame::INTERNAL);
      2116         112 :       __ Push(rdx);
      2117         112 :       __ CallRuntime(Runtime::kThrowNotConstructor);
      2118             :     }
      2119         112 :     __ bind(&new_target_constructor);
      2120             :   }
      2121             :
                       :   // Determine the caller frame to forward arguments from: rbx ends up as the
                       :   // frame pointer of the frame holding them, r8 as its argument count.
      2122             :   // Check if we have an arguments adaptor frame below the function frame.
      2123         224 :   Label arguments_adaptor, arguments_done;
      2124         224 :   __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
      2125         448 :   __ cmpq(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
      2126         224 :           Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
      2127         224 :   __ j(equal, &arguments_adaptor, Label::kNear);
      2128             :   {
                       :     // No adaptor frame: use the formal parameter count of the function in
                       :     // the current frame.
      2129         224 :     __ movq(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
      2130         224 :     __ LoadTaggedPointerField(
      2131         224 :         r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset));
      2132         224 :     __ movzxwq(
      2133         224 :         r8, FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
      2134         224 :     __ movq(rbx, rbp);
      2135             :   }
      2136         224 :   __ jmp(&arguments_done, Label::kNear);
      2137         224 :   __ bind(&arguments_adaptor);
      2138             :   {
                       :     // Adaptor frame: use its recorded (Smi) argument count.
      2139         448 :     __ SmiUntag(r8,
      2140         224 :                 Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
      2141             :   }
      2142         224 :   __ bind(&arguments_done);
      2143             :
      2144         224 :   Label stack_done, stack_overflow;
                       :   // r8 = number of arguments to forward, after skipping the start index.
      2145         224 :   __ subl(r8, rcx);
      2146         224 :   __ j(less_equal, &stack_done);
      2147             :   {
      2148             :     // Check for stack overflow.
      2149         224 :     Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);
      2150             :
      2151             :     // Forward the arguments from the caller frame.
      2152             :     {
      2153         224 :       Label loop;
      2154         224 :       __ addl(rax, r8);
      2155         224 :       __ PopReturnAddressTo(rcx);
      2156         224 :       __ bind(&loop);
      2157             :       {
      2158         224 :         StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
      2159         224 :         __ Push(args.GetArgumentOperand(0));
      2160         224 :         __ decl(r8);
      2161         224 :         __ j(not_zero, &loop);
      2162             :       }
      2163         224 :       __ PushReturnAddressFrom(rcx);
      2164             :     }
      2165             :   }
      2166         224 :   __ jmp(&stack_done, Label::kNear);
      2167         224 :   __ bind(&stack_overflow);
      2168         224 :   __ TailCallRuntime(Runtime::kThrowStackOverflow);
      2169         224 :   __ bind(&stack_done);
      2170             :
      2171             :   // Tail-call to the {code} handler.
      2172         224 :   __ Jump(code, RelocInfo::CODE_TARGET);
      2173         224 : }
2174 :
      2175             : // static
      2176         168 : void Builtins::Generate_CallFunction(MacroAssembler* masm,
      2177             :                                      ConvertReceiverMode mode) {
      2178             :   // ----------- S t a t e -------------
      2179             :   //  -- rax : the number of arguments (not including the receiver)
      2180             :   //  -- rdi : the function to call (checked to be a JSFunction)
      2181             :   // -----------------------------------
      2182             :
      2183         168 :   StackArgumentsAccessor args(rsp, rax);
      2184         168 :   __ AssertFunction(rdi);
      2185             :
      2186             :   // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
      2187             :   // Check that the function is not a "classConstructor".
      2188         168 :   Label class_constructor;
      2189         168 :   __ LoadTaggedPointerField(
      2190         168 :       rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      2191         336 :   __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
      2192         168 :            Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
      2193         168 :   __ j(not_zero, &class_constructor);
      2194             :
      2195             :   // ----------- S t a t e -------------
      2196             :   //  -- rax : the number of arguments (not including the receiver)
      2197             :   //  -- rdx : the shared function info.
      2198             :   //  -- rdi : the function to call (checked to be a JSFunction)
      2199             :   // -----------------------------------
      2200             :
      2201             :   // Enter the context of the function; ToObject has to run in the function
      2202             :   // context, and we also need to take the global proxy from the function
      2203             :   // context in case of conversion.
      2204         168 :   __ LoadTaggedPointerField(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      2205             :   // We need to convert the receiver for non-native sloppy mode functions.
      2206         168 :   Label done_convert;
      2207         336 :   __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
      2208             :            Immediate(SharedFunctionInfo::IsNativeBit::kMask |
      2209         168 :                      SharedFunctionInfo::IsStrictBit::kMask));
      2210         168 :   __ j(not_zero, &done_convert);
      2211             :   {
      2212             :     // ----------- S t a t e -------------
      2213             :     //  -- rax : the number of arguments (not including the receiver)
      2214             :     //  -- rdx : the shared function info.
      2215             :     //  -- rdi : the function to call (checked to be a JSFunction)
      2216             :     //  -- rsi : the function context.
      2217             :     // -----------------------------------
      2218             :
      2219         168 :     if (mode == ConvertReceiverMode::kNullOrUndefined) {
      2220             :       // Patch receiver to global proxy.
      2221          56 :       __ LoadGlobalProxy(rcx);
      2222             :     } else {
      2223         112 :       Label convert_to_object, convert_receiver;
      2224         112 :       __ movq(rcx, args.GetReceiverOperand());
      2225         112 :       __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      2226             :       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
                       :       // Receivers that are already JSReceivers need no conversion.
      2227         112 :       __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      2228         112 :       __ j(above_equal, &done_convert);
      2229         112 :       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
      2230          56 :         Label convert_global_proxy;
      2231             :         __ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
      2232          56 :                       Label::kNear);
      2233             :         __ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
      2234          56 :                          Label::kNear);
      2235          56 :         __ bind(&convert_global_proxy);
      2236             :         {
      2237             :           // Patch receiver to global proxy.
      2238          56 :           __ LoadGlobalProxy(rcx);
      2239             :         }
      2240          56 :         __ jmp(&convert_receiver);
      2241             :       }
      2242         112 :       __ bind(&convert_to_object);
      2243             :       {
      2244             :         // Convert receiver using ToObject.
      2245             :         // TODO(bmeurer): Inline the allocation here to avoid building the frame
      2246             :         // in the fast case? (fall back to AllocateInNewSpace?)
      2247         224 :         FrameScope scope(masm, StackFrame::INTERNAL);
                       :         // Save argc (as a Smi), the function, and the context across the
                       :         // ToObject call; the receiver travels in rax.
      2248         112 :         __ SmiTag(rax, rax);
      2249         112 :         __ Push(rax);
      2250         112 :         __ Push(rdi);
      2251         112 :         __ movq(rax, rcx);
      2252         112 :         __ Push(rsi);
      2253         112 :         __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
      2254         112 :                 RelocInfo::CODE_TARGET);
      2255         112 :         __ Pop(rsi);
      2256         112 :         __ movq(rcx, rax);
      2257         112 :         __ Pop(rdi);
      2258         112 :         __ Pop(rax);
      2259         112 :         __ SmiUntag(rax, rax);
      2260             :       }
                       :       // Reload the shared function info; rdx was clobbered around the call.
      2261         112 :       __ LoadTaggedPointerField(
      2262         112 :           rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      2263         112 :       __ bind(&convert_receiver);
      2264             :     }
                       :     // Store the (possibly converted) receiver back into its stack slot.
      2265         168 :     __ movq(args.GetReceiverOperand(), rcx);
      2266             :   }
      2267         168 :   __ bind(&done_convert);
      2268             :
      2269             :   // ----------- S t a t e -------------
      2270             :   //  -- rax : the number of arguments (not including the receiver)
      2271             :   //  -- rdx : the shared function info.
      2272             :   //  -- rdi : the function to call (checked to be a JSFunction)
      2273             :   //  -- rsi : the function context.
      2274             :   // -----------------------------------
      2275             :
                       :   // Invoke with the formal parameter count as the expected count;
                       :   // InvokeFunctionCode handles any actual/expected mismatch.
      2276         168 :   __ movzxwq(
      2277         168 :       rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
      2278         168 :   ParameterCount actual(rax);
      2279         168 :   ParameterCount expected(rbx);
      2280             :
      2281         168 :   __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);
      2282             :
      2283             :   // The function is a "classConstructor", need to raise an exception.
      2284         168 :   __ bind(&class_constructor);
      2285             :   {
      2286         336 :     FrameScope frame(masm, StackFrame::INTERNAL);
      2287         168 :     __ Push(rdi);
      2288         168 :     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
      2289             :   }
      2290         168 : }
2291 :
2292 : namespace {
2293 :
// Pushes the [[BoundArguments]] of the JSBoundFunction in rdi onto the stack,
// below the already-pushed call arguments, and adjusts the argument count in
// rax accordingly. Falls through without touching the stack if the bound
// argument list is empty. Clobbers rbx, rcx, r12 and kScratchRegister;
// preserves rdx (new.target) and rdi (target).
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : new.target (only in case of [[Construct]])
  //  -- rdi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  // With pointer compression enabled, decompressing tagged fields needs a
  // scratch register.
  Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r11 : no_reg;

  // Load [[BoundArguments]] into rcx and length of that into rbx.
  Label no_bound_arguments;
  __ LoadTaggedPointerField(
      rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ testl(rbx, rbx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : new.target (only in case of [[Construct]])
    //  -- rdi : target (checked to be a JSBoundFunction)
    //  -- rcx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ leaq(kScratchRegister, Operand(rbx, times_system_pointer_size, 0));
      __ subq(rsp, kScratchRegister);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(rsp, RootIndex::kRealStackLimit);
      __ j(above_equal, &done, Label::kNear);
      // Restore the stack pointer and throw; the runtime call never returns.
      __ leaq(rsp, Operand(rsp, rbx, times_system_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ incl(rax);

    // Relocate arguments and return address down the stack, into the space
    // just reserved, leaving a gap above them for the bound arguments.
    {
      Label loop;
      __ Set(rcx, 0);
      __ leaq(rbx, Operand(rsp, rbx, times_system_pointer_size, 0));
      __ bind(&loop);
      __ movq(kScratchRegister,
              Operand(rbx, rcx, times_system_pointer_size, 0));
      __ movq(Operand(rsp, rcx, times_system_pointer_size, 0),
              kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      // Reload the array and its length; rcx/rbx were clobbered above.
      __ LoadTaggedPointerField(
          rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      // Instead of doing decl(rbx) here subtract kTaggedSize from the header
      // offset in order to be able to move decl(rbx) right before the loop
      // condition. This is necessary in order to avoid flags corruption by
      // pointer decompression code.
      __ LoadAnyTaggedField(r12,
                            FieldOperand(rcx, rbx, times_tagged_size,
                                         FixedArray::kHeaderSize - kTaggedSize),
                            decompr_scratch);
      __ movq(Operand(rsp, rax, times_system_pointer_size, 0), r12);
      __ leal(rax, Operand(rax, 1));
      __ decl(rbx);
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (rax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}
2385 :
2386 : } // namespace
2387 :
2388 : // static
2389 56 : void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2390 : // ----------- S t a t e -------------
2391 : // -- rax : the number of arguments (not including the receiver)
2392 : // -- rdi : the function to call (checked to be a JSBoundFunction)
2393 : // -----------------------------------
2394 56 : __ AssertBoundFunction(rdi);
2395 :
2396 56 : Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
2397 :
2398 : // Patch the receiver to [[BoundThis]].
2399 56 : StackArgumentsAccessor args(rsp, rax);
2400 56 : __ LoadAnyTaggedField(rbx,
2401 : FieldOperand(rdi, JSBoundFunction::kBoundThisOffset),
2402 56 : decompr_scratch);
2403 56 : __ movq(args.GetReceiverOperand(), rbx);
2404 :
2405 : // Push the [[BoundArguments]] onto the stack.
2406 56 : Generate_PushBoundArguments(masm);
2407 :
2408 : // Call the [[BoundTargetFunction]] via the Call builtin.
2409 56 : __ LoadTaggedPointerField(
2410 56 : rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2411 56 : __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2412 56 : RelocInfo::CODE_TARGET);
2413 56 : }
2414 :
// static
// Implements the generic Call builtin: dispatches on the type of the callee
// in rdi to the appropriate specialized builtin (CallFunction,
// CallBoundFunction, CallProxy, or the call-as-function delegate), and throws
// TypeError if the target is not callable.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  Label non_callable;
  __ JumpIfSmi(rdi, &non_callable);
  // Plain JSFunctions go straight to CallFunction for the requested receiver
  // conversion mode. CmpObjectType leaves the map in rcx for the checks below.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, equal);

  // Bound functions have a dedicated builtin.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, equal);

  // Check if target has a [[Call]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(Map::IsCallableBit::kMask));
  __ j(zero, &non_callable, Label::kNear);

  // Check if target is a proxy and call CallProxy external builtin
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET,
          equal);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).

  // Overwrite the original receiver with the (original) target.
  __ movq(args.GetReceiverOperand(), rdi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    // Never returns; unwinds to the nearest JS handler.
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2462 :
// static
// Constructs an instance of the JSFunction in rdi: selects between the
// builtins construct stub and the generic JS construct stub based on the
// ConstructAsBuiltin bit in the shared function info.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertConstructor(rdi);
  __ AssertFunction(rdi);

  // Calling convention for function specific ConstructStubs require
  // rbx to contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, RootIndex::kUndefinedValue);

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ LoadTaggedPointerField(
      rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET, not_zero);

  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}
2488 :
// static
// Constructs via a JSBoundFunction: pushes the [[BoundArguments]], fixes up
// new.target where it aliased the bound function itself, then tail-calls the
// generic Construct builtin on the [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertConstructor(rdi);
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack (adjusts rax).
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmpq(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ LoadTaggedPointerField(
        rdx, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadTaggedPointerField(
      rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
2517 :
// static
// Implements the generic Construct builtin: dispatches on the type of the
// target in rdi to the appropriate specialized construct builtin, or throws
// if the target has no [[Construct]] internal method.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(rdi, &non_constructor);

  // Check if target has a [[Construct]] internal method. The map stays in rcx
  // for the instance-type dispatch below.
  __ LoadTaggedPointerField(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(Map::IsConstructorBit::kMask));
  __ j(zero, &non_constructor);

  // Dispatch based on instance type.
  __ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET, equal);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET, equal);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy), RelocInfo::CODE_TARGET,
          equal);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ movq(args.GetReceiverOperand(), rdi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}
2570 :
// Performs on-stack replacement (OSR) from interpreted bytecode: asks the
// runtime for optimized code and, if available, rewrites the return address
// so execution continues at the OSR entry point of the optimized code.
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movq(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ testq(rax, rax);
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop the handler frame that is sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  __ leave();

  // Load deoptimization data from the code object.
  __ LoadTaggedPointerField(rbx,
                            FieldOperand(rax, Code::kDeoptimizationDataOffset));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiUntagField(
      rbx, FieldOperand(rbx, FixedArray::OffsetOfElementAt(
                                 DeoptimizationData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leaq(rax, FieldOperand(rax, rbx, times_1, Code::kHeaderSize));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
2613 :
// Lazily compiles a wasm function: saves all wasm parameter registers, calls
// Runtime::kWasmCompileLazy for the function index found on the stack,
// restores the parameter registers, and jumps to the freshly compiled code.
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was pushed to the stack by the caller as int32.
  __ Pop(r11);
  // Convert to Smi for the runtime call.
  __ SmiTag(r11, r11);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
                      arraysize(wasm::kGpParamRegisters),
                  "frame size mismatch");
    for (Register reg : wasm::kGpParamRegisters) {
      __ Push(reg);
    }
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
                      arraysize(wasm::kFpParamRegisters),
                  "frame size mismatch");
    // FP registers are saved as full 128-bit values below the GP registers.
    __ subq(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    int offset = 0;
    for (DoubleRegister reg : wasm::kFpParamRegisters) {
      __ movdqu(Operand(rsp, offset), reg);
      offset += kSimd128Size;
    }

    // Push the WASM instance as an explicit argument to WasmCompileLazy.
    __ Push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ Push(r11);
    // Load the correct CEntry builtin from the instance object.
    __ LoadTaggedPointerField(
        rcx, FieldOperand(kWasmInstanceRegister,
                          WasmInstanceObject::kCEntryStubOffset));
    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ Move(kContextRegister, Smi::zero());
    __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, rcx);
    // The entrypoint address is the return value.
    __ movq(r11, kReturnRegister0);

    // Restore registers in reverse order of saving.
    for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
      offset -= kSimd128Size;
      __ movdqu(reg, Operand(rsp, offset));
    }
    DCHECK_EQ(0, offset);
    __ addq(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
      __ Pop(reg);
    }
  }
  // Finally, jump to the entrypoint.
  __ jmp(r11);
}
2671 :
// The C entry stub: the transition point from generated JS/builtin code into
// a C++ runtime function (pointer in rbx). Sets up an exit frame, marshals
// argc/argv into the platform C calling convention, calls the C function,
// checks the result for the exception sentinel, and either returns the value
// or unwinds to the pending exception handler.
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // rax: number of arguments including receiver
  // rbx: pointer to C function (C callee-saved)
  // rbp: frame pointer of calling JS frame (restored after C call)
  // rsp: stack pointer (restored after C call)
  // rsi: current context (restored)
  //
  // If argv_mode == kArgvInRegister:
  // r15: pointer to the first argument

#ifdef _WIN64
  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
  // stack to be aligned to 16 bytes. It only allows a single-word to be
  // returned in register rax. Larger return sizes must be written to an address
  // passed as a hidden first argument.
  const Register kCCallArg0 = rcx;
  const Register kCCallArg1 = rdx;
  const Register kCCallArg2 = r8;
  const Register kCCallArg3 = r9;
  const int kArgExtraStackSpace = 2;
  const int kMaxRegisterResultSize = 1;
#else
  // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
  // are returned in rax, and a struct of two pointers are returned in rax+rdx.
  // Larger return sizes must be written to an address passed as a hidden first
  // argument.
  const Register kCCallArg0 = rdi;
  const Register kCCallArg1 = rsi;
  const Register kCCallArg2 = rdx;
  const Register kCCallArg3 = rcx;
  const int kArgExtraStackSpace = 0;
  const int kMaxRegisterResultSize = 2;
#endif  // _WIN64

  // Enter the exit frame that transitions from JavaScript to C++.
  int arg_stack_space =
      kArgExtraStackSpace +
      (result_size <= kMaxRegisterResultSize ? 0 : result_size);
  if (argv_mode == kArgvInRegister) {
    DCHECK(save_doubles == kDontSaveFPRegs);
    DCHECK(!builtin_exit_frame);
    __ EnterApiExitFrame(arg_stack_space);
    // Move argc into r14 (argv is already in r15).
    __ movq(r14, rax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles == kSaveFPRegs,
        builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // rbx: pointer to builtin function  (C callee-saved).
  // rbp: frame pointer of exit frame  (restored after C call).
  // rsp: stack pointer (restored after C call).
  // r14: number of arguments including receiver (C callee-saved).
  // r15: argv pointer (C callee-saved).

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  // Call C function. The arguments object will be created by stubs declared by
  // DECLARE_RUNTIME_FUNCTION().
  if (result_size <= kMaxRegisterResultSize) {
    // Pass a pointer to the Arguments object as the first argument.
    // Return result in single register (rax), or a register pair (rax, rdx).
    __ movq(kCCallArg0, r14);  // argc.
    __ movq(kCCallArg1, r15);  // argv.
    __ Move(kCCallArg2, ExternalReference::isolate_address(masm->isolate()));
  } else {
    DCHECK_LE(result_size, 2);
    // Pass a pointer to the result location as the first argument.
    __ leaq(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
    // Pass a pointer to the Arguments object as the second argument.
    __ movq(kCCallArg1, r14);  // argc.
    __ movq(kCCallArg2, r15);  // argv.
    __ Move(kCCallArg3, ExternalReference::isolate_address(masm->isolate()));
  }
  __ call(rbx);

  if (result_size > kMaxRegisterResultSize) {
    // Read result values stored on stack. Result is stored
    // above the two Arguments object slots on Win64.
    DCHECK_LE(result_size, 2);
    __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
    __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
  }
  // Result is in rax or rdx:rax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(rax, RootIndex::kException);
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    __ LoadRoot(r14, RootIndex::kTheHoleValue);
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    Operand pending_exception_operand =
        masm->ExternalReferenceAsOperand(pending_exception_address);
    __ cmpq(r14, pending_exception_operand);
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set rax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ movq(arg_reg_1, Immediate(0));  // argc.
    __ movq(arg_reg_2, Immediate(0));  // argv.
    __ Move(arg_reg_3, ExternalReference::isolate_address(masm->isolate()));
    __ PrepareCallCFunction(3);
    __ CallCFunction(find_handler, 3);
  }
  // Retrieve the handler context, SP and FP.
  __ movq(rsi,
          masm->ExternalReferenceAsOperand(pending_handler_context_address));
  __ movq(rsp, masm->ExternalReferenceAsOperand(pending_handler_sp_address));
  __ movq(rbp, masm->ExternalReferenceAsOperand(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (rsi == 0) for non-JS frames.
  Label skip;
  __ testq(rsi, rsi);
  __ j(zero, &skip, Label::kNear);
  __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  __ bind(&skip);

  // Reset the masking register. This is done independent of the underlying
  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
  // with both configurations. It is safe to always do this, because the
  // underlying register is caller-saved and can be arbitrarily clobbered.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ movq(rdi,
          masm->ExternalReferenceAsOperand(pending_handler_entrypoint_address));
  __ jmp(rdi);
}
2837 :
// Converts the IEEE 754 double at rsp[kArgumentOffset] to a 32-bit integer
// (ECMAScript ToInt32 truncation semantics) and writes the result back to the
// same stack slot. All registers used are saved and restored around the
// conversion.
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label check_negative, process_64_bits, done;

  // Account for return address and saved regs.
  const int kArgumentOffset = 4 * kSystemPointerSize;

  // The low word of the double holds the mantissa bits, the high word the
  // sign/exponent and the top mantissa bits.
  MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
  MemOperand exponent_operand(
      MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));

  // The result is returned on the stack.
  MemOperand return_operand = mantissa_operand;

  Register scratch1 = rbx;

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = rax;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = rax;
  __ pushq(rcx);
  __ pushq(scratch1);
  __ pushq(save_reg);

  __ movl(scratch1, mantissa_operand);
  __ Movsd(kScratchDoubleReg, mantissa_operand);
  __ movl(rcx, exponent_operand);

  // Extract the unbiased exponent into result_reg.
  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  // Small exponents fit into cvttsd2si; handle them with the hardware
  // instruction.
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits, Label::kNear);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  // Exponents larger than 31 beyond the significand shift everything out of
  // the low 32 bits: the ToInt32 result is 0.
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done, Label::kNear);
  __ shll_cl(scratch1);
  __ jmp(&check_negative, Label::kNear);

  __ bind(&process_64_bits);
  __ Cvttsd2siq(result_reg, kScratchDoubleReg);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  // The sign lives in the high word: pick the non-negated value back if the
  // exponent word (incl. sign bit) is positive.
  __ cmpl(exponent_operand, Immediate(0));
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  __ movl(return_operand, result_reg);
  __ popq(save_reg);
  __ popq(scratch1);
  __ popq(rcx);
  __ ret(0);
}
2902 :
// Construct stub for internal (packed-elements) arrays. In debug builds it
// verifies the constructor's initial map, elements kind and argument count,
// then tail-calls the no-argument packed internal array constructor.
void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ LoadTaggedPointerField(
        rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);

    // Figure out the right elements kind
    __ LoadTaggedPointerField(
        rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

    // Load the map's "bit field 2" into |result|. We only need the first byte,
    // but the following masking takes care of that anyway.
    __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset));
    // Retrieve elements_kind from bit field 2.
    __ DecodeField<Map::ElementsKindBits>(rcx);

    // Initial elements kind should be packed elements.
    __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
    __ Assert(equal, AbortReason::kInvalidElementsKindForInternalPackedArray);

    // No arguments should be passed.
    __ testq(rax, rax);
    __ Assert(zero, AbortReason::kWrongNumberOfArgumentsForInternalPackedArray);
  }

  __ Jump(
      BUILTIN_CODE(masm->isolate(), InternalArrayNoArgumentConstructor_Packed),
      RelocInfo::CODE_TARGET);
}
2948 :
2949 : namespace {
2950 :
2951 224 : int Offset(ExternalReference ref0, ExternalReference ref1) {
2952 224 : int64_t offset = (ref0.address() - ref1.address());
2953 : // Check that fits into int.
2954 : DCHECK(static_cast<int>(offset) == offset);
2955 224 : return static_cast<int>(offset);
2956 : }
2957 :
2958 : // Calls an API function. Allocates HandleScope, extracts returned value
2959 : // from handle and propagates exceptions. Clobbers r14, r15, rbx and
2960 : // caller-save registers. Restores context. On return removes
2961 : // stack_space * kSystemPointerSize (GCed).
2962 112 : void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address,
2963 : ExternalReference thunk_ref,
2964 : Register thunk_last_arg, int stack_space,
2965 : Operand* stack_space_operand,
2966 : Operand return_value_operand) {
2967 112 : Label prologue;
2968 112 : Label promote_scheduled_exception;
2969 112 : Label delete_allocated_handles;
2970 112 : Label leave_exit_frame;
2971 :
2972 112 : Isolate* isolate = masm->isolate();
2973 112 : Factory* factory = isolate->factory();
2974 : ExternalReference next_address =
2975 112 : ExternalReference::handle_scope_next_address(isolate);
2976 112 : const int kNextOffset = 0;
2977 112 : const int kLimitOffset = Offset(
2978 112 : ExternalReference::handle_scope_limit_address(isolate), next_address);
2979 112 : const int kLevelOffset = Offset(
2980 112 : ExternalReference::handle_scope_level_address(isolate), next_address);
2981 : ExternalReference scheduled_exception_address =
2982 112 : ExternalReference::scheduled_exception_address(isolate);
2983 :
2984 : DCHECK(rdx == function_address || r8 == function_address);
2985 : // Allocate HandleScope in callee-save registers.
2986 112 : Register prev_next_address_reg = r14;
2987 112 : Register prev_limit_reg = rbx;
2988 112 : Register base_reg = r15;
2989 112 : __ Move(base_reg, next_address);
2990 112 : __ movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
2991 112 : __ movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
2992 112 : __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
2993 :
2994 112 : if (FLAG_log_timer_events) {
2995 0 : FrameScope frame(masm, StackFrame::MANUAL);
2996 0 : __ PushSafepointRegisters();
2997 0 : __ PrepareCallCFunction(1);
2998 0 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
2999 0 : __ CallCFunction(ExternalReference::log_enter_external_function(), 1);
3000 0 : __ PopSafepointRegisters();
3001 : }
3002 :
3003 112 : Label profiler_disabled;
3004 112 : Label end_profiler_check;
3005 112 : __ Move(rax, ExternalReference::is_profiling_address(isolate));
3006 112 : __ cmpb(Operand(rax, 0), Immediate(0));
3007 112 : __ j(zero, &profiler_disabled);
3008 :
3009 : // Third parameter is the address of the actual getter function.
3010 112 : __ Move(thunk_last_arg, function_address);
3011 112 : __ Move(rax, thunk_ref);
3012 112 : __ jmp(&end_profiler_check);
3013 :
3014 112 : __ bind(&profiler_disabled);
3015 : // Call the api function!
3016 112 : __ Move(rax, function_address);
3017 :
3018 112 : __ bind(&end_profiler_check);
3019 :
3020 : // Call the api function!
3021 112 : __ call(rax);
3022 :
3023 112 : if (FLAG_log_timer_events) {
3024 0 : FrameScope frame(masm, StackFrame::MANUAL);
3025 0 : __ PushSafepointRegisters();
3026 0 : __ PrepareCallCFunction(1);
3027 0 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
3028 0 : __ CallCFunction(ExternalReference::log_leave_external_function(), 1);
3029 0 : __ PopSafepointRegisters();
3030 : }
3031 :
3032 : // Load the value from ReturnValue
3033 112 : __ movq(rax, return_value_operand);
3034 112 : __ bind(&prologue);
3035 :
3036 : // No more valid handles (the result handle was the last one). Restore
3037 : // previous handle scope.
3038 112 : __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
3039 112 : __ movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
3040 112 : __ cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
3041 112 : __ j(not_equal, &delete_allocated_handles);
3042 :
3043 : // Leave the API exit frame.
3044 112 : __ bind(&leave_exit_frame);
3045 112 : if (stack_space_operand != nullptr) {
3046 : DCHECK_EQ(stack_space, 0);
3047 56 : __ movq(rbx, *stack_space_operand);
3048 : }
3049 112 : __ LeaveApiExitFrame();
3050 :
3051 : // Check if the function scheduled an exception.
3052 112 : __ Move(rdi, scheduled_exception_address);
3053 224 : __ Cmp(Operand(rdi, 0), factory->the_hole_value());
3054 112 : __ j(not_equal, &promote_scheduled_exception);
3055 :
3056 : #if DEBUG
3057 : // Check if the function returned a valid JavaScript value.
3058 : Label ok;
3059 : Register return_value = rax;
3060 : Register map = rcx;
3061 :
3062 : __ JumpIfSmi(return_value, &ok, Label::kNear);
3063 : __ LoadTaggedPointerField(map,
3064 : FieldOperand(return_value, HeapObject::kMapOffset));
3065 :
3066 : __ CmpInstanceType(map, LAST_NAME_TYPE);
3067 : __ j(below_equal, &ok, Label::kNear);
3068 :
3069 : __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
3070 : __ j(above_equal, &ok, Label::kNear);
3071 :
3072 : __ CompareRoot(map, RootIndex::kHeapNumberMap);
3073 : __ j(equal, &ok, Label::kNear);
3074 :
3075 : __ CompareRoot(return_value, RootIndex::kUndefinedValue);
3076 : __ j(equal, &ok, Label::kNear);
3077 :
3078 : __ CompareRoot(return_value, RootIndex::kTrueValue);
3079 : __ j(equal, &ok, Label::kNear);
3080 :
3081 : __ CompareRoot(return_value, RootIndex::kFalseValue);
3082 : __ j(equal, &ok, Label::kNear);
3083 :
3084 : __ CompareRoot(return_value, RootIndex::kNullValue);
3085 : __ j(equal, &ok, Label::kNear);
3086 :
3087 : __ Abort(AbortReason::kAPICallReturnedInvalidObject);
3088 :
3089 : __ bind(&ok);
3090 : #endif
3091 :
3092 112 : if (stack_space_operand == nullptr) {
3093 : DCHECK_NE(stack_space, 0);
3094 56 : __ ret(stack_space * kSystemPointerSize);
3095 : } else {
3096 : DCHECK_EQ(stack_space, 0);
3097 56 : __ PopReturnAddressTo(rcx);
3098 56 : __ addq(rsp, rbx);
3099 56 : __ jmp(rcx);
3100 : }
3101 :
3102 : // Re-throw by promoting a scheduled exception.
3103 112 : __ bind(&promote_scheduled_exception);
3104 112 : __ TailCallRuntime(Runtime::kPromoteScheduledException);
3105 :
3106 : // HandleScope limit has changed. Delete allocated extensions.
3107 112 : __ bind(&delete_allocated_handles);
3108 112 : __ movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
3109 112 : __ movq(prev_limit_reg, rax);
3110 112 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
3111 112 : __ LoadAddress(rax, ExternalReference::delete_handle_scope_extensions());
3112 112 : __ call(rax);
3113 112 : __ movq(rax, prev_limit_reg);
3114 112 : __ jmp(&leave_exit_frame);
3115 112 : }
3116 :
3117 : } // namespace
3118 :
3119 : // TODO(jgruber): Instead of explicitly setting up implicit_args_ on the stack
3120 : // in CallApiCallback, we could use the calling convention to set up the stack
3121 : // correctly in the first place.
3122 : //
3123 : // TODO(jgruber): I suspect that most of CallApiCallback could be implemented
3124 : // as a C++ trampoline, vastly simplifying the assembly implementation.
3125 :
void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
  // Trampoline from generated code into a C++ API callback
  // (v8::FunctionCallback). Materializes the FunctionCallbackInfo's
  // implicit_args array on the stack, enters an API exit frame, invokes the
  // callback (possibly via the profiling thunk), and returns, dropping the
  // JS arguments plus the receiver.
  //
  // ----------- S t a t e -------------
  //  -- rsi                 : context
  //  -- rdx                 : api function address
  //  -- rcx                 : arguments count (not including the receiver)
  //  -- rbx                 : call data
  //  -- rdi                 : holder
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register api_function_address = rdx;
  Register argc = rcx;
  Register call_data = rbx;
  Register holder = rdi;

  // kScratchRegister is clobbered below; none of the incoming value registers
  // may alias it (or each other).
  DCHECK(!AreAliased(api_function_address, argc, holder, call_data,
                     kScratchRegister));

  typedef FunctionCallbackArguments FCA;

  // The push sequence below hard-codes this exact slot layout; if any index
  // changes, the pushes must be reordered to match.
  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
  //
  // Current state:
  //   rsp[0]: return address
  //
  // Target state:
  //   rsp[0 * kSystemPointerSize]: return address
  //   rsp[1 * kSystemPointerSize]: kHolder
  //   rsp[2 * kSystemPointerSize]: kIsolate
  //   rsp[3 * kSystemPointerSize]: undefined (kReturnValueDefaultValue)
  //   rsp[4 * kSystemPointerSize]: undefined (kReturnValue)
  //   rsp[5 * kSystemPointerSize]: kData
  //   rsp[6 * kSystemPointerSize]: undefined (kNewTarget)

  // Slots are pushed from highest index to lowest so they end up at the
  // offsets shown above (the stack grows down).
  __ PopReturnAddressTo(rax);
  __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
  __ Push(kScratchRegister);  // kNewTarget (undefined)
  __ Push(call_data);         // kData
  __ Push(kScratchRegister);  // kReturnValue (undefined)
  __ Push(kScratchRegister);  // kReturnValueDefaultValue (undefined)
  __ PushAddress(ExternalReference::isolate_address(masm->isolate()));
  __ Push(holder);
  __ PushReturnAddressFrom(rax);

  // Keep a pointer to kHolder (= implicit_args) in a scratch register.
  // We use it below to set up the FunctionCallbackInfo object.
  // Note: rbx (call_data) is dead at this point and is reused as scratch.
  Register scratch = rbx;
  __ leaq(scratch, Operand(rsp, 1 * kSystemPointerSize));

  // Allocate the FunctionCallbackInfo fields plus the bytes-to-drop slot in
  // the exit frame's extra stack space, since that area is not visited by GC.
  static constexpr int kApiStackSpace = 4;
  __ EnterApiExitFrame(kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
  __ movq(StackSpaceOperand(0), scratch);

  // FunctionCallbackInfo::values_ (points at the first varargs argument passed
  // on the stack).
  __ leaq(scratch, Operand(scratch, argc, times_system_pointer_size,
                           (FCA::kArgsLength - 1) * kSystemPointerSize));
  __ movq(StackSpaceOperand(1), scratch);

  // FunctionCallbackInfo::length_.
  __ movq(StackSpaceOperand(2), argc);

  // We also store the number of bytes to drop from the stack after returning
  // from the API function here: the FCA slots, the receiver, and argc
  // arguments (argc scaled by the operand's index register).
  __ leaq(kScratchRegister,
          Operand(argc, times_system_pointer_size,
                  (FCA::kArgsLength + 1 /* receiver */) * kSystemPointerSize));
  __ movq(StackSpaceOperand(3), kScratchRegister);

  Register arguments_arg = arg_reg_1;
  Register callback_arg = arg_reg_2;

  // It's okay if api_function_address == callback_arg
  // but not arguments_arg
  DCHECK(api_function_address != arguments_arg);

  // v8::InvocationCallback's argument: address of the FunctionCallbackInfo
  // struct built in the exit frame's extra space above.
  __ leaq(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref = ExternalReference::invoke_function_callback();

  // There are two stack slots above the arguments we constructed on the stack:
  // the stored ebp (pushed by EnterApiExitFrame), and the return address.
  static constexpr int kStackSlotsAboveFCA = 2;
  Operand return_value_operand(
      rbp,
      (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kSystemPointerSize);

  // Passing 0 as stack_space tells CallApiFunctionAndReturn to read the
  // number of bytes to drop from stack_space_operand instead.
  static constexpr int kUseStackSpaceOperand = 0;
  Operand stack_space_operand = StackSpaceOperand(3);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
                           kUseStackSpaceOperand, &stack_space_operand,
                           return_value_operand);
}
3236 :
void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // Trampoline from generated code into a C++ accessor getter
  // (v8::AccessorNameGetterCallback). Builds the PropertyCallbackArguments
  // array and the property-name handle on the stack, enters an API exit
  // frame, and calls the getter via CallApiFunctionAndReturn (possibly
  // through the profiling thunk).
  //
  // C calling convention (System V AMD64 / Win64 via the arg_reg_* aliases):
  // arg 1 = Local<Name> handle, arg 2 = PropertyCallbackInfo*.
  Register name_arg = arg_reg_1;
  Register accessor_info_arg = arg_reg_2;
  Register getter_arg = arg_reg_3;
  Register api_function_address = r8;
  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = rax;
  // Extra scratch registers are only needed when decompressing tagged
  // pointers (pointer-compression builds).
  Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
  Register decompr_scratch2 = COMPRESS_POINTERS_BOOL ? r12 : no_reg;

  DCHECK(!AreAliased(receiver, holder, callback, scratch, decompr_scratch1,
                     decompr_scratch2));

  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  // The push sequence below hard-codes this exact slot layout; if any index
  // changes, the pushes must be reordered to match.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  // Insert additional parameters into the stack frame above return address.
  // Slots are pushed from highest index (kThisIndex) to lowest, then the
  // property name, then the return address goes back on top.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);  // kThisIndex
  __ PushTaggedAnyField(FieldOperand(callback, AccessorInfo::kDataOffset),
                        decompr_scratch1, decompr_scratch2);  // kDataIndex
  __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
  __ Push(kScratchRegister);  // return value
  __ Push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(masm->isolate()));
  __ Push(holder);
  __ Push(Smi::zero());  // should_throw_on_error -> false
  __ PushTaggedPointerField(FieldOperand(callback, AccessorInfo::kNameOffset),
                            decompr_scratch1);
  __ PushReturnAddressFrom(scratch);

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // Load address of v8::PropertyAccessorInfo::args_ array: skip the return
  // address and the name handle pushed above.
  __ leaq(scratch, Operand(rsp, 2 * kSystemPointerSize));

  __ EnterApiExitFrame(kArgStackSpace);

  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field (the struct's only member, so storing args_ fills it).
  Operand info_object = StackSpaceOperand(0);
  __ movq(info_object, scratch);

  // The name handle sits one slot below args_ on the stack.
  __ leaq(name_arg, Operand(scratch, -kSystemPointerSize));
  // The context register (rsi) has been saved in EnterApiExitFrame and
  // could be used to pass arguments.
  __ leaq(accessor_info_arg, info_object);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback();

  // It's okay if api_function_address == getter_arg
  // but not accessor_info_arg or name_arg
  DCHECK(api_function_address != accessor_info_arg);
  DCHECK(api_function_address != name_arg);
  // Fetch the raw C function pointer: AccessorInfo::js_getter is a Foreign
  // wrapping the getter's address.
  __ LoadTaggedPointerField(
      scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  __ movq(api_function_address,
          FieldOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      rbp,
      (PropertyCallbackArguments::kReturnValueOffset + 3) * kSystemPointerSize);
  // nullptr => drop a constant kStackUnwindSpace slots on return rather than
  // reading the amount from a stack slot.
  Operand* const kUseStackSpaceConstant = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackUnwindSpace, kUseStackSpaceConstant,
                           return_value_operand);
}
3320 :
void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  // Unused on this architecture: emit a breakpoint trap so any accidental
  // call into this builtin faults immediately instead of executing garbage.
  __ int3();
}
3324 :
3325 : #undef __
3326 :
3327 : } // namespace internal
3328 87414 : } // namespace v8
3329 :
3330 : #endif // V8_TARGET_ARCH_X64
|