Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #if V8_TARGET_ARCH_X64
6 :
7 : #include "src/api-arguments.h"
8 : #include "src/base/adapters.h"
9 : #include "src/code-factory.h"
10 : #include "src/counters.h"
11 : #include "src/deoptimizer.h"
12 : #include "src/frame-constants.h"
13 : #include "src/frames.h"
14 : // For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
15 : #include "src/heap/heap-inl.h"
16 : #include "src/macro-assembler-inl.h"
17 : #include "src/objects-inl.h"
18 : #include "src/objects/cell.h"
19 : #include "src/objects/debug-objects.h"
20 : #include "src/objects/foreign.h"
21 : #include "src/objects/heap-number.h"
22 : #include "src/objects/js-generator.h"
23 : #include "src/objects/smi.h"
24 : #include "src/register-configuration.h"
25 : #include "src/wasm/wasm-linkage.h"
26 : #include "src/wasm/wasm-objects.h"
27 :
28 : namespace v8 {
29 : namespace internal {
30 :
31 : #define __ ACCESS_MASM(masm)
32 :
33 15456 : void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
34 : ExitFrameType exit_frame_type) {
35 : __ LoadAddress(kJavaScriptCallExtraArg1Register,
36 15456 : ExternalReference::Create(address));
37 15456 : if (exit_frame_type == BUILTIN_EXIT) {
38 : __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
39 15288 : RelocInfo::CODE_TARGET);
40 : } else {
41 : DCHECK(exit_frame_type == EXIT);
42 : __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
43 168 : RelocInfo::CODE_TARGET);
44 : }
45 15456 : }
46 :
// Calls |function_id| in the runtime (passing the target function as the
// sole argument) and tail calls the Code object it returns, preserving the
// JS calling-convention registers rax/rdx/rdi across the runtime call.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
    // The runtime call returns the Code object in rax; move it to rcx so
    // rax can be restored to the argument count below.
    __ movq(rcx, rax);

    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiUntag(rax, rax);
  }
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ JumpCodeObject(rcx);
}
77 :
78 : namespace {
79 :
// Construct stub used for builtins: pushes the hole as receiver (no
// implicit receiver object is allocated), copies the arguments, invokes the
// constructor, and pops the caller's arguments on return.
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);

    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);

    // Set up pointer to last argument.
    __ leaq(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack. The loop counts
    // rcx down from argc-1 to 0, pushing arguments in reverse order.
    Label loop, entry;
    __ movq(rcx, rax);
    // ----------- S t a t e -------------
    //  -- rax: number of arguments (untagged)
    //  -- rdi: constructor function
    //  -- rdx: new target
    //  -- rbx: pointer to last argument
    //  -- rcx: counter
    //  -- sp[0*kSystemPointerSize]: the hole (receiver)
    //  -- sp[1*kSystemPointerSize]: number of arguments (tagged)
    //  -- sp[2*kSystemPointerSize]: context
    // -----------------------------------
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_system_pointer_size, 0));
    __ bind(&entry);
    __ decq(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    // rax: number of arguments (untagged)
    // rdi: constructor function
    // rdx: new target
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ movq(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return. The extra
  // kSystemPointerSize accounts for the receiver slot.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
  __ leaq(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
  __ PushReturnAddressFrom(rcx);

  __ ret(0);
}
146 :
// Jumps to |stack_overflow| if fewer than |num_args| pointer-sized slots
// remain between rsp and the real stack limit. Clobbers |scratch| and
// kScratchRegister; |num_args| is preserved.
void Generate_StackOverflowCheck(
    MacroAssembler* masm, Register num_args, Register scratch,
    Label* stack_overflow,
    Label::Distance stack_overflow_distance = Label::kFar) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
  __ movq(scratch, rsp);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ subq(scratch, kScratchRegister);
  // Convert the byte count into a slot count.
  __ sarq(scratch, Immediate(kSystemPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpq(scratch, num_args);
  // Signed comparison, so an already-overflowed (negative) scratch also
  // takes the branch.
  __ j(less_equal, stack_overflow, stack_overflow_distance);
}
165 :
166 : } // namespace
167 :
168 : // The construct stub for ES5 constructor functions and ES6 class constructors.
// Generic construct stub: allocates the implicit receiver (unless the target
// is a derived class constructor), invokes the constructor, and implements
// the ES semantics for choosing between the returned object and the
// implicit receiver.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments (untagged)
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);
    __ Push(rdi);
    __ PushRoot(RootIndex::kTheHoleValue);  // Padding slot (see layout below).
    __ Push(rdx);

    // ----------- S t a t e -------------
    //  -- sp[0*kSystemPointerSize]: new target
    //  -- sp[1*kSystemPointerSize]: padding
    //  -- rdi and sp[2*kSystemPointerSize]: constructor function
    //  -- sp[3*kSystemPointerSize]: argument count
    //  -- sp[4*kSystemPointerSize]: context
    // -----------------------------------

    // Derived class constructors must not allocate the receiver themselves;
    // test the flag on the SharedFunctionInfo.
    __ LoadTaggedPointerField(
        rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testl(FieldOperand(rbx, SharedFunctionInfo::kFlagsOffset),
             Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
    __ j(not_zero, &not_create_implicit_receiver, Label::kNear);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ jmp(&post_instantiation_deopt_entry, Label::kNear);

    // Else: use TheHoleValue as receiver for constructor call
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(rax, RootIndex::kTheHoleValue);

    // ----------- S t a t e -------------
    //  -- rax                                implicit receiver
    //  -- Slot 4 / sp[0*kSystemPointerSize]  new target
    //  -- Slot 3 / sp[1*kSystemPointerSize]  padding
    //  -- Slot 2 / sp[2*kSystemPointerSize]  constructor function
    //  -- Slot 1 / sp[3*kSystemPointerSize]  number of arguments (tagged)
    //  -- Slot 0 / sp[4*kSystemPointerSize]  context
    // -----------------------------------
    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(rdx);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rax);
    __ Push(rax);

    // ----------- S t a t e -------------
    //  -- sp[0*kSystemPointerSize]  implicit receiver
    //  -- sp[1*kSystemPointerSize]  implicit receiver
    //  -- sp[2*kSystemPointerSize]  padding
    //  -- sp[3*kSystemPointerSize]  constructor function
    //  -- sp[4*kSystemPointerSize]  number of arguments (tagged)
    //  -- sp[5*kSystemPointerSize]  context
    // -----------------------------------

    // Restore constructor function and argument count.
    __ movq(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
    __ SmiUntag(rax, Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Set up pointer to last argument.
    __ leaq(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Check if we have enough stack space to push all arguments.
    // Argument count in rax. Clobbers rcx.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
    __ jmp(&enough_stack_space, Label::kNear);

    __ bind(&stack_overflow);
    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack, counting rcx
    // down from argc-1 to 0 (arguments are pushed in reverse order).
    Label loop, entry;
    __ movq(rcx, rax);
    // ----------- S t a t e -------------
    //  -- rax: number of arguments (untagged)
    //  -- rdx: new target
    //  -- rbx: pointer to last argument
    //  -- rcx: counter (untagged, copied from rax)
    //  -- sp[0*kSystemPointerSize]: implicit receiver
    //  -- sp[1*kSystemPointerSize]: implicit receiver
    //  -- sp[2*kSystemPointerSize]: padding
    //  -- rdi and sp[3*kSystemPointerSize]: constructor function
    //  -- sp[4*kSystemPointerSize]: number of arguments (tagged)
    //  -- sp[5*kSystemPointerSize]: context
    // -----------------------------------
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_system_pointer_size, 0));
    __ bind(&entry);
    __ decq(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // ----------- S t a t e -------------
    //  -- rax                       constructor result
    //  -- sp[0*kSystemPointerSize]  implicit receiver
    //  -- sp[1*kSystemPointerSize]  padding
    //  -- sp[2*kSystemPointerSize]  constructor function
    //  -- sp[3*kSystemPointerSize]  number of arguments
    //  -- sp[4*kSystemPointerSize]  context
    // -----------------------------------

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, we jump out to using the implicit receiver.
    __ JumpIfRoot(rax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);

    // Otherwise we do a smi check and fall through to check if the return value
    // is a valid receiver.

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver, Label::kNear);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
    __ j(above_equal, &leave_frame, Label::kNear);
    __ jmp(&use_receiver, Label::kNear);

    // Reached only via the hole check below; the runtime call throws and
    // does not return here.
    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movq(rax, Operand(rsp, 0 * kSystemPointerSize));
    __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);

    __ bind(&leave_frame);
    // Restore the arguments count.
    __ movq(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return. The extra
  // kSystemPointerSize accounts for the receiver slot.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
  __ leaq(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}
352 :
// Construct stub for builtins: delegates to the helper, which pushes the
// hole as receiver instead of allocating an implicit receiver object.
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}
356 :
// Entered when something that is not a constructor was used with `new`.
// Passes the target (rdi) to the runtime, which throws; control does not
// return here normally.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
362 :
363 : namespace {
364 :
365 : // Called with the native C calling convention. The corresponding function
366 : // signature is either:
367 : // using JSEntryFunction = GeneratedCode<Address(
368 : // Address root_register_value, Address new_target, Address target,
369 : // Address receiver, intptr_t argc, Address** argv)>;
370 : // or
371 : // using JSEntryFunction = GeneratedCode<Address(
372 : // Address root_register_value, MicrotaskQueue* microtask_queue)>;
// Common body for the JSEntry builtins: sets up an entry frame (saving all
// callee-saved registers of the host ABI), installs a stack handler that
// catches any JS exception, calls |entry_trampoline|, and tears everything
// down again before returning to C++.
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
                             Builtins::Name entry_trampoline) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  {  // NOLINT. Scope block confuses linter.
    // The root register is not usable until it is initialized below.
    NoRootArrayScope uninitialized_root_register(masm);
    // Set up frame.
    __ pushq(rbp);
    __ movq(rbp, rsp);

    // Push the stack frame type.
    __ Push(Immediate(StackFrame::TypeToMarker(type)));
    // Reserve a slot for the context. It is filled after the root register has
    // been set up.
    __ subq(rsp, Immediate(kSystemPointerSize));
    // Save callee-saved registers (X64/X32/Win64 calling conventions).
    __ pushq(r12);
    __ pushq(r13);
    __ pushq(r14);
    __ pushq(r15);
#ifdef _WIN64
    __ pushq(rdi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
    __ pushq(rsi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
#endif
    __ pushq(rbx);

#ifdef _WIN64
    // On Win64 XMM6-XMM15 are callee-save.
    __ subq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
    STATIC_ASSERT(EntryFrameConstants::kCalleeSaveXMMRegisters == 10);
    STATIC_ASSERT(EntryFrameConstants::kXMMRegistersBlockSize ==
                  EntryFrameConstants::kXMMRegisterSize *
                      EntryFrameConstants::kCalleeSaveXMMRegisters);
#endif

    // Initialize the root register.
    // C calling convention. The first argument is passed in arg_reg_1.
    __ movq(kRootRegister, arg_reg_1);
  }

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp = ExternalReference::Create(
      IsolateAddressId::kCEntryFPAddress, masm->isolate());
  {
    Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
    __ Push(c_entry_fp_operand);
  }

  // Store the context address in the previously-reserved slot.
  ExternalReference context_address = ExternalReference::Create(
      IsolateAddressId::kContextAddress, masm->isolate());
  __ Load(kScratchRegister, context_address);
  static constexpr int kOffsetToContextSlot = -2 * kSystemPointerSize;
  __ movq(Operand(rbp, kOffsetToContextSlot), kScratchRegister);

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp = ExternalReference::Create(
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ Load(rax, js_entry_sp);
  __ testq(rax, rax);
  __ j(not_zero, &not_outermost_js);
  __ Push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ movq(rax, rbp);
  __ Store(js_entry_sp, rax);
  Label cont;
  __ jmp(&cont);
  __ bind(&not_outermost_js);
  __ Push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
  __ bind(&cont);

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);

  // Store the current pc as the handler offset. It's used later to create the
  // handler table.
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());

  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception = ExternalReference::Create(
      IsolateAddressId::kPendingExceptionAddress, masm->isolate());
  __ Store(pending_exception, rax);
  __ LoadRoot(rax, RootIndex::kException);
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return.
  Handle<Code> trampoline_code =
      masm->isolate()->builtins()->builtin_handle(entry_trampoline);
  __ Call(trampoline_code, RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ Pop(rbx);
  __ cmpq(rbx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  // Leaving the outermost frame: clear js_entry_sp.
  __ Move(kScratchRegister, js_entry_sp);
  __ movq(Operand(kScratchRegister, 0), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  {
    Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
    __ Pop(c_entry_fp_operand);
  }

  // Restore callee-saved registers (X64 conventions).
#ifdef _WIN64
  // On Win64 XMM6-XMM15 are callee-save
  __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
  __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
  __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
  __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
  __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
  __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
  __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
  __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
  __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
  __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
  __ addq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
#endif

  __ popq(rbx);
#ifdef _WIN64
  // Callee save in Win64 ABI, arguments/volatile in AMD64 ABI.
  __ popq(rsi);
  __ popq(rdi);
#endif
  __ popq(r15);
  __ popq(r14);
  __ popq(r13);
  __ popq(r12);
  __ addq(rsp, Immediate(2 * kSystemPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ popq(rbp);
  __ ret(0);
}
531 :
532 : } // namespace
533 :
// Entry point from C++ for ordinary JS calls: ENTRY frame + call trampoline.
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kJSEntryTrampoline);
}
538 :
// Entry point from C++ for `new` calls: CONSTRUCT_ENTRY frame + construct
// trampoline.
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtins::kJSConstructEntryTrampoline);
}
543 :
// Entry point from C++ for running the microtask queue: ENTRY frame + the
// RunMicrotasks trampoline.
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kRunMicrotasksTrampoline);
}
548 :
// Shared body of the JS entry trampolines: translates the host C++ calling
// convention into the JS calling convention, copies the argument array onto
// the stack (dereferencing each handle), and invokes either Construct or
// Call depending on |is_construct|.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects six C++ function parameters.
  // - Address root_register_value
  // - Address new_target (tagged Object pointer)
  // - Address function (tagged JSFunction pointer)
  // - Address receiver (tagged Object pointer)
  // - intptr_t argc
  // - Address** argv (pointer to array of tagged Object pointers)
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // register rax and rbx holds the argument count and argument array,
    // while rdi holds the function pointer, rsi the context, and rdx the
    // new.target.

    // MSVC parameters in:
    // rcx        : root_register_value
    // rdx        : new_target
    // r8         : function
    // r9         : receiver
    // [rsp+0x20] : argc
    // [rsp+0x28] : argv
    //
    // GCC parameters in:
    // rdi : root_register_value
    // rsi : new_target
    // rdx : function
    // rcx : receiver
    // r8  : argc
    // r9  : argv

    __ movq(rdi, arg_reg_3);
    __ Move(rdx, arg_reg_2);
    // rdi : function
    // rdx : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ movq(rsi, masm->ExternalReferenceAsOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdi);
    __ Push(arg_reg_4);

#ifdef _WIN64
    // Load the previous frame pointer to access C arguments on stack
    __ movq(kScratchRegister, Operand(rbp, 0));
    // Load the number of arguments and setup pointer to the arguments.
    __ movq(rax, Operand(kScratchRegister, EntryFrameConstants::kArgcOffset));
    __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
#else   // _WIN64
    // Load the number of arguments and setup pointer to the arguments.
    __ movq(rax, r8);
    __ movq(rbx, r9);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kSystemPointerSize ... ] : Internal frame
    // [rsp + kSystemPointerSize]          : function
    // [rsp]                               : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Argument count in rax. Clobbers rcx.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
    __ jmp(&enough_stack_space, Label::kNear);

    __ bind(&stack_overflow);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();

    __ bind(&enough_stack_space);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movq(kScratchRegister, Operand(rbx, rcx, times_system_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addq(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpq(rcx, rax);
    __ j(not_equal, &loop, Label::kNear);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  __ ret(0);
}
667 :
// Trampoline for ordinary (non-construct) JS calls.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
671 :
// Trampoline for construct (`new`) calls.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
675 :
// Moves the C++-ABI microtask-queue argument into the register the
// RunMicrotasks call descriptor expects, then tail calls the builtin.
void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  // arg_reg_2: microtask_queue
  __ movq(RunMicrotasksDescriptor::MicrotaskQueueRegister(), arg_reg_2);
  __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
}
681 :
682 56 : static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
683 : Register sfi_data,
684 : Register scratch1) {
685 56 : Label done;
686 :
687 56 : __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
688 56 : __ j(not_equal, &done, Label::kNear);
689 :
690 : __ LoadTaggedPointerField(
691 56 : sfi_data, FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
692 :
693 56 : __ bind(&done);
694 56 : }
695 :
696 : // static
697 56 : void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
698 : // ----------- S t a t e -------------
699 : // -- rax : the value to pass to the generator
700 : // -- rdx : the JSGeneratorObject to resume
701 : // -- rsp[0] : return address
702 : // -----------------------------------
703 56 : __ AssertGeneratorObject(rdx);
704 :
705 : // Store input value into generator object.
706 : __ StoreTaggedField(
707 56 : FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
708 : __ RecordWriteField(rdx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
709 56 : kDontSaveFPRegs);
710 :
711 56 : Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
712 56 : Register decompr_scratch2 = COMPRESS_POINTERS_BOOL ? r12 : no_reg;
713 :
714 : // Load suspended function and context.
715 : __ LoadTaggedPointerField(
716 56 : rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
717 56 : __ LoadTaggedPointerField(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
718 :
719 : // Flood function if we are stepping.
720 56 : Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
721 56 : Label stepping_prepared;
722 : ExternalReference debug_hook =
723 56 : ExternalReference::debug_hook_on_function_call_address(masm->isolate());
724 56 : Operand debug_hook_operand = masm->ExternalReferenceAsOperand(debug_hook);
725 56 : __ cmpb(debug_hook_operand, Immediate(0));
726 56 : __ j(not_equal, &prepare_step_in_if_stepping);
727 :
728 : // Flood function if we need to continue stepping in the suspended generator.
729 : ExternalReference debug_suspended_generator =
730 56 : ExternalReference::debug_suspended_generator_address(masm->isolate());
731 : Operand debug_suspended_generator_operand =
732 56 : masm->ExternalReferenceAsOperand(debug_suspended_generator);
733 56 : __ cmpq(rdx, debug_suspended_generator_operand);
734 56 : __ j(equal, &prepare_step_in_suspended_generator);
735 56 : __ bind(&stepping_prepared);
736 :
737 : // Check the stack for overflow. We are not trying to catch interruptions
738 : // (i.e. debug break and preemption) here, so check the "real stack limit".
739 56 : Label stack_overflow;
740 56 : __ CompareRoot(rsp, RootIndex::kRealStackLimit);
741 56 : __ j(below, &stack_overflow);
742 :
743 : // Pop return address.
744 56 : __ PopReturnAddressTo(rax);
745 :
746 : // Push receiver.
747 : __ PushTaggedPointerField(
748 56 : FieldOperand(rdx, JSGeneratorObject::kReceiverOffset), decompr_scratch1);
749 :
750 : // ----------- S t a t e -------------
751 : // -- rax : return address
752 : // -- rdx : the JSGeneratorObject to resume
753 : // -- rdi : generator function
754 : // -- rsi : generator context
755 : // -- rsp[0] : generator receiver
756 : // -----------------------------------
757 :
758 : // Copy the function arguments from the generator object's register file.
759 : __ LoadTaggedPointerField(
760 56 : rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
761 : __ movzxwq(
762 56 : rcx, FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));
763 :
764 : __ LoadTaggedPointerField(
765 56 : rbx, FieldOperand(rdx, JSGeneratorObject::kParametersAndRegistersOffset));
766 :
767 : {
768 56 : Label done_loop, loop;
769 56 : __ Set(r9, 0);
770 :
771 56 : __ bind(&loop);
772 56 : __ cmpl(r9, rcx);
773 56 : __ j(greater_equal, &done_loop, Label::kNear);
774 : __ PushTaggedAnyField(
775 : FieldOperand(rbx, r9, times_tagged_size, FixedArray::kHeaderSize),
776 56 : decompr_scratch1, decompr_scratch2);
777 56 : __ addl(r9, Immediate(1));
778 56 : __ jmp(&loop);
779 :
780 56 : __ bind(&done_loop);
781 : }
782 :
783 : // Underlying function needs to have bytecode available.
784 56 : if (FLAG_debug_code) {
785 : __ LoadTaggedPointerField(
786 0 : rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
787 : __ LoadTaggedPointerField(
788 0 : rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
789 0 : GetSharedFunctionInfoBytecode(masm, rcx, kScratchRegister);
790 0 : __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
791 0 : __ Assert(equal, AbortReason::kMissingBytecodeArray);
792 : }
793 :
794 : // Resume (Ignition/TurboFan) generator object.
795 : {
796 56 : __ PushReturnAddressFrom(rax);
797 : __ LoadTaggedPointerField(
798 56 : rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
799 : __ movzxwq(rax, FieldOperand(
800 56 : rax, SharedFunctionInfo::kFormalParameterCountOffset));
801 : // We abuse new.target both to indicate that this is a resume call and to
802 : // pass in the generator object. In ordinary calls, new.target is always
803 : // undefined because generator functions are non-constructable.
804 : static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
805 56 : __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
806 56 : __ JumpCodeObject(rcx);
807 : }
808 :
809 56 : __ bind(&prepare_step_in_if_stepping);
810 : {
811 56 : FrameScope scope(masm, StackFrame::INTERNAL);
812 56 : __ Push(rdx);
813 56 : __ Push(rdi);
814 : // Push hole as receiver since we do not use it for stepping.
815 56 : __ PushRoot(RootIndex::kTheHoleValue);
816 56 : __ CallRuntime(Runtime::kDebugOnFunctionCall);
817 56 : __ Pop(rdx);
818 : __ LoadTaggedPointerField(
819 56 : rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
820 : }
821 56 : __ jmp(&stepping_prepared);
822 :
823 56 : __ bind(&prepare_step_in_suspended_generator);
824 : {
825 56 : FrameScope scope(masm, StackFrame::INTERNAL);
826 56 : __ Push(rdx);
827 56 : __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
828 56 : __ Pop(rdx);
829 : __ LoadTaggedPointerField(
830 56 : rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
831 : }
832 56 : __ jmp(&stepping_prepared);
833 :
834 56 : __ bind(&stack_overflow);
835 : {
836 56 : FrameScope scope(masm, StackFrame::INTERNAL);
837 56 : __ CallRuntime(Runtime::kThrowStackOverflow);
838 56 : __ int3(); // This should be unreachable.
839 : }
840 56 : }
841 :
// TODO(juliana): if we remove the code below then we don't need all
// the parameters.
//
// Installs |optimized_code| as the code of |closure|. Because the closure may
// live in old space while the code object is young, the store must be
// followed by a write barrier; |scratch1| and |scratch2| are clobbered by it.
// NOTE(review): |scratch3| is currently unused here — presumably kept for the
// TODO above; confirm before removing.
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {

  // Store the optimized code in the closure.
  __ StoreTaggedField(FieldOperand(closure, JSFunction::kCodeOffset),
                      optimized_code);
  __ movq(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
855 :
856 56 : static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
857 : Register scratch2) {
858 56 : Register args_count = scratch1;
859 56 : Register return_pc = scratch2;
860 :
861 : // Get the arguments + receiver count.
862 : __ movq(args_count,
863 56 : Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
864 : __ movl(args_count,
865 56 : FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
866 :
867 : // Leave the frame (also dropping the register file).
868 56 : __ leave();
869 :
870 : // Drop receiver + arguments.
871 56 : __ PopReturnAddressTo(return_pc);
872 56 : __ addq(rsp, args_count);
873 56 : __ PushReturnAddressFrom(return_pc);
874 56 : }
875 :
876 : // Tail-call |function_id| if |smi_entry| == |marker|
877 168 : static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
878 : Register smi_entry,
879 : OptimizationMarker marker,
880 : Runtime::FunctionId function_id) {
881 168 : Label no_match;
882 168 : __ SmiCompare(smi_entry, Smi::FromEnum(marker));
883 168 : __ j(not_equal, &no_match);
884 168 : GenerateTailCallToReturnedCode(masm, function_id);
885 168 : __ bind(&no_match);
886 168 : }
887 :
// Inspects the optimized-code slot of |feedback_vector| and acts on it:
//  - If the slot holds a Smi, it is an OptimizationMarker: kNone falls
//    through; kLogFirstExecution / kCompileOptimized /
//    kCompileOptimizedConcurrent tail-call the matching runtime function;
//    anything else (InOptimizationQueue) falls through.
//  - If the slot holds a weak reference to a Code object that is not marked
//    for deoptimization, the code is installed on the closure and
//    tail-called; deoptimized code is evicted via the runtime instead.
// Falls through (label |fallthrough|) when there is nothing to do.
static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee if needed, and caller)
  //  -- rdx : new target (preserved for callee if needed, and caller)
  //  -- rdi : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  // -----------------------------------
  DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
                     scratch3));

  Label optimized_code_slot_is_weak_ref, fallthrough;

  Register closure = rdi;
  Register optimized_code_entry = scratch1;
  // Pointer compression needs an extra scratch for decompression.
  Register decompr_scratch = COMPRESS_POINTERS_BOOL ? scratch2 : no_reg;

  __ LoadAnyTaggedField(
      optimized_code_entry,
      FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset),
      decompr_scratch);

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimisation marker. Otherwise, interpret it as a weak reference to a code
  // object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ SmiCompare(optimized_code_entry,
                  Smi::FromEnum(OptimizationMarker::kNone));
    __ j(equal, &fallthrough);

    // TODO(v8:8394): The logging of first execution will break if
    // feedback vectors are not allocated. We need to find a different way of
    // logging these events if required.
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution);
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that an interrupt will eventually update the slot with optimized code.
      if (FLAG_debug_code) {
        __ SmiCompare(optimized_code_entry,
                      Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
        __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
      }
      __ jmp(&fallthrough);
    }
  }

  {
    // Optimized code slot is a weak reference.
    __ bind(&optimized_code_slot_is_weak_ref);

    // A cleared weak reference means the code was flushed: nothing to run.
    __ LoadWeakValue(optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ LoadTaggedPointerField(
        scratch2,
        FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
    __ testl(
        FieldOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset),
        Immediate(1 << Code::kMarkedForDeoptimizationBit));
    __ j(not_zero, &found_deoptimized_code);

    // Optimized code is good, get it into the closure and link the closure into
    // the optimized functions list, then tail call the optimized code.
    // The feedback vector is no longer used, so re-use it as a scratch
    // register.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    __ Move(rcx, optimized_code_entry);
    __ JumpCodeObject(rcx);

    // Optimized code slot contains deoptimized code, evict it and re-enter the
    // closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}
988 :
// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode.
//
// On exit (non-return case), |bytecode_offset| points at the next bytecode.
// |bytecode| is left holding the current (de-prefixed) bytecode value and
// |scratch1| holds the (possibly scaled) size-table address.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Label* if_return) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));

  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  // The static asserts pin the prefix bytecode values 0..3 so the compact
  // cmpb/testb checks below stay valid: even values (0, 2) are the Wide
  // prefixes, odd values (1, 3) are the ExtraWide prefixes.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmpb(bytecode, Immediate(0x3));
  __ j(above, &process_bytecode, Label::kNear);
  __ testb(bytecode, Immediate(0x1));
  __ j(not_equal, &extra_wide, Label::kNear);

  // Load the next bytecode and update table to the wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbq(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addq(bytecode_size_table,
          Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode, Label::kNear);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbq(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addq(bytecode_size_table,
          Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

  // Bailout to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                             \
  __ cmpb(bytecode,                                                     \
          Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ j(equal, if_return, Label::kFar);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
  __ addl(bytecode_offset,
          Operand(bytecode_size_table, bytecode, times_int_size, 0));
}
1044 :
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the incoming new target or generator object
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = rdi;
  Register feedback_vector = rbx;

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ LoadTaggedPointerField(
      rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadTaggedPointerField(
      kInterpreterBytecodeArrayRegister,
      FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister,
                                kScratchRegister);

  // The bytecode array could have been flushed from the shared function info,
  // if so, call into CompileLazy.
  Label compile_lazy;
  __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE, rax);
  __ j(not_equal, &compile_lazy);

  // Load the feedback vector from the closure (via its feedback cell).
  __ LoadTaggedPointerField(
      feedback_vector, FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ LoadTaggedPointerField(feedback_vector,
                            FieldOperand(feedback_vector, Cell::kValueOffset));

  Label push_stack_frame;
  // Check if feedback vector is valid. If valid, check for optimized code
  // and update invocation count. Otherwise, setup the stack frame.
  __ JumpIfRoot(feedback_vector, RootIndex::kUndefinedValue, &push_stack_frame);

  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r11, r15);

  // Increment invocation count for the function.
  __ incl(
      FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  __ bind(&push_stack_frame);
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.

  // Reset code age.
  __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
                       BytecodeArray::kBytecodeAgeOffset),
          Immediate(BytecodeArray::kNoAgeBytecodeAge));

  // Load initial bytecode offset (the offset of the first bytecode relative
  // to the untagged BytecodeArray pointer).
  __ movq(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode offset.
  __ Push(kInterpreterBytecodeArrayRegister);
  __ SmiTag(rcx, kInterpreterBytecodeOffsetRegister);
  __ Push(rcx);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movq(rax, rsp);
    __ subq(rax, rcx);
    __ CompareRoot(rax, RootIndex::kRealStackLimit);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rax, RootIndex::kUndefinedValue);
    __ j(always, &loop_check, Label::kNear);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subq(rcx, Immediate(kSystemPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with incoming value which was passed in rdx.
  // A zero register index means "no such register".
  Label no_incoming_new_target_or_generator_register;
  __ movsxlq(
      rax,
      FieldOperand(kInterpreterBytecodeArrayRegister,
                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ testl(rax, rax);
  __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
  __ movq(Operand(rbp, rax, times_system_pointer_size, 0), rdx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ movzxbq(r11, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movq(kJavaScriptCallCodeStartRegister,
          Operand(kInterpreterDispatchTableRegister, r11,
                  times_system_pointer_size, 0));
  __ call(kJavaScriptCallCodeStartRegister);
  // Record the pc right after the handler call so returns into the trampoline
  // can be recognized (see Generate_InterpreterEnterBytecode).
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ movq(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movq(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ movzxbq(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, rbx, rcx,
                                &do_return);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in rax.
  LeaveInterpreterFrame(masm, rbx, rcx);
  __ ret(0);

  __ bind(&compile_lazy);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
  __ int3();  // Should not return.
}
1210 :
1211 336 : static void Generate_InterpreterPushArgs(MacroAssembler* masm,
1212 : Register num_args,
1213 : Register start_address,
1214 : Register scratch) {
1215 : // Find the address of the last argument.
1216 336 : __ Move(scratch, num_args);
1217 336 : __ shlq(scratch, Immediate(kSystemPointerSizeLog2));
1218 336 : __ negq(scratch);
1219 336 : __ addq(scratch, start_address);
1220 :
1221 : // Push the arguments.
1222 336 : Label loop_header, loop_check;
1223 336 : __ j(always, &loop_check, Label::kNear);
1224 336 : __ bind(&loop_header);
1225 336 : __ Push(Operand(start_address, 0));
1226 336 : __ subq(start_address, Immediate(kSystemPointerSize));
1227 336 : __ bind(&loop_check);
1228 336 : __ cmpq(start_address, scratch);
1229 336 : __ j(greater, &loop_header, Label::kNear);
1230 336 : }
1231 :
// static
//
// Pushes the interpreter's argument list onto the machine stack and
// tail-calls the appropriate Call builtin (plain Call, or CallWithSpread
// when the last argument is a spread).
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Number of values to be pushed.
  __ leal(rcx, Operand(rax, 1));  // Add one for receiver.

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
    __ decl(rcx);  // Subtract one for receiver.
  }

  // rbx and rdx will be modified.
  Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // The spread was pushed last; CallWithSpread expects it in rbx instead.
    __ Pop(rbx);   // Pass the spread in a register
    __ decl(rax);  // Subtract one for spread
  }

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
            RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}
1288 :
// static
//
// Pushes the interpreter's argument list (plus a slot for the receiver to be
// constructed) and tail-calls the appropriate construct path: the Array
// constructor stub, ConstructWithSpread, or the generic Construct builtin.
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the allocation site feedback if available, undefined otherwise
  //  -- rcx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // rcx and r8 will be modified.
  Generate_InterpreterPushArgs(masm, rax, rcx, r8);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // The spread was pushed last; ConstructWithSpread expects it in rbx.
    __ Pop(rbx);   // Pass the spread in a register
    __ decl(rax);  // Subtract one for spread

    // Push return address in preparation for the tail-call.
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    __ PushReturnAddressFrom(kScratchRegister);
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    // Tail call to the array construct stub (still in the caller
    // context at this point).
    __ AssertFunction(rdi);
    // Jump to the constructor function (rax, rbx, rdx passed on).
    Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
    __ Jump(code, RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}
1352 :
// Re-enters the interpreter at the bytecode offset stored in the current
// frame: pushes a return address that points back into the interpreter entry
// trampoline (or a per-function profiling copy of it), then dispatches to the
// handler for the target bytecode.
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the global
  // trampoline.
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kFunctionOffset));
  __ LoadTaggedPointerField(
      rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
  __ LoadTaggedPointerField(
      rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
  __ j(not_equal, &builtin_trampoline, Label::kNear);

  // Use the custom trampoline: convert the Code object to its entry address.
  __ movq(rbx,
          FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
  __ addq(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&trampoline_loaded, Label::kNear);

  __ bind(&builtin_trampoline);
  // TODO(jgruber): Replace this by a lookup in the builtin entry table.
  __ movq(rbx,
          __ ExternalReferenceAsOperand(
              ExternalReference::
                  address_of_interpreter_entry_trampoline_instruction_start(
                      masm->isolate()),
              kScratchRegister));

  __ bind(&trampoline_loaded);
  // Push trampoline start + return-pc offset as the return address.
  __ addq(rbx, Immediate(interpreter_entry_return_pc_offset->value()));
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movq(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ movq(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzxbq(r11, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movq(kJavaScriptCallCodeStartRegister,
          Operand(kInterpreterDispatchTableRegister, r11,
                  times_system_pointer_size, 0));
  __ jmp(kJavaScriptCallCodeStartRegister);
}
1424 :
// Advances the bytecode offset stored in the interpreter frame past the
// current bytecode, writes it back as a Smi, and re-enters the interpreter
// at the new offset. Aborts if the current bytecode is a return bytecode
// (there is nothing to advance past).
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ movq(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movq(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ movzxbq(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, rbx, rcx,
                                &if_return);

  // Convert new bytecode offset to a Smi and save in the stackframe.
  __ SmiTag(rbx, kInterpreterBytecodeOffsetRegister);
  __ movq(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);

  Generate_InterpreterEnterBytecode(masm);

  // We should never take the if_return path.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}
1454 :
// Re-enters the interpreter and dispatches to the bytecode at the offset
// currently stored in the frame, without advancing first.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
1458 :
// Attempts to instantiate an asm.js module via Runtime::kInstantiateAsmJs.
// On success, unwinds both this frame and the parent frame and returns the
// instantiated module object. On failure (runtime returns Smi 0), restores
// the original arguments and tail-calls the function's current code, which
// has been reset to the CompileLazy builtin.
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ movq(rcx, rax);
    // Push the number of arguments to the callee.
    __ SmiTag(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);

    // The function.
    __ Push(rdi);
    // Copy arguments from caller (stdlib, foreign, heap).
    // The unrolled cases below push exactly three values for j = 0..3 actual
    // arguments: the j caller-provided ones (in reverse stack order) padded
    // with undefined up to three.
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpq(rcx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(rbp, StandardFrameConstants::kCallerSPOffset +
                                 i * kSystemPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(RootIndex::kUndefinedValue);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(rax, &failed, Label::kNear);

    // Drop the saved function/new-target copies and recover the argument
    // count pushed at the top of this frame.
    __ Drop(2);
    __ Pop(rcx);
    __ SmiUntag(rcx, rcx);
    scope.GenerateLeaveFrame();

    // Also drop the parent frame's receiver + arguments before returning.
    __ PopReturnAddressTo(rbx);
    __ incq(rcx);
    __ leaq(rsp, Operand(rsp, rcx, times_system_pointer_size, 0));
    __ PushReturnAddressFrom(rbx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiUntag(rax, rax);
  }
  // On failure, tail call back to regular js by re-calling the function
  // which has be reset to the compile lazy builtin.
  __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ JumpCodeObject(rcx);
}
1529 :
1530 : namespace {
 : // Resumes execution in a builtin after deoptimization. The deoptimizer has
 : // laid out a builtin-continuation frame: all allocatable general registers
 : // followed by the frame's fixed slots, with the (tagged) Code object of the
 : // target builtin on the stack. This helper pops the registers back, restores
 : // rbp, converts the tagged Code pointer on top of the stack into its entry
 : // address, and returns into it.
 : // |with_result|: rax holds the return value of the LAZY deopt point and
 : // overwrites the hole the deoptimizer pushed in its place.
 : // |java_script_builtin|: the popped argument-count register holds a Smi and
 : // is untagged here.
1531 224 : void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1532 : bool java_script_builtin,
1533 : bool with_result) {
1534 224 : const RegisterConfiguration* config(RegisterConfiguration::Default());
1535 224 : int allocatable_register_count = config->num_allocatable_general_registers();
1536 224 : if (with_result) {
1537 : // Overwrite the hole inserted by the deoptimizer with the return value from
1538 : // the LAZY deopt point.
1539 : __ movq(
1540 112 : Operand(rsp, config->num_allocatable_general_registers() *
1541 : kSystemPointerSize +
1542 : BuiltinContinuationFrameConstants::kFixedFrameSize),
1543 112 : rax);
1544 : }
 : // Registers were pushed in allocation order; pop them back in reverse.
1545 2912 : for (int i = allocatable_register_count - 1; i >= 0; --i) {
1546 2688 : int code = config->GetAllocatableGeneralCode(i);
1547 2688 : __ popq(Register::from_code(code));
1548 2688 : if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1549 2688 112 : __ SmiUntag(Register::from_code(code), Register::from_code(code));
1550 : }
1551 : }
1552 : __ movq(
1553 : rbp,
1554 224 : Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1555 : const int offsetToPC =
1556 : BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp -
1557 224 : kSystemPointerSize;
 : // Move the Code object slot to the top of the stack, drop the rest of the
 : // fixed frame, then bias the tagged pointer to the code entry so Ret jumps
 : // into the builtin.
1558 224 : __ popq(Operand(rsp, offsetToPC));
1559 224 : __ Drop(offsetToPC / kSystemPointerSize);
1560 224 : __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
1561 224 : __ Ret();
1562 224 : }
1563 : } // namespace
1564 :
 : // Continuation into a non-JavaScript (code stub) builtin; the deoptimizer
 : // pushed no result slot (java_script_builtin = false, with_result = false).
1565 56 : void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1566 56 : Generate_ContinueToBuiltinHelper(masm, false, false);
1567 56 : }
1568 :
 : // As Generate_ContinueToCodeStubBuiltin, but the LAZY deopt point produced a
 : // value in rax that must overwrite the deoptimizer's hole (with_result =
 : // true).
1569 56 : void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1570 : MacroAssembler* masm) {
1571 56 : Generate_ContinueToBuiltinHelper(masm, false, true);
1572 56 : }
1573 :
 : // Continuation into a JavaScript builtin: the popped argument-count register
 : // is Smi-untagged by the helper (java_script_builtin = true); no result.
1574 56 : void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1575 56 : Generate_ContinueToBuiltinHelper(masm, true, false);
1576 56 : }
1577 :
 : // JavaScript-builtin continuation that also carries the LAZY deopt point's
 : // return value in rax (java_script_builtin = true, with_result = true).
1578 56 : void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1579 : MacroAssembler* masm) {
1580 56 : Generate_ContinueToBuiltinHelper(masm, true, true);
1581 56 : }
1582 :
 : // Calls Runtime::kNotifyDeoptimized inside an internal frame, then reloads
 : // the interpreter accumulator (rax, per the DCHECK below) from the stack and
 : // returns, dropping that one slot.
1583 56 : void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1584 : // Enter an internal frame.
1585 : {
1586 56 : FrameScope scope(masm, StackFrame::INTERNAL);
1587 56 : __ CallRuntime(Runtime::kNotifyDeoptimized);
1588 : // Tear down internal frame.
1589 : }
1590 :
1591 : DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
1592 56 : __ movq(rax, Operand(rsp, kPCOnStackSize));
1593 56 : __ ret(1 * kSystemPointerSize); // Remove rax.
1594 56 : }
1595 :
1596 : // static
 : // Implements Function.prototype.apply: loads receiver/thisArg/argArray from
 : // the caller's stack, rewrites the stack to [thisArg] only, and tail-calls
 : // CallWithArrayLike — or the plain Call builtin with zero arguments when
 : // argArray is null or undefined.
1597 56 : void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1598 : // ----------- S t a t e -------------
1599 : // -- rax : argc
1600 : // -- rsp[0] : return address
1601 : // -- rsp[8] : argArray
1602 : // -- rsp[16] : thisArg
1603 : // -- rsp[24] : receiver
1604 : // -----------------------------------
1605 :
1606 : // 1. Load receiver into rdi, argArray into rbx (if present), remove all
1607 : // arguments from the stack (including the receiver), and push thisArg (if
1608 : // present) instead.
1609 : {
1610 56 : Label no_arg_array, no_this_arg;
1611 56 : StackArgumentsAccessor args(rsp, rax);
 : // Missing thisArg/argArray default to undefined.
1612 56 : __ LoadRoot(rdx, RootIndex::kUndefinedValue);
1613 56 : __ movq(rbx, rdx);
1614 56 : __ movq(rdi, args.GetReceiverOperand());
1615 56 : __ testq(rax, rax);
1616 56 : __ j(zero, &no_this_arg, Label::kNear);
1617 : {
1618 56 : __ movq(rdx, args.GetArgumentOperand(1));
1619 56 : __ cmpq(rax, Immediate(1));
1620 56 : __ j(equal, &no_arg_array, Label::kNear);
1621 56 : __ movq(rbx, args.GetArgumentOperand(2));
1622 56 : __ bind(&no_arg_array);
1623 : }
1624 56 : __ bind(&no_this_arg);
 : // Pop all argc+1 slots (arguments plus receiver) and push thisArg in
 : // their place, keeping the return address on top.
1625 56 : __ PopReturnAddressTo(rcx);
1626 : __ leaq(rsp,
1627 56 : Operand(rsp, rax, times_system_pointer_size, kSystemPointerSize));
1628 56 : __ Push(rdx);
1629 56 : __ PushReturnAddressFrom(rcx);
1630 : }
1631 :
1632 : // ----------- S t a t e -------------
1633 : // -- rbx : argArray
1634 : // -- rdi : receiver
1635 : // -- rsp[0] : return address
1636 : // -- rsp[8] : thisArg
1637 : // -----------------------------------
1638 :
1639 : // 2. We don't need to check explicitly for callable receiver here,
1640 : // since that's the first thing the Call/CallWithArrayLike builtins
1641 : // will do.
1642 :
1643 : // 3. Tail call with no arguments if argArray is null or undefined.
1644 56 : Label no_arguments;
1645 56 : __ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments, Label::kNear);
1646 56 : __ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
1647 :
1648 : // 4a. Apply the receiver to the given argArray.
1649 : __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1650 56 : RelocInfo::CODE_TARGET);
1651 :
1652 : // 4b. The argArray is either null or undefined, so we tail call without any
1653 : // arguments to the receiver. Since we did not create a frame for
1654 : // Function.prototype.apply() yet, we use a normal Call builtin here.
1655 56 : __ bind(&no_arguments);
1656 : {
1657 56 : __ Set(rax, 0);
1658 56 : __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1659 : }
1660 56 : }
1661 :
1662 : // static
 : // Implements Function.prototype.call: ensures at least one argument
 : // (pushing undefined if needed), takes the receiver as the callable, shifts
 : // every argument one slot down so the first argument becomes the new
 : // receiver, and tail-calls the Call builtin.
1663 56 : void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1664 : // Stack Layout:
1665 : // rsp[0] : Return address
1666 : // rsp[8] : Argument n
1667 : // rsp[16] : Argument n-1
1668 : // ...
1669 : // rsp[8 * n] : Argument 1
1670 : // rsp[8 * (n + 1)] : Receiver (callable to call)
1671 : //
1672 : // rax contains the number of arguments, n, not counting the receiver.
1673 : //
1674 : // 1. Make sure we have at least one argument.
1675 : {
1676 56 : Label done;
1677 56 : __ testq(rax, rax);
1678 56 : __ j(not_zero, &done, Label::kNear);
1679 56 : __ PopReturnAddressTo(rbx);
1680 56 : __ PushRoot(RootIndex::kUndefinedValue);
1681 56 : __ PushReturnAddressFrom(rbx);
1682 56 : __ incq(rax);
1683 56 : __ bind(&done);
1684 : }
1685 :
1686 : // 2. Get the callable to call (passed as receiver) from the stack.
1687 : {
1688 56 : StackArgumentsAccessor args(rsp, rax);
1689 56 : __ movq(rdi, args.GetReceiverOperand());
1690 : }
1691 :
1692 : // 3. Shift arguments and return address one slot down on the stack
1693 : // (overwriting the original receiver). Adjust argument count to make
1694 : // the original first argument the new receiver.
1695 : {
1696 56 : Label loop;
 : // rcx counts down from argc; each iteration copies argument rcx into
 : // slot rcx-1 (via the accessor's 0/1 offsets).
1697 56 : __ movq(rcx, rax);
1698 56 : StackArgumentsAccessor args(rsp, rcx);
1699 56 : __ bind(&loop);
1700 56 : __ movq(rbx, args.GetArgumentOperand(1));
1701 56 : __ movq(args.GetArgumentOperand(0), rbx);
1702 56 : __ decq(rcx);
1703 56 : __ j(not_zero, &loop); // While non-zero.
1704 56 : __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address.
1705 56 : __ decq(rax); // One fewer argument (first argument is new receiver).
1706 : }
1707 :
1708 : // 4. Call the callable.
1709 : // Since we did not create a frame for Function.prototype.call() yet,
1710 : // we use a normal Call builtin here.
1711 56 : __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1712 56 : }
1713 :
 : // Implements Reflect.apply(target, thisArgument, argumentsList): loads the
 : // three operands (defaulting to undefined when absent), replaces the whole
 : // caller argument area with just thisArgument, and tail-calls
 : // CallWithArrayLike.
1714 56 : void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1715 : // ----------- S t a t e -------------
1716 : // -- rax : argc
1717 : // -- rsp[0] : return address
1718 : // -- rsp[8] : argumentsList
1719 : // -- rsp[16] : thisArgument
1720 : // -- rsp[24] : target
1721 : // -- rsp[32] : receiver
1722 : // -----------------------------------
1723 :
1724 : // 1. Load target into rdi (if present), argumentsList into rbx (if present),
1725 : // remove all arguments from the stack (including the receiver), and push
1726 : // thisArgument (if present) instead.
1727 : {
1728 56 : Label done;
1729 56 : StackArgumentsAccessor args(rsp, rax);
1730 56 : __ LoadRoot(rdi, RootIndex::kUndefinedValue);
1731 56 : __ movq(rdx, rdi);
1732 56 : __ movq(rbx, rdi);
 : // Bail out of the load sequence as soon as argc runs out; the compare
 : // flags from "cmpq rax, 1" serve both the below and equal branches.
1733 56 : __ cmpq(rax, Immediate(1));
1734 56 : __ j(below, &done, Label::kNear);
1735 56 : __ movq(rdi, args.GetArgumentOperand(1)); // target
1736 56 : __ j(equal, &done, Label::kNear);
1737 56 : __ movq(rdx, args.GetArgumentOperand(2)); // thisArgument
1738 56 : __ cmpq(rax, Immediate(3));
1739 56 : __ j(below, &done, Label::kNear);
1740 56 : __ movq(rbx, args.GetArgumentOperand(3)); // argumentsList
1741 56 : __ bind(&done);
1742 56 : __ PopReturnAddressTo(rcx);
1743 : __ leaq(rsp,
1744 56 : Operand(rsp, rax, times_system_pointer_size, kSystemPointerSize));
1745 56 : __ Push(rdx);
1746 56 : __ PushReturnAddressFrom(rcx);
1747 : }
1748 :
1749 : // ----------- S t a t e -------------
1750 : // -- rbx : argumentsList
1751 : // -- rdi : target
1752 : // -- rsp[0] : return address
1753 : // -- rsp[8] : thisArgument
1754 : // -----------------------------------
1755 :
1756 : // 2. We don't need to check explicitly for callable target here,
1757 : // since that's the first thing the Call/CallWithArrayLike builtins
1758 : // will do.
1759 :
1760 : // 3. Apply the target to the given argumentsList.
1761 : __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1762 56 : RelocInfo::CODE_TARGET);
1763 56 : }
1764 :
 : // Implements Reflect.construct(target, argumentsList[, newTarget]): loads
 : // the operands (new.target defaults to target), replaces the caller
 : // argument area with an undefined receiver, and tail-calls
 : // ConstructWithArrayLike.
1765 56 : void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1766 : // ----------- S t a t e -------------
1767 : // -- rax : argc
1768 : // -- rsp[0] : return address
1769 : // -- rsp[8] : new.target (optional)
1770 : // -- rsp[16] : argumentsList
1771 : // -- rsp[24] : target
1772 : // -- rsp[32] : receiver
1773 : // -----------------------------------
1774 :
1775 : // 1. Load target into rdi (if present), argumentsList into rbx (if present),
1776 : // new.target into rdx (if present, otherwise use target), remove all
1777 : // arguments from the stack (including the receiver), and push thisArgument
1778 : // (if present) instead.
1779 : {
1780 56 : Label done;
1781 56 : StackArgumentsAccessor args(rsp, rax);
1782 56 : __ LoadRoot(rdi, RootIndex::kUndefinedValue);
1783 56 : __ movq(rdx, rdi);
1784 56 : __ movq(rbx, rdi);
1785 56 : __ cmpq(rax, Immediate(1));
1786 56 : __ j(below, &done, Label::kNear);
1787 56 : __ movq(rdi, args.GetArgumentOperand(1)); // target
1788 56 : __ movq(rdx, rdi); // new.target defaults to target
1789 56 : __ j(equal, &done, Label::kNear);
1790 56 : __ movq(rbx, args.GetArgumentOperand(2)); // argumentsList
1791 56 : __ cmpq(rax, Immediate(3));
1792 56 : __ j(below, &done, Label::kNear);
1793 56 : __ movq(rdx, args.GetArgumentOperand(3)); // new.target
1794 56 : __ bind(&done);
 : // Drop all argc+1 slots and push undefined as the receiver seen by the
 : // construct path.
1795 56 : __ PopReturnAddressTo(rcx);
1796 : __ leaq(rsp,
1797 56 : Operand(rsp, rax, times_system_pointer_size, kSystemPointerSize));
1798 56 : __ PushRoot(RootIndex::kUndefinedValue);
1799 56 : __ PushReturnAddressFrom(rcx);
1800 : }
1801 :
1802 : // ----------- S t a t e -------------
1803 : // -- rbx : argumentsList
1804 : // -- rdx : new.target
1805 : // -- rdi : target
1806 : // -- rsp[0] : return address
1807 : // -- rsp[8] : receiver (undefined)
1808 : // -----------------------------------
1809 :
1810 : // 2. We don't need to check explicitly for constructor target here,
1811 : // since that's the first thing the Construct/ConstructWithArrayLike
1812 : // builtins will do.
1813 :
1814 : // 3. We don't need to check explicitly for constructor new.target here,
1815 : // since that's the second thing the Construct/ConstructWithArrayLike
1816 : // builtins will do.
1817 :
1818 : // 4. Construct the target with the given new.target and argumentsList.
1819 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1820 56 : RelocInfo::CODE_TARGET);
1821 56 : }
1822 :
 : // Entry point for the InternalArray constructor: optionally sanity-checks
 : // the function's initial map under --debug-code, then tail-calls the real
 : // implementation builtin.
1823 56 : void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
1824 : // ----------- S t a t e -------------
1825 : // -- rax : argc
1826 : // -- rsp[0] : return address
1827 : // -- rsp[8] : last argument
1828 : // -----------------------------------
1829 :
1830 56 : if (FLAG_debug_code) {
1831 : // Initial map for the builtin InternalArray functions should be maps.
1832 : __ LoadTaggedPointerField(
1833 0 : rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1834 : // Will both indicate a nullptr and a Smi.
1835 : STATIC_ASSERT(kSmiTag == 0);
1836 0 : Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1837 : __ Check(not_smi,
1838 0 : AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
1839 0 : __ CmpObjectType(rbx, MAP_TYPE, rcx);
1840 0 : __ Check(equal, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
1841 : }
1842 :
1843 : // Run the native code for the InternalArray function called as a normal
1844 : // function.
1845 : __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
1846 56 : RelocInfo::CODE_TARGET);
1847 56 : }
1848 :
 : // Builds an arguments-adaptor frame: saved rbp, frame-type marker, the
 : // function, the Smi-tagged argument count, and a padding slot. Counterpart
 : // of LeaveArgumentsAdaptorFrame below.
1849 112 : static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1850 112 : __ pushq(rbp);
1851 112 : __ movq(rbp, rsp);
1852 :
1853 : // Store the arguments adaptor context sentinel.
1854 112 : __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1855 :
1856 : // Push the function on the stack.
1857 112 : __ Push(rdi);
1858 :
1859 : // Preserve the number of arguments on the stack. Must preserve rax,
1860 : // rbx and rcx because these registers are used when copying the
1861 : // arguments and the receiver.
1862 112 : __ SmiTag(r8, rax);
1863 112 : __ Push(r8);
1864 :
1865 112 : __ Push(Immediate(0)); // Padding.
1866 112 : }
1867 :
 : // Tears down the arguments-adaptor frame and removes the caller's arguments
 : // (count read back as a Smi from the frame) plus the receiver slot from the
 : // stack. Clobbers rbx and rcx.
1868 56 : static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1869 : // Retrieve the number of arguments from the stack. Number is a Smi.
1870 56 : __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1871 :
1872 : // Leave the frame.
1873 56 : __ movq(rsp, rbp);
1874 56 : __ popq(rbp);
1875 :
1876 : // Remove caller arguments from the stack.
 : // The extra kSystemPointerSize drops the receiver slot as well.
1877 56 : __ PopReturnAddressTo(rcx);
1878 56 : SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
1879 56 : __ leaq(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
1880 56 : __ PushReturnAddressFrom(rcx);
1881 56 : }
1882 :
 : // Adapts the actual argument count (rax) to the callee's expected count
 : // (rbx): copies arguments into a fresh adaptor frame, padding with
 : // undefined when too few were passed, then calls the function's code
 : // object. Bypasses adaptation entirely for the kDontAdaptArgumentsSentinel.
 : // NOTE(review): the '©' glyphs below are an encoding artifact of this
 : // coverage dump — the underlying source passes the address of the local
 : // 'Label copy' (i.e. '&copy', mangled by HTML-entity decoding). Confirm
 : // against the original builtins-x64.cc before relying on this listing.
1883 56 : void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1884 : // ----------- S t a t e -------------
1885 : // -- rax : actual number of arguments
1886 : // -- rbx : expected number of arguments
1887 : // -- rdx : new target (passed through to callee)
1888 : // -- rdi : function (passed through to callee)
1889 : // -----------------------------------
1890 :
1891 56 : Label invoke, dont_adapt_arguments, stack_overflow, enough, too_few;
1892 56 : __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1893 56 : __ j(equal, &dont_adapt_arguments);
1894 56 : __ cmpq(rax, rbx);
1895 56 : __ j(less, &too_few);
1896 :
1897 : { // Enough parameters: Actual >= expected.
1898 56 : __ bind(&enough);
1899 56 : EnterArgumentsAdaptorFrame(masm);
1900 : // The registers rcx and r8 will be modified. The register rbx is only read.
1901 56 : Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
1902 :
1903 : // Copy receiver and all expected arguments.
1904 56 : const int offset = StandardFrameConstants::kCallerSPOffset;
1905 56 : __ leaq(rax, Operand(rbp, rax, times_system_pointer_size, offset));
1906 56 : __ Set(r8, -1); // account for receiver
1907 :
1908 56 : Label copy;
1909 56 : __ bind(©);
1910 56 : __ incq(r8);
1911 56 : __ Push(Operand(rax, 0));
1912 56 : __ subq(rax, Immediate(kSystemPointerSize));
1913 56 : __ cmpq(r8, rbx);
1914 56 : __ j(less, ©);
1915 56 : __ jmp(&invoke);
1916 : }
1917 :
1918 : { // Too few parameters: Actual < expected.
1919 56 : __ bind(&too_few);
1920 :
1921 56 : EnterArgumentsAdaptorFrame(masm);
1922 : // The registers rcx and r8 will be modified. The register rbx is only read.
1923 56 : Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
1924 :
1925 : // Copy receiver and all actual arguments.
1926 56 : const int offset = StandardFrameConstants::kCallerSPOffset;
1927 56 : __ leaq(rdi, Operand(rbp, rax, times_system_pointer_size, offset));
1928 56 : __ Set(r8, -1); // account for receiver
1929 :
1930 56 : Label copy;
1931 56 : __ bind(©);
1932 56 : __ incq(r8);
1933 56 : __ Push(Operand(rdi, 0));
1934 56 : __ subq(rdi, Immediate(kSystemPointerSize));
1935 56 : __ cmpq(r8, rax);
1936 56 : __ j(less, ©);
1937 :
1938 : // Fill remaining expected arguments with undefined values.
1939 56 : Label fill;
1940 56 : __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
1941 56 : __ bind(&fill);
1942 56 : __ incq(r8);
1943 56 : __ Push(kScratchRegister);
1944 56 : __ cmpq(r8, rbx);
1945 56 : __ j(less, &fill);
1946 :
1947 : // Restore function pointer.
1948 56 : __ movq(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
1949 : }
1950 :
1951 : // Call the entry point.
1952 56 : __ bind(&invoke);
1953 56 : __ movq(rax, rbx);
1954 : // rax : expected number of arguments
1955 : // rdx : new target (passed through to callee)
1956 : // rdi : function (passed through to callee)
1957 : static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
1958 56 : __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
1959 56 : __ CallCodeObject(rcx);
1960 :
1961 : // Store offset of return address for deoptimizer.
1962 56 : masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1963 :
1964 : // Leave frame and return.
1965 56 : LeaveArgumentsAdaptorFrame(masm);
1966 56 : __ ret(0);
1967 :
1968 : // -------------------------------------------
1969 : // Dont adapt arguments.
1970 : // -------------------------------------------
1971 56 : __ bind(&dont_adapt_arguments);
1972 : static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
1973 56 : __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
1974 56 : __ JumpCodeObject(rcx);
1975 :
1976 56 : __ bind(&stack_overflow);
1977 : {
1978 56 : FrameScope frame(masm, StackFrame::MANUAL);
1979 56 : __ CallRuntime(Runtime::kThrowStackOverflow);
1980 56 : __ int3();
1981 : }
1982 56 : }
1983 :
1984 : // static
 : // Pushes |rcx| elements of the FixedArray in rbx onto the stack (holes
 : // become undefined), adds them to the argument count in rax, and tail-calls
 : // the given Call or Construct builtin. Throws on stack overflow.
1985 112 : void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1986 : Handle<Code> code) {
1987 : // ----------- S t a t e -------------
1988 : // -- rdi : target
1989 : // -- rax : number of parameters on the stack (not including the receiver)
1990 : // -- rbx : arguments list (a FixedArray)
1991 : // -- rcx : len (number of elements to push from args)
1992 : // -- rdx : new.target (for [[Construct]])
1993 : // -- rsp[0] : return address
1994 : // -----------------------------------
1995 112 : Register scratch = r11;
1996 112 : Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r12 : no_reg;
1997 :
1998 112 : if (masm->emit_debug_code()) {
1999 : // Allow rbx to be a FixedArray, or a FixedDoubleArray if rcx == 0.
2000 0 : Label ok, fail;
2001 0 : __ AssertNotSmi(rbx);
2002 0 : Register map = r9;
2003 0 : __ LoadTaggedPointerField(map, FieldOperand(rbx, HeapObject::kMapOffset));
2004 0 : __ CmpInstanceType(map, FIXED_ARRAY_TYPE);
2005 0 : __ j(equal, &ok);
2006 0 : __ CmpInstanceType(map, FIXED_DOUBLE_ARRAY_TYPE);
2007 0 : __ j(not_equal, &fail);
2008 0 : __ cmpl(rcx, Immediate(0));
2009 0 : __ j(equal, &ok);
2010 : // Fall through.
2011 0 : __ bind(&fail);
2012 0 : __ Abort(AbortReason::kOperandIsNotAFixedArray);
2013 :
2014 0 : __ bind(&ok);
2015 : }
2016 :
2017 112 : Label stack_overflow;
2018 112 : Generate_StackOverflowCheck(masm, rcx, r8, &stack_overflow, Label::kNear);
2019 :
2020 : // Push additional arguments onto the stack.
2021 : {
2022 112 : Register value = scratch;
 : // r8 holds the return address across the loop; r9 is the element index.
2023 112 : __ PopReturnAddressTo(r8);
2024 112 : __ Set(r9, 0);
2025 112 : Label done, push, loop;
2026 112 : __ bind(&loop);
2027 112 : __ cmpl(r9, rcx);
2028 112 : __ j(equal, &done, Label::kNear);
2029 : // Turn the hole into undefined as we go.
2030 : __ LoadAnyTaggedField(
2031 : value,
2032 : FieldOperand(rbx, r9, times_tagged_size, FixedArray::kHeaderSize),
2033 112 : decompr_scratch);
2034 112 : __ CompareRoot(value, RootIndex::kTheHoleValue);
2035 112 : __ j(not_equal, &push, Label::kNear);
2036 112 : __ LoadRoot(value, RootIndex::kUndefinedValue);
2037 112 : __ bind(&push);
2038 112 : __ Push(value);
2039 112 : __ incl(r9);
2040 112 : __ jmp(&loop);
2041 112 : __ bind(&done);
2042 112 : __ PushReturnAddressFrom(r8);
2043 112 : __ addq(rax, r9);
2044 : }
2045 :
2046 : // Tail-call to the actual Call or Construct builtin.
2047 112 : __ Jump(code, RelocInfo::CODE_TARGET);
2048 :
2049 112 : __ bind(&stack_overflow);
2050 112 : __ TailCallRuntime(Runtime::kThrowStackOverflow);
2051 112 : }
2052 :
2053 : // static
 : // Forwards the caller's own arguments starting at index rcx (rest-parameter
 : // support) to a Call or Construct builtin. For [[Construct]] it first
 : // throws if new.target (rdx) has no [[Construct]] internal method. The
 : // caller's argument count comes either from its arguments-adaptor frame or
 : // from the function's formal parameter count.
2054 224 : void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2055 : CallOrConstructMode mode,
2056 : Handle<Code> code) {
2057 : // ----------- S t a t e -------------
2058 : // -- rax : the number of arguments (not including the receiver)
2059 : // -- rdx : the new target (for [[Construct]] calls)
2060 : // -- rdi : the target to call (can be any Object)
2061 : // -- rcx : start index (to support rest parameters)
2062 : // -----------------------------------
2063 :
2064 : // Check if new.target has a [[Construct]] internal method.
2065 224 : if (mode == CallOrConstructMode::kConstruct) {
2066 112 : Label new_target_constructor, new_target_not_constructor;
2067 112 : __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
2068 112 : __ LoadTaggedPointerField(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
2069 : __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2070 112 : Immediate(Map::IsConstructorBit::kMask));
2071 112 : __ j(not_zero, &new_target_constructor, Label::kNear);
2072 112 : __ bind(&new_target_not_constructor);
2073 : {
2074 112 : FrameScope scope(masm, StackFrame::MANUAL);
2075 112 : __ EnterFrame(StackFrame::INTERNAL);
2076 112 : __ Push(rdx);
2077 112 : __ CallRuntime(Runtime::kThrowNotConstructor);
2078 : }
2079 112 : __ bind(&new_target_constructor);
2080 : }
2081 :
2082 : // Check if we have an arguments adaptor frame below the function frame.
2083 224 : Label arguments_adaptor, arguments_done;
 : // rbx ends up pointing at the frame holding the forwardable arguments;
 : // r8 holds their count.
2084 224 : __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2085 : __ cmpq(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
2086 224 : Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2087 224 : __ j(equal, &arguments_adaptor, Label::kNear);
2088 : {
2089 224 : __ movq(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2090 : __ LoadTaggedPointerField(
2091 224 : r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset));
2092 : __ movzxwq(
2093 224 : r8, FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
2094 224 : __ movq(rbx, rbp);
2095 : }
2096 224 : __ jmp(&arguments_done, Label::kNear);
2097 224 : __ bind(&arguments_adaptor);
2098 : {
2099 : __ SmiUntag(r8,
2100 224 : Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2101 : }
2102 224 : __ bind(&arguments_done);
2103 :
2104 224 : Label stack_done, stack_overflow;
2105 224 : __ subl(r8, rcx);
2106 224 : __ j(less_equal, &stack_done);
2107 : {
2108 : // Check for stack overflow.
2109 224 : Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);
2110 :
2111 : // Forward the arguments from the caller frame.
2112 : {
2113 224 : Label loop;
2114 224 : __ addl(rax, r8);
2115 224 : __ PopReturnAddressTo(rcx);
2116 224 : __ bind(&loop);
2117 : {
2118 224 : StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
2119 224 : __ Push(args.GetArgumentOperand(0));
2120 224 : __ decl(r8);
2121 224 : __ j(not_zero, &loop);
2122 : }
2123 224 : __ PushReturnAddressFrom(rcx);
2124 : }
2125 : }
2126 224 : __ jmp(&stack_done, Label::kNear);
2127 224 : __ bind(&stack_overflow);
2128 224 : __ TailCallRuntime(Runtime::kThrowStackOverflow);
2129 224 : __ bind(&stack_done);
2130 :
2131 : // Tail-call to the {code} handler.
2132 224 : __ Jump(code, RelocInfo::CODE_TARGET);
2133 224 : }
2134 :
2135 : // static
 : // [[Call]] for a JSFunction: rejects class constructors, converts the
 : // receiver when the callee is a non-native sloppy-mode function (global
 : // proxy for null/undefined, ToObject otherwise, per the given mode), then
 : // invokes the function's code with the expected argument count from its
 : // SharedFunctionInfo.
2136 168 : void Builtins::Generate_CallFunction(MacroAssembler* masm,
2137 : ConvertReceiverMode mode) {
2138 : // ----------- S t a t e -------------
2139 : // -- rax : the number of arguments (not including the receiver)
2140 : // -- rdi : the function to call (checked to be a JSFunction)
2141 : // -----------------------------------
2142 :
2143 168 : StackArgumentsAccessor args(rsp, rax);
2144 168 : __ AssertFunction(rdi);
2145 :
2146 : // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2147 : // Check that the function is not a "classConstructor".
2148 168 : Label class_constructor;
2149 : __ LoadTaggedPointerField(
2150 168 : rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2151 : __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
2152 168 : Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
2153 168 : __ j(not_zero, &class_constructor);
2154 :
2155 : // ----------- S t a t e -------------
2156 : // -- rax : the number of arguments (not including the receiver)
2157 : // -- rdx : the shared function info.
2158 : // -- rdi : the function to call (checked to be a JSFunction)
2159 : // -----------------------------------
2160 :
2161 : // Enter the context of the function; ToObject has to run in the function
2162 : // context, and we also need to take the global proxy from the function
2163 : // context in case of conversion.
2164 168 : __ LoadTaggedPointerField(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2165 : // We need to convert the receiver for non-native sloppy mode functions.
2166 168 : Label done_convert;
2167 : __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
2168 : Immediate(SharedFunctionInfo::IsNativeBit::kMask |
2169 168 : SharedFunctionInfo::IsStrictBit::kMask));
2170 168 : __ j(not_zero, &done_convert);
2171 : {
2172 : // ----------- S t a t e -------------
2173 : // -- rax : the number of arguments (not including the receiver)
2174 : // -- rdx : the shared function info.
2175 : // -- rdi : the function to call (checked to be a JSFunction)
2176 : // -- rsi : the function context.
2177 : // -----------------------------------
2178 :
2179 168 : if (mode == ConvertReceiverMode::kNullOrUndefined) {
2180 : // Patch receiver to global proxy.
2181 56 : __ LoadGlobalProxy(rcx);
2182 : } else {
2183 112 : Label convert_to_object, convert_receiver;
2184 112 : __ movq(rcx, args.GetReceiverOperand());
2185 112 : __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
2186 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
 : // JSReceivers are used as-is; no conversion needed.
2187 112 : __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
2188 112 : __ j(above_equal, &done_convert);
2189 112 : if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2190 56 : Label convert_global_proxy;
2191 : __ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
2192 56 : Label::kNear);
2193 : __ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
2194 56 : Label::kNear);
2195 56 : __ bind(&convert_global_proxy);
2196 : {
2197 : // Patch receiver to global proxy.
2198 56 : __ LoadGlobalProxy(rcx);
2199 : }
2200 56 : __ jmp(&convert_receiver);
2201 : }
2202 112 : __ bind(&convert_to_object);
2203 : {
2204 : // Convert receiver using ToObject.
2205 : // TODO(bmeurer): Inline the allocation here to avoid building the frame
2206 : // in the fast case? (fall back to AllocateInNewSpace?)
 : // rax (Smi-tagged argc), rdi and rsi are saved/restored around the
 : // ToObject call; the receiver travels in rax -> rcx.
2207 112 : FrameScope scope(masm, StackFrame::INTERNAL);
2208 112 : __ SmiTag(rax, rax);
2209 112 : __ Push(rax);
2210 112 : __ Push(rdi);
2211 112 : __ movq(rax, rcx);
2212 112 : __ Push(rsi);
2213 : __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
2214 112 : RelocInfo::CODE_TARGET);
2215 112 : __ Pop(rsi);
2216 112 : __ movq(rcx, rax);
2217 112 : __ Pop(rdi);
2218 112 : __ Pop(rax);
2219 112 : __ SmiUntag(rax, rax);
2220 : }
 : // Reload the SharedFunctionInfo; rdx was clobbered by the call above.
2221 : __ LoadTaggedPointerField(
2222 112 : rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2223 112 : __ bind(&convert_receiver);
2224 : }
2225 168 : __ movq(args.GetReceiverOperand(), rcx);
2226 : }
2227 168 : __ bind(&done_convert);
2228 :
2229 : // ----------- S t a t e -------------
2230 : // -- rax : the number of arguments (not including the receiver)
2231 : // -- rdx : the shared function info.
2232 : // -- rdi : the function to call (checked to be a JSFunction)
2233 : // -- rsi : the function context.
2234 : // -----------------------------------
2235 :
2236 : __ movzxwq(
2237 168 : rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2238 168 : ParameterCount actual(rax);
2239 168 : ParameterCount expected(rbx);
2240 :
2241 168 : __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);
2242 :
2243 : // The function is a "classConstructor", need to raise an exception.
2244 168 : __ bind(&class_constructor);
2245 : {
2246 168 : FrameScope frame(masm, StackFrame::INTERNAL);
2247 168 : __ Push(rdi);
2248 168 : __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2249 : }
2250 168 : }
2251 :
2252 : namespace {
2253 :
 : // Inserts a bound function's [[BoundArguments]] between the receiver and
 : // the explicit call arguments: reserves stack space (throwing on
 : // overflow), slides the existing arguments and return address down, copies
 : // the bound arguments into the gap, and updates the argument count in rax.
 : // No-op when the bound-arguments array is empty.
2254 112 : void Generate_PushBoundArguments(MacroAssembler* masm) {
2255 : // ----------- S t a t e -------------
2256 : // -- rax : the number of arguments (not including the receiver)
2257 : // -- rdx : new.target (only in case of [[Construct]])
2258 : // -- rdi : target (checked to be a JSBoundFunction)
2259 : // -----------------------------------
2260 :
2261 112 : Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
2262 :
2263 : // Load [[BoundArguments]] into rcx and length of that into rbx.
2264 112 : Label no_bound_arguments;
2265 : __ LoadTaggedPointerField(
2266 112 : rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2267 112 : __ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2268 112 : __ testl(rbx, rbx);
2269 112 : __ j(zero, &no_bound_arguments);
2270 : {
2271 : // ----------- S t a t e -------------
2272 : // -- rax : the number of arguments (not including the receiver)
2273 : // -- rdx : new.target (only in case of [[Construct]])
2274 : // -- rdi : target (checked to be a JSBoundFunction)
2275 : // -- rcx : the [[BoundArguments]] (implemented as FixedArray)
2276 : // -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
2277 : // -----------------------------------
2278 :
2279 : // Reserve stack space for the [[BoundArguments]].
2280 : {
2281 112 : Label done;
2282 112 : __ leaq(kScratchRegister, Operand(rbx, times_system_pointer_size, 0));
2283 112 : __ subq(rsp, kScratchRegister);
2284 : // Check the stack for overflow. We are not trying to catch interruptions
2285 : // (i.e. debug break and preemption) here, so check the "real stack
2286 : // limit".
2287 112 : __ CompareRoot(rsp, RootIndex::kRealStackLimit);
2288 112 : __ j(above_equal, &done, Label::kNear);
2289 : // Restore the stack pointer.
2290 112 : __ leaq(rsp, Operand(rsp, rbx, times_system_pointer_size, 0));
2291 : {
2292 112 : FrameScope scope(masm, StackFrame::MANUAL);
2293 112 : __ EnterFrame(StackFrame::INTERNAL);
2294 112 : __ CallRuntime(Runtime::kThrowStackOverflow);
2295 : }
2296 112 : __ bind(&done);
2297 : }
2298 :
2299 : // Adjust effective number of arguments to include return address.
2300 112 : __ incl(rax);
2301 :
2302 : // Relocate arguments and return address down the stack.
2303 : {
2304 112 : Label loop;
2305 112 : __ Set(rcx, 0);
2306 112 : __ leaq(rbx, Operand(rsp, rbx, times_system_pointer_size, 0));
2307 112 : __ bind(&loop);
2308 : __ movq(kScratchRegister,
2309 112 : Operand(rbx, rcx, times_system_pointer_size, 0));
2310 : __ movq(Operand(rsp, rcx, times_system_pointer_size, 0),
2311 112 : kScratchRegister);
2312 112 : __ incl(rcx);
2313 112 : __ cmpl(rcx, rax);
2314 112 : __ j(less, &loop);
2315 : }
2316 :
2317 : // Copy [[BoundArguments]] to the stack (below the arguments).
2318 : {
2319 112 : Label loop;
2320 : __ LoadTaggedPointerField(
2321 112 : rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2322 112 : __ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2323 112 : __ bind(&loop);
2324 : // Instead of doing decl(rbx) here subtract kTaggedSize from the header
2325 : // offset in order to be able to move decl(rbx) right before the loop
2326 : // condition. This is necessary in order to avoid flags corruption by
2327 : // pointer decompression code.
2328 : __ LoadAnyTaggedField(r12,
2329 : FieldOperand(rcx, rbx, times_tagged_size,
2330 : FixedArray::kHeaderSize - kTaggedSize),
2331 112 : decompr_scratch);
2332 112 : __ movq(Operand(rsp, rax, times_system_pointer_size, 0), r12);
2333 112 : __ leal(rax, Operand(rax, 1));
2334 112 : __ decl(rbx);
2335 112 : __ j(greater, &loop);
2336 : }
2337 :
2338 : // Adjust effective number of arguments (rax contains the number of
2339 : // arguments from the call plus return address plus the number of
2340 : // [[BoundArguments]]), so we need to subtract one for the return address.
2341 112 : __ decl(rax);
2342 : }
2343 112 : __ bind(&no_bound_arguments);
2344 112 : }
2345 :
2346 : } // namespace
2347 :
2348 : // static
2349 56 : void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
 : // Emits the [[Call]] path for a JSBoundFunction: installs [[BoundThis]] as
 : // the receiver, pushes the [[BoundArguments]], and then tail-calls the
 : // generic Call builtin on the [[BoundTargetFunction]].
2350 : // ----------- S t a t e -------------
2351 : // -- rax : the number of arguments (not including the receiver)
2352 : // -- rdi : the function to call (checked to be a JSBoundFunction)
2353 : // -----------------------------------
2354 56 : __ AssertBoundFunction(rdi);
2355 :
 : // A scratch register is only needed by the tagged-field loads when pointer
 : // compression is enabled.
2356 56 : Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
2357 :
2358 : // Patch the receiver to [[BoundThis]].
2359 56 : StackArgumentsAccessor args(rsp, rax);
2360 : __ LoadAnyTaggedField(rbx,
2361 : FieldOperand(rdi, JSBoundFunction::kBoundThisOffset),
2362 56 : decompr_scratch);
2363 56 : __ movq(args.GetReceiverOperand(), rbx);
2364 :
2365 : // Push the [[BoundArguments]] onto the stack.
2366 56 : Generate_PushBoundArguments(masm);
2367 :
2368 : // Call the [[BoundTargetFunction]] via the Call builtin.
2369 : __ LoadTaggedPointerField(
2370 56 : rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2371 : __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2372 56 : RelocInfo::CODE_TARGET);
2373 56 : }
2374 :
2375 : // static
2376 168 : void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
 : // Generic call dispatcher: routes rdi to the appropriate builtin depending
 : // on its instance type (JSFunction, JSBoundFunction, JSProxy, or any other
 : // callable), and throws TypeError for non-callables.
2377 : // ----------- S t a t e -------------
2378 : // -- rax : the number of arguments (not including the receiver)
2379 : // -- rdi : the target to call (can be any Object)
2380 : // -----------------------------------
2381 168 : StackArgumentsAccessor args(rsp, rax);
2382 :
2383 168 : Label non_callable;
2384 168 : __ JumpIfSmi(rdi, &non_callable);
 : // CmpObjectType leaves the target's map in rcx; the instance-type checks
 : // below reuse it.
2385 168 : __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2386 : __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2387 168 : RelocInfo::CODE_TARGET, equal);
2388 :
2389 168 : __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2390 : __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2391 168 : RelocInfo::CODE_TARGET, equal);
2392 :
2393 : // Check if target has a [[Call]] internal method.
2394 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2395 168 : Immediate(Map::IsCallableBit::kMask));
2396 168 : __ j(zero, &non_callable, Label::kNear);
2397 :
2398 : // Check if target is a proxy and call CallProxy external builtin
2399 168 : __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2400 : __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET,
2401 168 : equal);
2402 :
2403 : // 2. Call to something else, which might have a [[Call]] internal method (if
2404 : // not we raise an exception).
2405 :
2406 : // Overwrite the original receiver with the (original) target.
2407 168 : __ movq(args.GetReceiverOperand(), rdi);
2408 : // Let the "call_as_function_delegate" take care of the rest.
2409 168 : __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
2410 : __ Jump(masm->isolate()->builtins()->CallFunction(
2411 : ConvertReceiverMode::kNotNullOrUndefined),
2412 168 : RelocInfo::CODE_TARGET);
2413 :
2414 : // 3. Call to something that is not callable.
2415 168 : __ bind(&non_callable);
2416 : {
2417 168 : FrameScope scope(masm, StackFrame::INTERNAL);
2418 168 : __ Push(rdi);
 : // Throws; does not return.
2419 168 : __ CallRuntime(Runtime::kThrowCalledNonCallable);
2420 : }
2421 168 : }
2422 :
2423 : // static
2424 56 : void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
 : // Emits the [[Construct]] path for a JSFunction: selects between the
 : // builtins construct stub and the generic JS construct stub based on the
 : // shared function info's ConstructAsBuiltin flag.
2425 : // ----------- S t a t e -------------
2426 : // -- rax : the number of arguments (not including the receiver)
2427 : // -- rdx : the new target (checked to be a constructor)
2428 : // -- rdi : the constructor to call (checked to be a JSFunction)
2429 : // -----------------------------------
2430 56 : __ AssertConstructor(rdi);
2431 56 : __ AssertFunction(rdi);
2432 :
2433 : // Calling convention for function specific ConstructStubs require
2434 : // rbx to contain either an AllocationSite or undefined.
2435 56 : __ LoadRoot(rbx, RootIndex::kUndefinedValue);
2436 :
2437 : // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2438 : __ LoadTaggedPointerField(
2439 56 : rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2440 : __ testl(FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
2441 56 : Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2442 : __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2443 56 : RelocInfo::CODE_TARGET, not_zero);
2444 :
2445 : __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2446 56 : RelocInfo::CODE_TARGET);
2447 56 : }
2448 :
2449 : // static
2450 56 : void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
 : // Emits the [[Construct]] path for a JSBoundFunction: pushes the
 : // [[BoundArguments]], fixes up new.target if necessary, and tail-calls the
 : // generic Construct builtin on the [[BoundTargetFunction]].
2451 : // ----------- S t a t e -------------
2452 : // -- rax : the number of arguments (not including the receiver)
2453 : // -- rdx : the new target (checked to be a constructor)
2454 : // -- rdi : the constructor to call (checked to be a JSBoundFunction)
2455 : // -----------------------------------
2456 56 : __ AssertConstructor(rdi);
2457 56 : __ AssertBoundFunction(rdi);
2458 :
2459 : // Push the [[BoundArguments]] onto the stack.
2460 56 : Generate_PushBoundArguments(masm);
2461 :
2462 : // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2463 : {
2464 56 : Label done;
2465 56 : __ cmpq(rdi, rdx);
2466 56 : __ j(not_equal, &done, Label::kNear);
2467 : __ LoadTaggedPointerField(
2468 56 : rdx, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2469 56 : __ bind(&done);
2470 : }
2471 :
2472 : // Construct the [[BoundTargetFunction]] via the Construct builtin.
2473 : __ LoadTaggedPointerField(
2474 56 : rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2475 56 : __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2476 56 : }
2477 :
2478 : // static
2479 56 : void Builtins::Generate_Construct(MacroAssembler* masm) {
 : // Generic construct dispatcher: routes rdi to the appropriate builtin
 : // depending on its instance type (JSFunction, JSBoundFunction, JSProxy, or
 : // an exotic object with a [[Construct]] method), and throws for
 : // non-constructors.
2480 : // ----------- S t a t e -------------
2481 : // -- rax : the number of arguments (not including the receiver)
2482 : // -- rdx : the new target (either the same as the constructor or
2483 : // the JSFunction on which new was invoked initially)
2484 : // -- rdi : the constructor to call (can be any Object)
2485 : // -----------------------------------
2486 56 : StackArgumentsAccessor args(rsp, rax);
2487 :
2488 : // Check if target is a Smi.
2489 56 : Label non_constructor;
2490 56 : __ JumpIfSmi(rdi, &non_constructor);
2491 :
2492 : // Check if target has a [[Construct]] internal method.
 : // The target's map is kept in rcx for the instance-type checks below.
2493 56 : __ LoadTaggedPointerField(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
2494 : __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2495 56 : Immediate(Map::IsConstructorBit::kMask));
2496 56 : __ j(zero, &non_constructor);
2497 :
2498 : // Dispatch based on instance type.
2499 56 : __ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
2500 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2501 56 : RelocInfo::CODE_TARGET, equal);
2502 :
2503 : // Only dispatch to bound functions after checking whether they are
2504 : // constructors.
2505 56 : __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2506 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2507 56 : RelocInfo::CODE_TARGET, equal);
2508 :
2509 : // Only dispatch to proxies after checking whether they are constructors.
2510 56 : __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2511 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy), RelocInfo::CODE_TARGET,
2512 56 : equal);
2513 :
2514 : // Called Construct on an exotic Object with a [[Construct]] internal method.
2515 : {
2516 : // Overwrite the original receiver with the (original) target.
2517 56 : __ movq(args.GetReceiverOperand(), rdi);
2518 : // Let the "call_as_constructor_delegate" take care of the rest.
2519 56 : __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
2520 : __ Jump(masm->isolate()->builtins()->CallFunction(),
2521 56 : RelocInfo::CODE_TARGET);
2522 : }
2523 :
2524 : // Called Construct on an Object that doesn't have a [[Construct]] internal
2525 : // method.
2526 56 : __ bind(&non_constructor);
2527 : __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2528 56 : RelocInfo::CODE_TARGET);
2529 56 : }
2530 :
2531 56 : void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
 : // Requests OSR compilation of the function in the caller's JavaScript frame
 : // and, if a code object is produced, redirects the return address to the
 : // OSR entry point inside it.
2532 : // Lookup the function in the JavaScript frame.
2533 56 : __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2534 56 : __ movq(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
2535 :
2536 : {
2537 56 : FrameScope scope(masm, StackFrame::INTERNAL);
2538 : // Pass function as argument.
2539 56 : __ Push(rax);
2540 56 : __ CallRuntime(Runtime::kCompileForOnStackReplacement);
2541 : }
2542 :
2543 56 : Label skip;
2544 : // If the code object is null, just return to the caller.
2545 56 : __ testq(rax, rax);
2546 56 : __ j(not_equal, &skip, Label::kNear);
2547 56 : __ ret(0);
2548 :
2549 56 : __ bind(&skip);
2550 :
2551 : // Drop the handler frame that is sitting on top of the actual
2552 : // JavaScript frame. This is the case when OSR is triggered from bytecode.
2553 56 : __ leave();
2554 :
2555 : // Load deoptimization data from the code object.
2556 : __ LoadTaggedPointerField(rbx,
2557 56 : FieldOperand(rax, Code::kDeoptimizationDataOffset));
2558 :
2559 : // Load the OSR entrypoint offset from the deoptimization data.
2560 : __ SmiUntagField(
2561 : rbx, FieldOperand(rbx, FixedArray::OffsetOfElementAt(
2562 56 : DeoptimizationData::kOsrPcOffsetIndex)));
2563 :
2564 : // Compute the target address = code_obj + header_size + osr_offset
2565 56 : __ leaq(rax, FieldOperand(rax, rbx, times_1, Code::kHeaderSize));
2566 :
2567 : // Overwrite the return address on the stack.
2568 56 : __ movq(StackOperandForReturnAddress(0), rax);
2569 :
2570 : // And "return" to the OSR entry point of the function.
2571 56 : __ ret(0);
2572 56 : }
2573 :
2574 56 : void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
 : // Trampoline for lazy compilation of a wasm function: saves all wasm
 : // parameter registers, calls Runtime::kWasmCompileLazy to compile the
 : // function, restores the registers, and jumps to the returned entrypoint.
2575 : // The function index was pushed to the stack by the caller as int32.
2576 56 : __ Pop(r11);
2577 : // Convert to Smi for the runtime call.
2578 56 : __ SmiTag(r11, r11);
2579 : {
2580 56 : HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2581 112 : FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2582 :
2583 : // Save all parameter registers (see wasm-linkage.cc). They might be
2584 : // overwritten in the runtime call below. We don't have any callee-saved
2585 : // registers in wasm, so no need to store anything else.
2586 : static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
2587 : arraysize(wasm::kGpParamRegisters),
2588 : "frame size mismatch");
2589 392 : for (Register reg : wasm::kGpParamRegisters) {
2590 336 : __ Push(reg);
2591 : }
2592 : static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
2593 : arraysize(wasm::kFpParamRegisters),
2594 : "frame size mismatch");
 : // FP registers are saved as full 128-bit values in manually reserved
 : // stack space since there is no push instruction for them.
2595 56 : __ subq(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
2596 56 : int offset = 0;
2597 392 : for (DoubleRegister reg : wasm::kFpParamRegisters) {
2598 336 : __ movdqu(Operand(rsp, offset), reg);
2599 336 : offset += kSimd128Size;
2600 : }
2601 :
2602 : // Push the WASM instance as an explicit argument to WasmCompileLazy.
2603 56 : __ Push(kWasmInstanceRegister);
2604 : // Push the function index as second argument.
2605 56 : __ Push(r11);
2606 : // Load the correct CEntry builtin from the instance object.
2607 : __ LoadTaggedPointerField(
2608 : rcx, FieldOperand(kWasmInstanceRegister,
2609 56 : WasmInstanceObject::kCEntryStubOffset));
2610 : // Initialize the JavaScript context with 0. CEntry will use it to
2611 : // set the current context on the isolate.
2612 56 : __ Move(kContextRegister, Smi::zero());
2613 56 : __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, rcx);
2614 : // The entrypoint address is the return value.
2615 56 : __ movq(r11, kReturnRegister0);
2616 :
2617 : // Restore registers.
 : // Registers are restored in reverse order of saving.
2618 392 : for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
2619 336 : offset -= kSimd128Size;
2620 336 : __ movdqu(reg, Operand(rsp, offset));
2621 : }
2622 : DCHECK_EQ(0, offset);
2623 56 : __ addq(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
2624 392 : for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
2625 336 : __ Pop(reg);
2626 56 : }
2627 : }
2628 : // Finally, jump to the entrypoint.
2629 56 : __ jmp(r11);
2630 56 : }
2631 :
2632 560 : void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2633 : SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2634 : bool builtin_exit_frame) {
 : // The JS-to-C++ transition stub: enters an exit frame, calls the C function
 : // in rbx, checks the result for the exception sentinel, and either returns
 : // to JS or unwinds to the pending exception handler.
2635 : // rax: number of arguments including receiver
2636 : // rbx: pointer to C function (C callee-saved)
2637 : // rbp: frame pointer of calling JS frame (restored after C call)
2638 : // rsp: stack pointer (restored after C call)
2639 : // rsi: current context (restored)
2640 : //
2641 : // If argv_mode == kArgvInRegister:
2642 : // r15: pointer to the first argument
2643 :
2644 : #ifdef _WIN64
2645 : // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
2646 : // stack to be aligned to 16 bytes. It only allows a single-word to be
2647 : // returned in register rax. Larger return sizes must be written to an address
2648 : // passed as a hidden first argument.
2649 : const Register kCCallArg0 = rcx;
2650 : const Register kCCallArg1 = rdx;
2651 : const Register kCCallArg2 = r8;
2652 : const Register kCCallArg3 = r9;
2653 : const int kArgExtraStackSpace = 2;
2654 : const int kMaxRegisterResultSize = 1;
2655 : #else
2656 : // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
2657 : // are returned in rax, and a struct of two pointers are returned in rax+rdx.
2658 : // Larger return sizes must be written to an address passed as a hidden first
2659 : // argument.
2660 560 : const Register kCCallArg0 = rdi;
2661 560 : const Register kCCallArg1 = rsi;
2662 560 : const Register kCCallArg2 = rdx;
2663 560 : const Register kCCallArg3 = rcx;
2664 560 : const int kArgExtraStackSpace = 0;
2665 560 : const int kMaxRegisterResultSize = 2;
2666 : #endif // _WIN64
2667 :
2668 : // Enter the exit frame that transitions from JavaScript to C++.
 : // Extra stack slots are only needed when the result does not fit in
 : // registers and must be written through a hidden pointer argument.
2669 : int arg_stack_space =
2670 560 : kArgExtraStackSpace +
2671 560 : (result_size <= kMaxRegisterResultSize ? 0 : result_size);
2672 560 : if (argv_mode == kArgvInRegister) {
2673 : DCHECK(save_doubles == kDontSaveFPRegs);
2674 : DCHECK(!builtin_exit_frame);
2675 112 : __ EnterApiExitFrame(arg_stack_space);
2676 : // Move argc into r14 (argv is already in r15).
2677 112 : __ movq(r14, rax);
2678 : } else {
2679 : __ EnterExitFrame(
2680 : arg_stack_space, save_doubles == kSaveFPRegs,
2681 448 : builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2682 : }
2683 :
2684 : // rbx: pointer to builtin function (C callee-saved).
2685 : // rbp: frame pointer of exit frame (restored after C call).
2686 : // rsp: stack pointer (restored after C call).
2687 : // r14: number of arguments including receiver (C callee-saved).
2688 : // r15: argv pointer (C callee-saved).
2689 :
2690 : // Check stack alignment.
2691 560 : if (FLAG_debug_code) {
2692 0 : __ CheckStackAlignment();
2693 : }
2694 :
2695 : // Call C function. The arguments object will be created by stubs declared by
2696 : // DECLARE_RUNTIME_FUNCTION().
2697 560 : if (result_size <= kMaxRegisterResultSize) {
2698 : // Pass a pointer to the Arguments object as the first argument.
2699 : // Return result in single register (rax), or a register pair (rax, rdx).
2700 560 : __ movq(kCCallArg0, r14); // argc.
2701 560 : __ movq(kCCallArg1, r15); // argv.
2702 560 : __ Move(kCCallArg2, ExternalReference::isolate_address(masm->isolate()));
2703 : } else {
2704 : DCHECK_LE(result_size, 2);
2705 : // Pass a pointer to the result location as the first argument.
2706 0 : __ leaq(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
2707 : // Pass a pointer to the Arguments object as the second argument.
2708 0 : __ movq(kCCallArg1, r14); // argc.
2709 0 : __ movq(kCCallArg2, r15); // argv.
2710 0 : __ Move(kCCallArg3, ExternalReference::isolate_address(masm->isolate()));
2711 : }
2712 560 : __ call(rbx);
2713 :
2714 560 : if (result_size > kMaxRegisterResultSize) {
2715 : // Read result values stored on stack. Result is stored
2716 : // above the two Arguments object slots on Win64.
2717 : DCHECK_LE(result_size, 2);
2718 0 : __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
2719 0 : __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
2720 : }
2721 : // Result is in rax or rdx:rax - do not destroy these registers!
2722 :
2723 : // Check result for exception sentinel.
2724 560 : Label exception_returned;
2725 560 : __ CompareRoot(rax, RootIndex::kException);
2726 560 : __ j(equal, &exception_returned);
2727 :
2728 : // Check that there is no pending exception, otherwise we
2729 : // should have returned the exception sentinel.
2730 560 : if (FLAG_debug_code) {
2731 0 : Label okay;
2732 0 : __ LoadRoot(r14, RootIndex::kTheHoleValue);
2733 : ExternalReference pending_exception_address = ExternalReference::Create(
2734 0 : IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2735 : Operand pending_exception_operand =
2736 0 : masm->ExternalReferenceAsOperand(pending_exception_address);
2737 0 : __ cmpq(r14, pending_exception_operand);
2738 0 : __ j(equal, &okay, Label::kNear);
2739 0 : __ int3();
2740 0 : __ bind(&okay);
2741 : }
2742 :
2743 : // Exit the JavaScript to C++ exit frame.
2744 560 : __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
2745 560 : __ ret(0);
2746 :
2747 : // Handling of exception.
2748 560 : __ bind(&exception_returned);
2749 :
2750 : ExternalReference pending_handler_context_address = ExternalReference::Create(
2751 560 : IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2752 : ExternalReference pending_handler_entrypoint_address =
2753 : ExternalReference::Create(
2754 560 : IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2755 : ExternalReference pending_handler_fp_address = ExternalReference::Create(
2756 560 : IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2757 : ExternalReference pending_handler_sp_address = ExternalReference::Create(
2758 560 : IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2759 :
2760 : // Ask the runtime for help to determine the handler. This will set rax to
2761 : // contain the current pending exception, don't clobber it.
2762 : ExternalReference find_handler =
2763 560 : ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2764 : {
2765 560 : FrameScope scope(masm, StackFrame::MANUAL);
2766 560 : __ movq(arg_reg_1, Immediate(0)); // argc.
2767 560 : __ movq(arg_reg_2, Immediate(0)); // argv.
2768 560 : __ Move(arg_reg_3, ExternalReference::isolate_address(masm->isolate()));
2769 560 : __ PrepareCallCFunction(3);
2770 560 : __ CallCFunction(find_handler, 3);
2771 : }
2772 : // Retrieve the handler context, SP and FP.
2773 : __ movq(rsi,
2774 560 : masm->ExternalReferenceAsOperand(pending_handler_context_address));
2775 560 : __ movq(rsp, masm->ExternalReferenceAsOperand(pending_handler_sp_address));
2776 560 : __ movq(rbp, masm->ExternalReferenceAsOperand(pending_handler_fp_address));
2777 :
2778 : // If the handler is a JS frame, restore the context to the frame. Note that
2779 : // the context will be set to (rsi == 0) for non-JS frames.
2780 560 : Label skip;
2781 560 : __ testq(rsi, rsi);
2782 560 : __ j(zero, &skip, Label::kNear);
2783 560 : __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
2784 560 : __ bind(&skip);
2785 :
2786 : // Reset the masking register. This is done independent of the underlying
2787 : // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
2788 : // with both configurations. It is safe to always do this, because the
2789 : // underlying register is caller-saved and can be arbitrarily clobbered.
2790 560 : __ ResetSpeculationPoisonRegister();
2791 :
2792 : // Compute the handler entry address and jump to it.
2793 : __ movq(rdi,
2794 560 : masm->ExternalReferenceAsOperand(pending_handler_entrypoint_address));
2795 560 : __ jmp(rdi);
2796 560 : }
2797 :
2798 56 : void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2799 56 : Label check_negative, process_64_bits, done;
2800 :
2801 : // Account for return address and saved regs.
2802 56 : const int kArgumentOffset = 4 * kSystemPointerSize;
2803 :
2804 56 : MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
2805 : MemOperand exponent_operand(
2806 56 : MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));
2807 :
2808 : // The result is returned on the stack.
2809 56 : MemOperand return_operand = mantissa_operand;
2810 :
2811 56 : Register scratch1 = rbx;
2812 :
2813 : // Since we must use rcx for shifts below, use some other register (rax)
2814 : // to calculate the result if ecx is the requested return register.
2815 56 : Register result_reg = rax;
2816 : // Save ecx if it isn't the return register and therefore volatile, or if it
2817 : // is the return register, then save the temp register we use in its stead
2818 : // for the result.
2819 56 : Register save_reg = rax;
2820 56 : __ pushq(rcx);
2821 56 : __ pushq(scratch1);
2822 56 : __ pushq(save_reg);
2823 :
2824 56 : __ movl(scratch1, mantissa_operand);
2825 56 : __ Movsd(kScratchDoubleReg, mantissa_operand);
2826 56 : __ movl(rcx, exponent_operand);
2827 :
2828 56 : __ andl(rcx, Immediate(HeapNumber::kExponentMask));
2829 56 : __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
2830 56 : __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
2831 56 : __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
2832 56 : __ j(below, &process_64_bits, Label::kNear);
2833 :
2834 : // Result is entirely in lower 32-bits of mantissa
2835 56 : int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
2836 56 : __ subl(rcx, Immediate(delta));
2837 56 : __ xorl(result_reg, result_reg);
2838 56 : __ cmpl(rcx, Immediate(31));
2839 56 : __ j(above, &done, Label::kNear);
2840 56 : __ shll_cl(scratch1);
2841 56 : __ jmp(&check_negative, Label::kNear);
2842 :
2843 56 : __ bind(&process_64_bits);
2844 56 : __ Cvttsd2siq(result_reg, kScratchDoubleReg);
2845 56 : __ jmp(&done, Label::kNear);
2846 :
2847 : // If the double was negative, negate the integer result.
2848 56 : __ bind(&check_negative);
2849 56 : __ movl(result_reg, scratch1);
2850 56 : __ negl(result_reg);
2851 56 : __ cmpl(exponent_operand, Immediate(0));
2852 56 : __ cmovl(greater, result_reg, scratch1);
2853 :
2854 : // Restore registers
2855 56 : __ bind(&done);
2856 56 : __ movl(return_operand, result_reg);
2857 56 : __ popq(save_reg);
2858 56 : __ popq(scratch1);
2859 56 : __ popq(rcx);
2860 56 : __ ret(0);
2861 56 : }
2862 :
2863 56 : void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
 : // Constructs an internal (packed) array. In debug builds, verifies that the
 : // constructor's initial map is valid, the elements kind is PACKED_ELEMENTS,
 : // and no arguments were passed, then tail-calls the no-argument constructor.
2864 : // ----------- S t a t e -------------
2865 : // -- rax : argc
2866 : // -- rdi : constructor
2867 : // -- rsp[0] : return address
2868 : // -- rsp[8] : last argument
2869 : // -----------------------------------
2870 :
2871 56 : if (FLAG_debug_code) {
2872 : // The array construct code is only set for the global and natives
2873 : // builtin Array functions which always have maps.
2874 :
2875 : // Initial map for the builtin Array function should be a map.
2876 : __ LoadTaggedPointerField(
2877 0 : rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
2878 : // Will both indicate a nullptr and a Smi.
2879 : STATIC_ASSERT(kSmiTag == 0);
2880 0 : Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
2881 0 : __ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
2882 0 : __ CmpObjectType(rcx, MAP_TYPE, rcx);
2883 0 : __ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
2884 :
2885 : // Figure out the right elements kind
2886 : __ LoadTaggedPointerField(
2887 0 : rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
2888 :
2889 : // Load the map's "bit field 2" into |result|. We only need the first byte,
2890 : // but the following masking takes care of that anyway.
2891 0 : __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset));
2892 : // Retrieve elements_kind from bit field 2.
2893 0 : __ DecodeField<Map::ElementsKindBits>(rcx);
2894 :
2895 : // Initial elements kind should be packed elements.
2896 0 : __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
2897 0 : __ Assert(equal, AbortReason::kInvalidElementsKindForInternalPackedArray);
2898 :
2899 : // No arguments should be passed.
2900 0 : __ testq(rax, rax);
2901 0 : __ Assert(zero, AbortReason::kWrongNumberOfArgumentsForInternalPackedArray);
2902 : }
2903 :
2904 : __ Jump(
2905 : BUILTIN_CODE(masm->isolate(), InternalArrayNoArgumentConstructor_Packed),
2906 56 : RelocInfo::CODE_TARGET);
2907 56 : }
2908 :
2909 : namespace {
2910 :
2911 224 : int Offset(ExternalReference ref0, ExternalReference ref1) {
2912 224 : int64_t offset = (ref0.address() - ref1.address());
2913 : // Check that fits into int.
2914 : DCHECK(static_cast<int>(offset) == offset);
2915 224 : return static_cast<int>(offset);
2916 : }
2917 :
2918 : // Calls an API function. Allocates HandleScope, extracts returned value
2919 : // from handle and propagates exceptions. Clobbers r14, r15, rbx and
2920 : // caller-save registers. Restores context. On return removes
2921 : // stack_space * kSystemPointerSize (GCed).
2922 112 : void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address,
2923 : ExternalReference thunk_ref,
2924 : Register thunk_last_arg, int stack_space,
2925 : Operand* stack_space_operand,
2926 : Operand return_value_operand) {
2927 112 : Label prologue;
2928 112 : Label promote_scheduled_exception;
2929 112 : Label delete_allocated_handles;
2930 112 : Label leave_exit_frame;
2931 :
2932 112 : Isolate* isolate = masm->isolate();
2933 112 : Factory* factory = isolate->factory();
2934 : ExternalReference next_address =
2935 112 : ExternalReference::handle_scope_next_address(isolate);
2936 112 : const int kNextOffset = 0;
2937 : const int kLimitOffset = Offset(
2938 112 : ExternalReference::handle_scope_limit_address(isolate), next_address);
2939 : const int kLevelOffset = Offset(
2940 112 : ExternalReference::handle_scope_level_address(isolate), next_address);
2941 : ExternalReference scheduled_exception_address =
2942 112 : ExternalReference::scheduled_exception_address(isolate);
2943 :
2944 : DCHECK(rdx == function_address || r8 == function_address);
2945 : // Allocate HandleScope in callee-save registers.
2946 112 : Register prev_next_address_reg = r14;
2947 112 : Register prev_limit_reg = rbx;
2948 112 : Register base_reg = r15;
2949 112 : __ Move(base_reg, next_address);
2950 112 : __ movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
2951 112 : __ movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
2952 112 : __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
2953 :
2954 112 : if (FLAG_log_timer_events) {
2955 0 : FrameScope frame(masm, StackFrame::MANUAL);
2956 0 : __ PushSafepointRegisters();
2957 0 : __ PrepareCallCFunction(1);
2958 0 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
2959 0 : __ CallCFunction(ExternalReference::log_enter_external_function(), 1);
2960 0 : __ PopSafepointRegisters();
2961 : }
2962 :
2963 112 : Label profiler_disabled;
2964 112 : Label end_profiler_check;
2965 112 : __ Move(rax, ExternalReference::is_profiling_address(isolate));
2966 112 : __ cmpb(Operand(rax, 0), Immediate(0));
2967 112 : __ j(zero, &profiler_disabled);
2968 :
2969 : // Third parameter is the address of the actual getter function.
2970 112 : __ Move(thunk_last_arg, function_address);
2971 112 : __ Move(rax, thunk_ref);
2972 112 : __ jmp(&end_profiler_check);
2973 :
2974 112 : __ bind(&profiler_disabled);
2975 : // Call the api function!
2976 112 : __ Move(rax, function_address);
2977 :
2978 112 : __ bind(&end_profiler_check);
2979 :
2980 : // Call the api function!
2981 112 : __ call(rax);
2982 :
2983 112 : if (FLAG_log_timer_events) {
2984 0 : FrameScope frame(masm, StackFrame::MANUAL);
2985 0 : __ PushSafepointRegisters();
2986 0 : __ PrepareCallCFunction(1);
2987 0 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
2988 0 : __ CallCFunction(ExternalReference::log_leave_external_function(), 1);
2989 0 : __ PopSafepointRegisters();
2990 : }
2991 :
2992 : // Load the value from ReturnValue
2993 112 : __ movq(rax, return_value_operand);
2994 112 : __ bind(&prologue);
2995 :
2996 : // No more valid handles (the result handle was the last one). Restore
2997 : // previous handle scope.
2998 112 : __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
2999 112 : __ movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
3000 112 : __ cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
3001 112 : __ j(not_equal, &delete_allocated_handles);
3002 :
3003 : // Leave the API exit frame.
3004 112 : __ bind(&leave_exit_frame);
3005 112 : if (stack_space_operand != nullptr) {
3006 : DCHECK_EQ(stack_space, 0);
3007 56 : __ movq(rbx, *stack_space_operand);
3008 : }
3009 112 : __ LeaveApiExitFrame();
3010 :
3011 : // Check if the function scheduled an exception.
3012 112 : __ Move(rdi, scheduled_exception_address);
3013 224 : __ Cmp(Operand(rdi, 0), factory->the_hole_value());
3014 112 : __ j(not_equal, &promote_scheduled_exception);
3015 :
3016 : #if DEBUG
3017 : // Check if the function returned a valid JavaScript value.
3018 : Label ok;
3019 : Register return_value = rax;
3020 : Register map = rcx;
3021 :
3022 : __ JumpIfSmi(return_value, &ok, Label::kNear);
3023 : __ LoadTaggedPointerField(map,
3024 : FieldOperand(return_value, HeapObject::kMapOffset));
3025 :
3026 : __ CmpInstanceType(map, LAST_NAME_TYPE);
3027 : __ j(below_equal, &ok, Label::kNear);
3028 :
3029 : __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
3030 : __ j(above_equal, &ok, Label::kNear);
3031 :
3032 : __ CompareRoot(map, RootIndex::kHeapNumberMap);
3033 : __ j(equal, &ok, Label::kNear);
3034 :
3035 : __ CompareRoot(return_value, RootIndex::kUndefinedValue);
3036 : __ j(equal, &ok, Label::kNear);
3037 :
3038 : __ CompareRoot(return_value, RootIndex::kTrueValue);
3039 : __ j(equal, &ok, Label::kNear);
3040 :
3041 : __ CompareRoot(return_value, RootIndex::kFalseValue);
3042 : __ j(equal, &ok, Label::kNear);
3043 :
3044 : __ CompareRoot(return_value, RootIndex::kNullValue);
3045 : __ j(equal, &ok, Label::kNear);
3046 :
3047 : __ Abort(AbortReason::kAPICallReturnedInvalidObject);
3048 :
3049 : __ bind(&ok);
3050 : #endif
3051 :
3052 112 : if (stack_space_operand == nullptr) {
3053 : DCHECK_NE(stack_space, 0);
3054 56 : __ ret(stack_space * kSystemPointerSize);
3055 : } else {
3056 : DCHECK_EQ(stack_space, 0);
3057 56 : __ PopReturnAddressTo(rcx);
3058 56 : __ addq(rsp, rbx);
3059 56 : __ jmp(rcx);
3060 : }
3061 :
3062 : // Re-throw by promoting a scheduled exception.
3063 112 : __ bind(&promote_scheduled_exception);
3064 112 : __ TailCallRuntime(Runtime::kPromoteScheduledException);
3065 :
3066 : // HandleScope limit has changed. Delete allocated extensions.
3067 112 : __ bind(&delete_allocated_handles);
3068 112 : __ movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
3069 112 : __ movq(prev_limit_reg, rax);
3070 112 : __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
3071 112 : __ LoadAddress(rax, ExternalReference::delete_handle_scope_extensions());
3072 112 : __ call(rax);
3073 112 : __ movq(rax, prev_limit_reg);
3074 112 : __ jmp(&leave_exit_frame);
3075 112 : }
3076 :
3077 : } // namespace
3078 :
3079 : // TODO(jgruber): Instead of explicitly setting up implicit_args_ on the stack
3080 : // in CallApiCallback, we could use the calling convention to set up the stack
3081 : // correctly in the first place.
3082 : //
3083 : // TODO(jgruber): I suspect that most of CallApiCallback could be implemented
3084 : // as a C++ trampoline, vastly simplifying the assembly implementation.
3085 :
// Builds a FunctionCallbackInfo structure on the stack and calls the C++
// API function through the invoke_function_callback thunk (which handles
// scheduled exceptions and callback profiling via CallApiFunctionAndReturn).
void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rsi                 : context
  //  -- rdx                 : api function address
  //  -- rcx                 : arguments count (not including the receiver)
  //  -- rbx                 : call data
  //  -- rdi                 : holder
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register api_function_address = rdx;
  Register argc = rcx;
  Register call_data = rbx;
  Register holder = rdi;

  // kScratchRegister is clobbered repeatedly below, so it must not carry
  // any of the incoming values.
  DCHECK(!AreAliased(api_function_address, argc, holder, call_data,
                     kScratchRegister));

  typedef FunctionCallbackArguments FCA;

  // The stack layout built below hard-codes these slot indices; the asserts
  // keep the layout in sync with FunctionCallbackArguments.
  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
  //
  // Current state:
  //   rsp[0]: return address
  //
  // Target state:
  //   rsp[0 * kSystemPointerSize]: return address
  //   rsp[1 * kSystemPointerSize]: kHolder
  //   rsp[2 * kSystemPointerSize]: kIsolate
  //   rsp[3 * kSystemPointerSize]: undefined (kReturnValueDefaultValue)
  //   rsp[4 * kSystemPointerSize]: undefined (kReturnValue)
  //   rsp[5 * kSystemPointerSize]: kData
  //   rsp[6 * kSystemPointerSize]: undefined (kNewTarget)

  // Reserve space on the stack.
  __ subq(rsp, Immediate(FCA::kArgsLength * kSystemPointerSize));

  // Return address (the old stack location is overwritten later on).
  __ movq(kScratchRegister,
          Operand(rsp, FCA::kArgsLength * kSystemPointerSize));
  __ movq(Operand(rsp, 0 * kSystemPointerSize), kScratchRegister);

  // kHolder.
  __ movq(Operand(rsp, 1 * kSystemPointerSize), holder);

  // kIsolate.
  __ Move(kScratchRegister,
          ExternalReference::isolate_address(masm->isolate()));
  __ movq(Operand(rsp, 2 * kSystemPointerSize), kScratchRegister);

  // kReturnValueDefaultValue and kReturnValue.
  __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
  __ movq(Operand(rsp, 3 * kSystemPointerSize), kScratchRegister);
  __ movq(Operand(rsp, 4 * kSystemPointerSize), kScratchRegister);

  // kData.
  __ movq(Operand(rsp, 5 * kSystemPointerSize), call_data);

  // kNewTarget (kScratchRegister still holds undefined from the LoadRoot
  // above).
  __ movq(Operand(rsp, 6 * kSystemPointerSize), kScratchRegister);

  // Keep a pointer to kHolder (= implicit_args) in a scratch register.
  // We use it below to set up the FunctionCallbackInfo object.
  // NOTE: scratch aliases call_data (rbx); call_data was already stored into
  // its slot above and is dead from here on.
  Register scratch = rbx;
  __ leaq(scratch, Operand(rsp, 1 * kSystemPointerSize));

  // Allocate the v8::FunctionCallbackInfo structure ("v8::Arguments") in the
  // non-GCed stack space reserved by EnterApiExitFrame.
  static constexpr int kApiStackSpace = 4;
  __ EnterApiExitFrame(kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
  __ movq(StackSpaceOperand(0), scratch);

  // FunctionCallbackInfo::values_ (points at the first varargs argument passed
  // on the stack).
  __ leaq(scratch, Operand(scratch, argc, times_system_pointer_size,
                           (FCA::kArgsLength - 1) * kSystemPointerSize));
  __ movq(StackSpaceOperand(1), scratch);

  // FunctionCallbackInfo::length_.
  __ movq(StackSpaceOperand(2), argc);

  // We also store the number of bytes to drop from the stack after returning
  // from the API function here: the FCA slots, the receiver, and argc
  // arguments.
  __ leaq(kScratchRegister,
          Operand(argc, times_system_pointer_size,
                  (FCA::kArgsLength + 1 /* receiver */) * kSystemPointerSize));
  __ movq(StackSpaceOperand(3), kScratchRegister);

  Register arguments_arg = arg_reg_1;
  Register callback_arg = arg_reg_2;

  // It's okay if api_function_address == callback_arg
  // but not arguments_arg
  DCHECK(api_function_address != arguments_arg);

  // v8::InvocationCallback's argument.
  __ leaq(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref = ExternalReference::invoke_function_callback();

  // There are two stack slots above the arguments we constructed on the stack:
  // the stored rbp (pushed by EnterApiExitFrame), and the return address.
  static constexpr int kStackSlotsAboveFCA = 2;
  Operand return_value_operand(
      rbp,
      (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kSystemPointerSize);

  // stack_space == 0: the byte count to pop was computed dynamically into
  // StackSpaceOperand(3) above, so pass it by operand.
  static constexpr int kUseStackSpaceOperand = 0;
  Operand stack_space_operand = StackSpaceOperand(3);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
                           kUseStackSpaceOperand, &stack_space_operand,
                           return_value_operand);
}
3213 :
// Builds a v8::PropertyCallbackInfo on the stack and calls the C++ accessor
// getter through the invoke_accessor_getter_callback thunk (which handles
// scheduled exceptions and callback profiling via CallApiFunctionAndReturn).
// Register inputs come from ApiGetterDescriptor.
void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  Register name_arg = arg_reg_1;
  Register accessor_info_arg = arg_reg_2;
  Register getter_arg = arg_reg_3;
  Register api_function_address = r8;
  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = rax;
  // Extra scratches are only needed when pointer compression is on (for
  // decompressing tagged fields before pushing them).
  Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
  Register decompr_scratch2 = COMPRESS_POINTERS_BOOL ? r12 : no_reg;

  DCHECK(!AreAliased(receiver, holder, callback, scratch, decompr_scratch1,
                     decompr_scratch2));

  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  // The pushes below must produce exactly this slot layout (highest index
  // pushed first, i.e. at the highest address):
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  // Insert additional parameters into the stack frame above return address.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);  // kThisIndex
  // kDataIndex: AccessorInfo::data, a tagged field that may need
  // decompression.
  __ PushTaggedAnyField(FieldOperand(callback, AccessorInfo::kDataOffset),
                        decompr_scratch1, decompr_scratch2);
  __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
  __ Push(kScratchRegister);  // return value
  __ Push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(masm->isolate()));
  __ Push(holder);
  __ Push(Smi::zero());  // should_throw_on_error -> false
  // Property name handle, pushed below the args_ array (lowest address).
  __ PushTaggedPointerField(FieldOperand(callback, AccessorInfo::kNameOffset),
                            decompr_scratch1);
  __ PushReturnAddressFrom(scratch);

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // Load address of v8::PropertyAccessorInfo::args_ array (skipping the
  // return address and the name handle).
  __ leaq(scratch, Operand(rsp, 2 * kSystemPointerSize));

  __ EnterApiExitFrame(kArgStackSpace);

  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // it's args_ field.
  Operand info_object = StackSpaceOperand(0);
  __ movq(info_object, scratch);

  // name_arg points at the name handle, one slot below args_.
  __ leaq(name_arg, Operand(scratch, -kSystemPointerSize));
  // The context register (rsi) has been saved in EnterApiExitFrame and
  // could be used to pass arguments.
  __ leaq(accessor_info_arg, info_object);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback();

  // It's okay if api_function_address == getter_arg
  // but not accessor_info_arg or name_arg
  DCHECK(api_function_address != accessor_info_arg);
  DCHECK(api_function_address != name_arg);
  // Load the raw C++ getter address out of the AccessorInfo's js_getter
  // Foreign.
  __ LoadTaggedPointerField(
      scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  __ movq(api_function_address,
          FieldOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      rbp,
      (PropertyCallbackArguments::kReturnValueOffset + 3) * kSystemPointerSize);
  // The pop count is the compile-time constant kStackUnwindSpace, so no
  // stack-space operand is needed.
  Operand* const kUseStackSpaceConstant = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackUnwindSpace, kUseStackSpaceConstant,
                           return_value_operand);
}
3297 :
// DirectCEntry is not used on x64; emit a breakpoint trap so that any
// accidental call into this builtin fails fast and loudly.
void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  __ int3();  // Unused on this architecture.
}
3301 :
3302 : #undef __
3303 :
3304 : } // namespace internal
3305 86739 : } // namespace v8
3306 :
3307 : #endif // V8_TARGET_ARCH_X64
|