Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/builtins/builtins-constructor-gen.h"
6 :
7 : #include "src/ast/ast.h"
8 : #include "src/builtins/builtins-call-gen.h"
9 : #include "src/builtins/builtins-constructor.h"
10 : #include "src/builtins/builtins-utils-gen.h"
11 : #include "src/builtins/builtins.h"
12 : #include "src/code-factory.h"
13 : #include "src/code-stub-assembler.h"
14 : #include "src/counters.h"
15 : #include "src/interface-descriptors.h"
16 : #include "src/macro-assembler.h"
17 : #include "src/objects-inl.h"
18 :
19 : namespace v8 {
20 : namespace internal {
21 :
// Entry point for [[Construct]] with a variable number of arguments taken
// from an array. Delegates to the shared call/construct varargs generator,
// targeting the generic Construct builtin for the actual dispatch.
void Builtins::Generate_ConstructVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructVarargs(masm,
                                  BUILTIN_CODE(masm->isolate(), Construct));
}
26 :
// Entry point for [[Construct]] that forwards the caller's (rest) arguments.
// Shares the forward-varargs generator with the Call builtins, in construct
// mode, targeting the generic Construct builtin.
void Builtins::Generate_ConstructForwardVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructForwardVarargs(
      masm, CallOrConstructMode::kConstruct,
      BUILTIN_CODE(masm->isolate(), Construct));
}
32 :
// Like Generate_ConstructForwardVarargs, but specialized for targets that
// are known to be JSFunctions, so it can dispatch to the cheaper
// ConstructFunction builtin instead of the generic Construct path.
void Builtins::Generate_ConstructFunctionForwardVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructForwardVarargs(
      masm, CallOrConstructMode::kConstruct,
      BUILTIN_CODE(masm->isolate(), ConstructFunction));
}
38 :
// CSA builtin implementing construct with an array-like arguments object
// (e.g. Reflect.construct). Loads the descriptor parameters and delegates
// to the shared call/construct helper.
TF_BUILTIN(ConstructWithArrayLike, CallOrConstructBuiltinsAssembler) {
  TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
  SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
  TNode<Object> arguments_list = CAST(Parameter(Descriptor::kArgumentsList));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
}
46 :
// CSA builtin implementing `new f(...spread)`. The spread iterable and the
// count of already-pushed arguments are handed to the shared helper, which
// expands the spread and performs the construct call.
TF_BUILTIN(ConstructWithSpread, CallOrConstructBuiltinsAssembler) {
  TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
  SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
  TNode<Object> spread = CAST(Parameter(Descriptor::kSpread));
  // Count of arguments on the stack, excluding the spread itself.
  TNode<Int32T> args_count =
      UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}
56 :
// Shorthand for the untyped CSA node type used throughout the rest of this
// file.
typedef compiler::Node Node;
58 :
// CSA builtin that allocates a new JSFunction (closure) in new space from a
// SharedFunctionInfo and a FeedbackCell, without calling into the runtime.
// The closure starts out pointing at the CompileLazy builtin as its code.
TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
  Node* shared_function_info = Parameter(Descriptor::kSharedFunctionInfo);
  Node* feedback_cell = Parameter(Descriptor::kFeedbackCell);
  Node* context = Parameter(Descriptor::kContext);

  CSA_ASSERT(this, IsFeedbackCell(feedback_cell));
  CSA_ASSERT(this, IsSharedFunctionInfo(shared_function_info));

  IncrementCounter(isolate()->counters()->fast_new_closure_total(), 1);

  // Bump the closure counter encoded in the {feedback_cell}'s map: the map
  // transitions NoClosures -> OneClosure -> ManyClosures and then saturates.
  // A NoFeedbackCell map is left untouched.
  {
    Node* const feedback_cell_map = LoadMap(feedback_cell);
    Label no_closures(this), one_closure(this), cell_done(this);

    GotoIf(IsNoFeedbackCellMap(feedback_cell_map), &cell_done);
    GotoIf(IsNoClosuresCellMap(feedback_cell_map), &no_closures);
    GotoIf(IsOneClosureCellMap(feedback_cell_map), &one_closure);
    CSA_ASSERT(this, IsManyClosuresCellMap(feedback_cell_map),
               feedback_cell_map, feedback_cell);
    Goto(&cell_done);

    BIND(&no_closures);
    StoreMapNoWriteBarrier(feedback_cell, RootIndex::kOneClosureCellMap);
    Goto(&cell_done);

    BIND(&one_closure);
    StoreMapNoWriteBarrier(feedback_cell, RootIndex::kManyClosuresCellMap);
    Goto(&cell_done);

    BIND(&cell_done);
  }

  // The calculation of |function_map_index| must be in sync with
  // SharedFunctionInfo::function_map_index().
  Node* const flags =
      LoadObjectField(shared_function_info, SharedFunctionInfo::kFlagsOffset,
                      MachineType::Uint32());
  Node* const function_map_index = IntPtrAdd(
      DecodeWordFromWord32<SharedFunctionInfo::FunctionMapIndexBits>(flags),
      IntPtrConstant(Context::FIRST_FUNCTION_MAP_INDEX));
  CSA_ASSERT(this, UintPtrLessThanOrEqual(
                       function_map_index,
                       IntPtrConstant(Context::LAST_FUNCTION_MAP_INDEX)));

  // Get the function map in the current native context and set that
  // as the map of the allocated object.
  Node* const native_context = LoadNativeContext(context);
  Node* const function_map =
      LoadContextElement(native_context, function_map_index);

  // Create a new closure from the given function info in new space.
  TNode<IntPtrT> instance_size_in_bytes =
      TimesTaggedSize(LoadMapInstanceSizeInWords(function_map));
  TNode<Object> result = Allocate(instance_size_in_bytes);
  StoreMapNoWriteBarrier(result, function_map);
  InitializeJSObjectBodyNoSlackTracking(result, function_map,
                                        instance_size_in_bytes,
                                        JSFunction::kSizeWithoutPrototype);

  // Initialize the rest of the function: empty properties/elements stores.
  StoreObjectFieldRoot(result, JSObject::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSObject::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  {
    // Set function prototype if necessary: maps with a prototype slot get
    // the hole, which signals "initial map not yet created".
    Label done(this), init_prototype(this);
    Branch(IsFunctionWithPrototypeSlotMap(function_map), &init_prototype,
           &done);

    BIND(&init_prototype);
    StoreObjectFieldRoot(result, JSFunction::kPrototypeOrInitialMapOffset,
                         RootIndex::kTheHoleValue);
    Goto(&done);
    BIND(&done);
  }

  // Guard: the remaining stores below must cover exactly the fields of a
  // prototype-less JSFunction.
  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackCellOffset,
                                 feedback_cell);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                 shared_function_info);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
  // New closures always start lazily: point the code field at CompileLazy.
  Handle<Code> lazy_builtin_handle(
      isolate()->builtins()->builtin(Builtins::kCompileLazy), isolate());
  Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeOffset, lazy_builtin);
  Return(result);
}
149 :
// CSA builtin wrapping EmitFastNewObject: fast-path allocation of the
// receiver for `new target(...)`. Falls back to the Runtime::kNewObject
// runtime call when the fast path bails out.
TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
  Node* context = Parameter(Descriptor::kContext);
  Node* target = Parameter(Descriptor::kTarget);
  Node* new_target = Parameter(Descriptor::kNewTarget);

  Label call_runtime(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  Return(result);

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kNewObject, context, target, new_target);
}
163 :
// Convenience overload of EmitFastNewObject that handles the runtime
// fallback inline: on fast-path bailout it calls Runtime::kNewObject and
// returns whichever result was produced.
Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
                                                      Node* target,
                                                      Node* new_target) {
  VARIABLE(var_obj, MachineRepresentation::kTagged);
  Label call_runtime(this), end(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  var_obj.Bind(result);
  Goto(&end);

  BIND(&call_runtime);
  var_obj.Bind(CallRuntime(Runtime::kNewObject, context, target, new_target));
  Goto(&end);

  BIND(&end);
  return var_obj.value();
}
181 :
// Fast-path receiver allocation for a construct call: allocates a JSObject
// from {new_target}'s initial map. Jumps to {call_runtime} whenever the
// fast path cannot be taken (non-JSFunction new target, initial map not yet
// created, or target mismatch with the initial map's constructor).
Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
                                                      Node* target,
                                                      Node* new_target,
                                                      Label* call_runtime) {
  CSA_ASSERT(this, HasInstanceType(target, JS_FUNCTION_TYPE));
  CSA_ASSERT(this, IsJSReceiver(new_target));

  // Verify that the new target is a JSFunction.
  Label fast(this), end(this);
  GotoIf(HasInstanceType(new_target, JS_FUNCTION_TYPE), &fast);
  Goto(call_runtime);

  BIND(&fast);

  // Load the initial map and verify that it's in fact a map. A Smi here
  // means the prototype-or-initial-map slot still holds the hole/prototype,
  // i.e. no initial map has been created yet.
  Node* initial_map =
      LoadObjectField(new_target, JSFunction::kPrototypeOrInitialMapOffset);
  GotoIf(TaggedIsSmi(initial_map), call_runtime);
  GotoIf(DoesntHaveInstanceType(initial_map, MAP_TYPE), call_runtime);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  Node* new_target_constructor =
      LoadObjectField(initial_map, Map::kConstructorOrBackPointerOffset);
  GotoIf(WordNotEqual(target, new_target_constructor), call_runtime);

  VARIABLE(properties, MachineRepresentation::kTagged);

  // Dictionary-mode maps need a fresh NameDictionary backing store; fast
  // maps start with the canonical empty fixed array.
  Label instantiate_map(this), allocate_properties(this);
  GotoIf(IsDictionaryMap(initial_map), &allocate_properties);
  {
    properties.Bind(EmptyFixedArrayConstant());
    Goto(&instantiate_map);
  }
  BIND(&allocate_properties);
  {
    properties.Bind(AllocateNameDictionary(NameDictionary::kInitialCapacity));
    Goto(&instantiate_map);
  }

  BIND(&instantiate_map);
  // kWithSlackTracking: the map may still shrink the instance size as the
  // constructor's field usage is observed.
  return AllocateJSObjectFromMap(initial_map, properties.value(), nullptr,
                                 kNone, kWithSlackTracking);
}
226 :
// Allocates and initializes a function or eval Context in new space with
// {slots_uint32} user slots, chained to {context} as the previous context.
// Only EVAL_SCOPE and FUNCTION_SCOPE are supported; other scope types must
// go through the runtime.
Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
    Node* scope_info, Node* slots_uint32, Node* context, ScopeType scope_type) {
  TNode<IntPtrT> slots = Signed(ChangeUint32ToWord(slots_uint32));
  // Total byte size = header + slots * tagged size.
  TNode<IntPtrT> size = ElementOffsetFromIndex(
      slots, PACKED_ELEMENTS, INTPTR_PARAMETERS, Context::kTodoHeaderSize);

  // Allocate the new context in new space.
  TNode<Context> function_context =
      UncheckedCast<Context>(AllocateInNewSpace(size));

  // Pick the map matching the scope type.
  RootIndex context_type;
  switch (scope_type) {
    case EVAL_SCOPE:
      context_type = RootIndex::kEvalContextMap;
      break;
    case FUNCTION_SCOPE:
      context_type = RootIndex::kFunctionContextMap;
      break;
    default:
      UNREACHABLE();
  }
  // Set up the header. No write barriers needed: the object is in new space.
  StoreMapNoWriteBarrier(function_context, context_type);
  TNode<IntPtrT> min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
  // TODO(ishell): for now, length also includes MIN_CONTEXT_SLOTS.
  TNode<IntPtrT> length = IntPtrAdd(slots, min_context_slots);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
                                 SmiTag(length));
  StoreObjectFieldNoWriteBarrier(function_context, Context::kScopeInfoOffset,
                                 scope_info);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kPreviousOffset,
                                 context);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kExtensionOffset,
                                 TheHoleConstant());
  TNode<Context> native_context = LoadNativeContext(context);
  StoreObjectFieldNoWriteBarrier(function_context,
                                 Context::kNativeContextOffset, native_context);

  // Initialize the rest of the slots to undefined.
  TNode<HeapObject> undefined = UndefinedConstant();
  TNode<IntPtrT> start_offset = IntPtrConstant(Context::kTodoHeaderSize);
  CodeStubAssembler::VariableList vars(0, zone());
  BuildFastLoop(
      vars, start_offset, size,
      [=](Node* offset) {
        StoreObjectFieldNoWriteBarrier(
            function_context, UncheckedCast<IntPtrT>(offset), undefined);
      },
      kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
  return function_context;
}
278 :
// CSA builtin: allocate a fresh eval-scope context via
// EmitFastNewFunctionContext.
TF_BUILTIN(FastNewFunctionContextEval, ConstructorBuiltinsAssembler) {
  Node* scope_info = Parameter(Descriptor::kScopeInfo);
  Node* slots = Parameter(Descriptor::kSlots);
  Node* context = Parameter(Descriptor::kContext);
  Return(EmitFastNewFunctionContext(scope_info, slots, context,
                                    ScopeType::EVAL_SCOPE));
}
286 :
// CSA builtin: allocate a fresh function-scope context via
// EmitFastNewFunctionContext.
TF_BUILTIN(FastNewFunctionContextFunction, ConstructorBuiltinsAssembler) {
  Node* scope_info = Parameter(Descriptor::kScopeInfo);
  Node* slots = Parameter(Descriptor::kSlots);
  Node* context = Parameter(Descriptor::kContext);
  Return(EmitFastNewFunctionContext(scope_info, slots, context,
                                    ScopeType::FUNCTION_SCOPE));
}
294 :
// Creates a JSRegExp for a regexp literal. If the feedback slot already
// holds a boilerplate regexp, a shallow field-by-field copy of it is made;
// otherwise the (deferred) runtime path creates the literal and populates
// the feedback slot.
Node* ConstructorBuiltinsAssembler::EmitCreateRegExpLiteral(
    Node* feedback_vector, Node* slot, Node* pattern, Node* flags,
    Node* context) {
  Label call_runtime(this, Label::kDeferred), end(this);

  VARIABLE(result, MachineRepresentation::kTagged);
  TNode<Object> literal_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  GotoIf(NotHasBoilerplate(literal_site), &call_runtime);
  {
    Node* boilerplate = literal_site;
    CSA_ASSERT(this, IsJSRegExp(boilerplate));
    // The copy size is statically known: the fixed JSRegExp header plus its
    // in-object fields.
    int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kTaggedSize;
    Node* copy = Allocate(size);
    // Unrolled shallow copy of every tagged field, including the map.
    for (int offset = 0; offset < size; offset += kTaggedSize) {
      Node* value = LoadObjectField(boilerplate, offset);
      StoreObjectFieldNoWriteBarrier(copy, offset, value);
    }
    result.Bind(copy);
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context,
                            feedback_vector, SmiTag(slot), pattern, flags));
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}
327 :
// CSA builtin wrapper around EmitCreateRegExpLiteral; used by the
// CreateRegExpLiteral bytecode.
TF_BUILTIN(CreateRegExpLiteral, ConstructorBuiltinsAssembler) {
  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
  // The slot arrives as a Smi; the helper expects an IntPtr index.
  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
  Node* pattern = Parameter(Descriptor::kPattern);
  Node* flags = Parameter(Descriptor::kFlags);
  Node* context = Parameter(Descriptor::kContext);
  Node* result =
      EmitCreateRegExpLiteral(feedback_vector, slot, pattern, flags, context);
  Return(result);
}
338 :
339 224 : Node* ConstructorBuiltinsAssembler::EmitCreateShallowArrayLiteral(
340 : Node* feedback_vector, Node* slot, Node* context, Label* call_runtime,
341 : AllocationSiteMode allocation_site_mode) {
342 448 : Label zero_capacity(this), cow_elements(this), fast_elements(this),
343 224 : return_result(this);
344 448 : VARIABLE(result, MachineRepresentation::kTagged);
345 :
346 : TNode<Object> maybe_allocation_site =
347 224 : CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
348 448 : GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);
349 :
350 : TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
351 224 : TNode<JSArray> boilerplate = CAST(LoadBoilerplate(allocation_site));
352 :
353 : ParameterMode mode = OptimalParameterMode();
354 224 : if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
355 168 : return CloneFastJSArray(context, boilerplate, mode, allocation_site);
356 : } else {
357 56 : return CloneFastJSArray(context, boilerplate, mode);
358 224 : }
359 : }
360 :
// CSA builtin for the CreateShallowArrayLiteral bytecode: shallow-clones
// the boilerplate without allocation-site tracking, falling back to the
// full Runtime::kCreateArrayLiteral path when no boilerplate exists yet.
TF_BUILTIN(CreateShallowArrayLiteral, ConstructorBuiltinsAssembler) {
  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
  Node* constant_elements = Parameter(Descriptor::kConstantElements);
  Node* context = Parameter(Descriptor::kContext);
  Label call_runtime(this, Label::kDeferred);
  Return(EmitCreateShallowArrayLiteral(feedback_vector, slot, context,
                                       &call_runtime,
                                       DONT_TRACK_ALLOCATION_SITE));

  BIND(&call_runtime);
  {
    Comment("call runtime");
    // Mirror the fast path's behavior: shallow literal, no mementos.
    int const flags =
        AggregateLiteral::kDisableMementos | AggregateLiteral::kIsShallow;
    Return(CallRuntime(Runtime::kCreateArrayLiteral, context, feedback_vector,
                       SmiTag(slot), constant_elements, SmiConstant(flags)));
  }
}
380 :
// Allocates a new empty JSArray for an `[]` literal, ensuring the feedback
// slot holds an AllocationSite (creating one via the runtime if needed) so
// that elements-kind transitions can be tracked.
Node* ConstructorBuiltinsAssembler::EmitCreateEmptyArrayLiteral(
    Node* feedback_vector, Node* slot, Node* context) {
  // Array literals always have a valid AllocationSite to properly track
  // elements transitions.
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  TVARIABLE(AllocationSite, allocation_site);

  Label create_empty_array(this),
      initialize_allocation_site(this, Label::kDeferred), done(this);
  // A Smi in the slot means the site has not been created yet.
  GotoIf(TaggedIsSmi(maybe_allocation_site), &initialize_allocation_site);
  {
    allocation_site = CAST(maybe_allocation_site);
    Goto(&create_empty_array);
  }
  // TODO(cbruni): create the AllocationSite in CSA.
  BIND(&initialize_allocation_site);
  {
    allocation_site =
        CreateAllocationSiteInFeedbackVector(feedback_vector, SmiTag(slot));
    Goto(&create_empty_array);
  }

  BIND(&create_empty_array);
  // Use the elements kind recorded on the site so the new array starts in
  // the kind previous executions transitioned to.
  TNode<Int32T> kind = LoadElementsKind(allocation_site.value());
  TNode<Context> native_context = LoadNativeContext(context);
  Comment("LoadJSArrayElementsMap");
  TNode<Map> array_map = LoadJSArrayElementsMap(kind, native_context);
  TNode<Smi> zero = SmiConstant(0);
  Comment("Allocate JSArray");
  TNode<JSArray> result =
      AllocateJSArray(GetInitialFastElementsKind(), array_map, zero, zero,
                      allocation_site.value(), ParameterMode::SMI_PARAMETERS);

  Goto(&done);
  BIND(&done);

  return result;
}
420 :
// CSA builtin wrapper around EmitCreateEmptyArrayLiteral; used by the
// CreateEmptyArrayLiteral bytecode.
TF_BUILTIN(CreateEmptyArrayLiteral, ConstructorBuiltinsAssembler) {
  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
  Node* context = Parameter(Descriptor::kContext);
  Node* result = EmitCreateEmptyArrayLiteral(feedback_vector, slot, context);
  Return(result);
}
428 :
// Creates a shallow copy of the boilerplate JSObject stored in the feedback
// slot for an object literal. Copies the property backing store, the
// elements backing store, and all in-object fields; jumps to {call_runtime}
// whenever the boilerplate's shape cannot be handled here (no boilerplate
// yet, deprecated map, non-empty out-of-object fast properties, etc.).
Node* ConstructorBuiltinsAssembler::EmitCreateShallowObjectLiteral(
    Node* feedback_vector, Node* slot, Label* call_runtime) {
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);

  TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
  TNode<JSObject> boilerplate = LoadBoilerplate(allocation_site);
  TNode<Map> boilerplate_map = LoadMap(boilerplate);
  CSA_ASSERT(this, IsJSObjectMap(boilerplate_map));

  // Determine the property backing store of the copy.
  VARIABLE(var_properties, MachineRepresentation::kTagged);
  {
    Node* bit_field_3 = LoadMapBitField3(boilerplate_map);
    // Deprecated maps would need migration first — let the runtime do it.
    GotoIf(IsSetWord32<Map::IsDeprecatedBit>(bit_field_3), call_runtime);
    // Directly copy over the property store for dict-mode boilerplates.
    Label if_dictionary(this), if_fast(this), done(this);
    Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field_3), &if_dictionary,
           &if_fast);
    BIND(&if_dictionary);
    {
      Comment("Copy dictionary properties");
      var_properties.Bind(CopyNameDictionary(
          CAST(LoadSlowProperties(boilerplate)), call_runtime));
      // Slow objects have no in-object properties.
      Goto(&done);
    }
    BIND(&if_fast);
    {
      // TODO(cbruni): support copying out-of-object properties.
      Node* boilerplate_properties = LoadFastProperties(boilerplate);
      GotoIfNot(IsEmptyFixedArray(boilerplate_properties), call_runtime);
      var_properties.Bind(EmptyFixedArrayConstant());
      Goto(&done);
    }
    BIND(&done);
  }

  // Determine the elements backing store of the copy.
  VARIABLE(var_elements, MachineRepresentation::kTagged);
  {
    // Copy the elements backing store, assuming that it's flat.
    Label if_empty_fixed_array(this), if_copy_elements(this), done(this);
    Node* boilerplate_elements = LoadElements(boilerplate);
    Branch(IsEmptyFixedArray(boilerplate_elements), &if_empty_fixed_array,
           &if_copy_elements);

    BIND(&if_empty_fixed_array);
    var_elements.Bind(boilerplate_elements);
    Goto(&done);

    BIND(&if_copy_elements);
    CSA_ASSERT(this, Word32BinaryNot(
                         IsFixedCOWArrayMap(LoadMap(boilerplate_elements))));
    ExtractFixedArrayFlags flags;
    flags |= ExtractFixedArrayFlag::kAllFixedArrays;
    flags |= ExtractFixedArrayFlag::kNewSpaceAllocationOnly;
    flags |= ExtractFixedArrayFlag::kDontCopyCOW;
    var_elements.Bind(CloneFixedArray(boilerplate_elements, flags));
    Goto(&done);
    BIND(&done);
  }

  // Ensure new-space allocation for a fresh JSObject so we can skip write
  // barriers when copying all object fields.
  STATIC_ASSERT(JSObject::kMaxInstanceSize < kMaxRegularHeapObjectSize);
  TNode<IntPtrT> instance_size =
      TimesTaggedSize(LoadMapInstanceSizeInWords(boilerplate_map));
  TNode<IntPtrT> allocation_size = instance_size;
  bool needs_allocation_memento = FLAG_allocation_site_pretenuring;
  if (needs_allocation_memento) {
    // Prepare for inner-allocating the AllocationMemento.
    allocation_size =
        IntPtrAdd(instance_size, IntPtrConstant(AllocationMemento::kSize));
  }

  TNode<HeapObject> copy =
      UncheckedCast<HeapObject>(AllocateInNewSpace(allocation_size));
  {
    Comment("Initialize Literal Copy");
    // Initialize Object fields.
    StoreMapNoWriteBarrier(copy, boilerplate_map);
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kPropertiesOrHashOffset,
                                   var_properties.value());
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kElementsOffset,
                                   var_elements.value());
  }

  // Initialize the AllocationMemento before potential GCs due to heap number
  // allocation when copying the in-object properties.
  if (needs_allocation_memento) {
    InitializeAllocationMemento(copy, instance_size, allocation_site);
  }

  {
    // Copy over in-object properties.
    Label continue_with_write_barrier(this), done_init(this);
    TVARIABLE(IntPtrT, offset, IntPtrConstant(JSObject::kHeaderSize));
    // Mutable heap numbers only occur on 32-bit platforms.
    bool may_use_mutable_heap_numbers = !FLAG_unbox_double_fields;
    {
      Comment("Copy in-object properties fast");
      Label continue_fast(this, &offset);
      Branch(WordEqual(offset.value(), instance_size), &done_init,
             &continue_fast);
      BIND(&continue_fast);
      if (may_use_mutable_heap_numbers) {
        // A MutableHeapNumber field forces the slow path: it must be cloned
        // (and cloning can allocate), so the fast no-barrier copy stops.
        TNode<Object> field = LoadObjectField(boilerplate, offset.value());
        Label store_field(this);
        GotoIf(TaggedIsSmi(field), &store_field);
        GotoIf(IsMutableHeapNumber(CAST(field)), &continue_with_write_barrier);
        Goto(&store_field);
        BIND(&store_field);
        StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
      } else {
        // Copy fields as raw data.
        TNode<IntPtrT> field =
            LoadObjectField<IntPtrT>(boilerplate, offset.value());
        StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
      }
      offset = IntPtrAdd(offset.value(), IntPtrConstant(kTaggedSize));
      Branch(WordNotEqual(offset.value(), instance_size), &continue_fast,
             &done_init);
    }

    if (!may_use_mutable_heap_numbers) {
      BIND(&done_init);
      return copy;
    }
    // Continue initializing the literal after seeing the first sub-object
    // potentially causing allocation. In this case we prepare the new literal
    // by copying all pending fields over from the boilerplate and emit full
    // write barriers from here on.
    BIND(&continue_with_write_barrier);
    {
      Comment("Copy in-object properties slow");
      BuildFastLoop(
          offset.value(), instance_size,
          [=](Node* offset) {
            // TODO(ishell): value decompression is not necessary here.
            Node* field = LoadObjectField(boilerplate, offset);
            StoreObjectFieldNoWriteBarrier(copy, offset, field);
          },
          kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
      Comment("Copy mutable HeapNumber values");
      BuildFastLoop(
          offset.value(), instance_size,
          [=](Node* offset) {
            Node* field = LoadObjectField(copy, offset);
            Label copy_mutable_heap_number(this, Label::kDeferred),
                continue_loop(this);
            // We only have to clone complex field values.
            GotoIf(TaggedIsSmi(field), &continue_loop);
            Branch(IsMutableHeapNumber(field), &copy_mutable_heap_number,
                   &continue_loop);
            BIND(&copy_mutable_heap_number);
            {
              Node* double_value = LoadHeapNumberValue(field);
              Node* mutable_heap_number =
                  AllocateMutableHeapNumberWithValue(double_value);
              StoreObjectField(copy, offset, mutable_heap_number);
              Goto(&continue_loop);
            }
            BIND(&continue_loop);
          },
          kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
      Goto(&done_init);
    }
    BIND(&done_init);
  }
  return copy;
}
600 :
// CSA builtin for the CreateShallowObjectLiteral bytecode; tail-calls the
// full Runtime::kCreateObjectLiteral path when the fast copy bails out.
TF_BUILTIN(CreateShallowObjectLiteral, ConstructorBuiltinsAssembler) {
  Label call_runtime(this);
  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
  Node* copy =
      EmitCreateShallowObjectLiteral(feedback_vector, slot, &call_runtime);
  Return(copy);

  BIND(&call_runtime);
  // These parameters are only needed on the runtime path.
  Node* object_boilerplate_description =
      Parameter(Descriptor::kObjectBoilerplateDescription);
  Node* flags = Parameter(Descriptor::kFlags);
  Node* context = Parameter(Descriptor::kContext);
  TailCallRuntime(Runtime::kCreateObjectLiteral, context, feedback_vector,
                  SmiTag(slot), object_boilerplate_description, flags);
}
617 :
// Used by the CreateEmptyObjectLiteral bytecode and the Object constructor.
// Allocates a plain `{}`-shaped JSObject from the Object function's initial
// map in the current native context.
Node* ConstructorBuiltinsAssembler::EmitCreateEmptyObjectLiteral(
    Node* context) {
  Node* native_context = LoadNativeContext(context);
  Node* object_function =
      LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX);
  Node* map = LoadObjectField(object_function,
                              JSFunction::kPrototypeOrInitialMapOffset);
  CSA_ASSERT(this, IsMap(map));
  // Ensure that slack tracking is disabled for the map.
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  CSA_ASSERT(
      this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
  Node* empty_fixed_array = EmptyFixedArrayConstant();
  Node* result =
      AllocateJSObjectFromMap(map, empty_fixed_array, empty_fixed_array);
  return result;
}
636 :
// ES #sec-object-constructor
// Implements `Object(value)` / `new Object(value)`:
// - subclass construct (new.target != Object): allocate via FastNewObject;
// - value undefined/null/absent: allocate an empty object literal;
// - otherwise: ToObject(value).
TF_BUILTIN(ObjectConstructor, ConstructorBuiltinsAssembler) {
  int const kValueArg = 0;
  Node* argc =
      ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
  CodeStubArguments args(this, argc);
  Node* context = Parameter(Descriptor::kContext);
  Node* new_target = Parameter(Descriptor::kJSNewTarget);

  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label if_subclass(this, Label::kDeferred), if_notsubclass(this),
      return_result(this);
  // Plain call (no new.target) takes the non-subclass path.
  GotoIf(IsUndefined(new_target), &if_notsubclass);
  TNode<JSFunction> target = CAST(Parameter(Descriptor::kJSTarget));
  Branch(WordEqual(new_target, target), &if_notsubclass, &if_subclass);

  BIND(&if_subclass);
  {
    // `new class extends Object {}`: allocate from new.target's map.
    Node* result =
        CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_notsubclass);
  {
    Label if_newobject(this, Label::kDeferred), if_toobject(this);

    Node* value_index = IntPtrConstant(kValueArg);
    // No argument, null, or undefined -> fresh empty object.
    GotoIf(UintPtrGreaterThanOrEqual(value_index, argc), &if_newobject);
    Node* value = args.AtIndex(value_index);
    GotoIf(IsNull(value), &if_newobject);
    Branch(IsUndefined(value), &if_newobject, &if_toobject);

    BIND(&if_newobject);
    {
      Node* result = EmitCreateEmptyObjectLiteral(context);
      var_result.Bind(result);
      Goto(&return_result);
    }

    BIND(&if_toobject);
    {
      Node* result = CallBuiltin(Builtins::kToObject, context, value);
      var_result.Bind(result);
      Goto(&return_result);
    }
  }

  BIND(&return_result);
  args.PopAndReturn(var_result.value());
}
689 :
// ES #sec-number-constructor
// Implements `Number(value)` / `new Number(value)`: converts the argument
// to a Number (with BigInt -> Number conversion), returning the primitive
// for a plain call, or wrapping it in a JSValue for a construct call.
TF_BUILTIN(NumberConstructor, ConstructorBuiltinsAssembler) {
  Node* context = Parameter(Descriptor::kContext);
  Node* argc =
      ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
  CodeStubArguments args(this, argc);

  // 1. If no arguments were passed to this function invocation, let n be +0.
  VARIABLE(var_n, MachineRepresentation::kTagged, SmiConstant(0));
  Label if_nloaded(this, &var_n);
  GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_nloaded);

  // 2. Else,
  //    a. Let prim be ? ToNumeric(value).
  //    b. If Type(prim) is BigInt, let n be the Number value for prim.
  //    c. Otherwise, let n be prim.
  Node* value = args.AtIndex(0);
  var_n.Bind(ToNumber(context, value, BigIntHandling::kConvertToNumber));
  Goto(&if_nloaded);

  BIND(&if_nloaded);
  {
    // 3. If NewTarget is undefined, return n.
    Node* n_value = var_n.value();
    Node* new_target = Parameter(Descriptor::kJSNewTarget);
    Label return_n(this), constructnumber(this, Label::kDeferred);
    Branch(IsUndefined(new_target), &return_n, &constructnumber);

    BIND(&return_n);
    { args.PopAndReturn(n_value); }

    BIND(&constructnumber);
    {
      // 4. Let O be ? OrdinaryCreateFromConstructor(NewTarget,
      //    "%NumberPrototype%", « [[NumberData]] »).
      // 5. Set O.[[NumberData]] to n.
      // 6. Return O.

      // We are not using Parameter(Descriptor::kJSTarget) and loading the value
      // from the current frame here in order to reduce register pressure on the
      // fast path.
      TNode<JSFunction> target = LoadTargetFromFrame();
      Node* result =
          CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
      StoreObjectField(result, JSValue::kValueOffset, n_value);
      args.PopAndReturn(result);
    }
  }
}
739 :
740 224 : TF_BUILTIN(GenericConstructorLazyDeoptContinuation,
741 : ConstructorBuiltinsAssembler) {
742 : Node* result = Parameter(Descriptor::kResult);
743 56 : Return(result);
744 56 : }
745 :
// https://tc39.github.io/ecma262/#sec-string-constructor
// Implements `String(value)` / `new String(value)`: converts the argument
// to a string (with the special Symbol-description case for plain calls),
// returning the primitive for a plain call or a JSValue wrapper for a
// construct call.
TF_BUILTIN(StringConstructor, ConstructorBuiltinsAssembler) {
  Node* context = Parameter(Descriptor::kContext);
  Node* argc =
      ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
  CodeStubArguments args(this, argc);

  TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));

  // 1. If no arguments were passed to this function invocation, let s be "".
  VARIABLE(var_s, MachineRepresentation::kTagged, EmptyStringConstant());
  Label if_sloaded(this, &var_s);
  GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_sloaded);

  // 2. Else,
  //    a. If NewTarget is undefined [...]
  Node* value = args.AtIndex(0);
  Label if_tostring(this, &var_s);
  GotoIfNot(IsUndefined(new_target), &if_tostring);

  // 2a. [...] and Type(value) is Symbol, return SymbolDescriptiveString(value).
  GotoIf(TaggedIsSmi(value), &if_tostring);
  GotoIfNot(IsSymbol(value), &if_tostring);
  {
    Node* result =
        CallRuntime(Runtime::kSymbolDescriptiveString, context, value);
    args.PopAndReturn(result);
  }

  // 2b. Let s be ? ToString(value).
  BIND(&if_tostring);
  {
    var_s.Bind(CallBuiltin(Builtins::kToString, context, value));
    Goto(&if_sloaded);
  }

  // 3. If NewTarget is undefined, return s.
  BIND(&if_sloaded);
  {
    Node* s_value = var_s.value();
    Label return_s(this), constructstring(this, Label::kDeferred);
    Branch(IsUndefined(new_target), &return_s, &constructstring);

    BIND(&return_s);
    { args.PopAndReturn(s_value); }

    BIND(&constructstring);
    {
      // We are not using Parameter(Descriptor::kJSTarget) and loading the value
      // from the current frame here in order to reduce register pressure on the
      // fast path.
      TNode<JSFunction> target = LoadTargetFromFrame();

      // Allocate the String wrapper object and store the primitive in its
      // [[StringData]] slot.
      Node* result =
          CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
      StoreObjectField(result, JSValue::kValueOffset, s_value);
      args.PopAndReturn(result);
    }
  }
}
806 :
807 : } // namespace internal
808 94089 : } // namespace v8
|