1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/builtins/builtins-constructor-gen.h"
6 :
7 : #include "src/ast/ast.h"
8 : #include "src/builtins/builtins-call-gen.h"
9 : #include "src/builtins/builtins-constructor.h"
10 : #include "src/builtins/builtins-utils-gen.h"
11 : #include "src/builtins/builtins.h"
12 : #include "src/code-factory.h"
13 : #include "src/code-stub-assembler.h"
14 : #include "src/counters.h"
15 : #include "src/interface-descriptors.h"
16 : #include "src/macro-assembler.h"
17 : #include "src/objects-inl.h"
18 :
19 : namespace v8 {
20 : namespace internal {
21 :
22 56 : void Builtins::Generate_ConstructVarargs(MacroAssembler* masm) {
23 56 : Generate_CallOrConstructVarargs(masm,
24 56 : BUILTIN_CODE(masm->isolate(), Construct));
25 56 : }
26 :
27 56 : void Builtins::Generate_ConstructForwardVarargs(MacroAssembler* masm) {
28 56 : Generate_CallOrConstructForwardVarargs(
29 : masm, CallOrConstructMode::kConstruct,
30 56 : BUILTIN_CODE(masm->isolate(), Construct));
31 56 : }
32 :
33 56 : void Builtins::Generate_ConstructFunctionForwardVarargs(MacroAssembler* masm) {
34 56 : Generate_CallOrConstructForwardVarargs(
35 : masm, CallOrConstructMode::kConstruct,
36 56 : BUILTIN_CODE(masm->isolate(), ConstructFunction));
37 56 : }
38 :
39 392 : TF_BUILTIN(ConstructWithArrayLike, CallOrConstructBuiltinsAssembler) {
40 56 : TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
41 56 : SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
42 56 : TNode<Object> arguments_list = CAST(Parameter(Descriptor::kArgumentsList));
43 56 : TNode<Context> context = CAST(Parameter(Descriptor::kContext));
44 56 : CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
45 56 : }
46 :
47 448 : TF_BUILTIN(ConstructWithSpread, CallOrConstructBuiltinsAssembler) {
48 56 : TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
49 56 : SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
50 56 : TNode<Object> spread = CAST(Parameter(Descriptor::kSpread));
51 : TNode<Int32T> args_count =
52 56 : UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
53 56 : TNode<Context> context = CAST(Parameter(Descriptor::kContext));
54 56 : CallOrConstructWithSpread(target, new_target, spread, args_count, context);
55 56 : }
56 :
57 : typedef compiler::Node Node;
58 :
59 224 : TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
60 : Node* shared_function_info = Parameter(Descriptor::kSharedFunctionInfo);
61 : Node* feedback_cell = Parameter(Descriptor::kFeedbackCell);
62 : Node* context = Parameter(Descriptor::kContext);
63 :
64 : CSA_ASSERT(this, IsFeedbackCell(feedback_cell));
65 : CSA_ASSERT(this, IsSharedFunctionInfo(shared_function_info));
66 :
67 112 : IncrementCounter(isolate()->counters()->fast_new_closure_total(), 1);
68 :
69 :   // Bump the closure counter encoded in the {feedback_cell}s map.
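  // The cell's map acts as a tiny saturating counter: kNoClosuresCellMap ->
  // kOneClosureCellMap -> kManyClosuresCellMap. Other parts of the system
  // (e.g. the optimizing compiler) use this to tell whether the feedback
  // behind the cell is private to a single closure or shared by several.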
70 : {
71 112 : Node* const feedback_cell_map = LoadMap(feedback_cell);
72 56 : Label no_closures(this), one_closure(this), cell_done(this);
73 :
74 112 : GotoIf(IsNoClosuresCellMap(feedback_cell_map), &no_closures);
75 112 : GotoIf(IsOneClosureCellMap(feedback_cell_map), &one_closure);
76 : CSA_ASSERT(this, IsManyClosuresCellMap(feedback_cell_map),
77 : feedback_cell_map, feedback_cell);
78 56 : Goto(&cell_done);
79 :
80 56 : BIND(&no_closures);
81 56 : StoreMapNoWriteBarrier(feedback_cell, RootIndex::kOneClosureCellMap);
82 56 : Goto(&cell_done);
83 :
84 56 : BIND(&one_closure);
85 56 : StoreMapNoWriteBarrier(feedback_cell, RootIndex::kManyClosuresCellMap);
86 56 : Goto(&cell_done);
87 :
88 56 : BIND(&cell_done);
89 : }
90 :
91 : // The calculation of |function_map_index| must be in sync with
92 : // SharedFunctionInfo::function_map_index().
93 : Node* const flags =
94 56 : LoadObjectField(shared_function_info, SharedFunctionInfo::kFlagsOffset,
95 56 : MachineType::Uint32());
96 112 : Node* const function_map_index = IntPtrAdd(
97 112 : DecodeWordFromWord32<SharedFunctionInfo::FunctionMapIndexBits>(flags),
98 168 : IntPtrConstant(Context::FIRST_FUNCTION_MAP_INDEX));
99 : CSA_ASSERT(this, UintPtrLessThanOrEqual(
100 : function_map_index,
101 : IntPtrConstant(Context::LAST_FUNCTION_MAP_INDEX)));
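  // The decoded index selects one of the per-native-context function maps in
  // the range [FIRST_FUNCTION_MAP_INDEX, LAST_FUNCTION_MAP_INDEX], so the new
  // closure ends up with the map matching its function kind and language mode.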
102 :
103 : // Get the function map in the current native context and set that
104 : // as the map of the allocated object.
105 112 : Node* const native_context = LoadNativeContext(context);
106 : Node* const function_map =
107 112 : LoadContextElement(native_context, function_map_index);
108 :
109 : // Create a new closure from the given function info in new space
110 : TNode<IntPtrT> instance_size_in_bytes =
111 112 : TimesTaggedSize(LoadMapInstanceSizeInWords(function_map));
112 112 : TNode<Object> result = Allocate(instance_size_in_bytes);
113 56 : StoreMapNoWriteBarrier(result, function_map);
114 : InitializeJSObjectBodyNoSlackTracking(result, function_map,
115 : instance_size_in_bytes,
116 56 : JSFunction::kSizeWithoutPrototype);
117 :
118 : // Initialize the rest of the function.
119 : StoreObjectFieldRoot(result, JSObject::kPropertiesOrHashOffset,
120 56 : RootIndex::kEmptyFixedArray);
121 : StoreObjectFieldRoot(result, JSObject::kElementsOffset,
122 56 : RootIndex::kEmptyFixedArray);
123 : {
124 : // Set function prototype if necessary.
125 56 : Label done(this), init_prototype(this);
126 112 : Branch(IsFunctionWithPrototypeSlotMap(function_map), &init_prototype,
127 56 : &done);
128 :
129 56 : BIND(&init_prototype);
130 : StoreObjectFieldRoot(result, JSFunction::kPrototypeOrInitialMapOffset,
131 56 : RootIndex::kTheHoleValue);
132 56 : Goto(&done);
133 56 : BIND(&done);
134 : }
135 :
136 : STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
137 : StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackCellOffset,
138 56 : feedback_cell);
139 : StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
140 56 : shared_function_info);
141 56 : StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
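  // The closure starts out with the CompileLazy builtin as its code object;
  // actual bytecode or compiled code is installed the first time it is called.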
142 : Handle<Code> lazy_builtin_handle =
143 112 : isolate()->builtins()->builtin_handle(Builtins::kCompileLazy);
144 : Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
145 56 : StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeOffset, lazy_builtin);
146 56 : Return(result);
147 56 : }
148 :
149 224 : TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
150 : Node* context = Parameter(Descriptor::kContext);
151 : Node* target = Parameter(Descriptor::kTarget);
152 : Node* new_target = Parameter(Descriptor::kNewTarget);
153 :
154 56 : Label call_runtime(this);
155 :
156 56 : Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
157 56 : Return(result);
158 :
159 56 : BIND(&call_runtime);
160 56 : TailCallRuntime(Runtime::kNewObject, context, target, new_target);
161 56 : }
162 :
163 392 : Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
164 : Node* target,
165 : Node* new_target) {
166 784 : VARIABLE(var_obj, MachineRepresentation::kTagged);
167 392 : Label call_runtime(this), end(this);
168 :
169 392 : Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
170 392 : var_obj.Bind(result);
171 392 : Goto(&end);
172 :
173 392 : BIND(&call_runtime);
174 392 : var_obj.Bind(CallRuntime(Runtime::kNewObject, context, target, new_target));
175 392 : Goto(&end);
176 :
177 392 : BIND(&end);
178 784 : return var_obj.value();
179 : }
180 :
181 448 : Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
182 : Node* target,
183 : Node* new_target,
184 : Label* call_runtime) {
185 : CSA_ASSERT(this, HasInstanceType(target, JS_FUNCTION_TYPE));
186 : CSA_ASSERT(this, IsJSReceiver(new_target));
187 :
188 : // Verify that the new target is a JSFunction.
189 896 : Label fast(this), end(this);
190 896 : GotoIf(HasInstanceType(new_target, JS_FUNCTION_TYPE), &fast);
191 448 : Goto(call_runtime);
192 :
193 448 : BIND(&fast);
194 :
195 : // Load the initial map and verify that it's in fact a map.
196 : Node* initial_map =
197 : LoadObjectField(new_target, JSFunction::kPrototypeOrInitialMapOffset);
198 896 : GotoIf(TaggedIsSmi(initial_map), call_runtime);
199 896 : GotoIf(DoesntHaveInstanceType(initial_map, MAP_TYPE), call_runtime);
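  // If the field does not hold a Map, {new_target} has not been used as a
  // constructor yet (the slot still holds the prototype or the hole); the
  // runtime path will create the initial map.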
200 :
201 : // Fall back to runtime if the target differs from the new target's
202 : // initial map constructor.
203 : Node* new_target_constructor =
204 : LoadObjectField(initial_map, Map::kConstructorOrBackPointerOffset);
205 896 : GotoIf(WordNotEqual(target, new_target_constructor), call_runtime);
206 :
207 896 : VARIABLE(properties, MachineRepresentation::kTagged);
208 :
209 448 : Label instantiate_map(this), allocate_properties(this);
210 896 : GotoIf(IsDictionaryMap(initial_map), &allocate_properties);
211 : {
212 896 : properties.Bind(EmptyFixedArrayConstant());
213 448 : Goto(&instantiate_map);
214 : }
215 448 : BIND(&allocate_properties);
216 : {
217 896 : properties.Bind(AllocateNameDictionary(NameDictionary::kInitialCapacity));
218 448 : Goto(&instantiate_map);
219 : }
220 :
221 448 : BIND(&instantiate_map);
222 896 : return AllocateJSObjectFromMap(initial_map, properties.value(), nullptr,
223 896 : kNone, kWithSlackTracking);
224 : }
225 :
226 448 : Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
227 : Node* scope_info, Node* slots_uint32, Node* context, ScopeType scope_type) {
228 896 : TNode<IntPtrT> slots = Signed(ChangeUint32ToWord(slots_uint32));
229 : TNode<IntPtrT> size = ElementOffsetFromIndex(
230 448 : slots, PACKED_ELEMENTS, INTPTR_PARAMETERS, Context::kTodoHeaderSize);
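  // The allocation size is the fixed context header plus one tagged word per
  // slot.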
231 :
232 : // Create a new closure from the given function info in new space
233 : TNode<Context> function_context =
234 448 : UncheckedCast<Context>(AllocateInNewSpace(size));
235 :
236 : RootIndex context_type;
237 448 : switch (scope_type) {
238 : case EVAL_SCOPE:
239 : context_type = RootIndex::kEvalContextMap;
240 : break;
241 : case FUNCTION_SCOPE:
242 : context_type = RootIndex::kFunctionContextMap;
243 224 : break;
244 : default:
245 0 : UNREACHABLE();
246 : }
247 : // Set up the header.
248 448 : StoreMapNoWriteBarrier(function_context, context_type);
249 448 : TNode<IntPtrT> min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
250 : // TODO(ishell): for now, length also includes MIN_CONTEXT_SLOTS.
251 : TNode<IntPtrT> length = IntPtrAdd(slots, min_context_slots);
252 896 : StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
253 : SmiTag(length));
254 : StoreObjectFieldNoWriteBarrier(function_context, Context::kScopeInfoOffset,
255 448 : scope_info);
256 : StoreObjectFieldNoWriteBarrier(function_context, Context::kPreviousOffset,
257 448 : context);
258 448 : StoreObjectFieldNoWriteBarrier(function_context, Context::kExtensionOffset,
259 : TheHoleConstant());
260 448 : TNode<Context> native_context = LoadNativeContext(context);
261 : StoreObjectFieldNoWriteBarrier(function_context,
262 : Context::kNativeContextOffset, native_context);
263 :
264 :   // Initialize the rest of the slots to undefined.
265 896 : TNode<HeapObject> undefined = UndefinedConstant();
266 448 : TNode<IntPtrT> start_offset = IntPtrConstant(Context::kTodoHeaderSize);
267 448 : CodeStubAssembler::VariableList vars(0, zone());
268 448 : BuildFastLoop(
269 : vars, start_offset, size,
270 : [=](Node* offset) {
271 448 : StoreObjectFieldNoWriteBarrier(
272 448 : function_context, UncheckedCast<IntPtrT>(offset), undefined);
273 : },
274 448 : kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
275 448 : return function_context;
276 : }
277 :
278 224 : TF_BUILTIN(FastNewFunctionContextEval, ConstructorBuiltinsAssembler) {
279 : Node* scope_info = Parameter(Descriptor::kScopeInfo);
280 : Node* slots = Parameter(Descriptor::kSlots);
281 : Node* context = Parameter(Descriptor::kContext);
282 112 : Return(EmitFastNewFunctionContext(scope_info, slots, context,
283 56 : ScopeType::EVAL_SCOPE));
284 56 : }
285 :
286 224 : TF_BUILTIN(FastNewFunctionContextFunction, ConstructorBuiltinsAssembler) {
287 : Node* scope_info = Parameter(Descriptor::kScopeInfo);
288 : Node* slots = Parameter(Descriptor::kSlots);
289 : Node* context = Parameter(Descriptor::kContext);
290 112 : Return(EmitFastNewFunctionContext(scope_info, slots, context,
291 56 : ScopeType::FUNCTION_SCOPE));
292 56 : }
293 :
294 224 : Node* ConstructorBuiltinsAssembler::EmitCreateRegExpLiteral(
295 : Node* feedback_vector, Node* slot, Node* pattern, Node* flags,
296 : Node* context) {
297 448 : Label call_runtime(this, Label::kDeferred), end(this);
298 :
299 448 : GotoIf(IsUndefined(feedback_vector), &call_runtime);
300 :
301 448 : VARIABLE(result, MachineRepresentation::kTagged);
302 : TNode<Object> literal_site =
303 224 : CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
304 448 : GotoIf(NotHasBoilerplate(literal_site), &call_runtime);
305 : {
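    // Take a shallow, field-by-field copy of the boilerplate JSRegExp. All of
    // its fields hold tagged values and the copy is freshly allocated, so no
    // write barriers are required.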
306 : Node* boilerplate = literal_site;
307 : CSA_ASSERT(this, IsJSRegExp(boilerplate));
308 : int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kTaggedSize;
309 448 : Node* copy = Allocate(size);
310 3360 : for (int offset = 0; offset < size; offset += kTaggedSize) {
311 : Node* value = LoadObjectField(boilerplate, offset);
312 1568 : StoreObjectFieldNoWriteBarrier(copy, offset, value);
313 : }
314 224 : result.Bind(copy);
315 224 : Goto(&end);
316 : }
317 :
318 224 : BIND(&call_runtime);
319 : {
320 : result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context,
321 448 : feedback_vector, SmiTag(slot), pattern, flags));
322 224 : Goto(&end);
323 : }
324 :
325 224 : BIND(&end);
326 448 : return result.value();
327 : }
328 :
329 280 : TF_BUILTIN(CreateRegExpLiteral, ConstructorBuiltinsAssembler) {
330 : Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
331 112 : Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
332 : Node* pattern = Parameter(Descriptor::kPattern);
333 : Node* flags = Parameter(Descriptor::kFlags);
334 : Node* context = Parameter(Descriptor::kContext);
335 : Node* result =
336 56 : EmitCreateRegExpLiteral(feedback_vector, slot, pattern, flags, context);
337 56 : Return(result);
338 56 : }
339 :
340 224 : Node* ConstructorBuiltinsAssembler::EmitCreateShallowArrayLiteral(
341 : Node* feedback_vector, Node* slot, Node* context, Label* call_runtime,
342 : AllocationSiteMode allocation_site_mode) {
343 448 : Label zero_capacity(this), cow_elements(this), fast_elements(this),
344 224 : return_result(this);
345 448 : VARIABLE(result, MachineRepresentation::kTagged);
346 :
347 : TNode<Object> maybe_allocation_site =
348 224 : CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
349 448 : GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);
350 :
351 : TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
352 224 : TNode<JSArray> boilerplate = CAST(LoadBoilerplate(allocation_site));
353 :
354 : ParameterMode mode = OptimalParameterMode();
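  // When allocation site tracking is enabled, the AllocationSite is passed to
  // the clone so the copy gets an AllocationMemento; elements-kind transitions
  // and pretenuring decisions on copies can then be attributed to this site.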
355 224 : if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
356 168 : return CloneFastJSArray(context, boilerplate, mode, allocation_site);
357 : } else {
358 56 : return CloneFastJSArray(context, boilerplate, mode);
359 : }
360 : }
361 :
362 280 : TF_BUILTIN(CreateShallowArrayLiteral, ConstructorBuiltinsAssembler) {
363 : Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
364 112 : Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
365 : Node* constant_elements = Parameter(Descriptor::kConstantElements);
366 : Node* context = Parameter(Descriptor::kContext);
367 56 : Label call_runtime(this, Label::kDeferred);
368 112 : Return(EmitCreateShallowArrayLiteral(feedback_vector, slot, context,
369 : &call_runtime,
370 56 : DONT_TRACK_ALLOCATION_SITE));
371 :
372 56 : BIND(&call_runtime);
373 : {
374 56 : Comment("call runtime");
375 : int const flags =
376 : AggregateLiteral::kDisableMementos | AggregateLiteral::kIsShallow;
377 56 : Return(CallRuntime(Runtime::kCreateArrayLiteral, context, feedback_vector,
378 224 : SmiTag(slot), constant_elements, SmiConstant(flags)));
379 : }
380 56 : }
381 :
382 224 : Node* ConstructorBuiltinsAssembler::EmitCreateEmptyArrayLiteral(
383 : Node* feedback_vector, Node* slot, Node* context) {
384 : // Array literals always have a valid AllocationSite to properly track
385 : // elements transitions.
386 : TNode<Object> maybe_allocation_site =
387 224 : CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
388 224 : TVARIABLE(AllocationSite, allocation_site);
389 :
390 224 : Label create_empty_array(this),
391 224 : initialize_allocation_site(this, Label::kDeferred), done(this);
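  // A Smi in the slot means no AllocationSite has been installed yet, so
  // create and cache one before allocating the array.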
392 448 : GotoIf(TaggedIsSmi(maybe_allocation_site), &initialize_allocation_site);
393 : {
394 : allocation_site = CAST(maybe_allocation_site);
395 224 : Goto(&create_empty_array);
396 : }
397 : // TODO(cbruni): create the AllocationSite in CSA.
398 224 : BIND(&initialize_allocation_site);
399 : {
400 672 : allocation_site =
401 : CreateAllocationSiteInFeedbackVector(feedback_vector, SmiTag(slot));
402 224 : Goto(&create_empty_array);
403 : }
404 :
405 224 : BIND(&create_empty_array);
406 224 : TNode<Int32T> kind = LoadElementsKind(allocation_site.value());
407 224 : TNode<Context> native_context = LoadNativeContext(context);
408 224 : Comment("LoadJSArrayElementsMap");
409 224 : TNode<Map> array_map = LoadJSArrayElementsMap(kind, native_context);
410 224 : TNode<Smi> zero = SmiConstant(0);
411 224 : Comment("Allocate JSArray");
412 : TNode<JSArray> result =
413 : AllocateJSArray(GetInitialFastElementsKind(), array_map, zero, zero,
414 224 : allocation_site.value(), ParameterMode::SMI_PARAMETERS);
415 :
416 224 : Goto(&done);
417 224 : BIND(&done);
418 :
419 224 : return result;
420 : }
421 :
422 280 : TF_BUILTIN(CreateEmptyArrayLiteral, ConstructorBuiltinsAssembler) {
423 : Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
424 112 : Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
425 : Node* context = Parameter(Descriptor::kContext);
426 56 : Node* result = EmitCreateEmptyArrayLiteral(feedback_vector, slot, context);
427 56 : Return(result);
428 56 : }
429 :
430 224 : Node* ConstructorBuiltinsAssembler::EmitCreateShallowObjectLiteral(
431 : Node* feedback_vector, Node* slot, Label* call_runtime) {
432 : TNode<Object> maybe_allocation_site =
433 224 : CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
434 448 : GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);
435 :
436 : TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
437 224 : TNode<JSObject> boilerplate = LoadBoilerplate(allocation_site);
438 224 : TNode<Map> boilerplate_map = LoadMap(boilerplate);
439 : CSA_ASSERT(this, IsJSObjectMap(boilerplate_map));
440 :
441 448 : VARIABLE(var_properties, MachineRepresentation::kTagged);
442 : {
443 448 : Node* bit_field_3 = LoadMapBitField3(boilerplate_map);
444 224 : GotoIf(IsSetWord32<Map::IsDeprecatedBit>(bit_field_3), call_runtime);
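    // A deprecated boilerplate map would first have to be migrated to the
    // up-to-date layout, so leave that case to the runtime.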
445 : // Directly copy over the property store for dict-mode boilerplates.
446 224 : Label if_dictionary(this), if_fast(this), done(this);
447 224 : Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field_3), &if_dictionary,
448 224 : &if_fast);
449 224 : BIND(&if_dictionary);
450 : {
451 224 : Comment("Copy dictionary properties");
452 448 : var_properties.Bind(CopyNameDictionary(
453 672 : CAST(LoadSlowProperties(boilerplate)), call_runtime));
454 : // Slow objects have no in-object properties.
455 224 : Goto(&done);
456 : }
457 224 : BIND(&if_fast);
458 : {
459 : // TODO(cbruni): support copying out-of-object properties.
460 448 : Node* boilerplate_properties = LoadFastProperties(boilerplate);
461 448 : GotoIfNot(IsEmptyFixedArray(boilerplate_properties), call_runtime);
462 448 : var_properties.Bind(EmptyFixedArrayConstant());
463 224 : Goto(&done);
464 : }
465 224 : BIND(&done);
466 : }
467 :
468 448 : VARIABLE(var_elements, MachineRepresentation::kTagged);
469 : {
470 : // Copy the elements backing store, assuming that it's flat.
471 224 : Label if_empty_fixed_array(this), if_copy_elements(this), done(this);
472 : Node* boilerplate_elements = LoadElements(boilerplate);
473 448 : Branch(IsEmptyFixedArray(boilerplate_elements), &if_empty_fixed_array,
474 224 : &if_copy_elements);
475 :
476 224 : BIND(&if_empty_fixed_array);
477 224 : var_elements.Bind(boilerplate_elements);
478 224 : Goto(&done);
479 :
480 224 : BIND(&if_copy_elements);
481 : CSA_ASSERT(this, Word32BinaryNot(
482 : IsFixedCOWArrayMap(LoadMap(boilerplate_elements))));
483 : ExtractFixedArrayFlags flags;
484 : flags |= ExtractFixedArrayFlag::kAllFixedArrays;
485 : flags |= ExtractFixedArrayFlag::kNewSpaceAllocationOnly;
486 : flags |= ExtractFixedArrayFlag::kDontCopyCOW;
487 224 : var_elements.Bind(CloneFixedArray(boilerplate_elements, flags));
488 224 : Goto(&done);
489 224 : BIND(&done);
490 : }
491 :
492 : // Ensure new-space allocation for a fresh JSObject so we can skip write
493 : // barriers when copying all object fields.
494 : STATIC_ASSERT(JSObject::kMaxInstanceSize < kMaxRegularHeapObjectSize);
495 : TNode<IntPtrT> instance_size =
496 448 : TimesTaggedSize(LoadMapInstanceSizeInWords(boilerplate_map));
497 : TNode<IntPtrT> allocation_size = instance_size;
498 224 : bool needs_allocation_memento = FLAG_allocation_site_pretenuring;
499 224 : if (needs_allocation_memento) {
500 : // Prepare for inner-allocating the AllocationMemento.
501 224 : allocation_size =
502 : IntPtrAdd(instance_size, IntPtrConstant(AllocationMemento::kSize));
503 : }
504 :
505 : TNode<HeapObject> copy =
506 224 : UncheckedCast<HeapObject>(AllocateInNewSpace(allocation_size));
507 : {
508 224 : Comment("Initialize Literal Copy");
509 : // Initialize Object fields.
510 224 : StoreMapNoWriteBarrier(copy, boilerplate_map);
511 224 : StoreObjectFieldNoWriteBarrier(copy, JSObject::kPropertiesOrHashOffset,
512 224 : var_properties.value());
513 224 : StoreObjectFieldNoWriteBarrier(copy, JSObject::kElementsOffset,
514 224 : var_elements.value());
515 : }
516 :
517 : // Initialize the AllocationMemento before potential GCs due to heap number
518 : // allocation when copying the in-object properties.
519 224 : if (needs_allocation_memento) {
520 224 : InitializeAllocationMemento(copy, instance_size, allocation_site);
521 : }
522 :
523 : {
524 : // Copy over in-object properties.
525 : Label continue_with_write_barrier(this), done_init(this);
526 224 : TVARIABLE(IntPtrT, offset, IntPtrConstant(JSObject::kHeaderSize));
527 : // Mutable heap numbers only occur on 32-bit platforms.
528 : bool may_use_mutable_heap_numbers = !FLAG_unbox_double_fields;
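    // With unboxed double fields, doubles are stored directly in the object
    // and a raw tagged-word copy of the boilerplate is safe. Without unboxing,
    // a double field refers to a MutableHeapNumber that must not be shared
    // between the boilerplate and the copy, hence the slower path below.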
529 : {
530 224 : Comment("Copy in-object properties fast");
531 224 : Label continue_fast(this, &offset);
532 448 : Branch(WordEqual(offset.value(), instance_size), &done_init,
533 224 : &continue_fast);
534 224 : BIND(&continue_fast);
535 : if (may_use_mutable_heap_numbers) {
536 : TNode<Object> field = LoadObjectField(boilerplate, offset.value());
537 : Label store_field(this);
538 : GotoIf(TaggedIsSmi(field), &store_field);
539 : GotoIf(IsMutableHeapNumber(CAST(field)), &continue_with_write_barrier);
540 : Goto(&store_field);
541 : BIND(&store_field);
542 : StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
543 : } else {
544 : // Copy fields as raw data.
545 : TNode<IntPtrT> field =
546 : LoadObjectField<IntPtrT>(boilerplate, offset.value());
547 : StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
548 : }
549 224 : offset = IntPtrAdd(offset.value(), IntPtrConstant(kTaggedSize));
550 448 : Branch(WordNotEqual(offset.value(), instance_size), &continue_fast,
551 224 : &done_init);
552 : }
553 :
554 : if (!may_use_mutable_heap_numbers) {
555 224 : BIND(&done_init);
556 : return copy;
557 : }
558 : // Continue initializing the literal after seeing the first sub-object
559 : // potentially causing allocation. In this case we prepare the new literal
560 : // by copying all pending fields over from the boilerplate and emit full
561 : // write barriers from here on.
562 : BIND(&continue_with_write_barrier);
563 : {
564 : Comment("Copy in-object properties slow");
565 : BuildFastLoop(
566 : offset.value(), instance_size,
567 : [=](Node* offset) {
568 : // TODO(ishell): value decompression is not necessary here.
569 : Node* field = LoadObjectField(boilerplate, offset);
570 : StoreObjectFieldNoWriteBarrier(copy, offset, field);
571 : },
572 : kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
573 : Comment("Copy mutable HeapNumber values");
574 : BuildFastLoop(
575 : offset.value(), instance_size,
576 : [=](Node* offset) {
577 : Node* field = LoadObjectField(copy, offset);
578 : Label copy_mutable_heap_number(this, Label::kDeferred),
579 : continue_loop(this);
580 : // We only have to clone complex field values.
581 : GotoIf(TaggedIsSmi(field), &continue_loop);
582 : Branch(IsMutableHeapNumber(field), ©_mutable_heap_number,
583 : &continue_loop);
584 : BIND(©_mutable_heap_number);
585 : {
586 : Node* double_value = LoadHeapNumberValue(field);
587 : Node* mutable_heap_number =
588 : AllocateMutableHeapNumberWithValue(double_value);
589 : StoreObjectField(copy, offset, mutable_heap_number);
590 : Goto(&continue_loop);
591 : }
592 : BIND(&continue_loop);
593 : },
594 : kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
595 : Goto(&done_init);
596 : }
597 : BIND(&done_init);
598 : }
599 : return copy;
600 : }
601 :
602 336 : TF_BUILTIN(CreateShallowObjectLiteral, ConstructorBuiltinsAssembler) {
603 112 : Label call_runtime(this);
604 : Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
605 112 : Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
606 : Node* copy =
607 56 : EmitCreateShallowObjectLiteral(feedback_vector, slot, &call_runtime);
608 56 : Return(copy);
609 :
610 56 : BIND(&call_runtime);
611 : Node* object_boilerplate_description =
612 : Parameter(Descriptor::kObjectBoilerplateDescription);
613 : Node* flags = Parameter(Descriptor::kFlags);
614 : Node* context = Parameter(Descriptor::kContext);
615 112 : TailCallRuntime(Runtime::kCreateObjectLiteral, context, feedback_vector,
616 56 : SmiTag(slot), object_boilerplate_description, flags);
617 56 : }
618 :
619 : // Used by the CreateEmptyObjectLiteral bytecode and the Object constructor.
620 112 : Node* ConstructorBuiltinsAssembler::EmitCreateEmptyObjectLiteral(
621 : Node* context) {
622 224 : Node* native_context = LoadNativeContext(context);
623 : Node* object_function =
624 224 : LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX);
625 : Node* map = LoadObjectField(object_function,
626 : JSFunction::kPrototypeOrInitialMapOffset);
627 : CSA_ASSERT(this, IsMap(map));
628 : // Ensure that slack tracking is disabled for the map.
629 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
630 : CSA_ASSERT(
631 : this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
632 224 : Node* empty_fixed_array = EmptyFixedArrayConstant();
633 : Node* result =
634 112 : AllocateJSObjectFromMap(map, empty_fixed_array, empty_fixed_array);
635 112 : return result;
636 : }
637 :
638 : // ES #sec-object-constructor
639 336 : TF_BUILTIN(ObjectConstructor, ConstructorBuiltinsAssembler) {
640 : int const kValueArg = 0;
641 : Node* argc =
642 112 : ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
643 56 : CodeStubArguments args(this, argc);
644 : Node* context = Parameter(Descriptor::kContext);
645 : Node* new_target = Parameter(Descriptor::kJSNewTarget);
646 :
647 112 : VARIABLE(var_result, MachineRepresentation::kTagged);
648 56 : Label if_subclass(this, Label::kDeferred), if_notsubclass(this),
649 56 : return_result(this);
650 112 : GotoIf(IsUndefined(new_target), &if_notsubclass);
651 : TNode<JSFunction> target = CAST(Parameter(Descriptor::kJSTarget));
652 56 : Branch(WordEqual(new_target, target), &if_notsubclass, &if_subclass);
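  // new_target is undefined for a plain call and equal to the Object function
  // itself for a normal `new Object(...)`; anything else (a subclass
  // constructor or Reflect.construct with a different new.target) must
  // allocate with new_target's initial map via FastNewObject.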
653 :
654 56 : BIND(&if_subclass);
655 : {
656 : Node* result =
657 112 : CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
658 56 : var_result.Bind(result);
659 56 : Goto(&return_result);
660 : }
661 :
662 56 : BIND(&if_notsubclass);
663 : {
664 56 : Label if_newobject(this, Label::kDeferred), if_toobject(this);
665 :
666 112 : Node* value_index = IntPtrConstant(kValueArg);
667 112 : GotoIf(UintPtrGreaterThanOrEqual(value_index, argc), &if_newobject);
668 112 : Node* value = args.AtIndex(value_index);
669 112 : GotoIf(IsNull(value), &if_newobject);
670 112 : Branch(IsUndefined(value), &if_newobject, &if_toobject);
671 :
672 56 : BIND(&if_newobject);
673 : {
674 56 : Node* result = EmitCreateEmptyObjectLiteral(context);
675 56 : var_result.Bind(result);
676 56 : Goto(&return_result);
677 : }
678 :
679 56 : BIND(&if_toobject);
680 : {
681 112 : Node* result = CallBuiltin(Builtins::kToObject, context, value);
682 56 : var_result.Bind(result);
683 56 : Goto(&return_result);
684 : }
685 : }
686 :
687 56 : BIND(&return_result);
688 56 : args.PopAndReturn(var_result.value());
689 56 : }
690 :
691 : // ES #sec-number-constructor
692 280 : TF_BUILTIN(NumberConstructor, ConstructorBuiltinsAssembler) {
693 : Node* context = Parameter(Descriptor::kContext);
694 : Node* argc =
695 112 : ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
696 56 : CodeStubArguments args(this, argc);
697 :
698 : // 1. If no arguments were passed to this function invocation, let n be +0.
699 168 : VARIABLE(var_n, MachineRepresentation::kTagged, SmiConstant(0));
700 56 : Label if_nloaded(this, &var_n);
701 168 : GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_nloaded);
702 :
703 : // 2. Else,
704 : // a. Let prim be ? ToNumeric(value).
705 : // b. If Type(prim) is BigInt, let n be the Number value for prim.
706 : // c. Otherwise, let n be prim.
707 112 : Node* value = args.AtIndex(0);
708 112 : var_n.Bind(ToNumber(context, value, BigIntHandling::kConvertToNumber));
709 56 : Goto(&if_nloaded);
710 :
711 56 : BIND(&if_nloaded);
712 : {
713 : // 3. If NewTarget is undefined, return n.
714 56 : Node* n_value = var_n.value();
715 : Node* new_target = Parameter(Descriptor::kJSNewTarget);
716 56 : Label return_n(this), constructnumber(this, Label::kDeferred);
717 112 : Branch(IsUndefined(new_target), &return_n, &constructnumber);
718 :
719 56 : BIND(&return_n);
720 56 : { args.PopAndReturn(n_value); }
721 :
722 56 : BIND(&constructnumber);
723 : {
724 : // 4. Let O be ? OrdinaryCreateFromConstructor(NewTarget,
725 : // "%NumberPrototype%", « [[NumberData]] »).
726 : // 5. Set O.[[NumberData]] to n.
727 : // 6. Return O.
728 :
729 :     // We are not using Parameter(Descriptor::kJSTarget) here; instead the
730 :     // target is loaded from the current frame in order to reduce register
731 :     // pressure on the fast path.
732 56 : TNode<JSFunction> target = LoadTargetFromFrame();
733 : Node* result =
734 112 : CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
735 56 : StoreObjectField(result, JSValue::kValueOffset, n_value);
736 56 : args.PopAndReturn(result);
737 : }
738 : }
739 56 : }
740 :
741 224 : TF_BUILTIN(GenericConstructorLazyDeoptContinuation,
742 : ConstructorBuiltinsAssembler) {
743 : Node* result = Parameter(Descriptor::kResult);
744 56 : Return(result);
745 56 : }
746 :
747 : // https://tc39.github.io/ecma262/#sec-string-constructor
748 280 : TF_BUILTIN(StringConstructor, ConstructorBuiltinsAssembler) {
749 : Node* context = Parameter(Descriptor::kContext);
750 : Node* argc =
751 112 : ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
752 56 : CodeStubArguments args(this, argc);
753 :
754 : TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));
755 :
756 : // 1. If no arguments were passed to this function invocation, let s be "".
757 168 : VARIABLE(var_s, MachineRepresentation::kTagged, EmptyStringConstant());
758 56 : Label if_sloaded(this, &var_s);
759 168 : GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_sloaded);
760 :
761 : // 2. Else,
762 : // a. If NewTarget is undefined [...]
763 112 : Node* value = args.AtIndex(0);
764 56 : Label if_tostring(this, &var_s);
765 112 : GotoIfNot(IsUndefined(new_target), &if_tostring);
766 :
767 : // 2a. [...] and Type(value) is Symbol, return SymbolDescriptiveString(value).
768 112 : GotoIf(TaggedIsSmi(value), &if_tostring);
769 112 : GotoIfNot(IsSymbol(value), &if_tostring);
770 : {
771 : Node* result =
772 : CallRuntime(Runtime::kSymbolDescriptiveString, context, value);
773 56 : args.PopAndReturn(result);
774 : }
775 :
776 : // 2b. Let s be ? ToString(value).
777 56 : BIND(&if_tostring);
778 : {
779 112 : var_s.Bind(CallBuiltin(Builtins::kToString, context, value));
780 56 : Goto(&if_sloaded);
781 : }
782 :
783 : // 3. If NewTarget is undefined, return s.
784 56 : BIND(&if_sloaded);
785 : {
786 56 : Node* s_value = var_s.value();
787 56 : Label return_s(this), constructstring(this, Label::kDeferred);
788 112 : Branch(IsUndefined(new_target), &return_s, &constructstring);
789 :
790 56 : BIND(&return_s);
791 56 : { args.PopAndReturn(s_value); }
792 :
793 56 : BIND(&constructstring);
794 : {
795 :       // We are not using Parameter(Descriptor::kJSTarget) here; instead the
796 :       // target is loaded from the current frame in order to reduce register
797 :       // pressure on the fast path.
798 56 : TNode<JSFunction> target = LoadTargetFromFrame();
799 :
800 : Node* result =
801 112 : CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
802 56 : StoreObjectField(result, JSValue::kValueOffset, s_value);
803 56 : args.PopAndReturn(result);
804 : }
805 : }
806 56 : }
807 :
808 : } // namespace internal
809 59480 : } // namespace v8