Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/builtins/builtins-constructor-gen.h"
6 :
7 : #include "src/ast/ast.h"
8 : #include "src/builtins/builtins-call-gen.h"
9 : #include "src/builtins/builtins-constructor.h"
10 : #include "src/builtins/builtins-utils-gen.h"
11 : #include "src/builtins/builtins.h"
12 : #include "src/code-factory.h"
13 : #include "src/code-stub-assembler.h"
14 : #include "src/counters.h"
15 : #include "src/interface-descriptors.h"
16 : #include "src/macro-assembler.h"
17 : #include "src/objects-inl.h"
18 :
19 : namespace v8 {
20 : namespace internal {
21 :
22 56 : void Builtins::Generate_ConstructVarargs(MacroAssembler* masm) {
23 : Generate_CallOrConstructVarargs(masm,
24 56 : BUILTIN_CODE(masm->isolate(), Construct));
25 56 : }
26 :
27 56 : void Builtins::Generate_ConstructForwardVarargs(MacroAssembler* masm) {
28 : Generate_CallOrConstructForwardVarargs(
29 : masm, CallOrConstructMode::kConstruct,
30 56 : BUILTIN_CODE(masm->isolate(), Construct));
31 56 : }
32 :
33 56 : void Builtins::Generate_ConstructFunctionForwardVarargs(MacroAssembler* masm) {
34 : Generate_CallOrConstructForwardVarargs(
35 : masm, CallOrConstructMode::kConstruct,
36 56 : BUILTIN_CODE(masm->isolate(), ConstructFunction));
37 56 : }
38 :
39 448 : TF_BUILTIN(ConstructWithArrayLike, CallOrConstructBuiltinsAssembler) {
40 56 : TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
41 56 : SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
42 56 : TNode<Object> arguments_list = CAST(Parameter(Descriptor::kArgumentsList));
43 56 : TNode<Context> context = CAST(Parameter(Descriptor::kContext));
44 56 : CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
45 56 : }
46 :
47 504 : TF_BUILTIN(ConstructWithSpread, CallOrConstructBuiltinsAssembler) {
48 56 : TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
49 56 : SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
50 56 : TNode<Object> spread = CAST(Parameter(Descriptor::kSpread));
51 : TNode<Int32T> args_count =
52 56 : UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
53 56 : TNode<Context> context = CAST(Parameter(Descriptor::kContext));
54 56 : CallOrConstructWithSpread(target, new_target, spread, args_count, context);
55 56 : }
56 :
57 : typedef compiler::Node Node;
58 :
59 392 : TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
60 56 : Node* shared_function_info = Parameter(Descriptor::kSharedFunctionInfo);
61 56 : Node* feedback_cell = Parameter(Descriptor::kFeedbackCell);
62 56 : Node* context = Parameter(Descriptor::kContext);
63 :
64 : CSA_ASSERT(this, IsFeedbackCell(feedback_cell));
65 : CSA_ASSERT(this, IsSharedFunctionInfo(shared_function_info));
66 :
67 56 : IncrementCounter(isolate()->counters()->fast_new_closure_total(), 1);
68 :
69 : // Bump the closure counter encoded in the {feedback_cell}'s map.
70 : {
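// The cell's map encodes how many closures currently share it:
// NoClosuresCell -> OneClosureCell -> ManyClosuresCell. Cells with the
// NoFeedbackCell map are left untouched.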
71 56 : Node* const feedback_cell_map = LoadMap(feedback_cell);
72 112 : Label no_closures(this), one_closure(this), cell_done(this);
73 :
74 56 : GotoIf(IsNoFeedbackCellMap(feedback_cell_map), &cell_done);
75 56 : GotoIf(IsNoClosuresCellMap(feedback_cell_map), &no_closures);
76 56 : GotoIf(IsOneClosureCellMap(feedback_cell_map), &one_closure);
77 : CSA_ASSERT(this, IsManyClosuresCellMap(feedback_cell_map),
78 : feedback_cell_map, feedback_cell);
79 56 : Goto(&cell_done);
80 :
81 56 : BIND(&no_closures);
82 56 : StoreMapNoWriteBarrier(feedback_cell, RootIndex::kOneClosureCellMap);
83 56 : Goto(&cell_done);
84 :
85 56 : BIND(&one_closure);
86 56 : StoreMapNoWriteBarrier(feedback_cell, RootIndex::kManyClosuresCellMap);
87 56 : Goto(&cell_done);
88 :
89 112 : BIND(&cell_done);
90 : }
91 :
92 : // The calculation of |function_map_index| must be in sync with
93 : // SharedFunctionInfo::function_map_index().
94 : Node* const flags =
95 : LoadObjectField(shared_function_info, SharedFunctionInfo::kFlagsOffset,
96 56 : MachineType::Uint32());
97 : Node* const function_map_index = IntPtrAdd(
98 112 : DecodeWordFromWord32<SharedFunctionInfo::FunctionMapIndexBits>(flags),
99 168 : IntPtrConstant(Context::FIRST_FUNCTION_MAP_INDEX));
100 : CSA_ASSERT(this, UintPtrLessThanOrEqual(
101 : function_map_index,
102 : IntPtrConstant(Context::LAST_FUNCTION_MAP_INDEX)));
103 :
104 : // Get the function map in the current native context and set that
105 : // as the map of the allocated object.
106 56 : Node* const native_context = LoadNativeContext(context);
107 : Node* const function_map =
108 56 : LoadContextElement(native_context, function_map_index);
109 :
110 : // Create a new closure from the given function info in new space
111 : TNode<IntPtrT> instance_size_in_bytes =
112 56 : TimesTaggedSize(LoadMapInstanceSizeInWords(function_map));
113 56 : TNode<Object> result = Allocate(instance_size_in_bytes);
114 56 : StoreMapNoWriteBarrier(result, function_map);
115 : InitializeJSObjectBodyNoSlackTracking(result, function_map,
116 : instance_size_in_bytes,
117 56 : JSFunction::kSizeWithoutPrototype);
118 :
119 : // Initialize the rest of the function.
120 : StoreObjectFieldRoot(result, JSObject::kPropertiesOrHashOffset,
121 56 : RootIndex::kEmptyFixedArray);
122 : StoreObjectFieldRoot(result, JSObject::kElementsOffset,
123 56 : RootIndex::kEmptyFixedArray);
124 : {
125 : // Set function prototype if necessary.
126 112 : Label done(this), init_prototype(this);
127 112 : Branch(IsFunctionWithPrototypeSlotMap(function_map), &init_prototype,
128 56 : &done);
129 :
130 56 : BIND(&init_prototype);
131 : StoreObjectFieldRoot(result, JSFunction::kPrototypeOrInitialMapOffset,
132 56 : RootIndex::kTheHoleValue);
133 56 : Goto(&done);
134 112 : BIND(&done);
135 : }
136 :
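// Initialize the remaining JSFunction fields. Write barriers are unnecessary
// because the closure was freshly allocated above.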
137 : STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
138 : StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackCellOffset,
139 56 : feedback_cell);
140 : StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
141 56 : shared_function_info);
142 56 : StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
143 : Handle<Code> lazy_builtin_handle =
144 56 : isolate()->builtins()->builtin_handle(Builtins::kCompileLazy);
145 56 : Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
146 56 : StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeOffset, lazy_builtin);
147 56 : Return(result);
148 56 : }
149 :
150 392 : TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
151 56 : Node* context = Parameter(Descriptor::kContext);
152 56 : Node* target = Parameter(Descriptor::kTarget);
153 56 : Node* new_target = Parameter(Descriptor::kNewTarget);
154 :
155 56 : Label call_runtime(this);
156 :
157 56 : Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
158 56 : Return(result);
159 :
160 56 : BIND(&call_runtime);
161 56 : TailCallRuntime(Runtime::kNewObject, context, target, new_target);
162 56 : }
163 :
164 392 : Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
165 : Node* target,
166 : Node* new_target) {
167 392 : VARIABLE(var_obj, MachineRepresentation::kTagged);
168 784 : Label call_runtime(this), end(this);
169 :
170 392 : Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
171 392 : var_obj.Bind(result);
172 392 : Goto(&end);
173 :
174 392 : BIND(&call_runtime);
175 392 : var_obj.Bind(CallRuntime(Runtime::kNewObject, context, target, new_target));
176 392 : Goto(&end);
177 :
178 392 : BIND(&end);
179 784 : return var_obj.value();
180 : }
181 :
182 448 : Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
183 : Node* target,
184 : Node* new_target,
185 : Label* call_runtime) {
186 : CSA_ASSERT(this, HasInstanceType(target, JS_FUNCTION_TYPE));
187 : CSA_ASSERT(this, IsJSReceiver(new_target));
188 :
189 : // Verify that the new target is a JSFunction.
190 896 : Label fast(this), end(this);
191 448 : GotoIf(HasInstanceType(new_target, JS_FUNCTION_TYPE), &fast);
192 448 : Goto(call_runtime);
193 :
194 448 : BIND(&fast);
195 :
196 : // Load the initial map and verify that it's in fact a map.
197 : Node* initial_map =
198 448 : LoadObjectField(new_target, JSFunction::kPrototypeOrInitialMapOffset);
199 448 : GotoIf(TaggedIsSmi(initial_map), call_runtime);
200 448 : GotoIf(DoesntHaveInstanceType(initial_map, MAP_TYPE), call_runtime);
201 :
202 : // Fall back to runtime if the target differs from the new target's
203 : // initial map constructor.
204 : Node* new_target_constructor =
205 448 : LoadObjectField(initial_map, Map::kConstructorOrBackPointerOffset);
206 448 : GotoIf(WordNotEqual(target, new_target_constructor), call_runtime);
207 :
208 896 : VARIABLE(properties, MachineRepresentation::kTagged);
209 :
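// Choose the backing store for properties: dictionary-mode maps get a fresh
// NameDictionary, all other maps start out with the empty fixed array.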
210 896 : Label instantiate_map(this), allocate_properties(this);
211 448 : GotoIf(IsDictionaryMap(initial_map), &allocate_properties);
212 : {
213 448 : properties.Bind(EmptyFixedArrayConstant());
214 448 : Goto(&instantiate_map);
215 : }
216 448 : BIND(&allocate_properties);
217 : {
218 448 : properties.Bind(AllocateNameDictionary(NameDictionary::kInitialCapacity));
219 448 : Goto(&instantiate_map);
220 : }
221 :
222 448 : BIND(&instantiate_map);
223 : return AllocateJSObjectFromMap(initial_map, properties.value(), nullptr,
224 896 : kNone, kWithSlackTracking);
225 : }
226 :
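// Allocates a function or eval context consisting of the fixed header fields
// followed by |slots_uint32| tagged slots, which are initialized to undefined.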
227 448 : Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
228 : Node* scope_info, Node* slots_uint32, Node* context, ScopeType scope_type) {
229 448 : TNode<IntPtrT> slots = Signed(ChangeUint32ToWord(slots_uint32));
230 : TNode<IntPtrT> size = ElementOffsetFromIndex(
231 448 : slots, PACKED_ELEMENTS, INTPTR_PARAMETERS, Context::kTodoHeaderSize);
232 :
233 : // Allocate the function context in new space.
234 : TNode<Context> function_context =
235 448 : UncheckedCast<Context>(AllocateInNewSpace(size));
236 :
237 : RootIndex context_type;
238 448 : switch (scope_type) {
239 : case EVAL_SCOPE:
240 224 : context_type = RootIndex::kEvalContextMap;
241 224 : break;
242 : case FUNCTION_SCOPE:
243 224 : context_type = RootIndex::kFunctionContextMap;
244 224 : break;
245 : default:
246 0 : UNREACHABLE();
247 : }
248 : // Set up the header.
249 448 : StoreMapNoWriteBarrier(function_context, context_type);
250 448 : TNode<IntPtrT> min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
251 : // TODO(ishell): for now, length also includes MIN_CONTEXT_SLOTS.
252 448 : TNode<IntPtrT> length = IntPtrAdd(slots, min_context_slots);
253 : StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
254 448 : SmiTag(length));
255 : StoreObjectFieldNoWriteBarrier(function_context, Context::kScopeInfoOffset,
256 448 : scope_info);
257 : StoreObjectFieldNoWriteBarrier(function_context, Context::kPreviousOffset,
258 448 : context);
259 : StoreObjectFieldNoWriteBarrier(function_context, Context::kExtensionOffset,
260 448 : TheHoleConstant());
261 448 : TNode<Context> native_context = LoadNativeContext(context);
262 : StoreObjectFieldNoWriteBarrier(function_context,
263 448 : Context::kNativeContextOffset, native_context);
264 :
265 : // Initialize the rest of the slots to undefined.
266 448 : TNode<HeapObject> undefined = UndefinedConstant();
267 448 : TNode<IntPtrT> start_offset = IntPtrConstant(Context::kTodoHeaderSize);
268 448 : CodeStubAssembler::VariableList vars(0, zone());
269 : BuildFastLoop(
270 : vars, start_offset, size,
271 448 : [=](Node* offset) {
272 : StoreObjectFieldNoWriteBarrier(
273 448 : function_context, UncheckedCast<IntPtrT>(offset), undefined);
274 448 : },
275 448 : kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
276 448 : return function_context;
277 : }
278 :
279 392 : TF_BUILTIN(FastNewFunctionContextEval, ConstructorBuiltinsAssembler) {
280 56 : Node* scope_info = Parameter(Descriptor::kScopeInfo);
281 56 : Node* slots = Parameter(Descriptor::kSlots);
282 56 : Node* context = Parameter(Descriptor::kContext);
283 : Return(EmitFastNewFunctionContext(scope_info, slots, context,
284 56 : ScopeType::EVAL_SCOPE));
285 56 : }
286 :
287 392 : TF_BUILTIN(FastNewFunctionContextFunction, ConstructorBuiltinsAssembler) {
288 56 : Node* scope_info = Parameter(Descriptor::kScopeInfo);
289 56 : Node* slots = Parameter(Descriptor::kSlots);
290 56 : Node* context = Parameter(Descriptor::kContext);
291 : Return(EmitFastNewFunctionContext(scope_info, slots, context,
292 56 : ScopeType::FUNCTION_SCOPE));
293 56 : }
294 :
295 224 : Node* ConstructorBuiltinsAssembler::EmitCreateRegExpLiteral(
296 : Node* feedback_vector, Node* slot, Node* pattern, Node* flags,
297 : Node* context) {
298 448 : Label call_runtime(this, Label::kDeferred), end(this);
299 :
300 224 : GotoIf(IsUndefined(feedback_vector), &call_runtime);
301 :
302 448 : VARIABLE(result, MachineRepresentation::kTagged);
303 : TNode<Object> literal_site =
304 224 : CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
305 224 : GotoIf(NotHasBoilerplate(literal_site), &call_runtime);
306 : {
307 224 : Node* boilerplate = literal_site;
308 : CSA_ASSERT(this, IsJSRegExp(boilerplate));
309 224 : int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kTaggedSize;
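// JSRegExp boilerplates have a fixed size, so the copy below is emitted as
// straight-line code; the C++ loop is unrolled at stub-generation time.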
310 224 : Node* copy = Allocate(size);
311 1792 : for (int offset = 0; offset < size; offset += kTaggedSize) {
312 1568 : Node* value = LoadObjectField(boilerplate, offset);
313 1568 : StoreObjectFieldNoWriteBarrier(copy, offset, value);
314 : }
315 224 : result.Bind(copy);
316 224 : Goto(&end);
317 : }
318 :
319 224 : BIND(&call_runtime);
320 : {
321 : result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context,
322 224 : feedback_vector, SmiTag(slot), pattern, flags));
323 224 : Goto(&end);
324 : }
325 :
326 224 : BIND(&end);
327 448 : return result.value();
328 : }
329 :
330 504 : TF_BUILTIN(CreateRegExpLiteral, ConstructorBuiltinsAssembler) {
331 56 : Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
332 56 : Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
333 56 : Node* pattern = Parameter(Descriptor::kPattern);
334 56 : Node* flags = Parameter(Descriptor::kFlags);
335 56 : Node* context = Parameter(Descriptor::kContext);
336 : Node* result =
337 56 : EmitCreateRegExpLiteral(feedback_vector, slot, pattern, flags, context);
338 56 : Return(result);
339 56 : }
340 :
341 224 : Node* ConstructorBuiltinsAssembler::EmitCreateShallowArrayLiteral(
342 : Node* feedback_vector, Node* slot, Node* context, Label* call_runtime,
343 : AllocationSiteMode allocation_site_mode) {
344 448 : Label zero_capacity(this), cow_elements(this), fast_elements(this),
345 448 : return_result(this);
346 448 : VARIABLE(result, MachineRepresentation::kTagged);
347 :
348 : TNode<Object> maybe_allocation_site =
349 224 : CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
350 224 : GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);
351 :
352 224 : TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
353 224 : TNode<JSArray> boilerplate = CAST(LoadBoilerplate(allocation_site));
354 :
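// CloneFastJSArray performs the actual copy; when allocation sites are
// tracked, the site is passed along so the clone stays associated with it.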
355 224 : ParameterMode mode = OptimalParameterMode();
356 224 : if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
357 168 : return CloneFastJSArray(context, boilerplate, mode, allocation_site);
358 : } else {
359 56 : return CloneFastJSArray(context, boilerplate, mode);
360 224 : }
361 : }
362 :
363 448 : TF_BUILTIN(CreateShallowArrayLiteral, ConstructorBuiltinsAssembler) {
364 56 : Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
365 56 : Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
366 56 : Node* constant_elements = Parameter(Descriptor::kConstantElements);
367 56 : Node* context = Parameter(Descriptor::kContext);
368 56 : Label call_runtime(this, Label::kDeferred);
369 : Return(EmitCreateShallowArrayLiteral(feedback_vector, slot, context,
370 : &call_runtime,
371 56 : DONT_TRACK_ALLOCATION_SITE));
372 :
373 56 : BIND(&call_runtime);
374 : {
375 56 : Comment("call runtime");
376 : int const flags =
377 56 : AggregateLiteral::kDisableMementos | AggregateLiteral::kIsShallow;
378 : Return(CallRuntime(Runtime::kCreateArrayLiteral, context, feedback_vector,
379 56 : SmiTag(slot), constant_elements, SmiConstant(flags)));
380 56 : }
381 56 : }
382 :
383 224 : Node* ConstructorBuiltinsAssembler::EmitCreateEmptyArrayLiteral(
384 : Node* feedback_vector, Node* slot, Node* context) {
385 : // Array literals always have a valid AllocationSite to properly track
386 : // elements transitions.
387 : TNode<Object> maybe_allocation_site =
388 224 : CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
389 224 : TVARIABLE(AllocationSite, allocation_site);
390 :
391 448 : Label create_empty_array(this),
392 448 : initialize_allocation_site(this, Label::kDeferred), done(this);
393 224 : GotoIf(TaggedIsSmi(maybe_allocation_site), &initialize_allocation_site);
394 : {
395 224 : allocation_site = CAST(maybe_allocation_site);
396 224 : Goto(&create_empty_array);
397 : }
398 : // TODO(cbruni): create the AllocationSite in CSA.
399 224 : BIND(&initialize_allocation_site);
400 : {
401 448 : allocation_site =
402 224 : CreateAllocationSiteInFeedbackVector(feedback_vector, SmiTag(slot));
403 224 : Goto(&create_empty_array);
404 : }
405 :
406 224 : BIND(&create_empty_array);
407 224 : TNode<Int32T> kind = LoadElementsKind(allocation_site.value());
408 224 : TNode<Context> native_context = LoadNativeContext(context);
409 224 : Comment("LoadJSArrayElementsMap");
410 224 : TNode<Map> array_map = LoadJSArrayElementsMap(kind, native_context);
411 224 : TNode<Smi> zero = SmiConstant(0);
412 224 : Comment("Allocate JSArray");
413 : TNode<JSArray> result =
414 224 : AllocateJSArray(GetInitialFastElementsKind(), array_map, zero, zero,
415 448 : allocation_site.value(), ParameterMode::SMI_PARAMETERS);
416 :
417 224 : Goto(&done);
418 224 : BIND(&done);
419 :
420 448 : return result;
421 : }
422 :
423 392 : TF_BUILTIN(CreateEmptyArrayLiteral, ConstructorBuiltinsAssembler) {
424 56 : Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
425 56 : Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
426 56 : Node* context = Parameter(Descriptor::kContext);
427 56 : Node* result = EmitCreateEmptyArrayLiteral(feedback_vector, slot, context);
428 56 : Return(result);
429 56 : }
430 :
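// Bails out to |call_runtime| if the feedback slot holds no boilerplate yet,
// if the boilerplate's map is deprecated, or if its properties and elements
// cannot be copied directly.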
431 224 : Node* ConstructorBuiltinsAssembler::EmitCreateShallowObjectLiteral(
432 : Node* feedback_vector, Node* slot, Label* call_runtime) {
433 : TNode<Object> maybe_allocation_site =
434 224 : CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
435 224 : GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);
436 :
437 224 : TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
438 224 : TNode<JSObject> boilerplate = LoadBoilerplate(allocation_site);
439 224 : TNode<Map> boilerplate_map = LoadMap(boilerplate);
440 : CSA_ASSERT(this, IsJSObjectMap(boilerplate_map));
441 :
442 224 : VARIABLE(var_properties, MachineRepresentation::kTagged);
443 : {
444 224 : Node* bit_field_3 = LoadMapBitField3(boilerplate_map);
445 224 : GotoIf(IsSetWord32<Map::IsDeprecatedBit>(bit_field_3), call_runtime);
446 : // Directly copy over the property store for dict-mode boilerplates.
447 448 : Label if_dictionary(this), if_fast(this), done(this);
448 448 : Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field_3), &if_dictionary,
449 224 : &if_fast);
450 224 : BIND(&if_dictionary);
451 : {
452 224 : Comment("Copy dictionary properties");
453 : var_properties.Bind(CopyNameDictionary(
454 224 : CAST(LoadSlowProperties(boilerplate)), call_runtime));
455 : // Slow objects have no in-object properties.
456 224 : Goto(&done);
457 : }
458 224 : BIND(&if_fast);
459 : {
460 : // TODO(cbruni): support copying out-of-object properties.
461 224 : Node* boilerplate_properties = LoadFastProperties(boilerplate);
462 224 : GotoIfNot(IsEmptyFixedArray(boilerplate_properties), call_runtime);
463 224 : var_properties.Bind(EmptyFixedArrayConstant());
464 224 : Goto(&done);
465 : }
466 448 : BIND(&done);
467 : }
468 :
469 448 : VARIABLE(var_elements, MachineRepresentation::kTagged);
470 : {
471 : // Copy the elements backing store, assuming that it's flat.
472 448 : Label if_empty_fixed_array(this), if_copy_elements(this), done(this);
473 224 : Node* boilerplate_elements = LoadElements(boilerplate);
474 448 : Branch(IsEmptyFixedArray(boilerplate_elements), &if_empty_fixed_array,
475 224 : &if_copy_elements);
476 :
477 224 : BIND(&if_empty_fixed_array);
478 224 : var_elements.Bind(boilerplate_elements);
479 224 : Goto(&done);
480 :
481 224 : BIND(&if_copy_elements);
482 : CSA_ASSERT(this, Word32BinaryNot(
483 : IsFixedCOWArrayMap(LoadMap(boilerplate_elements))));
484 224 : ExtractFixedArrayFlags flags;
485 224 : flags |= ExtractFixedArrayFlag::kAllFixedArrays;
486 224 : flags |= ExtractFixedArrayFlag::kNewSpaceAllocationOnly;
487 224 : flags |= ExtractFixedArrayFlag::kDontCopyCOW;
488 224 : var_elements.Bind(CloneFixedArray(boilerplate_elements, flags));
489 224 : Goto(&done);
490 448 : BIND(&done);
491 : }
492 :
493 : // Ensure new-space allocation for a fresh JSObject so we can skip write
494 : // barriers when copying all object fields.
495 : STATIC_ASSERT(JSObject::kMaxInstanceSize < kMaxRegularHeapObjectSize);
496 : TNode<IntPtrT> instance_size =
497 224 : TimesTaggedSize(LoadMapInstanceSizeInWords(boilerplate_map));
498 224 : TNode<IntPtrT> allocation_size = instance_size;
499 224 : bool needs_allocation_memento = FLAG_allocation_site_pretenuring;
500 224 : if (needs_allocation_memento) {
501 : // Prepare for inner-allocating the AllocationMemento.
502 224 : allocation_size =
503 224 : IntPtrAdd(instance_size, IntPtrConstant(AllocationMemento::kSize));
504 : }
505 :
506 : TNode<HeapObject> copy =
507 224 : UncheckedCast<HeapObject>(AllocateInNewSpace(allocation_size));
508 : {
509 224 : Comment("Initialize Literal Copy");
510 : // Initialize Object fields.
511 224 : StoreMapNoWriteBarrier(copy, boilerplate_map);
512 : StoreObjectFieldNoWriteBarrier(copy, JSObject::kPropertiesOrHashOffset,
513 224 : var_properties.value());
514 : StoreObjectFieldNoWriteBarrier(copy, JSObject::kElementsOffset,
515 224 : var_elements.value());
516 : }
517 :
518 : // Initialize the AllocationMemento before potential GCs due to heap number
519 : // allocation when copying the in-object properties.
520 224 : if (needs_allocation_memento) {
521 224 : InitializeAllocationMemento(copy, instance_size, allocation_site);
522 : }
523 :
524 : {
525 : // Copy over in-object properties.
526 224 : Label continue_with_write_barrier(this), done_init(this);
527 224 : TVARIABLE(IntPtrT, offset, IntPtrConstant(JSObject::kHeaderSize));
528 : // Mutable heap numbers only occur on 32-bit platforms.
529 224 : bool may_use_mutable_heap_numbers = !FLAG_unbox_double_fields;
530 : {
531 224 : Comment("Copy in-object properties fast");
532 224 : Label continue_fast(this, &offset);
533 448 : Branch(WordEqual(offset.value(), instance_size), &done_init,
534 224 : &continue_fast);
535 224 : BIND(&continue_fast);
536 224 : if (may_use_mutable_heap_numbers) {
537 0 : TNode<Object> field = LoadObjectField(boilerplate, offset.value());
538 0 : Label store_field(this);
539 0 : GotoIf(TaggedIsSmi(field), &store_field);
540 0 : GotoIf(IsMutableHeapNumber(CAST(field)), &continue_with_write_barrier);
541 0 : Goto(&store_field);
542 0 : BIND(&store_field);
543 0 : StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
544 : } else {
545 : // Copy fields as raw data.
546 : TNode<IntPtrT> field =
547 224 : LoadObjectField<IntPtrT>(boilerplate, offset.value());
548 224 : StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
549 : }
550 224 : offset = IntPtrAdd(offset.value(), IntPtrConstant(kTaggedSize));
551 448 : Branch(WordNotEqual(offset.value(), instance_size), &continue_fast,
552 448 : &done_init);
553 : }
554 :
555 224 : if (!may_use_mutable_heap_numbers) {
556 224 : BIND(&done_init);
557 224 : return copy;
558 : }
559 : // Continue initializing the literal after seeing the first sub-object that
560 : // might cause an allocation. In this case we prepare the new literal by
561 : // copying all pending fields over from the boilerplate and emit full
562 : // write barriers from here on.
563 0 : BIND(&continue_with_write_barrier);
564 : {
565 0 : Comment("Copy in-object properties slow");
566 : BuildFastLoop(
567 : offset.value(), instance_size,
568 0 : [=](Node* offset) {
569 : // TODO(ishell): value decompression is not necessary here.
570 0 : Node* field = LoadObjectField(boilerplate, offset);
571 0 : StoreObjectFieldNoWriteBarrier(copy, offset, field);
572 0 : },
573 0 : kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
574 0 : Comment("Copy mutable HeapNumber values");
575 : BuildFastLoop(
576 : offset.value(), instance_size,
577 0 : [=](Node* offset) {
578 0 : Node* field = LoadObjectField(copy, offset);
579 0 : Label copy_mutable_heap_number(this, Label::kDeferred),
580 0 : continue_loop(this);
581 : // We only have to clone complex field values.
582 0 : GotoIf(TaggedIsSmi(field), &continue_loop);
583 0 : Branch(IsMutableHeapNumber(field), &copy_mutable_heap_number,
584 0 : &continue_loop);
585 0 : BIND(&copy_mutable_heap_number);
586 : {
587 0 : Node* double_value = LoadHeapNumberValue(field);
588 : Node* mutable_heap_number =
589 0 : AllocateMutableHeapNumberWithValue(double_value);
590 0 : StoreObjectField(copy, offset, mutable_heap_number);
591 0 : Goto(&continue_loop);
592 : }
593 0 : BIND(&continue_loop);
594 0 : },
595 0 : kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
596 0 : Goto(&done_init);
597 : }
598 0 : BIND(&done_init);
599 : }
600 224 : return copy;
601 : }
602 :
603 504 : TF_BUILTIN(CreateShallowObjectLiteral, ConstructorBuiltinsAssembler) {
604 56 : Label call_runtime(this);
605 56 : Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
606 56 : Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
607 : Node* copy =
608 56 : EmitCreateShallowObjectLiteral(feedback_vector, slot, &call_runtime);
609 56 : Return(copy);
610 :
611 56 : BIND(&call_runtime);
612 : Node* object_boilerplate_description =
613 56 : Parameter(Descriptor::kObjectBoilerplateDescription);
614 56 : Node* flags = Parameter(Descriptor::kFlags);
615 56 : Node* context = Parameter(Descriptor::kContext);
616 : TailCallRuntime(Runtime::kCreateObjectLiteral, context, feedback_vector,
617 56 : SmiTag(slot), object_boilerplate_description, flags);
618 56 : }
619 :
620 : // Used by the CreateEmptyObjectLiteral bytecode and the Object constructor.
621 112 : Node* ConstructorBuiltinsAssembler::EmitCreateEmptyObjectLiteral(
622 : Node* context) {
623 112 : Node* native_context = LoadNativeContext(context);
624 : Node* object_function =
625 112 : LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX);
626 : Node* map = LoadObjectField(object_function,
627 112 : JSFunction::kPrototypeOrInitialMapOffset);
628 : CSA_ASSERT(this, IsMap(map));
629 : // Ensure that slack tracking is disabled for the map.
630 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
631 : CSA_ASSERT(
632 : this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
633 112 : Node* empty_fixed_array = EmptyFixedArrayConstant();
634 : Node* result =
635 112 : AllocateJSObjectFromMap(map, empty_fixed_array, empty_fixed_array);
636 112 : return result;
637 : }
638 :
639 : // ES #sec-object-constructor
640 448 : TF_BUILTIN(ObjectConstructor, ConstructorBuiltinsAssembler) {
641 56 : int const kValueArg = 0;
642 : Node* argc =
643 56 : ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
644 56 : CodeStubArguments args(this, argc);
645 56 : Node* context = Parameter(Descriptor::kContext);
646 56 : Node* new_target = Parameter(Descriptor::kJSNewTarget);
647 :
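// If new.target is undefined (a plain call) or is the Object function itself,
// dispatch on the value argument below; otherwise this is a subclass
// construction and the instance is allocated via FastNewObject.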
648 56 : VARIABLE(var_result, MachineRepresentation::kTagged);
649 112 : Label if_subclass(this, Label::kDeferred), if_notsubclass(this),
650 112 : return_result(this);
651 56 : GotoIf(IsUndefined(new_target), &if_notsubclass);
652 56 : TNode<JSFunction> target = CAST(Parameter(Descriptor::kJSTarget));
653 56 : Branch(WordEqual(new_target, target), &if_notsubclass, &if_subclass);
654 :
655 56 : BIND(&if_subclass);
656 : {
657 : Node* result =
658 56 : CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
659 56 : var_result.Bind(result);
660 56 : Goto(&return_result);
661 : }
662 :
663 56 : BIND(&if_notsubclass);
664 : {
665 112 : Label if_newobject(this, Label::kDeferred), if_toobject(this);
666 :
667 56 : Node* value_index = IntPtrConstant(kValueArg);
668 56 : GotoIf(UintPtrGreaterThanOrEqual(value_index, argc), &if_newobject);
669 56 : Node* value = args.AtIndex(value_index);
670 56 : GotoIf(IsNull(value), &if_newobject);
671 56 : Branch(IsUndefined(value), &if_newobject, &if_toobject);
672 :
673 56 : BIND(&if_newobject);
674 : {
675 56 : Node* result = EmitCreateEmptyObjectLiteral(context);
676 56 : var_result.Bind(result);
677 56 : Goto(&return_result);
678 : }
679 :
680 56 : BIND(&if_toobject);
681 : {
682 56 : Node* result = CallBuiltin(Builtins::kToObject, context, value);
683 56 : var_result.Bind(result);
684 56 : Goto(&return_result);
685 56 : }
686 : }
687 :
688 56 : BIND(&return_result);
689 112 : args.PopAndReturn(var_result.value());
690 56 : }
691 :
692 : // ES #sec-number-constructor
693 392 : TF_BUILTIN(NumberConstructor, ConstructorBuiltinsAssembler) {
694 56 : Node* context = Parameter(Descriptor::kContext);
695 : Node* argc =
696 56 : ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
697 56 : CodeStubArguments args(this, argc);
698 :
699 : // 1. If no arguments were passed to this function invocation, let n be +0.
700 56 : VARIABLE(var_n, MachineRepresentation::kTagged, SmiConstant(0));
701 112 : Label if_nloaded(this, &var_n);
702 56 : GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_nloaded);
703 :
704 : // 2. Else,
705 : // a. Let prim be ? ToNumeric(value).
706 : // b. If Type(prim) is BigInt, let n be the Number value for prim.
707 : // c. Otherwise, let n be prim.
708 56 : Node* value = args.AtIndex(0);
709 56 : var_n.Bind(ToNumber(context, value, BigIntHandling::kConvertToNumber));
710 56 : Goto(&if_nloaded);
711 :
712 56 : BIND(&if_nloaded);
713 : {
714 : // 3. If NewTarget is undefined, return n.
715 56 : Node* n_value = var_n.value();
716 56 : Node* new_target = Parameter(Descriptor::kJSNewTarget);
717 112 : Label return_n(this), constructnumber(this, Label::kDeferred);
718 56 : Branch(IsUndefined(new_target), &return_n, &constructnumber);
719 :
720 56 : BIND(&return_n);
721 56 : { args.PopAndReturn(n_value); }
722 :
723 56 : BIND(&constructnumber);
724 : {
725 : // 4. Let O be ? OrdinaryCreateFromConstructor(NewTarget,
726 : // "%NumberPrototype%", « [[NumberData]] »).
727 : // 5. Set O.[[NumberData]] to n.
728 : // 6. Return O.
729 :
730 : // We do not use Parameter(Descriptor::kJSTarget); instead we load the target
731 : // from the current frame in order to reduce register pressure on the fast
732 : // path.
733 56 : TNode<JSFunction> target = LoadTargetFromFrame();
734 : Node* result =
735 56 : CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
736 56 : StoreObjectField(result, JSValue::kValueOffset, n_value);
737 56 : args.PopAndReturn(result);
738 56 : }
739 56 : }
740 56 : }
741 :
742 280 : TF_BUILTIN(GenericConstructorLazyDeoptContinuation,
743 : ConstructorBuiltinsAssembler) {
744 56 : Node* result = Parameter(Descriptor::kResult);
745 56 : Return(result);
746 56 : }
747 :
748 : // https://tc39.github.io/ecma262/#sec-string-constructor
749 392 : TF_BUILTIN(StringConstructor, ConstructorBuiltinsAssembler) {
750 56 : Node* context = Parameter(Descriptor::kContext);
751 : Node* argc =
752 56 : ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
753 56 : CodeStubArguments args(this, argc);
754 :
755 56 : TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));
756 :
757 : // 1. If no arguments were passed to this function invocation, let s be "".
758 56 : VARIABLE(var_s, MachineRepresentation::kTagged, EmptyStringConstant());
759 112 : Label if_sloaded(this, &var_s);
760 56 : GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_sloaded);
761 :
762 : // 2. Else,
763 : // a. If NewTarget is undefined [...]
764 56 : Node* value = args.AtIndex(0);
765 112 : Label if_tostring(this, &var_s);
766 56 : GotoIfNot(IsUndefined(new_target), &if_tostring);
767 :
768 : // 2a. [...] and Type(value) is Symbol, return SymbolDescriptiveString(value).
769 56 : GotoIf(TaggedIsSmi(value), &if_tostring);
770 56 : GotoIfNot(IsSymbol(value), &if_tostring);
771 : {
772 : Node* result =
773 56 : CallRuntime(Runtime::kSymbolDescriptiveString, context, value);
774 56 : args.PopAndReturn(result);
775 : }
776 :
777 : // 2b. Let s be ? ToString(value).
778 56 : BIND(&if_tostring);
779 : {
780 56 : var_s.Bind(CallBuiltin(Builtins::kToString, context, value));
781 56 : Goto(&if_sloaded);
782 : }
783 :
784 : // 3. If NewTarget is undefined, return s.
785 56 : BIND(&if_sloaded);
786 : {
787 56 : Node* s_value = var_s.value();
788 112 : Label return_s(this), constructstring(this, Label::kDeferred);
789 56 : Branch(IsUndefined(new_target), &return_s, &constructstring);
790 :
791 56 : BIND(&return_s);
792 56 : { args.PopAndReturn(s_value); }
793 :
794 56 : BIND(&constructstring);
795 : {
796 : // We do not use Parameter(Descriptor::kJSTarget); instead we load the target
797 : // from the current frame in order to reduce register pressure on the fast
798 : // path.
799 56 : TNode<JSFunction> target = LoadTargetFromFrame();
800 :
801 : Node* result =
802 56 : CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
803 56 : StoreObjectField(result, JSValue::kValueOffset, s_value);
804 56 : args.PopAndReturn(result);
805 56 : }
806 56 : }
807 56 : }
808 :
809 : } // namespace internal
810 86739 : } // namespace v8