Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/builtins/builtins-constructor-gen.h"
6 :
7 : #include "src/ast/ast.h"
8 : #include "src/builtins/builtins-constructor.h"
9 : #include "src/builtins/builtins-utils-gen.h"
10 : #include "src/builtins/builtins.h"
11 : #include "src/code-factory.h"
12 : #include "src/code-stub-assembler.h"
13 : #include "src/counters.h"
14 : #include "src/interface-descriptors.h"
15 : #include "src/objects-inl.h"
16 :
17 : namespace v8 {
18 : namespace internal {
19 :
20 : typedef compiler::Node Node;
21 :
// Allocates a JSFunction ("closure") in new space for |shared_info| and links
// it to the feedback cell stored at |slot| of |feedback_vector|.  The map
// installed on the new function is chosen from the native context based on
// the FunctionKind/LanguageMode bits of the SharedFunctionInfo, and the code
// entry is pointed at the CompileLazy builtin so the function is compiled on
// first call.
Node* ConstructorBuiltinsAssembler::EmitFastNewClosure(Node* shared_info,
                                                       Node* feedback_vector,
                                                       Node* slot,
                                                       Node* context) {
  Isolate* isolate = this->isolate();
  Factory* factory = isolate->factory();
  IncrementCounter(isolate->counters()->fast_new_closure_total(), 1);

  // Create a new closure from the given function info in new space
  Node* result = Allocate(JSFunction::kSize);

  // Calculate the index of the map we should install on the function based on
  // the FunctionKind and LanguageMode of the function.
  // Note: Must be kept in sync with Context::FunctionMapIndex
  Node* compiler_hints =
      LoadObjectField(shared_info, SharedFunctionInfo::kCompilerHintsOffset,
                      MachineType::Uint32());
  Node* is_strict = Word32And(
      compiler_hints, Int32Constant(1 << SharedFunctionInfo::kStrictModeBit));

  Label if_normal(this), if_generator(this), if_async(this),
      if_class_constructor(this), if_function_without_prototype(this),
      load_map(this);
  VARIABLE(map_index, MachineType::PointerRepresentation());

  // A zero function-kind field means an ordinary function.
  STATIC_ASSERT(FunctionKind::kNormalFunction == 0);
  Node* is_not_normal =
      Word32And(compiler_hints,
                Int32Constant(SharedFunctionInfo::kAllFunctionKindBitsMask));
  GotoIfNot(is_not_normal, &if_normal);

  Node* is_generator = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kGeneratorFunction
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_generator, &if_generator);

  Node* is_async = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kAsyncFunction
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_async, &if_async);

  Node* is_class_constructor = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kClassConstructor
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_class_constructor, &if_class_constructor);

  if (FLAG_debug_code) {
    // Function must be a function without a prototype.
    CSA_ASSERT(
        this,
        Word32And(compiler_hints,
                  Int32Constant((FunctionKind::kAccessorFunction |
                                 FunctionKind::kArrowFunction |
                                 FunctionKind::kConciseMethod)
                                << SharedFunctionInfo::kFunctionKindShift)));
  }
  Goto(&if_function_without_prototype);

  BIND(&if_normal);
  {
    // Normal functions pick the strict or sloppy map by language mode.
    map_index.Bind(SelectIntPtrConstant(is_strict,
                                        Context::STRICT_FUNCTION_MAP_INDEX,
                                        Context::SLOPPY_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  BIND(&if_generator);
  {
    // The generator bit is also set for async generators; the async bit
    // distinguishes the two map indices.
    Node* is_async =
        Word32And(compiler_hints,
                  Int32Constant(FunctionKind::kAsyncFunction
                                << SharedFunctionInfo::kFunctionKindShift));
    map_index.Bind(SelectIntPtrConstant(
        is_async, Context::ASYNC_GENERATOR_FUNCTION_MAP_INDEX,
        Context::GENERATOR_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  BIND(&if_async);
  {
    map_index.Bind(IntPtrConstant(Context::ASYNC_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  BIND(&if_class_constructor);
  {
    map_index.Bind(IntPtrConstant(Context::CLASS_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  BIND(&if_function_without_prototype);
  {
    map_index.Bind(
        IntPtrConstant(Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX));
    Goto(&load_map);
  }

  BIND(&load_map);

  // Get the function map in the current native context and set that
  // as the map of the allocated object.
  Node* native_context = LoadNativeContext(context);
  Node* map_slot_value =
      LoadFixedArrayElement(native_context, map_index.value());
  StoreMapNoWriteBarrier(result, map_slot_value);

  // Initialize the rest of the function.
  Node* empty_fixed_array = HeapConstant(factory->empty_fixed_array());
  StoreObjectFieldNoWriteBarrier(result, JSObject::kPropertiesOffset,
                                 empty_fixed_array);
  StoreObjectFieldNoWriteBarrier(result, JSObject::kElementsOffset,
                                 empty_fixed_array);
  Node* literals_cell = LoadFixedArrayElement(
      feedback_vector, slot, 0, CodeStubAssembler::SMI_PARAMETERS);
  {
    // Bump the closure counter encoded in the cell's map.
    // The map records how many closures share the cell:
    // no-closures -> one-closure -> many-closures (saturates at "many").
    Node* cell_map = LoadMap(literals_cell);
    Label no_closures(this), one_closure(this), cell_done(this);

    GotoIf(IsNoClosuresCellMap(cell_map), &no_closures);
    GotoIf(IsOneClosureCellMap(cell_map), &one_closure);
    CSA_ASSERT(this, IsManyClosuresCellMap(cell_map));
    Goto(&cell_done);

    BIND(&no_closures);
    StoreMapNoWriteBarrier(literals_cell, Heap::kOneClosureCellMapRootIndex);
    Goto(&cell_done);

    BIND(&one_closure);
    StoreMapNoWriteBarrier(literals_cell, Heap::kManyClosuresCellMapRootIndex);
    Goto(&cell_done);

    BIND(&cell_done);
  }
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackVectorOffset,
                                 literals_cell);
  StoreObjectFieldNoWriteBarrier(
      result, JSFunction::kPrototypeOrInitialMapOffset, TheHoleConstant());
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
  // Set the code entry to the CompileLazy builtin's instruction start so the
  // first invocation triggers compilation.
  Handle<Code> lazy_builtin_handle(
      isolate->builtins()->builtin(Builtins::kCompileLazy));
  Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
  Node* lazy_builtin_entry =
      IntPtrAdd(BitcastTaggedToWord(lazy_builtin),
                IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeEntryOffset,
                                 lazy_builtin_entry,
                                 MachineType::PointerRepresentation());
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kNextFunctionLinkOffset,
                                 UndefinedConstant());

  return result;
}
177 :
178 172 : TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
179 : Node* shared = Parameter(FastNewClosureDescriptor::kSharedFunctionInfo);
180 : Node* context = Parameter(FastNewClosureDescriptor::kContext);
181 : Node* vector = Parameter(FastNewClosureDescriptor::kVector);
182 : Node* slot = Parameter(FastNewClosureDescriptor::kSlot);
183 43 : Return(EmitFastNewClosure(shared, vector, slot, context));
184 43 : }
185 :
// Builtin entry point for `new target(...)` object allocation: tries the
// inline fast path and tail-calls the %NewObject runtime function when the
// fast path bails out.
TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
  Node* context = Parameter(Descriptor::kContext);
  Node* target = Parameter(Descriptor::kTarget);
  Node* new_target = Parameter(Descriptor::kNewTarget);

  Label call_runtime(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  Return(result);

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kNewObject, context, target, new_target);
}
199 :
// Variant of EmitFastNewObject that handles the slow path inline: instead of
// branching to a caller-provided bailout label, it calls the %NewObject
// runtime function and merges the two results into a single value.
Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
                                                      Node* target,
                                                      Node* new_target) {
  VARIABLE(var_obj, MachineRepresentation::kTagged);
  Label call_runtime(this), end(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  var_obj.Bind(result);
  Goto(&end);

  BIND(&call_runtime);
  var_obj.Bind(CallRuntime(Runtime::kNewObject, context, target, new_target));
  Goto(&end);

  BIND(&end);
  return var_obj.value();
}
217 :
218 129 : Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
219 : Node* target,
220 : Node* new_target,
221 : Label* call_runtime) {
222 : CSA_ASSERT(this, HasInstanceType(target, JS_FUNCTION_TYPE));
223 : CSA_ASSERT(this, IsJSReceiver(new_target));
224 :
225 : // Verify that the new target is a JSFunction.
226 258 : Label fast(this), end(this);
227 129 : GotoIf(HasInstanceType(new_target, JS_FUNCTION_TYPE), &fast);
228 129 : Goto(call_runtime);
229 :
230 129 : BIND(&fast);
231 :
232 : // Load the initial map and verify that it's in fact a map.
233 : Node* initial_map =
234 129 : LoadObjectField(new_target, JSFunction::kPrototypeOrInitialMapOffset);
235 129 : GotoIf(TaggedIsSmi(initial_map), call_runtime);
236 129 : GotoIf(DoesntHaveInstanceType(initial_map, MAP_TYPE), call_runtime);
237 :
238 : // Fall back to runtime if the target differs from the new target's
239 : // initial map constructor.
240 : Node* new_target_constructor =
241 129 : LoadObjectField(initial_map, Map::kConstructorOrBackPointerOffset);
242 129 : GotoIf(WordNotEqual(target, new_target_constructor), call_runtime);
243 :
244 258 : VARIABLE(properties, MachineRepresentation::kTagged);
245 :
246 129 : Label instantiate_map(this), allocate_properties(this);
247 129 : GotoIf(IsDictionaryMap(initial_map), &allocate_properties);
248 : {
249 129 : properties.Bind(EmptyFixedArrayConstant());
250 129 : Goto(&instantiate_map);
251 : }
252 129 : BIND(&allocate_properties);
253 : {
254 129 : properties.Bind(AllocateNameDictionary(NameDictionary::kInitialCapacity));
255 129 : Goto(&instantiate_map);
256 : }
257 :
258 129 : BIND(&instantiate_map);
259 :
260 129 : Node* object = AllocateJSObjectFromMap(initial_map, properties.value());
261 :
262 : Node* instance_size_words = ChangeUint32ToWord(LoadObjectField(
263 129 : initial_map, Map::kInstanceSizeOffset, MachineType::Uint8()));
264 : Node* instance_size =
265 129 : WordShl(instance_size_words, IntPtrConstant(kPointerSizeLog2));
266 :
267 : // Perform in-object slack tracking if requested.
268 129 : Node* bit_field3 = LoadMapBitField3(initial_map);
269 129 : Label slack_tracking(this), finalize(this, Label::kDeferred), done(this);
270 129 : GotoIf(IsSetWord32<Map::ConstructionCounter>(bit_field3), &slack_tracking);
271 :
272 : // Initialize remaining fields.
273 : {
274 129 : Comment("no slack tracking");
275 : InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
276 129 : instance_size, Heap::kUndefinedValueRootIndex);
277 129 : Goto(&end);
278 : }
279 :
280 : {
281 129 : BIND(&slack_tracking);
282 :
283 : // Decrease generous allocation count.
284 : STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
285 129 : Comment("update allocation count");
286 : Node* new_bit_field3 = Int32Sub(
287 129 : bit_field3, Int32Constant(1 << Map::ConstructionCounter::kShift));
288 : StoreObjectFieldNoWriteBarrier(initial_map, Map::kBitField3Offset,
289 : new_bit_field3,
290 129 : MachineRepresentation::kWord32);
291 129 : GotoIf(IsClearWord32<Map::ConstructionCounter>(new_bit_field3), &finalize);
292 :
293 : Node* unused_fields = LoadObjectField(
294 129 : initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
295 : Node* used_size =
296 : IntPtrSub(instance_size, WordShl(ChangeUint32ToWord(unused_fields),
297 129 : IntPtrConstant(kPointerSizeLog2)));
298 :
299 129 : Comment("initialize filler fields (no finalize)");
300 : InitializeFieldsWithRoot(object, used_size, instance_size,
301 129 : Heap::kOnePointerFillerMapRootIndex);
302 :
303 129 : Comment("initialize undefined fields (no finalize)");
304 : InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
305 129 : used_size, Heap::kUndefinedValueRootIndex);
306 129 : Goto(&end);
307 : }
308 :
309 : {
310 : // Finalize the instance size.
311 129 : BIND(&finalize);
312 :
313 : Node* unused_fields = LoadObjectField(
314 129 : initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
315 : Node* used_size =
316 : IntPtrSub(instance_size, WordShl(ChangeUint32ToWord(unused_fields),
317 129 : IntPtrConstant(kPointerSizeLog2)));
318 :
319 129 : Comment("initialize filler fields (finalize)");
320 : InitializeFieldsWithRoot(object, used_size, instance_size,
321 129 : Heap::kOnePointerFillerMapRootIndex);
322 :
323 129 : Comment("initialize undefined fields (finalize)");
324 : InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
325 129 : used_size, Heap::kUndefinedValueRootIndex);
326 :
327 129 : CallRuntime(Runtime::kFinalizeInstanceSize, context, initial_map);
328 129 : Goto(&end);
329 : }
330 :
331 129 : BIND(&end);
332 129 : return object;
333 : }
334 :
// Allocates and initializes a function or eval Context in new space with
// Context::MIN_CONTEXT_SLOTS + |slots| total slots.  The fixed header slots
// (closure, previous context, extension, native context) are written
// explicitly; all remaining slots are initialized to undefined.
Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
    Node* function, Node* slots, Node* context, ScopeType scope_type) {
  slots = ChangeUint32ToWord(slots);

  // TODO(ishell): Use CSA::OptimalParameterMode() here.
  ParameterMode mode = INTPTR_PARAMETERS;
  Node* min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
  Node* length = IntPtrAdd(slots, min_context_slots);
  Node* size = GetFixedArrayAllocationSize(length, FAST_ELEMENTS, mode);

  // Create a new closure from the given function info in new space
  Node* function_context = AllocateInNewSpace(size);

  // Select the context map by scope type; only eval and function scopes are
  // supported on this fast path.
  Heap::RootListIndex context_type;
  switch (scope_type) {
    case EVAL_SCOPE:
      context_type = Heap::kEvalContextMapRootIndex;
      break;
    case FUNCTION_SCOPE:
      context_type = Heap::kFunctionContextMapRootIndex;
      break;
    default:
      UNREACHABLE();
  }
  StoreMapNoWriteBarrier(function_context, context_type);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
                                 SmiTag(length));

  // Set up the fixed slots.
  StoreFixedArrayElement(function_context, Context::CLOSURE_INDEX, function,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(function_context, Context::PREVIOUS_INDEX, context,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(function_context, Context::EXTENSION_INDEX,
                         TheHoleConstant(), SKIP_WRITE_BARRIER);

  // Copy the native context from the previous context.
  Node* native_context = LoadNativeContext(context);
  StoreFixedArrayElement(function_context, Context::NATIVE_CONTEXT_INDEX,
                         native_context, SKIP_WRITE_BARRIER);

  // Initialize the rest of the slots to undefined.
  Node* undefined = UndefinedConstant();
  BuildFastFixedArrayForEach(
      function_context, FAST_ELEMENTS, min_context_slots, length,
      [this, undefined](Node* context, Node* offset) {
        // Write barrier is unnecessary: the context was just allocated in new
        // space and undefined is an immortal immovable root.
        StoreNoWriteBarrier(MachineRepresentation::kTagged, context, offset,
                            undefined);
      },
      mode);

  return function_context;
}
388 :
389 172 : TF_BUILTIN(FastNewFunctionContextEval, ConstructorBuiltinsAssembler) {
390 : Node* function = Parameter(FastNewFunctionContextDescriptor::kFunction);
391 : Node* slots = Parameter(FastNewFunctionContextDescriptor::kSlots);
392 : Node* context = Parameter(FastNewFunctionContextDescriptor::kContext);
393 : Return(EmitFastNewFunctionContext(function, slots, context,
394 43 : ScopeType::EVAL_SCOPE));
395 43 : }
396 :
397 172 : TF_BUILTIN(FastNewFunctionContextFunction, ConstructorBuiltinsAssembler) {
398 : Node* function = Parameter(FastNewFunctionContextDescriptor::kFunction);
399 : Node* slots = Parameter(FastNewFunctionContextDescriptor::kSlots);
400 : Node* context = Parameter(FastNewFunctionContextDescriptor::kContext);
401 : Return(EmitFastNewFunctionContext(function, slots, context,
402 43 : ScopeType::FUNCTION_SCOPE));
403 43 : }
404 :
// Clones the JSRegExp boilerplate cached in |closure|'s feedback vector at
// |literal_index| via a shallow, field-by-field copy.  If no boilerplate
// exists yet (the feedback slot holds undefined), falls back to the
// %CreateRegExpLiteral runtime function, which also populates the slot.
Node* ConstructorBuiltinsAssembler::EmitFastCloneRegExp(Node* closure,
                                                        Node* literal_index,
                                                        Node* pattern,
                                                        Node* flags,
                                                        Node* context) {
  Label call_runtime(this, Label::kDeferred), end(this);

  VARIABLE(result, MachineRepresentation::kTagged);

  Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
  Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset);
  Node* boilerplate = LoadFixedArrayElement(feedback_vector, literal_index, 0,
                                            CodeStubAssembler::SMI_PARAMETERS);
  GotoIf(IsUndefined(boilerplate), &call_runtime);

  {
    // Copy the regexp including its in-object fields, one pointer-sized
    // field per iteration (the loop is unrolled at compile time).
    int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    Node* copy = Allocate(size);
    for (int offset = 0; offset < size; offset += kPointerSize) {
      Node* value = LoadObjectField(boilerplate, offset);
      StoreObjectFieldNoWriteBarrier(copy, offset, value);
    }
    result.Bind(copy);
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context, closure,
                            literal_index, pattern, flags));
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}
441 :
442 172 : TF_BUILTIN(FastCloneRegExp, ConstructorBuiltinsAssembler) {
443 : Node* closure = Parameter(FastCloneRegExpDescriptor::kClosure);
444 : Node* literal_index = Parameter(FastCloneRegExpDescriptor::kLiteralIndex);
445 : Node* pattern = Parameter(FastCloneRegExpDescriptor::kPattern);
446 : Node* flags = Parameter(FastCloneRegExpDescriptor::kFlags);
447 : Node* context = Parameter(FastCloneRegExpDescriptor::kContext);
448 :
449 43 : Return(EmitFastCloneRegExp(closure, literal_index, pattern, flags, context));
450 43 : }
451 :
// Shallow-clones a non-empty boilerplate JSArray: allocates a new JSArray
// plus elements store of |capacity| for elements kind |kind|, copies the
// elements header (map + length) and then the elements themselves without
// write barriers.  |allocation_site| may be null when mementos are not
// tracked.
Node* ConstructorBuiltinsAssembler::NonEmptyShallowClone(
    Node* boilerplate, Node* boilerplate_map, Node* boilerplate_elements,
    Node* allocation_site, Node* capacity, ElementsKind kind) {
  ParameterMode param_mode = OptimalParameterMode();

  Node* length = LoadJSArrayLength(boilerplate);
  capacity = TaggedToParameter(capacity, param_mode);

  Node *array, *elements;
  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
      kind, boilerplate_map, length, allocation_site, capacity, param_mode);

  Comment("copy elements header");
  // Header consists of map and length.
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
  StoreMap(elements, LoadMap(boilerplate_elements));
  {
    int offset = FixedArrayBase::kLengthOffset;
    StoreObjectFieldNoWriteBarrier(
        elements, offset, LoadObjectField(boilerplate_elements, offset));
  }

  length = TaggedToParameter(length, param_mode);

  Comment("copy boilerplate elements");
  CopyFixedArrayElements(kind, boilerplate_elements, elements, length,
                         SKIP_WRITE_BARRIER, param_mode);
  IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);

  return array;
}
483 :
// Shallow-clones the array literal boilerplate reachable from |closure|'s
// feedback vector at |literal_index|.  Dispatches on the boilerplate's
// elements: zero-capacity and copy-on-write backing stores share the elements
// (no element copy); FixedArray and FixedDoubleArray backing stores are
// cloned via NonEmptyShallowClone.  Jumps to |call_runtime| when the feedback
// slot has no allocation site yet.
Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowArray(
    Node* closure, Node* literal_index, Node* context, Label* call_runtime,
    AllocationSiteMode allocation_site_mode) {
  Label zero_capacity(this), cow_elements(this), fast_elements(this),
      return_result(this);
  VARIABLE(result, MachineRepresentation::kTagged);

  Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
  Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset);
  Node* allocation_site = LoadFixedArrayElement(
      feedback_vector, literal_index, 0, CodeStubAssembler::SMI_PARAMETERS);

  GotoIf(IsUndefined(allocation_site), call_runtime);
  allocation_site = LoadFixedArrayElement(feedback_vector, literal_index, 0,
                                          CodeStubAssembler::SMI_PARAMETERS);

  Node* boilerplate =
      LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
  Node* boilerplate_map = LoadMap(boilerplate);
  Node* boilerplate_elements = LoadElements(boilerplate);
  Node* capacity = LoadFixedArrayBaseLength(boilerplate_elements);
  // Only pass the allocation site onward when memento tracking is requested.
  allocation_site =
      allocation_site_mode == TRACK_ALLOCATION_SITE ? allocation_site : nullptr;

  Node* zero = SmiConstant(Smi::kZero);
  GotoIf(SmiEqual(capacity, zero), &zero_capacity);

  Node* elements_map = LoadMap(boilerplate_elements);
  GotoIf(IsFixedCOWArrayMap(elements_map), &cow_elements);

  GotoIf(IsFixedArrayMap(elements_map), &fast_elements);
  {
    // Remaining case: the backing store must be a FixedDoubleArray.
    Comment("fast double elements path");
    if (FLAG_debug_code) {
      Label correct_elements_map(this), abort(this, Label::kDeferred);
      Branch(IsFixedDoubleArrayMap(elements_map), &correct_elements_map,
             &abort);

      BIND(&abort);
      {
        Node* abort_id = SmiConstant(
            Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
        CallRuntime(Runtime::kAbort, context, abort_id);
        result.Bind(UndefinedConstant());
        Goto(&return_result);
      }
      BIND(&correct_elements_map);
    }

    Node* array =
        NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
                             allocation_site, capacity, FAST_DOUBLE_ELEMENTS);
    result.Bind(array);
    Goto(&return_result);
  }

  BIND(&fast_elements);
  {
    Comment("fast elements path");
    Node* array =
        NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
                             allocation_site, capacity, FAST_ELEMENTS);
    result.Bind(array);
    Goto(&return_result);
  }

  VARIABLE(length, MachineRepresentation::kTagged);
  VARIABLE(elements, MachineRepresentation::kTagged);
  Label allocate_without_elements(this);

  BIND(&cow_elements);
  {
    // Copy-on-write elements can be shared directly with the boilerplate.
    Comment("fixed cow path");
    length.Bind(LoadJSArrayLength(boilerplate));
    elements.Bind(boilerplate_elements);

    Goto(&allocate_without_elements);
  }

  BIND(&zero_capacity);
  {
    // Empty arrays share the canonical empty fixed array.
    Comment("zero capacity path");
    length.Bind(zero);
    elements.Bind(LoadRoot(Heap::kEmptyFixedArrayRootIndex));

    Goto(&allocate_without_elements);
  }

  BIND(&allocate_without_elements);
  {
    Node* array = AllocateUninitializedJSArrayWithoutElements(
        FAST_ELEMENTS, boilerplate_map, length.value(), allocation_site);
    StoreObjectField(array, JSObject::kElementsOffset, elements.value());
    result.Bind(array);
    Goto(&return_result);
  }

  BIND(&return_result);
  return result.value();
}
584 :
// Shared body for the FastCloneShallowArray{Track,DontTrack} builtins: tries
// the fast clone and falls back to the %CreateArrayLiteral runtime function,
// passing kDisableMementos when allocation-site tracking is off.
void ConstructorBuiltinsAssembler::CreateFastCloneShallowArrayBuiltin(
    AllocationSiteMode allocation_site_mode) {
  Node* closure = Parameter(FastCloneShallowArrayDescriptor::kClosure);
  Node* literal_index =
      Parameter(FastCloneShallowArrayDescriptor::kLiteralIndex);
  Node* constant_elements =
      Parameter(FastCloneShallowArrayDescriptor::kConstantElements);
  Node* context = Parameter(FastCloneShallowArrayDescriptor::kContext);
  Label call_runtime(this, Label::kDeferred);
  Return(EmitFastCloneShallowArray(closure, literal_index, context,
                                   &call_runtime, allocation_site_mode));

  BIND(&call_runtime);
  {
    Comment("call runtime");
    Node* flags =
        SmiConstant(Smi::FromInt(ArrayLiteral::kShallowElements |
                                 (allocation_site_mode == TRACK_ALLOCATION_SITE
                                      ? 0
                                      : ArrayLiteral::kDisableMementos)));
    Return(CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
                       literal_index, constant_elements, flags));
  }
}
609 :
// Clones a shallow array literal while keeping allocation-site mementos, so
// the site can track elements-kind transitions.
TF_BUILTIN(FastCloneShallowArrayTrack, ConstructorBuiltinsAssembler) {
  CreateFastCloneShallowArrayBuiltin(TRACK_ALLOCATION_SITE);
}
613 :
// Clones a shallow array literal without allocation-site mementos.
TF_BUILTIN(FastCloneShallowArrayDontTrack, ConstructorBuiltinsAssembler) {
  CreateFastCloneShallowArrayBuiltin(DONT_TRACK_ALLOCATION_SITE);
}
617 :
// Shallow-clones the object literal boilerplate found in |closure|'s feedback
// vector at |literals_index| by a raw word-by-word copy of |properties_count|
// in-object fields plus the header.  Bails out to |call_runtime| when no
// allocation site exists yet or the boilerplate's instance size does not
// match the expected size.  When pretenuring is enabled, an
// AllocationMemento is appended and the site's create count is bumped.
Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject(
    Label* call_runtime, Node* closure, Node* literals_index,
    Node* properties_count) {
  Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
  Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset);
  Node* allocation_site = LoadFixedArrayElement(
      feedback_vector, literals_index, 0, CodeStubAssembler::SMI_PARAMETERS);
  GotoIf(IsUndefined(allocation_site), call_runtime);

  // Calculate the object and allocation size based on the properties count.
  Node* object_size = IntPtrAdd(WordShl(properties_count, kPointerSizeLog2),
                                IntPtrConstant(JSObject::kHeaderSize));
  Node* allocation_size = object_size;
  if (FLAG_allocation_site_pretenuring) {
    // Reserve extra space for the trailing AllocationMemento.
    allocation_size =
        IntPtrAdd(object_size, IntPtrConstant(AllocationMemento::kSize));
  }
  Node* boilerplate =
      LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
  Node* boilerplate_map = LoadMap(boilerplate);
  Node* instance_size = LoadMapInstanceSize(boilerplate_map);
  Node* size_in_words = WordShr(object_size, kPointerSizeLog2);
  GotoIfNot(WordEqual(instance_size, size_in_words), call_runtime);

  Node* copy = AllocateInNewSpace(allocation_size);

  // Copy boilerplate elements.
  VARIABLE(offset, MachineType::PointerRepresentation());
  // Start at -kHeapObjectTag so untagged loads/stores hit the object start.
  offset.Bind(IntPtrConstant(-kHeapObjectTag));
  Node* end_offset = IntPtrAdd(object_size, offset.value());
  Label loop_body(this, &offset), loop_check(this, &offset);
  // We should always have an object size greater than zero.
  Goto(&loop_body);
  BIND(&loop_body);
  {
    // The Allocate above guarantees that the copy lies in new space. This
    // allows us to skip write barriers. This is necessary since we may also be
    // copying unboxed doubles.
    Node* field = Load(MachineType::IntPtr(), boilerplate, offset.value());
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), copy,
                        offset.value(), field);
    Goto(&loop_check);
  }
  BIND(&loop_check);
  {
    offset.Bind(IntPtrAdd(offset.value(), IntPtrConstant(kPointerSize)));
    GotoIfNot(IntPtrGreaterThanOrEqual(offset.value(), end_offset), &loop_body);
  }

  if (FLAG_allocation_site_pretenuring) {
    // Write the AllocationMemento directly behind the object and bump the
    // site's create count for pretenuring decisions.
    Node* memento = InnerAllocate(copy, object_size);
    StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
    StoreObjectFieldNoWriteBarrier(
        memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
    Node* memento_create_count = LoadObjectField(
        allocation_site, AllocationSite::kPretenureCreateCountOffset);
    memento_create_count =
        SmiAdd(memento_create_count, SmiConstant(Smi::FromInt(1)));
    StoreObjectFieldNoWriteBarrier(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset,
                                   memento_create_count);
  }

  // TODO(verwaest): Allocate and fill in double boxes.
  return copy;
}
684 :
// Shared body for the FastCloneShallowObject<N> builtins: tries the fast
// shallow clone for a fixed |properties_count| and tail-calls the
// %CreateObjectLiteral runtime function on bailout.
template <typename Descriptor>
void ConstructorBuiltinsAssembler::CreateFastCloneShallowObjectBuiltin(
    int properties_count) {
  DCHECK_GE(properties_count, 0);
  DCHECK_LE(properties_count,
            ConstructorBuiltins::kMaximumClonedShallowObjectProperties);
  Label call_runtime(this);
  Node* closure = Parameter(Descriptor::kClosure);
  Node* literals_index = Parameter(Descriptor::kLiteralIndex);

  Node* properties_count_node =
      IntPtrConstant(ConstructorBuiltins::FastCloneShallowObjectPropertiesCount(
          properties_count));
  Node* copy = EmitFastCloneShallowObject(
      &call_runtime, closure, literals_index, properties_count_node);
  Return(copy);

  BIND(&call_runtime);
  Node* constant_properties = Parameter(Descriptor::kConstantProperties);
  Node* flags = Parameter(Descriptor::kFlags);
  Node* context = Parameter(Descriptor::kContext);
  TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
                  literals_index, constant_properties, flags);
}
709 :
// Stamps out one FastCloneShallowObject builtin per supported boilerplate
// property count (0..6); each instantiates the shared helper with its
// compile-time property count.
#define SHALLOW_OBJECT_BUILTIN(props)                                       \
  TF_BUILTIN(FastCloneShallowObject##props, ConstructorBuiltinsAssembler) { \
    CreateFastCloneShallowObjectBuiltin<Descriptor>(props);                 \
  }

SHALLOW_OBJECT_BUILTIN(0);
SHALLOW_OBJECT_BUILTIN(1);
SHALLOW_OBJECT_BUILTIN(2);
SHALLOW_OBJECT_BUILTIN(3);
SHALLOW_OBJECT_BUILTIN(4);
SHALLOW_OBJECT_BUILTIN(5);
SHALLOW_OBJECT_BUILTIN(6);
722 :
723 : } // namespace internal
724 : } // namespace v8
|