Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/api.h"
6 : #include "src/builtins/builtins-utils-gen.h"
7 : #include "src/builtins/builtins.h"
8 : #include "src/code-stub-assembler.h"
9 : #include "src/heap/heap-inl.h" // crbug.com/v8/8499
10 : #include "src/ic/accessor-assembler.h"
11 : #include "src/ic/keyed-store-generic.h"
12 : #include "src/macro-assembler.h"
13 : #include "src/objects/debug-objects.h"
14 : #include "src/objects/shared-function-info.h"
15 : #include "src/runtime/runtime.h"
16 :
17 : namespace v8 {
18 : namespace internal {
19 :
20 : template <typename T>
21 : using TNode = compiler::TNode<T>;
22 :
23 : // -----------------------------------------------------------------------------
24 : // Stack checks.
25 :
// StackCheck builtin: defers entirely to the StackGuard runtime function,
// which handles interrupts and stack-overflow checks.
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard);
}
29 :
30 : // -----------------------------------------------------------------------------
31 : // TurboFan support builtins.
32 :
// Replaces {object}'s elements backing store with a fresh copy of itself and
// returns the copy. NOTE(review): presumably used to un-share a
// copy-on-write / shared FixedArray before an in-place write — confirm
// against callers.
TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);

  // Load the {object}s elements.
  Node* source = LoadObjectField(object, JSObject::kElementsOffset);
  // kFixedArrays: the clone is a plain FixedArray (Smi-or-object elements).
  Node* target = CloneFixedArray(source, ExtractFixedArrayFlag::kFixedArrays);
  StoreObjectField(object, JSObject::kElementsOffset, target);
  Return(target);
}
42 :
// Grows {object}'s PACKED_DOUBLE_ELEMENTS backing store so that {key} fits,
// returning the (possibly new) elements array. Falls back to the
// GrowArrayElements runtime function when the fast-path growth bails out.
TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);

  Label runtime(this, Label::kDeferred);
  Node* elements = LoadElements(object);
  elements = TryGrowElementsCapacity(object, elements, PACKED_DOUBLE_ELEMENTS,
                                     key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
}
57 :
// Same as GrowFastDoubleElements above, but for PACKED_ELEMENTS
// (Smi-or-object) backing stores.
TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);

  Label runtime(this, Label::kDeferred);
  Node* elements = LoadElements(object);
  elements =
      TryGrowElementsCapacity(object, elements, PACKED_ELEMENTS, key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
}
72 :
// Allocates the FixedArray backing an arguments object: up to {mapped_count}
// leading slots are filled with the-hole (for mapped/aliased arguments), the
// remaining slots are copied from the stack {frame}. Large arrays are
// delegated to the runtime so they can go to old/large-object space.
TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
  Node* frame = Parameter(Descriptor::kFrame);
  TNode<IntPtrT> length = SmiToIntPtr(Parameter(Descriptor::kLength));
  TNode<IntPtrT> mapped_count =
      SmiToIntPtr(Parameter(Descriptor::kMappedCount));

  // Check if we can allocate in new space.
  ElementsKind kind = PACKED_ELEMENTS;
  int max_elements = FixedArray::GetMaxLengthForNewSpaceAllocation(kind);
  Label if_newspace(this), if_oldspace(this, Label::kDeferred);
  Branch(IntPtrLessThan(length, IntPtrConstant(max_elements)), &if_newspace,
         &if_oldspace);

  BIND(&if_newspace);
  {
    // Prefer EmptyFixedArray in case of non-positive {length} (the {length}
    // can be negative here for rest parameters).
    Label if_empty(this), if_notempty(this);
    Branch(IntPtrLessThanOrEqual(length, IntPtrConstant(0)), &if_empty,
           &if_notempty);

    BIND(&if_empty);
    Return(EmptyFixedArrayConstant());

    BIND(&if_notempty);
    {
      // Allocate a FixedArray in new space.
      TNode<FixedArray> result = CAST(AllocateFixedArray(kind, length));

      // The elements might be used to back mapped arguments. In that case fill
      // the mapped elements (i.e. the first {mapped_count}) with the hole, but
      // make sure not to overshoot the {length} if some arguments are missing.
      TNode<IntPtrT> number_of_holes = IntPtrMin(mapped_count, length);
      Node* the_hole = TheHoleConstant();

      // Fill the first elements up to {number_of_holes} with the hole.
      TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
      Label loop1(this, &var_index), done_loop1(this);
      Goto(&loop1);
      BIND(&loop1);
      {
        // Load the current {index}.
        TNode<IntPtrT> index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, number_of_holes), &done_loop1);

        // Store the hole into the {result}.
        StoreFixedArrayElement(result, index, the_hole, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index = IntPtrAdd(index, IntPtrConstant(1));
        Goto(&loop1);
      }
      BIND(&done_loop1);

      // Compute the effective {offset} into the {frame}.
      // NOTE(review): the +1 presumably accounts for one extra frame slot
      // between the frame pointer and the first parameter — confirm against
      // the target's frame layout.
      TNode<IntPtrT> offset = IntPtrAdd(length, IntPtrConstant(1));

      // Copy the parameters from {frame} (starting at {offset}) to {result}.
      // {var_index} deliberately carries over from loop1, so copying starts
      // at {number_of_holes} rather than 0.
      Label loop2(this, &var_index), done_loop2(this);
      Goto(&loop2);
      BIND(&loop2);
      {
        // Load the current {index}.
        TNode<IntPtrT> index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, length), &done_loop2);

        // Load the parameter at the given {index}. Parameters sit at
        // decreasing frame offsets as {index} grows (offset - index).
        TNode<Object> value = BitcastWordToTagged(
            Load(MachineType::Pointer(), frame,
                 TimesSystemPointerSize(IntPtrSub(offset, index))));

        // Store the {value} into the {result}.
        StoreFixedArrayElement(result, index, value, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index = IntPtrAdd(index, IntPtrConstant(1));
        Goto(&loop2);
      }
      BIND(&done_loop2);

      Return(result);
    }
  }

  BIND(&if_oldspace);
  {
    // Allocate in old space (or large object space).
    TailCallRuntime(Runtime::kNewArgumentsElements, NoContextConstant(),
                    BitcastWordToTagged(frame), SmiFromIntPtr(length),
                    SmiFromIntPtr(mapped_count));
  }
}
169 :
// Identity builtin: returns its receiver unchanged.
TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
  Return(Parameter(Descriptor::kReceiver));
}
173 :
// Trampoline installed on functions while the debugger is active: if the
// function's DebugInfo has the break-at-entry flag set, triggers the debug
// break via the runtime, then (in all cases) tail-calls the function's real
// code from its SharedFunctionInfo.
TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
  Label tailcall_to_shared(this);
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));
  TNode<Int32T> arg_count =
      UncheckedCast<Int32T>(Parameter(Descriptor::kJSActualArgumentsCount));
  TNode<JSFunction> function = CAST(Parameter(Descriptor::kJSTarget));

  // Check break-at-entry flag on the debug info.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  // The slot holds either a script, a DebugInfo, or a Smi; only a DebugInfo
  // is interesting here — anything else means no break was requested.
  TNode<Object> maybe_heap_object_or_smi =
      LoadObjectField(shared, SharedFunctionInfo::kScriptOrDebugInfoOffset);
  TNode<HeapObject> maybe_debug_info =
      TaggedToHeapObject(maybe_heap_object_or_smi, &tailcall_to_shared);
  GotoIfNot(HasInstanceType(maybe_debug_info, InstanceType::DEBUG_INFO_TYPE),
            &tailcall_to_shared);

  {
    TNode<DebugInfo> debug_info = CAST(maybe_debug_info);
    TNode<Smi> flags =
        CAST(LoadObjectField(debug_info, DebugInfo::kFlagsOffset));
    GotoIfNot(SmiToInt32(SmiAnd(flags, SmiConstant(DebugInfo::kBreakAtEntry))),
              &tailcall_to_shared);

    CallRuntime(Runtime::kDebugBreakAtEntry, context, function);
    Goto(&tailcall_to_shared);
  }

  BIND(&tailcall_to_shared);
  // Tail call into code object on the SharedFunctionInfo.
  TNode<Code> code = GetSharedFunctionInfoCode(shared);
  TailCallJSCode(code, context, function, new_target, arg_count);
}
208 :
// Helpers shared by the RecordWrite builtin below: queries of the GC's
// incremental-marking state, per-page flags and marking bitmap bits, plus
// store-buffer insertion and C-call wrappers that respect the caller's
// FP-register-saving mode.
class RecordWriteCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit RecordWriteCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // Loads the heap's is-marking flag byte; non-zero while incremental
  // marking is active.
  Node* IsMarking() {
    Node* is_marking_addr = ExternalConstant(
        ExternalReference::heap_is_marking_flag_address(this->isolate()));
    return Load(MachineType::Uint8(), is_marking_addr);
  }

  // True iff any bit of {mask} is set in the flags word of the MemoryChunk
  // (page) containing {object}.
  TNode<BoolT> IsPageFlagSet(TNode<IntPtrT> object, int mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<IntPtrT> flags =
        UncheckedCast<IntPtrT>(Load(MachineType::Pointer(), page,
                                    IntPtrConstant(MemoryChunk::kFlagsOffset)));
    return WordNotEqual(WordAnd(flags, IntPtrConstant(mask)),
                        IntPtrConstant(0));
  }

  // True iff {object}'s mark bit is unset, i.e. the object is "white"
  // (not yet marked) in the marking bitmap.
  TNode<BoolT> IsWhite(TNode<IntPtrT> object) {
    DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
    Node* cell;
    Node* mask;
    GetMarkBit(object, &cell, &mask);
    mask = TruncateIntPtrToInt32(mask);
    // Non-white has 1 for the first bit, so we only need to check for the first
    // bit.
    return Word32Equal(Word32And(Load(MachineType::Int32(), cell), mask),
                       Int32Constant(0));
  }

  // Computes, for {object}, the address of the bitmap {cell} containing its
  // mark bit and the single-bit {mask} selecting that bit within the cell.
  void GetMarkBit(TNode<IntPtrT> object, Node** cell, Node** mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    Node* bitmap = Load(MachineType::Pointer(), page,
                        IntPtrConstant(MemoryChunk::kMarkBitmapOffset));

    {
      // Temp variable to calculate cell offset in bitmap.
      Node* r0;
      int shift = Bitmap::kBitsPerCellLog2 + kTaggedSizeLog2 -
                  Bitmap::kBytesPerCellLog2;
      r0 = WordShr(object, IntPtrConstant(shift));
      // Mask to the page-relative byte offset, rounded down to a cell
      // boundary.
      r0 = WordAnd(r0, IntPtrConstant((kPageAlignmentMask >> shift) &
                                      ~(Bitmap::kBytesPerCell - 1)));
      *cell = IntPtrAdd(bitmap, r0);
    }
    {
      // Temp variable to calculate bit offset in cell.
      Node* r1;
      r1 = WordShr(object, IntPtrConstant(kTaggedSizeLog2));
      r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
      // It seems that LSB(e.g. cl) is automatically used, so no manual masking
      // is needed. Uncomment the following line otherwise.
      // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
      *mask = WordShl(IntPtrConstant(1), r1);
    }
  }

  // True iff {mode} (a Smi-encoded SaveFPRegsMode) says FP registers need
  // not be saved around C calls.
  Node* ShouldSkipFPRegs(Node* mode) {
    return WordEqual(mode, SmiConstant(kDontSaveFPRegs));
  }

  // True iff the caller requested the generational (remembered-set) barrier.
  Node* ShouldEmitRememberSet(Node* remembered_set) {
    return WordEqual(remembered_set, SmiConstant(EMIT_REMEMBERED_SET));
  }

  // Calls {function}(arg0) with caller-saved registers preserved, saving FP
  // registers as dictated by the runtime {mode}, then jumps to {next}.
  // The branch is needed because the FP-saving choice is baked into the
  // call-site code.
  void CallCFunction1WithCallerSavedRegistersMode(MachineType return_type,
                                                  MachineType arg0_type,
                                                  Node* function, Node* arg0,
                                                  Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kSaveFPRegs);
      Goto(next);
    }
  }

  // Three-argument variant of the wrapper above.
  void CallCFunction3WithCallerSavedRegistersMode(
      MachineType return_type, MachineType arg0_type, MachineType arg1_type,
      MachineType arg2_type, Node* function, Node* arg0, Node* arg1, Node* arg2,
      Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kSaveFPRegs);
      Goto(next);
    }
  }

  // Appends {slot} to the heap's store buffer and advances the top pointer.
  // When the new top wraps around the buffer mask (buffer full), calls the
  // store-buffer-overflow C function (respecting FP-save {mode}) before
  // continuing at {next}.
  void InsertToStoreBufferAndGoto(Node* isolate, Node* slot, Node* mode,
                                  Label* next) {
    Node* store_buffer_top_addr =
        ExternalConstant(ExternalReference::store_buffer_top(this->isolate()));
    Node* store_buffer_top =
        Load(MachineType::Pointer(), store_buffer_top_addr);
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), store_buffer_top,
                        slot);
    Node* new_store_buffer_top =
        IntPtrAdd(store_buffer_top, IntPtrConstant(kSystemPointerSize));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(),
                        store_buffer_top_addr, new_store_buffer_top);

    Node* test = WordAnd(new_store_buffer_top,
                         IntPtrConstant(Heap::store_buffer_mask_constant()));

    Label overflow(this);
    Branch(WordEqual(test, IntPtrConstant(0)), &overflow, next);

    BIND(&overflow);
    {
      Node* function =
          ExternalConstant(ExternalReference::store_buffer_overflow_function());
      CallCFunction1WithCallerSavedRegistersMode(MachineType::Int32(),
                                                 MachineType::Pointer(),
                                                 function, isolate, mode, next);
    }
  }
};
349 :
// GC write-barrier stub: records the write of a pointer into {slot} of
// {object}. Depending on the requested remembered-set mode and the current
// marking state, it inserts the slot into the store buffer (generational
// barrier), invokes the incremental-marking record-write C function, or both.
// Always returns true.
TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
  Label generational_wb(this);
  Label incremental_wb(this);
  Label exit(this);

  Node* remembered_set = Parameter(Descriptor::kRememberedSet);
  Branch(ShouldEmitRememberSet(remembered_set), &generational_wb,
         &incremental_wb);

  BIND(&generational_wb);
  {
    Label test_old_to_young_flags(this);
    Label store_buffer_exit(this), store_buffer_incremental_wb(this);

    // When incremental marking is not on, we skip cross generation pointer
    // checking here, because there are checks for
    // `kPointersFromHereAreInterestingMask` and
    // `kPointersToHereAreInterestingMask` in
    // `src/compiler/<arch>/code-generator-<arch>.cc` before calling this stub,
    // which serves as the cross generation checking.
    TNode<IntPtrT> slot = UncheckedCast<IntPtrT>(Parameter(Descriptor::kSlot));
    Branch(IsMarking(), &test_old_to_young_flags, &store_buffer_exit);

    BIND(&test_old_to_young_flags);
    {
      // TODO(ishell): do a new-space range check instead.
      TNode<IntPtrT> value =
          BitcastTaggedToWord(Load(MachineType::TaggedPointer(), slot));

      // TODO(albertnetymk): Try to cache the page flag for value and object,
      // instead of calling IsPageFlagSet each time.
      TNode<BoolT> value_is_young =
          IsPageFlagSet(value, MemoryChunk::kIsInYoungGenerationMask);
      GotoIfNot(value_is_young, &incremental_wb);

      TNode<IntPtrT> object =
          BitcastTaggedToWord(Parameter(Descriptor::kObject));
      TNode<BoolT> object_is_young =
          IsPageFlagSet(object, MemoryChunk::kIsInYoungGenerationMask);
      // Old->young writes need the store buffer; young->young ones only need
      // the incremental barrier.
      Branch(object_is_young, &incremental_wb, &store_buffer_incremental_wb);
    }

    BIND(&store_buffer_exit);
    {
      Node* isolate_constant =
          ExternalConstant(ExternalReference::isolate_address(isolate()));
      Node* fp_mode = Parameter(Descriptor::kFPMode);
      InsertToStoreBufferAndGoto(isolate_constant, slot, fp_mode, &exit);
    }

    BIND(&store_buffer_incremental_wb);
    {
      Node* isolate_constant =
          ExternalConstant(ExternalReference::isolate_address(isolate()));
      Node* fp_mode = Parameter(Descriptor::kFPMode);
      InsertToStoreBufferAndGoto(isolate_constant, slot, fp_mode,
                                 &incremental_wb);
    }
  }

  BIND(&incremental_wb);
  {
    Label call_incremental_wb(this);

    TNode<IntPtrT> slot = UncheckedCast<IntPtrT>(Parameter(Descriptor::kSlot));
    TNode<IntPtrT> value =
        BitcastTaggedToWord(Load(MachineType::TaggedPointer(), slot));

    // There are two cases we need to call incremental write barrier.
    // 1) value_is_white
    GotoIf(IsWhite(value), &call_incremental_wb);

    // 2) is_compacting && value_in_EC && obj_isnt_skip
    // is_compacting = true when is_marking = true
    GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
              &exit);

    TNode<IntPtrT> object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
    Branch(
        IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
        &exit, &call_incremental_wb);

    BIND(&call_incremental_wb);
    {
      Node* function = ExternalConstant(
          ExternalReference::incremental_marking_record_write_function());
      Node* isolate_constant =
          ExternalConstant(ExternalReference::isolate_address(isolate()));
      Node* fp_mode = Parameter(Descriptor::kFPMode);
      TNode<IntPtrT> object =
          BitcastTaggedToWord(Parameter(Descriptor::kObject));
      CallCFunction3WithCallerSavedRegistersMode(
          MachineType::Int32(), MachineType::Pointer(), MachineType::Pointer(),
          MachineType::Pointer(), function, object, slot, isolate_constant,
          fp_mode, &exit);
    }
  }

  BIND(&exit);
  Return(TrueConstant());
}
451 :
// Assembler base for the DeleteProperty builtin, providing the slow-mode
// (dictionary properties) deletion path.
class DeletePropertyBaseAssembler : public AccessorAssembler {
 public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  // Removes {name} from the NameDictionary {properties} of {receiver}.
  // Jumps to {notfound} when the name is absent and to {dont_delete} when
  // the property is marked non-configurable; otherwise clears the entry,
  // updates the dictionary's bookkeeping, possibly shrinks it via the
  // runtime, and returns true from the builtin.
  void DeleteDictionaryProperty(TNode<Object> receiver,
                                TNode<NameDictionary> properties,
                                TNode<Name> name, TNode<Context> context,
                                Label* dont_delete, Label* notfound) {
    TVARIABLE(IntPtrT, var_name_index);
    Label dictionary_found(this, &var_name_index);
    NameDictionaryLookup<NameDictionary>(properties, name, &dictionary_found,
                                         &var_name_index, notfound);

    BIND(&dictionary_found);
    TNode<IntPtrT> key_index = var_name_index.value();
    TNode<Uint32T> details =
        LoadDetailsByKeyIndex<NameDictionary>(properties, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);
    // Overwrite the entry itself (see NameDictionary::SetEntry).
    TNode<HeapObject> filler = TheHoleConstant();
    DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kTheHoleValue));
    StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
    StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                         SKIP_WRITE_BARRIER);
    StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
                                           SmiConstant(0));

    // Update bookkeeping information (see NameDictionary::ElementRemoved).
    TNode<Smi> nof = GetNumberOfElements<NameDictionary>(properties);
    TNode<Smi> new_nof = SmiSub(nof, SmiConstant(1));
    SetNumberOfElements<NameDictionary>(properties, new_nof);
    TNode<Smi> num_deleted =
        GetNumberOfDeletedElements<NameDictionary>(properties);
    TNode<Smi> new_deleted = SmiAdd(num_deleted, SmiConstant(1));
    SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);

    // Shrink the dictionary if necessary (see NameDictionary::Shrink).
    // Only shrink when the element count drops to a quarter of capacity
    // and the dictionary is reasonably large.
    Label shrinking_done(this);
    TNode<Smi> capacity = GetCapacity<NameDictionary>(properties);
    GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
    GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);
    CallRuntime(Runtime::kShrinkPropertyDictionary, context, receiver);
    Goto(&shrinking_done);
    BIND(&shrinking_done);

    Return(TrueConstant());
  }
};
502 :
// Implements the `delete` operator: fast path for dictionary-mode receivers,
// with bailouts to the DeleteProperty runtime function for Smis, exotic
// receivers, integer indices, fast-mode properties, and strict-mode failures.
TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
  TNode<Object> receiver = CAST(Parameter(Descriptor::kObject));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Smi> language_mode = CAST(Parameter(Descriptor::kLanguageMode));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));

  VARIABLE(var_index, MachineType::PointerRepresentation());
  VARIABLE(var_unique, MachineRepresentation::kTagged, key);
  Label if_index(this), if_unique_name(this), if_notunique(this),
      if_notfound(this), slow(this);

  GotoIf(TaggedIsSmi(receiver), &slow);
  TNode<Map> receiver_map = LoadMap(CAST(receiver));
  TNode<Int32T> instance_type = LoadMapInstanceType(receiver_map);
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), &slow);
  // Classify {key} as an integer index, a unique name, or a
  // not-yet-internalized string.
  TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
            &if_notunique);

  BIND(&if_index);
  {
    Comment("integer index");
    Goto(&slow);  // TODO(jkummerow): Implement more smarts here.
  }

  BIND(&if_unique_name);
  {
    Comment("key is unique name");
    TNode<Name> unique = CAST(var_unique.value());
    CheckForAssociatedProtector(unique, &slow);

    Label dictionary(this), dont_delete(this);
    GotoIf(IsDictionaryMap(receiver_map), &dictionary);

    // Fast properties need to clear recorded slots, which can only be done
    // in C++.
    Goto(&slow);

    BIND(&dictionary);
    {
      InvalidateValidityCellIfPrototype(receiver_map);

      TNode<NameDictionary> properties =
          CAST(LoadSlowProperties(CAST(receiver)));
      DeleteDictionaryProperty(receiver, properties, unique, context,
                               &dont_delete, &if_notfound);
    }

    BIND(&dont_delete);
    {
      // Sloppy mode returns false for a non-deletable property; strict mode
      // must throw, which the runtime call handles.
      STATIC_ASSERT(LanguageModeSize == 2);
      GotoIf(SmiNotEqual(language_mode, SmiConstant(LanguageMode::kSloppy)),
             &slow);
      Return(FalseConstant());
    }
  }

  BIND(&if_notunique);
  {
    // If the string was not found in the string table, then no object can
    // have a property with that name.
    TryInternalizeString(key, &if_index, &var_index, &if_unique_name,
                         &var_unique, &if_notfound, &slow);
  }

  BIND(&if_notfound);
  Return(TrueConstant());

  BIND(&slow);
  {
    TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
                    language_mode);
  }
}
576 :
577 336 : TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
578 56 : Node* receiver = Parameter(Descriptor::kReceiver);
579 56 : Node* context = Parameter(Descriptor::kContext);
580 :
581 112 : Label if_empty(this), if_runtime(this, Label::kDeferred);
582 56 : Node* receiver_map = CheckEnumCache(receiver, &if_empty, &if_runtime);
583 56 : Return(receiver_map);
584 :
585 56 : BIND(&if_empty);
586 56 : Return(EmptyFixedArrayConstant());
587 :
588 56 : BIND(&if_runtime);
589 56 : TailCallRuntime(Runtime::kForInEnumerate, context, receiver);
590 56 : }
591 :
592 392 : TF_BUILTIN(ForInFilter, CodeStubAssembler) {
593 56 : Node* key = Parameter(Descriptor::kKey);
594 56 : Node* object = Parameter(Descriptor::kObject);
595 56 : Node* context = Parameter(Descriptor::kContext);
596 :
597 : CSA_ASSERT(this, IsString(key));
598 :
599 112 : Label if_true(this), if_false(this);
600 56 : TNode<Oddball> result = HasProperty(context, object, key, kForInHasProperty);
601 56 : Branch(IsTrue(result), &if_true, &if_false);
602 :
603 56 : BIND(&if_true);
604 56 : Return(key);
605 :
606 56 : BIND(&if_false);
607 56 : Return(UndefinedConstant());
608 56 : }
609 :
610 336 : TF_BUILTIN(SameValue, CodeStubAssembler) {
611 56 : Node* lhs = Parameter(Descriptor::kLeft);
612 56 : Node* rhs = Parameter(Descriptor::kRight);
613 :
614 112 : Label if_true(this), if_false(this);
615 56 : BranchIfSameValue(lhs, rhs, &if_true, &if_false);
616 :
617 56 : BIND(&if_true);
618 56 : Return(TrueConstant());
619 :
620 56 : BIND(&if_false);
621 56 : Return(FalseConstant());
622 56 : }
623 :
// Assembler for the C++-builtin adaptor stubs defined below.
class InternalBuiltinsAssembler : public CodeStubAssembler {
 public:
  explicit InternalBuiltinsAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // Shared body of the AdaptorWithExitFrame / AdaptorWithBuiltinExitFrame
  // builtins; {exit_frame_type} selects which exit frame CEntry builds.
  template <typename Descriptor>
  void GenerateAdaptorWithExitFrameType(
      Builtins::ExitFrameType exit_frame_type);
};
633 :
// Bridges a JS call into a C++ builtin: switches to the callee's context,
// adjusts the argument count for the extra stack arguments, and tail-calls
// the matching CEntry stub.
template <typename Descriptor>
void InternalBuiltinsAssembler::GenerateAdaptorWithExitFrameType(
    Builtins::ExitFrameType exit_frame_type) {
  TNode<JSFunction> target = CAST(Parameter(Descriptor::kTarget));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
  TNode<WordT> c_function =
      UncheckedCast<WordT>(Parameter(Descriptor::kCFunction));

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  TNode<Context> context =
      CAST(LoadObjectField(target, JSFunction::kContextOffset));

  // Update arguments count for CEntry to contain the number of arguments
  // including the receiver and the extra arguments.
  TNode<Int32T> argc =
      UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
  argc = Int32Add(
      argc,
      Int32Constant(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));

  TNode<Code> code = HeapConstant(
      CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
                          exit_frame_type == Builtins::BUILTIN_EXIT));

  // Unconditionally push argc, target and new target as extra stack arguments.
  // They will be used by stack frame iterators when constructing stack trace.
  TailCallStub(CEntry1ArgvOnStackDescriptor{},  // descriptor
               code, context,       // standard arguments for TailCallStub
               argc, c_function,    // register arguments
               TheHoleConstant(),   // additional stack argument 1 (padding)
               SmiFromInt32(argc),  // additional stack argument 2
               target,              // additional stack argument 3
               new_target);         // additional stack argument 4
}
674 :
// Adaptor using a plain EXIT frame (see GenerateAdaptorWithExitFrameType).
TF_BUILTIN(AdaptorWithExitFrame, InternalBuiltinsAssembler) {
  GenerateAdaptorWithExitFrameType<Descriptor>(Builtins::EXIT);
}
678 :
// Adaptor using a BUILTIN_EXIT frame (see GenerateAdaptorWithExitFrameType).
TF_BUILTIN(AdaptorWithBuiltinExitFrame, InternalBuiltinsAssembler) {
  GenerateAdaptorWithExitFrameType<Descriptor>(Builtins::BUILTIN_EXIT);
}
682 :
// Slow-path allocation of {requested_size} bytes in the young generation,
// delegated to the runtime.
TF_BUILTIN(AllocateInYoungGeneration, CodeStubAssembler) {
  TNode<IntPtrT> requested_size =
      UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));

  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size));
}
690 :
// Slow-path allocation of {requested_size} bytes in the old generation.
// NOTE(review): the trailing SmiConstant(0) presumably encodes default
// allocation flags — confirm against Runtime_AllocateInOldGeneration.
TF_BUILTIN(AllocateInOldGeneration, CodeStubAssembler) {
  TNode<IntPtrT> requested_size =
      UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));

  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), SmiConstant(0));
}
698 :
// Aborts execution with a Smi message id, via the runtime.
TF_BUILTIN(Abort, CodeStubAssembler) {
  TNode<Smi> message_id = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbort, NoContextConstant(), message_id);
}
703 :
// Aborts execution with a String message, via the runtime.
TF_BUILTIN(AbortJS, CodeStubAssembler) {
  TNode<String> message = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbortJS, NoContextConstant(), message);
}
708 :
// CEntry stub variants. Each generator instantiates Generate_CEntry with one
// fixed combination of (result size, FP-register saving, argv location,
// builtin-exit-frame flag) — encoded in the function name — so callers can
// reference the exact stub they need without runtime dispatch.
void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return1_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, true);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return2_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, true);
}
760 :
#if !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)
// Placeholder for architectures without a specialized memcpy builtin:
// calling it traps via the Illegal builtin.
void Builtins::Generate_MemCopyUint8Uint8(MacroAssembler* masm) {
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
#endif  // !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)
766 :
#ifndef V8_TARGET_ARCH_ARM
// Placeholder on non-ARM targets: calling it traps via the Illegal builtin.
void Builtins::Generate_MemCopyUint16Uint8(MacroAssembler* masm) {
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
#endif  // V8_TARGET_ARCH_ARM
772 :
#ifndef V8_TARGET_ARCH_IA32
// Placeholder on non-IA32 targets: calling it traps via the Illegal builtin.
void Builtins::Generate_MemMove(MacroAssembler* masm) {
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
#endif  // V8_TARGET_ARCH_IA32
778 :
779 : // ES6 [[Get]] operation.
// Walks the prototype chain of {object} looking up {key}; returns the
// property value, undefined when not found, and delegates proxies and other
// hard cases to dedicated builtins / the runtime.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  // Per-holder named-property lookup; on success returns the value directly
  // from inside the prototype walk.
  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
      [=](Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* unique_name, Label* next_holder,
          Label* if_bailout) {
        VARIABLE(var_value, MachineRepresentation::kTagged);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, holder, holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  // Indexed (element) lookup always bails out to the slow path.
  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
      [=](Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* index, Label* next_holder,
          Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Return(UndefinedConstant());

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetProperty, context, object, key);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    Node* name = CallBuiltin(Builtins::kToName, context, key);

    // The {object} is a JSProxy instance, look up the {name} on it, passing
    // {object} both as receiver and holder. If {name} is absent we can safely
    // return undefined from here.
    TailCallBuiltin(Builtins::kProxyGetProperty, context, object, name, object,
                    SmiConstant(OnNonExistent::kReturnUndefined));
  }
}
831 :
832 : // ES6 [[Set]] operation.
// Stores {value} at {key} on {receiver} with strict-mode semantics,
// delegating to the shared keyed-store generator.
TF_BUILTIN(SetProperty, CodeStubAssembler) {
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> receiver = CAST(Parameter(Descriptor::kReceiver));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Object> value = CAST(Parameter(Descriptor::kValue));

  KeyedStoreGenericGenerator::SetProperty(state(), context, receiver, key,
                                          value, LanguageMode::kStrict);
}
842 :
843 : // ES6 CreateDataProperty(), specialized for the case where objects are still
844 : // being initialized, and have not yet been made accessible to the user. Thus,
845 : // any operation here should be unobservable until after the object has been
846 : // returned.
// Stores {value} at {key} on {receiver} during object-literal
// initialization, delegating to the literal-specific keyed-store generator
// (see the rationale in the comment above this builtin).
TF_BUILTIN(SetPropertyInLiteral, CodeStubAssembler) {
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<JSObject> receiver = CAST(Parameter(Descriptor::kReceiver));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Object> value = CAST(Parameter(Descriptor::kValue));

  KeyedStoreGenericGenerator::SetPropertyInLiteral(state(), context, receiver,
                                                   key, value);
}
856 :
857 : } // namespace internal
858 87414 : } // namespace v8
|