Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/builtins/builtins-utils-gen.h"
6 : #include "src/builtins/builtins.h"
7 : #include "src/code-stub-assembler.h"
8 : #include "src/heap/heap-inl.h"
9 : #include "src/macro-assembler.h"
10 : #include "src/objects/shared-function-info.h"
11 : #include "src/runtime/runtime.h"
12 :
13 : namespace v8 {
14 : namespace internal {
15 :
16 : // -----------------------------------------------------------------------------
17 : // Interrupt and stack checks.
18 :
// Tail-calls into the runtime to service a pending interrupt request
// (e.g. a stack-guard-triggered interruption raised while JS was running).
void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kInterrupt);
}
22 :
// Tail-calls into the runtime to perform a stack-guard check; the runtime
// decides whether this is a real stack overflow or an interrupt request.
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard);
}
26 :
27 : // -----------------------------------------------------------------------------
28 : // TurboFan support builtins.
29 :
30 93 : TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
31 : Node* object = Parameter(Descriptor::kObject);
32 :
33 : // Load the {object}s elements.
34 31 : Node* source = LoadObjectField(object, JSObject::kElementsOffset);
35 31 : Node* target = CloneFixedArray(source, ExtractFixedArrayFlag::kFixedArrays);
36 31 : StoreObjectField(object, JSObject::kElementsOffset, target);
37 31 : Return(target);
38 31 : }
39 :
40 93 : TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
41 : Node* object = Parameter(Descriptor::kObject);
42 : Node* key = Parameter(Descriptor::kKey);
43 : Node* context = Parameter(Descriptor::kContext);
44 :
45 : Label runtime(this, Label::kDeferred);
46 62 : Node* elements = LoadElements(object);
47 : elements = TryGrowElementsCapacity(object, elements, PACKED_DOUBLE_ELEMENTS,
48 31 : key, &runtime);
49 31 : Return(elements);
50 :
51 31 : BIND(&runtime);
52 31 : TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
53 31 : }
54 :
55 93 : TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
56 : Node* object = Parameter(Descriptor::kObject);
57 : Node* key = Parameter(Descriptor::kKey);
58 : Node* context = Parameter(Descriptor::kContext);
59 :
60 : Label runtime(this, Label::kDeferred);
61 62 : Node* elements = LoadElements(object);
62 : elements =
63 31 : TryGrowElementsCapacity(object, elements, PACKED_ELEMENTS, key, &runtime);
64 31 : Return(elements);
65 :
66 31 : BIND(&runtime);
67 31 : TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
68 31 : }
69 :
// Allocates the FixedArray backing store for an arguments object and copies
// the actual argument values out of the physical stack {frame}. The first
// {mapped_count} entries (if any) are filled with the-hole instead, and the
// allocation is deferred to the runtime when too large for new space.
TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
  Node* frame = Parameter(Descriptor::kFrame);
  Node* length = SmiToWord(Parameter(Descriptor::kLength));
  Node* mapped_count = SmiToWord(Parameter(Descriptor::kMappedCount));

  // Check if we can allocate in new space.
  ElementsKind kind = PACKED_ELEMENTS;
  int max_elements = FixedArray::GetMaxLengthForNewSpaceAllocation(kind);
  Label if_newspace(this), if_oldspace(this, Label::kDeferred);
  Branch(IntPtrLessThan(length, IntPtrConstant(max_elements)), &if_newspace,
         &if_oldspace);

  BIND(&if_newspace);
  {
    // Prefer EmptyFixedArray in case of non-positive {length} (the {length}
    // can be negative here for rest parameters).
    Label if_empty(this), if_notempty(this);
    Branch(IntPtrLessThanOrEqual(length, IntPtrConstant(0)), &if_empty,
           &if_notempty);

    BIND(&if_empty);
    Return(EmptyFixedArrayConstant());

    BIND(&if_notempty);
    {
      // Allocate a FixedArray in new space.
      Node* result = AllocateFixedArray(kind, length);

      // The elements might be used to back mapped arguments. In that case fill
      // the mapped elements (i.e. the first {mapped_count}) with the hole, but
      // make sure not to overshoot the {length} if some arguments are missing.
      Node* number_of_holes =
          SelectConstant(IntPtrLessThan(mapped_count, length), mapped_count,
                         length, MachineType::PointerRepresentation());
      Node* the_hole = TheHoleConstant();

      // Fill the first elements up to {number_of_holes} with the hole.
      VARIABLE(var_index, MachineType::PointerRepresentation());
      Label loop1(this, &var_index), done_loop1(this);
      var_index.Bind(IntPtrConstant(0));
      Goto(&loop1);
      BIND(&loop1);
      {
        // Load the current {index}.
        Node* index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, number_of_holes), &done_loop1);

        // Store the hole into the {result}.
        StoreFixedArrayElement(result, index, the_hole, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
        Goto(&loop1);
      }
      BIND(&done_loop1);

      // Compute the effective {offset} into the {frame}.
      Node* offset = IntPtrAdd(length, IntPtrConstant(1));

      // Copy the parameters from {frame} (starting at {offset}) to {result}.
      // Note: {var_index} deliberately carries over from loop1, so copying
      // starts at {number_of_holes} — right after the hole-filled prefix.
      Label loop2(this, &var_index), done_loop2(this);
      Goto(&loop2);
      BIND(&loop2);
      {
        // Load the current {index}.
        Node* index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, length), &done_loop2);

        // Load the parameter at the given {index}: it lives at
        // ({offset} - {index}) words relative to {frame}.
        Node* value = Load(MachineType::AnyTagged(), frame,
                           TimesPointerSize(IntPtrSub(offset, index)));

        // Store the {value} into the {result}.
        StoreFixedArrayElement(result, index, value, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
        Goto(&loop2);
      }
      BIND(&done_loop2);

      Return(result);
    }
  }

  BIND(&if_oldspace);
  {
    // Allocate in old space (or large object space).
    TailCallRuntime(Runtime::kNewArgumentsElements, NoContextConstant(),
                    BitcastWordToTagged(frame), SmiFromWord(length),
                    SmiFromWord(mapped_count));
  }
}
167 :
// Identity builtin: simply returns its receiver argument unchanged.
TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
  Return(Parameter(Descriptor::kReceiver));
}
171 :
// Assembler providing the helpers used by the RecordWrite (write barrier)
// builtin below: page-flag tests, marking-bitmap access, store-buffer
// insertion, and C-call wrappers that preserve caller-saved registers.
class RecordWriteCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit RecordWriteCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // Loads the heap's is-marking flag; non-zero while incremental marking
  // is active.
  Node* IsMarking() {
    Node* is_marking_addr = ExternalConstant(
        ExternalReference::heap_is_marking_flag_address(this->isolate()));
    return Load(MachineType::Uint8(), is_marking_addr);
  }

  // Returns true iff any bit of {mask} is set in the flags word of the
  // page (MemoryChunk) containing {object}.
  Node* IsPageFlagSet(Node* object, int mask) {
    Node* page = WordAnd(object, IntPtrConstant(~Page::kPageAlignmentMask));
    Node* flags = Load(MachineType::Pointer(), page,
                       IntPtrConstant(MemoryChunk::kFlagsOffset));
    return WordNotEqual(WordAnd(flags, IntPtrConstant(mask)),
                        IntPtrConstant(0));
  }

  // Jumps to {not_black} unless {object}'s two mark bits encode black
  // ("11"). The second bit may live in bit 0 of the next bitmap cell when
  // the first bit is the topmost bit of its cell.
  void GotoIfNotBlack(Node* object, Label* not_black) {
    Label exit(this);
    Label* black = &exit;

    DCHECK_EQ(strcmp(Marking::kBlackBitPattern, "11"), 0);

    Node* cell;
    Node* mask;

    GetMarkBit(object, &cell, &mask);
    mask = TruncateWordToWord32(mask);

    Node* bits = Load(MachineType::Int32(), cell);
    Node* bit_0 = Word32And(bits, mask);

    // First mark bit clear => cannot be black.
    GotoIf(Word32Equal(bit_0, Int32Constant(0)), not_black);

    // Advance the mask to the second mark bit.
    mask = Word32Shl(mask, Int32Constant(1));

    Label word_boundary(this), in_word(this);

    // If mask becomes zero, we know mask was `1 << 31`, i.e., the bit is on
    // word boundary. Otherwise, the bit is within the word.
    Branch(Word32Equal(mask, Int32Constant(0)), &word_boundary, &in_word);

    BIND(&word_boundary);
    {
      // The second mark bit is bit 0 of the next 32-bit bitmap cell.
      Node* bit_1 = Word32And(
          Load(MachineType::Int32(), IntPtrAdd(cell, IntPtrConstant(4))),
          Int32Constant(1));
      Branch(Word32Equal(bit_1, Int32Constant(0)), not_black, black);
    }

    BIND(&in_word);
    {
      Branch(Word32Equal(Word32And(bits, mask), Int32Constant(0)), not_black,
             black);
    }

    BIND(&exit);
  }

  // Returns true iff {object}'s mark bits encode white ("00").
  Node* IsWhite(Node* object) {
    DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
    Node* cell;
    Node* mask;
    GetMarkBit(object, &cell, &mask);
    // Non-white has 1 for the first bit, so we only need to check for the first
    // bit.
    return WordEqual(WordAnd(Load(MachineType::Pointer(), cell), mask),
                     IntPtrConstant(0));
  }

  // Computes the bitmap {cell} address and bit {mask} locating {object}'s
  // first mark bit inside its page's marking bitmap (which starts right
  // after the MemoryChunk header).
  void GetMarkBit(Node* object, Node** cell, Node** mask) {
    Node* page = WordAnd(object, IntPtrConstant(~Page::kPageAlignmentMask));

    {
      // Temp variable to calculate cell offset in bitmap.
      Node* r0;
      int shift = Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 -
                  Bitmap::kBytesPerCellLog2;
      r0 = WordShr(object, IntPtrConstant(shift));
      r0 = WordAnd(r0, IntPtrConstant((Page::kPageAlignmentMask >> shift) &
                                      ~(Bitmap::kBytesPerCell - 1)));
      *cell = IntPtrAdd(IntPtrAdd(page, r0),
                        IntPtrConstant(MemoryChunk::kHeaderSize));
    }
    {
      // Temp variable to calculate bit offset in cell.
      Node* r1;
      r1 = WordShr(object, IntPtrConstant(kPointerSizeLog2));
      r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
      // It seems that LSB(e.g. cl) is automatically used, so no manual masking
      // is needed. Uncomment the following line otherwise.
      // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
      *mask = WordShl(IntPtrConstant(1), r1);
    }
  }

  // True iff the Smi-encoded save-FP-registers {mode} is kDontSaveFPRegs.
  Node* ShouldSkipFPRegs(Node* mode) {
    return WordEqual(mode, SmiConstant(kDontSaveFPRegs));
  }

  // True iff the Smi-encoded {remembered_set} action is EMIT_REMEMBERED_SET.
  Node* ShouldEmitRememberSet(Node* remembered_set) {
    return WordEqual(remembered_set, SmiConstant(EMIT_REMEMBERED_SET));
  }

  // Calls {function}(arg0) with caller-saved registers preserved, choosing
  // at runtime via {mode} whether FP registers are saved too, then jumps
  // to {next}.
  void CallCFunction1WithCallerSavedRegistersMode(MachineType return_type,
                                                  MachineType arg0_type,
                                                  Node* function, Node* arg0,
                                                  Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kSaveFPRegs);
      Goto(next);
    }
  }

  // Three-argument variant of
  // CallCFunction1WithCallerSavedRegistersMode above.
  void CallCFunction3WithCallerSavedRegistersMode(
      MachineType return_type, MachineType arg0_type, MachineType arg1_type,
      MachineType arg2_type, Node* function, Node* arg0, Node* arg1, Node* arg2,
      Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kSaveFPRegs);
      Goto(next);
    }
  }

  // Appends {slot} to the store buffer and jumps to {next}. When the bumped
  // top pointer hits the kStoreBufferMask boundary, the store-buffer
  // overflow C function is invoked (via the register-saving wrapper) first.
  void InsertToStoreBufferAndGoto(Node* isolate, Node* slot, Node* mode,
                                  Label* next) {
    Node* store_buffer_top_addr =
        ExternalConstant(ExternalReference::store_buffer_top(this->isolate()));
    Node* store_buffer_top =
        Load(MachineType::Pointer(), store_buffer_top_addr);
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), store_buffer_top,
                        slot);
    Node* new_store_buffer_top =
        IntPtrAdd(store_buffer_top, IntPtrConstant(kPointerSize));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(),
                        store_buffer_top_addr, new_store_buffer_top);

    Node* test = WordAnd(new_store_buffer_top,
                         IntPtrConstant(StoreBuffer::kStoreBufferMask));

    Label overflow(this);
    Branch(WordEqual(test, IntPtrConstant(0)), &overflow, next);

    BIND(&overflow);
    {
      Node* function = ExternalConstant(
          ExternalReference::store_buffer_overflow_function(this->isolate()));
      CallCFunction1WithCallerSavedRegistersMode(MachineType::Int32(),
                                                 MachineType::Pointer(),
                                                 function, isolate, mode, next);
    }
  }
};
351 :
// Out-of-line write barrier stub: records a store into {slot} of {object},
// emitting a store-buffer (remembered set) entry and/or invoking the
// incremental-marking record-write C function as needed. Always returns
// true.
TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
  Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
  Node* slot = Parameter(Descriptor::kSlot);
  Node* isolate = Parameter(Descriptor::kIsolate);
  Node* remembered_set = Parameter(Descriptor::kRememberedSet);
  Node* fp_mode = Parameter(Descriptor::kFPMode);

  // The value that was just stored into the slot.
  Node* value = Load(MachineType::Pointer(), slot);

  Label generational_wb(this);
  Label incremental_wb(this);
  Label exit(this);

  Branch(ShouldEmitRememberSet(remembered_set), &generational_wb,
         &incremental_wb);

  BIND(&generational_wb);
  {
    Label test_old_to_new_flags(this);
    Label store_buffer_exit(this), store_buffer_incremental_wb(this);
    // When incremental marking is not on, we skip cross generation pointer
    // checking here, because there are checks for
    // `kPointersFromHereAreInterestingMask` and
    // `kPointersToHereAreInterestingMask` in
    // `src/compiler/<arch>/code-generator-<arch>.cc` before calling this stub,
    // which serves as the cross generation checking.
    Branch(IsMarking(), &test_old_to_new_flags, &store_buffer_exit);

    BIND(&test_old_to_new_flags);
    {
      // TODO(albertnetymk): Try to cache the page flag for value and object,
      // instead of calling IsPageFlagSet each time.
      Node* value_in_new_space =
          IsPageFlagSet(value, MemoryChunk::kIsInNewSpaceMask);
      GotoIfNot(value_in_new_space, &incremental_wb);

      // Only an old->new pointer needs a store buffer entry; a new->new
      // store goes straight to the incremental barrier.
      Node* object_in_new_space =
          IsPageFlagSet(object, MemoryChunk::kIsInNewSpaceMask);
      GotoIf(object_in_new_space, &incremental_wb);

      Goto(&store_buffer_incremental_wb);
    }

    BIND(&store_buffer_exit);
    { InsertToStoreBufferAndGoto(isolate, slot, fp_mode, &exit); }

    BIND(&store_buffer_incremental_wb);
    { InsertToStoreBufferAndGoto(isolate, slot, fp_mode, &incremental_wb); }
  }

  BIND(&incremental_wb);
  {
    Label call_incremental_wb(this);

#ifndef V8_CONCURRENT_MARKING
    GotoIfNotBlack(object, &exit);
#endif

    // There are two cases we need to call incremental write barrier.
    // 1) value_is_white
    GotoIf(IsWhite(value), &call_incremental_wb);

    // 2) is_compacting && value_in_EC && obj_isnt_skip
    // is_compacting = true when is_marking = true
    GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
              &exit);
    GotoIf(
        IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
        &exit);

    Goto(&call_incremental_wb);

    BIND(&call_incremental_wb);
    {
      Node* function = ExternalConstant(
          ExternalReference::incremental_marking_record_write_function(
              this->isolate()));
      CallCFunction3WithCallerSavedRegistersMode(
          MachineType::Int32(), MachineType::Pointer(), MachineType::Pointer(),
          MachineType::Pointer(), function, object, slot, isolate, fp_mode,
          &exit);
    }
  }

  BIND(&exit);
  Return(TrueConstant());
}
439 :
// Assembler providing the dictionary-mode deletion helper used by the
// DeleteProperty builtin below.
class DeletePropertyBaseAssembler : public CodeStubAssembler {
 public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // Removes {name} from the NameDictionary {properties} of {receiver} and
  // returns true. Jumps to {notfound} when the name is absent, and to
  // {dont_delete} when the entry's details carry the DONT_DELETE attribute.
  void DeleteDictionaryProperty(Node* receiver, Node* properties, Node* name,
                                Node* context, Label* dont_delete,
                                Label* notfound) {
    VARIABLE(var_name_index, MachineType::PointerRepresentation());
    Label dictionary_found(this, &var_name_index);
    NameDictionaryLookup<NameDictionary>(properties, name, &dictionary_found,
                                         &var_name_index, notfound);

    BIND(&dictionary_found);
    Node* key_index = var_name_index.value();
    Node* details =
        LoadDetailsByKeyIndex<NameDictionary>(properties, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);
    // Overwrite the entry itself (see NameDictionary::SetEntry).
    // The-hole is immortal/immovable, so skipping the write barrier is safe.
    Node* filler = TheHoleConstant();
    DCHECK(Heap::RootIsImmortalImmovable(Heap::kTheHoleValueRootIndex));
    StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
    StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                         SKIP_WRITE_BARRIER);
    StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
                                           SmiConstant(0));

    // Update bookkeeping information (see NameDictionary::ElementRemoved).
    Node* nof = GetNumberOfElements<NameDictionary>(properties);
    Node* new_nof = SmiSub(nof, SmiConstant(1));
    SetNumberOfElements<NameDictionary>(properties, new_nof);
    Node* num_deleted = GetNumberOfDeletedElements<NameDictionary>(properties);
    Node* new_deleted = SmiAdd(num_deleted, SmiConstant(1));
    SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);

    // Shrink the dictionary if necessary (see NameDictionary::Shrink):
    // only when occupancy drops to a quarter of capacity and at least 16
    // elements remain below the threshold.
    Label shrinking_done(this);
    Node* capacity = GetCapacity<NameDictionary>(properties);
    GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
    GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);
    CallRuntime(Runtime::kShrinkPropertyDictionary, context, receiver);
    Goto(&shrinking_done);
    BIND(&shrinking_done);

    Return(TrueConstant());
  }
};
488 :
// Implements the `delete object[key]` operation. Fast path handles
// dictionary-mode receivers keyed by a unique name; everything else tail
// calls Runtime::kDeleteProperty. Returns true when the property was
// deleted or never existed, false for a DONT_DELETE property in sloppy
// mode.
TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
  Node* receiver = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* language_mode = Parameter(Descriptor::kLanguageMode);
  Node* context = Parameter(Descriptor::kContext);

  VARIABLE(var_index, MachineType::PointerRepresentation());
  VARIABLE(var_unique, MachineRepresentation::kTagged, key);
  Label if_index(this), if_unique_name(this), if_notunique(this),
      if_notfound(this), slow(this);

  GotoIf(TaggedIsSmi(receiver), &slow);
  Node* receiver_map = LoadMap(receiver);
  Node* instance_type = LoadMapInstanceType(receiver_map);
  // Receivers at or below LAST_CUSTOM_ELEMENTS_RECEIVER require special
  // element handling; defer those to the runtime.
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
         &slow);
  TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
            &if_notunique);

  BIND(&if_index);
  {
    Comment("integer index");
    Goto(&slow);  // TODO(jkummerow): Implement more smarts here.
  }

  BIND(&if_unique_name);
  {
    Comment("key is unique name");
    Node* unique = var_unique.value();
    CheckForAssociatedProtector(unique, &slow);

    Label dictionary(this), dont_delete(this);
    GotoIf(IsDictionaryMap(receiver_map), &dictionary);

    // Fast properties need to clear recorded slots, which can only be done
    // in C++.
    Goto(&slow);

    BIND(&dictionary);
    {
      Node* properties = LoadSlowProperties(receiver);
      DeleteDictionaryProperty(receiver, properties, unique, context,
                               &dont_delete, &if_notfound);
    }

    BIND(&dont_delete);
    {
      // Sloppy mode simply answers false; any other language mode is
      // handled by the runtime.
      STATIC_ASSERT(LanguageModeSize == 2);
      GotoIf(SmiNotEqual(language_mode,
                         SmiConstant(Smi::FromEnum(LanguageMode::kSloppy))),
             &slow);
      Return(FalseConstant());
    }
  }

  BIND(&if_notunique);
  {
    // If the string was not found in the string table, then no object can
    // have a property with that name.
    TryInternalizeString(key, &if_index, &var_index, &if_unique_name,
                         &var_unique, &if_notfound, &slow);
  }

  // Deleting a property that does not exist succeeds trivially.
  BIND(&if_notfound);
  Return(TrueConstant());

  BIND(&slow);
  {
    TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
                    language_mode);
  }
}
562 :
563 93 : TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
564 : Node* receiver = Parameter(Descriptor::kReceiver);
565 : Node* context = Parameter(Descriptor::kContext);
566 :
567 31 : Label if_empty(this), if_runtime(this, Label::kDeferred);
568 31 : Node* receiver_map = CheckEnumCache(receiver, &if_empty, &if_runtime);
569 31 : Return(receiver_map);
570 :
571 31 : BIND(&if_empty);
572 62 : Return(EmptyFixedArrayConstant());
573 :
574 31 : BIND(&if_runtime);
575 31 : TailCallRuntime(Runtime::kForInEnumerate, context, receiver);
576 31 : }
577 :
578 93 : TF_BUILTIN(ForInFilter, CodeStubAssembler) {
579 : Node* key = Parameter(Descriptor::kKey);
580 : Node* object = Parameter(Descriptor::kObject);
581 : Node* context = Parameter(Descriptor::kContext);
582 :
583 : CSA_ASSERT(this, IsString(key));
584 :
585 31 : Label if_true(this), if_false(this);
586 31 : Node* result = HasProperty(object, key, context, kForInHasProperty);
587 62 : Branch(IsTrue(result), &if_true, &if_false);
588 :
589 31 : BIND(&if_true);
590 31 : Return(key);
591 :
592 31 : BIND(&if_false);
593 93 : Return(UndefinedConstant());
594 31 : }
595 :
596 : } // namespace internal
597 : } // namespace v8
|