Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/ic/keyed-store-generic.h"
6 :
7 : #include "src/code-factory.h"
8 : #include "src/code-stub-assembler.h"
9 : #include "src/contexts.h"
10 : #include "src/feedback-vector.h"
11 : #include "src/ic/accessor-assembler.h"
12 : #include "src/interface-descriptors.h"
13 : #include "src/isolate.h"
14 : #include "src/objects-inl.h"
15 :
16 : namespace v8 {
17 : namespace internal {
18 :
19 : using compiler::Node;
20 :
// Assembler that emits the generic (megamorphic) keyed-store handler and the
// uninitialized StoreIC handler on top of the AccessorAssembler machinery.
class KeyedStoreGenericAssembler : public AccessorAssembler {
 public:
  explicit KeyedStoreGenericAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  // Emits the generic keyed-store stub body.
  void KeyedStoreGeneric();

  // Emits the StoreIC handler used before any feedback has been collected.
  void StoreIC_Uninitialized();

 private:
  // How (if at all) an element store should update a JSArray's length.
  enum UpdateLength {
    kDontChangeLength,       // store within the current length
    kIncrementLengthByOne,   // store exactly at index == length
    kBumpLengthWithGap       // store past length, introducing holes
  };

  // Whether a miss in the descriptor lookup should fall back to the stub
  // cache or go straight to the slow path.
  enum UseStubCache { kUseStubCache, kDontUseStubCache };

  // Stores |value| at |intptr_index| into |receiver|'s elements, jumping to
  // |slow| when the fast paths do not apply.
  void EmitGenericElementStore(Node* receiver, Node* receiver_map,
                               Node* instance_type, Node* intptr_index,
                               Node* value, Node* context, Label* slow);

  // Stores a named property described by |p|, jumping to |slow| on bailout.
  void EmitGenericPropertyStore(Node* receiver, Node* receiver_map,
                                const StoreICParameters* p, Label* slow,
                                UseStubCache use_stub_cache = kUseStubCache);

  // Branches on whether any map on |receiver_map|'s prototype chain has
  // non-fast (or custom) elements.
  void BranchIfPrototypesHaveNonFastElements(Node* receiver_map,
                                             Label* non_fast_elements,
                                             Label* only_fast_elements);

  // Transitions |receiver| from |from_kind| to |to_kind|, migrating the
  // backing store if the representation changes; jumps to |bailout| if the
  // receiver's map is not one of the default array maps.
  void TryRewriteElements(Node* receiver, Node* receiver_map, Node* elements,
                          Node* native_context, ElementsKind from_kind,
                          ElementsKind to_kind, Label* bailout);

  // Stores an element into an existing backing store with enough capacity,
  // handling Smi/object/double element kinds and kind transitions.
  void StoreElementWithCapacity(Node* receiver, Node* receiver_map,
                                Node* elements, Node* elements_kind,
                                Node* intptr_index, Node* value, Node* context,
                                Label* slow, UpdateLength update_length);

  // Writes the new array length (if requested) and returns |value|.
  void MaybeUpdateLengthAndReturn(Node* receiver, Node* index, Node* value,
                                  UpdateLength update_length);

  // Shared helper for the TryChangeToHoleyMap* methods below: checks for the
  // default |packed_kind| map and, on match, installs the |holey_kind| map.
  void TryChangeToHoleyMapHelper(Node* receiver, Node* receiver_map,
                                 Node* native_context, ElementsKind packed_kind,
                                 ElementsKind holey_kind, Label* done,
                                 Label* map_mismatch, Label* bailout);
  void TryChangeToHoleyMap(Node* receiver, Node* receiver_map,
                           Node* current_elements_kind, Node* context,
                           ElementsKind packed_kind, Label* bailout);
  void TryChangeToHoleyMapMulti(Node* receiver, Node* receiver_map,
                                Node* current_elements_kind, Node* context,
                                ElementsKind packed_kind,
                                ElementsKind packed_kind_2, Label* bailout);

  // Walks |receiver_map|'s prototype chain looking for |name|; jumps to
  // |accessor| (filling the accessor variables), |readonly|, or |bailout|,
  // or falls through when the store is safe to perform.
  void LookupPropertyOnPrototypeChain(Node* receiver_map, Node* name,
                                      Label* accessor,
                                      Variable* var_accessor_pair,
                                      Variable* var_accessor_holder,
                                      Label* readonly, Label* bailout);

  // Verifies that |value| matches the field's |representation| and field
  // type; jumps to |bailout| on mismatch.
  void CheckFieldType(Node* descriptors, Node* name_index, Node* representation,
                      Node* value, Label* bailout);
  // Overwrites an existing in-object or backing-store field with |value|,
  // honoring the field's representation; jumps to |slow| when not possible.
  void OverwriteExistingFastProperty(Node* object, Node* object_map,
                                     Node* properties, Node* descriptors,
                                     Node* descriptor_name_index, Node* details,
                                     Node* value, Label* slow);
};
88 :
89 31 : void KeyedStoreGenericGenerator::Generate(compiler::CodeAssemblerState* state) {
90 : KeyedStoreGenericAssembler assembler(state);
91 31 : assembler.KeyedStoreGeneric();
92 31 : }
93 :
94 31 : void StoreICUninitializedGenerator::Generate(
95 : compiler::CodeAssemblerState* state) {
96 : KeyedStoreGenericAssembler assembler(state);
97 31 : assembler.StoreIC_Uninitialized();
98 31 : }
99 :
// Walks the prototype chain starting at |receiver_map|. Jumps to
// |only_fast_elements| when the chain terminates (null prototype) with every
// prototype having fast or empty elements; jumps to |non_fast_elements| as
// soon as a prototype is a custom-elements receiver or has any other
// elements kind.
void KeyedStoreGenericAssembler::BranchIfPrototypesHaveNonFastElements(
    Node* receiver_map, Label* non_fast_elements, Label* only_fast_elements) {
  VARIABLE(var_map, MachineRepresentation::kTagged);
  var_map.Bind(receiver_map);
  Label loop_body(this, &var_map);
  Goto(&loop_body);

  BIND(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    // End of the chain: everything seen so far was fast.
    GotoIf(WordEqual(prototype, NullConstant()), only_fast_elements);
    Node* prototype_map = LoadMap(prototype);
    var_map.Bind(prototype_map);
    Node* instance_type = LoadMapInstanceType(prototype_map);
    // Instance types at or below LAST_CUSTOM_ELEMENTS_RECEIVER (proxies,
    // wrappers, ...) have custom element behavior and are treated as
    // non-fast.
    STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
    STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
    GotoIf(Int32LessThanOrEqual(instance_type,
                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
           non_fast_elements);
    Node* elements_kind = LoadMapElementsKind(prototype_map);
    STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
    // Fast or empty elements: keep walking; anything else is non-fast.
    GotoIf(IsFastElementsKind(elements_kind), &loop_body);
    GotoIf(Word32Equal(elements_kind, Int32Constant(NO_ELEMENTS)), &loop_body);
    Goto(non_fast_elements);
  }
}
127 :
// Transitions |receiver|'s elements from |from_kind| to |to_kind|. Only the
// default JSArray maps (packed |from_kind| or its holey variant) are
// supported; any other map jumps to |bailout|. When the transition crosses
// the Smi/object <-> double boundary the backing store is migrated as well.
void KeyedStoreGenericAssembler::TryRewriteElements(
    Node* receiver, Node* receiver_map, Node* elements, Node* native_context,
    ElementsKind from_kind, ElementsKind to_kind, Label* bailout) {
  DCHECK(IsFastPackedElementsKind(from_kind));
  ElementsKind holey_from_kind = GetHoleyElementsKind(from_kind);
  ElementsKind holey_to_kind = GetHoleyElementsKind(to_kind);
  if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
    // A trailing AllocationMemento means an AllocationSite is tracking this
    // object; bail out so the site can record the transition.
    TrapAllocationMemento(receiver, bailout);
  }
  Label perform_transition(this), check_holey_map(this);
  VARIABLE(var_target_map, MachineRepresentation::kTagged);
  // Check if the receiver has the default |from_kind| map.
  {
    Node* packed_map = LoadJSArrayElementsMap(from_kind, native_context);
    GotoIf(WordNotEqual(receiver_map, packed_map), &check_holey_map);
    var_target_map.Bind(
        LoadContextElement(native_context, Context::ArrayMapIndex(to_kind)));
    Goto(&perform_transition);
  }

  // Check if the receiver has the default |holey_from_kind| map.
  BIND(&check_holey_map);
  {
    Node* holey_map = LoadContextElement(
        native_context, Context::ArrayMapIndex(holey_from_kind));
    GotoIf(WordNotEqual(receiver_map, holey_map), bailout);
    var_target_map.Bind(LoadContextElement(
        native_context, Context::ArrayMapIndex(holey_to_kind)));
    Goto(&perform_transition);
  }

  // Found a supported transition target map, perform the transition!
  BIND(&perform_transition);
  {
    if (IsDoubleElementsKind(from_kind) != IsDoubleElementsKind(to_kind)) {
      // Changing the element representation requires copying the backing
      // store into one of the new representation.
      Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
      GrowElementsCapacity(receiver, elements, from_kind, to_kind, capacity,
                           capacity, INTPTR_PARAMETERS, bailout);
    }
    StoreMap(receiver, var_target_map.value());
  }
}
170 :
// If |receiver_map| is the default JSArray map for |packed_kind|, installs
// the corresponding |holey_kind| map and jumps to |done|. Jumps to
// |map_mismatch| when the map is a different one, and to |bailout| when an
// AllocationMemento is found while the transition is tracked.
void KeyedStoreGenericAssembler::TryChangeToHoleyMapHelper(
    Node* receiver, Node* receiver_map, Node* native_context,
    ElementsKind packed_kind, ElementsKind holey_kind, Label* done,
    Label* map_mismatch, Label* bailout) {
  Node* packed_map = LoadJSArrayElementsMap(packed_kind, native_context);
  GotoIf(WordNotEqual(receiver_map, packed_map), map_mismatch);
  if (AllocationSite::ShouldTrack(packed_kind, holey_kind)) {
    TrapAllocationMemento(receiver, bailout);
  }
  Node* holey_map =
      LoadContextElement(native_context, Context::ArrayMapIndex(holey_kind));
  StoreMap(receiver, holey_map);
  Goto(done);
}
185 :
// Ensures |receiver| has the holey variant of |packed_kind|. No-op when the
// current elements kind is already the holey one; otherwise requires the
// default packed map (anything else jumps to |bailout|).
void KeyedStoreGenericAssembler::TryChangeToHoleyMap(
    Node* receiver, Node* receiver_map, Node* current_elements_kind,
    Node* context, ElementsKind packed_kind, Label* bailout) {
  ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
  Label already_holey(this);

  GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
         &already_holey);
  Node* native_context = LoadNativeContext(context);
  // A map mismatch is a bailout here: only the default packed map may be
  // transitioned on this path.
  TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
                            holey_kind, &already_holey, bailout, bailout);
  BIND(&already_holey);
}
199 :
// Like TryChangeToHoleyMap, but accepts either of two packed kinds
// (|packed_kind| and |packed_kind_2|) and transitions whichever one the
// receiver currently has to its holey variant. Jumps to |bailout| if the
// receiver's map matches neither default packed map.
void KeyedStoreGenericAssembler::TryChangeToHoleyMapMulti(
    Node* receiver, Node* receiver_map, Node* current_elements_kind,
    Node* context, ElementsKind packed_kind, ElementsKind packed_kind_2,
    Label* bailout) {
  ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
  ElementsKind holey_kind_2 = GetHoleyElementsKind(packed_kind_2);
  Label already_holey(this), check_other_kind(this);

  // Already holey in either kind: nothing to do.
  GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
         &already_holey);
  GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind_2)),
         &already_holey);

  Node* native_context = LoadNativeContext(context);
  // First try |packed_kind|; on map mismatch fall through to the second kind.
  TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
                            holey_kind, &already_holey, &check_other_kind,
                            bailout);
  BIND(&check_other_kind);
  TryChangeToHoleyMapHelper(receiver, receiver_map, native_context,
                            packed_kind_2, holey_kind_2, &already_holey,
                            bailout, bailout);
  BIND(&already_holey);
}
223 :
224 558 : void KeyedStoreGenericAssembler::MaybeUpdateLengthAndReturn(
225 : Node* receiver, Node* index, Node* value, UpdateLength update_length) {
226 558 : if (update_length != kDontChangeLength) {
227 1488 : Node* new_length = SmiTag(Signed(IntPtrAdd(index, IntPtrConstant(1))));
228 : StoreObjectFieldNoWriteBarrier(receiver, JSArray::kLengthOffset, new_length,
229 372 : MachineRepresentation::kTagged);
230 : }
231 558 : Return(value);
232 558 : }
233 :
// Stores |value| at |intptr_index| into an elements backing store that is
// known to have sufficient capacity. Dispatches on the backing store map
// (FixedArray vs. FixedDoubleArray vs. COW), performs hole checks against
// prototype-chain setters, and transitions the elements kind when the value
// does not fit the current representation. |update_length| controls whether
// the JSArray length is bumped afterwards. Jumps to |slow| on any bailout.
void KeyedStoreGenericAssembler::StoreElementWithCapacity(
    Node* receiver, Node* receiver_map, Node* elements, Node* elements_kind,
    Node* intptr_index, Node* value, Node* context, Label* slow,
    UpdateLength update_length) {
  if (update_length != kDontChangeLength) {
    CSA_ASSERT(this, InstanceTypeEqual(LoadMapInstanceType(receiver_map),
                                       JS_ARRAY_TYPE));
    // Check if the length property is writable. The fast check is only
    // supported for fast properties.
    GotoIf(IsDictionaryMap(receiver_map), slow);
    // The length property is non-configurable, so it's guaranteed to always
    // be the first property.
    Node* descriptors = LoadMapDescriptors(receiver_map);
    Node* details =
        LoadFixedArrayElement(descriptors, DescriptorArray::ToDetailsIndex(0));
    GotoIf(IsSetSmi(details, PropertyDetails::kAttributesReadOnlyMask), slow);
  }
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
  const int kHeaderSize = FixedArray::kHeaderSize - kHeapObjectTag;

  Label check_double_elements(this), check_cow_elements(this);
  Node* elements_map = LoadMap(elements);
  GotoIf(WordNotEqual(elements_map, LoadRoot(Heap::kFixedArrayMapRootIndex)),
         &check_double_elements);

  // FixedArray backing store -> Smi or object elements.
  {
    Node* offset = ElementOffsetFromIndex(intptr_index, PACKED_ELEMENTS,
                                          INTPTR_PARAMETERS, kHeaderSize);
    // Check if we're about to overwrite the hole. We can safely do that
    // only if there can be no setters on the prototype chain.
    // If we know that we're storing beyond the previous array length, we
    // can skip the hole check (and always assume the hole).
    {
      Label hole_check_passed(this);
      if (update_length == kDontChangeLength) {
        Node* element = Load(MachineType::AnyTagged(), elements, offset);
        GotoIf(WordNotEqual(element, TheHoleConstant()), &hole_check_passed);
      }
      BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
                                            &hole_check_passed);
      BIND(&hole_check_passed);
    }

    // Check if the value we're storing matches the elements_kind. Smis
    // can always be stored.
    {
      Label non_smi_value(this);
      GotoIfNot(TaggedIsSmi(value), &non_smi_value);
      // If we're about to introduce holes, ensure holey elements.
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMapMulti(receiver, receiver_map, elements_kind, context,
                                 PACKED_SMI_ELEMENTS, PACKED_ELEMENTS, slow);
      }
      // Smi stores never need a write barrier.
      StoreNoWriteBarrier(MachineRepresentation::kTagged, elements, offset,
                          value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      BIND(&non_smi_value);
    }

    // Check if we already have object elements; just do the store if so.
    {
      Label must_transition(this);
      STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
      STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
      // Smi kinds (<= HOLEY_SMI_ELEMENTS) can't hold a HeapObject; they
      // need a kind transition below.
      GotoIf(Int32LessThanOrEqual(elements_kind,
                                  Int32Constant(HOLEY_SMI_ELEMENTS)),
             &must_transition);
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
                            PACKED_ELEMENTS, slow);
      }
      Store(elements, offset, value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      BIND(&must_transition);
    }

    // Transition to the required ElementsKind.
    {
      Label transition_to_double(this), transition_to_object(this);
      Node* native_context = LoadNativeContext(context);
      // HeapNumber values go to a double kind, everything else to objects.
      Branch(WordEqual(LoadMap(value), LoadRoot(Heap::kHeapNumberMapRootIndex)),
             &transition_to_double, &transition_to_object);
      BIND(&transition_to_double);
      {
        // If we're adding holes at the end, always transition to a holey
        // elements kind, otherwise try to remain packed.
        ElementsKind target_kind = update_length == kBumpLengthWithGap
                                       ? HOLEY_DOUBLE_ELEMENTS
                                       : PACKED_DOUBLE_ELEMENTS;
        TryRewriteElements(receiver, receiver_map, elements, native_context,
                           PACKED_SMI_ELEMENTS, target_kind, slow);
        // Reload migrated elements.
        Node* double_elements = LoadElements(receiver);
        Node* double_offset =
            ElementOffsetFromIndex(intptr_index, PACKED_DOUBLE_ELEMENTS,
                                   INTPTR_PARAMETERS, kHeaderSize);
        // Make sure we do not store signalling NaNs into double arrays.
        Node* double_value = Float64SilenceNaN(LoadHeapNumberValue(value));
        StoreNoWriteBarrier(MachineRepresentation::kFloat64, double_elements,
                            double_offset, double_value);
        MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
                                   update_length);
      }

      BIND(&transition_to_object);
      {
        // If we're adding holes at the end, always transition to a holey
        // elements kind, otherwise try to remain packed.
        ElementsKind target_kind = update_length == kBumpLengthWithGap
                                       ? HOLEY_ELEMENTS
                                       : PACKED_ELEMENTS;
        TryRewriteElements(receiver, receiver_map, elements, native_context,
                           PACKED_SMI_ELEMENTS, target_kind, slow);
        // The elements backing store didn't change, no reload necessary.
        CSA_ASSERT(this, WordEqual(elements, LoadElements(receiver)));
        Store(elements, offset, value);
        MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
                                   update_length);
      }
    }
  }

  BIND(&check_double_elements);
  Node* fixed_double_array_map = LoadRoot(Heap::kFixedDoubleArrayMapRootIndex);
  GotoIf(WordNotEqual(elements_map, fixed_double_array_map),
         &check_cow_elements);
  // FixedDoubleArray backing store -> double elements.
  {
    Node* offset = ElementOffsetFromIndex(intptr_index, PACKED_DOUBLE_ELEMENTS,
                                          INTPTR_PARAMETERS, kHeaderSize);
    // Check if we're about to overwrite the hole. We can safely do that
    // only if there can be no setters on the prototype chain.
    {
      Label hole_check_passed(this);
      // If we know that we're storing beyond the previous array length, we
      // can skip the hole check (and always assume the hole).
      if (update_length == kDontChangeLength) {
        Label found_hole(this);
        LoadDoubleWithHoleCheck(elements, offset, &found_hole,
                                MachineType::None());
        Goto(&hole_check_passed);
        BIND(&found_hole);
      }
      BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
                                            &hole_check_passed);
      BIND(&hole_check_passed);
    }

    // Try to store the value as a double.
    {
      Label non_number_value(this);
      Node* double_value = TryTaggedToFloat64(value, &non_number_value);

      // Make sure we do not store signalling NaNs into double arrays.
      double_value = Float64SilenceNaN(double_value);
      // If we're about to introduce holes, ensure holey elements.
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
                            PACKED_DOUBLE_ELEMENTS, slow);
      }
      StoreNoWriteBarrier(MachineRepresentation::kFloat64, elements, offset,
                          double_value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      BIND(&non_number_value);
    }

    // Transition to object elements.
    {
      Node* native_context = LoadNativeContext(context);
      ElementsKind target_kind = update_length == kBumpLengthWithGap
                                     ? HOLEY_ELEMENTS
                                     : PACKED_ELEMENTS;
      TryRewriteElements(receiver, receiver_map, elements, native_context,
                         PACKED_DOUBLE_ELEMENTS, target_kind, slow);
      // Reload migrated elements.
      Node* fast_elements = LoadElements(receiver);
      Node* fast_offset = ElementOffsetFromIndex(
          intptr_index, PACKED_ELEMENTS, INTPTR_PARAMETERS, kHeaderSize);
      Store(fast_elements, fast_offset, value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);
    }
  }

  BIND(&check_cow_elements);
  {
    // Copy-on-write arrays can't be written in place.
    // TODO(jkummerow): Use GrowElementsCapacity instead of bailing out.
    Goto(slow);
  }
}
427 :
// Generic indexed-element store: classifies the receiver's elements kind and
// the index relative to length/capacity, then either stores in place (via
// StoreElementWithCapacity with the appropriate UpdateLength mode) or bails
// out to |slow| (growth, dictionary elements, typed arrays, and other
// non-fast kinds are all delegated to the runtime here).
void KeyedStoreGenericAssembler::EmitGenericElementStore(
    Node* receiver, Node* receiver_map, Node* instance_type, Node* intptr_index,
    Node* value, Node* context, Label* slow) {
  Label if_fast(this), if_in_bounds(this), if_increment_length_by_one(this),
      if_bump_length_with_gap(this), if_grow(this), if_nonfast(this),
      if_typed_array(this), if_dictionary(this);
  Node* elements = LoadElements(receiver);
  Node* elements_kind = LoadMapElementsKind(receiver_map);
  Branch(IsFastElementsKind(elements_kind), &if_fast, &if_nonfast);
  BIND(&if_fast);

  Label if_array(this);
  GotoIf(InstanceTypeEqual(instance_type, JS_ARRAY_TYPE), &if_array);
  // Non-JSArray receiver: only the backing store capacity matters.
  {
    Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
    Branch(UintPtrLessThan(intptr_index, capacity), &if_in_bounds, &if_grow);
  }
  BIND(&if_array);
  // JSArray receiver: compare the index against both length and capacity to
  // decide how the length must be updated.
  {
    Node* length = SmiUntag(LoadFastJSArrayLength(receiver));
    GotoIf(UintPtrLessThan(intptr_index, length), &if_in_bounds);
    Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
    GotoIf(UintPtrGreaterThanOrEqual(intptr_index, capacity), &if_grow);
    Branch(WordEqual(intptr_index, length), &if_increment_length_by_one,
           &if_bump_length_with_gap);
  }

  BIND(&if_in_bounds);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kDontChangeLength);
  }

  BIND(&if_increment_length_by_one);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kIncrementLengthByOne);
  }

  BIND(&if_bump_length_with_gap);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kBumpLengthWithGap);
  }

  // Out-of-capacity accesses (index >= capacity) jump here. Additionally,
  // an ElementsKind transition might be necessary.
  // The index can also be negative at this point! Jump to the runtime in that
  // case to convert it to a named property.
  BIND(&if_grow);
  {
    Comment("Grow backing store");
    // TODO(jkummerow): Support inline backing store growth.
    Goto(slow);
  }

  // Any ElementsKind > LAST_FAST_ELEMENTS_KIND jumps here for further
  // dispatch.
  BIND(&if_nonfast);
  {
    STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
    GotoIf(Int32GreaterThanOrEqual(
               elements_kind,
               Int32Constant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
           &if_typed_array);
    GotoIf(Word32Equal(elements_kind, Int32Constant(DICTIONARY_ELEMENTS)),
           &if_dictionary);
    Goto(slow);
  }

  BIND(&if_dictionary);
  {
    Comment("Dictionary");
    // TODO(jkummerow): Support storing to dictionary elements.
    Goto(slow);
  }

  BIND(&if_typed_array);
  {
    Comment("Typed array");
    // TODO(jkummerow): Support typed arrays.
    Goto(slow);
  }
}
515 :
// Searches |receiver_map|'s prototype chain for a property named |name|.
// Falls through (to the implicit ok_to_write exit) when no conflicting
// property exists and the store may proceed; jumps to |readonly| for a
// read-only data property, to |accessor| (binding |var_accessor_pair| and
// |var_accessor_holder|) for an accessor property, and to |bailout| for
// cases the fast lookup can't handle (e.g. typed-array prototypes or lookup
// failures).
void KeyedStoreGenericAssembler::LookupPropertyOnPrototypeChain(
    Node* receiver_map, Node* name, Label* accessor,
    Variable* var_accessor_pair, Variable* var_accessor_holder, Label* readonly,
    Label* bailout) {
  Label ok_to_write(this);
  VARIABLE(var_holder, MachineRepresentation::kTagged);
  var_holder.Bind(LoadMapPrototype(receiver_map));
  VARIABLE(var_holder_map, MachineRepresentation::kTagged);
  var_holder_map.Bind(LoadMap(var_holder.value()));

  Variable* merged_variables[] = {&var_holder, &var_holder_map};
  Label loop(this, arraysize(merged_variables), merged_variables);
  Goto(&loop);
  BIND(&loop);
  {
    Node* holder = var_holder.value();
    Node* holder_map = var_holder_map.value();
    Node* instance_type = LoadMapInstanceType(holder_map);
    Label next_proto(this);
    {
      Label found(this), found_fast(this), found_dict(this), found_global(this);
      VARIABLE(var_meta_storage, MachineRepresentation::kTagged);
      VARIABLE(var_entry, MachineType::PointerRepresentation());
      // Dispatches to one of the found_* labels depending on how the holder
      // stores its properties (descriptors, dictionary, global dictionary).
      TryLookupProperty(holder, holder_map, instance_type, name, &found_fast,
                        &found_dict, &found_global, &var_meta_storage,
                        &var_entry, &next_proto, bailout);
      BIND(&found_fast);
      {
        Node* descriptors = var_meta_storage.value();
        Node* name_index = var_entry.value();
        Node* details =
            LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
        // Writable data property on a prototype: shadowing store is fine.
        JumpIfDataProperty(details, &ok_to_write, readonly);

        // Accessor case.
        // TODO(jkummerow): Implement a trimmed-down LoadAccessorFromFastObject.
        VARIABLE(var_details, MachineRepresentation::kWord32);
        LoadPropertyFromFastObject(holder, holder_map, descriptors, name_index,
                                   &var_details, var_accessor_pair);
        var_accessor_holder->Bind(holder);
        Goto(accessor);
      }

      BIND(&found_dict);
      {
        Node* dictionary = var_meta_storage.value();
        Node* entry = var_entry.value();
        Node* details =
            LoadDetailsByKeyIndex<NameDictionary>(dictionary, entry);
        JumpIfDataProperty(details, &ok_to_write, readonly);

        // Accessor case.
        var_accessor_pair->Bind(
            LoadValueByKeyIndex<NameDictionary>(dictionary, entry));
        var_accessor_holder->Bind(holder);
        Goto(accessor);
      }

      BIND(&found_global);
      {
        Node* dictionary = var_meta_storage.value();
        Node* entry = var_entry.value();
        Node* property_cell =
            LoadValueByKeyIndex<GlobalDictionary>(dictionary, entry);
        Node* value =
            LoadObjectField(property_cell, PropertyCell::kValueOffset);
        // A hole value means the cell is invalidated; treat as not found.
        GotoIf(WordEqual(value, TheHoleConstant()), &next_proto);
        Node* details = LoadAndUntagToWord32ObjectField(
            property_cell, PropertyCell::kDetailsOffset);
        JumpIfDataProperty(details, &ok_to_write, readonly);

        // Accessor case.
        var_accessor_pair->Bind(value);
        var_accessor_holder->Bind(holder);
        Goto(accessor);
      }
    }

    BIND(&next_proto);
    // Bailout if it can be an integer indexed exotic case.
    GotoIf(InstanceTypeEqual(instance_type, JS_TYPED_ARRAY_TYPE), bailout);
    Node* proto = LoadMapPrototype(holder_map);
    // End of the chain: nothing shadows or intercepts the store.
    GotoIf(WordEqual(proto, NullConstant()), &ok_to_write);
    var_holder.Bind(proto);
    var_holder_map.Bind(LoadMap(proto));
    Goto(&loop);
  }
  BIND(&ok_to_write);
}
605 :
// Verifies that |value| is compatible with a field's |representation| (Smi,
// Double, HeapObject, Tagged or None) and, for HeapObject fields, with the
// field type recorded in |descriptors| at |name_index|. Falls through on
// success; jumps to |bailout| on any mismatch.
void KeyedStoreGenericAssembler::CheckFieldType(Node* descriptors,
                                                Node* name_index,
                                                Node* representation,
                                                Node* value, Label* bailout) {
  Label r_smi(this), r_double(this), r_heapobject(this), all_fine(this);
  // Ignore FLAG_track_fields etc. and always emit code for all checks,
  // because this builtin is part of the snapshot and therefore should
  // be flag independent.
  GotoIf(Word32Equal(representation, Int32Constant(Representation::kSmi)),
         &r_smi);
  GotoIf(Word32Equal(representation, Int32Constant(Representation::kDouble)),
         &r_double);
  GotoIf(
      Word32Equal(representation, Int32Constant(Representation::kHeapObject)),
      &r_heapobject);
  // kNone can't hold any value at all.
  GotoIf(Word32Equal(representation, Int32Constant(Representation::kNone)),
         bailout);
  // Only kTagged remains, and it accepts anything.
  CSA_ASSERT(this, Word32Equal(representation,
                               Int32Constant(Representation::kTagged)));
  Goto(&all_fine);

  BIND(&r_smi);
  { Branch(TaggedIsSmi(value), &all_fine, bailout); }

  BIND(&r_double);
  {
    // Smis can always be converted to doubles.
    GotoIf(TaggedIsSmi(value), &all_fine);
    Node* value_map = LoadMap(value);
    // While supporting mutable HeapNumbers would be straightforward, such
    // objects should not end up here anyway.
    CSA_ASSERT(this,
               WordNotEqual(value_map,
                            LoadRoot(Heap::kMutableHeapNumberMapRootIndex)));
    Branch(IsHeapNumberMap(value_map), &all_fine, bailout);
  }

  BIND(&r_heapobject);
  {
    GotoIf(TaggedIsSmi(value), bailout);
    Node* field_type =
        LoadValueByKeyIndex<DescriptorArray>(descriptors, name_index);
    intptr_t kNoneType = reinterpret_cast<intptr_t>(FieldType::None());
    intptr_t kAnyType = reinterpret_cast<intptr_t>(FieldType::Any());
    // FieldType::None can't hold any value.
    GotoIf(WordEqual(field_type, IntPtrConstant(kNoneType)), bailout);
    // FieldType::Any can hold any value.
    GotoIf(WordEqual(field_type, IntPtrConstant(kAnyType)), &all_fine);
    CSA_ASSERT(this, IsWeakCell(field_type));
    // Cleared WeakCells count as FieldType::None, which can't hold any value.
    field_type = LoadWeakCellValue(field_type, bailout);
    // FieldType::Class(...) performs a map check.
    CSA_ASSERT(this, IsMap(field_type));
    Branch(WordEqual(LoadMap(value), field_type), &all_fine, bailout);
  }

  BIND(&all_fine);
}
663 :
// Overwrites an existing fast (descriptor-backed) data property of |object|
// with |value|. The property must be a mutable field (not stored in the
// descriptors, not constant) and |value| must match the field's
// representation; otherwise jumps to |slow|. Handles both in-object fields
// and fields in the properties backing store, with tagged and double
// representations.
void KeyedStoreGenericAssembler::OverwriteExistingFastProperty(
    Node* object, Node* object_map, Node* properties, Node* descriptors,
    Node* descriptor_name_index, Node* details, Node* value, Label* slow) {
  // Properties in descriptors can't be overwritten without map transition.
  GotoIf(Word32NotEqual(DecodeWord32<PropertyDetails::LocationField>(details),
                        Int32Constant(kField)),
         slow);

  if (FLAG_track_constant_fields) {
    // TODO(ishell): Taking the slow path is not necessary if new and old
    // values are identical.
    GotoIf(Word32Equal(DecodeWord32<PropertyDetails::ConstnessField>(details),
                       Int32Constant(kConst)),
           slow);
  }

  Label done(this);
  Node* representation =
      DecodeWord32<PropertyDetails::RepresentationField>(details);

  // Bail out to |slow| if |value| doesn't fit the field's representation.
  CheckFieldType(descriptors, descriptor_name_index, representation, value,
                 slow);
  Node* field_index =
      DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
  Node* inobject_properties = LoadMapInobjectProperties(object_map);

  // Field indices below the in-object count live inside the object itself;
  // the rest live in the properties backing store.
  Label inobject(this), backing_store(this);
  Branch(UintPtrLessThan(field_index, inobject_properties), &inobject,
         &backing_store);

  BIND(&inobject);
  {
    // In-object fields are laid out at the end of the instance.
    Node* field_offset = TimesPointerSize(IntPtrAdd(
        IntPtrSub(LoadMapInstanceSize(object_map), inobject_properties),
        field_index));
    Label tagged_rep(this), double_rep(this);
    Branch(Word32Equal(representation, Int32Constant(Representation::kDouble)),
           &double_rep, &tagged_rep);
    BIND(&double_rep);
    {
      Node* double_value = ChangeNumberToFloat64(value);
      if (FLAG_unbox_double_fields) {
        // Unboxed double: the raw float64 is stored directly in the field.
        StoreObjectFieldNoWriteBarrier(object, field_offset, double_value,
                                       MachineRepresentation::kFloat64);
      } else {
        // Boxed double: update the MutableHeapNumber in place.
        Node* mutable_heap_number = LoadObjectField(object, field_offset);
        StoreHeapNumberValue(mutable_heap_number, double_value);
      }
      Goto(&done);
    }

    BIND(&tagged_rep);
    {
      StoreObjectField(object, field_offset, value);
      Goto(&done);
    }
  }

  BIND(&backing_store);
  {
    Node* backing_store_index = IntPtrSub(field_index, inobject_properties);
    Label tagged_rep(this), double_rep(this);
    Branch(Word32Equal(representation, Int32Constant(Representation::kDouble)),
           &double_rep, &tagged_rep);
    BIND(&double_rep);
    {
      // Backing-store doubles are always boxed as MutableHeapNumbers.
      Node* double_value = ChangeNumberToFloat64(value);
      Node* mutable_heap_number =
          LoadFixedArrayElement(properties, backing_store_index);
      StoreHeapNumberValue(mutable_heap_number, double_value);
      Goto(&done);
    }
    BIND(&tagged_rep);
    {
      StoreFixedArrayElement(properties, backing_store_index, value);
      Goto(&done);
    }
  }
  BIND(&done);
}
744 :
745 62 : void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
746 : Node* receiver, Node* receiver_map, const StoreICParameters* p, Label* slow,
747 : UseStubCache use_stub_cache) {
 : // Generic store of p->value under the unique name p->name on |receiver|
 : // (whose map is |receiver_map|). Fast-mode receivers go through descriptor
 : // lookup plus single-transition handlers; dictionary-mode receivers go
 : // through NameDictionary lookup. Accessor pairs, read-only properties and
 : // strict-mode errors are handled in shared tails below; anything
 : // unsupported jumps to |slow|. With kUseStubCache, fast-path lookup misses
 : // probe the store stub cache instead of going straight to |slow|.
748 62 : VARIABLE(var_accessor_pair, MachineRepresentation::kTagged);
749 124 : VARIABLE(var_accessor_holder, MachineRepresentation::kTagged);
750 62 : Label stub_cache(this), fast_properties(this), dictionary_properties(this),
751 62 : accessor(this), readonly(this);
752 124 : Node* bitfield3 = LoadMapBitField3(receiver_map);
753 : Branch(IsSetWord32<Map::DictionaryMap>(bitfield3), &dictionary_properties,
754 62 : &fast_properties);
755 :
756 62 : BIND(&fast_properties);
757 : {
758 62 : Comment("fast property store");
759 124 : Node* descriptors = LoadMapDescriptors(receiver_map);
760 62 : Label descriptor_found(this), lookup_transition(this);
761 124 : VARIABLE(var_name_index, MachineType::PointerRepresentation());
762 62 : Label* notfound = use_stub_cache == kUseStubCache ? &stub_cache : slow;
763 : DescriptorLookup(p->name, descriptors, bitfield3, &descriptor_found,
764 62 : &var_name_index, &lookup_transition);
765 :
766 62 : BIND(&descriptor_found);
767 : {
 : // The name is an own fast property; |details| decides data vs. accessor,
 : // and read-only data properties jump to the shared &readonly tail.
768 62 : Node* name_index = var_name_index.value();
769 : Node* details =
770 : LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
771 : Label data_property(this);
772 62 : JumpIfDataProperty(details, &data_property, &readonly);
773 :
774 : // Accessor case.
775 : // TODO(jkummerow): Implement a trimmed-down LoadAccessorFromFastObject.
776 124 : VARIABLE(var_details, MachineRepresentation::kWord32);
777 : LoadPropertyFromFastObject(receiver, receiver_map, descriptors,
778 62 : name_index, &var_details, &var_accessor_pair);
779 62 : var_accessor_holder.Bind(receiver);
780 62 : Goto(&accessor);
781 :
782 62 : BIND(&data_property);
783 : {
784 62 : CheckForAssociatedProtector(p->name, slow);
785 124 : Node* properties = LoadFastProperties(receiver);
786 : OverwriteExistingFastProperty(receiver, receiver_map, properties,
787 : descriptors, name_index, details,
788 62 : p->value, slow);
789 124 : Return(p->value);
790 62 : }
791 : }
792 :
793 62 : BIND(&lookup_transition);
794 : {
795 62 : Comment("lookup transition");
 : // No own property: check whether the map's transitions slot holds a
 : // single store handler (Tuple3- or FixedArray-shaped StoreHandler);
 : // full TransitionArray search is not implemented here (see TODO below).
796 62 : VARIABLE(var_handler, MachineRepresentation::kTagged);
797 62 : Label tuple3(this), fixedarray(this), found_handler(this, &var_handler);
798 : Node* maybe_handler =
799 : LoadObjectField(receiver_map, Map::kTransitionsOrPrototypeInfoOffset);
800 124 : GotoIf(TaggedIsSmi(maybe_handler), notfound);
801 124 : Node* handler_map = LoadMap(maybe_handler);
802 124 : GotoIf(WordEqual(handler_map, Tuple3MapConstant()), &tuple3);
803 124 : GotoIf(WordEqual(handler_map, FixedArrayMapConstant()), &fixedarray);
804 :
805 : // TODO(jkummerow): Consider implementing TransitionArray search.
806 62 : Goto(notfound);
807 :
808 124 : VARIABLE(var_transition_cell, MachineRepresentation::kTagged);
809 62 : Label check_key(this, &var_transition_cell);
810 62 : BIND(&tuple3);
811 : {
812 : var_transition_cell.Bind(LoadObjectField(
813 62 : maybe_handler, StoreHandler::kTransitionOrHolderCellOffset));
814 62 : Goto(&check_key);
815 : }
816 :
817 62 : BIND(&fixedarray);
818 : {
819 : var_transition_cell.Bind(LoadFixedArrayElement(
820 62 : maybe_handler, StoreHandler::kTransitionMapOrHolderCellIndex));
821 62 : Goto(&check_key);
822 : }
823 :
824 62 : BIND(&check_key);
825 : {
 : // Validate the transition target: bail to |slow| if the weak cell is
 : // cleared or the target map is deprecated, and require that the
 : // transition's most recently added descriptor is exactly p->name.
826 62 : Node* transition = LoadWeakCellValue(var_transition_cell.value(), slow);
827 124 : Node* transition_bitfield3 = LoadMapBitField3(transition);
828 62 : GotoIf(IsSetWord32<Map::Deprecated>(transition_bitfield3), slow);
829 : Node* nof =
830 : DecodeWord32<Map::NumberOfOwnDescriptorsBits>(transition_bitfield3);
831 186 : Node* last_added = Int32Sub(nof, Int32Constant(1));
832 124 : Node* transition_descriptors = LoadMapDescriptors(transition);
833 62 : Node* key = DescriptorArrayGetKey(transition_descriptors, last_added);
834 186 : GotoIf(WordNotEqual(key, p->name), slow);
835 62 : var_handler.Bind(maybe_handler);
836 62 : Goto(&found_handler);
837 : }
838 :
839 62 : BIND(&found_handler);
840 : {
841 62 : Comment("KeyedStoreGeneric found transition handler");
842 62 : HandleStoreICHandlerCase(p, var_handler.value(), notfound);
843 62 : }
844 62 : }
845 : }
846 :
847 62 : BIND(&dictionary_properties);
848 : {
849 62 : Comment("dictionary property store");
850 : // We checked for LAST_CUSTOM_ELEMENTS_RECEIVER before, which rules out
851 : // seeing global objects here (which would need special handling).
852 :
853 62 : VARIABLE(var_name_index, MachineType::PointerRepresentation());
854 62 : Label dictionary_found(this, &var_name_index), not_found(this);
855 124 : Node* properties = LoadSlowProperties(receiver);
856 : NameDictionaryLookup<NameDictionary>(properties, p->name, &dictionary_found,
857 62 : &var_name_index, &not_found);
858 62 : BIND(&dictionary_found);
859 : {
860 : Label overwrite(this);
861 : Node* details = LoadDetailsByKeyIndex<NameDictionary>(
862 62 : properties, var_name_index.value());
863 62 : JumpIfDataProperty(details, &overwrite, &readonly);
864 :
865 : // Accessor case.
866 : var_accessor_pair.Bind(LoadValueByKeyIndex<NameDictionary>(
867 124 : properties, var_name_index.value()));
868 62 : var_accessor_holder.Bind(receiver);
869 62 : Goto(&accessor);
870 :
871 62 : BIND(&overwrite);
872 : {
873 62 : CheckForAssociatedProtector(p->name, slow);
874 : StoreValueByKeyIndex<NameDictionary>(properties, var_name_index.value(),
875 62 : p->value);
876 124 : Return(p->value);
877 62 : }
878 : }
879 :
880 62 : BIND(&not_found);
881 : {
882 62 : CheckForAssociatedProtector(p->name, slow);
883 : Label extensible(this);
 : // Private symbols may be added even to non-extensible receivers.
884 124 : GotoIf(IsPrivateSymbol(p->name), &extensible);
885 124 : Node* bitfield2 = LoadMapBitField2(receiver_map);
886 62 : Branch(IsSetWord32(bitfield2, 1 << Map::kIsExtensible), &extensible,
887 124 : slow);
888 :
889 62 : BIND(&extensible);
890 : LookupPropertyOnPrototypeChain(receiver_map, p->name, &accessor,
891 : &var_accessor_pair, &var_accessor_holder,
892 : &readonly, slow);
893 62 : Add<NameDictionary>(properties, p->name, p->value, slow);
894 124 : Return(p->value);
895 62 : }
896 : }
897 :
898 62 : BIND(&accessor);
899 : {
900 : Label not_callable(this);
901 62 : Node* accessor_pair = var_accessor_pair.value();
 : // API accessors (AccessorInfo) are not handled here; go to the runtime.
902 186 : GotoIf(IsAccessorInfoMap(LoadMap(accessor_pair)), slow);
903 : CSA_ASSERT(this, HasInstanceType(accessor_pair, ACCESSOR_PAIR_TYPE));
904 : Node* setter = LoadObjectField(accessor_pair, AccessorPair::kSetterOffset);
905 124 : Node* setter_map = LoadMap(setter);
906 : // FunctionTemplateInfo setters are not supported yet.
907 124 : GotoIf(IsFunctionTemplateInfoMap(setter_map), slow);
908 124 : GotoIfNot(IsCallableMap(setter_map), &not_callable);
909 :
910 62 : Callable callable = CodeFactory::Call(isolate());
911 62 : CallJS(callable, p->context, setter, receiver, p->value);
912 124 : Return(p->value);
913 :
914 62 : BIND(&not_callable);
915 : {
 : // Non-callable setter: throw in strict mode, succeed silently otherwise.
916 : Label strict(this);
917 62 : BranchIfStrictMode(p->vector, p->slot, &strict);
918 124 : Return(p->value);
919 :
920 62 : BIND(&strict);
921 : {
922 124 : Node* message = SmiConstant(MessageTemplate::kNoSetterInCallback);
923 : TailCallRuntime(Runtime::kThrowTypeError, p->context, message, p->name,
924 62 : var_accessor_holder.value());
925 62 : }
926 62 : }
927 : }
928 :
929 62 : BIND(&readonly);
930 : {
 : // Read-only property: throw in strict mode, succeed silently in sloppy.
931 : Label strict(this);
932 62 : BranchIfStrictMode(p->vector, p->slot, &strict);
933 124 : Return(p->value);
934 :
935 62 : BIND(&strict);
936 : {
937 124 : Node* message = SmiConstant(MessageTemplate::kStrictReadOnlyProperty);
938 62 : Node* type = Typeof(p->receiver);
939 : TailCallRuntime(Runtime::kThrowTypeError, p->context, message, p->name,
940 62 : type, p->receiver);
941 62 : }
942 : }
943 :
944 62 : if (use_stub_cache == kUseStubCache) {
945 31 : BIND(&stub_cache);
946 31 : Comment("stub cache probe");
947 31 : VARIABLE(var_handler, MachineRepresentation::kTagged);
948 31 : Label found_handler(this, &var_handler), stub_cache_miss(this);
949 : TryProbeStubCache(isolate()->store_stub_cache(), receiver, p->name,
950 31 : &found_handler, &var_handler, &stub_cache_miss);
951 31 : BIND(&found_handler);
952 : {
953 31 : Comment("KeyedStoreGeneric found handler");
954 31 : HandleStoreICHandlerCase(p, var_handler.value(), &stub_cache_miss);
955 : }
956 31 : BIND(&stub_cache_miss);
957 : {
958 31 : Comment("KeyedStoreGeneric_miss");
959 : TailCallRuntime(Runtime::kKeyedStoreIC_Miss, p->context, p->value,
960 31 : p->slot, p->vector, p->receiver, p->name);
961 31 : }
962 62 : }
963 62 : }
964 :
965 31 : void KeyedStoreGenericAssembler::KeyedStoreGeneric() {
 : // Generic (megamorphic) keyed-store entry point. Reads its arguments from
 : // the StoreWithVectorDescriptor, routes integer keys to
 : // EmitGenericElementStore and unique names to EmitGenericPropertyStore,
 : // and otherwise tail-calls Runtime::kSetProperty.
966 : typedef StoreWithVectorDescriptor Descriptor;
967 :
968 31 : Node* receiver = Parameter(Descriptor::kReceiver);
969 31 : Node* name = Parameter(Descriptor::kName);
970 31 : Node* value = Parameter(Descriptor::kValue);
971 31 : Node* slot = Parameter(Descriptor::kSlot);
972 31 : Node* vector = Parameter(Descriptor::kVector);
973 31 : Node* context = Parameter(Descriptor::kContext);
974 :
975 31 : VARIABLE(var_index, MachineType::PointerRepresentation());
976 62 : VARIABLE(var_unique, MachineRepresentation::kTagged);
977 31 : var_unique.Bind(name); // Dummy initialization.
978 31 : Label if_index(this), if_unique_name(this), slow(this);
979 :
 : // Smi receivers have no map to dispatch on; bail out immediately.
980 62 : GotoIf(TaggedIsSmi(receiver), &slow);
981 62 : Node* receiver_map = LoadMap(receiver);
982 62 : Node* instance_type = LoadMapInstanceType(receiver_map);
983 : // Receivers requiring non-standard element accesses (interceptors, access
984 : // checks, strings and string wrappers, proxies) are handled in the runtime.
985 : GotoIf(Int32LessThanOrEqual(instance_type,
986 62 : Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
987 62 : &slow);
988 :
 : // Split |name| into the array-index and unique-name cases.
989 31 : TryToName(name, &if_index, &var_index, &if_unique_name, &var_unique, &slow);
990 :
991 31 : BIND(&if_index);
992 : {
993 31 : Comment("integer index");
994 : EmitGenericElementStore(receiver, receiver_map, instance_type,
995 31 : var_index.value(), value, context, &slow);
996 : }
997 :
998 31 : BIND(&if_unique_name);
999 : {
1000 31 : Comment("key is unique name");
1001 : StoreICParameters p(context, receiver, var_unique.value(), value, slot,
1002 31 : vector);
1003 31 : EmitGenericPropertyStore(receiver, receiver_map, &p, &slow);
1004 : }
1005 :
1006 31 : BIND(&slow);
1007 : {
1008 31 : Comment("KeyedStoreGeneric_slow");
 : // The language mode is not an explicit parameter; recover it from the
 : // feedback vector. Default to strict, fall through to sloppy when
 : // BranchIfStrictMode does not take the &call_runtime branch.
1009 62 : VARIABLE(var_language_mode, MachineRepresentation::kTaggedSigned,
1010 : SmiConstant(Smi::FromEnum(LanguageMode::kStrict)));
1011 31 : Label call_runtime(this);
1012 31 : BranchIfStrictMode(vector, slot, &call_runtime);
1013 62 : var_language_mode.Bind(SmiConstant(Smi::FromEnum(LanguageMode::kSloppy)));
1014 31 : Goto(&call_runtime);
1015 31 : BIND(&call_runtime);
1016 : TailCallRuntime(Runtime::kSetProperty, context, receiver, name, value,
1017 62 : var_language_mode.value());
1018 31 : }
1019 31 : }
1020 :
1021 31 : void KeyedStoreGenericAssembler::StoreIC_Uninitialized() {
 : // StoreIC entry used while the feedback slot is still uninitialized:
 : // optimistically marks the slot premonomorphic, attempts a generic
 : // property store without probing the stub cache, and on any miss restores
 : // the slot to uninitialized before tail-calling Runtime::kStoreIC_Miss.
1022 : typedef StoreWithVectorDescriptor Descriptor;
1023 :
1024 31 : Node* receiver = Parameter(Descriptor::kReceiver);
1025 31 : Node* name = Parameter(Descriptor::kName);
1026 31 : Node* value = Parameter(Descriptor::kValue);
1027 31 : Node* slot = Parameter(Descriptor::kSlot);
1028 31 : Node* vector = Parameter(Descriptor::kVector);
1029 31 : Node* context = Parameter(Descriptor::kContext);
1030 :
1031 : Label miss(this);
1032 :
 : // Smi receivers have no map to dispatch on; bail out immediately.
1033 62 : GotoIf(TaggedIsSmi(receiver), &miss);
1034 62 : Node* receiver_map = LoadMap(receiver);
1035 62 : Node* instance_type = LoadMapInstanceType(receiver_map);
1036 : // Receivers requiring non-standard element accesses (interceptors, access
1037 : // checks, strings and string wrappers, proxies) are handled in the runtime.
 : // NOTE: this uses LAST_SPECIAL_RECEIVER_TYPE (a wider bailout than the
 : // LAST_CUSTOM_ELEMENTS_RECEIVER check in KeyedStoreGeneric above).
1038 : GotoIf(Int32LessThanOrEqual(instance_type,
1039 62 : Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
1040 62 : &miss);
1041 :
1042 : // Optimistically write the state transition to the vector.
1043 : StoreFeedbackVectorSlot(vector, slot,
1044 : LoadRoot(Heap::kpremonomorphic_symbolRootIndex),
1045 62 : SKIP_WRITE_BARRIER, 0, SMI_PARAMETERS);
1046 :
1047 : StoreICParameters p(context, receiver, name, value, slot, vector);
1048 : EmitGenericPropertyStore(receiver, receiver_map, &p, &miss,
1049 31 : kDontUseStubCache);
1050 :
1051 31 : BIND(&miss);
1052 : {
1053 : // Undo the optimistic state transition.
1054 : StoreFeedbackVectorSlot(vector, slot,
1055 : LoadRoot(Heap::kuninitialized_symbolRootIndex),
1056 62 : SKIP_WRITE_BARRIER, 0, SMI_PARAMETERS);
1057 : TailCallRuntime(Runtime::kStoreIC_Miss, context, value, slot, vector,
1058 : receiver, name);
1059 31 : }
1060 31 : }
1061 :
1062 : } // namespace internal
1063 : } // namespace v8
|