Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/ic/keyed-store-generic.h"
6 :
7 : #include "src/code-factory.h"
8 : #include "src/code-stub-assembler.h"
9 : #include "src/contexts.h"
10 : #include "src/ic/accessor-assembler.h"
11 : #include "src/interface-descriptors.h"
12 : #include "src/isolate.h"
13 : #include "src/objects-inl.h"
14 :
15 : namespace v8 {
16 : namespace internal {
17 :
18 : using compiler::Node;
19 :
// Assembler that builds the generic (megamorphic) keyed-store stub and the
// uninitialized StoreIC stub on top of AccessorAssembler / CodeStubAssembler.
// All private helpers below emit IR into the shared CodeAssemblerState.
20 : class KeyedStoreGenericAssembler : public AccessorAssembler {
21 : public:
22 : explicit KeyedStoreGenericAssembler(compiler::CodeAssemblerState* state)
23 : : AccessorAssembler(state) {}
24 :
25 : void KeyedStoreGeneric(LanguageMode language_mode);
26 :
27 : void StoreIC_Uninitialized(LanguageMode language_mode);
28 :
29 : private:
// How a store to a JSArray affects its length:
// kDontChangeLength       - store within current length.
// kIncrementLengthByOne   - store exactly at index == length.
// kBumpLengthWithGap      - store past length, introducing holes.
30 : enum UpdateLength {
31 : kDontChangeLength,
32 : kIncrementLengthByOne,
33 : kBumpLengthWithGap
34 : };
35 :
36 : enum UseStubCache { kUseStubCache, kDontUseStubCache };
37 :
// Stores |value| at |intptr_index| into |receiver|'s elements backing
// store; jumps to |slow| for cases the fast path cannot handle.
38 : void EmitGenericElementStore(Node* receiver, Node* receiver_map,
39 : Node* instance_type, Node* intptr_index,
40 : Node* value, Node* context, Label* slow);
41 :
// Stores a named property described by |p|; jumps to |slow| on bailout.
42 : void EmitGenericPropertyStore(Node* receiver, Node* receiver_map,
43 : const StoreICParameters* p, Label* slow,
44 : LanguageMode language_mode,
45 : UseStubCache use_stub_cache = kUseStubCache);
46 :
47 : void BranchIfPrototypesHaveNonFastElements(Node* receiver_map,
48 : Label* non_fast_elements,
49 : Label* only_fast_elements);
50 :
// ElementsKind-transition helpers; each jumps to |bailout| when the
// receiver's map is not one of the default native-context array maps.
51 : void TryRewriteElements(Node* receiver, Node* receiver_map, Node* elements,
52 : Node* native_context, ElementsKind from_kind,
53 : ElementsKind to_kind, Label* bailout);
54 :
55 : void StoreElementWithCapacity(Node* receiver, Node* receiver_map,
56 : Node* elements, Node* elements_kind,
57 : Node* intptr_index, Node* value, Node* context,
58 : Label* slow, UpdateLength update_length);
59 :
60 : void MaybeUpdateLengthAndReturn(Node* receiver, Node* index, Node* value,
61 : UpdateLength update_length);
62 :
63 : void TryChangeToHoleyMapHelper(Node* receiver, Node* receiver_map,
64 : Node* native_context, ElementsKind packed_kind,
65 : ElementsKind holey_kind, Label* done,
66 : Label* map_mismatch, Label* bailout);
67 : void TryChangeToHoleyMap(Node* receiver, Node* receiver_map,
68 : Node* current_elements_kind, Node* context,
69 : ElementsKind packed_kind, Label* bailout);
70 : void TryChangeToHoleyMapMulti(Node* receiver, Node* receiver_map,
71 : Node* current_elements_kind, Node* context,
72 : ElementsKind packed_kind,
73 : ElementsKind packed_kind_2, Label* bailout);
74 :
// Walks the prototype chain looking for |name|; used to detect setters
// and read-only properties before taking a fast store path.
75 : void LookupPropertyOnPrototypeChain(Node* receiver_map, Node* name,
76 : Label* accessor,
77 : Variable* var_accessor_pair,
78 : Variable* var_accessor_holder,
79 : Label* readonly, Label* bailout);
80 :
81 : void CheckFieldType(Node* descriptors, Node* name_index, Node* representation,
82 : Node* value, Label* bailout);
83 : void OverwriteExistingFastProperty(Node* object, Node* object_map,
84 : Node* properties, Node* descriptors,
85 : Node* descriptor_name_index, Node* details,
86 : Node* value, Label* slow);
87 : };
88 :
// Public entry point: instantiates the assembler on |state| and emits the
// generic KeyedStore stub for |language_mode|.
89 86 : void KeyedStoreGenericGenerator::Generate(compiler::CodeAssemblerState* state,
90 : LanguageMode language_mode) {
91 : KeyedStoreGenericAssembler assembler(state);
92 86 : assembler.KeyedStoreGeneric(language_mode);
93 86 : }
94 :
// Public entry point: instantiates the assembler on |state| and emits the
// uninitialized StoreIC stub for |language_mode|.
95 86 : void StoreICUninitializedGenerator::Generate(
96 : compiler::CodeAssemblerState* state, LanguageMode language_mode) {
97 : KeyedStoreGenericAssembler assembler(state);
98 86 : assembler.StoreIC_Uninitialized(language_mode);
99 86 : }
100 :
// Walks the prototype chain starting at |receiver_map|. Jumps to
// |only_fast_elements| when the chain terminates at null without finding
// anything suspicious; jumps to |non_fast_elements| as soon as a prototype
// is a custom-elements receiver (e.g. proxy/special object) or has an
// elements kind that is neither fast nor NO_ELEMENTS.
101 516 : void KeyedStoreGenericAssembler::BranchIfPrototypesHaveNonFastElements(
102 : Node* receiver_map, Label* non_fast_elements, Label* only_fast_elements) {
103 516 : VARIABLE(var_map, MachineRepresentation::kTagged);
104 516 : var_map.Bind(receiver_map);
105 516 : Label loop_body(this, &var_map);
106 516 : Goto(&loop_body);
107 :
108 516 : BIND(&loop_body);
109 : {
110 516 : Node* map = var_map.value();
111 516 : Node* prototype = LoadMapPrototype(map);
// End of chain: no prototype can intercept the store.
112 516 : GotoIf(WordEqual(prototype, NullConstant()), only_fast_elements);
113 516 : Node* prototype_map = LoadMap(prototype);
114 516 : var_map.Bind(prototype_map);
115 516 : Node* instance_type = LoadMapInstanceType(prototype_map);
116 : STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
117 : STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
// Custom-elements receivers need special handling -> not fast.
118 : GotoIf(Int32LessThanOrEqual(instance_type,
119 : Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
120 516 : non_fast_elements);
121 516 : Node* elements_kind = LoadMapElementsKind(prototype_map);
122 : STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
// Fast or empty elements on this prototype: keep walking up.
123 516 : GotoIf(IsFastElementsKind(elements_kind), &loop_body);
124 516 : GotoIf(Word32Equal(elements_kind, Int32Constant(NO_ELEMENTS)), &loop_body);
125 516 : Goto(non_fast_elements);
126 516 : }
127 516 : }
128 :
// Transitions |receiver| from packed |from_kind| (or its holey variant) to
// |to_kind| (resp. holey |to_kind|) by swapping in the matching default
// native-context Array map. Jumps to |bailout| when the receiver's map is
// neither the default packed nor the default holey |from_kind| map. When the
// transition changes double-ness, the backing store is re-allocated/copied
// via GrowElementsCapacity before the new map is installed.
129 774 : void KeyedStoreGenericAssembler::TryRewriteElements(
130 : Node* receiver, Node* receiver_map, Node* elements, Node* native_context,
131 : ElementsKind from_kind, ElementsKind to_kind, Label* bailout) {
132 : DCHECK(IsFastPackedElementsKind(from_kind));
133 : ElementsKind holey_from_kind = GetHoleyElementsKind(from_kind);
134 : ElementsKind holey_to_kind = GetHoleyElementsKind(to_kind);
// Allocation-site tracking must observe this transition; bail out if a
// memento is present.
135 774 : if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
136 516 : TrapAllocationMemento(receiver, bailout);
137 : }
138 1548 : Label perform_transition(this), check_holey_map(this);
139 1548 : VARIABLE(var_target_map, MachineRepresentation::kTagged);
140 : // Check if the receiver has the default |from_kind| map.
141 : {
142 : Node* packed_map =
143 774 : LoadContextElement(native_context, Context::ArrayMapIndex(from_kind));
144 774 : GotoIf(WordNotEqual(receiver_map, packed_map), &check_holey_map);
145 : var_target_map.Bind(
146 774 : LoadContextElement(native_context, Context::ArrayMapIndex(to_kind)));
147 774 : Goto(&perform_transition);
148 : }
149 :
150 : // Check if the receiver has the default |holey_from_kind| map.
151 774 : BIND(&check_holey_map);
152 : {
153 : Node* holey_map = LoadContextElement(
154 774 : native_context, Context::ArrayMapIndex(holey_from_kind));
155 774 : GotoIf(WordNotEqual(receiver_map, holey_map), bailout);
156 : var_target_map.Bind(LoadContextElement(
157 774 : native_context, Context::ArrayMapIndex(holey_to_kind)));
158 774 : Goto(&perform_transition);
159 : }
160 :
161 : // Found a supported transition target map, perform the transition!
162 774 : BIND(&perform_transition);
163 : {
164 774 : if (IsFastDoubleElementsKind(from_kind) !=
165 : IsFastDoubleElementsKind(to_kind)) {
// Smi/object <-> double requires a differently-encoded backing store.
166 516 : Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
167 : GrowElementsCapacity(receiver, elements, from_kind, to_kind, capacity,
168 516 : capacity, INTPTR_PARAMETERS, bailout);
169 : }
170 774 : StoreMap(receiver, var_target_map.value());
171 774 : }
172 774 : }
173 :
// If |receiver_map| is the default native-context map for |packed_kind|,
// installs the corresponding default |holey_kind| map and jumps to |done|.
// Jumps to |map_mismatch| when the map is not the default packed map, and to
// |bailout| when an allocation memento must not be disturbed.
174 344 : void KeyedStoreGenericAssembler::TryChangeToHoleyMapHelper(
175 : Node* receiver, Node* receiver_map, Node* native_context,
176 : ElementsKind packed_kind, ElementsKind holey_kind, Label* done,
177 : Label* map_mismatch, Label* bailout) {
178 : Node* packed_map =
179 344 : LoadContextElement(native_context, Context::ArrayMapIndex(packed_kind));
180 344 : GotoIf(WordNotEqual(receiver_map, packed_map), map_mismatch);
181 344 : if (AllocationSite::GetMode(packed_kind, holey_kind) ==
182 : TRACK_ALLOCATION_SITE) {
183 86 : TrapAllocationMemento(receiver, bailout);
184 : }
185 : Node* holey_map =
186 344 : LoadContextElement(native_context, Context::ArrayMapIndex(holey_kind));
187 344 : StoreMap(receiver, holey_map);
188 344 : Goto(done);
189 344 : }
190 :
// Ensures |receiver| has the holey variant of |packed_kind|: a no-op when
// the current elements kind is already holey, otherwise transitions via
// TryChangeToHoleyMapHelper. Any map mismatch or memento conflict bails out.
191 172 : void KeyedStoreGenericAssembler::TryChangeToHoleyMap(
192 : Node* receiver, Node* receiver_map, Node* current_elements_kind,
193 : Node* context, ElementsKind packed_kind, Label* bailout) {
194 : ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
195 172 : Label already_holey(this);
196 :
197 : GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
198 172 : &already_holey);
199 172 : Node* native_context = LoadNativeContext(context);
200 : TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
201 172 : holey_kind, &already_holey, bailout, bailout);
202 172 : BIND(&already_holey);
203 172 : }
204 :
// Like TryChangeToHoleyMap but accepts two candidate packed kinds: if the
// receiver is already in either holey kind nothing happens; otherwise the
// helper is tried first for |packed_kind| and, on a map mismatch, for
// |packed_kind_2|. Failing both bails out.
205 86 : void KeyedStoreGenericAssembler::TryChangeToHoleyMapMulti(
206 : Node* receiver, Node* receiver_map, Node* current_elements_kind,
207 : Node* context, ElementsKind packed_kind, ElementsKind packed_kind_2,
208 : Label* bailout) {
209 : ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
210 : ElementsKind holey_kind_2 = GetHoleyElementsKind(packed_kind_2);
211 172 : Label already_holey(this), check_other_kind(this);
212 :
213 : GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
214 86 : &already_holey);
215 : GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind_2)),
216 86 : &already_holey);
217 :
218 86 : Node* native_context = LoadNativeContext(context);
219 : TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
220 : holey_kind, &already_holey, &check_other_kind,
221 86 : bailout);
222 86 : BIND(&check_other_kind);
223 : TryChangeToHoleyMapHelper(receiver, receiver_map, native_context,
224 : packed_kind_2, holey_kind_2, &already_holey,
225 86 : bailout, bailout);
226 172 : BIND(&already_holey);
227 86 : }
228 :
// Epilogue for element stores: when the store grew the array (any mode other
// than kDontChangeLength), writes the new JSArray length (index + 1, as a
// Smi) without a write barrier, then returns |value| from the stub.
229 1548 : void KeyedStoreGenericAssembler::MaybeUpdateLengthAndReturn(
230 : Node* receiver, Node* index, Node* value, UpdateLength update_length) {
231 1548 : if (update_length != kDontChangeLength) {
232 1032 : Node* new_length = SmiTag(IntPtrAdd(index, IntPtrConstant(1)));
233 : StoreObjectFieldNoWriteBarrier(receiver, JSArray::kLengthOffset, new_length,
234 1032 : MachineRepresentation::kTagged);
235 : }
236 1548 : Return(value);
237 1548 : }
238 :
// Stores |value| at |intptr_index| into |elements|, assuming sufficient
// capacity. Dispatches on the backing-store map (FixedArray -> Smi/object
// elements, FixedDoubleArray -> double elements, anything else -> slow, e.g.
// COW arrays). Handles ElementsKind transitions (Smi -> double/object,
// double -> object, packed -> holey) inline and updates the array length per
// |update_length| via MaybeUpdateLengthAndReturn.
239 258 : void KeyedStoreGenericAssembler::StoreElementWithCapacity(
240 : Node* receiver, Node* receiver_map, Node* elements, Node* elements_kind,
241 : Node* intptr_index, Node* value, Node* context, Label* slow,
242 : UpdateLength update_length) {
243 258 : if (update_length != kDontChangeLength) {
244 : CSA_ASSERT(this, Word32Equal(LoadMapInstanceType(receiver_map),
245 : Int32Constant(JS_ARRAY_TYPE)));
246 : // Check if the length property is writable. The fast check is only
247 : // supported for fast properties.
248 172 : GotoIf(IsDictionaryMap(receiver_map), slow);
249 : // The length property is non-configurable, so it's guaranteed to always
250 : // be the first property.
251 172 : Node* descriptors = LoadMapDescriptors(receiver_map);
252 : Node* details =
253 172 : LoadFixedArrayElement(descriptors, DescriptorArray::ToDetailsIndex(0));
254 172 : GotoIf(IsSetSmi(details, PropertyDetails::kAttributesReadOnlyMask), slow);
255 : }
256 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
257 : const int kHeaderSize = FixedArray::kHeaderSize - kHeapObjectTag;
258 :
259 516 : Label check_double_elements(this), check_cow_elements(this);
260 258 : Node* elements_map = LoadMap(elements);
261 : GotoIf(WordNotEqual(elements_map, LoadRoot(Heap::kFixedArrayMapRootIndex)),
262 258 : &check_double_elements);
263 :
264 : // FixedArray backing store -> Smi or object elements.
265 : {
266 : Node* offset = ElementOffsetFromIndex(intptr_index, FAST_ELEMENTS,
267 258 : INTPTR_PARAMETERS, kHeaderSize);
268 : // Check if we're about to overwrite the hole. We can safely do that
269 : // only if there can be no setters on the prototype chain.
270 : // If we know that we're storing beyond the previous array length, we
271 : // can skip the hole check (and always assume the hole).
272 : {
273 : Label hole_check_passed(this);
274 258 : if (update_length == kDontChangeLength) {
275 86 : Node* element = Load(MachineType::AnyTagged(), elements, offset);
276 86 : GotoIf(WordNotEqual(element, TheHoleConstant()), &hole_check_passed);
277 : }
278 : BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
279 258 : &hole_check_passed);
280 258 : BIND(&hole_check_passed);
281 : }
282 :
283 : // Check if the value we're storing matches the elements_kind. Smis
284 : // can always be stored.
285 : {
286 : Label non_smi_value(this);
287 258 : GotoIfNot(TaggedIsSmi(value), &non_smi_value);
288 : // If we're about to introduce holes, ensure holey elements.
289 258 : if (update_length == kBumpLengthWithGap) {
290 : TryChangeToHoleyMapMulti(receiver, receiver_map, elements_kind, context,
291 86 : FAST_SMI_ELEMENTS, FAST_ELEMENTS, slow);
292 : }
// Smis never need a write barrier.
293 : StoreNoWriteBarrier(MachineRepresentation::kTagged, elements, offset,
294 258 : value);
295 258 : MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);
296 :
297 258 : BIND(&non_smi_value);
298 : }
299 :
300 : // Check if we already have object elements; just do the store if so.
301 : {
302 : Label must_transition(this);
303 : STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
304 : STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
305 : GotoIf(Int32LessThanOrEqual(elements_kind,
306 : Int32Constant(FAST_HOLEY_SMI_ELEMENTS)),
307 258 : &must_transition);
308 258 : if (update_length == kBumpLengthWithGap) {
309 : TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
310 86 : FAST_ELEMENTS, slow);
311 : }
312 258 : Store(elements, offset, value);
313 258 : MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);
314 :
315 258 : BIND(&must_transition);
316 : }
317 :
318 : // Transition to the required ElementsKind.
319 : {
320 258 : Label transition_to_double(this), transition_to_object(this);
321 258 : Node* native_context = LoadNativeContext(context);
// HeapNumber values go to double elements, everything else to objects.
322 : Branch(WordEqual(LoadMap(value), LoadRoot(Heap::kHeapNumberMapRootIndex)),
323 258 : &transition_to_double, &transition_to_object);
324 258 : BIND(&transition_to_double);
325 : {
326 : // If we're adding holes at the end, always transition to a holey
327 : // elements kind, otherwise try to remain packed.
328 : ElementsKind target_kind = update_length == kBumpLengthWithGap
329 : ? FAST_HOLEY_DOUBLE_ELEMENTS
330 258 : : FAST_DOUBLE_ELEMENTS;
331 : TryRewriteElements(receiver, receiver_map, elements, native_context,
332 258 : FAST_SMI_ELEMENTS, target_kind, slow);
333 : // Reload migrated elements.
334 258 : Node* double_elements = LoadElements(receiver);
335 : Node* double_offset = ElementOffsetFromIndex(
336 258 : intptr_index, FAST_DOUBLE_ELEMENTS, INTPTR_PARAMETERS, kHeaderSize);
337 : // Make sure we do not store signalling NaNs into double arrays.
338 258 : Node* double_value = Float64SilenceNaN(LoadHeapNumberValue(value));
339 : StoreNoWriteBarrier(MachineRepresentation::kFloat64, double_elements,
340 258 : double_offset, double_value);
341 : MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
342 258 : update_length);
343 : }
344 :
345 258 : BIND(&transition_to_object);
346 : {
347 : // If we're adding holes at the end, always transition to a holey
348 : // elements kind, otherwise try to remain packed.
349 : ElementsKind target_kind = update_length == kBumpLengthWithGap
350 : ? FAST_HOLEY_ELEMENTS
351 258 : : FAST_ELEMENTS;
352 : TryRewriteElements(receiver, receiver_map, elements, native_context,
353 258 : FAST_SMI_ELEMENTS, target_kind, slow);
354 : // The elements backing store didn't change, no reload necessary.
355 : CSA_ASSERT(this, WordEqual(elements, LoadElements(receiver)));
356 258 : Store(elements, offset, value);
357 : MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
358 258 : update_length);
359 258 : }
360 : }
361 : }
362 :
363 258 : BIND(&check_double_elements);
364 258 : Node* fixed_double_array_map = LoadRoot(Heap::kFixedDoubleArrayMapRootIndex);
365 : GotoIf(WordNotEqual(elements_map, fixed_double_array_map),
366 258 : &check_cow_elements);
367 : // FixedDoubleArray backing store -> double elements.
368 : {
369 : Node* offset = ElementOffsetFromIndex(intptr_index, FAST_DOUBLE_ELEMENTS,
370 258 : INTPTR_PARAMETERS, kHeaderSize);
371 : // Check if we're about to overwrite the hole. We can safely do that
372 : // only if there can be no setters on the prototype chain.
373 : {
374 : Label hole_check_passed(this);
375 : // If we know that we're storing beyond the previous array length, we
376 : // can skip the hole check (and always assume the hole).
377 258 : if (update_length == kDontChangeLength) {
378 : Label found_hole(this);
379 : LoadDoubleWithHoleCheck(elements, offset, &found_hole,
380 86 : MachineType::None());
381 86 : Goto(&hole_check_passed);
382 86 : BIND(&found_hole);
383 : }
384 : BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
385 258 : &hole_check_passed);
386 258 : BIND(&hole_check_passed);
387 : }
388 :
389 : // Try to store the value as a double.
390 : {
391 : Label non_number_value(this);
392 258 : Node* double_value = TryTaggedToFloat64(value, &non_number_value);
393 :
394 : // Make sure we do not store signalling NaNs into double arrays.
395 258 : double_value = Float64SilenceNaN(double_value);
396 : // If we're about to introduce holes, ensure holey elements.
397 258 : if (update_length == kBumpLengthWithGap) {
398 : TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
399 86 : FAST_DOUBLE_ELEMENTS, slow);
400 : }
401 : StoreNoWriteBarrier(MachineRepresentation::kFloat64, elements, offset,
402 258 : double_value);
403 258 : MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);
404 :
405 258 : BIND(&non_number_value);
406 : }
407 :
408 : // Transition to object elements.
409 : {
410 258 : Node* native_context = LoadNativeContext(context);
411 : ElementsKind target_kind = update_length == kBumpLengthWithGap
412 : ? FAST_HOLEY_ELEMENTS
413 258 : : FAST_ELEMENTS;
414 : TryRewriteElements(receiver, receiver_map, elements, native_context,
415 258 : FAST_DOUBLE_ELEMENTS, target_kind, slow);
416 : // Reload migrated elements.
417 258 : Node* fast_elements = LoadElements(receiver);
418 : Node* fast_offset = ElementOffsetFromIndex(
419 258 : intptr_index, FAST_ELEMENTS, INTPTR_PARAMETERS, kHeaderSize);
420 258 : Store(fast_elements, fast_offset, value);
421 258 : MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);
422 : }
423 : }
424 :
425 258 : BIND(&check_cow_elements);
426 : {
427 : // TODO(jkummerow): Use GrowElementsCapacity instead of bailing out.
428 258 : Goto(slow);
429 258 : }
430 258 : }
431 :
// Top-level element-store dispatcher. For fast elements kinds it classifies
// the store as in-bounds, length-extending (index == length), or
// gap-creating (index > length but < capacity) and delegates to
// StoreElementWithCapacity; out-of-capacity stores, non-fast kinds
// (dictionary, typed arrays, everything else) all go to |slow|.
432 86 : void KeyedStoreGenericAssembler::EmitGenericElementStore(
433 : Node* receiver, Node* receiver_map, Node* instance_type, Node* intptr_index,
434 : Node* value, Node* context, Label* slow) {
435 172 : Label if_fast(this), if_in_bounds(this), if_increment_length_by_one(this),
436 86 : if_bump_length_with_gap(this), if_grow(this), if_nonfast(this),
437 86 : if_typed_array(this), if_dictionary(this);
438 86 : Node* elements = LoadElements(receiver);
439 86 : Node* elements_kind = LoadMapElementsKind(receiver_map);
440 86 : Branch(IsFastElementsKind(elements_kind), &if_fast, &if_nonfast);
441 86 : BIND(&if_fast);
442 :
443 86 : Label if_array(this);
444 86 : GotoIf(Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE)), &if_array);
445 : {
// Non-JSArray receivers: only capacity matters (no length to maintain).
446 86 : Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
447 86 : Branch(UintPtrLessThan(intptr_index, capacity), &if_in_bounds, &if_grow);
448 : }
449 86 : BIND(&if_array);
450 : {
451 86 : Node* length = SmiUntag(LoadJSArrayLength(receiver));
452 86 : GotoIf(UintPtrLessThan(intptr_index, length), &if_in_bounds);
453 86 : Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
454 86 : GotoIf(UintPtrGreaterThanOrEqual(intptr_index, capacity), &if_grow);
455 : Branch(WordEqual(intptr_index, length), &if_increment_length_by_one,
456 86 : &if_bump_length_with_gap);
457 : }
458 :
459 86 : BIND(&if_in_bounds);
460 : {
461 : StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
462 : intptr_index, value, context, slow,
463 86 : kDontChangeLength);
464 : }
465 :
466 86 : BIND(&if_increment_length_by_one);
467 : {
468 : StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
469 : intptr_index, value, context, slow,
470 86 : kIncrementLengthByOne);
471 : }
472 :
473 86 : BIND(&if_bump_length_with_gap);
474 : {
475 : StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
476 : intptr_index, value, context, slow,
477 86 : kBumpLengthWithGap);
478 : }
479 :
480 : // Out-of-capacity accesses (index >= capacity) jump here. Additionally,
481 : // an ElementsKind transition might be necessary.
482 : // The index can also be negative at this point! Jump to the runtime in that
483 : // case to convert it to a named property.
484 86 : BIND(&if_grow);
485 : {
486 86 : Comment("Grow backing store");
487 : // TODO(jkummerow): Support inline backing store growth.
488 86 : Goto(slow);
489 : }
490 :
491 : // Any ElementsKind > LAST_FAST_ELEMENTS_KIND jumps here for further dispatch.
492 86 : BIND(&if_nonfast);
493 : {
494 : STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
495 : GotoIf(Int32GreaterThanOrEqual(
496 : elements_kind,
497 : Int32Constant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
498 86 : &if_typed_array);
499 : GotoIf(Word32Equal(elements_kind, Int32Constant(DICTIONARY_ELEMENTS)),
500 86 : &if_dictionary);
501 86 : Goto(slow);
502 : }
503 :
504 86 : BIND(&if_dictionary);
505 : {
506 86 : Comment("Dictionary");
507 : // TODO(jkummerow): Support storing to dictionary elements.
508 86 : Goto(slow);
509 : }
510 :
511 86 : BIND(&if_typed_array);
512 : {
513 86 : Comment("Typed array");
514 : // TODO(jkummerow): Support typed arrays.
515 86 : Goto(slow);
516 86 : }
517 86 : }
518 :
// Searches the prototype chain of |receiver_map| for |name|. Falls through
// (label ok_to_write) when the chain ends at null or a writable data
// property is found; jumps to |readonly| for read-only data properties, and
// to |accessor| (binding the accessor pair and its holder into the given
// variables) when an accessor is found. Bails out for lookup failures and
// typed-array prototypes (possible integer-indexed-exotic behavior).
519 172 : void KeyedStoreGenericAssembler::LookupPropertyOnPrototypeChain(
520 : Node* receiver_map, Node* name, Label* accessor,
521 : Variable* var_accessor_pair, Variable* var_accessor_holder, Label* readonly,
522 : Label* bailout) {
523 172 : Label ok_to_write(this);
524 344 : VARIABLE(var_holder, MachineRepresentation::kTagged);
525 172 : var_holder.Bind(LoadMapPrototype(receiver_map));
526 344 : VARIABLE(var_holder_map, MachineRepresentation::kTagged);
527 172 : var_holder_map.Bind(LoadMap(var_holder.value()));
528 :
529 172 : Variable* merged_variables[] = {&var_holder, &var_holder_map};
530 344 : Label loop(this, arraysize(merged_variables), merged_variables);
531 172 : Goto(&loop);
532 172 : BIND(&loop);
533 : {
534 172 : Node* holder = var_holder.value();
535 172 : Node* holder_map = var_holder_map.value();
536 172 : Node* instance_type = LoadMapInstanceType(holder_map);
537 : Label next_proto(this);
538 : {
539 172 : Label found(this), found_fast(this), found_dict(this), found_global(this);
540 344 : VARIABLE(var_meta_storage, MachineRepresentation::kTagged);
541 344 : VARIABLE(var_entry, MachineType::PointerRepresentation());
542 : TryLookupProperty(holder, holder_map, instance_type, name, &found_fast,
543 : &found_dict, &found_global, &var_meta_storage,
544 172 : &var_entry, &next_proto, bailout);
// Holder has fast properties: details come from the descriptor array.
545 172 : BIND(&found_fast);
546 : {
547 172 : Node* descriptors = var_meta_storage.value();
548 172 : Node* name_index = var_entry.value();
549 : Node* details =
550 : LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
551 172 : JumpIfDataProperty(details, &ok_to_write, readonly);
552 :
553 : // Accessor case.
554 : // TODO(jkummerow): Implement a trimmed-down LoadAccessorFromFastObject.
555 172 : VARIABLE(var_details, MachineRepresentation::kWord32);
556 : LoadPropertyFromFastObject(holder, holder_map, descriptors, name_index,
557 172 : &var_details, var_accessor_pair);
558 172 : var_accessor_holder->Bind(holder);
559 172 : Goto(accessor);
560 : }
561 :
// Holder has dictionary properties.
562 172 : BIND(&found_dict);
563 : {
564 172 : Node* dictionary = var_meta_storage.value();
565 172 : Node* entry = var_entry.value();
566 : Node* details =
567 : LoadDetailsByKeyIndex<NameDictionary>(dictionary, entry);
568 172 : JumpIfDataProperty(details, &ok_to_write, readonly);
569 :
570 : // Accessor case.
571 : var_accessor_pair->Bind(
572 172 : LoadValueByKeyIndex<NameDictionary>(dictionary, entry));
573 172 : var_accessor_holder->Bind(holder);
574 172 : Goto(accessor);
575 : }
576 :
// Holder is a global object: value lives in a PropertyCell.
577 172 : BIND(&found_global);
578 : {
579 172 : Node* dictionary = var_meta_storage.value();
580 172 : Node* entry = var_entry.value();
581 : Node* property_cell =
582 : LoadValueByKeyIndex<GlobalDictionary>(dictionary, entry);
583 : Node* value =
584 172 : LoadObjectField(property_cell, PropertyCell::kValueOffset);
// The-hole in a PropertyCell means the property was deleted.
585 172 : GotoIf(WordEqual(value, TheHoleConstant()), &next_proto);
586 : Node* details = LoadAndUntagToWord32ObjectField(
587 172 : property_cell, PropertyCell::kDetailsOffset);
588 172 : JumpIfDataProperty(details, &ok_to_write, readonly);
589 :
590 : // Accessor case.
591 172 : var_accessor_pair->Bind(value);
592 172 : var_accessor_holder->Bind(holder);
593 172 : Goto(accessor);
594 172 : }
595 : }
596 :
597 172 : BIND(&next_proto);
598 : // Bailout if it can be an integer indexed exotic case.
599 : GotoIf(Word32Equal(instance_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
600 172 : bailout);
601 172 : Node* proto = LoadMapPrototype(holder_map);
602 172 : GotoIf(WordEqual(proto, NullConstant()), &ok_to_write);
603 172 : var_holder.Bind(proto);
604 172 : var_holder_map.Bind(LoadMap(proto));
605 172 : Goto(&loop);
606 : }
607 344 : BIND(&ok_to_write);
608 172 : }
609 :
// Verifies that |value| is compatible with the field representation found in
// the descriptor array (Smi, Double, HeapObject with an optional FieldType,
// or Tagged). Falls through on success, jumps to |bailout| on any mismatch
// or when the representation is kNone.
610 172 : void KeyedStoreGenericAssembler::CheckFieldType(Node* descriptors,
611 : Node* name_index,
612 : Node* representation,
613 : Node* value, Label* bailout) {
614 344 : Label r_smi(this), r_double(this), r_heapobject(this), all_fine(this);
615 : // Ignore FLAG_track_fields etc. and always emit code for all checks,
616 : // because this builtin is part of the snapshot and therefore should
617 : // be flag independent.
618 : GotoIf(Word32Equal(representation, Int32Constant(Representation::kSmi)),
619 172 : &r_smi);
620 : GotoIf(Word32Equal(representation, Int32Constant(Representation::kDouble)),
621 172 : &r_double);
622 : GotoIf(
623 : Word32Equal(representation, Int32Constant(Representation::kHeapObject)),
624 172 : &r_heapobject);
625 : GotoIf(Word32Equal(representation, Int32Constant(Representation::kNone)),
626 172 : bailout);
627 : CSA_ASSERT(this, Word32Equal(representation,
628 : Int32Constant(Representation::kTagged)));
// Tagged representation accepts any value.
629 172 : Goto(&all_fine);
630 :
631 172 : BIND(&r_smi);
632 172 : { Branch(TaggedIsSmi(value), &all_fine, bailout); }
633 :
634 172 : BIND(&r_double);
635 : {
// Smis can always be converted to doubles.
636 172 : GotoIf(TaggedIsSmi(value), &all_fine);
637 172 : Node* value_map = LoadMap(value);
638 : // While supporting mutable HeapNumbers would be straightforward, such
639 : // objects should not end up here anyway.
640 : CSA_ASSERT(this,
641 : WordNotEqual(value_map,
642 : LoadRoot(Heap::kMutableHeapNumberMapRootIndex)));
643 172 : Branch(IsHeapNumberMap(value_map), &all_fine, bailout);
644 : }
645 :
646 172 : BIND(&r_heapobject);
647 : {
648 172 : GotoIf(TaggedIsSmi(value), bailout);
649 : Node* field_type =
650 : LoadValueByKeyIndex<DescriptorArray>(descriptors, name_index);
651 172 : intptr_t kNoneType = reinterpret_cast<intptr_t>(FieldType::None());
652 172 : intptr_t kAnyType = reinterpret_cast<intptr_t>(FieldType::Any());
653 : // FieldType::None can't hold any value.
654 172 : GotoIf(WordEqual(field_type, IntPtrConstant(kNoneType)), bailout);
655 : // FieldType::Any can hold any value.
656 172 : GotoIf(WordEqual(field_type, IntPtrConstant(kAnyType)), &all_fine);
657 : CSA_ASSERT(this, IsWeakCell(field_type));
658 : // Cleared WeakCells count as FieldType::None, which can't hold any value.
659 172 : field_type = LoadWeakCellValue(field_type, bailout);
660 : // FieldType::Class(...) performs a map check.
661 : CSA_ASSERT(this, IsMap(field_type));
662 172 : Branch(WordEqual(LoadMap(value), field_type), &all_fine, bailout);
663 : }
664 :
665 344 : BIND(&all_fine);
666 172 : }
667 :
// Overwrites the value of an existing fast (descriptor-backed) data
// property. Only in-field properties can be written without a map
// transition; descriptor-stored or constant (when constant-field tracking is
// on) properties go to |slow|. After a CheckFieldType check, the value is
// written either in-object or in the properties backing store, with a
// special path for Double representation (unboxed field or mutable
// HeapNumber, depending on FLAG_unbox_double_fields).
668 172 : void KeyedStoreGenericAssembler::OverwriteExistingFastProperty(
669 : Node* object, Node* object_map, Node* properties, Node* descriptors,
670 : Node* descriptor_name_index, Node* details, Node* value, Label* slow) {
671 : // Properties in descriptors can't be overwritten without map transition.
672 : GotoIf(Word32NotEqual(DecodeWord32<PropertyDetails::LocationField>(details),
673 : Int32Constant(kField)),
674 344 : slow);
675 :
676 : if (FLAG_track_constant_fields) {
677 : // TODO(ishell): Taking the slow path is not necessary if new and old
678 : // values are identical.
679 : GotoIf(Word32Equal(DecodeWord32<PropertyDetails::ConstnessField>(details),
680 : Int32Constant(kConst)),
681 : slow);
682 : }
683 :
684 : Label done(this);
685 : Node* representation =
686 : DecodeWord32<PropertyDetails::RepresentationField>(details);
687 :
688 : CheckFieldType(descriptors, descriptor_name_index, representation, value,
689 172 : slow);
690 : Node* field_index =
691 172 : DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
692 172 : Node* inobject_properties = LoadMapInobjectProperties(object_map);
693 :
694 172 : Label inobject(this), backing_store(this);
695 : Branch(UintPtrLessThan(field_index, inobject_properties), &inobject,
696 172 : &backing_store);
697 :
698 172 : BIND(&inobject);
699 : {
// In-object fields sit at the end of the instance; compute the byte
// offset from the instance size and the field's position.
700 : Node* field_offset =
701 : IntPtrMul(IntPtrSub(LoadMapInstanceSize(object_map),
702 : IntPtrSub(inobject_properties, field_index)),
703 172 : IntPtrConstant(kPointerSize));
704 172 : Label tagged_rep(this), double_rep(this);
705 : Branch(Word32Equal(representation, Int32Constant(Representation::kDouble)),
706 172 : &double_rep, &tagged_rep);
707 172 : BIND(&double_rep);
708 : {
709 172 : Node* double_value = ChangeNumberToFloat64(value);
710 : if (FLAG_unbox_double_fields) {
711 : StoreObjectFieldNoWriteBarrier(object, field_offset, double_value,
712 172 : MachineRepresentation::kFloat64);
713 : } else {
714 : Node* mutable_heap_number = LoadObjectField(object, field_offset);
715 : StoreHeapNumberValue(mutable_heap_number, double_value);
716 : }
717 172 : Goto(&done);
718 : }
719 :
720 172 : BIND(&tagged_rep);
721 : {
722 172 : StoreObjectField(object, field_offset, value);
723 172 : Goto(&done);
724 172 : }
725 : }
726 :
727 172 : BIND(&backing_store);
728 : {
729 172 : Node* backing_store_index = IntPtrSub(field_index, inobject_properties);
730 172 : Label tagged_rep(this), double_rep(this);
731 : Branch(Word32Equal(representation, Int32Constant(Representation::kDouble)),
732 172 : &double_rep, &tagged_rep);
733 172 : BIND(&double_rep);
734 : {
// Out-of-object doubles are always boxed in a mutable HeapNumber.
735 172 : Node* double_value = ChangeNumberToFloat64(value);
736 : Node* mutable_heap_number =
737 172 : LoadFixedArrayElement(properties, backing_store_index);
738 172 : StoreHeapNumberValue(mutable_heap_number, double_value);
739 172 : Goto(&done);
740 : }
741 172 : BIND(&tagged_rep);
742 : {
743 172 : StoreFixedArrayElement(properties, backing_store_index, value);
744 172 : Goto(&done);
745 172 : }
746 : }
747 344 : BIND(&done);
748 172 : }
749 :
750 172 : void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
751 : Node* receiver, Node* receiver_map, const StoreICParameters* p, Label* slow,
752 : LanguageMode language_mode, UseStubCache use_stub_cache) {
753 172 : VARIABLE(var_accessor_pair, MachineRepresentation::kTagged);
754 344 : VARIABLE(var_accessor_holder, MachineRepresentation::kTagged);
755 172 : Label stub_cache(this), fast_properties(this), dictionary_properties(this),
756 172 : accessor(this), readonly(this);
757 172 : Node* properties = LoadProperties(receiver);
758 172 : Node* properties_map = LoadMap(properties);
759 : Branch(WordEqual(properties_map, LoadRoot(Heap::kHashTableMapRootIndex)),
760 172 : &dictionary_properties, &fast_properties);
761 :
762 172 : BIND(&fast_properties);
763 : {
764 172 : Comment("fast property store");
765 172 : Node* bitfield3 = LoadMapBitField3(receiver_map);
766 172 : Node* descriptors = LoadMapDescriptors(receiver_map);
767 : Label descriptor_found(this);
768 344 : VARIABLE(var_name_index, MachineType::PointerRepresentation());
769 : // TODO(jkummerow): Maybe look for existing map transitions?
770 172 : Label* notfound = use_stub_cache == kUseStubCache ? &stub_cache : slow;
771 : DescriptorLookup(p->name, descriptors, bitfield3, &descriptor_found,
772 172 : &var_name_index, notfound);
773 :
774 172 : BIND(&descriptor_found);
775 : {
776 172 : Node* name_index = var_name_index.value();
777 : Node* details =
778 : LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
779 : Label data_property(this);
780 172 : JumpIfDataProperty(details, &data_property, &readonly);
781 :
782 : // Accessor case.
783 : // TODO(jkummerow): Implement a trimmed-down LoadAccessorFromFastObject.
784 344 : VARIABLE(var_details, MachineRepresentation::kWord32);
785 : LoadPropertyFromFastObject(receiver, receiver_map, descriptors,
786 172 : name_index, &var_details, &var_accessor_pair);
787 172 : var_accessor_holder.Bind(receiver);
788 172 : Goto(&accessor);
789 :
790 172 : BIND(&data_property);
791 : {
792 172 : CheckForAssociatedProtector(p->name, slow);
793 : OverwriteExistingFastProperty(receiver, receiver_map, properties,
794 : descriptors, name_index, details,
795 172 : p->value, slow);
796 172 : Return(p->value);
797 172 : }
798 172 : }
799 : }
800 :
801 172 : BIND(&dictionary_properties);
802 : {
803 172 : Comment("dictionary property store");
804 : // We checked for LAST_CUSTOM_ELEMENTS_RECEIVER before, which rules out
805 : // seeing global objects here (which would need special handling).
806 :
807 172 : VARIABLE(var_name_index, MachineType::PointerRepresentation());
808 172 : Label dictionary_found(this, &var_name_index), not_found(this);
809 : NameDictionaryLookup<NameDictionary>(properties, p->name, &dictionary_found,
810 172 : &var_name_index, ¬_found);
811 172 : BIND(&dictionary_found);
812 : {
813 : Label overwrite(this);
814 : Node* details = LoadDetailsByKeyIndex<NameDictionary>(
815 172 : properties, var_name_index.value());
816 172 : JumpIfDataProperty(details, &overwrite, &readonly);
817 :
818 : // Accessor case.
819 : var_accessor_pair.Bind(LoadValueByKeyIndex<NameDictionary>(
820 344 : properties, var_name_index.value()));
821 172 : var_accessor_holder.Bind(receiver);
822 172 : Goto(&accessor);
823 :
824 172 : BIND(&overwrite);
825 : {
826 172 : CheckForAssociatedProtector(p->name, slow);
827 : StoreValueByKeyIndex<NameDictionary>(properties, var_name_index.value(),
828 172 : p->value);
829 172 : Return(p->value);
830 172 : }
831 : }
832 :
833 172 : BIND(¬_found);
834 : {
835 172 : CheckForAssociatedProtector(p->name, slow);
836 : Label extensible(this);
837 172 : GotoIf(IsPrivateSymbol(p->name), &extensible);
838 172 : Node* bitfield2 = LoadMapBitField2(receiver_map);
839 : Branch(IsSetWord32(bitfield2, 1 << Map::kIsExtensible), &extensible,
840 172 : slow);
841 :
842 172 : BIND(&extensible);
843 : LookupPropertyOnPrototypeChain(receiver_map, p->name, &accessor,
844 : &var_accessor_pair, &var_accessor_holder,
845 172 : &readonly, slow);
846 172 : Add<NameDictionary>(properties, p->name, p->value, slow);
847 172 : Return(p->value);
848 172 : }
849 : }
850 :
851 172 : BIND(&accessor);
852 : {
853 : Label not_callable(this);
854 172 : Node* accessor_pair = var_accessor_pair.value();
855 172 : GotoIf(IsAccessorInfoMap(LoadMap(accessor_pair)), slow);
856 : CSA_ASSERT(this, HasInstanceType(accessor_pair, ACCESSOR_PAIR_TYPE));
857 172 : Node* setter = LoadObjectField(accessor_pair, AccessorPair::kSetterOffset);
858 172 : Node* setter_map = LoadMap(setter);
859 : // FunctionTemplateInfo setters are not supported yet.
860 172 : GotoIf(IsFunctionTemplateInfoMap(setter_map), slow);
861 172 : GotoIfNot(IsCallableMap(setter_map), ¬_callable);
862 :
863 172 : Callable callable = CodeFactory::Call(isolate());
864 172 : CallJS(callable, p->context, setter, receiver, p->value);
865 172 : Return(p->value);
866 :
867 172 : BIND(¬_callable);
868 : {
869 172 : if (language_mode == STRICT) {
870 : Node* message =
871 86 : SmiConstant(Smi::FromInt(MessageTemplate::kNoSetterInCallback));
872 : TailCallRuntime(Runtime::kThrowTypeError, p->context, message, p->name,
873 86 : var_accessor_holder.value());
874 : } else {
875 : DCHECK_EQ(SLOPPY, language_mode);
876 86 : Return(p->value);
877 : }
878 172 : }
879 : }
880 :
881 172 : BIND(&readonly);
882 : {
883 172 : if (language_mode == STRICT) {
884 : Node* message =
885 86 : SmiConstant(Smi::FromInt(MessageTemplate::kStrictReadOnlyProperty));
886 86 : Node* type = Typeof(p->receiver);
887 : TailCallRuntime(Runtime::kThrowTypeError, p->context, message, p->name,
888 86 : type, p->receiver);
889 : } else {
890 : DCHECK_EQ(SLOPPY, language_mode);
891 86 : Return(p->value);
892 : }
893 : }
894 :
895 172 : if (use_stub_cache == kUseStubCache) {
896 86 : BIND(&stub_cache);
897 86 : Comment("stub cache probe");
898 86 : VARIABLE(var_handler, MachineRepresentation::kTagged);
899 86 : Label found_handler(this, &var_handler), stub_cache_miss(this);
900 : TryProbeStubCache(isolate()->store_stub_cache(), receiver, p->name,
901 86 : &found_handler, &var_handler, &stub_cache_miss);
902 86 : BIND(&found_handler);
903 : {
904 86 : Comment("KeyedStoreGeneric found handler");
905 86 : HandleStoreICHandlerCase(p, var_handler.value(), &stub_cache_miss);
906 : }
907 86 : BIND(&stub_cache_miss);
908 : {
909 86 : Comment("KeyedStoreGeneric_miss");
910 : TailCallRuntime(Runtime::kKeyedStoreIC_Miss, p->context, p->value,
911 86 : p->slot, p->vector, p->receiver, p->name);
912 86 : }
913 172 : }
914 172 : }
915 :
916 86 : void KeyedStoreGenericAssembler::KeyedStoreGeneric(LanguageMode language_mode) {
917 : typedef StoreWithVectorDescriptor Descriptor;
918 :
919 86 : Node* receiver = Parameter(Descriptor::kReceiver);
920 86 : Node* name = Parameter(Descriptor::kName);
921 86 : Node* value = Parameter(Descriptor::kValue);
922 86 : Node* slot = Parameter(Descriptor::kSlot);
923 86 : Node* vector = Parameter(Descriptor::kVector);
924 86 : Node* context = Parameter(Descriptor::kContext);
925 :
926 86 : VARIABLE(var_index, MachineType::PointerRepresentation());
927 172 : VARIABLE(var_unique, MachineRepresentation::kTagged);
928 86 : var_unique.Bind(name); // Dummy initialization.
929 86 : Label if_index(this), if_unique_name(this), slow(this);
930 :
931 86 : GotoIf(TaggedIsSmi(receiver), &slow);
932 86 : Node* receiver_map = LoadMap(receiver);
933 86 : Node* instance_type = LoadMapInstanceType(receiver_map);
934 : // Receivers requiring non-standard element accesses (interceptors, access
935 : // checks, strings and string wrappers, proxies) are handled in the runtime.
936 : GotoIf(Int32LessThanOrEqual(instance_type,
937 : Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
938 86 : &slow);
939 :
940 86 : TryToName(name, &if_index, &var_index, &if_unique_name, &var_unique, &slow);
941 :
942 86 : BIND(&if_index);
943 : {
944 86 : Comment("integer index");
945 : EmitGenericElementStore(receiver, receiver_map, instance_type,
946 86 : var_index.value(), value, context, &slow);
947 : }
948 :
949 86 : BIND(&if_unique_name);
950 : {
951 86 : Comment("key is unique name");
952 : StoreICParameters p(context, receiver, var_unique.value(), value, slot,
953 86 : vector);
954 86 : EmitGenericPropertyStore(receiver, receiver_map, &p, &slow, language_mode);
955 : }
956 :
957 86 : BIND(&slow);
958 : {
959 86 : Comment("KeyedStoreGeneric_slow");
960 : TailCallRuntime(Runtime::kSetProperty, context, receiver, name, value,
961 86 : SmiConstant(language_mode));
962 86 : }
963 86 : }
964 :
965 86 : void KeyedStoreGenericAssembler::StoreIC_Uninitialized(
966 : LanguageMode language_mode) {
967 : typedef StoreWithVectorDescriptor Descriptor;
968 :
969 86 : Node* receiver = Parameter(Descriptor::kReceiver);
970 86 : Node* name = Parameter(Descriptor::kName);
971 86 : Node* value = Parameter(Descriptor::kValue);
972 86 : Node* slot = Parameter(Descriptor::kSlot);
973 86 : Node* vector = Parameter(Descriptor::kVector);
974 86 : Node* context = Parameter(Descriptor::kContext);
975 :
976 : Label miss(this);
977 :
978 86 : GotoIf(TaggedIsSmi(receiver), &miss);
979 86 : Node* receiver_map = LoadMap(receiver);
980 86 : Node* instance_type = LoadMapInstanceType(receiver_map);
981 : // Receivers requiring non-standard element accesses (interceptors, access
982 : // checks, strings and string wrappers, proxies) are handled in the runtime.
983 : GotoIf(Int32LessThanOrEqual(instance_type,
984 : Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
985 86 : &miss);
986 :
987 : // Optimistically write the state transition to the vector.
988 : StoreFixedArrayElement(vector, slot,
989 : LoadRoot(Heap::kpremonomorphic_symbolRootIndex),
990 86 : SKIP_WRITE_BARRIER, 0, SMI_PARAMETERS);
991 :
992 : StoreICParameters p(context, receiver, name, value, slot, vector);
993 : EmitGenericPropertyStore(receiver, receiver_map, &p, &miss, language_mode,
994 86 : kDontUseStubCache);
995 :
996 86 : BIND(&miss);
997 : {
998 : // Undo the optimistic state transition.
999 : StoreFixedArrayElement(vector, slot,
1000 : LoadRoot(Heap::kuninitialized_symbolRootIndex),
1001 86 : SKIP_WRITE_BARRIER, 0, SMI_PARAMETERS);
1002 : TailCallRuntime(Runtime::kStoreIC_Miss, context, value, slot, vector,
1003 86 : receiver, name);
1004 86 : }
1005 86 : }
1006 :
1007 : } // namespace internal
1008 : } // namespace v8
|