Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 : #include "src/code-stub-assembler.h"
5 : #include "src/code-factory.h"
6 : #include "src/frames-inl.h"
7 : #include "src/frames.h"
8 :
9 : namespace v8 {
10 : namespace internal {
11 :
12 : using compiler::Node;
13 : template <class T>
14 : using TNode = compiler::TNode<T>;
15 :
// Constructs a CodeStubAssembler on top of the shared CodeAssembler state.
// When --csa-trap-on-node is set (debug builds only), arms a trap that
// breaks when the named stub creates the given node id.
CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
    : compiler::CodeAssembler(state) {
  if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
    HandleBreakOnNode();
  }
}
22 :
23 0 : void CodeStubAssembler::HandleBreakOnNode() {
24 : // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
25 : // string specifying the name of a stub and NODE is number specifying node id.
26 0 : const char* name = state()->name();
27 0 : size_t name_length = strlen(name);
28 0 : if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
29 : // Different name.
30 0 : return;
31 : }
32 0 : size_t option_length = strlen(FLAG_csa_trap_on_node);
33 0 : if (option_length < name_length + 2 ||
34 0 : FLAG_csa_trap_on_node[name_length] != ',') {
35 : // Option is too short.
36 : return;
37 : }
38 0 : const char* start = &FLAG_csa_trap_on_node[name_length + 1];
39 : char* end;
40 0 : int node_id = static_cast<int>(strtol(start, &end, 10));
41 0 : if (start == end) {
42 : // Bad node id.
43 : return;
44 : }
45 0 : BreakOnNode(node_id);
46 : }
47 :
// Debug-only assertion: forwards to Check() only in DEBUG builds when
// --debug-code is enabled; in release builds this emits no code at all.
// The extra node/name pairs are optional values printed on failure.
void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
                               const char* message, const char* file, int line,
                               Node* extra_node1, const char* extra_node1_name,
                               Node* extra_node2, const char* extra_node2_name,
                               Node* extra_node3, const char* extra_node3_name,
                               Node* extra_node4, const char* extra_node4_name,
                               Node* extra_node5,
                               const char* extra_node5_name) {
#if defined(DEBUG)
  if (FLAG_debug_code) {
    Check(condition_body, message, file, line, extra_node1, extra_node1_name,
          extra_node2, extra_node2_name, extra_node3, extra_node3_name,
          extra_node4, extra_node4_name, extra_node5, extra_node5_name);
  }
#endif
}
64 :
#ifdef DEBUG
namespace {
// Emits a runtime print of {node}, labeled with {node_name}, for assertion
// diagnostics. Does nothing for a null node.
void MaybePrintNodeWithName(CodeStubAssembler* csa, Node* node,
                            const char* node_name) {
  if (node == nullptr) return;
  csa->CallRuntime(Runtime::kPrintWithNameForAssert, csa->SmiConstant(0),
                   csa->StringConstant(node_name), node);
}
}  // namespace
#endif
76 :
// Unconditional runtime check: evaluates {condition_body} and, if it fails,
// aborts with a "CSA_ASSERT failed" message (optionally including file:line
// and, in debug builds, the extra labeled nodes). The generated code falls
// through on success.
void CodeStubAssembler::Check(const NodeGenerator& condition_body,
                              const char* message, const char* file, int line,
                              Node* extra_node1, const char* extra_node1_name,
                              Node* extra_node2, const char* extra_node2_name,
                              Node* extra_node3, const char* extra_node3_name,
                              Node* extra_node4, const char* extra_node4_name,
                              Node* extra_node5, const char* extra_node5_name) {
  Label ok(this);
  // The failure path is deferred so it stays out of the hot code stream.
  Label not_ok(this, Label::kDeferred);
  if (message != nullptr && FLAG_code_comments) {
    Comment("[ Assert: %s", message);
  } else {
    Comment("[ Assert");
  }
  Node* condition = condition_body();
  DCHECK_NOT_NULL(condition);
  Branch(condition, &ok, &not_ok);

  BIND(&not_ok);
  DCHECK_NOT_NULL(message);
  // Build the abort message at stub-compile time; SNPrintF truncates safely
  // if the message exceeds the buffer.
  char chars[1024];
  Vector<char> buffer(chars);
  if (file != nullptr) {
    SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
  } else {
    SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
  }
  Node* message_node = StringConstant(&(buffer[0]));

#ifdef DEBUG
  // Only print the extra nodes in debug builds.
  MaybePrintNodeWithName(this, extra_node1, extra_node1_name);
  MaybePrintNodeWithName(this, extra_node2, extra_node2_name);
  MaybePrintNodeWithName(this, extra_node3, extra_node3_name);
  MaybePrintNodeWithName(this, extra_node4, extra_node4_name);
  MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
#endif

  DebugAbort(message_node);
  Unreachable();

  BIND(&ok);
  Comment("] Assert");
}
121 :
122 14270 : Node* CodeStubAssembler::Select(SloppyTNode<BoolT> condition,
123 : const NodeGenerator& true_body,
124 : const NodeGenerator& false_body,
125 : MachineRepresentation rep) {
126 14270 : VARIABLE(value, rep);
127 14270 : Label vtrue(this), vfalse(this), end(this);
128 14270 : Branch(condition, &vtrue, &vfalse);
129 :
130 : BIND(&vtrue);
131 : {
132 14270 : value.Bind(true_body());
133 14270 : Goto(&end);
134 : }
135 : BIND(&vfalse);
136 : {
137 14270 : value.Bind(false_body());
138 14270 : Goto(&end);
139 : }
140 :
141 : BIND(&end);
142 28540 : return value.value();
143 : }
144 :
145 12119 : Node* CodeStubAssembler::SelectConstant(Node* condition, Node* true_value,
146 : Node* false_value,
147 : MachineRepresentation rep) {
148 12119 : return Select(condition, [=] { return true_value; },
149 48476 : [=] { return false_value; }, rep);
150 : }
151 :
152 0 : Node* CodeStubAssembler::SelectInt32Constant(Node* condition, int true_value,
153 : int false_value) {
154 : return SelectConstant(condition, Int32Constant(true_value),
155 0 : Int32Constant(false_value),
156 0 : MachineRepresentation::kWord32);
157 : }
158 :
159 0 : Node* CodeStubAssembler::SelectIntPtrConstant(Node* condition, int true_value,
160 : int false_value) {
161 0 : return SelectConstant(condition, IntPtrConstant(true_value),
162 0 : IntPtrConstant(false_value),
163 0 : MachineType::PointerRepresentation());
164 : }
165 :
166 1016 : Node* CodeStubAssembler::SelectBooleanConstant(Node* condition) {
167 : return SelectConstant(condition, TrueConstant(), FalseConstant(),
168 1016 : MachineRepresentation::kTagged);
169 : }
170 :
171 2232 : Node* CodeStubAssembler::SelectSmiConstant(Node* condition, Smi* true_value,
172 : Smi* false_value) {
173 : return SelectConstant(condition, SmiConstant(true_value),
174 2232 : SmiConstant(false_value),
175 6696 : MachineRepresentation::kTaggedSigned);
176 : }
177 :
178 83056 : Node* CodeStubAssembler::NoContextConstant() { return SmiConstant(0); }
179 :
// Generates one <Name>Constant() accessor per entry of HEAP_CONSTANT_LIST.
// Each accessor loads a well-known heap object from the root list and casts
// it to the static type of the matching Heap::rootAccessorName() getter.
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)  \
  compiler::TNode<std::remove_reference<decltype(                      \
      *std::declval<Heap>().rootAccessorName())>::type>                \
      CodeStubAssembler::name##Constant() {                            \
    return UncheckedCast<std::remove_reference<decltype(               \
        *std::declval<Heap>().rootAccessorName())>::type>(             \
        LoadRoot(Heap::k##rootIndexName##RootIndex));                  \
  }
HEAP_CONSTANT_LIST(HEAP_CONSTANT_ACCESSOR);
#undef HEAP_CONSTANT_ACCESSOR
190 :
// Generates Is<Name>()/IsNot<Name>() predicates per HEAP_CONSTANT_LIST entry.
// Well-known immutable heap objects are singletons, so identity comparison
// (WordEqual on the tagged pointer) suffices.
#define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name) \
  compiler::TNode<BoolT> CodeStubAssembler::Is##name(             \
      SloppyTNode<Object> value) {                                \
    return WordEqual(value, name##Constant());                    \
  }                                                               \
  compiler::TNode<BoolT> CodeStubAssembler::IsNot##name(          \
      SloppyTNode<Object> value) {                                \
    return WordNotEqual(value, name##Constant());                 \
  }
HEAP_CONSTANT_LIST(HEAP_CONSTANT_TEST);
#undef HEAP_CONSTANT_TEST
202 :
203 0 : Node* CodeStubAssembler::HashSeed() {
204 477 : return LoadAndUntagToWord32Root(Heap::kHashSeedRootIndex);
205 : }
206 :
207 93 : Node* CodeStubAssembler::StaleRegisterConstant() {
208 186 : return LoadRoot(Heap::kStaleRegisterRootIndex);
209 : }
210 :
211 53917 : Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
212 53917 : if (mode == SMI_PARAMETERS) {
213 4870 : return SmiConstant(value);
214 : } else {
215 : DCHECK_EQ(INTPTR_PARAMETERS, mode);
216 102964 : return IntPtrConstant(value);
217 : }
218 : }
219 :
220 1271 : bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
221 : ParameterMode mode) {
222 : int32_t constant_test;
223 : Smi* smi_test;
224 1271 : if (mode == INTPTR_PARAMETERS) {
225 775 : if (ToInt32Constant(test, constant_test) && constant_test == 0) {
226 : return true;
227 : }
228 : } else {
229 : DCHECK_EQ(mode, SMI_PARAMETERS);
230 651 : if (ToSmiConstant(test, smi_test) && smi_test->value() == 0) {
231 : return true;
232 : }
233 : }
234 : return false;
235 : }
236 :
237 1728 : bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
238 : int* value,
239 : ParameterMode mode) {
240 : int32_t int32_constant;
241 1728 : if (mode == INTPTR_PARAMETERS) {
242 1369 : if (ToInt32Constant(maybe_constant, int32_constant)) {
243 415 : *value = int32_constant;
244 415 : return true;
245 : }
246 : } else {
247 : DCHECK_EQ(mode, SMI_PARAMETERS);
248 : Smi* smi_constant;
249 359 : if (ToSmiConstant(maybe_constant, smi_constant)) {
250 12 : *value = Smi::ToInt(smi_constant);
251 6 : return true;
252 : }
253 : }
254 : return false;
255 : }
256 :
// Rounds {value} up to the next power of two (values up to 2^31).
// Classic bit trick: subtract one, smear the highest set bit into all lower
// positions with cascading shift-or steps, then add one back.
Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) {
  Comment("IntPtrRoundUpToPowerOfTwo32");
  CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
  value = IntPtrSub(value, IntPtrConstant(1));
  // Shifts of 1, 2, 4, 8, 16 cover all 32 payload bits.
  for (int i = 1; i <= 16; i *= 2) {
    value = WordOr(value, WordShr(value, IntPtrConstant(i)));
  }
  return IntPtrAdd(value, IntPtrConstant(1));
}
266 :
267 0 : Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
268 0 : if (mode == SMI_PARAMETERS) {
269 0 : return TaggedIsSmi(value);
270 : } else {
271 0 : return Int32Constant(1);
272 : }
273 : }
274 :
// Tests whether {value} is a power of two, i.e. value && !(value & (value-1)).
// The Select maps the zero input to a non-zero dummy so that the final
// equality-with-zero check yields false for zero.
TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
  // value && !(value & (value - 1))
  return WordEqual(
      Select(
          WordEqual(value, IntPtrConstant(0)),
          [=] { return IntPtrConstant(1); },
          [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); },
          MachineType::PointerRepresentation()),
      IntPtrConstant(0));
}
285 :
// Rounds {x} to the nearest integer with ties rounding towards +Infinity:
// take ceil(x) and step down by one when ceil(x) overshot by more than 0.5.
TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
  Node* one = Float64Constant(1.0);
  Node* one_half = Float64Constant(0.5);

  Label return_x(this);

  // Round up {x} towards Infinity.
  VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));

  // Keep ceil(x) when ceil(x) - 0.5 <= x; otherwise x was closer to the
  // integer below, so subtract one.
  GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
         &return_x);
  var_x.Bind(Float64Sub(var_x.value(), one));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
303 :
// Computes ceil(x). Uses the machine rounding instruction when available;
// otherwise falls back to the 2^52 trick: adding and subtracting 2^52 snaps
// any double of smaller magnitude to an integral value, with a correction
// step for the rounding direction. Values with |x| >= 2^52 are already
// integral and returned unchanged.
TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundUpSupported()) {
    return Float64RoundUp(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
355 :
// Computes floor(x). Mirrors Float64Ceil: machine instruction when
// available, otherwise the 2^52 snap-to-integer trick with a correction in
// the opposite (downward) direction. Values with |x| >= 2^52 are already
// integral and returned unchanged.
TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundDownSupported()) {
    return Float64RoundDown(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
407 :
// Rounds {x} to the nearest integer with ties going to the even neighbor.
// Uses the machine ties-even instruction when available; otherwise compares
// {x} against floor(x) + 0.5 and, exactly at the midpoint, picks the even of
// the two neighbors via floor(x) mod 2.
TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundTiesEvenSupported()) {
    return Float64RoundTiesEven(x);
  }
  // See ES#sec-touint8clamp for details.
  Node* f = Float64Floor(x);
  Node* f_and_half = Float64Add(f, Float64Constant(0.5));

  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label return_f(this), return_f_plus_one(this), done(this);

  GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
  GotoIf(Float64LessThan(x, f_and_half), &return_f);
  {
    // Exactly at the midpoint: choose the even neighbor.
    Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
    Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
           &return_f_plus_one);
  }

  BIND(&return_f);
  var_result.Bind(f);
  Goto(&done);

  BIND(&return_f_plus_one);
  var_result.Bind(Float64Add(f, Float64Constant(1.0)));
  Goto(&done);

  BIND(&done);
  return TNode<Float64T>::UncheckedCast(var_result.value());
}
438 :
// Truncates {x} towards zero. Uses the machine truncate instruction when
// available; otherwise rounds positive inputs down and negative inputs up,
// each via the single-direction rounding instruction if the target has one,
// or via the 2^52 snap-to-integer trick. |x| >= 2^52 is already integral.
TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundTruncateSupported()) {
    return Float64RoundTruncate(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    if (IsFloat64RoundDownSupported()) {
      var_x.Bind(Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    if (IsFloat64RoundUpSupported()) {
      var_x.Bind(Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless its in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoIfNot(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
499 :
500 0 : Node* CodeStubAssembler::SmiShiftBitsConstant() {
501 215466 : return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
502 : }
503 :
504 5455 : TNode<Smi> CodeStubAssembler::SmiFromWord32(SloppyTNode<Int32T> value) {
505 5455 : TNode<IntPtrT> value_intptr = ChangeInt32ToIntPtr(value);
506 : return BitcastWordToTaggedSigned(
507 10910 : WordShl(value_intptr, SmiShiftBitsConstant()));
508 : }
509 :
510 65000 : TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
511 : int32_t constant_value;
512 65000 : if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
513 25604 : return SmiConstant(constant_value);
514 : }
515 78792 : return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
516 : }
517 :
518 62970 : TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
519 : intptr_t constant_value;
520 62970 : if (ToIntPtrConstant(value, constant_value)) {
521 280 : return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
522 : }
523 : return UncheckedCast<IntPtrT>(
524 125380 : WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()));
525 : }
526 :
527 24613 : TNode<Int32T> CodeStubAssembler::SmiToWord32(SloppyTNode<Smi> value) {
528 24613 : TNode<IntPtrT> result = SmiUntag(value);
529 24613 : return TruncateWordToWord32(result);
530 : }
531 :
532 10428 : TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
533 20856 : return ChangeInt32ToFloat64(SmiToWord32(value));
534 : }
535 :
536 496 : TNode<Smi> CodeStubAssembler::SmiMax(SloppyTNode<Smi> a, SloppyTNode<Smi> b) {
537 992 : return SelectTaggedConstant(SmiLessThan(a, b), b, a);
538 : }
539 :
540 186 : TNode<Smi> CodeStubAssembler::SmiMin(SloppyTNode<Smi> a, SloppyTNode<Smi> b) {
541 372 : return SelectTaggedConstant(SmiLessThan(a, b), a, b);
542 : }
543 :
544 6 : TNode<Object> CodeStubAssembler::NumberMax(SloppyTNode<Object> a,
545 : SloppyTNode<Object> b) {
546 : // TODO(danno): This could be optimized by specifically handling smi cases.
547 6 : VARIABLE(result, MachineRepresentation::kTagged);
548 6 : Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
549 6 : GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
550 6 : GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
551 6 : result.Bind(NanConstant());
552 6 : Goto(&done);
553 : BIND(&greater_than_equal_a);
554 6 : result.Bind(a);
555 6 : Goto(&done);
556 : BIND(&greater_than_equal_b);
557 6 : result.Bind(b);
558 6 : Goto(&done);
559 : BIND(&done);
560 12 : return TNode<Object>::UncheckedCast(result.value());
561 : }
562 :
563 6 : TNode<Object> CodeStubAssembler::NumberMin(SloppyTNode<Object> a,
564 : SloppyTNode<Object> b) {
565 : // TODO(danno): This could be optimized by specifically handling smi cases.
566 6 : VARIABLE(result, MachineRepresentation::kTagged);
567 6 : Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
568 6 : GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
569 6 : GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
570 6 : result.Bind(NanConstant());
571 6 : Goto(&done);
572 : BIND(&greater_than_equal_a);
573 6 : result.Bind(b);
574 6 : Goto(&done);
575 : BIND(&greater_than_equal_b);
576 6 : result.Bind(a);
577 6 : Goto(&done);
578 : BIND(&done);
579 12 : return TNode<Object>::UncheckedCast(result.value());
580 : }
581 :
// Computes a % b for Smi operands with full JavaScript semantics: the result
// is NaN for a zero divisor and -0.0 when the remainder is zero but the
// dividend was negative. Returns a tagged Number (Smi or HeapNumber).
Node* CodeStubAssembler::SmiMod(Node* a, Node* b) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_result(this, &var_result),
      return_minuszero(this, Label::kDeferred),
      return_nan(this, Label::kDeferred);

  // Untag {a} and {b}.
  a = SmiToWord32(a);
  b = SmiToWord32(b);

  // Return NaN if {b} is zero.
  GotoIf(Word32Equal(b, Int32Constant(0)), &return_nan);

  // Check if {a} is non-negative.
  Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
  Branch(Int32LessThanOrEqual(Int32Constant(0), a), &if_aisnotnegative,
         &if_aisnegative);

  BIND(&if_aisnotnegative);
  {
    // Fast case, don't need to check any other edge cases.
    Node* r = Int32Mod(a, b);
    var_result.Bind(SmiFromWord32(r));
    Goto(&return_result);
  }

  BIND(&if_aisnegative);
  {
    if (SmiValuesAre32Bits()) {
      // Check if {a} is kMinInt and {b} is -1 (only relevant if the
      // kMinInt is actually representable as a Smi).
      Label join(this);
      GotoIfNot(Word32Equal(a, Int32Constant(kMinInt)), &join);
      GotoIf(Word32Equal(b, Int32Constant(-1)), &return_minuszero);
      Goto(&join);
      BIND(&join);
    }

    // Perform the integer modulus operation.
    Node* r = Int32Mod(a, b);

    // Check if {r} is zero, and if so return -0, because we have to
    // take the sign of the left hand side {a}, which is negative.
    GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);

    // The remainder {r} can be outside the valid Smi range on 32bit
    // architectures, so we cannot just say SmiFromWord32(r) here.
    var_result.Bind(ChangeInt32ToTagged(r));
    Goto(&return_result);
  }

  BIND(&return_minuszero);
  var_result.Bind(MinusZeroConstant());
  Goto(&return_result);

  BIND(&return_nan);
  var_result.Bind(NanConstant());
  Goto(&return_result);

  BIND(&return_result);
  return TNode<Object>::UncheckedCast(var_result.value());
}
644 :
// Computes a * b for Smi operands with JavaScript semantics. Handles the
// -0.0 case (zero product with a negative operand) and falls back to a
// HeapNumber result when the 32-bit multiplication overflows.
Node* CodeStubAssembler::SmiMul(Node* a, Node* b) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
  VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
  Label return_result(this, &var_result);

  // Both {a} and {b} are Smis. Convert them to integers and multiply.
  Node* lhs32 = SmiToWord32(a);
  Node* rhs32 = SmiToWord32(b);
  Node* pair = Int32MulWithOverflow(lhs32, rhs32);

  Node* overflow = Projection(1, pair);

  // Check if the multiplication overflowed.
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_notoverflow);
  {
    // If the answer is zero, we may need to return -0.0, depending on the
    // input.
    Label answer_zero(this), answer_not_zero(this);
    Node* answer = Projection(0, pair);
    Node* zero = Int32Constant(0);
    Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
    BIND(&answer_not_zero);
    {
      var_result.Bind(ChangeInt32ToTagged(answer));
      Goto(&return_result);
    }
    BIND(&answer_zero);
    {
      // A negative sign bit in either operand means the true product is -0.0.
      Node* or_result = Word32Or(lhs32, rhs32);
      Label if_should_be_negative_zero(this), if_should_be_zero(this);
      Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
             &if_should_be_zero);
      BIND(&if_should_be_negative_zero);
      {
        var_result.Bind(MinusZeroConstant());
        Goto(&return_result);
      }
      BIND(&if_should_be_zero);
      {
        var_result.Bind(SmiConstant(0));
        Goto(&return_result);
      }
    }
  }
  BIND(&if_overflow);
  {
    // Overflow: redo the multiplication in float64 and box the result.
    var_lhs_float64.Bind(SmiToFloat64(a));
    var_rhs_float64.Bind(SmiToFloat64(b));
    Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
705 :
// Attempts dividend / divisor as an exact Smi division. Jumps to {bailout}
// whenever the result is not representable as a Smi: zero divisor, a -0.0
// result (zero dividend with a negative divisor), kMinInt / -1 overflow, or
// a non-zero remainder.
Node* CodeStubAssembler::TrySmiDiv(Node* dividend, Node* divisor,
                                   Label* bailout) {
  // Both {a} and {b} are Smis. Bailout to floating point division if {divisor}
  // is zero.
  GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);

  // Do floating point division if {dividend} is zero and {divisor} is
  // negative.
  Label dividend_is_zero(this), dividend_is_not_zero(this);
  Branch(WordEqual(dividend, SmiConstant(0)), &dividend_is_zero,
         &dividend_is_not_zero);

  BIND(&dividend_is_zero);
  {
    GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
    Goto(&dividend_is_not_zero);
  }
  BIND(&dividend_is_not_zero);

  Node* untagged_divisor = SmiToWord32(divisor);
  Node* untagged_dividend = SmiToWord32(dividend);

  // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
  // if the Smi size is 31) and {divisor} is -1.
  Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
  Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
         &divisor_is_minus_one, &divisor_is_not_minus_one);

  BIND(&divisor_is_minus_one);
  {
    GotoIf(Word32Equal(
               untagged_dividend,
               Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
           bailout);
    Goto(&divisor_is_not_minus_one);
  }
  BIND(&divisor_is_not_minus_one);

  Node* untagged_result = Int32Div(untagged_dividend, untagged_divisor);
  Node* truncated = Int32Mul(untagged_result, untagged_divisor);

  // Do floating point division if the remainder is not 0.
  GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);

  return SmiFromWord32(untagged_result);
}
752 :
753 28926 : TNode<Int32T> CodeStubAssembler::TruncateWordToWord32(
754 : SloppyTNode<IntPtrT> value) {
755 28926 : if (Is64()) {
756 28926 : return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
757 : }
758 : return ReinterpretCast<Int32T>(value);
759 : }
760 :
761 64411 : TNode<BoolT> CodeStubAssembler::TaggedIsSmi(SloppyTNode<Object> a) {
762 64411 : return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
763 128822 : IntPtrConstant(0));
764 : }
765 :
766 8393 : TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(SloppyTNode<Object> a) {
767 : return WordNotEqual(
768 8393 : WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
769 16786 : IntPtrConstant(0));
770 : }
771 :
772 2176 : TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
773 : return WordEqual(WordAnd(BitcastTaggedToWord(a),
774 2176 : IntPtrConstant(kSmiTagMask | kSmiSignMask)),
775 4352 : IntPtrConstant(0));
776 : }
777 :
778 0 : TNode<BoolT> CodeStubAssembler::WordIsWordAligned(SloppyTNode<WordT> word) {
779 : return WordEqual(IntPtrConstant(0),
780 0 : WordAnd(word, IntPtrConstant(kPointerSize - 1)));
781 : }
782 :
// Thin forwarders to CodeAssembler::Bind; the DEBUG variant threads through
// extra debug info used for better CSA error reporting.
#if DEBUG
void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
  CodeAssembler::Bind(label, debug_info);
}
#else
void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
#endif  // DEBUG
790 :
// Walks the prototype chain starting at {receiver_map} and branches to
// {definitely_no_elements} when every prototype up to null has empty
// elements, or to {possibly_elements} as soon as a prototype might carry
// elements (custom-elements receivers are assumed to).
void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
    Node* receiver_map, Label* definitely_no_elements,
    Label* possibly_elements) {
  CSA_SLOW_ASSERT(this, IsMap(receiver_map));
  VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
  Label loop_body(this, &var_map);
  // Both "no elements" sentinels: a fast empty backing store and an empty
  // slow (dictionary) backing store.
  Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
  Node* empty_slow_element_dictionary =
      LoadRoot(Heap::kEmptySlowElementDictionaryRootIndex);
  Goto(&loop_body);

  BIND(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements);
    Node* prototype_map = LoadMap(prototype);
    // Pessimistically assume elements if a Proxy, Special API Object,
    // or JSValue wrapper is found on the prototype chain. After this
    // instance type check, it's not necessary to check for interceptors or
    // access checks.
    GotoIf(Int32LessThanOrEqual(LoadMapInstanceType(prototype_map),
                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
           possibly_elements);
    Node* prototype_elements = LoadElements(prototype);
    var_map.Bind(prototype_map);
    GotoIf(WordEqual(prototype_elements, empty_fixed_array), &loop_body);
    Branch(WordEqual(prototype_elements, empty_slow_element_dictionary),
           &loop_body, possibly_elements);
  }
}
822 :
823 93 : void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
824 : Label* if_false) {
825 186 : GotoIf(TaggedIsSmi(object), if_false);
826 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
827 186 : Branch(IsJSReceiver(object), if_true, if_false);
828 93 : }
829 :
830 93 : void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true,
831 : Label* if_false) {
832 186 : GotoIf(TaggedIsSmi(object), if_false);
833 : STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
834 186 : Branch(IsJSObject(object), if_true, if_false);
835 93 : }
836 :
// Branches to {if_true} only for JSArrays that are safe for fast-path
// iteration: fast elements kind, the initial Array prototype, and an intact
// array protector cell; everything else goes to {if_false}.
void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
                                            Label* if_true, Label* if_false) {
  // Bailout if receiver is a Smi.
  GotoIf(TaggedIsSmi(object), if_false);

  Node* map = LoadMap(object);
  GotoIfNot(IsJSArrayMap(map), if_false);

  // Bailout if receiver has slow elements.
  Node* elements_kind = LoadMapElementsKind(map);
  GotoIfNot(IsFastElementsKind(elements_kind), if_false);

  // Check prototype chain if receiver does not have packed elements
  GotoIfNot(IsPrototypeInitialArrayPrototype(context, map), if_false);

  // The array protector guards against elements having been added to the
  // prototype chain after the fast-path assumptions were made.
  Branch(IsArrayProtectorCellInvalid(), if_false, if_true);
}
854 :
855 0 : void CodeStubAssembler::BranchIfFastJSArrayForCopy(Node* object, Node* context,
856 : Label* if_true,
857 : Label* if_false) {
858 0 : GotoIf(IsSpeciesProtectorCellInvalid(), if_false);
859 0 : BranchIfFastJSArray(object, context, if_true, if_false);
860 0 : }
861 :
862 34841 : Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags,
863 : Node* top_address, Node* limit_address) {
864 34841 : Node* top = Load(MachineType::Pointer(), top_address);
865 34841 : Node* limit = Load(MachineType::Pointer(), limit_address);
866 :
867 : // If there's not enough space, call the runtime.
868 34841 : VARIABLE(result, MachineRepresentation::kTagged);
869 34841 : Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
870 34841 : Label merge_runtime(this, &result);
871 :
872 34841 : bool needs_double_alignment = flags & kDoubleAlignment;
873 :
874 34841 : if (flags & kAllowLargeObjectAllocation) {
875 : Label next(this);
876 2462 : GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
877 :
878 : Node* runtime_flags = SmiConstant(
879 1231 : Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
880 3693 : AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
881 : Node* const runtime_result =
882 : CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
883 2462 : SmiTag(size_in_bytes), runtime_flags);
884 1231 : result.Bind(runtime_result);
885 1231 : Goto(&merge_runtime);
886 :
887 1231 : BIND(&next);
888 : }
889 :
890 69682 : VARIABLE(adjusted_size, MachineType::PointerRepresentation(), size_in_bytes);
891 :
892 34841 : if (needs_double_alignment) {
893 0 : Label not_aligned(this), done_alignment(this, &adjusted_size);
894 :
895 0 : Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), ¬_aligned,
896 0 : &done_alignment);
897 :
898 : BIND(¬_aligned);
899 0 : Node* not_aligned_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
900 0 : adjusted_size.Bind(not_aligned_size);
901 0 : Goto(&done_alignment);
902 :
903 0 : BIND(&done_alignment);
904 : }
905 :
906 104523 : Node* new_top = IntPtrAdd(top, adjusted_size.value());
907 :
908 34841 : Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
909 69682 : &no_runtime_call);
910 :
911 : BIND(&runtime_call);
912 : Node* runtime_result;
913 34841 : if (flags & kPretenured) {
914 : Node* runtime_flags = SmiConstant(
915 1333 : Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
916 3999 : AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
917 : runtime_result =
918 : CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
919 2666 : SmiTag(size_in_bytes), runtime_flags);
920 : } else {
921 : runtime_result = CallRuntime(Runtime::kAllocateInNewSpace,
922 67016 : NoContextConstant(), SmiTag(size_in_bytes));
923 : }
924 34841 : result.Bind(runtime_result);
925 34841 : Goto(&merge_runtime);
926 :
927 : // When there is enough space, return `top' and bump it up.
928 : BIND(&no_runtime_call);
929 : Node* no_runtime_result = top;
930 : StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
931 34841 : new_top);
932 :
933 69682 : VARIABLE(address, MachineType::PointerRepresentation(), no_runtime_result);
934 :
935 34841 : if (needs_double_alignment) {
936 0 : Label needs_filler(this), done_filling(this, &address);
937 0 : Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling,
938 0 : &needs_filler);
939 :
940 : BIND(&needs_filler);
941 : // Store a filler and increase the address by kPointerSize.
942 : StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
943 0 : LoadRoot(Heap::kOnePointerFillerMapRootIndex));
944 0 : address.Bind(IntPtrAdd(no_runtime_result, IntPtrConstant(4)));
945 :
946 0 : Goto(&done_filling);
947 :
948 0 : BIND(&done_filling);
949 : }
950 :
951 69682 : no_runtime_result = BitcastWordToTagged(
952 139364 : IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
953 :
954 34841 : result.Bind(no_runtime_result);
955 34841 : Goto(&merge_runtime);
956 :
957 : BIND(&merge_runtime);
958 69682 : return result.value();
959 : }
960 :
961 0 : Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
962 : AllocationFlags flags,
963 : Node* top_address,
964 : Node* limit_address) {
965 : DCHECK_EQ(flags & kDoubleAlignment, 0);
966 33877 : return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
967 : }
968 :
969 0 : Node* CodeStubAssembler::AllocateRawDoubleAligned(Node* size_in_bytes,
970 : AllocationFlags flags,
971 : Node* top_address,
972 : Node* limit_address) {
973 : #if defined(V8_HOST_ARCH_32_BIT)
974 : return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
975 : limit_address);
976 : #elif defined(V8_HOST_ARCH_64_BIT)
977 : // Allocation on 64 bit machine is naturally double aligned
978 : return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
979 964 : limit_address);
980 : #else
981 : #error Architecture not supported
982 : #endif
983 : }
984 :
985 403 : Node* CodeStubAssembler::AllocateInNewSpace(Node* size_in_bytes,
986 : AllocationFlags flags) {
987 : DCHECK(flags == kNone || flags == kDoubleAlignment);
988 : CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
989 7847 : return Allocate(size_in_bytes, flags);
990 : }
991 :
992 34841 : Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
993 34841 : Comment("Allocate");
994 : bool const new_space = !(flags & kPretenured);
995 : Node* top_address = ExternalConstant(
996 : new_space
997 33508 : ? ExternalReference::new_space_allocation_top_address(isolate())
998 103190 : : ExternalReference::old_space_allocation_top_address(isolate()));
999 : DCHECK_EQ(kPointerSize,
1000 : ExternalReference::new_space_allocation_limit_address(isolate())
1001 : .address() -
1002 : ExternalReference::new_space_allocation_top_address(isolate())
1003 : .address());
1004 : DCHECK_EQ(kPointerSize,
1005 : ExternalReference::old_space_allocation_limit_address(isolate())
1006 : .address() -
1007 : ExternalReference::old_space_allocation_top_address(isolate())
1008 : .address());
1009 104523 : Node* limit_address = IntPtrAdd(top_address, IntPtrConstant(kPointerSize));
1010 :
1011 34841 : if (flags & kDoubleAlignment) {
1012 : return AllocateRawDoubleAligned(size_in_bytes, flags, top_address,
1013 964 : limit_address);
1014 : } else {
1015 : return AllocateRawUnaligned(size_in_bytes, flags, top_address,
1016 33877 : limit_address);
1017 : }
1018 : }
1019 :
1020 1232 : Node* CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
1021 : AllocationFlags flags) {
1022 1232 : CHECK(flags == kNone || flags == kDoubleAlignment);
1023 : DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
1024 2464 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1025 : }
1026 :
1027 17971 : Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
1028 35942 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1029 : }
1030 :
1031 2511 : Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
1032 10044 : return BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset));
1033 : }
1034 :
1035 1953 : Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
1036 3906 : return InnerAllocate(previous, IntPtrConstant(offset));
1037 : }
1038 :
1039 1231 : Node* CodeStubAssembler::IsRegularHeapObjectSize(Node* size) {
1040 : return UintPtrLessThanOrEqual(size,
1041 3693 : IntPtrConstant(kMaxRegularHeapObjectSize));
1042 : }
1043 :
1044 1798 : void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
1045 : Label* if_false) {
1046 3596 : Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
1047 1798 : if_bigint(this, Label::kDeferred);
1048 : // Rule out false {value}.
1049 3596 : GotoIf(WordEqual(value, BooleanConstant(false)), if_false);
1050 :
1051 : // Check if {value} is a Smi or a HeapObject.
1052 3596 : Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);
1053 :
1054 : BIND(&if_smi);
1055 : {
1056 : // The {value} is a Smi, only need to check against zero.
1057 3596 : BranchIfSmiEqual(value, SmiConstant(0), if_false, if_true);
1058 : }
1059 :
1060 : BIND(&if_notsmi);
1061 : {
1062 : // Check if {value} is the empty string.
1063 3596 : GotoIf(IsEmptyString(value), if_false);
1064 :
1065 : // The {value} is a HeapObject, load its map.
1066 3596 : Node* value_map = LoadMap(value);
1067 :
1068 : // Only null, undefined and document.all have the undetectable bit set,
1069 : // so we can return false immediately when that bit is set.
1070 3596 : GotoIf(IsUndetectableMap(value_map), if_false);
1071 :
1072 : // We still need to handle numbers specially, but all other {value}s
1073 : // that make it here yield true.
1074 3596 : GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
1075 3596 : Branch(IsBigInt(value), &if_bigint, if_true);
1076 :
1077 : BIND(&if_heapnumber);
1078 : {
1079 : // Load the floating point value of {value}.
1080 : Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
1081 1798 : MachineType::Float64());
1082 :
1083 : // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
1084 5394 : Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
1085 3596 : if_true, if_false);
1086 : }
1087 :
1088 : BIND(&if_bigint);
1089 : {
1090 : Node* result =
1091 : CallRuntime(Runtime::kBigIntToBoolean, NoContextConstant(), value);
1092 : CSA_ASSERT(this, IsBoolean(result));
1093 3596 : Branch(WordEqual(result, BooleanConstant(true)), if_true, if_false);
1094 : }
1095 1798 : }
1096 1798 : }
1097 :
1098 248 : Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
1099 248 : Node* frame_pointer = LoadFramePointer();
1100 496 : return Load(rep, frame_pointer, IntPtrConstant(offset));
1101 : }
1102 :
1103 186 : Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
1104 186 : Node* frame_pointer = LoadParentFramePointer();
1105 372 : return Load(rep, frame_pointer, IntPtrConstant(offset));
1106 : }
1107 :
1108 3215 : Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
1109 : MachineType rep) {
1110 6430 : return Load(rep, buffer, IntPtrConstant(offset));
1111 : }
1112 :
1113 425002 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1114 : int offset, MachineType rep) {
1115 850004 : return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
1116 : }
1117 :
1118 4941 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1119 : SloppyTNode<IntPtrT> offset,
1120 : MachineType rep) {
1121 9882 : return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
1122 : }
1123 :
1124 4973 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
1125 : SloppyTNode<HeapObject> object, int offset) {
1126 4973 : if (Is64()) {
1127 : #if V8_TARGET_LITTLE_ENDIAN
1128 4973 : offset += kPointerSize / 2;
1129 : #endif
1130 : return ChangeInt32ToIntPtr(
1131 9946 : LoadObjectField(object, offset, MachineType::Int32()));
1132 : } else {
1133 0 : return SmiToWord(LoadObjectField(object, offset, MachineType::AnyTagged()));
1134 : }
1135 : }
1136 :
1137 3236 : TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
1138 : int offset) {
1139 3236 : if (Is64()) {
1140 : #if V8_TARGET_LITTLE_ENDIAN
1141 3236 : offset += kPointerSize / 2;
1142 : #endif
1143 : return UncheckedCast<Int32T>(
1144 3236 : LoadObjectField(object, offset, MachineType::Int32()));
1145 : } else {
1146 : return SmiToWord32(
1147 0 : LoadObjectField(object, offset, MachineType::AnyTagged()));
1148 : }
1149 : }
1150 :
1151 1302 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
1152 1302 : if (Is64()) {
1153 : #if V8_TARGET_LITTLE_ENDIAN
1154 1302 : index += kPointerSize / 2;
1155 : #endif
1156 : return ChangeInt32ToIntPtr(
1157 3906 : Load(MachineType::Int32(), base, IntPtrConstant(index)));
1158 : } else {
1159 : return SmiToWord(
1160 0 : Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
1161 : }
1162 : }
1163 :
1164 477 : Node* CodeStubAssembler::LoadAndUntagToWord32Root(
1165 : Heap::RootListIndex root_index) {
1166 : Node* roots_array_start =
1167 954 : ExternalConstant(ExternalReference::roots_array_start(isolate()));
1168 477 : int index = root_index * kPointerSize;
1169 477 : if (Is64()) {
1170 : #if V8_TARGET_LITTLE_ENDIAN
1171 477 : index += kPointerSize / 2;
1172 : #endif
1173 954 : return Load(MachineType::Int32(), roots_array_start, IntPtrConstant(index));
1174 : } else {
1175 : return SmiToWord32(Load(MachineType::AnyTagged(), roots_array_start,
1176 0 : IntPtrConstant(index)));
1177 : }
1178 : }
1179 :
1180 33466 : Node* CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
1181 33466 : if (Is64()) {
1182 33466 : int zero_offset = offset + kPointerSize / 2;
1183 : int payload_offset = offset;
1184 : #if V8_TARGET_LITTLE_ENDIAN
1185 : std::swap(zero_offset, payload_offset);
1186 : #endif
1187 : StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1188 100398 : IntPtrConstant(zero_offset), Int32Constant(0));
1189 : return StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1190 33466 : IntPtrConstant(payload_offset),
1191 100398 : TruncateInt64ToInt32(value));
1192 : } else {
1193 : return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
1194 0 : IntPtrConstant(offset), SmiTag(value));
1195 : }
1196 : }
1197 :
1198 31952 : TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1199 : SloppyTNode<HeapNumber> object) {
1200 : return TNode<Float64T>::UncheckedCast(LoadObjectField(
1201 31952 : object, HeapNumber::kValueOffset, MachineType::Float64()));
1202 : }
1203 :
1204 104834 : TNode<Map> CodeStubAssembler::LoadMap(SloppyTNode<HeapObject> object) {
1205 104834 : return UncheckedCast<Map>(LoadObjectField(object, HeapObject::kMapOffset));
1206 : }
1207 :
1208 34900 : TNode<Int32T> CodeStubAssembler::LoadInstanceType(
1209 : SloppyTNode<HeapObject> object) {
1210 69800 : return LoadMapInstanceType(LoadMap(object));
1211 : }
1212 :
1213 5091 : Node* CodeStubAssembler::HasInstanceType(Node* object,
1214 : InstanceType instance_type) {
1215 15273 : return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1216 : }
1217 :
1218 496 : Node* CodeStubAssembler::DoesntHaveInstanceType(Node* object,
1219 : InstanceType instance_type) {
1220 1984 : return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1221 : }
1222 :
1223 0 : Node* CodeStubAssembler::TaggedDoesntHaveInstanceType(Node* any_tagged,
1224 : InstanceType type) {
1225 : /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
1226 0 : Node* tagged_is_smi = TaggedIsSmi(any_tagged);
1227 0 : return Select(tagged_is_smi, [=]() { return tagged_is_smi; },
1228 0 : [=]() { return DoesntHaveInstanceType(any_tagged, type); },
1229 0 : MachineRepresentation::kBit);
1230 : }
1231 :
1232 217 : TNode<Int32T> CodeStubAssembler::LoadHashForJSObject(
1233 : SloppyTNode<JSObject> jsobject, SloppyTNode<Int32T> instance_type) {
1234 217 : VARIABLE(var_hash, MachineRepresentation::kWord32);
1235 217 : Label if_global_proxy(this, Label::kDeferred);
1236 217 : GotoIf(IsJSGlobalProxyInstanceType(instance_type), &if_global_proxy);
1237 :
1238 : Node* properties_or_hash =
1239 : LoadObjectField(jsobject, JSObject::kPropertiesOrHashOffset);
1240 :
1241 217 : Label if_smi(this);
1242 434 : GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);
1243 :
1244 434 : Node* type = LoadInstanceType(properties_or_hash);
1245 217 : Label if_property_array(this), if_property_dictionary(this), done(this);
1246 434 : GotoIf(InstanceTypeEqual(type, PROPERTY_ARRAY_TYPE), &if_property_array);
1247 434 : GotoIf(InstanceTypeEqual(type, HASH_TABLE_TYPE), &if_property_dictionary);
1248 :
1249 434 : var_hash.Bind(Int32Constant(PropertyArray::kNoHashSentinel));
1250 217 : Goto(&done);
1251 :
1252 : BIND(&if_smi);
1253 : {
1254 434 : var_hash.Bind(SmiToWord32(properties_or_hash));
1255 217 : Goto(&done);
1256 : }
1257 :
1258 : BIND(&if_property_array);
1259 : {
1260 : Node* length_and_hash_int32 = LoadAndUntagToWord32ObjectField(
1261 434 : properties_or_hash, PropertyArray::kLengthAndHashOffset);
1262 : var_hash.Bind(
1263 217 : DecodeWord32<PropertyArray::HashField>(length_and_hash_int32));
1264 217 : Goto(&done);
1265 : }
1266 :
1267 : BIND(&if_property_dictionary);
1268 : {
1269 : var_hash.Bind(SmiToWord32(LoadFixedArrayElement(
1270 651 : properties_or_hash, NameDictionary::kObjectHashIndex)));
1271 217 : Goto(&done);
1272 : }
1273 :
1274 : BIND(&if_global_proxy);
1275 : {
1276 : Node* hash = LoadObjectField(jsobject, JSGlobalProxy::kHashOffset);
1277 : var_hash.Bind(SelectConstant(TaggedIsSmi(hash), SmiToWord32(hash),
1278 : Int32Constant(PropertyArray::kNoHashSentinel),
1279 868 : MachineRepresentation::kWord32));
1280 217 : Goto(&done);
1281 : }
1282 :
1283 : BIND(&done);
1284 434 : return UncheckedCast<Int32T>(var_hash.value());
1285 : }
1286 :
1287 3044 : TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
1288 : SloppyTNode<JSObject> object) {
1289 : CSA_SLOW_ASSERT(this, Word32Not(IsDictionaryMap(LoadMap(object))));
1290 : Node* properties = LoadObjectField(object, JSObject::kPropertiesOrHashOffset);
1291 : return SelectTaggedConstant<HeapObject>(
1292 6088 : TaggedIsSmi(properties), EmptyFixedArrayConstant(), properties);
1293 : }
1294 :
1295 4178 : TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
1296 : SloppyTNode<JSObject> object) {
1297 : CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
1298 : Node* properties = LoadObjectField(object, JSObject::kPropertiesOrHashOffset);
1299 : return SelectTaggedConstant<HeapObject>(
1300 8356 : TaggedIsSmi(properties), EmptyPropertyDictionaryConstant(), properties);
1301 : }
1302 :
1303 12334 : TNode<FixedArrayBase> CodeStubAssembler::LoadElements(
1304 : SloppyTNode<JSObject> object) {
1305 12334 : return CAST(LoadObjectField(object, JSObject::kElementsOffset));
1306 : }
1307 :
1308 7424 : TNode<Object> CodeStubAssembler::LoadJSArrayLength(SloppyTNode<JSArray> array) {
1309 : CSA_ASSERT(this, IsJSArray(array));
1310 7424 : return CAST(LoadObjectField(array, JSArray::kLengthOffset));
1311 : }
1312 :
1313 155 : TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(
1314 : SloppyTNode<JSArray> array) {
1315 1082 : TNode<Object> length = LoadJSArrayLength(array);
1316 : CSA_ASSERT(this, IsFastElementsKind(LoadMapElementsKind(LoadMap(array))));
1317 : // JSArray length is always a positive Smi for fast arrays.
1318 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
1319 155 : return UncheckedCast<Smi>(length);
1320 : }
1321 :
1322 4232 : TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
1323 : SloppyTNode<FixedArrayBase> array) {
1324 4232 : return CAST(LoadObjectField(array, FixedArrayBase::kLengthOffset));
1325 : }
1326 :
1327 558 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
1328 : SloppyTNode<FixedArrayBase> array) {
1329 1966 : return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1330 : }
1331 :
1332 12677 : TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
1333 : CSA_SLOW_ASSERT(this, IsMap(map));
1334 : return UncheckedCast<Int32T>(
1335 12677 : LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8()));
1336 : }
1337 :
1338 2858 : TNode<Int32T> CodeStubAssembler::LoadMapBitField2(SloppyTNode<Map> map) {
1339 : CSA_SLOW_ASSERT(this, IsMap(map));
1340 : return UncheckedCast<Int32T>(
1341 2858 : LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()));
1342 : }
1343 :
1344 2833 : TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(SloppyTNode<Map> map) {
1345 : CSA_SLOW_ASSERT(this, IsMap(map));
1346 : return UncheckedCast<Uint32T>(
1347 2833 : LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()));
1348 : }
1349 :
1350 57541 : TNode<Int32T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
1351 : return UncheckedCast<Int32T>(
1352 57541 : LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8()));
1353 : }
1354 :
1355 2362 : TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
1356 : CSA_SLOW_ASSERT(this, IsMap(map));
1357 4724 : Node* bit_field2 = LoadMapBitField2(map);
1358 2362 : return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
1359 : }
1360 :
1361 4104 : TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
1362 : SloppyTNode<Map> map) {
1363 : CSA_SLOW_ASSERT(this, IsMap(map));
1364 4104 : return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
1365 : }
1366 :
1367 7787 : TNode<Object> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {
1368 : CSA_SLOW_ASSERT(this, IsMap(map));
1369 7787 : return CAST(LoadObjectField(map, Map::kPrototypeOffset));
1370 : }
1371 :
1372 31 : TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
1373 : SloppyTNode<Map> map, Label* if_no_proto_info) {
1374 : CSA_ASSERT(this, IsMap(map));
1375 : Node* prototype_info =
1376 : LoadObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
1377 62 : GotoIf(TaggedIsSmi(prototype_info), if_no_proto_info);
1378 : GotoIfNot(WordEqual(LoadMap(prototype_info),
1379 93 : LoadRoot(Heap::kPrototypeInfoMapRootIndex)),
1380 31 : if_no_proto_info);
1381 31 : return CAST(prototype_info);
1382 : }
1383 :
1384 2787 : TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSize(SloppyTNode<Map> map) {
1385 : CSA_SLOW_ASSERT(this, IsMap(map));
1386 : return ChangeInt32ToIntPtr(
1387 5574 : LoadObjectField(map, Map::kInstanceSizeOffset, MachineType::Uint8()));
1388 : }
1389 :
1390 471 : TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectProperties(
1391 : SloppyTNode<Map> map) {
1392 : CSA_SLOW_ASSERT(this, IsMap(map));
1393 : // See Map::GetInObjectProperties() for details.
1394 : STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
1395 : CSA_ASSERT(this, IsJSObjectMap(map));
1396 : return ChangeInt32ToIntPtr(LoadObjectField(
1397 : map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
1398 942 : MachineType::Uint8()));
1399 : }
1400 :
1401 31 : TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1402 : SloppyTNode<Map> map) {
1403 : CSA_SLOW_ASSERT(this, IsMap(map));
1404 : // See Map::GetConstructorFunctionIndex() for details.
1405 : STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
1406 : CSA_ASSERT(this, Int32LessThanOrEqual(LoadMapInstanceType(map),
1407 : Int32Constant(LAST_PRIMITIVE_TYPE)));
1408 : return ChangeInt32ToIntPtr(LoadObjectField(
1409 : map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
1410 62 : MachineType::Uint8()));
1411 : }
1412 :
1413 124 : TNode<Object> CodeStubAssembler::LoadMapConstructor(SloppyTNode<Map> map) {
1414 : CSA_SLOW_ASSERT(this, IsMap(map));
1415 124 : TVARIABLE(Object, result,
1416 : LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1417 :
1418 124 : Label done(this), loop(this, &result);
1419 124 : Goto(&loop);
1420 : BIND(&loop);
1421 : {
1422 248 : GotoIf(TaggedIsSmi(result), &done);
1423 : Node* is_map_type =
1424 248 : InstanceTypeEqual(LoadInstanceType(CAST(result)), MAP_TYPE);
1425 124 : GotoIfNot(is_map_type, &done);
1426 : result =
1427 : LoadObjectField(CAST(result), Map::kConstructorOrBackPointerOffset);
1428 124 : Goto(&loop);
1429 : }
1430 : BIND(&done);
1431 124 : return result;
1432 : }
1433 :
1434 465 : Node* CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
1435 : CSA_SLOW_ASSERT(this, IsMap(map));
1436 930 : Node* bit_field3 = LoadMapBitField3(map);
1437 465 : return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
1438 : }
1439 :
1440 235 : TNode<Uint32T> CodeStubAssembler::LoadNameHashField(SloppyTNode<Name> name) {
1441 : CSA_ASSERT(this, IsName(name));
1442 235 : return LoadObjectField<Uint32T>(name, Name::kHashFieldOffset);
1443 : }
1444 :
1445 4531 : TNode<Uint32T> CodeStubAssembler::LoadNameHash(SloppyTNode<Name> name,
1446 : Label* if_hash_not_computed) {
1447 : TNode<Uint32T> hash_field = LoadNameHashField(name);
1448 4531 : if (if_hash_not_computed != nullptr) {
1449 186 : GotoIf(IsSetWord32(hash_field, Name::kHashNotComputedMask),
1450 372 : if_hash_not_computed);
1451 : }
1452 9062 : return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
1453 : }
1454 :
1455 2139 : TNode<Smi> CodeStubAssembler::LoadStringLength(SloppyTNode<String> object) {
1456 : CSA_ASSERT(this, IsString(object));
1457 2139 : return LoadObjectField<Smi>(object, String::kLengthOffset);
1458 : }
1459 :
1460 31 : Node* CodeStubAssembler::PointerToSeqStringData(Node* seq_string) {
1461 : CSA_ASSERT(this, IsString(seq_string));
1462 : CSA_ASSERT(this,
1463 : IsSequentialStringInstanceType(LoadInstanceType(seq_string)));
1464 : STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
1465 : return IntPtrAdd(
1466 : BitcastTaggedToWord(seq_string),
1467 93 : IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag));
1468 : }
1469 :
1470 62 : Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
1471 : CSA_ASSERT(this, IsJSValue(object));
1472 62 : return LoadObjectField(object, JSValue::kValueOffset);
1473 : }
1474 :
1475 1333 : Node* CodeStubAssembler::LoadWeakCellValueUnchecked(Node* weak_cell) {
1476 : // TODO(ishell): fix callers.
1477 1333 : return LoadObjectField(weak_cell, WeakCell::kValueOffset);
1478 : }
1479 :
1480 3937 : Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell, Label* if_cleared) {
1481 : CSA_ASSERT(this, IsWeakCell(weak_cell));
1482 : Node* value = LoadWeakCellValueUnchecked(weak_cell);
1483 3937 : if (if_cleared != nullptr) {
1484 7626 : GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared);
1485 : }
1486 3937 : return value;
1487 : }
1488 :
1489 79755 : Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node,
1490 : int additional_offset,
1491 : ParameterMode parameter_mode) {
1492 : CSA_SLOW_ASSERT(this, IntPtrGreaterThanOrEqual(
1493 : ParameterToWord(index_node, parameter_mode),
1494 : IntPtrConstant(0)));
1495 : int32_t header_size =
1496 79755 : FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
1497 : Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
1498 79755 : parameter_mode, header_size);
1499 79755 : return Load(MachineType::AnyTagged(), object, offset);
1500 : }
1501 :
1502 1922 : Node* CodeStubAssembler::LoadFixedTypedArrayElement(
1503 : Node* data_pointer, Node* index_node, ElementsKind elements_kind,
1504 : ParameterMode parameter_mode) {
1505 : Node* offset =
1506 1922 : ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
1507 1922 : MachineType type;
1508 1922 : switch (elements_kind) {
1509 : case UINT8_ELEMENTS: /* fall through */
1510 : case UINT8_CLAMPED_ELEMENTS:
1511 403 : type = MachineType::Uint8();
1512 403 : break;
1513 : case INT8_ELEMENTS:
1514 217 : type = MachineType::Int8();
1515 217 : break;
1516 : case UINT16_ELEMENTS:
1517 217 : type = MachineType::Uint16();
1518 217 : break;
1519 : case INT16_ELEMENTS:
1520 217 : type = MachineType::Int16();
1521 217 : break;
1522 : case UINT32_ELEMENTS:
1523 217 : type = MachineType::Uint32();
1524 217 : break;
1525 : case INT32_ELEMENTS:
1526 217 : type = MachineType::Int32();
1527 217 : break;
1528 : case FLOAT32_ELEMENTS:
1529 217 : type = MachineType::Float32();
1530 217 : break;
1531 : case FLOAT64_ELEMENTS:
1532 217 : type = MachineType::Float64();
1533 217 : break;
1534 : default:
1535 0 : UNREACHABLE();
1536 : }
1537 1922 : return Load(type, data_pointer, offset);
1538 : }
1539 :
1540 1674 : Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
1541 : Node* data_pointer, Node* index_node, ElementsKind elements_kind,
1542 : ParameterMode parameter_mode) {
1543 : Node* value = LoadFixedTypedArrayElement(data_pointer, index_node,
1544 1674 : elements_kind, parameter_mode);
1545 1674 : switch (elements_kind) {
1546 : case ElementsKind::INT8_ELEMENTS:
1547 : case ElementsKind::UINT8_CLAMPED_ELEMENTS:
1548 : case ElementsKind::UINT8_ELEMENTS:
1549 : case ElementsKind::INT16_ELEMENTS:
1550 : case ElementsKind::UINT16_ELEMENTS:
1551 1860 : return SmiFromWord32(value);
1552 : case ElementsKind::INT32_ELEMENTS:
1553 372 : return ChangeInt32ToTagged(value);
1554 : case ElementsKind::UINT32_ELEMENTS:
1555 372 : return ChangeUint32ToTagged(value);
1556 : case ElementsKind::FLOAT32_ELEMENTS:
1557 558 : return AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(value));
1558 : case ElementsKind::FLOAT64_ELEMENTS:
1559 372 : return AllocateHeapNumberWithValue(value);
1560 : default:
1561 0 : UNREACHABLE();
1562 : }
1563 : }
1564 :
1565 8463 : Node* CodeStubAssembler::LoadFeedbackVectorSlot(Node* object,
1566 : Node* slot_index_node,
1567 : int additional_offset,
1568 : ParameterMode parameter_mode) {
1569 : CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
1570 : CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
1571 : int32_t header_size =
1572 8463 : FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
1573 : Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
1574 8463 : parameter_mode, header_size);
1575 8463 : return Load(MachineType::AnyTagged(), object, offset);
1576 : }
1577 :
1578 5969 : Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
1579 : Node* object, Node* index_node, int additional_offset,
1580 : ParameterMode parameter_mode) {
1581 : CSA_SLOW_ASSERT(this, Word32Or(IsFixedArray(object), IsHashTable(object)));
1582 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1583 : int32_t header_size =
1584 5969 : FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
1585 : #if V8_TARGET_LITTLE_ENDIAN
1586 5969 : if (Is64()) {
1587 5969 : header_size += kPointerSize / 2;
1588 : }
1589 : #endif
1590 : Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
1591 5969 : parameter_mode, header_size);
1592 5969 : if (Is64()) {
1593 5969 : return Load(MachineType::Int32(), object, offset);
1594 : } else {
1595 0 : return SmiToWord32(Load(MachineType::AnyTagged(), object, offset));
1596 : }
1597 : }
1598 :
1599 905 : Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
1600 : Node* object, Node* index_node, MachineType machine_type,
1601 : int additional_offset, ParameterMode parameter_mode, Label* if_hole) {
1602 : CSA_SLOW_ASSERT(this, IsFixedDoubleArray(object));
1603 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1604 : CSA_ASSERT(this, IsFixedDoubleArray(object));
1605 : int32_t header_size =
1606 905 : FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
1607 : Node* offset = ElementOffsetFromIndex(index_node, HOLEY_DOUBLE_ELEMENTS,
1608 905 : parameter_mode, header_size);
1609 905 : return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
1610 : }
1611 :
1612 2211 : Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset,
1613 : Label* if_hole,
1614 : MachineType machine_type) {
1615 2211 : if (if_hole) {
1616 : // TODO(ishell): Compare only the upper part for the hole once the
1617 : // compiler is able to fold addition of already complex |offset| with
1618 : // |kIeeeDoubleExponentWordOffset| into one addressing mode.
1619 1932 : if (Is64()) {
1620 1932 : Node* element = Load(MachineType::Uint64(), base, offset);
1621 5796 : GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
1622 : } else {
1623 : Node* element_upper = Load(
1624 : MachineType::Uint32(), base,
1625 0 : IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
1626 0 : GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
1627 0 : if_hole);
1628 : }
1629 : }
1630 2211 : if (machine_type.IsNone()) {
1631 : // This means the actual value is not needed.
1632 : return nullptr;
1633 : }
1634 1740 : return Load(machine_type, base, offset);
1635 : }
1636 :
1637 66080 : TNode<Object> CodeStubAssembler::LoadContextElement(
1638 : SloppyTNode<Context> context, int slot_index) {
1639 : int offset = Context::SlotOffset(slot_index);
1640 : return UncheckedCast<Object>(
1641 132160 : Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)));
1642 : }
1643 :
1644 1339 : TNode<Object> CodeStubAssembler::LoadContextElement(
1645 : SloppyTNode<Context> context, SloppyTNode<IntPtrT> slot_index) {
1646 : Node* offset =
1647 : IntPtrAdd(TimesPointerSize(slot_index),
1648 5356 : IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
1649 1339 : return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
1650 : }
1651 :
1652 31 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
1653 : int slot_index,
1654 : SloppyTNode<Object> value) {
1655 : int offset = Context::SlotOffset(slot_index);
1656 62 : Store(context, IntPtrConstant(offset), value);
1657 31 : }
1658 :
1659 186 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
1660 : SloppyTNode<IntPtrT> slot_index,
1661 : SloppyTNode<Object> value) {
1662 : Node* offset =
1663 : IntPtrAdd(TimesPointerSize(slot_index),
1664 744 : IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
1665 186 : Store(context, offset, value);
1666 186 : }
1667 :
1668 6511 : void CodeStubAssembler::StoreContextElementNoWriteBarrier(
1669 : SloppyTNode<Context> context, int slot_index, SloppyTNode<Object> value) {
1670 : int offset = Context::SlotOffset(slot_index);
1671 : StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
1672 13022 : IntPtrConstant(offset), value);
1673 6511 : }
1674 :
1675 10555 : TNode<Context> CodeStubAssembler::LoadNativeContext(
1676 : SloppyTNode<Context> context) {
1677 : return UncheckedCast<Context>(
1678 29567 : LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX));
1679 : }
1680 :
1681 285 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
1682 : SloppyTNode<Int32T> kind, SloppyTNode<Context> native_context) {
1683 : CSA_ASSERT(this, IsFastElementsKind(kind));
1684 : CSA_ASSERT(this, IsNativeContext(native_context));
1685 : Node* offset = IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
1686 570 : ChangeInt32ToIntPtr(kind));
1687 285 : return UncheckedCast<Map>(LoadContextElement(native_context, offset));
1688 : }
1689 :
1690 1302 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
1691 : ElementsKind kind, SloppyTNode<Context> native_context) {
1692 : CSA_ASSERT(this, IsNativeContext(native_context));
1693 : return UncheckedCast<Map>(
1694 1302 : LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
1695 : }
1696 :
// Returns the prototype of a JSFunction, or jumps to |if_bailout| when the
// prototype slot holds the hole (prototype not yet created). The slot may
// hold either the prototype itself or the function's initial map; in the
// latter case the prototype is read off that map.
Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function,
                                                 Label* if_bailout) {
  CSA_ASSERT(this, TaggedIsNotSmi(function));
  CSA_ASSERT(this, IsJSFunction(function));
  CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
  CSA_ASSERT(this, IsClearWord32(LoadMapBitField(LoadMap(function)),
                                 1 << Map::kHasNonInstancePrototype));
  Node* proto_or_map =
      LoadObjectField(function, JSFunction::kPrototypeOrInitialMapOffset);
  GotoIf(IsTheHole(proto_or_map), if_bailout);

  // If the slot holds a map, the function's prototype is the map's prototype.
  VARIABLE(var_result, MachineRepresentation::kTagged, proto_or_map);
  Label done(this, &var_result);
  GotoIfNot(IsMap(proto_or_map), &done);

  var_result.Bind(LoadMapPrototype(proto_or_map));
  Goto(&done);

  BIND(&done);
  return var_result.value();
}
1718 :
// Writes the float64 payload of a HeapNumber. No write barrier is needed
// because the stored value is not a tagged pointer.
void CodeStubAssembler::StoreHeapNumberValue(SloppyTNode<HeapNumber> object,
                                             SloppyTNode<Float64T> value) {
  StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
                                 MachineRepresentation::kFloat64);
}
1724 :
1725 11769 : Node* CodeStubAssembler::StoreObjectField(
1726 : Node* object, int offset, Node* value) {
1727 : DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead.
1728 23538 : return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
1729 : }
1730 :
1731 1457 : Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
1732 : Node* value) {
1733 : int const_offset;
1734 1457 : if (ToInt32Constant(offset, const_offset)) {
1735 0 : return StoreObjectField(object, const_offset, value);
1736 : }
1737 2914 : return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)),
1738 2914 : value);
1739 : }
1740 :
1741 89945 : Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
1742 : Node* object, int offset, Node* value, MachineRepresentation rep) {
1743 : return StoreNoWriteBarrier(rep, object,
1744 179890 : IntPtrConstant(offset - kHeapObjectTag), value);
1745 : }
1746 :
1747 1271 : Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
1748 : Node* object, Node* offset, Node* value, MachineRepresentation rep) {
1749 : int const_offset;
1750 1271 : if (ToInt32Constant(offset, const_offset)) {
1751 155 : return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
1752 : }
1753 : return StoreNoWriteBarrier(
1754 3348 : rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
1755 : }
1756 :
1757 3971 : Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
1758 : CSA_SLOW_ASSERT(this, IsMap(map));
1759 : return StoreWithMapWriteBarrier(
1760 7942 : object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
1761 : }
1762 :
1763 27702 : Node* CodeStubAssembler::StoreMapNoWriteBarrier(
1764 : Node* object, Heap::RootListIndex map_root_index) {
1765 55404 : return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
1766 : }
1767 :
1768 36276 : Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
1769 : CSA_SLOW_ASSERT(this, IsMap(map));
1770 : return StoreNoWriteBarrier(
1771 : MachineRepresentation::kTagged, object,
1772 72552 : IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
1773 : }
1774 :
1775 16581 : Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
1776 : Heap::RootListIndex root_index) {
1777 16581 : if (Heap::RootIsImmortalImmovable(root_index)) {
1778 32976 : return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
1779 : } else {
1780 186 : return StoreObjectField(object, offset, LoadRoot(root_index));
1781 : }
1782 : }
1783 :
// Stores |value| into element |index_node| of a FixedArray (hash tables and
// PropertyArrays are also accepted — they share the FixedArray layout).
// |additional_offset| is a constant byte offset added to the element
// address; |barrier_mode| selects a full write barrier or none.
Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
                                                Node* value,
                                                WriteBarrierMode barrier_mode,
                                                int additional_offset,
                                                ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(
      this, Word32Or(IsHashTable(object),
                     Word32Or(IsFixedArray(object), IsPropertyArray(object))));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
         barrier_mode == UPDATE_WRITE_BARRIER);
  // The shared-layout assumption above is what makes PropertyArray valid here.
  STATIC_ASSERT(FixedArray::kHeaderSize == PropertyArray::kHeaderSize);
  int header_size =
      FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  if (barrier_mode == SKIP_WRITE_BARRIER) {
    return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
                               value);
  } else {
    return Store(object, offset, value);
  }
}
1807 :
1808 605 : Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
1809 : Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
1810 : CSA_ASSERT(this, IsFixedDoubleArray(object));
1811 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
1812 : Node* offset =
1813 : ElementOffsetFromIndex(index_node, PACKED_DOUBLE_ELEMENTS, parameter_mode,
1814 605 : FixedArray::kHeaderSize - kHeapObjectTag);
1815 : MachineRepresentation rep = MachineRepresentation::kFloat64;
1816 605 : return StoreNoWriteBarrier(rep, object, offset, value);
1817 : }
1818 :
// Stores |value| into a FeedbackVector slot. |additional_offset| is a
// constant byte offset added to the slot address; |barrier_mode| selects a
// full write barrier or none.
Node* CodeStubAssembler::StoreFeedbackVectorSlot(Node* object,
                                                 Node* slot_index_node,
                                                 Node* value,
                                                 WriteBarrierMode barrier_mode,
                                                 int additional_offset,
                                                 ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
  DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
         barrier_mode == UPDATE_WRITE_BARRIER);
  int header_size =
      FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
  // Slots are a tagged-pointer-sized array, so HOLEY_ELEMENTS gives the
  // right element scaling.
  Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  if (barrier_mode == SKIP_WRITE_BARRIER) {
    return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
                               value);
  } else {
    return Store(object, offset, value);
  }
}
1840 :
1841 186 : void CodeStubAssembler::EnsureArrayLengthWritable(Node* map, Label* bailout) {
1842 : // Check whether the length property is writable. The length property is the
1843 : // only default named property on arrays. It's nonconfigurable, hence is
1844 : // guaranteed to stay the first property.
1845 372 : Node* descriptors = LoadMapDescriptors(map);
1846 : Node* details =
1847 186 : LoadFixedArrayElement(descriptors, DescriptorArray::ToDetailsIndex(0));
1848 372 : GotoIf(IsSetSmi(details, PropertyDetails::kAttributesReadOnlyMask), bailout);
1849 186 : }
1850 :
// Branches to |bailout| unless elements can be appended to |receiver|:
// it must not be a prototype map, must be extensible, must not be in
// dictionary named-property mode, and its length must be writable.
// Returns the receiver's elements kind decoded from the map.
Node* CodeStubAssembler::EnsureArrayPushable(Node* receiver, Label* bailout) {
  // Disallow pushing onto prototypes. It might be the JSArray prototype.
  // Disallow pushing onto non-extensible objects.
  Comment("Disallow pushing onto prototypes");
  Node* map = LoadMap(receiver);
  Node* bit_field2 = LoadMapBitField2(map);
  int mask = static_cast<int>(Map::IsPrototypeMapBits::kMask) |
             (1 << Map::kIsExtensible);
  // Require exactly: prototype bits clear AND extensible bit set.
  Node* test = Word32And(bit_field2, Int32Constant(mask));
  GotoIf(Word32NotEqual(test, Int32Constant(1 << Map::kIsExtensible)), bailout);

  // Disallow pushing onto arrays in dictionary named property mode. We need
  // to figure out whether the length property is still writable.
  Comment("Disallow pushing onto arrays in dictionary named property mode");
  GotoIf(IsDictionaryMap(map), bailout);

  EnsureArrayLengthWritable(map, bailout);

  Node* kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
  return kind;
}
1872 :
// Grows |var_elements|'s backing store if appending |growth| elements to the
// current |length| would exceed its capacity; jumps to |bailout| if growing
// fails (e.g. the new capacity is too large). On normal exit |var_elements|
// holds a backing store large enough for length + growth elements.
void CodeStubAssembler::PossiblyGrowElementsCapacity(
    ParameterMode mode, ElementsKind kind, Node* array, Node* length,
    Variable* var_elements, Node* growth, Label* bailout) {
  Label fits(this, var_elements);
  Node* capacity =
      TaggedToParameter(LoadFixedArrayBaseLength(var_elements->value()), mode);
  // length and growth nodes are already in a ParameterMode appropriate
  // representation.
  Node* new_length = IntPtrOrSmiAdd(growth, length, mode);
  GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
  Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
  var_elements->Bind(GrowElementsCapacity(array, var_elements->value(), kind,
                                          kind, capacity, new_capacity, mode,
                                          bailout));
  Goto(&fits);
  BIND(&fits);
}
1890 :
// Appends the stub arguments starting at |*arg_index| to |array|, growing
// the backing store as needed, and returns the new array length as a Smi.
// On a per-element failure (value doesn't fit the elements kind, or growing
// fails) the length written so far is committed, |*arg_index| is advanced to
// the first unconsumed argument, and control jumps to |bailout| so the
// caller can finish in a generic path.
TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
                                                 SloppyTNode<JSArray> array,
                                                 CodeStubArguments* args,
                                                 TVariable<IntPtrT>* arg_index,
                                                 Label* bailout) {
  CSA_SLOW_ASSERT(this, IsJSArray(array));
  Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
  Label pre_bailout(this);
  Label success(this);
  TVARIABLE(Smi, var_tagged_length);
  ParameterMode mode = OptimalParameterMode();
  VARIABLE(var_length, OptimalParameterRepresentation(),
           TaggedToParameter(LoadFastJSArrayLength(array), mode));
  VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));

  // Resize the capacity of the fixed array if it doesn't fit.
  TNode<IntPtrT> first = *arg_index;
  Node* growth = WordToParameter(IntPtrSub(args->GetLength(), first), mode);
  PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
                               &var_elements, growth, &pre_bailout);

  // Push each argument onto the end of the array now that there is enough
  // capacity.
  CodeStubAssembler::VariableList push_vars({&var_length}, zone());
  Node* elements = var_elements.value();
  args->ForEach(
      push_vars,
      [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
        TryStoreArrayElement(kind, mode, &pre_bailout, elements,
                             var_length.value(), arg);
        Increment(&var_length, 1, mode);
      },
      first, nullptr);
  {
    TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length = length;
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    Goto(&success);
  }

  BIND(&pre_bailout);
  {
    // Commit the partial progress: write the length reached so far and tell
    // the caller how many arguments were consumed.
    TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length = length;
    Node* diff = SmiSub(length, LoadFastJSArrayLength(array));
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    *arg_index = IntPtrAdd(*arg_index, SmiUntag(diff));
    Goto(bailout);
  }

  BIND(&success);
  return var_tagged_length;
}
1944 :
1945 612 : void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
1946 : ParameterMode mode, Label* bailout,
1947 : Node* elements, Node* index,
1948 : Node* value) {
1949 612 : if (IsSmiElementsKind(kind)) {
1950 420 : GotoIf(TaggedIsNotSmi(value), bailout);
1951 402 : } else if (IsDoubleElementsKind(kind)) {
1952 204 : GotoIfNotNumber(value, bailout);
1953 : }
1954 612 : if (IsDoubleElementsKind(kind)) {
1955 408 : Node* double_value = ChangeNumberToFloat64(value);
1956 : StoreFixedDoubleArrayElement(elements, index,
1957 408 : Float64SilenceNaN(double_value), mode);
1958 : } else {
1959 : WriteBarrierMode barrier_mode =
1960 408 : IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
1961 408 : StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
1962 : }
1963 612 : }
1964 :
// Appends a single |value| to |array|, growing the backing store if needed.
// Jumps to |bailout| when the value does not fit the elements kind or the
// backing store cannot be grown; in that case nothing has been written.
void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
                                           Node* value, Label* bailout) {
  CSA_SLOW_ASSERT(this, IsJSArray(array));
  Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
  ParameterMode mode = OptimalParameterMode();
  VARIABLE(var_length, OptimalParameterRepresentation(),
           TaggedToParameter(LoadFastJSArrayLength(array), mode));
  VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));

  // Resize the capacity of the fixed array if it doesn't fit.
  Node* growth = IntPtrOrSmiConstant(1, mode);
  PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
                               &var_elements, growth, bailout);

  // Push each argument onto the end of the array now that there is enough
  // capacity.
  TryStoreArrayElement(kind, mode, bailout, var_elements.value(),
                       var_length.value(), value);
  Increment(&var_length, 1, mode);

  Node* length = ParameterToTagged(var_length.value(), mode);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
}
1988 :
1989 31 : Node* CodeStubAssembler::AllocateCellWithValue(Node* value,
1990 : WriteBarrierMode mode) {
1991 31 : Node* result = Allocate(Cell::kSize, kNone);
1992 31 : StoreMapNoWriteBarrier(result, Heap::kCellMapRootIndex);
1993 31 : StoreCellValue(result, value, mode);
1994 31 : return result;
1995 : }
1996 :
// Reads the value field of a Cell.
Node* CodeStubAssembler::LoadCellValue(Node* cell) {
  CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
  return LoadObjectField(cell, Cell::kValueOffset);
}
2001 :
2002 124 : Node* CodeStubAssembler::StoreCellValue(Node* cell, Node* value,
2003 : WriteBarrierMode mode) {
2004 : CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
2005 : DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER);
2006 :
2007 124 : if (mode == UPDATE_WRITE_BARRIER) {
2008 0 : return StoreObjectField(cell, Cell::kValueOffset, value);
2009 : } else {
2010 124 : return StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
2011 : }
2012 : }
2013 :
2014 11166 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
2015 11166 : Node* result = Allocate(HeapNumber::kSize, kNone);
2016 : Heap::RootListIndex heap_map_index =
2017 : mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
2018 11166 : : Heap::kMutableHeapNumberMapRootIndex;
2019 11166 : StoreMapNoWriteBarrier(result, heap_map_index);
2020 11166 : return UncheckedCast<HeapNumber>(result);
2021 : }
2022 :
2023 11135 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
2024 : SloppyTNode<Float64T> value, MutableMode mode) {
2025 11135 : TNode<HeapNumber> result = AllocateHeapNumber(mode);
2026 : StoreHeapNumberValue(result, value);
2027 11135 : return result;
2028 : }
2029 :
// Allocates a SeqOneByteString of compile-time-constant |length| with
// uninitialized character data; length zero returns the canonical empty
// string.
Node* CodeStubAssembler::AllocateSeqOneByteString(int length,
                                                  AllocationFlags flags) {
  Comment("AllocateSeqOneByteString");
  if (length == 0) {
    return LoadRoot(Heap::kempty_stringRootIndex);
  }
  Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
  StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                 SmiConstant(length));
  // Initialize both used and unused parts of hash field slot at once.
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
                                 IntPtrConstant(String::kEmptyHashField),
                                 MachineType::PointerRepresentation());
  return result;
}
2047 :
2048 0 : Node* CodeStubAssembler::IsZeroOrFixedArray(Node* object) {
2049 0 : Label out(this);
2050 0 : VARIABLE(var_result, MachineRepresentation::kWord32, Int32Constant(1));
2051 :
2052 0 : GotoIf(WordEqual(object, SmiConstant(0)), &out);
2053 0 : GotoIf(IsFixedArray(object), &out);
2054 :
2055 0 : var_result.Bind(Int32Constant(0));
2056 0 : Goto(&out);
2057 :
2058 : BIND(&out);
2059 0 : return var_result.value();
2060 : }
2061 :
// Allocates a SeqOneByteString of dynamic |length| (in |mode|
// representation) with uninitialized character data. Small strings are
// allocated inline in new space; sizes above kMaxRegularHeapObjectSize go
// through the runtime, and length zero yields the canonical empty string.
// |context| is only needed for the runtime fallback.
Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
                                                  ParameterMode mode,
                                                  AllocationFlags flags) {
  Comment("AllocateSeqOneByteString");
  CSA_SLOW_ASSERT(this, IsZeroOrFixedArray(context));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode));
  VARIABLE(var_result, MachineRepresentation::kTagged);

  // Compute the SeqOneByteString size and check if it fits into new space.
  Label if_lengthiszero(this), if_sizeissmall(this),
      if_notsizeissmall(this, Label::kDeferred), if_join(this);
  GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);

  Node* raw_size = GetArrayAllocationSize(
      length, UINT8_ELEMENTS, mode,
      SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
  // Round the size up to the object alignment boundary.
  Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
         &if_sizeissmall, &if_notsizeissmall);

  BIND(&if_sizeissmall);
  {
    // Just allocate the SeqOneByteString in new space.
    Node* result = AllocateInNewSpace(size, flags);
    DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
    StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                   ParameterToTagged(length, mode));
    // Initialize both used and unused parts of hash field slot at once.
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
                                   IntPtrConstant(String::kEmptyHashField),
                                   MachineType::PointerRepresentation());
    var_result.Bind(result);
    Goto(&if_join);
  }

  BIND(&if_notsizeissmall);
  {
    // We might need to allocate in large object space, go to the runtime.
    Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
                               ParameterToTagged(length, mode));
    var_result.Bind(result);
    Goto(&if_join);
  }

  BIND(&if_lengthiszero);
  {
    var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
    Goto(&if_join);
  }

  BIND(&if_join);
  return var_result.value();
}
2116 :
// Allocates a SeqTwoByteString of compile-time-constant |length| with
// uninitialized character data; length zero returns the canonical empty
// string.
Node* CodeStubAssembler::AllocateSeqTwoByteString(int length,
                                                  AllocationFlags flags) {
  Comment("AllocateSeqTwoByteString");
  if (length == 0) {
    return LoadRoot(Heap::kempty_stringRootIndex);
  }
  Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
  StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
                                 SmiConstant(length));
  // Initialize both used and unused parts of hash field slot at once.
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
                                 IntPtrConstant(String::kEmptyHashField),
                                 MachineType::PointerRepresentation());
  return result;
}
2134 :
2135 1581 : Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
2136 : ParameterMode mode,
2137 : AllocationFlags flags) {
2138 : CSA_SLOW_ASSERT(this, IsFixedArray(context));
2139 : CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode));
2140 1581 : Comment("AllocateSeqTwoByteString");
2141 1581 : VARIABLE(var_result, MachineRepresentation::kTagged);
2142 :
2143 : // Compute the SeqTwoByteString size and check if it fits into new space.
2144 1581 : Label if_lengthiszero(this), if_sizeissmall(this),
2145 1581 : if_notsizeissmall(this, Label::kDeferred), if_join(this);
2146 4743 : GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);
2147 :
2148 : Node* raw_size = GetArrayAllocationSize(
2149 : length, UINT16_ELEMENTS, mode,
2150 : SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
2151 4743 : Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
2152 3162 : Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
2153 3162 : &if_sizeissmall, &if_notsizeissmall);
2154 :
2155 : BIND(&if_sizeissmall);
2156 : {
2157 : // Just allocate the SeqTwoByteString in new space.
2158 : Node* result = AllocateInNewSpace(size, flags);
2159 : DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
2160 1581 : StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
2161 : StoreObjectFieldNoWriteBarrier(
2162 : result, SeqTwoByteString::kLengthOffset,
2163 3162 : mode == SMI_PARAMETERS ? length : SmiFromWord(length));
2164 : // Initialize both used and unused parts of hash field slot at once.
2165 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
2166 : IntPtrConstant(String::kEmptyHashField),
2167 3162 : MachineType::PointerRepresentation());
2168 1581 : var_result.Bind(result);
2169 1581 : Goto(&if_join);
2170 : }
2171 :
2172 : BIND(&if_notsizeissmall);
2173 : {
2174 : // We might need to allocate in large object space, go to the runtime.
2175 : Node* result =
2176 : CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
2177 1581 : mode == SMI_PARAMETERS ? length : SmiFromWord(length));
2178 1581 : var_result.Bind(result);
2179 1581 : Goto(&if_join);
2180 : }
2181 :
2182 : BIND(&if_lengthiszero);
2183 : {
2184 3162 : var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
2185 1581 : Goto(&if_join);
2186 : }
2187 :
2188 : BIND(&if_join);
2189 3162 : return var_result.value();
2190 : }
2191 :
// Allocates a SlicedString referencing [offset, offset+length) of |parent|.
// |map_root_index| selects the one-byte or two-byte sliced-string map.
// |length| and |offset| are Smis. All field stores skip the write barrier,
// which is safe because the object was just allocated in new space.
Node* CodeStubAssembler::AllocateSlicedString(
    Heap::RootListIndex map_root_index, Node* length, Node* parent,
    Node* offset) {
  CSA_ASSERT(this, IsString(parent));
  CSA_ASSERT(this, TaggedIsSmi(length));
  CSA_ASSERT(this, TaggedIsSmi(offset));
  Node* result = Allocate(SlicedString::kSize);
  DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
  StoreMapNoWriteBarrier(result, map_root_index);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
                                 MachineRepresentation::kTagged);
  // Initialize both used and unused parts of hash field slot at once.
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot,
                                 IntPtrConstant(String::kEmptyHashField),
                                 MachineType::PointerRepresentation());
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
                                 MachineRepresentation::kTagged);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
                                 MachineRepresentation::kTagged);
  return result;
}
2213 :
// Allocates a one-byte SlicedString over |parent| (see AllocateSlicedString).
Node* CodeStubAssembler::AllocateSlicedOneByteString(Node* length, Node* parent,
                                                     Node* offset) {
  return AllocateSlicedString(Heap::kSlicedOneByteStringMapRootIndex, length,
                              parent, offset);
}
2219 :
// Allocates a two-byte SlicedString over |parent| (see AllocateSlicedString).
Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent,
                                                     Node* offset) {
  return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
                              offset);
}
2225 :
// Allocates a ConsString with the given |first| and |second| parts and Smi
// |length|. |map_root_index| selects the one-byte or two-byte cons map.
// Pretenured allocations use barriered stores for the string parts because
// the cons string may be placed in old space while the parts are young.
Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
                                            Node* length, Node* first,
                                            Node* second,
                                            AllocationFlags flags) {
  CSA_ASSERT(this, IsString(first));
  CSA_ASSERT(this, IsString(second));
  CSA_ASSERT(this, TaggedIsSmi(length));
  Node* result = Allocate(ConsString::kSize, flags);
  DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
  StoreMapNoWriteBarrier(result, map_root_index);
  StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
                                 MachineRepresentation::kTagged);
  // Initialize both used and unused parts of hash field slot at once.
  StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot,
                                 IntPtrConstant(String::kEmptyHashField),
                                 MachineType::PointerRepresentation());
  bool const new_space = !(flags & kPretenured);
  if (new_space) {
    // New-space objects never need a write barrier right after allocation.
    StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
                                   MachineRepresentation::kTagged);
    StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
                                   MachineRepresentation::kTagged);
  } else {
    StoreObjectField(result, ConsString::kFirstOffset, first);
    StoreObjectField(result, ConsString::kSecondOffset, second);
  }
  return result;
}
2254 :
// Allocates a one-byte ConsString (see AllocateConsString).
Node* CodeStubAssembler::AllocateOneByteConsString(Node* length, Node* first,
                                                   Node* second,
                                                   AllocationFlags flags) {
  return AllocateConsString(Heap::kConsOneByteStringMapRootIndex, length, first,
                            second, flags);
}
2261 :
// Allocates a two-byte ConsString (see AllocateConsString).
Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first,
                                                   Node* second,
                                                   AllocationFlags flags) {
  return AllocateConsString(Heap::kConsStringMapRootIndex, length, first,
                            second, flags);
}
2268 :
// Creates a ConsString of |left| + |right| with Smi |length|, choosing the
// one-byte or two-byte cons map from the operands' instance types.
Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
                                       Node* right, AllocationFlags flags) {
  CSA_ASSERT(this, IsFixedArray(context));
  CSA_ASSERT(this, IsString(left));
  CSA_ASSERT(this, IsString(right));
  CSA_ASSERT(this, TaggedIsSmi(length));
  // Added string can be a cons string.
  Comment("Allocating ConsString");
  Node* left_instance_type = LoadInstanceType(left);
  Node* right_instance_type = LoadInstanceType(right);

  // Compute intersection and difference of instance types.
  Node* anded_instance_types =
      Word32And(left_instance_type, right_instance_type);
  Node* xored_instance_types =
      Word32Xor(left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintTag != 0);
  Label one_byte_map(this);
  Label two_byte_map(this);
  VARIABLE(result, MachineRepresentation::kTagged);
  Label done(this, &result);
  GotoIf(IsSetWord32(anded_instance_types,
                     kStringEncodingMask | kOneByteDataHintTag),
         &one_byte_map);
  Branch(Word32NotEqual(Word32And(xored_instance_types,
                                  Int32Constant(kStringEncodingMask |
                                                kOneByteDataHintMask)),
                        Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
         &two_byte_map, &one_byte_map);

  BIND(&one_byte_map);
  Comment("One-byte ConsString");
  result.Bind(AllocateOneByteConsString(length, left, right, flags));
  Goto(&done);

  BIND(&two_byte_map);
  Comment("Two-byte ConsString");
  result.Bind(AllocateTwoByteConsString(length, left, right, flags));
  Goto(&done);

  BIND(&done);

  return result.value();
}
2324 :
// Allocates a NameDictionary sized for a compile-time-constant entry count.
Node* CodeStubAssembler::AllocateNameDictionary(int at_least_space_for) {
  return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
}
2328 :
// Allocates a NameDictionary with room for |at_least_space_for| entries,
// rounding the backing capacity up as required by the hash-table layout.
Node* CodeStubAssembler::AllocateNameDictionary(Node* at_least_space_for) {
  CSA_ASSERT(this, UintPtrLessThanOrEqual(
                       at_least_space_for,
                       IntPtrConstant(NameDictionary::kMaxCapacity)));
  Node* capacity = HashTableComputeCapacity(at_least_space_for);
  return AllocateNameDictionaryWithCapacity(capacity);
}
2336 :
// Allocates a NameDictionary with exactly |capacity| buckets (must be a
// power of two) and initializes its header fields and all elements to
// undefined. Allocated in new space, so barrier-free stores are safe.
Node* CodeStubAssembler::AllocateNameDictionaryWithCapacity(Node* capacity) {
  CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
  Node* length = EntryToIndex<NameDictionary>(capacity);
  Node* store_size = IntPtrAdd(TimesPointerSize(length),
                               IntPtrConstant(NameDictionary::kHeaderSize));

  Node* result = AllocateInNewSpace(store_size);
  Comment("Initialize NameDictionary");
  // Initialize FixedArray fields.
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kHashTableMapRootIndex));
  StoreMapNoWriteBarrier(result, Heap::kHashTableMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
                                 SmiFromWord(length));
  // Initialized HashTable fields.
  Node* zero = SmiConstant(0);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
                         zero, SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
                         SmiTag(capacity), SKIP_WRITE_BARRIER);
  // Initialize Dictionary fields.
  Node* filler = UndefinedConstant();
  StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
                         SmiConstant(PropertyDetails::kInitialIndex),
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
                         SmiConstant(PropertyArray::kNoHashSentinel),
                         SKIP_WRITE_BARRIER);

  // Initialize NameDictionary elements.
  // Fill the whole element region [elements start, end of object) with
  // undefined in one bulk loop.
  Node* result_word = BitcastTaggedToWord(result);
  Node* start_address = IntPtrAdd(
      result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
                                      NameDictionary::kElementsStartIndex) -
                                  kHeapObjectTag));
  Node* end_address = IntPtrAdd(
      result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
  return result;
}
2378 :
        : // Produces a copy of |dictionary| (a NameDictionary used as a property
        : // backing store). Jumps to |large_object_fallback| without allocating
        : // when the capacity exceeds the regular-object limit, since the copy
        : // below assumes a new-space-sized allocation.
2379 124 : Node* CodeStubAssembler::CopyNameDictionary(Node* dictionary,
2380 : Label* large_object_fallback) {
2381 : CSA_ASSERT(this, IsHashTable(dictionary));
2382 124 : Comment("Copy boilerplate property dict");
2383 248 : Node* capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
2384 : CSA_ASSERT(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)));
2385 : GotoIf(UintPtrGreaterThan(
2386 248 : capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
2387 248 : large_object_fallback);
2388 124 : Node* properties = AllocateNameDictionaryWithCapacity(capacity);
2389 372 : Node* length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
        : // Both arrays are freshly allocated/filled, so the element copy can
        : // skip the write barrier.
2390 : CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
2391 124 : SKIP_WRITE_BARRIER, INTPTR_PARAMETERS);
2392 124 : return properties;
2393 : }
2394 :
        : // Allocates a Struct-derived object of the size recorded in |map|,
        : // installs the map, and fills the body (everything past the header)
        : // with undefined.
2395 241 : Node* CodeStubAssembler::AllocateStruct(Node* map, AllocationFlags flags) {
2396 241 : Comment("AllocateStruct");
2397 : CSA_ASSERT(this, IsMap(map));
2398 482 : Node* size = TimesPointerSize(LoadMapInstanceSize(map));
2399 241 : Node* object = Allocate(size, flags);
2400 241 : StoreMapNoWriteBarrier(object, map);
2401 241 : InitializeStructBody(object, map, size, Struct::kHeaderSize);
2402 241 : return object;
2403 : }
2404 :
        : // Fills the fields of |object| from |start_offset| up to |size| with
        : // undefined, using untagged addresses and no write barrier (callers
        : // pass freshly allocated objects). |map| is only used for the assert.
2405 241 : void CodeStubAssembler::InitializeStructBody(Node* object, Node* map,
2406 : Node* size, int start_offset) {
2407 : CSA_SLOW_ASSERT(this, IsMap(map));
2408 241 : Comment("InitializeStructBody");
2409 : Node* filler = UndefinedConstant();
2410 : // Calculate the untagged field addresses.
2411 482 : object = BitcastTaggedToWord(object);
2412 : Node* start_address =
2413 723 : IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
2414 : Node* end_address =
2415 964 : IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
2416 241 : StoreFieldsNoWriteBarrier(start_address, end_address, filler);
2417 241 : }
2418 :
        : // Allocates a JSObject in new space with the instance size recorded in
        : // |map|, then initializes its map, properties, elements, and in-object
        : // fields. |properties|/|elements| may be nullptr, in which case the
        : // empty fixed array is installed (see InitializeJSObjectFromMap).
2419 1827 : Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties,
2420 : Node* elements,
2421 : AllocationFlags flags) {
2422 : CSA_ASSERT(this, IsMap(map));
2423 3654 : Node* size = TimesPointerSize(LoadMapInstanceSize(map));
2424 : Node* object = AllocateInNewSpace(size, flags);
2425 1827 : StoreMapNoWriteBarrier(object, map);
2426 1827 : InitializeJSObjectFromMap(object, map, size, properties, elements);
2427 1827 : return object;
2428 : }
2429 :
        : // Initializes the standard JSObject header fields (properties backing
        : // store, elements, and in-object body). nullptr for |properties| or
        : // |elements| means "install the shared empty fixed array". Assumes the
        : // object is in new space, so most stores skip the write barrier.
2430 2199 : void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
2431 : Node* size, Node* properties,
2432 : Node* elements) {
2433 : CSA_SLOW_ASSERT(this, IsMap(map));
2434 : // This helper assumes that the object is in new-space, as guarded by the
2435 : // check in AllocatedJSObjectFromMap.
2436 2199 : if (properties == nullptr) {
        : // A dictionary-mode map would require a real dictionary here, hence
        : // the assert before installing the empty fixed array.
2437 : CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
2438 : StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
2439 1325 : Heap::kEmptyFixedArrayRootIndex);
2440 : } else {
2441 : CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(properties),
2442 : IsDictionary(properties)),
2443 : IsEmptyFixedArray(properties)));
2444 : StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
2445 874 : properties);
2446 : }
2447 2199 : if (elements == nullptr) {
2448 : StoreObjectFieldRoot(object, JSObject::kElementsOffset,
2449 1759 : Heap::kEmptyFixedArrayRootIndex);
2450 : } else {
2451 : CSA_ASSERT(this, IsFixedArray(elements));
2452 440 : StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
2453 : }
2454 2199 : InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize);
2455 2199 : }
2456 :
        : // Fills the in-object fields of a JSObject (from |start_offset| up to
        : // |size|) with undefined via untagged stores without write barriers.
        : // |map| is only consulted by the slow assert.
2457 2323 : void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map,
2458 : Node* size, int start_offset) {
2459 : CSA_SLOW_ASSERT(this, IsMap(map));
2460 : // TODO(cbruni): activate in-object slack tracking machinery.
2461 2323 : Comment("InitializeJSObjectBody");
2462 : Node* filler = UndefinedConstant();
2463 : // Calculate the untagged field addresses.
2464 4646 : object = BitcastTaggedToWord(object);
2465 : Node* start_address =
2466 6969 : IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
2467 : Node* end_address =
2468 9292 : IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
2469 2323 : StoreFieldsNoWriteBarrier(start_address, end_address, filler);
2470 2323 : }
2471 :
        : // Stores |value| into every pointer-sized slot in the half-open range
        : // [start_address, end_address), without write barriers. Both addresses
        : // must be word-aligned (asserted); the loop advances by kPointerSize.
2472 3128 : void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
2473 : Node* end_address,
2474 : Node* value) {
2475 3128 : Comment("StoreFieldsNoWriteBarrier");
2476 : CSA_ASSERT(this, WordIsWordAligned(start_address));
2477 : CSA_ASSERT(this, WordIsWordAligned(end_address));
2478 : BuildFastLoop(start_address, end_address,
2479 : [this, value](Node* current) {
2480 : StoreNoWriteBarrier(MachineRepresentation::kTagged, current,
2481 3128 : value);
2482 : },
2483 6256 : kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
2484 3128 : }
2485 :
        : // Allocates a bare JSArray (header only, no elements backing store).
        : // The caller is responsible for storing an elements pointer afterwards.
        : // If |allocation_site| is non-null, extra space for an
        : // AllocationMemento is reserved directly after the array.
2486 899 : Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
2487 : Node* array_map, Node* length, Node* allocation_site) {
2488 899 : Comment("begin allocation of JSArray without elements");
2489 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2490 : CSA_SLOW_ASSERT(this, IsMap(array_map));
2491 : int base_size = JSArray::kSize;
2492 899 : if (allocation_site != nullptr) {
2493 : base_size += AllocationMemento::kSize;
2494 : }
2495 :
2496 1798 : Node* size = IntPtrConstant(base_size);
2497 : Node* array =
2498 899 : AllocateUninitializedJSArray(array_map, length, allocation_site, size);
2499 899 : return array;
2500 : }
2501 :
        : // Allocates a JSArray together with its elements FixedArray(/Double) in
        : // a single new-space allocation, and returns {array, elements}. The
        : // elements array's map and length are initialized here, but its slots
        : // are left uninitialized — callers must fill them (e.g. with holes).
2502 : std::pair<Node*, Node*>
2503 837 : CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
2504 : ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
2505 : Node* capacity, ParameterMode capacity_mode) {
2506 837 : Comment("begin allocation of JSArray with elements");
2507 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2508 : CSA_SLOW_ASSERT(this, IsMap(array_map));
2509 : int base_size = JSArray::kSize;
2510 :
2511 837 : if (allocation_site != nullptr) {
2512 : base_size += AllocationMemento::kSize;
2513 : }
2514 :
        : // The elements array starts directly after the JSArray (+ optional
        : // memento); remember that offset before adding the elements size.
2515 : int elements_offset = base_size;
2516 :
2517 : // Compute space for elements
2518 837 : base_size += FixedArray::kHeaderSize;
2519 837 : Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
2520 :
2521 : Node* array =
2522 837 : AllocateUninitializedJSArray(array_map, length, allocation_site, size);
2523 :
2524 837 : Node* elements = InnerAllocate(array, elements_offset);
2525 837 : StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements);
2526 : // Setup elements object.
2527 : STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
2528 : Heap::RootListIndex elements_map_index =
2529 : IsDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex
2530 837 : : Heap::kFixedArrayMapRootIndex;
2531 : DCHECK(Heap::RootIsImmortalImmovable(elements_map_index));
2532 837 : StoreMapNoWriteBarrier(elements, elements_map_index);
2533 : Node* capacity_smi = ParameterToTagged(capacity, capacity_mode);
2534 : CSA_ASSERT(this, SmiGreaterThan(capacity_smi, SmiConstant(0)));
2535 : StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
2536 837 : capacity_smi);
2537 837 : return {array, elements};
2538 : }
2539 :
        : // Shared tail of the two helpers above: allocates |size_in_bytes| in
        : // new space, writes the JSArray map, length, and empty properties, and
        : // (if |allocation_site| is given) an AllocationMemento right after the
        : // JSArray header. The elements field is NOT written here.
2540 1736 : Node* CodeStubAssembler::AllocateUninitializedJSArray(Node* array_map,
2541 : Node* length,
2542 : Node* allocation_site,
2543 : Node* size_in_bytes) {
2544 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2545 : CSA_SLOW_ASSERT(this, IsMap(array_map));
2546 :
2547 : // Allocate space for the JSArray and the elements FixedArray in one go.
2548 : Node* array = AllocateInNewSpace(size_in_bytes);
2549 :
2550 1736 : Comment("write JSArray headers");
2551 1736 : StoreMapNoWriteBarrier(array, array_map);
2552 :
2553 1736 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2554 :
2555 : StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
2556 1736 : Heap::kEmptyFixedArrayRootIndex);
2557 :
2558 1736 : if (allocation_site != nullptr) {
2559 : InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
2560 806 : allocation_site);
2561 : }
2562 1736 : return array;
2563 : }
2564 :
        : // Allocates a fully initialized JSArray of |capacity| elements (filled
        : // with holes) and the given |length|. Three code paths, chosen at
        : // stub-compile time where possible:
        : //   1. capacity is the constant 0 -> share the empty fixed array;
        : //   2. capacity is a positive constant -> allocate array + elements
        : //      unconditionally;
        : //   3. capacity is dynamic -> emit a runtime branch between the empty
        : //      and non-empty cases.
2565 1054 : Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
2566 : Node* capacity, Node* length,
2567 : Node* allocation_site,
2568 : ParameterMode capacity_mode) {
2569 : CSA_SLOW_ASSERT(this, IsMap(array_map));
2570 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
2571 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));
2572 :
2573 : int capacity_as_constant;
2574 1054 : Node *array = nullptr, *elements = nullptr;
2575 1054 : if (IsIntPtrOrSmiConstantZero(capacity, capacity_mode)) {
2576 : // Array is empty. Use the shared empty fixed array instead of allocating a
2577 : // new one.
2578 : array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
2579 248 : allocation_site);
2580 : StoreObjectFieldRoot(array, JSArray::kElementsOffset,
2581 248 : Heap::kEmptyFixedArrayRootIndex);
2582 806 : } else if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_as_constant,
2583 1209 : capacity_mode) &&
2584 403 : capacity_as_constant > 0) {
2585 : // Allocate both array and elements object, and initialize the JSArray.
2586 806 : std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
2587 : kind, array_map, length, allocation_site, capacity, capacity_mode);
2588 : // Fill in the elements with holes.
2589 : FillFixedArrayWithValue(kind, elements,
2590 : IntPtrOrSmiConstant(0, capacity_mode), capacity,
2591 403 : Heap::kTheHoleValueRootIndex, capacity_mode);
2592 : } else {
        : // Dynamic capacity: decide empty vs. non-empty at runtime.
2593 806 : Label out(this), empty(this), nonempty(this);
2594 806 : VARIABLE(var_array, MachineRepresentation::kTagged);
2595 :
2596 : Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
2597 1209 : &empty, &nonempty);
2598 :
2599 : BIND(&empty);
2600 : {
2601 : // Array is empty. Use the shared empty fixed array instead of allocating
2602 : // a new one.
2603 : var_array.Bind(AllocateUninitializedJSArrayWithoutElements(
2604 403 : array_map, length, allocation_site));
2605 : StoreObjectFieldRoot(var_array.value(), JSArray::kElementsOffset,
2606 403 : Heap::kEmptyFixedArrayRootIndex);
2607 403 : Goto(&out);
2608 : }
2609 :
2610 : BIND(&nonempty);
2611 : {
2612 : // Allocate both array and elements object, and initialize the JSArray.
        : // Note: this |array| deliberately shadows the outer one; the result
        : // flows to the merge point through |var_array| instead.
2613 : Node* array;
2614 806 : std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
2615 : kind, array_map, length, allocation_site, capacity, capacity_mode);
2616 403 : var_array.Bind(array);
2617 : // Fill in the elements with holes.
2618 : FillFixedArrayWithValue(kind, elements,
2619 : IntPtrOrSmiConstant(0, capacity_mode), capacity,
2620 403 : Heap::kTheHoleValueRootIndex, capacity_mode);
2621 403 : Goto(&out);
2622 : }
2623 :
2624 : BIND(&out);
2625 806 : array = var_array.value();
2626 : }
2627 :
2628 1054 : return array;
2629 : }
2630 :
        : // Creates a new JSArray containing |count| elements of |array| starting
        : // at |begin|: extracts a slice of the elements store, then wraps it in
        : // a fresh JSArray using the canonical map for the source's elements
        : // kind from the native context.
2631 0 : Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
2632 : Node* begin, Node* count,
2633 : ParameterMode mode, Node* capacity,
2634 : Node* allocation_site) {
2635 0 : Node* original_array_map = LoadMap(array);
2636 0 : Node* elements_kind = LoadMapElementsKind(original_array_map);
2637 :
2638 : // Use the cannonical map for the Array's ElementsKind
2639 : Node* native_context = LoadNativeContext(context);
2640 0 : Node* array_map = LoadJSArrayElementsMap(elements_kind, native_context);
2641 :
2642 : Node* new_elements =
2643 0 : ExtractFixedArray(LoadElements(array), begin, count, capacity,
2644 0 : ExtractFixedArrayFlag::kAllFixedArrays, mode);
2645 :
2646 : Node* result = AllocateUninitializedJSArrayWithoutElements(
2647 0 : array_map, ParameterToTagged(count, mode), allocation_site);
        : // Full StoreObjectField (with write barrier) because |new_elements| may
        : // have been allocated in old/large-object space by ExtractFixedArray.
2648 0 : StoreObjectField(result, JSObject::kElementsOffset, new_elements);
2649 0 : return result;
2650 : }
2651 :
        : // Clones |array|: copies its elements store via CloneFixedArray and
        : // wraps the copy in a new JSArray with the same length, using the
        : // canonical map for the source's elements kind.
2652 124 : Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
2653 : ParameterMode mode,
2654 : Node* allocation_site) {
2655 248 : Node* length = LoadJSArrayLength(array);
2656 248 : Node* elements = LoadElements(array);
2657 :
2658 248 : Node* original_array_map = LoadMap(array);
2659 248 : Node* elements_kind = LoadMapElementsKind(original_array_map);
2660 :
2661 124 : Node* new_elements = CloneFixedArray(elements);
2662 :
2663 : // Use the cannonical map for the Array's ElementsKind
2664 : Node* native_context = LoadNativeContext(context);
2665 248 : Node* array_map = LoadJSArrayElementsMap(elements_kind, native_context);
2666 : Node* result = AllocateUninitializedJSArrayWithoutElements(array_map, length,
2667 124 : allocation_site);
        : // Write-barriered store: the cloned elements may live outside new space.
2668 124 : StoreObjectField(result, JSObject::kElementsOffset, new_elements);
2669 124 : return result;
2670 : }
2671 :
        : // Allocates a FixedArray or FixedDoubleArray of |capacity_node|
        : // elements (must be > 0, asserted), installs the map — either the
        : // caller-supplied |fixed_array_map| or the root map matching |kind| —
        : // and writes the length field. Element slots are left uninitialized.
2672 5132 : Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
2673 : Node* capacity_node,
2674 : ParameterMode mode,
2675 : AllocationFlags flags,
2676 : Node* fixed_array_map) {
2677 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
2678 : CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
2679 : IntPtrOrSmiConstant(0, mode), mode));
2680 : Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);
2681 :
2682 5132 : if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
2683 : // Allocate both array and elements object, and initialize the JSArray.
2684 5132 : Node* array = Allocate(total_size, flags);
2685 5132 : if (fixed_array_map != nullptr) {
2686 : // Conservatively only skip the write barrier if there are no allocation
2687 : // flags, this ensures that the object hasn't ended up in LOS. Note that the
2688 : // fixed array map is currently alwasys immortal and technically wouldn't
2689 : // need the write barrier even in LOS, but it's better to not take chances
2690 : // in case this invariant changes later, since it's difficult to enforce
2691 : // locally here.
2692 2246 : if (flags == CodeStubAssembler::kNone) {
2693 1046 : StoreMapNoWriteBarrier(array, fixed_array_map);
2694 : } else {
2695 1200 : StoreMap(array, fixed_array_map);
2696 : }
2697 : } else {
2698 : Heap::RootListIndex map_index = IsDoubleElementsKind(kind)
2699 : ? Heap::kFixedDoubleArrayMapRootIndex
2700 2886 : : Heap::kFixedArrayMapRootIndex;
2701 : DCHECK(Heap::RootIsImmortalImmovable(map_index));
2702 2886 : StoreMapNoWriteBarrier(array, map_index);
2703 : }
2704 : StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
2705 5132 : ParameterToTagged(capacity_node, mode));
2706 5132 : return array;
2707 : }
2708 :
        : // Copies a sub-range [first, first + count) of |fixed_array| into a new
        : // array of |capacity| slots, handling FixedArray, FixedDoubleArray and
        : // COW arrays as selected by |extract_flags|. nullptr arguments default
        : // to: first = 0, count = length - first, capacity = count. COW sources
        : // are returned as-is when kDontCopyCOW is set and the range starts at
        : // 0; otherwise they are copied as a plain FixedArray. New-space and
        : // (deferred) old-space copy paths are emitted as needed.
2709 1046 : Node* CodeStubAssembler::ExtractFixedArray(Node* fixed_array, Node* first,
2710 : Node* count, Node* capacity,
2711 : ExtractFixedArrayFlags extract_flags,
2712 : ParameterMode parameter_mode) {
2713 1046 : VARIABLE(var_result, MachineRepresentation::kTagged);
2714 2092 : VARIABLE(var_fixed_array_map, MachineRepresentation::kTagged);
2715 : const AllocationFlags flags =
2716 : (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
2717 : ? CodeStubAssembler::kNone
2718 1046 : : CodeStubAssembler::kAllowLargeObjectAllocation;
2719 1046 : if (first == nullptr) {
2720 713 : first = IntPtrOrSmiConstant(0, parameter_mode);
2721 : }
2722 1046 : if (count == nullptr) {
2723 : count =
2724 303 : IntPtrOrSmiSub(TaggedToParameter(LoadFixedArrayBaseLength(fixed_array),
2725 : parameter_mode),
2726 606 : first, parameter_mode);
2727 :
2728 : CSA_ASSERT(
2729 : this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant(0, parameter_mode),
2730 : count, parameter_mode));
2731 : }
2732 1046 : if (capacity == nullptr) {
2733 : capacity = count;
2734 : } else {
2735 : CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
2736 : IntPtrOrSmiAdd(first, count, parameter_mode), capacity,
2737 : parameter_mode)));
2738 : }
2739 :
2740 1046 : Label if_fixed_double_array(this), empty(this), cow(this),
2741 3138 : done(this, {&var_result, &var_fixed_array_map});
2742 2092 : var_fixed_array_map.Bind(LoadMap(fixed_array));
2743 3138 : GotoIf(WordEqual(IntPtrOrSmiConstant(0, parameter_mode), count), &empty);
2744 :
        : // Dispatch on source kind, but only emit the branches the caller's
        : // |extract_flags| say can actually occur (asserted otherwise).
2745 1046 : if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
2746 296 : if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
2747 592 : GotoIf(IsFixedDoubleArrayMap(var_fixed_array_map.value()),
2748 592 : &if_fixed_double_array);
2749 : } else {
2750 : CSA_ASSERT(this, IsFixedDoubleArrayMap(var_fixed_array_map.value()));
2751 : }
2752 : } else {
2753 : DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
2754 : CSA_ASSERT(this, Word32BinaryNot(
2755 : IsFixedDoubleArrayMap(var_fixed_array_map.value())));
2756 : }
2757 :
2758 1046 : if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
2759 2092 : Label new_space_check(this, {&var_fixed_array_map});
2760 : Branch(WordEqual(var_fixed_array_map.value(),
2761 1046 : LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
2762 1046 : &cow, &new_space_check);
2763 :
2764 : BIND(&new_space_check);
2765 :
        : // Skip emitting the deferred old-space path when the count is a
        : // compile-time constant known to fit in new space.
2766 : bool handle_old_space = true;
2767 1046 : if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
2768 : handle_old_space = false;
2769 : CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
2770 : count, FixedArray::kHeaderSize, parameter_mode)));
2771 : } else {
2772 : int constant_count;
2773 : handle_old_space =
2774 : !TryGetIntPtrOrSmiConstantValue(count, &constant_count,
2775 940 : parameter_mode) ||
2776 18 : (constant_count >
2777 18 : FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
2778 : }
2779 :
2780 1046 : Label old_space(this, Label::kDeferred);
2781 1046 : if (handle_old_space) {
2782 : GotoIfFixedArraySizeDoesntFitInNewSpace(
2783 904 : capacity, &old_space, FixedArray::kHeaderSize, parameter_mode);
2784 : }
2785 :
2786 1046 : Comment("Copy PACKED_ELEMENTS new space");
2787 :
2788 : ElementsKind kind = PACKED_ELEMENTS;
2789 : Node* to_elements =
2790 : AllocateFixedArray(kind, capacity, parameter_mode,
2791 2092 : AllocationFlag::kNone, var_fixed_array_map.value());
2792 1046 : var_result.Bind(to_elements);
2793 : CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first, count,
2794 1046 : capacity, SKIP_WRITE_BARRIER, parameter_mode);
2795 1046 : Goto(&done);
2796 :
2797 1046 : if (handle_old_space) {
2798 : BIND(&old_space);
2799 : {
2800 904 : Comment("Copy PACKED_ELEMENTS old space");
2801 :
        : // Old/large-object space destination: must use the write barrier.
2802 : to_elements = AllocateFixedArray(kind, capacity, parameter_mode, flags,
2803 904 : var_fixed_array_map.value());
2804 904 : var_result.Bind(to_elements);
2805 : CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first,
2806 : count, capacity, UPDATE_WRITE_BARRIER,
2807 904 : parameter_mode);
2808 904 : Goto(&done);
2809 : }
2810 : }
2811 :
2812 : BIND(&cow);
2813 : {
2814 1046 : if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
        : // Whole-array extraction of a COW source can share the array
        : // itself; partial extraction must fall through to a real copy.
2815 568 : GotoIf(WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), first),
2816 568 : &new_space_check);
2817 :
2818 284 : var_result.Bind(fixed_array);
2819 284 : Goto(&done);
2820 : } else {
        : // Copy the COW array as a regular FixedArray (retarget the map).
2821 1524 : var_fixed_array_map.Bind(LoadRoot(Heap::kFixedArrayMapRootIndex));
2822 762 : Goto(&new_space_check);
2823 : }
2824 1046 : }
2825 : } else {
2826 0 : Goto(&if_fixed_double_array);
2827 : }
2828 :
2829 1046 : if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
2830 : BIND(&if_fixed_double_array);
2831 :
2832 296 : Comment("Copy PACKED_DOUBLE_ELEMENTS");
2833 :
2834 : ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
2835 : Node* to_elements = AllocateFixedArray(kind, capacity, parameter_mode,
2836 296 : flags, var_fixed_array_map.value());
2837 296 : var_result.Bind(to_elements);
2838 : CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first, count,
2839 296 : capacity, SKIP_WRITE_BARRIER, parameter_mode);
2840 :
2841 296 : Goto(&done);
2842 : }
2843 :
2844 : BIND(&empty);
2845 : {
2846 1046 : Comment("Copy empty array");
2847 :
2848 1046 : var_result.Bind(EmptyFixedArrayConstant());
2849 1046 : Goto(&done);
2850 : }
2851 :
2852 : BIND(&done);
2853 2092 : return var_result.value();
2854 : }
2855 :
        : // Writes |length| into the PropertyArray's combined length-and-hash
        : // field. |length| must be positive and fit in the LengthField bits
        : // (asserted); the hash portion is implicitly left at zero since the
        : // whole Smi field is overwritten.
2856 620 : void CodeStubAssembler::InitializePropertyArrayLength(Node* property_array,
2857 : Node* length,
2858 : ParameterMode mode) {
2859 : CSA_SLOW_ASSERT(this, IsPropertyArray(property_array));
2860 : CSA_ASSERT(
2861 : this, IntPtrOrSmiGreaterThan(length, IntPtrOrSmiConstant(0, mode), mode));
2862 : CSA_ASSERT(
2863 : this,
2864 : IntPtrOrSmiLessThanOrEqual(
2865 : length, IntPtrOrSmiConstant(PropertyArray::LengthField::kMax, mode),
2866 : mode));
2867 : StoreObjectFieldNoWriteBarrier(
2868 : property_array, PropertyArray::kLengthAndHashOffset,
2869 620 : ParameterToTagged(length, mode), MachineRepresentation::kTaggedSigned);
2870 620 : }
2871 :
        : // Allocates a PropertyArray of |capacity_node| slots (must be > 0,
        : // asserted), installs the immortal PropertyArray map, and initializes
        : // the length/hash field. Slots are left uninitialized — see
        : // FillPropertyArrayWithUndefined.
2872 620 : Node* CodeStubAssembler::AllocatePropertyArray(Node* capacity_node,
2873 : ParameterMode mode,
2874 : AllocationFlags flags) {
2875 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
2876 : CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
2877 : IntPtrOrSmiConstant(0, mode), mode));
2878 : Node* total_size = GetPropertyArrayAllocationSize(capacity_node, mode);
2879 :
2880 620 : Node* array = Allocate(total_size, flags);
2881 : Heap::RootListIndex map_index = Heap::kPropertyArrayMapRootIndex;
2882 : DCHECK(Heap::RootIsImmortalImmovable(map_index));
2883 620 : StoreMapNoWriteBarrier(array, map_index);
2884 620 : InitializePropertyArrayLength(array, capacity_node, mode);
2885 620 : return array;
2886 : }
2887 :
        : // Stores undefined into every PropertyArray slot in [from_node,
        : // to_node), using tagged stores without write barriers (undefined is an
        : // immortal root, so no barrier is required).
2888 620 : void CodeStubAssembler::FillPropertyArrayWithUndefined(Node* array,
2889 : Node* from_node,
2890 : Node* to_node,
2891 : ParameterMode mode) {
2892 : CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
2893 : CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
2894 : CSA_SLOW_ASSERT(this, IsPropertyArray(array));
2895 : STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
2896 : ElementsKind kind = PACKED_ELEMENTS;
2897 : Node* value = UndefinedConstant();
2898 : BuildFastFixedArrayForEach(array, kind, from_node, to_node,
2899 : [this, value](Node* array, Node* offset) {
2900 : StoreNoWriteBarrier(
2901 : MachineRepresentation::kTagged, array,
2902 620 : offset, value);
2903 : },
2904 1240 : mode);
2905 620 : }
2906 :
        : // Fills elements [from_node, to_node) of |array| with the root value
        : // named by |value_root_index| (the-hole or undefined; asserted). For
        : // double-element kinds the hole is written as its raw NaN bit pattern
        : // — one 64-bit store on 64-bit targets, two 32-bit stores otherwise.
2907 4419 : void CodeStubAssembler::FillFixedArrayWithValue(
2908 : ElementsKind kind, Node* array, Node* from_node, Node* to_node,
2909 : Heap::RootListIndex value_root_index, ParameterMode mode) {
2910 : CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
2911 : CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
2912 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
2913 : bool is_double = IsDoubleElementsKind(kind);
2914 : DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
2915 : value_root_index == Heap::kUndefinedValueRootIndex);
2916 : DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex);
2917 : STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
2918 : Node* double_hole =
2919 17676 : Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
2920 8838 : : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
2921 8838 : Node* value = LoadRoot(value_root_index);
2922 :
2923 : BuildFastFixedArrayForEach(
2924 : array, kind, from_node, to_node,
2925 6000 : [this, value, is_double, double_hole](Node* array, Node* offset) {
2926 6000 : if (is_double) {
2927 : // Don't use doubles to store the hole double, since manipulating the
2928 : // signaling NaN used for the hole in C++, e.g. with bit_cast, will
2929 : // change its value on ia32 (the x87 stack is used to return values
2930 : // and stores to the stack silently clear the signalling bit).
2931 : //
2932 : // TODO(danno): When we have a Float32/Float64 wrapper class that
2933 : // preserves double bits during manipulation, remove this code/change
2934 : // this to an indexed Float64 store.
2935 873 : if (Is64()) {
2936 : StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
2937 873 : double_hole);
2938 : } else {
2939 : StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
2940 0 : double_hole);
2941 : StoreNoWriteBarrier(MachineRepresentation::kWord32, array,
2942 0 : IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
2943 0 : double_hole);
2944 : }
2945 : } else {
2946 : StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
2947 5127 : value);
2948 : }
2949 6000 : },
2950 13257 : mode);
2951 4419 : }
2952 :
        : // Copies |element_count| elements starting at |first_element| from
        : // |from_array| into the start of |to_array|, converting between element
        : : // kinds as needed (including double -> tagged, which may allocate heap
        : // numbers). Iterates backwards from the end of the range to offset 0.
        : // When the destination is larger than the copied range, or a GC could
        : // observe a half-initialized array, the remainder is pre-filled with
        : // holes. Typed-array kinds are not supported (DCHECKed).
2953 4631 : void CodeStubAssembler::CopyFixedArrayElements(
2954 : ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
2955 : Node* to_array, Node* first_element, Node* element_count, Node* capacity,
2956 : WriteBarrierMode barrier_mode, ParameterMode mode) {
2957 : CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
2958 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
2959 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
2960 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
2961 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
2962 : const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
2963 4631 : Comment("[ CopyFixedArrayElements");
2964 :
2965 : // Typed array elements are not supported.
2966 : DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
2967 : DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
2968 :
2969 : Label done(this);
2970 : bool from_double_elements = IsDoubleElementsKind(from_kind);
2971 : bool to_double_elements = IsDoubleElementsKind(to_kind);
2972 : bool doubles_to_objects_conversion =
2973 5689 : IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
2974 : bool needs_write_barrier =
2975 4631 : doubles_to_objects_conversion ||
2976 904 : (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
        : // When source and destination elements have the same width (always on
        : // 64-bit), a single offset variable can index both arrays.
2977 : bool element_offset_matches =
2978 4631 : !needs_write_barrier && (Is64() || IsDoubleElementsKind(from_kind) ==
2979 : IsDoubleElementsKind(to_kind));
2980 : Node* double_hole =
2981 18524 : Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
2982 9262 : : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
2983 :
2984 4631 : if (doubles_to_objects_conversion) {
2985 : // If the copy might trigger a GC, make sure that the FixedArray is
2986 : // pre-initialized with holes to make sure that it's always in a
2987 : // consistent state.
2988 : FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
2989 447 : capacity, Heap::kTheHoleValueRootIndex, mode);
2990 4184 : } else if (element_count != capacity) {
2991 : FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
2992 2918 : Heap::kTheHoleValueRootIndex, mode);
2993 : }
2994 :
2995 : Node* first_from_element_offset =
2996 4631 : ElementOffsetFromIndex(first_element, from_kind, mode, 0);
2997 : Node* limit_offset = IntPtrAdd(first_from_element_offset,
2998 13893 : IntPtrConstant(first_element_offset));
2999 9262 : VARIABLE(
3000 : var_from_offset, MachineType::PointerRepresentation(),
3001 : ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count, mode),
3002 : from_kind, mode, first_element_offset));
3003 : // This second variable is used only when the element sizes of source and
3004 : // destination arrays do not match.
3005 9262 : VARIABLE(var_to_offset, MachineType::PointerRepresentation());
3006 4631 : if (element_offset_matches) {
3007 3280 : var_to_offset.Bind(var_from_offset.value());
3008 : } else {
3009 : var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
3010 1351 : first_element_offset));
3011 : }
3012 :
3013 4631 : Variable* vars[] = {&var_from_offset, &var_to_offset};
3014 9262 : Label decrement(this, 2, vars);
3015 :
        : // With matching offsets, bias the destination base pointer so the
        : // source offset can be reused directly for destination stores.
3016 : Node* to_array_adjusted =
3017 : element_offset_matches
3018 11191 : ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
3019 13893 : : to_array;
3020 :
3021 13893 : Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
3022 :
3023 : BIND(&decrement);
3024 : {
3025 : Node* from_offset = IntPtrSub(
3026 : var_from_offset.value(),
3027 18524 : IntPtrConstant(from_double_elements ? kDoubleSize : kPointerSize));
3028 4631 : var_from_offset.Bind(from_offset);
3029 :
3030 : Node* to_offset;
3031 4631 : if (element_offset_matches) {
3032 : to_offset = from_offset;
3033 : } else {
3034 2702 : to_offset = IntPtrSub(
3035 : var_to_offset.value(),
3036 4053 : IntPtrConstant(to_double_elements ? kDoubleSize : kPointerSize));
3037 1351 : var_to_offset.Bind(to_offset);
3038 : }
3039 :
3040 4631 : Label next_iter(this), store_double_hole(this);
3041 : Label* if_hole;
3042 4631 : if (doubles_to_objects_conversion) {
3043 : // The target elements array is already preinitialized with holes, so we
3044 : // can just proceed with the next iteration.
3045 : if_hole = &next_iter;
3046 4184 : } else if (IsDoubleElementsKind(to_kind)) {
3047 : if_hole = &store_double_hole;
3048 : } else {
3049 : // In all the other cases don't check for holes and copy the data as is.
3050 : if_hole = nullptr;
3051 : }
3052 :
3053 : Node* value = LoadElementAndPrepareForStore(
3054 4631 : from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
3055 :
3056 4631 : if (needs_write_barrier) {
3057 1351 : CHECK_EQ(to_array, to_array_adjusted);
3058 1351 : Store(to_array_adjusted, to_offset, value);
3059 3280 : } else if (to_double_elements) {
3060 : StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
3061 964 : to_offset, value);
3062 : } else {
3063 : StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array_adjusted,
3064 2316 : to_offset, value);
3065 : }
3066 4631 : Goto(&next_iter);
3067 :
3068 4631 : if (if_hole == &store_double_hole) {
3069 : BIND(&store_double_hole);
3070 : // Don't use doubles to store the hole double, since manipulating the
3071 : // signaling NaN used for the hole in C++, e.g. with bit_cast, will
3072 : // change its value on ia32 (the x87 stack is used to return values
3073 : // and stores to the stack silently clear the signalling bit).
3074 : //
3075 : // TODO(danno): When we have a Float32/Float64 wrapper class that
3076 : // preserves double bits during manipulation, remove this code/change
3077 : // this to an indexed Float64 store.
3078 964 : if (Is64()) {
3079 : StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
3080 964 : to_offset, double_hole);
3081 : } else {
3082 : StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
3083 0 : to_offset, double_hole);
3084 : StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
3085 0 : IntPtrAdd(to_offset, IntPtrConstant(kPointerSize)),
3086 0 : double_hole);
3087 : }
3088 964 : Goto(&next_iter);
3089 : }
3090 :
3091 : BIND(&next_iter);
3092 9262 : Node* compare = WordNotEqual(from_offset, limit_offset);
3093 9262 : Branch(compare, &decrement, &done);
3094 : }
3095 :
3096 : BIND(&done);
3097 9262 : Comment("] CopyFixedArrayElements");
3098 4631 : }
3099 :
// Copies |property_count| tagged values from |from_array| to |to_array|.
// |from_array| may be a PropertyArray or the empty FixedArray (asserted
// below); |to_array| must be a PropertyArray. |barrier_mode| selects whether
// each store goes through the write barrier; |mode| describes how
// |property_count| is encoded (Smi vs. intptr).
void CodeStubAssembler::CopyPropertyArrayValues(Node* from_array,
                                                Node* to_array,
                                                Node* property_count,
                                                WriteBarrierMode barrier_mode,
                                                ParameterMode mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(property_count, mode));
  CSA_SLOW_ASSERT(this, Word32Or(IsPropertyArray(from_array),
                                 IsEmptyFixedArray(from_array)));
  CSA_SLOW_ASSERT(this, IsPropertyArray(to_array));
  Comment("[ CopyPropertyArrayValues");

  bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;
  Node* start = IntPtrOrSmiConstant(0, mode);
  // PACKED_ELEMENTS gives the iteration helper tagged-pointer-sized element
  // offsets, which matches the PropertyArray layout.
  ElementsKind kind = PACKED_ELEMENTS;
  BuildFastFixedArrayForEach(
      from_array, kind, start, property_count,
      [this, to_array, needs_write_barrier](Node* array, Node* offset) {
        Node* value = Load(MachineType::AnyTagged(), array, offset);

        if (needs_write_barrier) {
          Store(to_array, offset, value);
        } else {
          StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
                              value);
        }
      },
      mode);
  Comment("] CopyPropertyArrayValues");
}
3129 :
3130 3403 : void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
3131 : Node* from_index, Node* to_index,
3132 : Node* character_count,
3133 : String::Encoding from_encoding,
3134 : String::Encoding to_encoding,
3135 : ParameterMode mode) {
3136 : // Cannot assert IsString(from_string) and IsString(to_string) here because
3137 : // CSA::SubString can pass in faked sequential strings when handling external
3138 : // subject strings.
3139 : CSA_SLOW_ASSERT(this, MatchesParameterMode(character_count, mode));
3140 : CSA_SLOW_ASSERT(this, MatchesParameterMode(from_index, mode));
3141 : CSA_SLOW_ASSERT(this, MatchesParameterMode(to_index, mode));
3142 3403 : bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
3143 3403 : bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
3144 : DCHECK_IMPLIES(to_one_byte, from_one_byte);
3145 : Comment("CopyStringCharacters %s -> %s",
3146 : from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING",
3147 3403 : to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");
3148 :
3149 3403 : ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
3150 3403 : ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
3151 : STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
3152 : int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
3153 : Node* from_offset =
3154 3403 : ElementOffsetFromIndex(from_index, from_kind, mode, header_size);
3155 : Node* to_offset =
3156 3403 : ElementOffsetFromIndex(to_index, to_kind, mode, header_size);
3157 3403 : Node* byte_count = ElementOffsetFromIndex(character_count, from_kind, mode);
3158 6806 : Node* limit_offset = IntPtrAdd(from_offset, byte_count);
3159 :
3160 : // Prepare the fast loop
3161 : MachineType type =
3162 3403 : from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
3163 : MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
3164 3403 : : MachineRepresentation::kWord16;
3165 3403 : int from_increment = 1 << ElementsKindToShiftSize(from_kind);
3166 3403 : int to_increment = 1 << ElementsKindToShiftSize(to_kind);
3167 :
3168 3403 : VARIABLE(current_to_offset, MachineType::PointerRepresentation(), to_offset);
3169 3403 : VariableList vars({¤t_to_offset}, zone());
3170 3403 : int to_index_constant = 0, from_index_constant = 0;
3171 3403 : Smi* to_index_smi = nullptr;
3172 3403 : Smi* from_index_smi = nullptr;
3173 : bool index_same =
3174 3403 : (from_encoding == to_encoding) &&
3175 3366 : (from_index == to_index ||
3176 0 : ((mode == INTPTR_PARAMETERS) &&
3177 :
3178 0 : ToInt32Constant(from_index, from_index_constant) &&
3179 0 : ToInt32Constant(to_index, to_index_constant) &&
3180 3366 : from_index_constant == to_index_constant) ||
3181 3880 : ((mode == SMI_PARAMETERS) && ToSmiConstant(from_index, from_index_smi) &&
3182 780 : ToSmiConstant(to_index, to_index_smi) &&
3183 266 : to_index_smi == from_index_smi));
3184 : BuildFastLoop(vars, from_offset, limit_offset,
3185 : [this, from_string, to_string, ¤t_to_offset, to_increment,
3186 3403 : type, rep, index_same](Node* offset) {
3187 3403 : Node* value = Load(type, from_string, offset);
3188 : StoreNoWriteBarrier(
3189 : rep, to_string,
3190 3403 : index_same ? offset : current_to_offset.value(), value);
3191 3403 : if (!index_same) {
3192 3143 : Increment(¤t_to_offset, to_increment);
3193 : }
3194 3403 : },
3195 10209 : from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
3196 3403 : }
3197 :
// Loads the element at byte |offset| of |array| (a FixedArray(-Double) of
// |from_kind|) and converts the value so it can be stored into a backing
// store of |to_kind|: double -> boxed HeapNumber, Smi/HeapNumber -> raw
// float64, as required. Jumps to |if_hole| (when non-null) if the slot holds
// the hole.
Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
                                                       Node* offset,
                                                       ElementsKind from_kind,
                                                       ElementsKind to_kind,
                                                       Label* if_hole) {
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
  if (IsDoubleElementsKind(from_kind)) {
    // Source is an unboxed double array; the hole check inspects the NaN
    // payload inside LoadDoubleWithHoleCheck.
    Node* value =
        LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
    if (!IsDoubleElementsKind(to_kind)) {
      // Target stores tagged values, so box the raw double.
      value = AllocateHeapNumberWithValue(value);
    }
    return value;

  } else {
    Node* value = Load(MachineType::AnyTagged(), array, offset);
    if (if_hole) {
      // Tagged arrays represent holes with the hole sentinel object.
      GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
    }
    if (IsDoubleElementsKind(to_kind)) {
      // Target stores raw doubles: unbox the Smi or HeapNumber.
      if (IsSmiElementsKind(from_kind)) {
        value = SmiToFloat64(value);
      } else {
        value = LoadHeapNumberValue(value);
      }
    }
    return value;
  }
}
3227 :
3228 971 : Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
3229 : ParameterMode mode) {
3230 : CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
3231 971 : Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
3232 971 : Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
3233 971 : Node* padding = IntPtrOrSmiConstant(16, mode);
3234 971 : return IntPtrOrSmiAdd(new_capacity, padding, mode);
3235 : }
3236 :
// Convenience overload: grows |object|'s |elements| backing store so that
// Smi index |key| fits, reading the current capacity from the elements'
// length field. Jumps to |bailout| if growing is not possible here (e.g. the
// gap or allocation would be too large). Returns the new elements array.
Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
                                                 ElementsKind kind, Node* key,
                                                 Label* bailout) {
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
  CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
  Node* capacity = LoadFixedArrayBaseLength(elements);

  // Normalize the Smi-tagged capacity and key into the platform's preferred
  // parameter encoding before delegating to the main implementation.
  ParameterMode mode = OptimalParameterMode();
  capacity = TaggedToParameter(capacity, mode);
  key = TaggedToParameter(key, mode);

  return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
                                 bailout);
}
3252 :
// Grows |object|'s |elements| backing store (current size |capacity|, encoded
// per |mode|) so that index |key| fits. Jumps to |bailout| when the requested
// index would leave a gap larger than JSObject::kMaxGap past the current
// capacity. Returns the new elements array.
Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
                                                 ElementsKind kind, Node* key,
                                                 Node* capacity,
                                                 ParameterMode mode,
                                                 Label* bailout) {
  Comment("TryGrowElementsCapacity");
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));

  // If the gap growth is too big, fall back to the runtime.
  Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
  Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);

  // Calculate the capacity of the new backing store.
  // The new store must hold at least key + 1 elements.
  Node* new_capacity = CalculateNewElementsCapacity(
      IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
  return GrowElementsCapacity(object, elements, kind, kind, capacity,
                              new_capacity, mode, bailout);
}
3275 :
// Allocates a new backing store of |new_capacity| elements of |to_kind|,
// copies over the existing |capacity| elements from |elements| (of
// |from_kind|), and installs the new store on |object|. Jumps to |bailout|
// when the allocation would not fit in a new-space page. Returns the new
// elements array.
Node* CodeStubAssembler::GrowElementsCapacity(
    Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
    Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
  Comment("[ GrowElementsCapacity");
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));

  // If size of the allocation for the new capacity doesn't fit in a page
  // that we can bump-pointer allocate from, fall back to the runtime.
  int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(
             new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
         bailout);

  // Allocate the new backing store.
  Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);

  // Copy the elements from the old elements store to the new.
  // The size-check above guarantees that the |new_elements| is allocated
  // in new space so we can skip the write barrier.
  CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
                         new_capacity, SKIP_WRITE_BARRIER, mode);

  // Installing the new store uses a write-barriered field store.
  StoreObjectField(object, JSObject::kElementsOffset, new_elements);
  Comment("] GrowElementsCapacity");
  return new_elements;
}
3305 :
// Writes an AllocationMemento immediately after |base| (at offset
// |base_allocation_size| inside the same allocation) pointing at
// |allocation_site|, and bumps the site's pretenure-create count when
// pretenuring statistics are enabled.
void CodeStubAssembler::InitializeAllocationMemento(Node* base,
                                                    Node* base_allocation_size,
                                                    Node* allocation_site) {
  Comment("[Initialize AllocationMemento");
  // The memento lives inside the just-made allocation, right behind the
  // object proper.
  Node* memento = InnerAllocate(base, base_allocation_size);
  StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
  StoreObjectFieldNoWriteBarrier(
      memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    // Count one more object created for this site; used by the pretenuring
    // heuristics.
    Node* count = LoadObjectField(allocation_site,
                                  AllocationSite::kPretenureCreateCountOffset);
    Node* incremented_count = SmiAdd(count, SmiConstant(1));
    StoreObjectFieldNoWriteBarrier(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset,
                                   incremented_count);
  }
  Comment("]");
}
3324 :
// Converts a tagged |value| to a raw float64 if it is a Smi or HeapNumber;
// otherwise jumps to |if_valueisnotnumber| (no conversion is attempted here).
Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
                                            Label* if_valueisnotnumber) {
  Label out(this);
  VARIABLE(var_result, MachineRepresentation::kFloat64);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this), if_valueisnotsmi(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  BIND(&if_valueissmi);
  {
    // Convert the Smi {value}.
    var_result.Bind(SmiToFloat64(value));
    Goto(&out);
  }

  BIND(&if_valueisnotsmi);
  {
    // Check if {value} is a HeapNumber.
    Label if_valueisheapnumber(this);
    Branch(IsHeapNumber(value), &if_valueisheapnumber, if_valueisnotnumber);

    BIND(&if_valueisheapnumber);
    {
      // Load the floating point value.
      var_result.Bind(LoadHeapNumberValue(value));
      Goto(&out);
    }
  }
  BIND(&out);
  return var_result.value();
}
3357 :
// Converts any tagged |value| to a raw float64, calling the
// NonNumberToNumber builtin (which may run arbitrary JS, hence |context|)
// for values that are not already Smis or HeapNumbers.
Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged);
  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  BIND(&loop);
  {
    Label if_valueisnotnumber(this, Label::kDeferred);

    // Load the current {value}.
    value = var_value.value();

    // Convert {value} to Float64 if it is a number and convert it to a number
    // otherwise.
    Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
    var_result.Bind(result);
    Goto(&done_loop);

    BIND(&if_valueisnotnumber);
    {
      // Convert the {value} to a Number first.
      var_value.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, value));
      // NonNumberToNumber always yields a Number, so the retry will succeed.
      Goto(&loop);
    }
  }
  BIND(&done_loop);
  return var_result.value();
}
3388 :
3389 992 : Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
3390 992 : VARIABLE(var_result, MachineRepresentation::kWord32);
3391 992 : Label done(this);
3392 : TaggedToWord32OrBigIntImpl<Feedback::kNone, Object::Conversion::kToNumber>(
3393 992 : context, value, &done, &var_result);
3394 : BIND(&done);
3395 1984 : return var_result.value();
3396 : }
3397 :
// Truncate {value} to word32 and jump to {if_number} if it is a Number,
// or find that it is a BigInt and jump to {if_bigint}.
// No type feedback is recorded (Feedback::kNone).
void CodeStubAssembler::TaggedToWord32OrBigInt(Node* context, Node* value,
                                               Label* if_number,
                                               Variable* var_word32,
                                               Label* if_bigint,
                                               Variable* var_bigint) {
  TaggedToWord32OrBigIntImpl<Feedback::kNone, Object::Conversion::kToNumeric>(
      context, value, if_number, var_word32, if_bigint, var_bigint);
}
3408 :
// Truncate {value} to word32 and jump to {if_number} if it is a Number,
// or find that it is a BigInt and jump to {if_bigint}. In either case,
// store the type feedback in {var_feedback}.
void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
    Node* context, Node* value, Label* if_number, Variable* var_word32,
    Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
  TaggedToWord32OrBigIntImpl<Feedback::kCollect,
                             Object::Conversion::kToNumeric>(
      context, value, if_number, var_word32, if_bigint, var_bigint,
      var_feedback);
}
3420 :
// Shared implementation for the TaggedToWord32* family. Converts {value} to
// a word32 (stored into {var_word32}, then jumps to {if_number}), or — when
// |conversion| == kToNumeric — detects a BigInt (stored into {var_bigint},
// then jumps to {if_bigint}). When |feedback| == kCollect, binary-operation
// type feedback is accumulated into {var_feedback}; otherwise {var_feedback}
// (and for kToNumber, {var_bigint}/{if_bigint}) must be null. Non-numeric
// inputs are converted via the NonNumberToNumber/Numeric builtins and the
// whole check is retried in a loop.
template <CodeStubAssembler::Feedback feedback, Object::Conversion conversion>
void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
    Node* context, Node* value, Label* if_number, Variable* var_word32,
    Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
  DCHECK(var_word32->rep() == MachineRepresentation::kWord32);
  DCHECK(var_bigint == nullptr ||
         var_bigint->rep() == MachineRepresentation::kTagged);
  DCHECK(var_feedback == nullptr ||
         var_feedback->rep() == MachineRepresentation::kTaggedSigned);

  // We might need to loop after conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  if (feedback == Feedback::kCollect) {
    var_feedback->Bind(SmiConstant(BinaryOperationFeedback::kNone));
  } else {
    DCHECK(var_feedback == nullptr);
  }
  // var_feedback is only a loop variable when feedback is collected; the
  // array is sized down by one otherwise.
  Variable* loop_vars[] = {&var_value, var_feedback};
  int num_vars = feedback == Feedback::kCollect ? arraysize(loop_vars)
                                                : arraysize(loop_vars) - 1;
  Label loop(this, num_vars, loop_vars);
  Goto(&loop);
  BIND(&loop);
  {
    value = var_value.value();
    Label not_smi(this), is_heap_number(this), is_oddball(this),
        is_bigint(this);
    GotoIf(TaggedIsNotSmi(value), &not_smi);

    // {value} is a Smi.
    var_word32->Bind(SmiToWord32(value));
    if (feedback == Feedback::kCollect) {
      var_feedback->Bind(
          SmiOr(var_feedback->value(),
                SmiConstant(BinaryOperationFeedback::kSignedSmall)));
    }
    Goto(if_number);

    BIND(&not_smi);
    Node* map = LoadMap(value);
    GotoIf(IsHeapNumberMap(map), &is_heap_number);
    Node* instance_type = LoadMapInstanceType(map);
    if (conversion == Object::Conversion::kToNumeric) {
      GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
    }

    // Not HeapNumber (or BigInt if conversion == kToNumeric).
    {
      if (feedback == Feedback::kCollect) {
        // We do not require an Or with earlier feedback here because once we
        // convert the value to a Numeric, we cannot reach this path. We can
        // only reach this path on the first pass when the feedback is kNone.
        CSA_ASSERT(this, SmiEqual(var_feedback->value(),
                                  SmiConstant(BinaryOperationFeedback::kNone)));
      }
      GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
      // Not an oddball either -> convert.
      auto builtin = conversion == Object::Conversion::kToNumeric
                         ? Builtins::kNonNumberToNumeric
                         : Builtins::kNonNumberToNumber;
      var_value.Bind(CallBuiltin(builtin, context, value));
      if (feedback == Feedback::kCollect) {
        var_feedback->Bind(SmiConstant(BinaryOperationFeedback::kAny));
      }
      Goto(&loop);

      BIND(&is_oddball);
      // Oddballs cache their ToNumber result in a field; no builtin call
      // needed.
      var_value.Bind(LoadObjectField(value, Oddball::kToNumberOffset));
      if (feedback == Feedback::kCollect) {
        var_feedback->Bind(
            SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
      }
      Goto(&loop);
    }

    BIND(&is_heap_number);
    var_word32->Bind(TruncateHeapNumberValueToWord32(value));
    if (feedback == Feedback::kCollect) {
      var_feedback->Bind(SmiOr(var_feedback->value(),
                               SmiConstant(BinaryOperationFeedback::kNumber)));
    }
    Goto(if_number);

    if (conversion == Object::Conversion::kToNumeric) {
      BIND(&is_bigint);
      var_bigint->Bind(value);
      if (feedback == Feedback::kCollect) {
        var_feedback->Bind(
            SmiOr(var_feedback->value(),
                  SmiConstant(BinaryOperationFeedback::kBigInt)));
      }
      Goto(if_bigint);
    }
  }
}
3516 :
3517 3162 : Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
3518 6324 : Node* value = LoadHeapNumberValue(object);
3519 6324 : return TruncateFloat64ToWord32(value);
3520 : }
3521 :
// Tags a raw float64 |value| as a Number: a Smi when the value round-trips
// through int32 (excluding -0, which must stay a HeapNumber), otherwise a
// freshly allocated HeapNumber.
TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
    SloppyTNode<Float64T> value) {
  TNode<Int32T> value32 = RoundFloat64ToInt32(value);
  TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);

  Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);

  Label if_valueisequal(this), if_valueisnotequal(this);
  Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
  BIND(&if_valueisequal);
  {
    GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
    // value32 == 0: distinguish +0 from -0 via the sign bit in the high
    // word; -0 cannot be represented as a Smi.
    Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
                         Int32Constant(0)),
           &if_valueisheapnumber, &if_valueisint32);
  }
  BIND(&if_valueisnotequal);
  Goto(&if_valueisheapnumber);

  TVARIABLE(Number, var_result);
  BIND(&if_valueisint32);
  {
    if (Is64()) {
      // On 64-bit, every int32 fits in a Smi.
      TNode<Smi> result = SmiTag(ChangeInt32ToIntPtr(value32));
      var_result = result;
      Goto(&if_join);
    } else {
      // On 32-bit, Smi-tagging is value+value; overflow means the int32
      // exceeds the Smi range and we need a HeapNumber.
      TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
      TNode<BoolT> overflow = Projection<1>(pair);
      Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
      Branch(overflow, &if_overflow, &if_notoverflow);
      BIND(&if_overflow);
      Goto(&if_valueisheapnumber);
      BIND(&if_notoverflow);
      {
        TNode<IntPtrT> result = ChangeInt32ToIntPtr(Projection<0>(pair));
        var_result = BitcastWordToTaggedSigned(result);
        Goto(&if_join);
      }
    }
  }
  BIND(&if_valueisheapnumber);
  {
    var_result = AllocateHeapNumberWithValue(value);
    Goto(&if_join);
  }
  BIND(&if_join);
  return var_result;
}
3571 :
// Tags a raw int32 |value| as a Number: always a Smi on 64-bit platforms;
// on 32-bit platforms a HeapNumber when the value overflows the Smi range.
TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(
    SloppyTNode<Int32T> value) {
  if (Is64()) {
    // Every int32 fits in a 64-bit Smi.
    return SmiTag(ChangeInt32ToIntPtr(value));
  }
  TVARIABLE(Number, var_result);
  // 32-bit Smi-tagging is value+value; overflow detects values outside the
  // 31-bit Smi range.
  TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
  TNode<BoolT> overflow = Projection<1>(pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_overflow);
  {
    TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
    TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
    var_result = result;
  }
  Goto(&if_join);
  BIND(&if_notoverflow);
  {
    TNode<Smi> result =
        BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
    var_result = result;
  }
  Goto(&if_join);
  BIND(&if_join);
  return var_result;
}
3600 :
// Tags a raw uint32 |value| as a Number: a Smi when it fits in the Smi
// range, otherwise a HeapNumber holding the (always non-negative) value.
TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(
    SloppyTNode<Uint32T> value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  TVARIABLE(Number, var_result);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow,
         &if_not_overflow);

  BIND(&if_not_overflow);
  {
    if (Is64()) {
      // Zero-extend first so the 64-bit Smi tag sees the unsigned value.
      var_result =
          SmiTag(ReinterpretCast<IntPtrT>(ChangeUint32ToUint64(value)));
    } else {
      // If tagging {value} results in an overflow, we need to use a HeapNumber
      // to represent it.
      // TODO(tebbi): This overflow can never happen.
      TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(
          UncheckedCast<Int32T>(value), UncheckedCast<Int32T>(value));
      TNode<BoolT> overflow = Projection<1>(pair);
      GotoIf(overflow, &if_overflow);

      TNode<Smi> result =
          BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
      var_result = result;
    }
  }
  Goto(&if_join);

  BIND(&if_overflow);
  {
    // ChangeUint32ToFloat64 preserves the unsigned interpretation.
    TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
    var_result = AllocateHeapNumberWithValue(float64_value);
  }
  Goto(&if_join);

  BIND(&if_join);
  return var_result;
}
3641 :
// Implements the receiver coercion used by String.prototype methods:
// returns |value| as a String, converting Smis and non-string HeapObjects
// via NumberToString/ToString, and throws TypeError "called on null or
// undefined" (citing |method_name|) for null/undefined receivers.
Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
                                      char const* method_name) {
  VARIABLE(var_value, MachineRepresentation::kTagged, value);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
      if_valueisstring(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
  BIND(&if_valueisnotsmi);
  {
    // Load the instance type of the {value}.
    Node* value_instance_type = LoadInstanceType(value);

    // Check if the {value} is already String.
    Label if_valueisnotstring(this, Label::kDeferred);
    Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
           &if_valueisnotstring);
    BIND(&if_valueisnotstring);
    {
      // Check if the {value} is null.
      Label if_valueisnullorundefined(this, Label::kDeferred),
          if_valueisnotnullorundefined(this, Label::kDeferred),
          if_valueisnotnull(this, Label::kDeferred);
      Branch(WordEqual(value, NullConstant()), &if_valueisnullorundefined,
             &if_valueisnotnull);
      BIND(&if_valueisnotnull);
      {
        // Check if the {value} is undefined.
        Branch(WordEqual(value, UndefinedConstant()),
               &if_valueisnullorundefined, &if_valueisnotnullorundefined);
        BIND(&if_valueisnotnullorundefined);
        {
          // Convert the {value} to a String.
          var_value.Bind(CallBuiltin(Builtins::kToString, context, value));
          Goto(&if_valueisstring);
        }
      }

      BIND(&if_valueisnullorundefined);
      {
        // The {value} is either null or undefined.
        CallRuntime(Runtime::kThrowCalledOnNullOrUndefined, context,
                    StringConstant(method_name));
        // The runtime call throws unconditionally.
        Unreachable();
      }
    }
  }
  BIND(&if_valueissmi);
  {
    // The {value} is a Smi, convert it to a String.
    Callable callable = CodeFactory::NumberToString(isolate());
    var_value.Bind(CallStub(callable, context, value));
    Goto(&if_valueisstring);
  }
  BIND(&if_valueisstring);
  return var_value.value();
}
3699 :
// Untags a Number |value| (Smi or HeapNumber) to a raw float64.
TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(
    SloppyTNode<Number> value) {
  // TODO(tebbi): Remove assert once argument is TNode instead of SloppyTNode.
  CSA_SLOW_ASSERT(this, IsNumber(value));
  TVARIABLE(Float64T, result);
  Label smi(this);
  Label done(this, &result);
  GotoIf(TaggedIsSmi(value), &smi);
  // Not a Smi: must be a HeapNumber (per the IsNumber assert above).
  result = LoadHeapNumberValue(CAST(value));
  Goto(&done);

  BIND(&smi);
  {
    result = SmiToFloat64(CAST(value));
    Goto(&done);
  }

  BIND(&done);
  return result;
}
3720 :
// Untags a non-negative Number |value| (Smi or HeapNumber) to a raw
// pointer-sized unsigned integer. Non-negativity is only checked for the
// Smi path (slow assert below); the HeapNumber path relies on
// ChangeFloat64ToUintPtr.
TNode<UintPtrT> CodeStubAssembler::ChangeNonnegativeNumberToUintPtr(
    SloppyTNode<Number> value) {
  // TODO(tebbi): Remove assert once argument is TNode instead of SloppyTNode.
  CSA_SLOW_ASSERT(this, IsNumber(value));
  TVARIABLE(UintPtrT, result);
  Label smi(this), done(this, &result);
  GotoIf(TaggedIsSmi(value), &smi);

  TNode<HeapNumber> value_hn = CAST(value);
  result = ChangeFloat64ToUintPtr(LoadHeapNumberValue(value_hn));
  Goto(&done);

  BIND(&smi);
  TNode<Smi> value_smi = CAST(value);
  // A non-negative Smi's word representation is already the desired value.
  CSA_SLOW_ASSERT(this, SmiLessThan(SmiConstant(-1), value_smi));
  result = UncheckedCast<UintPtrT>(SmiToWord(value_smi));
  Goto(&done);

  BIND(&done);
  return result;
}
3742 :
3743 35183 : Node* CodeStubAssembler::TimesPointerSize(Node* value) {
3744 105549 : return WordShl(value, IntPtrConstant(kPointerSizeLog2));
3745 : }
3746 :
// Implements the receiver coercion used by primitive wrapper prototype
// methods (Boolean/Number/String/Symbol.prototype): returns the underlying
// primitive of |value|, unwrapping JSValue wrappers, and throws a
// kNotGeneric TypeError (citing |method_name|) when the receiver is not of
// the requested |primitive_type|.
Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
                                     PrimitiveType primitive_type,
                                     char const* method_name) {
  // We might need to loop once due to JSValue unboxing.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  Label loop(this, &var_value), done_loop(this),
      done_throw(this, Label::kDeferred);
  Goto(&loop);
  BIND(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    // A Smi is only acceptable when a Number receiver is expected.
    GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
                                   ? &done_loop
                                   : &done_throw);

    // Load the map of the {value}.
    Node* value_map = LoadMap(value);

    // Load the instance type of the {value}.
    Node* value_instance_type = LoadMapInstanceType(value_map);

    // Check if {value} is a JSValue.
    Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
    Branch(InstanceTypeEqual(value_instance_type, JS_VALUE_TYPE),
           &if_valueisvalue, &if_valueisnotvalue);

    BIND(&if_valueisvalue);
    {
      // Load the actual value from the {value}.
      // Unwrap the JSValue and re-check the unwrapped primitive.
      var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
      Goto(&loop);
    }

    BIND(&if_valueisnotvalue);
    {
      switch (primitive_type) {
        case PrimitiveType::kBoolean:
          GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
          break;
        case PrimitiveType::kNumber:
          GotoIf(WordEqual(value_map, HeapNumberMapConstant()), &done_loop);
          break;
        case PrimitiveType::kString:
          GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
          break;
        case PrimitiveType::kSymbol:
          GotoIf(WordEqual(value_map, SymbolMapConstant()), &done_loop);
          break;
      }
      Goto(&done_throw);
    }
  }

  BIND(&done_throw);
  {
    // Map the expected primitive type to its name for the error message.
    const char* primitive_name = nullptr;
    switch (primitive_type) {
      case PrimitiveType::kBoolean:
        primitive_name = "Boolean";
        break;
      case PrimitiveType::kNumber:
        primitive_name = "Number";
        break;
      case PrimitiveType::kString:
        primitive_name = "String";
        break;
      case PrimitiveType::kSymbol:
        primitive_name = "Symbol";
        break;
    }
    CHECK_NOT_NULL(primitive_name);

    // The {value} is not a compatible receiver for this method.
    ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
                   primitive_name);
  }

  BIND(&done_loop);
  return var_value.value();
}
3830 :
3831 961 : void CodeStubAssembler::ThrowIncompatibleMethodReceiver(Node* context,
3832 : const char* method_name,
3833 : Node* receiver) {
3834 : CallRuntime(Runtime::kThrowIncompatibleMethodReceiver, context,
3835 961 : StringConstant(method_name), receiver);
3836 961 : Unreachable();
3837 961 : }
3838 :
// Checks that |value| is a HeapObject of exactly |instance_type|, throwing
// an incompatible-method-receiver error (citing |method_name|) otherwise.
// Returns the map of |value| for reuse by the caller.
Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
                                                InstanceType instance_type,
                                                char const* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  VARIABLE(var_value_map, MachineRepresentation::kTagged);

  // Smis have no map and can never match a specific instance type.
  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
         &throw_exception);

  // The {value} is not a compatible receiver for this method.
  BIND(&throw_exception);
  ThrowIncompatibleMethodReceiver(context, method_name, value);

  BIND(&out);
  return var_value_map.value();
}
3861 :
// Checks that |value| is a JSReceiver, throwing a TypeError built from
// |msg_template| (citing |method_name|) otherwise. Returns the map of
// |value| for reuse by the caller.
Node* CodeStubAssembler::ThrowIfNotJSReceiver(
    Node* context, Node* value, MessageTemplate::Template msg_template,
    const char* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  VARIABLE(var_value_map, MachineRepresentation::kTagged);

  // Smis are never JSReceivers.
  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(IsJSReceiverInstanceType(value_instance_type), &out, &throw_exception);

  // The {value} is not a compatible receiver for this method.
  BIND(&throw_exception);
  ThrowTypeError(context, msg_template, method_name);

  BIND(&out);
  return var_value_map.value();
}
3883 :
// Convenience overload: converts the optional C-string message arguments to
// String constants and dispatches to the Node*-based overload below.
void CodeStubAssembler::ThrowTypeError(Node* context,
                                       MessageTemplate::Template message,
                                       char const* arg0, char const* arg1) {
  Node* arg0_node = nullptr;
  if (arg0) arg0_node = StringConstant(arg0);
  Node* arg1_node = nullptr;
  if (arg1) arg1_node = StringConstant(arg1);
  ThrowTypeError(context, message, arg0_node, arg1_node);
}

// Throws a TypeError built from the {message} template and up to three
// message arguments, then terminates the current CSA control path.
// Never returns. A nullptr argument implies all following arguments are
// nullptr too, so the runtime call arity is chosen by the first gap.
void CodeStubAssembler::ThrowTypeError(Node* context,
                                       MessageTemplate::Template message,
                                       Node* arg0, Node* arg1, Node* arg2) {
  Node* template_index = SmiConstant(message);
  if (arg0 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index);
  } else if (arg1 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0);
  } else if (arg2 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1);
  } else {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1,
                arg2);
  }
  // The runtime call always throws.
  Unreachable();
}
3910 :
// ----------------------------------------------------------------------------
// Map predicates. Each returns a Word32 boolean node testing a single field
// or bit of the given Map. CSA_ASSERT/CSA_SLOW_ASSERT checks are debug-only.
// ----------------------------------------------------------------------------

// Compares a (Word32) instance type against the constant {type}.
Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) {
  return Word32Equal(instance_type, Int32Constant(type));
}

// Tests whether {map}'s instance type is in the "special receiver" range
// (at or below LAST_SPECIAL_RECEIVER_TYPE).
Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
  uint32_t mask =
      1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
  USE(mask);
  // Interceptors or access checks imply special receiver.
  CSA_ASSERT(this,
             SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special,
                            Int32Constant(1), MachineRepresentation::kWord32));
  return is_special;
}

// Tests the DictionaryMap bit in {map}'s bit field 3 (dictionary-mode
// properties).
TNode<BoolT> CodeStubAssembler::IsDictionaryMap(SloppyTNode<Map> map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Node* bit_field3 = LoadMapBitField3(map);
  return IsSetWord32<Map::DictionaryMap>(bit_field3);
}

// Tests the kIsExtensible bit in {map}'s bit field 2.
Node* CodeStubAssembler::IsExtensibleMap(Node* map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32(LoadMapBitField2(map), 1 << Map::kIsExtensible);
}

// Tests the kIsCallable bit in {map}'s bit field.
Node* CodeStubAssembler::IsCallableMap(Node* map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32(LoadMapBitField(map), 1 << Map::kIsCallable);
}

// Tests the Deprecated bit in {map}'s bit field 3.
Node* CodeStubAssembler::IsDeprecatedMap(Node* map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::Deprecated>(LoadMapBitField3(map));
}

// Tests the kIsUndetectable bit in {map}'s bit field.
Node* CodeStubAssembler::IsUndetectableMap(Node* map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32(LoadMapBitField(map), 1 << Map::kIsUndetectable);
}

// Tests whether the isolate-wide array protector cell has been invalidated
// (its value equals Isolate::kProtectorInvalid).
Node* CodeStubAssembler::IsArrayProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(Heap::kArrayProtectorRootIndex);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// Tests whether the isolate-wide species protector cell has been invalidated.
Node* CodeStubAssembler::IsSpeciesProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(Heap::kSpeciesProtectorRootIndex);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// Tests whether {map}'s prototype is the native context's initial
// Array.prototype (looked up through {context}).
Node* CodeStubAssembler::IsPrototypeInitialArrayPrototype(Node* context,
                                                          Node* map) {
  Node* const native_context = LoadNativeContext(context);
  Node* const initial_array_prototype = LoadContextElement(
      native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
  Node* proto = LoadMapPrototype(map);
  return WordEqual(proto, initial_array_prototype);
}

// Object-level wrapper over IsCallableMap.
Node* CodeStubAssembler::IsCallable(Node* object) {
  return IsCallableMap(LoadMap(object));
}

// Tests {object}'s map against the Cell map root.
Node* CodeStubAssembler::IsCell(Node* object) {
  return WordEqual(LoadMap(object), LoadRoot(Heap::kCellMapRootIndex));
}

// Tests the kIsConstructor bit in {map}'s bit field.
Node* CodeStubAssembler::IsConstructorMap(Node* map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32(LoadMapBitField(map), 1 << Map::kIsConstructor);
}

// Object-level wrapper over IsConstructorMap.
Node* CodeStubAssembler::IsConstructor(Node* object) {
  return IsConstructorMap(LoadMap(object));
}

// Tests the kHasPrototypeSlot bit in {map}'s bit field (functions that carry
// a prototype slot).
Node* CodeStubAssembler::IsFunctionWithPrototypeSlotMap(Node* map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32(LoadMapBitField(map), 1 << Map::kHasPrototypeSlot);
}
3998 :
// ----------------------------------------------------------------------------
// Instance-type predicates. Each takes a Word32 instance type and returns a
// Word32 boolean node. Range checks rely on STATIC_ASSERTed layout of the
// InstanceType enum.
// ----------------------------------------------------------------------------

// Special receivers occupy the low end of the JS-receiver range.
Node* CodeStubAssembler::IsSpecialReceiverInstanceType(Node* instance_type) {
  STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
  return Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
}

// All string types are below FIRST_NONSTRING_TYPE.
Node* CodeStubAssembler::IsStringInstanceType(Node* instance_type) {
  STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
  return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
}

// Tests the encoding bit of a string instance type; {instance_type} must
// already be known to be a string.
Node* CodeStubAssembler::IsOneByteStringInstanceType(Node* instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  return Word32Equal(
      Word32And(instance_type, Int32Constant(kStringEncodingMask)),
      Int32Constant(kOneByteStringTag));
}

// Tests the representation bits for a sequential string.
Node* CodeStubAssembler::IsSequentialStringInstanceType(Node* instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  return Word32Equal(
      Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
      Int32Constant(kSeqStringTag));
}

// Tests the representation bits for a cons string.
Node* CodeStubAssembler::IsConsStringInstanceType(Node* instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  return Word32Equal(
      Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
      Int32Constant(kConsStringTag));
}

// Indirect strings (sliced/thin) share a single tag bit, so a mask test
// suffices; the result is a non-zero word rather than a canonical boolean.
Node* CodeStubAssembler::IsIndirectStringInstanceType(Node* instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  STATIC_ASSERT(kIsIndirectStringMask == 0x1);
  STATIC_ASSERT(kIsIndirectStringTag == 0x1);
  return Word32And(instance_type, Int32Constant(kIsIndirectStringMask));
}

// Tests the representation bits for an external string.
Node* CodeStubAssembler::IsExternalStringInstanceType(Node* instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  return Word32Equal(
      Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
      Int32Constant(kExternalStringTag));
}

// Tests the short-external bit of a string instance type.
Node* CodeStubAssembler::IsShortExternalStringInstanceType(
    Node* instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  STATIC_ASSERT(kShortExternalStringTag != 0);
  return IsSetWord32(instance_type, kShortExternalStringMask);
}

// JS receivers occupy the top of the instance-type range, so a single lower
// bound suffices.
Node* CodeStubAssembler::IsJSReceiverInstanceType(Node* instance_type) {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return Int32GreaterThanOrEqual(instance_type,
                                 Int32Constant(FIRST_JS_RECEIVER_TYPE));
}
4057 :
4058 31 : Node* CodeStubAssembler::IsArrayIteratorInstanceType(Node* instance_type) {
4059 : return Uint32LessThan(
4060 : Int32Constant(LAST_ARRAY_ITERATOR_TYPE - FIRST_ARRAY_ITERATOR_TYPE),
4061 155 : Int32Sub(instance_type, Int32Constant(FIRST_ARRAY_ITERATOR_TYPE)));
4062 : }
4063 :
// ----------------------------------------------------------------------------
// Object/map-level predicates: thin wrappers that load a map or instance type
// and delegate to the corresponding instance-type predicate.
// ----------------------------------------------------------------------------

Node* CodeStubAssembler::IsJSReceiverMap(Node* map) {
  return IsJSReceiverInstanceType(LoadMapInstanceType(map));
}

Node* CodeStubAssembler::IsJSReceiver(Node* object) {
  return IsJSReceiverMap(LoadMap(object));
}

// True for null as well as any JSReceiver (the valid prototype values).
Node* CodeStubAssembler::IsNullOrJSReceiver(Node* object) {
  return Word32Or(IsJSReceiver(object), IsNull(object));
}

Node* CodeStubAssembler::IsJSGlobalProxyInstanceType(Node* instance_type) {
  return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
}

// JS objects occupy the top of the instance-type range, so a single lower
// bound suffices.
Node* CodeStubAssembler::IsJSObjectInstanceType(Node* instance_type) {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return Int32GreaterThanOrEqual(instance_type,
                                 Int32Constant(FIRST_JS_OBJECT_TYPE));
}

Node* CodeStubAssembler::IsJSObjectMap(Node* map) {
  CSA_ASSERT(this, IsMap(map));
  return IsJSObjectInstanceType(LoadMapInstanceType(map));
}

Node* CodeStubAssembler::IsJSObject(Node* object) {
  return IsJSObjectMap(LoadMap(object));
}

Node* CodeStubAssembler::IsJSProxy(Node* object) {
  return HasInstanceType(object, JS_PROXY_TYPE);
}

Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) {
  return HasInstanceType(object, JS_GLOBAL_PROXY_TYPE);
}

// A Map's map is the meta map.
Node* CodeStubAssembler::IsMap(Node* map) { return IsMetaMap(LoadMap(map)); }

Node* CodeStubAssembler::IsJSValueInstanceType(Node* instance_type) {
  return InstanceTypeEqual(instance_type, JS_VALUE_TYPE);
}

Node* CodeStubAssembler::IsJSValue(Node* object) {
  return IsJSValueMap(LoadMap(object));
}

Node* CodeStubAssembler::IsJSValueMap(Node* map) {
  return IsJSValueInstanceType(LoadMapInstanceType(map));
}

Node* CodeStubAssembler::IsJSArrayInstanceType(Node* instance_type) {
  return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
}

Node* CodeStubAssembler::IsJSArray(Node* object) {
  return IsJSArrayMap(LoadMap(object));
}

Node* CodeStubAssembler::IsJSArrayMap(Node* map) {
  return IsJSArrayInstanceType(LoadMapInstanceType(map));
}

Node* CodeStubAssembler::IsFixedArray(Node* object) {
  return HasInstanceType(object, FIXED_ARRAY_TYPE);
}

Node* CodeStubAssembler::IsPropertyArray(Node* object) {
  return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
}
4136 :
// This complicated check is due to elements oddities. If a smi array is empty
// after Array.p.shift, it is replaced by the empty array constant. If it is
// later filled with a double element, we try to grow it but pass in a double
// elements kind. Usually this would cause a size mismatch (since the source
// fixed array has HOLEY_ELEMENTS and destination has
// HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
// source array is empty.
// TODO(jgruber): It might we worth creating an empty_double_array constant to
// simplify this case.
//
// Returns a Word32 boolean: true when {object} matches {kind} (per
// IsFixedArrayWithKind) OR has length zero; false otherwise.
Node* CodeStubAssembler::IsFixedArrayWithKindOrEmpty(Node* object,
                                                     ElementsKind kind) {
  Label out(this);
  // Default to true; the fall-through path below overwrites with false.
  VARIABLE(var_result, MachineRepresentation::kWord32, Int32Constant(1));

  GotoIf(IsFixedArrayWithKind(object, kind), &out);

  // Kind mismatch: still accept an empty backing store (see comment above).
  Node* const length = LoadFixedArrayBaseLength(object);
  GotoIf(SmiEqual(length, SmiConstant(0)), &out);

  var_result.Bind(Int32Constant(0));
  Goto(&out);

  BIND(&out);
  return var_result.value();
}
4162 :
4163 0 : Node* CodeStubAssembler::IsFixedArrayWithKind(Node* object, ElementsKind kind) {
4164 0 : if (IsDoubleElementsKind(kind)) {
4165 0 : return IsFixedDoubleArray(object);
4166 : } else {
4167 : DCHECK(IsSmiOrObjectElementsKind(kind));
4168 0 : return Word32Or(IsFixedArray(object), IsHashTable(object));
4169 : }
4170 : }
4171 :
// ----------------------------------------------------------------------------
// Heap-object predicates: map or instance-type dispatch wrappers.
// ----------------------------------------------------------------------------

Node* CodeStubAssembler::IsWeakCell(Node* object) {
  return IsWeakCellMap(LoadMap(object));
}

Node* CodeStubAssembler::IsBoolean(Node* object) {
  return IsBooleanMap(LoadMap(object));
}

Node* CodeStubAssembler::IsPropertyCell(Node* object) {
  return IsPropertyCellMap(LoadMap(object));
}

Node* CodeStubAssembler::IsAccessorInfo(Node* object) {
  return IsAccessorInfoMap(LoadMap(object));
}

Node* CodeStubAssembler::IsAccessorPair(Node* object) {
  return IsAccessorPairMap(LoadMap(object));
}

Node* CodeStubAssembler::IsAllocationSite(Node* object) {
  return IsAllocationSiteMap(LoadMap(object));
}

// True for both the mutable and the immutable HeapNumber maps.
Node* CodeStubAssembler::IsAnyHeapNumber(Node* object) {
  return Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object));
}

Node* CodeStubAssembler::IsHeapNumber(Node* object) {
  return IsHeapNumberMap(LoadMap(object));
}

Node* CodeStubAssembler::IsMutableHeapNumber(Node* object) {
  return IsMutableHeapNumberMap(LoadMap(object));
}

Node* CodeStubAssembler::IsFeedbackVector(Node* object) {
  return IsFeedbackVectorMap(LoadMap(object));
}

// Name types (strings and symbols) occupy the bottom of the instance-type
// range, so a single upper bound suffices.
Node* CodeStubAssembler::IsName(Node* object) {
  return Int32LessThanOrEqual(LoadInstanceType(object),
                              Int32Constant(LAST_NAME_TYPE));
}

Node* CodeStubAssembler::IsString(Node* object) {
  return IsStringInstanceType(LoadInstanceType(object));
}

Node* CodeStubAssembler::IsSymbolInstanceType(Node* instance_type) {
  return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
}

Node* CodeStubAssembler::IsSymbol(Node* object) {
  return IsSymbolMap(LoadMap(object));
}

Node* CodeStubAssembler::IsBigIntInstanceType(Node* instance_type) {
  return InstanceTypeEqual(instance_type, BIGINT_TYPE);
}

Node* CodeStubAssembler::IsBigInt(Node* object) {
  return IsBigIntInstanceType(LoadInstanceType(object));
}

Node* CodeStubAssembler::IsPrimitiveInstanceType(Node* instance_type) {
  return Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_PRIMITIVE_TYPE));
}
4241 :
// Returns a Word32 boolean: true iff {object} is a Symbol whose flags have
// the private bit set. Non-symbols short-circuit to false without touching
// the flags field.
Node* CodeStubAssembler::IsPrivateSymbol(Node* object) {
  return Select(
      IsSymbol(object),
      [=] {
        // Only read the flags once we know {object} is a Symbol.
        Node* const flags =
            SmiToWord32(CAST(LoadObjectField(object, Symbol::kFlagsOffset)));
        const int kPrivateMask = 1 << Symbol::kPrivateBit;
        return IsSetWord32(flags, kPrivateMask);
      },
      [=] { return Int32Constant(0); }, MachineRepresentation::kWord32);
}
4253 :
// ----------------------------------------------------------------------------
// More heap-object predicates: root-map comparisons and instance-type checks.
// ----------------------------------------------------------------------------

Node* CodeStubAssembler::IsNativeContext(Node* object) {
  return WordEqual(LoadMap(object), LoadRoot(Heap::kNativeContextMapRootIndex));
}

Node* CodeStubAssembler::IsFixedDoubleArray(Node* object) {
  return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
}

Node* CodeStubAssembler::IsHashTable(Node* object) {
  return HasInstanceType(object, HASH_TABLE_TYPE);
}

// Dictionaries are either generic hash tables or (unseeded) number
// dictionaries, which carry their own map.
Node* CodeStubAssembler::IsDictionary(Node* object) {
  return Word32Or(IsHashTable(object), IsUnseededNumberDictionary(object));
}

Node* CodeStubAssembler::IsUnseededNumberDictionary(Node* object) {
  return WordEqual(LoadMap(object),
                   LoadRoot(Heap::kUnseededNumberDictionaryMapRootIndex));
}

Node* CodeStubAssembler::IsJSFunctionInstanceType(Node* instance_type) {
  return InstanceTypeEqual(instance_type, JS_FUNCTION_TYPE);
}

Node* CodeStubAssembler::IsJSFunction(Node* object) {
  return IsJSFunctionMap(LoadMap(object));
}

Node* CodeStubAssembler::IsJSFunctionMap(Node* map) {
  return IsJSFunctionInstanceType(LoadMapInstanceType(map));
}

Node* CodeStubAssembler::IsJSTypedArray(Node* object) {
  return HasInstanceType(object, JS_TYPED_ARRAY_TYPE);
}

Node* CodeStubAssembler::IsJSArrayBuffer(Node* object) {
  return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
}

// Fixed typed arrays span a contiguous instance-type range; check both ends.
Node* CodeStubAssembler::IsFixedTypedArray(Node* object) {
  Node* instance_type = LoadInstanceType(object);
  return Word32And(
      Int32GreaterThanOrEqual(instance_type,
                              Int32Constant(FIRST_FIXED_TYPED_ARRAY_TYPE)),
      Int32LessThanOrEqual(instance_type,
                           Int32Constant(LAST_FIXED_TYPED_ARRAY_TYPE)));
}

Node* CodeStubAssembler::IsJSRegExp(Node* object) {
  return HasInstanceType(object, JS_REGEXP_TYPE);
}
4307 :
// Returns a Word32 boolean: true iff {object} is a Smi, HeapNumber, or
// BigInt (the ES "Numeric" types). Smis short-circuit to true so the map is
// only loaded for HeapObjects.
Node* CodeStubAssembler::IsNumeric(Node* object) {
  return Select(
      TaggedIsSmi(object), [=] { return Int32Constant(1); },
      [=] { return Word32Or(IsHeapNumber(object), IsBigInt(object)); },
      MachineRepresentation::kWord32);
}

// Returns a Word32 boolean: true iff {object} is a Smi or HeapNumber.
Node* CodeStubAssembler::IsNumber(Node* object) {
  return Select(TaggedIsSmi(object), [=] { return Int32Constant(1); },
                [=] { return IsHeapNumber(object); },
                MachineRepresentation::kWord32);
}
4320 :
4321 1028 : Node* CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(Node* element_count,
4322 : int base_size,
4323 : ParameterMode mode) {
4324 : int max_newspace_elements =
4325 1028 : (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
4326 : return IntPtrOrSmiGreaterThan(
4327 1028 : element_count, IntPtrOrSmiConstant(max_newspace_elements, mode), mode);
4328 : }
4329 :
// Returns a Word32 boolean: true iff {number} is in normalized form, i.e. it
// is either a Smi or a HeapNumber whose value cannot be represented as a Smi
// (outside [Smi::kMinValue, Smi::kMaxValue], or NaN).
Node* CodeStubAssembler::IsNumberNormalized(Node* number) {
  CSA_ASSERT(this, IsNumber(number));

  // Default to true; only a HeapNumber with a Smi-representable value is
  // considered non-normalized.
  VARIABLE(var_result, MachineRepresentation::kWord32, Int32Constant(1));
  Label out(this);

  GotoIf(TaggedIsSmi(number), &out);

  Node* const value = LoadHeapNumberValue(number);
  Node* const smi_min = Float64Constant(static_cast<double>(Smi::kMinValue));
  Node* const smi_max = Float64Constant(static_cast<double>(Smi::kMaxValue));

  // Values outside the Smi range cannot be a Smi, so they are normalized.
  GotoIf(Float64LessThan(value, smi_min), &out);
  GotoIf(Float64GreaterThan(value, smi_max), &out);
  GotoIfNot(Float64Equal(value, value), &out);  // NaN.

  // In-range HeapNumber: should have been a Smi, so not normalized.
  var_result.Bind(Int32Constant(0));
  Goto(&out);

  BIND(&out);
  return var_result.value();
}
4352 :
// Returns a Word32 boolean: true iff {number} (a Smi or HeapNumber) is
// greater than or equal to zero. For HeapNumbers this is a float comparison
// against +0.0.
Node* CodeStubAssembler::IsNumberPositive(Node* number) {
  CSA_ASSERT(this, IsNumber(number));
  Node* const float_zero = Float64Constant(0.);
  return Select(TaggedIsSmi(number),
                [=] { return TaggedIsPositiveSmi(number); },
                [=] {
                  Node* v = LoadHeapNumberValue(number);
                  return Float64GreaterThanOrEqual(v, float_zero);
                },
                MachineRepresentation::kWord32);
}
4364 :
// Returns a Word32 boolean: true iff {number} is a valid array index, i.e. a
// non-negative integer below kMaxUInt32. A Smi passing the range checks is
// an index by construction; a HeapNumber must additionally round-trip
// through uint32 without loss to prove integrality.
Node* CodeStubAssembler::IsNumberArrayIndex(Node* number) {
  VARIABLE(var_result, MachineRepresentation::kWord32, Int32Constant(1));

  Label check_upper_bound(this), check_is_integer(this), out(this),
      return_false(this);

  // Reject negative numbers.
  GotoIfNumberGreaterThanOrEqual(number, NumberConstant(0), &check_upper_bound);
  Goto(&return_false);

  BIND(&check_upper_bound);
  // Reject numbers at or above 2^32 - 1.
  GotoIfNumberGreaterThanOrEqual(number, NumberConstant(kMaxUInt32),
                                 &return_false);
  Goto(&check_is_integer);

  BIND(&check_is_integer);
  GotoIf(TaggedIsSmi(number), &out);
  // Check that the HeapNumber is a valid uint32
  Node* value = LoadHeapNumberValue(number);
  Node* int_value = ChangeFloat64ToUint32(value);
  GotoIf(Float64Equal(value, ChangeUint32ToFloat64(int_value)), &out);
  Goto(&return_false);

  BIND(&return_false);
  var_result.Bind(Int32Constant(0));
  Goto(&out);

  BIND(&out);
  return var_result.value();
}
4394 :
// Returns the character code (Uint32) of {string} at {index}. Tries to
// flatten the string to a direct (sequential or external) representation and
// read the byte/word inline; falls back to the StringCharCodeAt runtime call
// for representations ToDirectStringAssembler cannot handle.
// {index} must be in-bounds (asserted below) and is given in
// {parameter_mode}.
TNode<Uint32T> CodeStubAssembler::StringCharCodeAt(
    SloppyTNode<String> string, Node* index, ParameterMode parameter_mode) {
  CSA_ASSERT(this, MatchesParameterMode(index, parameter_mode));
  CSA_ASSERT(this, IsString(string));

  // Translate the {index} into a Word.
  index = ParameterToWord(index, parameter_mode);
  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(index, IntPtrConstant(0)));
  CSA_ASSERT(this, IntPtrLessThan(index, SmiUntag(LoadStringLength(string))));

  VARIABLE(var_result, MachineRepresentation::kWord32);

  Label return_result(this), if_runtime(this, Label::kDeferred),
      if_stringistwobyte(this), if_stringisonebyte(this);

  ToDirectStringAssembler to_direct(state(), string);
  to_direct.TryToDirect(&if_runtime);
  // Account for a slice's offset into its parent string.
  Node* const offset = IntPtrAdd(index, to_direct.offset());
  Node* const instance_type = to_direct.instance_type();

  Node* const string_data = to_direct.PointerToData(&if_runtime);

  // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
  Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
         &if_stringistwobyte);

  BIND(&if_stringisonebyte);
  {
    var_result.Bind(Load(MachineType::Uint8(), string_data, offset));
    Goto(&return_result);
  }

  BIND(&if_stringistwobyte);
  {
    // Two-byte characters: scale the offset by 2.
    var_result.Bind(Load(MachineType::Uint16(), string_data,
                         WordShl(offset, IntPtrConstant(1))));
    Goto(&return_result);
  }

  BIND(&if_runtime);
  {
    Node* result = CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(),
                               string, SmiTag(index));
    var_result.Bind(SmiToWord32(result));
    Goto(&return_result);
  }

  BIND(&return_result);
  return UncheckedCast<Uint32T>(var_result.value());
}
4445 :
// Returns a single-character String for the given character {code}.
// One-byte codes are served from (and cached in) the isolate-wide single
// character string cache; two-byte codes allocate a fresh SeqTwoByteString.
Node* CodeStubAssembler::StringFromCharCode(Node* code) {
  VARIABLE(var_result, MachineRepresentation::kTagged);

  // Check if the {code} is a one-byte char code.
  Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
      if_done(this);
  Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
         &if_codeisonebyte, &if_codeistwobyte);
  BIND(&if_codeisonebyte);
  {
    // Load the isolate wide single character string cache.
    Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);
    Node* code_index = ChangeUint32ToWord(code);

    // Check if we have an entry for the {code} in the single character string
    // cache already.
    Label if_entryisundefined(this, Label::kDeferred),
        if_entryisnotundefined(this);
    Node* entry = LoadFixedArrayElement(cache, code_index);
    Branch(WordEqual(entry, UndefinedConstant()), &if_entryisundefined,
           &if_entryisnotundefined);

    BIND(&if_entryisundefined);
    {
      // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
      Node* result = AllocateSeqOneByteString(1);
      StoreNoWriteBarrier(
          MachineRepresentation::kWord8, result,
          IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
      StoreFixedArrayElement(cache, code_index, result);
      var_result.Bind(result);
      Goto(&if_done);
    }

    BIND(&if_entryisnotundefined);
    {
      // Return the entry from the {cache}.
      var_result.Bind(entry);
      Goto(&if_done);
    }
  }

  BIND(&if_codeistwobyte);
  {
    // Allocate a new SeqTwoByteString for {code}.
    Node* result = AllocateSeqTwoByteString(1);
    StoreNoWriteBarrier(
        MachineRepresentation::kWord16, result,
        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
    var_result.Bind(result);
    Goto(&if_done);
  }

  BIND(&if_done);
  CSA_ASSERT(this, IsString(var_result.value()));
  return var_result.value();
}
4503 :
4504 : // A wrapper around CopyStringCharacters which determines the correct string
4505 : // encoding, allocates a corresponding sequential string, and then copies the
4506 : // given character range using CopyStringCharacters.
4507 : // |from_string| must be a sequential string. |from_index| and
4508 : // |character_count| must be Smis s.t.
4509 : // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
4510 1426 : Node* CodeStubAssembler::AllocAndCopyStringCharacters(Node* context, Node* from,
4511 : Node* from_instance_type,
4512 : Node* from_index,
4513 : Node* character_count) {
4514 2852 : Label end(this), one_byte_sequential(this), two_byte_sequential(this);
4515 2852 : Variable var_result(this, MachineRepresentation::kTagged);
4516 :
4517 2852 : Node* const smi_zero = SmiConstant(0);
4518 :
4519 : Branch(IsOneByteStringInstanceType(from_instance_type), &one_byte_sequential,
4520 2852 : &two_byte_sequential);
4521 :
4522 : // The subject string is a sequential one-byte string.
4523 : BIND(&one_byte_sequential);
4524 : {
4525 : Node* result =
4526 1426 : AllocateSeqOneByteString(context, SmiToWord(character_count));
4527 : CopyStringCharacters(from, result, from_index, smi_zero, character_count,
4528 : String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING,
4529 1426 : CodeStubAssembler::SMI_PARAMETERS);
4530 1426 : var_result.Bind(result);
4531 :
4532 1426 : Goto(&end);
4533 : }
4534 :
4535 : // The subject string is a sequential two-byte string.
4536 : BIND(&two_byte_sequential);
4537 : {
4538 : Node* result =
4539 1426 : AllocateSeqTwoByteString(context, SmiToWord(character_count));
4540 : CopyStringCharacters(from, result, from_index, smi_zero, character_count,
4541 : String::TWO_BYTE_ENCODING, String::TWO_BYTE_ENCODING,
4542 1426 : CodeStubAssembler::SMI_PARAMETERS);
4543 1426 : var_result.Bind(result);
4544 :
4545 1426 : Goto(&end);
4546 : }
4547 :
4548 : BIND(&end);
4549 2852 : return var_result.value();
4550 : }
4551 :
4552 :
// Returns the substring of {string} in [{from}, {to}). Fast paths, in order:
//  - single-character substrings go through CharCodeAt + FromCharCode;
//  - substrings of at least SlicedString::kMinLength become SlicedStrings
//    (no copy);
//  - shorter substrings are copied into a fresh sequential string;
//  - {from} == 0 && {to} == length returns {string} itself.
// Anything else (non-Smi bounds, unsupported representations, out-of-range
// arguments) falls back to the SubString runtime call. With
// SubStringFlags::FROM_TO_ARE_BOUNDED the bounds checks become debug asserts.
Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
                                   Node* to, SubStringFlags flags) {
  DCHECK(flags == SubStringFlags::NONE ||
         flags == SubStringFlags::FROM_TO_ARE_BOUNDED);
  VARIABLE(var_result, MachineRepresentation::kTagged);
  ToDirectStringAssembler to_direct(state(), string);
  Label end(this), runtime(this);

  // Make sure first argument is a string.
  CSA_ASSERT(this, TaggedIsNotSmi(string));
  CSA_ASSERT(this, IsString(string));

  // Make sure that both from and to are non-negative smis.

  if (flags == SubStringFlags::NONE) {
    GotoIfNot(TaggedIsPositiveSmi(from), &runtime);
    GotoIfNot(TaggedIsPositiveSmi(to), &runtime);
  } else {
    CSA_ASSERT(this, TaggedIsPositiveSmi(from));
    CSA_ASSERT(this, TaggedIsPositiveSmi(to));
  }

  Node* const substr_length = SmiSub(to, from);
  Node* const string_length = LoadStringLength(string);

  // Begin dispatching based on substring length.

  Label original_string_or_invalid_length(this);
  GotoIf(SmiAboveOrEqual(substr_length, string_length),
         &original_string_or_invalid_length);

  // A real substring (substr_length < string_length).

  Label single_char(this);
  GotoIf(SmiEqual(substr_length, SmiConstant(1)), &single_char);

  // TODO(jgruber): Add an additional case for substring of length == 0?

  // Deal with different string types: update the index if necessary
  // and extract the underlying string.

  Node* const direct_string = to_direct.TryToDirect(&runtime);
  Node* const offset = SmiAdd(from, SmiTag(to_direct.offset()));
  Node* const instance_type = to_direct.instance_type();

  // The subject string can only be external or sequential string of either
  // encoding at this point.
  Label external_string(this);
  {
    Label next(this);

    // Short slice. Copy instead of slicing.
    GotoIf(SmiLessThan(substr_length, SmiConstant(SlicedString::kMinLength)),
           &next);

    // Allocate new sliced string.

    Counters* counters = isolate()->counters();
    IncrementCounter(counters->sub_string_native(), 1);

    Label one_byte_slice(this), two_byte_slice(this);
    Branch(IsOneByteStringInstanceType(to_direct.instance_type()),
           &one_byte_slice, &two_byte_slice);

    BIND(&one_byte_slice);
    {
      var_result.Bind(
          AllocateSlicedOneByteString(substr_length, direct_string, offset));
      Goto(&end);
    }

    BIND(&two_byte_slice);
    {
      var_result.Bind(
          AllocateSlicedTwoByteString(substr_length, direct_string, offset));
      Goto(&end);
    }

    BIND(&next);

    // The subject string can only be external or sequential string of either
    // encoding at this point.
    GotoIf(to_direct.is_external(), &external_string);

    var_result.Bind(AllocAndCopyStringCharacters(
        context, direct_string, instance_type, offset, substr_length));

    IncrementCounter(counters->sub_string_native(), 1);

    Goto(&end);
  }

  // Handle external string.
  BIND(&external_string);
  {
    Node* const fake_sequential_string = to_direct.PointerToString(&runtime);

    var_result.Bind(AllocAndCopyStringCharacters(
        context, fake_sequential_string, instance_type, offset, substr_length));

    Counters* counters = isolate()->counters();
    IncrementCounter(counters->sub_string_native(), 1);

    Goto(&end);
  }

  // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
  BIND(&single_char);
  {
    Node* char_code = StringCharCodeAt(string, from);
    var_result.Bind(StringFromCharCode(char_code));
    Goto(&end);
  }

  BIND(&original_string_or_invalid_length);
  {
    if (flags == SubStringFlags::NONE) {
      // Longer than original string's length or negative: unsafe arguments.
      GotoIf(SmiAbove(substr_length, string_length), &runtime);
    } else {
      // with flag SubStringFlags::FROM_TO_ARE_BOUNDED, the only way we can
      // get here is if substr_length is equal to string_length.
      CSA_ASSERT(this, SmiEqual(substr_length, string_length));
    }

    // Equal length - check if {from, to} == {0, str.length}.
    GotoIf(SmiAbove(from, SmiConstant(0)), &runtime);

    // Return the original string (substr_length == string_length).

    Counters* counters = isolate()->counters();
    IncrementCounter(counters->sub_string_native(), 1);

    var_result.Bind(string);
    Goto(&end);
  }

  // Fall back to a runtime call.
  BIND(&runtime);
  {
    var_result.Bind(
        CallRuntime(Runtime::kSubString, context, string, from, to));
    Goto(&end);
  }

  BIND(&end);
  CSA_ASSERT(this, IsString(var_result.value()));
  return var_result.value();
}
4702 :
4703 2511 : ToDirectStringAssembler::ToDirectStringAssembler(
4704 : compiler::CodeAssemblerState* state, Node* string, Flags flags)
4705 : : CodeStubAssembler(state),
4706 : var_string_(this, MachineRepresentation::kTagged, string),
4707 : var_instance_type_(this, MachineRepresentation::kWord32),
4708 : var_offset_(this, MachineType::PointerRepresentation()),
4709 : var_is_external_(this, MachineRepresentation::kWord32),
4710 2511 : flags_(flags) {
 : // Input must be a non-Smi String. Initialize the unpacking state:
 : // current string = input, offset 0, not external, instance type
 : // loaded eagerly so TryToDirect can dispatch on it.
4711 : CSA_ASSERT(this, TaggedIsNotSmi(string));
4712 : CSA_ASSERT(this, IsString(string));
4713 :
4714 2511 : var_string_.Bind(string);
4715 5022 : var_offset_.Bind(IntPtrConstant(0));
4716 5022 : var_instance_type_.Bind(LoadInstanceType(string));
4717 5022 : var_is_external_.Bind(Int32Constant(0));
4718 2511 : }
4719 :
4720 2511 : Node* ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
 : // Loops, unwrapping flat-cons / sliced / thin wrappers, until the
 : // current string is sequential or external (the "direct" forms).
 : // Jumps to |if_bailout| for non-flat cons strings, unknown
 : // representations, or sliced strings when kDontUnpackSlicedStrings
 : // is set. Returns the resulting direct string; side effects are
 : // recorded in var_string_/var_offset_/var_instance_type_/
 : // var_is_external_.
4721 2511 : VariableList vars({&var_string_, &var_offset_, &var_instance_type_}, zone());
4722 2511 : Label dispatch(this, vars);
4723 2511 : Label if_iscons(this);
4724 2511 : Label if_isexternal(this);
4725 2511 : Label if_issliced(this);
4726 2511 : Label if_isthin(this);
4727 2511 : Label out(this);
4728 :
4729 : Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
4730 5022 : &dispatch);
4731 :
4732 : // Dispatch based on string representation.
4733 : BIND(&dispatch);
4734 : {
4735 : int32_t values[] = {
4736 : kSeqStringTag, kConsStringTag, kExternalStringTag,
4737 : kSlicedStringTag, kThinStringTag,
4738 2511 : };
4739 : Label* labels[] = {
4740 : &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
4741 2511 : };
4742 : STATIC_ASSERT(arraysize(values) == arraysize(labels));
4743 :
4744 : Node* const representation = Word32And(
4745 10044 : var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
4746 2511 : Switch(representation, if_bailout, values, labels, arraysize(values));
4747 : }
4748 :
4749 : // Cons string. Check whether it is flat, then fetch first part.
4750 : // Flat cons strings have an empty second part.
4751 : BIND(&if_iscons);
4752 : {
4753 2511 : Node* const string = var_string_.value();
4754 2511 : GotoIfNot(IsEmptyString(LoadObjectField(string, ConsString::kSecondOffset)),
4755 5022 : if_bailout);
4756 :
4757 : Node* const lhs = LoadObjectField(string, ConsString::kFirstOffset);
4758 2511 : var_string_.Bind(lhs);
4759 5022 : var_instance_type_.Bind(LoadInstanceType(lhs));
4760 :
4761 2511 : Goto(&dispatch);
4762 : }
4763 :
4764 : // Sliced string. Fetch parent and correct start index by offset.
4765 : BIND(&if_issliced);
4766 : {
4767 2511 : if (flags_ & kDontUnpackSlicedStrings) {
4768 31 : Goto(if_bailout);
4769 : } else {
4770 2480 : Node* const string = var_string_.value();
4771 : Node* const sliced_offset =
4772 4960 : LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
 : // Accumulate the slice offset; nested slices add up.
4773 7440 : var_offset_.Bind(IntPtrAdd(var_offset_.value(), sliced_offset));
4774 :
4775 : Node* const parent = LoadObjectField(string, SlicedString::kParentOffset);
4776 2480 : var_string_.Bind(parent);
4777 4960 : var_instance_type_.Bind(LoadInstanceType(parent));
4778 :
4779 2480 : Goto(&dispatch);
4780 : }
4781 : }
4782 :
4783 : // Thin string. Fetch the actual string.
4784 : BIND(&if_isthin);
4785 : {
4786 2511 : Node* const string = var_string_.value();
4787 : Node* const actual_string =
4788 : LoadObjectField(string, ThinString::kActualOffset);
4789 5022 : Node* const actual_instance_type = LoadInstanceType(actual_string);
4790 :
4791 2511 : var_string_.Bind(actual_string);
4792 2511 : var_instance_type_.Bind(actual_instance_type);
4793 :
4794 2511 : Goto(&dispatch);
4795 : }
4796 :
4797 : // External string.
4798 : BIND(&if_isexternal);
4799 5022 : var_is_external_.Bind(Int32Constant(1));
4800 2511 : Goto(&out);
4801 :
4802 : BIND(&out);
4803 5022 : return var_string_.value();
4804 : }
4805 :
4806 2511 : Node* ToDirectStringAssembler::TryToSequential(StringPointerKind ptr_kind,
4807 : Label* if_bailout) {
 : // Produces an untagged pointer for the (already direct) string:
 : // either to the character data (PTR_TO_DATA) or to a fake string
 : // header positioned so that data lives at the usual header offset
 : // (PTR_TO_STRING). Bails out for short external strings, whose
 : // resource data pointer is not cached in the object.
4808 2511 : CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
4809 :
4810 2511 : VARIABLE(var_result, MachineType::PointerRepresentation());
4811 2511 : Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
4812 2511 : Branch(is_external(), &if_isexternal, &if_issequential);
4813 :
4814 : BIND(&if_issequential);
4815 : {
 : // One- and two-byte sequential strings share a header layout, so a
 : // single header-size constant works for both encodings.
4816 : STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
4817 : SeqTwoByteString::kHeaderSize);
4818 7533 : Node* result = BitcastTaggedToWord(var_string_.value());
4819 2511 : if (ptr_kind == PTR_TO_DATA) {
4820 3596 : result = IntPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
4821 3596 : kHeapObjectTag));
4822 : }
4823 2511 : var_result.Bind(result);
4824 2511 : Goto(&out);
4825 : }
4826 :
4827 : BIND(&if_isexternal);
4828 : {
4829 : GotoIf(IsShortExternalStringInstanceType(var_instance_type_.value()),
4830 5022 : if_bailout);
4831 :
4832 2511 : Node* const string = var_string_.value();
4833 : Node* result = LoadObjectField(string, ExternalString::kResourceDataOffset,
4834 2511 : MachineType::Pointer());
4835 2511 : if (ptr_kind == PTR_TO_STRING) {
 : // Back up so that "string + header" lands on the external data.
4836 1426 : result = IntPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
4837 1426 : kHeapObjectTag));
4838 : }
4839 2511 : var_result.Bind(result);
4840 2511 : Goto(&out);
4841 : }
4842 :
4843 : BIND(&out);
4844 5022 : return var_result.value();
4845 : }
4846 :
4847 837 : void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string,
4848 : Node* instance_type,
4849 : Label* can_deref,
4850 : Label* cannot_deref) {
 : // Branches to |can_deref| iff |string| is a thin string or a flat
 : // cons string (empty second part) — the two indirect forms whose
 : // payload can be fetched from the first/actual field.
4851 : CSA_ASSERT(this, IsString(string));
4852 : Node* representation =
4853 2511 : Word32And(instance_type, Int32Constant(kStringRepresentationMask));
4854 2511 : GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref);
4855 1674 : GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)),
4856 1674 : cannot_deref);
4857 : // Cons string.
4858 : Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
4859 1674 : GotoIf(IsEmptyString(rhs), can_deref);
4860 837 : Goto(cannot_deref);
4861 837 : }
4862 :
4863 837 : void CodeStubAssembler::DerefIndirectString(Variable* var_string,
4864 : Node* instance_type) {
 : // Replaces *var_string with the string it points at. Caller must
 : // have established (e.g. via BranchIfCanDerefIndirectString) that
 : // the string is a thin or flat cons string; debug builds re-check.
4865 : #ifdef DEBUG
4866 : Label can_deref(this), cannot_deref(this);
4867 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &can_deref,
4868 : &cannot_deref);
4869 : BIND(&cannot_deref);
4870 : DebugBreak(); // Should be able to dereference string.
4871 : Goto(&can_deref);
4872 : BIND(&can_deref);
4873 : #endif // DEBUG
4874 :
 : // The shared offset lets one load handle both thin and cons cases.
4875 : STATIC_ASSERT(ThinString::kActualOffset == ConsString::kFirstOffset);
4876 : var_string->Bind(
4877 1674 : LoadObjectField(var_string->value(), ThinString::kActualOffset));
4878 837 : }
4879 :
4880 837 : void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
4881 : Node* instance_type,
4882 : Label* did_deref,
4883 : Label* cannot_deref) {
 : // Dereferences *var_string in place if it is a thin or flat cons
 : // string (then jumps to |did_deref|); otherwise jumps to
 : // |cannot_deref| leaving the variable untouched.
4884 837 : Label deref(this);
4885 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &deref,
4886 837 : cannot_deref);
4887 :
4888 : BIND(&deref);
4889 : {
4890 837 : DerefIndirectString(var_string, instance_type);
4891 837 : Goto(did_deref);
4892 837 : }
4893 837 : }
4894 :
4895 279 : void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
4896 : Node* left_instance_type,
4897 : Variable* var_right,
4898 : Node* right_instance_type,
4899 : Label* did_something) {
 : // Tries to dereference both operands; jumps to |did_something| if at
 : // least one of them was unwrapped, falls through if neither was.
4900 558 : Label did_nothing_left(this), did_something_left(this),
4901 279 : didnt_do_anything(this);
4902 : MaybeDerefIndirectString(var_left, left_instance_type, &did_something_left,
4903 279 : &did_nothing_left);
4904 :
4905 : BIND(&did_something_left);
4906 : {
 : // Left was unwrapped: unconditionally report progress, after giving
 : // the right operand a chance to be unwrapped too.
4907 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
4908 279 : did_something);
4909 : }
4910 :
4911 : BIND(&did_nothing_left);
4912 : {
4913 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
4914 279 : &didnt_do_anything);
4915 : }
4916 :
4917 279 : BIND(&didnt_do_anything);
4918 : // Fall through if neither string was an indirect string.
4919 279 : }
4920 :
4921 124 : Node* CodeStubAssembler::StringAdd(Node* context, Node* left, Node* right,
4922 : AllocationFlags flags) {
 : // Concatenates two strings. Fast paths: either side empty (return
 : // the other), cons-string allocation for long results, and in-place
 : // sequential copy when both inputs are sequential with the same
 : // encoding. Falls back to Runtime::kStringAdd otherwise (including
 : // results exceeding String::kMaxLength, which must throw there).
4923 124 : VARIABLE(result, MachineRepresentation::kTagged);
4924 124 : Label check_right(this), runtime(this, Label::kDeferred), cons(this),
4925 124 : done(this, &result), done_native(this, &result);
4926 124 : Counters* counters = isolate()->counters();
4927 :
4928 : Node* left_length = LoadStringLength(left);
4929 372 : GotoIf(WordNotEqual(IntPtrConstant(0), left_length), &check_right);
4930 124 : result.Bind(right);
4931 124 : Goto(&done_native);
4932 :
4933 : BIND(&check_right);
4934 : Node* right_length = LoadStringLength(right);
4935 372 : GotoIf(WordNotEqual(IntPtrConstant(0), right_length), &cons);
4936 124 : result.Bind(left);
4937 124 : Goto(&done_native);
4938 :
4939 : BIND(&cons);
4940 : {
4941 : CSA_ASSERT(this, TaggedIsSmi(left_length));
4942 : CSA_ASSERT(this, TaggedIsSmi(right_length));
4943 248 : Node* new_length = SmiAdd(left_length, right_length);
4944 :
4945 : // If new length is greater than String::kMaxLength, goto runtime to
4946 : // throw. Note: we also need to invalidate the string length protector, so
4947 : // can't just throw here directly.
4948 : GotoIf(SmiAboveOrEqual(new_length, SmiConstant(String::kMaxLength)),
4949 372 : &runtime);
4950 :
4951 124 : VARIABLE(var_left, MachineRepresentation::kTagged, left);
4952 248 : VARIABLE(var_right, MachineRepresentation::kTagged, right);
4953 124 : Variable* input_vars[2] = {&var_left, &var_right};
4954 248 : Label non_cons(this, 2, input_vars);
4955 124 : Label slow(this, Label::kDeferred);
 : // Short results are copied flat; longer ones become a ConsString.
4956 : GotoIf(SmiLessThan(new_length, SmiConstant(ConsString::kMinLength)),
4957 372 : &non_cons);
4958 :
4959 : result.Bind(NewConsString(context, new_length, var_left.value(),
4960 124 : var_right.value(), flags));
4961 124 : Goto(&done_native);
4962 :
4963 : BIND(&non_cons);
4964 :
4965 124 : Comment("Full string concatenate");
4966 372 : Node* left_instance_type = LoadInstanceType(var_left.value());
4967 372 : Node* right_instance_type = LoadInstanceType(var_right.value());
4968 : // Compute intersection and difference of instance types.
4969 :
4970 : Node* ored_instance_types =
4971 248 : Word32Or(left_instance_type, right_instance_type);
4972 : Node* xored_instance_types =
4973 248 : Word32Xor(left_instance_type, right_instance_type);
4974 :
4975 : // Check if both strings have the same encoding and both are sequential.
4976 248 : GotoIf(IsSetWord32(xored_instance_types, kStringEncodingMask), &runtime);
4977 248 : GotoIf(IsSetWord32(ored_instance_types, kStringRepresentationMask), &slow);
4978 :
4979 124 : Label two_byte(this);
4980 : GotoIf(Word32Equal(Word32And(ored_instance_types,
4981 248 : Int32Constant(kStringEncodingMask)),
4982 496 : Int32Constant(kTwoByteStringTag)),
4983 248 : &two_byte);
4984 : // One-byte sequential string case
4985 : Node* new_string =
4986 124 : AllocateSeqOneByteString(context, new_length, SMI_PARAMETERS);
4987 : CopyStringCharacters(var_left.value(), new_string, SmiConstant(0),
4988 : SmiConstant(0), left_length, String::ONE_BYTE_ENCODING,
4989 372 : String::ONE_BYTE_ENCODING, SMI_PARAMETERS);
4990 : CopyStringCharacters(var_right.value(), new_string, SmiConstant(0),
4991 : left_length, right_length, String::ONE_BYTE_ENCODING,
4992 248 : String::ONE_BYTE_ENCODING, SMI_PARAMETERS);
4993 124 : result.Bind(new_string);
4994 124 : Goto(&done_native);
4995 :
4996 : BIND(&two_byte);
4997 : {
4998 : // Two-byte sequential string case
4999 : new_string =
5000 124 : AllocateSeqTwoByteString(context, new_length, SMI_PARAMETERS);
5001 : CopyStringCharacters(var_left.value(), new_string, SmiConstant(0),
5002 : SmiConstant(0), left_length,
5003 : String::TWO_BYTE_ENCODING, String::TWO_BYTE_ENCODING,
5004 372 : SMI_PARAMETERS);
5005 : CopyStringCharacters(var_right.value(), new_string, SmiConstant(0),
5006 : left_length, right_length, String::TWO_BYTE_ENCODING,
5007 248 : String::TWO_BYTE_ENCODING, SMI_PARAMETERS);
5008 124 : result.Bind(new_string);
5009 124 : Goto(&done_native);
5010 : }
5011 :
5012 : BIND(&slow);
5013 : {
5014 : // Try to unwrap indirect strings, restart the above attempt on success.
5015 : MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
5016 124 : right_instance_type, &non_cons);
5017 124 : Goto(&runtime);
5018 124 : }
5019 : }
5020 : BIND(&runtime);
5021 : {
5022 124 : result.Bind(CallRuntime(Runtime::kStringAdd, context, left, right));
5023 124 : Goto(&done);
5024 : }
5025 :
5026 : BIND(&done_native);
5027 : {
5028 124 : IncrementCounter(counters->string_add_native(), 1);
5029 124 : Goto(&done);
5030 : }
5031 :
5032 : BIND(&done);
5033 248 : return result.value();
5034 : }
5035 :
5036 31 : Node* CodeStubAssembler::StringFromCodePoint(Node* codepoint,
5037 : UnicodeEncoding encoding) {
 : // Builds a one- or two-code-unit string for |codepoint|. BMP code
 : // points (< 0x10000) go through StringFromCharCode; supplementary
 : // ones are stored as a surrogate pair in a 2-char two-byte string.
5038 31 : VARIABLE(var_result, MachineRepresentation::kTagged, EmptyStringConstant());
5039 :
5040 31 : Label if_isword16(this), if_isword32(this), return_result(this);
5041 :
5042 62 : Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
5043 62 : &if_isword32);
5044 :
5045 : BIND(&if_isword16);
5046 : {
5047 31 : var_result.Bind(StringFromCharCode(codepoint));
5048 31 : Goto(&return_result);
5049 : }
5050 :
5051 : BIND(&if_isword32);
5052 : {
5053 31 : switch (encoding) {
5054 : case UnicodeEncoding::UTF16:
 : // Caller already supplied both UTF-16 code units packed in the
 : // 32-bit word; nothing to convert.
5055 : break;
5056 : case UnicodeEncoding::UTF32: {
5057 : // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
5058 0 : Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
5059 :
5060 : // lead = (codepoint >> 10) + LEAD_OFFSET
5061 : Node* lead =
5062 0 : Int32Add(Word32Shr(codepoint, Int32Constant(10)), lead_offset);
5063 :
5064 : // trail = (codepoint & 0x3FF) + 0xDC00;
5065 0 : Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
5066 0 : Int32Constant(0xDC00));
5067 :
5068 : // codpoint = (trail << 16) | lead;
5069 0 : codepoint = Word32Or(Word32Shl(trail, Int32Constant(16)), lead);
5070 0 : break;
5071 : }
5072 : }
5073 :
 : // Write both code units with a single 32-bit store.
5074 31 : Node* value = AllocateSeqTwoByteString(2);
5075 : StoreNoWriteBarrier(
5076 : MachineRepresentation::kWord32, value,
5077 : IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
5078 62 : codepoint);
5079 31 : var_result.Bind(value);
5080 31 : Goto(&return_result);
5081 : }
5082 :
5083 : BIND(&return_result);
5084 : CSA_ASSERT(this, IsString(var_result.value()));
5085 62 : return var_result.value();
5086 : }
5087 :
5088 533 : TNode<Number> CodeStubAssembler::StringToNumber(SloppyTNode<Context> context,
5089 : SloppyTNode<String> input) {
 : // Converts a String to a Number. Fast path: strings that cache an
 : // array index in their hash field are turned into a Smi directly;
 : // everything else goes to Runtime::kStringToNumber.
5090 : CSA_SLOW_ASSERT(this, IsString(input));
5091 533 : Label runtime(this, Label::kDeferred);
5092 533 : Label end(this);
5093 :
5094 : TVARIABLE(Number, var_result);
5095 :
5096 : // Check if string has a cached array index.
5097 : TNode<Uint32T> hash = LoadNameHashField(input);
5098 533 : GotoIf(IsSetWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
5099 1066 : &runtime);
5100 :
5101 1599 : var_result = SmiTag(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash));
5102 533 : Goto(&end);
5103 :
5104 : BIND(&runtime);
5105 : {
5106 : var_result = CAST(CallRuntime(Runtime::kStringToNumber, context, input));
5107 533 : Goto(&end);
5108 : }
5109 :
5110 : BIND(&end);
5111 533 : return var_result;
5112 : }
5113 :
5114 192 : Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
 : // Converts a Number (Smi or HeapNumber) to its String representation
 : // via the heap's number-string cache; any cache miss (or non-number
 : // input) falls through to Runtime::kNumberToString.
5115 192 : VARIABLE(result, MachineRepresentation::kTagged);
5116 192 : Label runtime(this, Label::kDeferred), smi(this), done(this, &result);
5117 :
5118 : // Load the number string cache.
5119 384 : Node* number_string_cache = LoadRoot(Heap::kNumberStringCacheRootIndex);
5120 :
5121 : // Make the hash mask from the length of the number string cache. It
5122 : // contains two elements (number and string) for each cache entry.
5123 : // TODO(ishell): cleanup mask handling.
5124 : Node* mask =
5125 576 : BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
5126 384 : Node* one = IntPtrConstant(1);
5127 384 : mask = IntPtrSub(mask, one);
5128 :
5129 384 : GotoIf(TaggedIsSmi(argument), &smi);
5130 :
5131 : // Argument isn't smi, check to see if it's a heap-number.
5132 384 : GotoIfNot(IsHeapNumber(argument), &runtime);
5133 :
5134 : // Make a hash from the two 32-bit values of the double.
5135 : Node* low =
5136 192 : LoadObjectField(argument, HeapNumber::kValueOffset, MachineType::Int32());
5137 : Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize,
5138 192 : MachineType::Int32());
5139 384 : Node* hash = Word32Xor(low, high);
5140 384 : hash = ChangeInt32ToIntPtr(hash);
 : // Shift left by one because each cache entry spans two array slots.
5141 384 : hash = WordShl(hash, one);
5142 576 : Node* index = WordAnd(hash, WordSar(mask, SmiShiftBitsConstant()));
5143 :
5144 : // Cache entry's key must be a heap number
5145 192 : Node* number_key = LoadFixedArrayElement(number_string_cache, index);
5146 384 : GotoIf(TaggedIsSmi(number_key), &runtime);
5147 384 : GotoIfNot(IsHeapNumber(number_key), &runtime);
5148 :
5149 : // Cache entry's key must match the heap number value we're looking for.
5150 : Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
5151 192 : MachineType::Int32());
5152 : Node* high_compare = LoadObjectField(
5153 192 : number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
5154 384 : GotoIfNot(Word32Equal(low, low_compare), &runtime);
5155 384 : GotoIfNot(Word32Equal(high, high_compare), &runtime);
5156 :
5157 : // Heap number match, return value from cache entry.
5158 384 : IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
5159 192 : result.Bind(LoadFixedArrayElement(number_string_cache, index, kPointerSize));
5160 192 : Goto(&done);
5161 :
5162 : BIND(&runtime);
5163 : {
5164 : // No cache entry, go to the runtime.
5165 192 : result.Bind(CallRuntime(Runtime::kNumberToString, context, argument));
5166 : }
5167 192 : Goto(&done);
5168 :
5169 : BIND(&smi);
5170 : {
5171 : // Load the smi key, make sure it matches the smi we're looking for.
5172 : Node* smi_index = BitcastWordToTagged(
5173 960 : WordAnd(WordShl(BitcastTaggedToWord(argument), one), mask));
5174 : Node* smi_key = LoadFixedArrayElement(number_string_cache, smi_index, 0,
5175 192 : SMI_PARAMETERS);
5176 384 : GotoIf(WordNotEqual(smi_key, argument), &runtime);
5177 :
5178 : // Smi match, return value from cache entry.
5179 384 : IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
5180 : result.Bind(LoadFixedArrayElement(number_string_cache, smi_index,
5181 192 : kPointerSize, SMI_PARAMETERS));
5182 192 : Goto(&done);
5183 : }
5184 :
5185 : BIND(&done);
5186 : CSA_ASSERT(this, IsString(result.value()));
5187 384 : return result.value();
5188 : }
5189 :
5190 775 : Node* CodeStubAssembler::ToName(Node* context, Node* value) {
 : // ES ToPropertyKey-style conversion: Names pass through, numbers
 : // (Smi or HeapNumber) are stringified, oddballs use their cached
 : // to-string; everything else goes to Runtime::kToName.
5191 775 : Label end(this);
5192 1550 : VARIABLE(var_result, MachineRepresentation::kTagged);
5193 :
5194 775 : Label is_number(this);
5195 1550 : GotoIf(TaggedIsSmi(value), &is_number);
5196 :
5197 775 : Label not_name(this);
5198 1550 : Node* value_instance_type = LoadInstanceType(value);
 : // Name instance types occupy the lowest range, so a single compare
 : // against LAST_NAME_TYPE identifies String and Symbol inputs.
5199 : STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
5200 1550 : GotoIf(Int32GreaterThan(value_instance_type, Int32Constant(LAST_NAME_TYPE)),
5201 1550 : &not_name);
5202 :
5203 775 : var_result.Bind(value);
5204 775 : Goto(&end);
5205 :
5206 : BIND(&is_number);
5207 : {
5208 775 : Callable callable = CodeFactory::NumberToString(isolate());
5209 775 : var_result.Bind(CallStub(callable, context, value));
5210 775 : Goto(&end);
5211 : }
5212 :
5213 : BIND(&not_name);
5214 : {
5215 : GotoIf(InstanceTypeEqual(value_instance_type, HEAP_NUMBER_TYPE),
5216 1550 : &is_number);
5217 :
5218 : Label not_oddball(this);
5219 : GotoIfNot(InstanceTypeEqual(value_instance_type, ODDBALL_TYPE),
5220 1550 : &not_oddball);
5221 :
5222 775 : var_result.Bind(LoadObjectField(value, Oddball::kToStringOffset));
5223 775 : Goto(&end);
5224 :
5225 : BIND(&not_oddball);
5226 : {
5227 775 : var_result.Bind(CallRuntime(Runtime::kToName, context, value));
5228 775 : Goto(&end);
5229 775 : }
5230 : }
5231 :
5232 : BIND(&end);
5233 : CSA_ASSERT(this, IsName(var_result.value()));
5234 1550 : return var_result.value();
5235 : }
5236 :
5237 502 : Node* CodeStubAssembler::NonNumberToNumberOrNumeric(Node* context, Node* input,
5238 : Object::Conversion mode) {
 : // Shared ToNumber/ToNumeric tail for HeapObjects that are not
 : // HeapNumbers. Strings use the fast StringToNumber path, oddballs
 : // their cached value, BigInts pass through only in kToNumeric mode,
 : // JSReceivers are run through ToPrimitive (looping back on the
 : // primitive), and anything else raises via the runtime.
5239 : CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
5240 : CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
5241 :
5242 : // We might need to loop once here due to ToPrimitive conversions.
5243 502 : VARIABLE(var_input, MachineRepresentation::kTagged, input);
5244 1004 : VARIABLE(var_result, MachineRepresentation::kTagged);
5245 502 : Label loop(this, &var_input);
5246 502 : Label end(this);
5247 502 : Goto(&loop);
5248 : BIND(&loop);
5249 : {
5250 : // Load the current {input} value (known to be a HeapObject).
5251 502 : Node* input = var_input.value();
5252 :
5253 : // Dispatch on the {input} instance type.
5254 1004 : Node* input_instance_type = LoadInstanceType(input);
5255 502 : Label if_inputisstring(this), if_inputisoddball(this),
5256 502 : if_inputisbigint(this), if_inputisreceiver(this, Label::kDeferred),
5257 502 : if_inputisother(this, Label::kDeferred);
5258 1004 : GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
5259 502 : GotoIf(IsBigIntInstanceType(input_instance_type), &if_inputisbigint);
5260 : GotoIf(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE),
5261 1004 : &if_inputisoddball);
5262 : Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
5263 1004 : &if_inputisother);
5264 :
5265 : BIND(&if_inputisstring);
5266 : {
5267 : // The {input} is a String, use the fast stub to convert it to a Number.
5268 1004 : var_result.Bind(StringToNumber(context, input));
5269 502 : Goto(&end);
5270 : }
5271 :
5272 : BIND(&if_inputisbigint);
5273 502 : if (mode == Object::Conversion::kToNumeric) {
 : // BigInt is already a Numeric; ToNumber on a BigInt must throw,
 : // which the runtime call in if_inputisother takes care of.
5274 31 : var_result.Bind(input);
5275 31 : Goto(&end);
5276 : } else {
5277 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
5278 471 : Goto(&if_inputisother);
5279 : }
5280 :
5281 : BIND(&if_inputisoddball);
5282 : {
5283 : // The {input} is an Oddball, we just need to load the Number value of it.
5284 502 : var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
5285 502 : Goto(&end);
5286 : }
5287 :
5288 : BIND(&if_inputisreceiver);
5289 : {
5290 : // The {input} is a JSReceiver, we need to convert it to a Primitive first
5291 : // using the ToPrimitive type conversion, preferably yielding a Number.
5292 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
5293 502 : isolate(), ToPrimitiveHint::kNumber);
5294 502 : Node* result = CallStub(callable, context, input);
5295 :
5296 : // Check if the {result} is already a Number/Numeric.
5297 502 : Label if_done(this), if_notdone(this);
5298 : Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
5299 : : IsNumeric(result),
5300 1004 : &if_done, &if_notdone);
5301 :
5302 : BIND(&if_done);
5303 : {
5304 : // The ToPrimitive conversion already gave us a Number/Numeric, so we're
5305 : // done.
5306 502 : var_result.Bind(result);
5307 502 : Goto(&end);
5308 : }
5309 :
5310 : BIND(&if_notdone);
5311 : {
5312 : // We now have a Primitive {result}, but it's not yet a Number/Numeric.
5313 502 : var_input.Bind(result);
5314 502 : Goto(&loop);
5315 : }
5316 : }
5317 :
5318 : BIND(&if_inputisother);
5319 : {
5320 : // The {input} is something else (e.g. Symbol), let the runtime figure
5321 : // out the correct exception.
5322 : // Note: We cannot tail call to the runtime here, as js-to-wasm
5323 : // trampolines also use this code currently, and they declare all
5324 : // outgoing parameters as untagged, while we would push a tagged
5325 : // object here.
5326 : auto function_id = mode == Object::Conversion::kToNumber
5327 : ? Runtime::kToNumber
5328 502 : : Runtime::kToNumeric;
5329 502 : var_result.Bind(CallRuntime(function_id, context, input));
5330 502 : Goto(&end);
5331 502 : }
5332 : }
5333 :
5334 : BIND(&end);
5335 : if (mode == Object::Conversion::kToNumeric) {
5336 : CSA_ASSERT(this, IsNumeric(var_result.value()));
5337 : } else {
5338 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
5339 : CSA_ASSERT(this, IsNumber(var_result.value()));
5340 : }
5341 1004 : return var_result.value();
5342 : }
5343 :
5344 31 : TNode<Number> CodeStubAssembler::NonNumberToNumber(
5345 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input) {
 : // ToNumber for a non-Smi, non-HeapNumber input; thin typed wrapper
 : // over NonNumberToNumberOrNumeric in kToNumber mode.
5346 471 : return CAST(NonNumberToNumberOrNumeric(context, input,
5347 : Object::Conversion::kToNumber));
5348 : }
5349 :
5350 31 : TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(
5351 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input) {
 : // ToNumeric for a non-Smi, non-HeapNumber input; thin typed wrapper
 : // over NonNumberToNumberOrNumeric in kToNumeric mode (BigInts pass).
5352 : Node* result = NonNumberToNumberOrNumeric(context, input,
5353 31 : Object::Conversion::kToNumeric);
5354 : CSA_SLOW_ASSERT(this, IsNumeric(result));
5355 31 : return UncheckedCast<Numeric>(result);
5356 : }
5357 :
5358 440 : TNode<Number> CodeStubAssembler::ToNumber(SloppyTNode<Context> context,
5359 : SloppyTNode<Object> input) {
 : // ES ToNumber: Smis and HeapNumbers pass through unchanged; all
 : // other HeapObjects are handled on the deferred NonNumberToNumber
 : // path.
5360 440 : TVARIABLE(Number, var_result);
5361 440 : Label end(this);
5362 :
5363 440 : Label not_smi(this, Label::kDeferred);
5364 880 : GotoIfNot(TaggedIsSmi(input), &not_smi);
5365 : TNode<Smi> input_smi = CAST(input);
5366 : var_result = input_smi;
5367 440 : Goto(&end);
5368 :
5369 : BIND(&not_smi);
5370 : {
5371 : Label not_heap_number(this, Label::kDeferred);
5372 : TNode<HeapObject> input_ho = CAST(input);
5373 880 : GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
5374 :
5375 : TNode<HeapNumber> input_hn = CAST(input_ho);
5376 : var_result = input_hn;
5377 440 : Goto(&end);
5378 :
5379 : BIND(&not_heap_number);
5380 : {
5381 : var_result = NonNumberToNumber(context, input_ho);
5382 440 : Goto(&end);
5383 440 : }
5384 : }
5385 :
5386 : BIND(&end);
5387 440 : return var_result;
5388 : }
5389 :
5390 186 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
5391 : Variable* var_numeric) {
 : // Feedback-free variant: converts {value} to a Numeric into
 : // *var_numeric and jumps to |done|.
5392 186 : TaggedToNumeric<Feedback::kNone>(context, value, done, var_numeric);
5393 186 : }
5394 :
5395 558 : void CodeStubAssembler::TaggedToNumericWithFeedback(Node* context, Node* value,
5396 : Label* done,
5397 : Variable* var_numeric,
5398 : Variable* var_feedback) {
 : // Like TaggedToNumeric, but also records BinaryOperationFeedback for
 : // the input kind into *var_feedback.
5399 : TaggedToNumeric<Feedback::kCollect>(context, value, done, var_numeric,
5400 558 : var_feedback);
5401 558 : }
5402 :
template <CodeStubAssembler::Feedback feedback>
5404 744 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
5405 : Variable* var_numeric,
5406 : Variable* var_feedback) {
 : // Converts {value} to a Numeric into *var_numeric and jumps to
 : // |done|. Smis, HeapNumbers and BigInts pass through unchanged;
 : // oddballs use their cached number; everything else calls the
 : // NonNumberToNumeric builtin. When feedback == kCollect, the
 : // matching BinaryOperationFeedback value is stored in *var_feedback
 : // (compile-time branch, so kNone instantiations carry no overhead).
5407 744 : var_numeric->Bind(value);
5408 1488 : Label if_smi(this), if_heapnumber(this), if_bigint(this), if_oddball(this);
5409 1488 : GotoIf(TaggedIsSmi(value), &if_smi);
5410 1488 : Node* map = LoadMap(value);
5411 1488 : GotoIf(IsHeapNumberMap(map), &if_heapnumber);
5412 1488 : Node* instance_type = LoadMapInstanceType(map);
5413 744 : GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
5414 :
5415 : // {value} is not a Numeric yet.
5416 2232 : GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
5417 744 : var_numeric->Bind(CallBuiltin(Builtins::kNonNumberToNumeric, context, value));
5418 : if (feedback == Feedback::kCollect) {
5419 1116 : var_feedback->Bind(SmiConstant(BinaryOperationFeedback::kAny));
5420 : }
5421 744 : Goto(done);
5422 :
5423 : BIND(&if_smi);
5424 : if (feedback == Feedback::kCollect) {
5425 1116 : var_feedback->Bind(SmiConstant(BinaryOperationFeedback::kSignedSmall));
5426 : }
5427 744 : Goto(done);
5428 :
5429 : BIND(&if_heapnumber);
5430 : if (feedback == Feedback::kCollect) {
5431 1116 : var_feedback->Bind(SmiConstant(BinaryOperationFeedback::kNumber));
5432 : }
5433 744 : Goto(done);
5434 :
5435 : BIND(&if_bigint);
5436 : if (feedback == Feedback::kCollect) {
5437 1116 : var_feedback->Bind(SmiConstant(BinaryOperationFeedback::kBigInt));
5438 : }
5439 744 : Goto(done);
5440 :
5441 : BIND(&if_oddball);
5442 744 : var_numeric->Bind(LoadObjectField(value, Oddball::kToNumberOffset));
5443 : if (feedback == Feedback::kCollect) {
5444 1116 : var_feedback->Bind(SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
5445 : }
5446 1488 : Goto(done);
5447 744 : }
5448 :
5449 : // ES#sec-touint32
5450 68 : TNode<Number> CodeStubAssembler::ToUint32(SloppyTNode<Context> context,
5451 : SloppyTNode<Object> input) {
 : // Implements the spec's ToUint32: ToNumber(input) followed by a
 : // modulo-2^32 reduction. +-0, NaN and +-Infinity all map to 0;
 : // negative Smis are reinterpreted as their unsigned 32-bit value.
5452 136 : Node* const float_zero = Float64Constant(0.0);
5453 136 : Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
5454 :
5455 : Label out(this);
5456 :
5457 136 : VARIABLE(var_result, MachineRepresentation::kTagged, input);
5458 :
5459 : // Early exit for positive smis.
5460 : {
5461 : // TODO(jgruber): This branch and the recheck below can be removed once we
5462 : // have a ToNumber with multiple exits.
5463 : Label next(this, Label::kDeferred);
5464 136 : Branch(TaggedIsPositiveSmi(input), &out, &next);
5465 68 : BIND(&next);
5466 : }
5467 :
5468 136 : Node* const number = ToNumber(context, input);
5469 68 : var_result.Bind(number);
5470 :
5471 : // Perhaps we have a positive smi now.
5472 : {
5473 : Label next(this, Label::kDeferred);
5474 136 : Branch(TaggedIsPositiveSmi(number), &out, &next);
5475 68 : BIND(&next);
5476 : }
5477 :
5478 68 : Label if_isnegativesmi(this), if_isheapnumber(this);
5479 136 : Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
5480 :
5481 : BIND(&if_isnegativesmi);
5482 : {
 : // Negative smi: the unsigned reinterpretation never fits a Smi, so
 : // box it as a HeapNumber.
5483 136 : Node* const uint32_value = SmiToWord32(number);
5484 136 : Node* float64_value = ChangeUint32ToFloat64(uint32_value);
5485 136 : var_result.Bind(AllocateHeapNumberWithValue(float64_value));
5486 68 : Goto(&out);
5487 : }
5488 :
5489 : BIND(&if_isheapnumber);
5490 : {
5491 : Label return_zero(this);
5492 136 : Node* const value = LoadHeapNumberValue(number);
5493 :
5494 : {
5495 : // +-0.
5496 : Label next(this);
5497 136 : Branch(Float64Equal(value, float_zero), &return_zero, &next);
5498 68 : BIND(&next);
5499 : }
5500 :
5501 : {
5502 : // NaN.
5503 : Label next(this);
 : // value != value only for NaN.
5504 136 : Branch(Float64Equal(value, value), &next, &return_zero);
5505 68 : BIND(&next);
5506 : }
5507 :
5508 : {
5509 : // +Infinity.
5510 : Label next(this);
5511 : Node* const positive_infinity =
5512 136 : Float64Constant(std::numeric_limits<double>::infinity());
5513 136 : Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
5514 68 : BIND(&next);
5515 : }
5516 :
5517 : {
5518 : // -Infinity.
5519 : Label next(this);
5520 : Node* const negative_infinity =
5521 136 : Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
5522 136 : Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
5523 68 : BIND(&next);
5524 : }
5525 :
5526 : // * Let int be the mathematical value that is the same sign as number and
5527 : // whose magnitude is floor(abs(number)).
5528 : // * Let int32bit be int modulo 2^32.
5529 : // * Return int32bit.
5530 : {
 : // Double mod/add/mod sequence normalizes negatives into [0, 2^32).
5531 136 : Node* x = Float64Trunc(value);
5532 136 : x = Float64Mod(x, float_two_32);
5533 136 : x = Float64Add(x, float_two_32);
5534 136 : x = Float64Mod(x, float_two_32);
5535 :
5536 136 : Node* const result = ChangeFloat64ToTagged(x);
5537 68 : var_result.Bind(result);
5538 68 : Goto(&out);
5539 : }
5540 :
5541 : BIND(&return_zero);
5542 : {
5543 136 : var_result.Bind(SmiConstant(0));
5544 68 : Goto(&out);
5545 68 : }
5546 : }
5547 :
5548 : BIND(&out);
5549 136 : return CAST(var_result.value());
5550 : }
5551 :
5552 161 : TNode<String> CodeStubAssembler::ToString(SloppyTNode<Context> context,
5553 : SloppyTNode<Object> input) {
 : // ES ToString: strings pass through, numbers use NumberToString,
 : // oddballs their cached to-string; everything else (including
 : // receivers and symbols) defers to Runtime::kToString.
5554 161 : Label is_number(this);
5555 161 : Label runtime(this, Label::kDeferred), done(this);
5556 322 : VARIABLE(result, MachineRepresentation::kTagged);
5557 322 : GotoIf(TaggedIsSmi(input), &is_number);
5558 :
5559 161 : TNode<Map> input_map = LoadMap(CAST(input));
5560 161 : TNode<Int32T> input_instance_type = LoadMapInstanceType(input_map);
5561 :
5562 161 : result.Bind(input);
5563 322 : GotoIf(IsStringInstanceType(input_instance_type), &done);
5564 :
5565 161 : Label not_heap_number(this);
5566 322 : Branch(IsHeapNumberMap(input_map), &is_number, &not_heap_number);
5567 :
5568 : BIND(&is_number);
5569 161 : result.Bind(NumberToString(context, input));
5570 161 : Goto(&done);
5571 :
5572 : BIND(&not_heap_number);
5573 : {
5574 322 : GotoIfNot(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE), &runtime);
5575 161 : result.Bind(LoadObjectField(CAST(input), Oddball::kToStringOffset));
5576 161 : Goto(&done);
5577 : }
5578 :
5579 : BIND(&runtime);
5580 : {
5581 161 : result.Bind(CallRuntime(Runtime::kToString, context, input));
5582 161 : Goto(&done);
5583 : }
5584 :
5585 : BIND(&done);
5586 322 : return CAST(result.value());
5587 : }
5588 :
5589 1116 : Node* CodeStubAssembler::ToString_Inline(Node* const context,
5590 : Node* const input) {
5591 1116 : VARIABLE(var_result, MachineRepresentation::kTagged, input);
5592 1116 : Label stub_call(this, Label::kDeferred), out(this);
5593 :
5594 2232 : GotoIf(TaggedIsSmi(input), &stub_call);
5595 2232 : Branch(IsString(input), &out, &stub_call);
5596 :
5597 : BIND(&stub_call);
5598 1116 : var_result.Bind(CallBuiltin(Builtins::kToString, context, input));
5599 1116 : Goto(&out);
5600 :
5601 : BIND(&out);
5602 2232 : return var_result.value();
5603 : }
5604 :
5605 62 : Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
5606 124 : Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
5607 124 : VARIABLE(result, MachineRepresentation::kTagged);
5608 62 : Label done(this, &result);
5609 :
5610 62 : BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
5611 :
5612 : BIND(&if_isreceiver);
5613 : {
5614 : // Convert {input} to a primitive first passing Number hint.
5615 62 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
5616 62 : result.Bind(CallStub(callable, context, input));
5617 62 : Goto(&done);
5618 : }
5619 :
5620 : BIND(&if_isnotreceiver);
5621 : {
5622 62 : result.Bind(input);
5623 62 : Goto(&done);
5624 : }
5625 :
5626 : BIND(&done);
5627 124 : return result.value();
5628 : }
5629 :
// Converts {input} into a non-negative Smi index. undefined maps to 0;
// any value that is negative or does not fit a Smi after ToInteger
// (minus-zero-truncating) jumps to {range_error}.
Node* CodeStubAssembler::ToSmiIndex(Node* const input, Node* const context,
                                    Label* range_error) {
  VARIABLE(result, MachineRepresentation::kTagged, input);
  Label check_undefined(this), return_zero(this), defined(this),
      negative_check(this), done(this);
  Branch(TaggedIsSmi(result.value()), &negative_check, &check_undefined);

  BIND(&check_undefined);
  Branch(IsUndefined(result.value()), &return_zero, &defined);

  BIND(&defined);
  // Non-Smi, non-undefined: run full ToInteger, then require a Smi result.
  result.Bind(ToInteger(context, result.value(),
                        CodeStubAssembler::kTruncateMinusZero));
  GotoIfNot(TaggedIsSmi(result.value()), range_error);
  CSA_ASSERT(this, TaggedIsSmi(result.value()));
  Goto(&negative_check);

  BIND(&negative_check);
  Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);

  BIND(&return_zero);
  result.Bind(SmiConstant(0));
  Goto(&done);

  BIND(&done);
  CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value()));
  return result.value();
}
5658 :
// Converts {input} into a Smi length: negative values clamp to 0; a
// non-Smi (i.e. too-large positive) integer jumps to {range_error}.
Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context,
                                     Label* range_error) {
  VARIABLE(result, MachineRepresentation::kTagged, input);
  Label to_integer(this), negative_check(this), return_zero(this), done(this);
  Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer);

  BIND(&to_integer);
  result.Bind(ToInteger(context, result.value(),
                        CodeStubAssembler::kTruncateMinusZero));
  GotoIf(TaggedIsSmi(result.value()), &negative_check);
  // result.value() can still be a negative HeapNumber here.
  // Negative HeapNumbers clamp to zero; positive ones are out of Smi range.
  Branch(IsTrue(CallBuiltin(Builtins::kLessThan, context, result.value(),
                            SmiConstant(0))),
         &return_zero, range_error);

  BIND(&negative_check);
  Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);

  BIND(&return_zero);
  result.Bind(SmiConstant(0));
  Goto(&done);

  BIND(&done);
  CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value()));
  return result.value();
}
5685 :
5686 372 : Node* CodeStubAssembler::ToLength_Inline(Node* const context,
5687 : Node* const input) {
5688 744 : Node* const smi_zero = SmiConstant(0);
5689 : return Select(
5690 1488 : TaggedIsSmi(input), [=] { return SmiMax(input, smi_zero); },
5691 372 : [=] { return CallBuiltin(Builtins::kToLength, context, input); },
5692 1488 : MachineRepresentation::kTagged);
5693 : }
5694 :
// ES #sec-tointeger: converts {input} to an integral Number. NaN maps to 0;
// with kTruncateMinusZero, -0.0 also maps to Smi 0. Non-number inputs are
// first converted via NonNumberToNumber (hence the loop).
TNode<Number> CodeStubAssembler::ToInteger(SloppyTNode<Context> context,
                                           SloppyTNode<Object> input,
                                           ToIntegerTruncationMode mode) {
  // We might need to loop once for ToNumber conversion.
  TVARIABLE(Object, var_arg, input);
  Label loop(this, &var_arg), out(this);
  Goto(&loop);
  BIND(&loop);
  {
    // Shared entry points.
    Label return_zero(this, Label::kDeferred);

    // Load the current {arg} value.
    TNode<Object> arg = var_arg;

    // Check if {arg} is a Smi.
    GotoIf(TaggedIsSmi(arg), &out);

    // Check if {arg} is a HeapNumber.
    Label if_argisheapnumber(this),
        if_argisnotheapnumber(this, Label::kDeferred);
    Branch(IsHeapNumber(arg), &if_argisheapnumber, &if_argisnotheapnumber);

    BIND(&if_argisheapnumber);
    {
      TNode<HeapNumber> arg_hn = CAST(arg);
      // Load the floating-point value of {arg}.
      Node* arg_value = LoadHeapNumberValue(arg_hn);

      // Check if {arg} is NaN.
      GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);

      // Truncate {arg} towards zero.
      TNode<Float64T> value = Float64Trunc(arg_value);

      if (mode == kTruncateMinusZero) {
        // Truncate -0.0 to 0.
        GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
      }

      var_arg = ChangeFloat64ToTagged(value);
      Goto(&out);
    }

    BIND(&if_argisnotheapnumber);
    {
      // Need to convert {arg} to a Number first.
      var_arg = UncheckedCast<Object>(
          CallBuiltin(Builtins::kNonNumberToNumber, context, arg));
      Goto(&loop);
    }

    BIND(&return_zero);
    var_arg = SmiConstant(0);
    Goto(&out);
  }

  BIND(&out);
  return CAST(var_arg);
}
5755 :
5756 10445 : TNode<Uint32T> CodeStubAssembler::DecodeWord32(SloppyTNode<Word32T> word32,
5757 : uint32_t shift, uint32_t mask) {
5758 : return UncheckedCast<Uint32T>(Word32Shr(
5759 31335 : Word32And(word32, Int32Constant(mask)), static_cast<int>(shift)));
5760 : }
5761 :
5762 12951 : Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) {
5763 51804 : return WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift));
5764 : }
5765 :
5766 217 : Node* CodeStubAssembler::UpdateWord(Node* word, Node* value, uint32_t shift,
5767 : uint32_t mask) {
5768 651 : Node* encoded_value = WordShl(value, static_cast<int>(shift));
5769 434 : Node* inverted_mask = IntPtrConstant(~static_cast<intptr_t>(mask));
5770 : // Ensure the {value} fits fully in the mask.
5771 : CSA_ASSERT(this, WordEqual(WordAnd(encoded_value, inverted_mask),
5772 : IntPtrConstant(0)));
5773 651 : return WordOr(WordAnd(word, inverted_mask), encoded_value);
5774 : }
5775 :
5776 0 : void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
5777 0 : if (FLAG_native_code_counters && counter->Enabled()) {
5778 0 : Node* counter_address = ExternalConstant(ExternalReference(counter));
5779 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
5780 0 : Int32Constant(value));
5781 : }
5782 0 : }
5783 :
5784 4240 : void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
5785 : DCHECK_GT(delta, 0);
5786 4240 : if (FLAG_native_code_counters && counter->Enabled()) {
5787 0 : Node* counter_address = ExternalConstant(ExternalReference(counter));
5788 0 : Node* value = Load(MachineType::Int32(), counter_address);
5789 0 : value = Int32Add(value, Int32Constant(delta));
5790 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
5791 : }
5792 4240 : }
5793 :
5794 0 : void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
5795 : DCHECK_GT(delta, 0);
5796 0 : if (FLAG_native_code_counters && counter->Enabled()) {
5797 0 : Node* counter_address = ExternalConstant(ExternalReference(counter));
5798 0 : Node* value = Load(MachineType::Int32(), counter_address);
5799 0 : value = Int32Sub(value, Int32Constant(delta));
5800 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
5801 : }
5802 0 : }
5803 :
// Adds the compile-time constant {value} to {variable} in place, using Smi
// or intptr arithmetic according to {mode}. The DCHECKs verify that the
// variable's machine representation matches the requested parameter mode.
void CodeStubAssembler::Increment(Variable* variable, int value,
                                  ParameterMode mode) {
  DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
                 variable->rep() == MachineType::PointerRepresentation());
  DCHECK_IMPLIES(mode == SMI_PARAMETERS,
                 variable->rep() == MachineRepresentation::kTagged ||
                     variable->rep() == MachineRepresentation::kTaggedSigned);
  variable->Bind(IntPtrOrSmiAdd(variable->value(),
                                IntPtrOrSmiConstant(value, mode), mode));
}
5814 :
// Emits a branch to {label} whose condition (0 == 1) is statically never
// taken, so {label} is referenced by the graph without ever being reached.
void CodeStubAssembler::Use(Label* label) {
  GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
}
5818 :
// Classifies a property {key} as either an array index (jump to
// {if_keyisindex} with {var_index} set) or a unique name (jump to
// {if_keyisunique} with {var_unique} set). Symbols, internalized strings,
// ThinStrings (via their actual string) and Oddballs (via their cached
// string) count as unique; uncacheable string indices go to {if_bailout},
// non-internalized strings go to {if_notinternalized} when provided.
void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
                                  Variable* var_index, Label* if_keyisunique,
                                  Variable* var_unique, Label* if_bailout,
                                  Label* if_notinternalized) {
  DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
  DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
  Comment("TryToName");

  Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this),
      if_keyisother(this, Label::kDeferred);
  // Handle Smi and HeapNumber keys.
  var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
  Goto(if_keyisindex);

  BIND(&if_keyisnotindex);
  Node* key_map = LoadMap(key);
  var_unique->Bind(key);
  // Symbols are unique.
  GotoIf(IsSymbolMap(key_map), if_keyisunique);
  Node* key_instance_type = LoadMapInstanceType(key_map);
  // Miss if |key| is not a String.
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  GotoIfNot(IsStringInstanceType(key_instance_type), &if_keyisother);

  // |key| is a String. Check if it has a cached array index.
  Node* hash = LoadNameHashField(key);
  GotoIf(IsClearWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
         &if_hascachedindex);
  // No cached array index. If the string knows that it contains an index,
  // then it must be an uncacheable index. Handle this case in the runtime.
  GotoIf(IsClearWord32(hash, Name::kIsNotArrayIndexMask), if_bailout);
  // Check if we have a ThinString.
  GotoIf(InstanceTypeEqual(key_instance_type, THIN_STRING_TYPE),
         &if_thinstring);
  GotoIf(InstanceTypeEqual(key_instance_type, THIN_ONE_BYTE_STRING_TYPE),
         &if_thinstring);
  // Finally, check if |key| is internalized.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  GotoIf(IsSetWord32(key_instance_type, kIsNotInternalizedMask),
         if_notinternalized != nullptr ? if_notinternalized : if_bailout);
  Goto(if_keyisunique);

  BIND(&if_thinstring);
  // A ThinString is unique through the internalized string it points at.
  var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
  Goto(if_keyisunique);

  BIND(&if_hascachedindex);
  var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
  Goto(if_keyisindex);

  BIND(&if_keyisother);
  // Oddball keys (undefined, null, ...) are looked up via their cached
  // string representation; anything else bails out.
  GotoIfNot(InstanceTypeEqual(key_instance_type, ODDBALL_TYPE), if_bailout);
  var_unique->Bind(LoadObjectField(key, Oddball::kToStringOffset));
  Goto(if_keyisunique);
}
5874 :
// Calls the C function try_internalize_string_function on {string}.
// A non-Smi result is the internalized string (-> {if_internalized} with
// {var_internalized} set). A Smi result is a sentinel: kNotFound ->
// {if_not_internalized}, kUnsupported -> {if_bailout}; any other Smi is an
// array index (-> {if_index} with {var_index} set).
void CodeStubAssembler::TryInternalizeString(
    Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
    Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
  DCHECK(var_index->rep() == MachineType::PointerRepresentation());
  DCHECK_EQ(var_internalized->rep(), MachineRepresentation::kTagged);
  CSA_SLOW_ASSERT(this, IsString(string));
  Node* function = ExternalConstant(
      ExternalReference::try_internalize_string_function(isolate()));
  Node* result = CallCFunction1(MachineType::AnyTagged(),
                                MachineType::AnyTagged(), function, string);
  Label internalized(this);
  GotoIf(TaggedIsNotSmi(result), &internalized);
  Node* word_result = SmiUntag(result);
  GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
         if_not_internalized);
  GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
         if_bailout);
  var_index->Bind(word_result);
  Goto(if_index);

  BIND(&internalized);
  var_internalized->Bind(result);
  Goto(if_internalized);
}
5899 :
5900 : template <typename Dictionary>
5901 21234 : Node* CodeStubAssembler::EntryToIndex(Node* entry, int field_index) {
5902 63702 : Node* entry_index = IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
5903 : return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
5904 63702 : field_index));
5905 : }
5906 :
5907 : template Node* CodeStubAssembler::EntryToIndex<NameDictionary>(Node*, int);
5908 : template Node* CodeStubAssembler::EntryToIndex<GlobalDictionary>(Node*, int);
5909 : template Node* CodeStubAssembler::EntryToIndex<SeededNumberDictionary>(Node*,
5910 : int);
5911 :
5912 : // This must be kept in sync with HashTableBase::ComputeCapacity().
5913 440 : TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
5914 : SloppyTNode<IntPtrT> at_least_space_for) {
5915 : Node* capacity = IntPtrRoundUpToPowerOfTwo32(IntPtrAdd(
5916 880 : at_least_space_for, WordShr(at_least_space_for, IntPtrConstant(1))));
5917 880 : return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
5918 : }
5919 :
5920 534 : TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
5921 : SloppyTNode<IntPtrT> right) {
5922 : intptr_t left_constant;
5923 : intptr_t right_constant;
5924 969 : if (ToIntPtrConstant(left, left_constant) &&
5925 435 : ToIntPtrConstant(right, right_constant)) {
5926 435 : return IntPtrConstant(std::max(left_constant, right_constant));
5927 : }
5928 : return SelectConstant(IntPtrGreaterThanOrEqual(left, right), left, right,
5929 198 : MachineType::PointerRepresentation());
5930 : }
5931 :
5932 125 : TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
5933 : SloppyTNode<IntPtrT> right) {
5934 : intptr_t left_constant;
5935 : intptr_t right_constant;
5936 126 : if (ToIntPtrConstant(left, left_constant) &&
5937 1 : ToIntPtrConstant(right, right_constant)) {
5938 1 : return IntPtrConstant(std::min(left_constant, right_constant));
5939 : }
5940 : return SelectConstant(IntPtrLessThanOrEqual(left, right), left, right,
5941 248 : MachineType::PointerRepresentation());
5942 : }
5943 :
// Reads the dictionary's next enumeration index (a Smi) from its dedicated
// element slot.
template <class Dictionary>
Node* CodeStubAssembler::GetNextEnumerationIndex(Node* dictionary) {
  return LoadFixedArrayElement(dictionary,
                               Dictionary::kNextEnumerationIndexIndex);
}
5949 :
// Stores {next_enum_index_smi} into the dictionary's next-enumeration-index
// slot; no write barrier needed since the value is a Smi (per the parameter's
// contract).
template <class Dictionary>
void CodeStubAssembler::SetNextEnumerationIndex(Node* dictionary,
                                                Node* next_enum_index_smi) {
  StoreFixedArrayElement(dictionary, Dictionary::kNextEnumerationIndexIndex,
                         next_enum_index_smi, SKIP_WRITE_BARRIER);
}
5956 :
// In a NameDictionary the key slot holds the Name itself (or the hole for
// deleted entries), so it can be returned directly.
template <>
Node* CodeStubAssembler::LoadName<NameDictionary>(Node* key) {
  CSA_ASSERT(this, Word32Or(IsTheHole(key), IsName(key)));
  return key;
}
5962 :
// In a GlobalDictionary the key slot holds a PropertyCell; the name has to
// be loaded out of the cell.
template <>
Node* CodeStubAssembler::LoadName<GlobalDictionary>(Node* key) {
  CSA_ASSERT(this, IsPropertyCell(key));
  CSA_ASSERT(this, IsNotTheHole(key));
  return LoadObjectField(key, PropertyCell::kNameOffset);
}
5969 :
// Open-addressed lookup of {unique_name} in {dictionary}. The first
// {inlined_probes} probes are unrolled straight-line; the remainder run in a
// loop. In kFindExisting mode, jumps to {if_found} with {var_name_index}
// holding the key's FixedArray index, or {if_not_found} on an undefined
// slot. In kFindInsertionIndex mode, jumps to {if_not_found} as soon as a
// free slot (undefined or the hole) is located.
template <typename Dictionary>
void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
                                             Node* unique_name, Label* if_found,
                                             Variable* var_name_index,
                                             Label* if_not_found,
                                             int inlined_probes,
                                             LookupMode mode) {
  CSA_ASSERT(this, IsDictionary(dictionary));
  DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
  DCHECK_IMPLIES(mode == kFindInsertionIndex,
                 inlined_probes == 0 && if_found == nullptr);
  Comment("NameDictionaryLookup");

  Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
  Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
  Node* hash = ChangeUint32ToWord(LoadNameHash(unique_name));

  // See Dictionary::FirstProbe().
  Node* count = IntPtrConstant(0);
  Node* entry = WordAnd(hash, mask);
  Node* undefined = UndefinedConstant();

  for (int i = 0; i < inlined_probes; i++) {
    Node* index = EntryToIndex<Dictionary>(entry);
    var_name_index->Bind(index);

    Node* current = LoadFixedArrayElement(dictionary, index);
    GotoIf(WordEqual(current, undefined), if_not_found);
    current = LoadName<Dictionary>(current);
    GotoIf(WordEqual(current, unique_name), if_found);

    // See Dictionary::NextProbe().
    count = IntPtrConstant(i + 1);
    entry = WordAnd(IntPtrAdd(entry, count), mask);
  }
  if (mode == kFindInsertionIndex) {
    // Appease the variable merging algorithm for "Goto(&loop)" below.
    var_name_index->Bind(IntPtrConstant(0));
  }

  VARIABLE(var_count, MachineType::PointerRepresentation(), count);
  VARIABLE(var_entry, MachineType::PointerRepresentation(), entry);
  Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
  Label loop(this, 3, loop_vars);
  Goto(&loop);
  BIND(&loop);
  {
    Node* entry = var_entry.value();

    Node* index = EntryToIndex<Dictionary>(entry);
    var_name_index->Bind(index);

    Node* current = LoadFixedArrayElement(dictionary, index);
    GotoIf(WordEqual(current, undefined), if_not_found);
    if (mode == kFindExisting) {
      current = LoadName<Dictionary>(current);
      GotoIf(WordEqual(current, unique_name), if_found);
    } else {
      DCHECK_EQ(kFindInsertionIndex, mode);
      // A deleted (hole) slot is a valid insertion point.
      GotoIf(WordEqual(current, TheHoleConstant()), if_not_found);
    }

    // See Dictionary::NextProbe().
    Increment(&var_count);
    entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);

    var_entry.Bind(entry);
    Goto(&loop);
  }
}

// Instantiate template methods to workaround GCC compilation issue.
template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
    Node*, Node*, Label*, Variable*, Label*, int, LookupMode);
template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
    Node*, Node*, Label*, Variable*, Label*, int, LookupMode);
6046 :
6047 0 : Node* CodeStubAssembler::ComputeIntegerHash(Node* key) {
6048 0 : return ComputeIntegerHash(key, IntPtrConstant(kZeroHashSeed));
6049 : }
6050 :
// Mixes {key} (truncated to 32 bits) with {seed} into a hash; the bit-mixing
// steps mirror the C++ v8::internal::ComputeIntegerHash(), and the final
// mask keeps the result within 30 bits so it fits a Smi payload.
Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) {
  // See v8::internal::ComputeIntegerHash()
  Node* hash = TruncateWordToWord32(key);
  hash = Word32Xor(hash, seed);
  hash = Int32Add(Word32Xor(hash, Int32Constant(0xffffffff)),
                  Word32Shl(hash, Int32Constant(15)));
  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
  hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
  hash = Int32Mul(hash, Int32Constant(2057));
  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
  return Word32And(hash, Int32Constant(0x3fffffff));
}
6064 :
// Open-addressed lookup of the numeric key {intptr_index} in a
// (Seeded/Unseeded)NumberDictionary. Keys may be stored as Smis or as
// HeapNumbers, so both representations are compared; deleted (hole) slots
// continue probing, an undefined slot means not found. On success jumps to
// {if_found} with {var_entry} holding the entry number.
template <typename Dictionary>
void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
                                               Node* intptr_index,
                                               Label* if_found,
                                               Variable* var_entry,
                                               Label* if_not_found) {
  CSA_ASSERT(this, IsDictionary(dictionary));
  DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
  Comment("NumberDictionaryLookup");

  Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
  Node* mask = IntPtrSub(capacity, IntPtrConstant(1));

  // Only the seeded variant mixes in the isolate's hash seed.
  Node* int32_seed = std::is_same<Dictionary, SeededNumberDictionary>::value
                         ? HashSeed()
                         : Int32Constant(kZeroHashSeed);
  Node* hash = ChangeUint32ToWord(ComputeIntegerHash(intptr_index, int32_seed));
  Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);

  // See Dictionary::FirstProbe().
  Node* count = IntPtrConstant(0);
  Node* entry = WordAnd(hash, mask);

  Node* undefined = UndefinedConstant();
  Node* the_hole = TheHoleConstant();

  VARIABLE(var_count, MachineType::PointerRepresentation(), count);
  Variable* loop_vars[] = {&var_count, var_entry};
  Label loop(this, 2, loop_vars);
  var_entry->Bind(entry);
  Goto(&loop);
  BIND(&loop);
  {
    Node* entry = var_entry->value();

    Node* index = EntryToIndex<Dictionary>(entry);
    Node* current = LoadFixedArrayElement(dictionary, index);
    GotoIf(WordEqual(current, undefined), if_not_found);
    Label next_probe(this);
    {
      Label if_currentissmi(this), if_currentisnotsmi(this);
      Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
      BIND(&if_currentissmi);
      {
        // Smi-stored key: compare the untagged values.
        Node* current_value = SmiUntag(current);
        Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
      }
      BIND(&if_currentisnotsmi);
      {
        GotoIf(WordEqual(current, the_hole), &next_probe);
        // Current must be the Number.
        Node* current_value = LoadHeapNumberValue(current);
        Branch(Float64Equal(current_value, key_as_float64), if_found,
               &next_probe);
      }
    }

    BIND(&next_probe);
    // See Dictionary::NextProbe().
    Increment(&var_count);
    entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);

    var_entry->Bind(entry);
    Goto(&loop);
  }
}
6131 :
// Generic version must never be instantiated; only the dictionary-specific
// specializations below are valid.
template <class Dictionary>
void CodeStubAssembler::FindInsertionEntry(Node* dictionary, Node* key,
                                           Variable* var_key_index) {
  UNREACHABLE();
}
6137 :
// Locates a free slot for {key} via the probing sequence and leaves its
// FixedArray key index in {var_key_index}. The lookup cannot fail here: the
// caller (see Add()) has already bailed out when capacity was insufficient.
template <>
void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
    Node* dictionary, Node* key, Variable* var_key_index) {
  Label done(this);
  NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
                                       &done, 0, kFindInsertionIndex);
  BIND(&done);
}
6146 :
// Generic version must never be instantiated; use the dictionary-specific
// specializations below.
template <class Dictionary>
void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value,
                                    Node* index, Node* enum_index) {
  UNREACHABLE();  // Use specializations instead.
}
6152 :
// Writes {name}/{value} into the NameDictionary slot at key index {index}
// and stores matching property details: a plain data property (kData, NONE)
// carrying {enum_index}, with DONT_ENUM additionally set for private
// symbols.
template <>
void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary,
                                                    Node* name, Node* value,
                                                    Node* index,
                                                    Node* enum_index) {
  CSA_SLOW_ASSERT(this, IsDictionary(dictionary));

  // Store name and value.
  StoreFixedArrayElement(dictionary, index, name);
  StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);

  // Prepare details of the new property.
  PropertyDetails d(kData, NONE, PropertyCellType::kNoCell);
  enum_index =
      SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
  // We OR over the actual index below, so we expect the initial value to be 0.
  DCHECK_EQ(0, d.dictionary_index());
  VARIABLE(var_details, MachineRepresentation::kTaggedSigned,
           SmiOr(SmiConstant(d.AsSmi()), enum_index));

  // Private names must be marked non-enumerable.
  Label not_private(this, &var_details);
  GotoIfNot(IsSymbolMap(LoadMap(name)), &not_private);
  Node* flags = SmiToWord32(CAST(LoadObjectField(name, Symbol::kFlagsOffset)));
  const int kPrivateMask = 1 << Symbol::kPrivateBit;
  GotoIfNot(IsSetWord32(flags, kPrivateMask), &not_private);
  Node* dont_enum =
      SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
  var_details.Bind(SmiOr(var_details.value(), dont_enum));
  Goto(&not_private);
  BIND(&not_private);

  // Finally, store the details.
  StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
                                         var_details.value());
}
6189 :
// Inserting into a GlobalDictionary is not supported from CSA code.
template <>
void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary,
                                                      Node* key, Node* value,
                                                      Node* index,
                                                      Node* enum_index) {
  UNIMPLEMENTED();
}
6197 :
// Adds {key} -> {value} to {dictionary}, jumping to {bailout} when the
// table would exceed the 67% fill limit, when more than half of the free
// slots are deleted entries (needing a rehash), or when the enumeration
// index space is exhausted. After the checks pass, the insertion itself
// cannot fail.
template <class Dictionary>
void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value,
                            Label* bailout) {
  CSA_SLOW_ASSERT(this, IsDictionary(dictionary));
  Node* capacity = GetCapacity<Dictionary>(dictionary);
  Node* nof = GetNumberOfElements<Dictionary>(dictionary);
  Node* new_nof = SmiAdd(nof, SmiConstant(1));
  // Require 33% to still be free after adding additional_elements.
  // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
  // But that's OK here because it's only used for a comparison.
  Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
  GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
  // Require rehashing if more than 50% of free elements are deleted elements.
  Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
  CSA_ASSERT(this, SmiAbove(capacity, new_nof));
  Node* half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
  GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);

  Node* enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
  Node* new_enum_index = SmiAdd(enum_index, SmiConstant(1));
  Node* max_enum_index =
      SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
  GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);

  // No more bailouts after this point.
  // Operations from here on can have side effects.

  SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
  SetNumberOfElements<Dictionary>(dictionary, new_nof);

  VARIABLE(var_key_index, MachineType::PointerRepresentation());
  FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
  InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
                          enum_index);
}

template void CodeStubAssembler::Add<NameDictionary>(Node*, Node*, Node*,
                                                     Label*);
6236 :
// Linear search for {unique_name} over the first {nof} descriptors,
// scanning the key slots backwards from the last entry. On a match jumps to
// {if_found} with the key's array index bound into {var_name_index};
// otherwise falls through to {if_not_found}.
void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name,
                                               Node* descriptors, Node* nof,
                                               Label* if_found,
                                               Variable* var_name_index,
                                               Label* if_not_found) {
  Comment("DescriptorLookupLinear");
  Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0));
  Node* factor = IntPtrConstant(DescriptorArray::kEntrySize);
  Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor));

  // Note: iterates from last_exclusive down to first_inclusive (negative
  // increment, pre-decrement).
  BuildFastLoop(last_exclusive, first_inclusive,
                [this, descriptors, unique_name, if_found,
                 var_name_index](Node* name_index) {
                  Node* candidate_name =
                      LoadFixedArrayElement(descriptors, name_index);
                  var_name_index->Bind(name_index);
                  GotoIf(WordEqual(candidate_name, unique_name), if_found);
                },
                -DescriptorArray::kEntrySize, INTPTR_PARAMETERS,
                IndexAdvanceMode::kPre);
  Goto(if_not_found);
}
6259 :
6260 818 : Node* CodeStubAssembler::DescriptorArrayNumberOfEntries(Node* descriptors) {
6261 : return LoadAndUntagToWord32FixedArrayElement(
6262 1636 : descriptors, IntPtrConstant(DescriptorArray::kDescriptorLengthIndex));
6263 : }
6264 :
6265 : namespace {
6266 :
6267 4152 : Node* DescriptorNumberToIndex(CodeStubAssembler* a, Node* descriptor_number) {
6268 8304 : Node* descriptor_size = a->Int32Constant(DescriptorArray::kEntrySize);
6269 8304 : Node* index = a->Int32Mul(descriptor_number, descriptor_size);
6270 8304 : return a->ChangeInt32ToIntPtr(index);
6271 : }
6272 :
6273 : } // namespace
6274 :
6275 818 : Node* CodeStubAssembler::DescriptorArrayToKeyIndex(Node* descriptor_number) {
6276 818 : return IntPtrAdd(IntPtrConstant(DescriptorArray::ToKeyIndex(0)),
6277 3272 : DescriptorNumberToIndex(this, descriptor_number));
6278 : }
6279 :
// Returns the descriptor number that is {descriptor_number}-th in sorted
// order. The sorted-order pointer is packed into the entry's details word
// (PropertyDetails::DescriptorPointer bit field); the binary search below
// relies on this order being sorted by name hash.
Node* CodeStubAssembler::DescriptorArrayGetSortedKeyIndex(
    Node* descriptors, Node* descriptor_number) {
  const int details_offset = DescriptorArray::ToDetailsIndex(0) * kPointerSize;
  Node* details = LoadAndUntagToWord32FixedArrayElement(
      descriptors, DescriptorNumberToIndex(this, descriptor_number),
      details_offset);
  return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
}
6288 :
// Loads the key of the {descriptor_number}-th descriptor from the
// DescriptorArray's key slot.
Node* CodeStubAssembler::DescriptorArrayGetKey(Node* descriptors,
                                               Node* descriptor_number) {
  const int key_offset = DescriptorArray::ToKeyIndex(0) * kPointerSize;
  return LoadFixedArrayElement(descriptors,
                               DescriptorNumberToIndex(this, descriptor_number),
                               key_offset);
}
6296 :
// Searches for {unique_name} among the descriptors using a binary search
// over the hash-sorted key order, then scans linearly through the run of
// keys with an equal hash. A hit whose sorted index is >= {nof} (a
// descriptor beyond the map's own count) is treated as not found. On
// success binds the key's array index into {var_name_index} and jumps to
// {if_found}.
void CodeStubAssembler::DescriptorLookupBinary(Node* unique_name,
                                               Node* descriptors, Node* nof,
                                               Label* if_found,
                                               Variable* var_name_index,
                                               Label* if_not_found) {
  Comment("DescriptorLookupBinary");
  VARIABLE(var_low, MachineRepresentation::kWord32, Int32Constant(0));
  Node* limit =
      Int32Sub(DescriptorArrayNumberOfEntries(descriptors), Int32Constant(1));
  VARIABLE(var_high, MachineRepresentation::kWord32, limit);
  Node* hash = LoadNameHashField(unique_name);
  CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));

  // Assume non-empty array.
  CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));

  Variable* loop_vars[] = {&var_high, &var_low};
  Label binary_loop(this, 2, loop_vars);
  Goto(&binary_loop);
  BIND(&binary_loop);
  {
    // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
    Node* mid =
        Int32Add(var_low.value(),
                 Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1));
    // mid_name = descriptors->GetSortedKey(mid).
    Node* sorted_key_index = DescriptorArrayGetSortedKeyIndex(descriptors, mid);
    Node* mid_name = DescriptorArrayGetKey(descriptors, sorted_key_index);

    Node* mid_hash = LoadNameHashField(mid_name);

    Label mid_greater(this), mid_less(this), merge(this);
    Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
    BIND(&mid_greater);
    {
      var_high.Bind(mid);
      Goto(&merge);
    }
    BIND(&mid_less);
    {
      var_low.Bind(Int32Add(mid, Int32Constant(1)));
      Goto(&merge);
    }
    BIND(&merge);
    GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
  }

  // var_low now points at the first entry whose hash is >= {hash}; walk the
  // run of equal-hash entries looking for an identical name.
  Label scan_loop(this, &var_low);
  Goto(&scan_loop);
  BIND(&scan_loop);
  {
    GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);

    Node* sort_index =
        DescriptorArrayGetSortedKeyIndex(descriptors, var_low.value());
    Node* current_name = DescriptorArrayGetKey(descriptors, sort_index);
    Node* current_hash = LoadNameHashField(current_name);
    GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
    Label next(this);
    GotoIf(WordNotEqual(current_name, unique_name), &next);
    GotoIf(Int32GreaterThanOrEqual(sort_index, nof), if_not_found);
    var_name_index->Bind(DescriptorArrayToKeyIndex(sort_index));
    Goto(if_found);

    BIND(&next);
    var_low.Bind(Int32Add(var_low.value(), Int32Constant(1)));
    Goto(&scan_loop);
  }
}
6366 :
6367 818 : void CodeStubAssembler::DescriptorLookup(Node* unique_name, Node* descriptors,  // Dispatches a descriptor-array lookup to linear or binary search by size.
6368 : Node* bitfield3, Label* if_found,
6369 : Variable* var_name_index,
6370 : Label* if_not_found) {
6371 818 : Comment("DescriptorArrayLookup");
6372 : Node* nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
6373 2454 : GotoIf(Word32Equal(nof, Int32Constant(0)), if_not_found);  // No own descriptors: nothing to find.
6374 818 : Label linear_search(this), binary_search(this);
6375 : const int kMaxElementsForLinearSearch = 32;  // Below this, a linear scan is cheaper than binary search.
6376 1636 : Branch(Int32LessThanOrEqual(nof, Int32Constant(kMaxElementsForLinearSearch)),
6377 1636 : &linear_search, &binary_search);
6378 : BIND(&linear_search);
6379 : {
6380 818 : DescriptorLookupLinear(unique_name, descriptors, ChangeInt32ToIntPtr(nof),
6381 1636 : if_found, var_name_index, if_not_found);
6382 : }
6383 : BIND(&binary_search);
6384 : {
6385 : DescriptorLookupBinary(unique_name, descriptors, nof, if_found,
6386 818 : var_name_index, if_not_found);
6387 818 : }
6388 818 : }
6389 :
6390 694 : void CodeStubAssembler::TryLookupProperty(  // Routes a named own-property lookup to fast (descriptor), slow (dictionary), or global-dictionary paths.
6391 : Node* object, Node* map, Node* instance_type, Node* unique_name,
6392 : Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
6393 : Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found,
6394 : Label* if_bailout) {
6395 : DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep());
6396 : DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
6397 :
6398 694 : Label if_objectisspecial(this);
6399 : STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
6400 : GotoIf(Int32LessThanOrEqual(instance_type,
6401 1388 : Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),  // Special receivers (proxies, globals, ...) need extra handling below.
6402 1388 : &if_objectisspecial);
6403 :
6404 : uint32_t mask =
6405 : 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
6406 : CSA_ASSERT(this, Word32BinaryNot(IsSetWord32(LoadMapBitField(map), mask)));  // Non-special receivers must have neither interceptors nor access checks.
6407 : USE(mask);
6408 :
6409 1388 : Node* bit_field3 = LoadMapBitField3(map);
6410 694 : Label if_isfastmap(this), if_isslowmap(this);
6411 : Branch(IsSetWord32<Map::DictionaryMap>(bit_field3), &if_isslowmap,
6412 694 : &if_isfastmap);
6413 : BIND(&if_isfastmap);
6414 : {
6415 1388 : Node* descriptors = LoadMapDescriptors(map);
6416 694 : var_meta_storage->Bind(descriptors);  // On the fast path the "meta storage" is the descriptor array.
6417 :
6418 : DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
6419 694 : var_name_index, if_not_found);
6420 : }
6421 : BIND(&if_isslowmap);
6422 : {
6423 1388 : Node* dictionary = LoadSlowProperties(object);
6424 694 : var_meta_storage->Bind(dictionary);  // On the slow path it is the property dictionary.
6425 :
6426 : NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
6427 694 : var_name_index, if_not_found);
6428 : }
6429 : BIND(&if_objectisspecial);
6430 : {
6431 : // Handle global object here and bailout for other special objects.
6432 : GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
6433 1388 : if_bailout);
6434 :
6435 : // Handle interceptors and access checks in runtime.
6436 1388 : Node* bit_field = LoadMapBitField(map);
6437 : int mask = 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
6438 1388 : GotoIf(IsSetWord32(bit_field, mask), if_bailout);
6439 :
6440 1388 : Node* dictionary = LoadSlowProperties(object);
6441 694 : var_meta_storage->Bind(dictionary);
6442 :
6443 : NameDictionaryLookup<GlobalDictionary>(
6444 694 : dictionary, unique_name, if_found_global, var_name_index, if_not_found);
6445 694 : }
6446 694 : }
6447 :
6448 409 : void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,  // HasOwnProperty-style check: only existence matters, except globals need a liveness check.
6449 : Node* instance_type,
6450 : Node* unique_name, Label* if_found,
6451 : Label* if_not_found,
6452 : Label* if_bailout) {
6453 409 : Comment("TryHasOwnProperty");
6454 409 : VARIABLE(var_meta_storage, MachineRepresentation::kTagged);
6455 818 : VARIABLE(var_name_index, MachineType::PointerRepresentation());
6456 :
6457 409 : Label if_found_global(this);
6458 : TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,  // Fast and dict hits both count as "found" directly.
6459 : &if_found_global, &var_meta_storage, &var_name_index,
6460 409 : if_not_found, if_bailout);
6461 :
6462 : BIND(&if_found_global);
6463 : {
6464 409 : VARIABLE(var_value, MachineRepresentation::kTagged);
6465 818 : VARIABLE(var_details, MachineRepresentation::kWord32);
6466 : // Check if the property cell is not deleted.
6467 : LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
6468 : var_name_index.value(), &var_value,
6469 409 : &var_details, if_not_found);
6470 818 : Goto(if_found);
6471 409 : }
6472 409 : }
6473 :
6474 155 : Node* CodeStubAssembler::GetMethod(Node* context, Node* object,  // Loads object[name], branching away when the result is null or undefined.
6475 : Handle<Name> name,
6476 : Label* if_null_or_undefined) {
6477 155 : Node* method = GetProperty(context, object, name);
6478 :
6479 310 : GotoIf(IsUndefined(method), if_null_or_undefined);
6480 310 : GotoIf(IsNull(method), if_null_or_undefined);
6481 :
6482 155 : return method;  // NOTE(review): callability of the result is not checked here — callers must verify.
6483 : }
6484 :
6485 409 : void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,  // Loads a fast-mode property value: in-object field, backing store, or descriptor constant.
6486 : Node* descriptors,
6487 : Node* name_index,
6488 : Variable* var_details,
6489 : Variable* var_value) {
6490 : DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
6491 : DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
6492 409 : Comment("[ LoadPropertyFromFastObject");
6493 :
6494 : Node* details =
6495 : LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
6496 409 : var_details->Bind(details);
6497 :
6498 : Node* location = DecodeWord32<PropertyDetails::LocationField>(details);  // kField => stored on the object; otherwise stored in the descriptor.
6499 :
6500 409 : Label if_in_field(this), if_in_descriptor(this), done(this);
6501 818 : Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
6502 818 : &if_in_descriptor);
6503 : BIND(&if_in_field);
6504 : {
6505 : Node* field_index =
6506 409 : DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
6507 : Node* representation =
6508 : DecodeWord32<PropertyDetails::RepresentationField>(details);
6509 :
6510 818 : Node* inobject_properties = LoadMapInobjectProperties(map);
6511 :
6512 409 : Label if_inobject(this), if_backing_store(this);
6513 818 : VARIABLE(var_double_value, MachineRepresentation::kFloat64);
6514 409 : Label rebox_double(this, &var_double_value);
6515 409 : Branch(UintPtrLessThan(field_index, inobject_properties), &if_inobject,  // Field indices below the in-object count live inside the object itself.
6516 818 : &if_backing_store);
6517 : BIND(&if_inobject);
6518 : {
6519 409 : Comment("if_inobject");
6520 : Node* field_offset = TimesPointerSize(
6521 818 : IntPtrAdd(IntPtrSub(LoadMapInstanceSize(map), inobject_properties),
6522 1636 : field_index));
6523 :
6524 409 : Label if_double(this), if_tagged(this);
6525 : Branch(Word32NotEqual(representation,
6526 818 : Int32Constant(Representation::kDouble)),
6527 818 : &if_tagged, &if_double);
6528 : BIND(&if_tagged);
6529 : {
6530 409 : var_value->Bind(LoadObjectField(object, field_offset));
6531 409 : Goto(&done);
6532 : }
6533 : BIND(&if_double);
6534 : {
6535 : if (FLAG_unbox_double_fields) {  // With unboxed doubles the raw float64 sits directly in the field.
6536 : var_double_value.Bind(
6537 409 : LoadObjectField(object, field_offset, MachineType::Float64()));
6538 : } else {
6539 : Node* mutable_heap_number = LoadObjectField(object, field_offset);
6540 : var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
6541 : }
6542 409 : Goto(&rebox_double);
6543 409 : }
6544 : }
6545 : BIND(&if_backing_store);
6546 : {
6547 409 : Comment("if_backing_store");
6548 818 : Node* properties = LoadFastProperties(object);
6549 818 : field_index = IntPtrSub(field_index, inobject_properties);  // Backing-store indices are relative to the end of in-object fields.
6550 409 : Node* value = LoadFixedArrayElement(properties, field_index);
6551 :
6552 409 : Label if_double(this), if_tagged(this);
6553 : Branch(Word32NotEqual(representation,
6554 818 : Int32Constant(Representation::kDouble)),
6555 818 : &if_tagged, &if_double);
6556 : BIND(&if_tagged);
6557 : {
6558 409 : var_value->Bind(value);
6559 409 : Goto(&done);
6560 : }
6561 : BIND(&if_double);
6562 : {
6563 818 : var_double_value.Bind(LoadHeapNumberValue(value));
6564 409 : Goto(&rebox_double);
6565 409 : }
6566 : }
6567 : BIND(&rebox_double);
6568 : {
6569 409 : Comment("rebox_double");
6570 1227 : Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());  // Double fields are re-boxed into a fresh HeapNumber for the caller.
6571 409 : var_value->Bind(heap_number);
6572 409 : Goto(&done);
6573 409 : }
6574 : }
6575 : BIND(&if_in_descriptor);
6576 : {
6577 : var_value->Bind(
6578 409 : LoadValueByKeyIndex<DescriptorArray>(descriptors, name_index));
6579 409 : Goto(&done);
6580 : }
6581 : BIND(&done);
6582 :
6583 818 : Comment("] LoadPropertyFromFastObject");
6584 409 : }
6585 :
6586 2145 : void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,  // Reads details and value for an entry already located in a NameDictionary.
6587 : Node* name_index,
6588 : Variable* var_details,
6589 : Variable* var_value) {
6590 2145 : Comment("LoadPropertyFromNameDictionary");
6591 : CSA_ASSERT(this, IsDictionary(dictionary));
6592 :
6593 : var_details->Bind(
6594 2145 : LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
6595 2145 : var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
6596 :
6597 2145 : Comment("] LoadPropertyFromNameDictionary");
6598 2145 : }
6599 :
6600 632 : void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,  // Reads value and details from a global dictionary's PropertyCell, jumping to |if_deleted| on a hole.
6601 : Node* name_index,
6602 : Variable* var_details,
6603 : Variable* var_value,
6604 : Label* if_deleted) {
6605 632 : Comment("[ LoadPropertyFromGlobalDictionary");
6606 : CSA_ASSERT(this, IsDictionary(dictionary));
6607 :
6608 632 : Node* property_cell = LoadFixedArrayElement(dictionary, name_index);  // Global dictionaries store PropertyCells, not the values themselves.
6609 : CSA_ASSERT(this, IsPropertyCell(property_cell));
6610 :
6611 : Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
6612 632 : GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);  // The hole marks a deleted cell.
6613 :
6614 632 : var_value->Bind(value);
6615 :
6616 : Node* details = LoadAndUntagToWord32ObjectField(property_cell,
6617 1264 : PropertyCell::kDetailsOffset);
6618 632 : var_details->Bind(details);
6619 :
6620 632 : Comment("] LoadPropertyFromGlobalDictionary");
6621 632 : }
6622 :
6623 : // |value| is the property backing store's contents, which is either a value
6624 : // or an accessor pair, as specified by |details|.
6625 : // Returns either the original value, or the result of the getter call.
6626 3323 : Node* CodeStubAssembler::CallGetterIfAccessor(Node* value, Node* details,  // Data properties pass through; accessors are invoked (JS getters) or emulated (known AccessorInfos).
6627 : Node* context, Node* receiver,
6628 : Label* if_bailout,
6629 : GetOwnPropertyMode mode) {
6630 3323 : VARIABLE(var_value, MachineRepresentation::kTagged, value);
6631 3323 : Label done(this), if_accessor_info(this, Label::kDeferred);
6632 :
6633 : Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
6634 9969 : GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);  // Plain data property: return the stored value unchanged.
6635 :
6636 : // Accessor case.
6637 6646 : GotoIfNot(IsAccessorPair(value), &if_accessor_info);
6638 :
6639 : // AccessorPair case.
6640 : {
6641 3323 : if (mode == kCallJSGetter) {  // kReturnAccessorPair mode returns the pair itself without calling the getter.
6642 : Node* accessor_pair = value;
6643 : Node* getter =
6644 : LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
6645 6584 : Node* getter_map = LoadMap(getter);
6646 6584 : Node* instance_type = LoadMapInstanceType(getter_map);
6647 : // FunctionTemplateInfo getters are not supported yet.
6648 : GotoIf(InstanceTypeEqual(instance_type, FUNCTION_TEMPLATE_INFO_TYPE),
6649 6584 : if_bailout);
6650 :
6651 : // Return undefined if the {getter} is not callable.
6652 3292 : var_value.Bind(UndefinedConstant());
6653 6584 : GotoIfNot(IsCallableMap(getter_map), &done);
6654 :
6655 : // Call the accessor.
6656 3292 : Callable callable = CodeFactory::Call(isolate());
6657 3292 : Node* result = CallJS(callable, context, getter, receiver);
6658 3292 : var_value.Bind(result);
6659 : }
6660 3323 : Goto(&done);
6661 : }
6662 :
6663 : // AccessorInfo case.
6664 : BIND(&if_accessor_info);
6665 : {
6666 : Node* accessor_info = value;
6667 : CSA_ASSERT(this, IsAccessorInfo(value));
6668 : CSA_ASSERT(this, TaggedIsNotSmi(receiver));
6669 3323 : Label if_array(this), if_function(this), if_value(this);
6670 :
6671 : // Dispatch based on {receiver} instance type.
6672 6646 : Node* receiver_map = LoadMap(receiver);
6673 6646 : Node* receiver_instance_type = LoadMapInstanceType(receiver_map);
6674 3323 : GotoIf(IsJSArrayInstanceType(receiver_instance_type), &if_array);
6675 3323 : GotoIf(IsJSFunctionInstanceType(receiver_instance_type), &if_function);
6676 : Branch(IsJSValueInstanceType(receiver_instance_type), &if_value,
6677 3323 : if_bailout);
6678 :
6679 : // JSArray AccessorInfo case.
6680 : BIND(&if_array);
6681 : {
6682 : // We only deal with the "length" accessor on JSArray.
6683 : GotoIfNot(IsLengthString(
6684 3323 : LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
6685 6646 : if_bailout);
6686 6646 : var_value.Bind(LoadJSArrayLength(receiver));
6687 3323 : Goto(&done);
6688 : }
6689 :
6690 : // JSFunction AccessorInfo case.
6691 : BIND(&if_function);
6692 : {
6693 : // We only deal with the "prototype" accessor on JSFunction here.
6694 : GotoIfNot(IsPrototypeString(
6695 3323 : LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
6696 6646 : if_bailout);
6697 :
6698 : // if (!(has_prototype_slot() && !has_non_instance_prototype())) use
6699 : // generic property loading mechanism.
6700 : int has_prototype_slot_mask = 1 << Map::kHasPrototypeSlot;
6701 : int has_non_instance_prototype_mask = 1 << Map::kHasNonInstancePrototype;
6702 : GotoIfNot(
6703 3323 : Word32Equal(Word32And(LoadMapBitField(receiver_map),
6704 : Int32Constant(has_prototype_slot_mask |
6705 13292 : has_non_instance_prototype_mask)),
6706 13292 : Int32Constant(has_prototype_slot_mask)),
6707 6646 : if_bailout);
6708 3323 : var_value.Bind(LoadJSFunctionPrototype(receiver, if_bailout));
6709 3323 : Goto(&done);
6710 : }
6711 :
6712 : // JSValue AccessorInfo case.
6713 : BIND(&if_value);
6714 : {
6715 : // We only deal with the "length" accessor on JSValue string wrappers.
6716 : GotoIfNot(IsLengthString(
6717 3323 : LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
6718 6646 : if_bailout);
6719 : Node* receiver_value = LoadJSValueValue(receiver);
6720 6646 : GotoIfNot(TaggedIsNotSmi(receiver_value), if_bailout);
6721 6646 : GotoIfNot(IsString(receiver_value), if_bailout);
6722 3323 : var_value.Bind(LoadStringLength(receiver_value));
6723 3323 : Goto(&done);
6724 3323 : }
6725 : }
6726 :
6727 : BIND(&done);
6728 6646 : return var_value.value();
6729 : }
6730 :
6731 99 : void CodeStubAssembler::TryGetOwnProperty(  // Convenience overload: delegates without requesting details or the raw (pre-getter) value.
6732 : Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
6733 : Node* unique_name, Label* if_found_value, Variable* var_value,
6734 : Label* if_not_found, Label* if_bailout) {
6735 : TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
6736 : if_found_value, var_value, nullptr, nullptr, if_not_found,
6737 99 : if_bailout);
6738 99 : }
6739 :
6740 223 : void CodeStubAssembler::TryGetOwnProperty(  // Full own-property load: locate the property, load its value, then invoke any getter.
6741 : Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
6742 : Node* unique_name, Label* if_found_value, Variable* var_value,
6743 : Variable* var_details, Variable* var_raw_value, Label* if_not_found,
6744 : Label* if_bailout, GetOwnPropertyMode mode) {
6745 : DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
6746 223 : Comment("TryGetOwnProperty");
6747 :
6748 223 : VARIABLE(var_meta_storage, MachineRepresentation::kTagged);
6749 446 : VARIABLE(var_entry, MachineType::PointerRepresentation());
6750 :
6751 223 : Label if_found_fast(this), if_found_dict(this), if_found_global(this);
6752 :
6753 446 : VARIABLE(local_var_details, MachineRepresentation::kWord32);
6754 223 : if (!var_details) {  // Callers may not care about details; use a scratch variable then.
6755 : var_details = &local_var_details;
6756 : }
6757 223 : Variable* vars[] = {var_value, var_details};
6758 446 : Label if_found(this, 2, vars);
6759 :
6760 : TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
6761 : &if_found_dict, &if_found_global, &var_meta_storage,
6762 223 : &var_entry, if_not_found, if_bailout);
6763 : BIND(&if_found_fast);
6764 : {
6765 223 : Node* descriptors = var_meta_storage.value();
6766 223 : Node* name_index = var_entry.value();
6767 :
6768 : LoadPropertyFromFastObject(object, map, descriptors, name_index,
6769 223 : var_details, var_value);
6770 223 : Goto(&if_found);
6771 : }
6772 : BIND(&if_found_dict);
6773 : {
6774 223 : Node* dictionary = var_meta_storage.value();
6775 223 : Node* entry = var_entry.value();
6776 223 : LoadPropertyFromNameDictionary(dictionary, entry, var_details, var_value);
6777 223 : Goto(&if_found);
6778 : }
6779 : BIND(&if_found_global);
6780 : {
6781 223 : Node* dictionary = var_meta_storage.value();
6782 223 : Node* entry = var_entry.value();
6783 :
6784 : LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
6785 223 : if_not_found);
6786 223 : Goto(&if_found);
6787 : }
6788 : // Here we have details and value which could be an accessor.
6789 : BIND(&if_found);
6790 : {
6791 : // TODO(ishell): Execute C++ accessor in case of accessor info
6792 223 : if (var_raw_value) {  // Expose the pre-getter value when the caller asked for it.
6793 124 : var_raw_value->Bind(var_value->value());
6794 : }
6795 : Node* value = CallGetterIfAccessor(var_value->value(), var_details->value(),
6796 223 : context, receiver, if_bailout, mode);
6797 223 : var_value->Bind(value);
6798 223 : Goto(if_found_value);
6799 223 : }
6800 223 : }
6801 :
6802 409 : void CodeStubAssembler::TryLookupElement(Node* object, Node* map,  // Indexed own-element lookup, dispatched on the receiver's elements kind.
6803 : Node* instance_type,
6804 : Node* intptr_index, Label* if_found,
6805 : Label* if_absent, Label* if_not_found,
6806 : Label* if_bailout) {
6807 : // Handle special objects in runtime.
6808 : GotoIf(Int32LessThanOrEqual(instance_type,
6809 818 : Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
6810 818 : if_bailout);
6811 :
6812 818 : Node* elements_kind = LoadMapElementsKind(map);
6813 :
6814 : // TODO(verwaest): Support other elements kinds as well.
6815 409 : Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
6816 409 : if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
6817 409 : if_typedarray(this);
6818 : // clang-format off
6819 : int32_t values[] = {
6820 : // Handled by {if_isobjectorsmi}.
6821 : PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS,
6822 : HOLEY_ELEMENTS,
6823 : // Handled by {if_isdouble}.
6824 : PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
6825 : // Handled by {if_isdictionary}.
6826 : DICTIONARY_ELEMENTS,
6827 : // Handled by {if_isfaststringwrapper}.
6828 : FAST_STRING_WRAPPER_ELEMENTS,
6829 : // Handled by {if_isslowstringwrapper}.
6830 : SLOW_STRING_WRAPPER_ELEMENTS,
6831 : // Handled by {if_not_found}.
6832 : NO_ELEMENTS,
6833 : // Handled by {if_typed_array}.
6834 : UINT8_ELEMENTS,
6835 : INT8_ELEMENTS,
6836 : UINT16_ELEMENTS,
6837 : INT16_ELEMENTS,
6838 : UINT32_ELEMENTS,
6839 : INT32_ELEMENTS,
6840 : FLOAT32_ELEMENTS,
6841 : FLOAT64_ELEMENTS,
6842 : UINT8_CLAMPED_ELEMENTS,
6843 409 : };
6844 : Label* labels[] = {
6845 : &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
6846 : &if_isobjectorsmi,
6847 : &if_isdouble, &if_isdouble,
6848 : &if_isdictionary,
6849 : &if_isfaststringwrapper,
6850 : &if_isslowstringwrapper,
6851 : if_not_found,
6852 : &if_typedarray,
6853 : &if_typedarray,
6854 : &if_typedarray,
6855 : &if_typedarray,
6856 : &if_typedarray,
6857 : &if_typedarray,
6858 : &if_typedarray,
6859 : &if_typedarray,
6860 : &if_typedarray,
6861 409 : };
6862 : // clang-format on
6863 : STATIC_ASSERT(arraysize(values) == arraysize(labels));  // The two tables must stay in lock-step.
6864 409 : Switch(elements_kind, if_bailout, values, labels, arraysize(values));
6865 :
6866 : BIND(&if_isobjectorsmi);
6867 : {
6868 818 : Node* elements = LoadElements(object);
6869 : Node* length = LoadAndUntagFixedArrayBaseLength(elements);
6870 :
6871 818 : GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);  // Unsigned compare also routes negative indices to the OOB path.
6872 :
6873 409 : Node* element = LoadFixedArrayElement(elements, intptr_index);
6874 : Node* the_hole = TheHoleConstant();
6875 818 : Branch(WordEqual(element, the_hole), if_not_found, if_found);
6876 : }
6877 : BIND(&if_isdouble);
6878 : {
6879 818 : Node* elements = LoadElements(object);
6880 : Node* length = LoadAndUntagFixedArrayBaseLength(elements);
6881 :
6882 818 : GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
6883 :
6884 : // Check if the element is a double hole, but don't load it.
6885 : LoadFixedDoubleArrayElement(elements, intptr_index, MachineType::None(), 0,
6886 409 : INTPTR_PARAMETERS, if_not_found);
6887 409 : Goto(if_found);
6888 : }
6889 : BIND(&if_isdictionary);
6890 : {
6891 : // Negative keys must be converted to property names.
6892 1227 : GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
6893 :
6894 409 : VARIABLE(var_entry, MachineType::PointerRepresentation());
6895 818 : Node* elements = LoadElements(object);
6896 : NumberDictionaryLookup<SeededNumberDictionary>(
6897 409 : elements, intptr_index, if_found, &var_entry, if_not_found);
6898 : }
6899 : BIND(&if_isfaststringwrapper);
6900 : {
6901 : CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
6902 : Node* string = LoadJSValueValue(object);
6903 : CSA_ASSERT(this, IsString(string));
6904 : Node* length = LoadStringLength(string);
6905 1227 : GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);  // In-range string indices are found; otherwise fall through to the elements store.
6906 409 : Goto(&if_isobjectorsmi);
6907 : }
6908 : BIND(&if_isslowstringwrapper);
6909 : {
6910 : CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
6911 : Node* string = LoadJSValueValue(object);
6912 : CSA_ASSERT(this, IsString(string));
6913 : Node* length = LoadStringLength(string);
6914 1227 : GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
6915 409 : Goto(&if_isdictionary);
6916 : }
6917 : BIND(&if_typedarray);
6918 : {
6919 : Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
6920 818 : GotoIf(IsDetachedBuffer(buffer), if_absent);  // Detached typed arrays report elements as absent, not as a bailout.
6921 :
6922 : Node* length = TryToIntptr(
6923 409 : LoadObjectField(object, JSTypedArray::kLengthOffset), if_bailout);
6924 818 : Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
6925 : }
6926 : BIND(&if_oob);
6927 : {
6928 : // Positive OOB indices mean "not found", negative indices must be
6929 : // converted to property names.
6930 1227 : GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
6931 409 : Goto(if_not_found);
6932 409 : }
6933 409 : }
6934 :
6935 : // Instantiate template methods to workaround GCC compilation issue.
6936 : template void CodeStubAssembler::NumberDictionaryLookup<SeededNumberDictionary>(  // Explicit instantiations so other translation units can link against these.
6937 : Node*, Node*, Label*, Variable*, Label*);
6938 : template void CodeStubAssembler::NumberDictionaryLookup<
6939 : UnseededNumberDictionary>(Node*, Node*, Label*, Variable*, Label*);
6940 :
6941 403 : void CodeStubAssembler::TryPrototypeChainLookup(  // Walks the prototype chain applying a caller-supplied lookup at every holder (named or indexed).
6942 : Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
6943 : const LookupInHolder& lookup_element_in_holder, Label* if_end,
6944 : Label* if_bailout, Label* if_proxy) {
6945 : // Ensure receiver is JSReceiver, otherwise bailout.
6946 403 : Label if_objectisnotsmi(this);
6947 806 : Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
6948 : BIND(&if_objectisnotsmi);
6949 :
6950 806 : Node* map = LoadMap(receiver);
6951 806 : Node* instance_type = LoadMapInstanceType(map);
6952 : {
6953 : Label if_objectisreceiver(this);
6954 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
6955 : STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
6956 : Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
6957 806 : if_bailout);
6958 : BIND(&if_objectisreceiver);
6959 :
6960 403 : if (if_proxy) {  // Proxies get their own exit only when the caller supplied a label.
6961 744 : GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
6962 403 : }
6963 : }
6964 :
6965 806 : VARIABLE(var_index, MachineType::PointerRepresentation());
6966 806 : VARIABLE(var_unique, MachineRepresentation::kTagged);
6967 :
6968 403 : Label if_keyisindex(this), if_iskeyunique(this);
6969 : TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,  // Classify the key once: array index vs. unique name.
6970 403 : if_bailout);
6971 :
6972 : BIND(&if_iskeyunique);
6973 : {
6974 403 : VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
6975 806 : VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
6976 806 : VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
6977 : instance_type);
6978 :
6979 : Variable* merged_variables[] = {&var_holder, &var_holder_map,
6980 403 : &var_holder_instance_type};
6981 806 : Label loop(this, arraysize(merged_variables), merged_variables);
6982 403 : Goto(&loop);
6983 : BIND(&loop);
6984 : {
6985 403 : Node* holder_map = var_holder_map.value();
6986 403 : Node* holder_instance_type = var_holder_instance_type.value();
6987 :
6988 : Label next_proto(this);
6989 : lookup_property_in_holder(receiver, var_holder.value(), holder_map,  // Callback decides found/bailout; falling through means "keep walking".
6990 : holder_instance_type, var_unique.value(),
6991 403 : &next_proto, if_bailout);
6992 : BIND(&next_proto);
6993 :
6994 : // Bailout if it can be an integer indexed exotic case.
6995 : GotoIf(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
6996 806 : if_bailout);
6997 :
6998 806 : Node* proto = LoadMapPrototype(holder_map);
6999 :
7000 403 : Label if_not_null(this);
7001 403 : Branch(WordEqual(proto, NullConstant()), if_end, &if_not_null);  // Null prototype terminates the chain.
7002 : BIND(&if_not_null);
7003 :
7004 806 : Node* map = LoadMap(proto);
7005 806 : Node* instance_type = LoadMapInstanceType(map);
7006 :
7007 403 : var_holder.Bind(proto);
7008 403 : var_holder_map.Bind(map);
7009 403 : var_holder_instance_type.Bind(instance_type);
7010 806 : Goto(&loop);
7011 403 : }
7012 : }
7013 : BIND(&if_keyisindex);
7014 : {
7015 403 : VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
7016 806 : VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
7017 806 : VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
7018 : instance_type);
7019 :
7020 : Variable* merged_variables[] = {&var_holder, &var_holder_map,
7021 403 : &var_holder_instance_type};
7022 806 : Label loop(this, arraysize(merged_variables), merged_variables);
7023 403 : Goto(&loop);
7024 : BIND(&loop);
7025 : {
7026 : Label next_proto(this);
7027 : lookup_element_in_holder(receiver, var_holder.value(),
7028 : var_holder_map.value(),
7029 : var_holder_instance_type.value(),
7030 403 : var_index.value(), &next_proto, if_bailout);
7031 : BIND(&next_proto);
7032 :
7033 1209 : Node* proto = LoadMapPrototype(var_holder_map.value());
7034 :
7035 403 : Label if_not_null(this);
7036 403 : Branch(WordEqual(proto, NullConstant()), if_end, &if_not_null);
7037 : BIND(&if_not_null);
7038 :
7039 806 : Node* map = LoadMap(proto);
7040 806 : Node* instance_type = LoadMapInstanceType(map);
7041 :
7042 403 : var_holder.Bind(proto);
7043 403 : var_holder_map.Bind(map);
7044 403 : var_holder_instance_type.Bind(instance_type);
7045 806 : Goto(&loop);
7046 403 : }
7047 403 : }
7048 403 : }
7049 :
7050 93 : Node* CodeStubAssembler::HasInPrototypeChain(Node* context, Node* object,  // Returns true/false (tagged booleans) for whether |prototype| occurs in |object|'s chain; falls back to runtime for special receivers.
7051 : Node* prototype) {
7052 : CSA_ASSERT(this, TaggedIsNotSmi(object));
7053 93 : VARIABLE(var_result, MachineRepresentation::kTagged);
7054 93 : Label return_false(this), return_true(this),
7055 93 : return_runtime(this, Label::kDeferred), return_result(this);
7056 :
7057 : // Loop through the prototype chain looking for the {prototype}.
7058 279 : VARIABLE(var_object_map, MachineRepresentation::kTagged, LoadMap(object));
7059 93 : Label loop(this, &var_object_map);
7060 93 : Goto(&loop);
7061 : BIND(&loop);
7062 : {
7063 : // Check if we can determine the prototype directly from the {object_map}.
7064 93 : Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
7065 93 : Node* object_map = var_object_map.value();
7066 186 : Node* object_instance_type = LoadMapInstanceType(object_map);
7067 : Branch(IsSpecialReceiverInstanceType(object_instance_type),
7068 186 : &if_objectisspecial, &if_objectisdirect);
7069 : BIND(&if_objectisspecial);
7070 : {
7071 : // The {object_map} is a special receiver map or a primitive map, check
7072 : // if we need to use the if_objectisspecial path in the runtime.
7073 : GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
7074 186 : &return_runtime);
7075 186 : Node* object_bitfield = LoadMapBitField(object_map);
7076 : int mask =
7077 : 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
7078 93 : Branch(IsSetWord32(object_bitfield, mask), &return_runtime,  // Interceptors/access checks must be honored by the runtime path.
7079 186 : &if_objectisdirect);
7080 : }
7081 : BIND(&if_objectisdirect);
7082 :
7083 : // Check the current {object} prototype.
7084 186 : Node* object_prototype = LoadMapPrototype(object_map);
7085 186 : GotoIf(IsNull(object_prototype), &return_false);  // End of chain without a match.
7086 186 : GotoIf(WordEqual(object_prototype, prototype), &return_true);
7087 :
7088 : // Continue with the prototype.
7089 : CSA_ASSERT(this, TaggedIsNotSmi(object_prototype));
7090 186 : var_object_map.Bind(LoadMap(object_prototype));
7091 186 : Goto(&loop);
7092 : }
7093 :
7094 : BIND(&return_true);
7095 93 : var_result.Bind(TrueConstant());
7096 93 : Goto(&return_result);
7097 :
7098 : BIND(&return_false);
7099 93 : var_result.Bind(FalseConstant());
7100 93 : Goto(&return_result);
7101 :
7102 : BIND(&return_runtime);
7103 : {
7104 : // Fallback to the runtime implementation.
7105 : var_result.Bind(
7106 93 : CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
7107 : }
7108 93 : Goto(&return_result);
7109 :
7110 : BIND(&return_result);
7111 186 : return var_result.value();
7112 : }
7113 :
// Implements the OrdinaryHasInstance operation for the fast case where
// {callable} is a JSFunction constructor with an ordinary "prototype";
// any other shape of {callable} (or a Smi {object}) falls back to the
// Runtime::kOrdinaryHasInstance implementation. Returns a tagged boolean.
Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
                                             Node* object) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_runtime(this, Label::kDeferred), return_result(this);

  // Goto runtime if {object} is a Smi.
  GotoIf(TaggedIsSmi(object), &return_runtime);

  // Goto runtime if {callable} is a Smi.
  GotoIf(TaggedIsSmi(callable), &return_runtime);

  // Load map of {callable}.
  Node* callable_map = LoadMap(callable);

  // Goto runtime if {callable} is not a JSFunction.
  Node* callable_instance_type = LoadMapInstanceType(callable_map);
  GotoIfNot(InstanceTypeEqual(callable_instance_type, JS_FUNCTION_TYPE),
            &return_runtime);

  // Goto runtime if {callable} is not a constructor or has
  // a non-instance "prototype".
  // Both bits are tested at once: the masked bitfield must equal exactly
  // kIsConstructor (set) with kHasNonInstancePrototype clear.
  Node* callable_bitfield = LoadMapBitField(callable_map);
  GotoIfNot(
      Word32Equal(Word32And(callable_bitfield,
                            Int32Constant((1 << Map::kHasNonInstancePrototype) |
                                          (1 << Map::kIsConstructor))),
                  Int32Constant(1 << Map::kIsConstructor)),
      &return_runtime);

  // Get the "prototype" (or initial map) of the {callable}.
  Node* callable_prototype =
      LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
  {
    Label callable_prototype_valid(this);
    VARIABLE(var_callable_prototype, MachineRepresentation::kTagged,
             callable_prototype);

    // Resolve the "prototype" if the {callable} has an initial map. Afterwards
    // the {callable_prototype} will be either the JSReceiver prototype object
    // or the hole value, which means that no instances of the {callable} were
    // created so far and hence we should return false.
    Node* callable_prototype_instance_type =
        LoadInstanceType(callable_prototype);
    GotoIfNot(InstanceTypeEqual(callable_prototype_instance_type, MAP_TYPE),
              &callable_prototype_valid);
    var_callable_prototype.Bind(
        LoadObjectField(callable_prototype, Map::kPrototypeOffset));
    Goto(&callable_prototype_valid);
    BIND(&callable_prototype_valid);
    callable_prototype = var_callable_prototype.value();
  }

  // Loop through the prototype chain looking for the {callable} prototype.
  var_result.Bind(HasInPrototypeChain(context, object, callable_prototype));
  Goto(&return_result);

  BIND(&return_runtime);
  {
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
7181 :
7182 181342 : Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
7183 : ElementsKind kind,
7184 : ParameterMode mode,
7185 : int base_size) {
7186 181342 : int element_size_shift = ElementsKindToShiftSize(kind);
7187 181342 : int element_size = 1 << element_size_shift;
7188 : int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
7189 181342 : intptr_t index = 0;
7190 : bool constant_index = false;
7191 181342 : if (mode == SMI_PARAMETERS) {
7192 17172 : element_size_shift -= kSmiShiftBits;
7193 : Smi* smi_index;
7194 17172 : constant_index = ToSmiConstant(index_node, smi_index);
7195 21272 : if (constant_index) index = smi_index->value();
7196 34344 : index_node = BitcastTaggedToWord(index_node);
7197 : } else {
7198 : DCHECK(mode == INTPTR_PARAMETERS);
7199 164170 : constant_index = ToIntPtrConstant(index_node, index);
7200 : }
7201 181342 : if (constant_index) {
7202 129926 : return IntPtrConstant(base_size + element_size * index);
7203 : }
7204 :
7205 : Node* shifted_index =
7206 : (element_size_shift == 0)
7207 : ? index_node
7208 : : ((element_size_shift > 0)
7209 313867 : ? WordShl(index_node, IntPtrConstant(element_size_shift))
7210 362209 : : WordShr(index_node, IntPtrConstant(-element_size_shift)));
7211 349137 : return IntPtrAdd(IntPtrConstant(base_size), shifted_index);
7212 : }
7213 :
7214 186 : Node* CodeStubAssembler::LoadFeedbackVector(Node* closure) {
7215 : Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
7216 186 : return LoadObjectField(cell, Cell::kValueOffset);
7217 : }
7218 :
7219 186 : Node* CodeStubAssembler::LoadFeedbackVectorForStub() {
7220 : Node* function =
7221 186 : LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset);
7222 186 : return LoadFeedbackVector(function);
7223 : }
7224 :
7225 4650 : void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* feedback_vector,
7226 : Node* slot_id) {
7227 : // This method is used for binary op and compare feedback. These
7228 : // vector nodes are initialized with a smi 0, so we can simply OR
7229 : // our new feedback in place.
7230 4650 : Node* previous_feedback = LoadFeedbackVectorSlot(feedback_vector, slot_id);
7231 9300 : Node* combined_feedback = SmiOr(previous_feedback, feedback);
7232 4650 : Label end(this);
7233 :
7234 9300 : GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
7235 : {
7236 : StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
7237 4650 : SKIP_WRITE_BARRIER);
7238 : // Reset profiler ticks.
7239 : StoreObjectFieldNoWriteBarrier(
7240 9300 : feedback_vector, FeedbackVector::kProfilerTicksOffset, SmiConstant(0));
7241 4650 : Goto(&end);
7242 : }
7243 :
7244 4650 : BIND(&end);
7245 4650 : }
7246 :
7247 3906 : void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
7248 : Node* feedback) {
7249 11718 : existing_feedback->Bind(SmiOr(existing_feedback->value(), feedback));
7250 3906 : }
7251 :
7252 217 : void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
7253 : Label* if_protector) {
7254 : // This list must be kept in sync with LookupIterator::UpdateProtector!
7255 : // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
7256 217 : GotoIf(WordEqual(name, LoadRoot(Heap::kconstructor_stringRootIndex)),
7257 217 : if_protector);
7258 217 : GotoIf(WordEqual(name, LoadRoot(Heap::kiterator_symbolRootIndex)),
7259 217 : if_protector);
7260 217 : GotoIf(WordEqual(name, LoadRoot(Heap::kspecies_symbolRootIndex)),
7261 217 : if_protector);
7262 217 : GotoIf(WordEqual(name, LoadRoot(Heap::kis_concat_spreadable_symbolRootIndex)),
7263 217 : if_protector);
7264 : // Fall through if no case matched.
7265 217 : }
7266 :
7267 341 : Node* CodeStubAssembler::LoadReceiverMap(Node* receiver) {
7268 341 : return Select(TaggedIsSmi(receiver),
7269 341 : [=] { return LoadRoot(Heap::kHeapNumberMapRootIndex); },
7270 682 : [=] { return LoadMap(receiver); },
7271 1364 : MachineRepresentation::kTagged);
7272 : }
7273 :
7274 6219 : Node* CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
7275 6219 : VARIABLE(var_intptr_key, MachineType::PointerRepresentation());
7276 6219 : Label done(this, &var_intptr_key), key_is_smi(this);
7277 12438 : GotoIf(TaggedIsSmi(key), &key_is_smi);
7278 : // Try to convert a heap number to a Smi.
7279 12438 : GotoIfNot(IsHeapNumber(key), miss);
7280 : {
7281 12438 : Node* value = LoadHeapNumberValue(key);
7282 12438 : Node* int_value = RoundFloat64ToInt32(value);
7283 18657 : GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
7284 12438 : var_intptr_key.Bind(ChangeInt32ToIntPtr(int_value));
7285 6219 : Goto(&done);
7286 : }
7287 :
7288 : BIND(&key_is_smi);
7289 : {
7290 12438 : var_intptr_key.Bind(SmiUntag(key));
7291 6219 : Goto(&done);
7292 : }
7293 :
7294 : BIND(&done);
7295 12438 : return var_intptr_key.value();
7296 : }
7297 :
// Emits a keyed load (when {value} == nullptr) or a keyed store (otherwise)
// on a sloppy-arguments-backed {receiver}. Jumps to {bailout} whenever the
// fast path cannot handle the access. Returns the loaded value for loads,
// and {value} itself for stores.
Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
                                                  Node* value, Label* bailout) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  // A null {value} signals a load; otherwise we are performing a store.
  bool is_load = value == nullptr;

  // Only Smi keys in the non-negative range are handled here.
  GotoIfNot(TaggedIsSmi(key), bailout);
  key = SmiUntag(key);
  GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);

  Node* elements = LoadElements(receiver);
  Node* elements_length = LoadAndUntagFixedArrayBaseLength(elements);

  VARIABLE(var_result, MachineRepresentation::kTagged);
  if (!is_load) {
    var_result.Bind(value);
  }
  Label if_mapped(this), if_unmapped(this), end(this, &var_result);
  Node* intptr_two = IntPtrConstant(2);
  Node* adjusted_length = IntPtrSub(elements_length, intptr_two);

  GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);

  Node* mapped_index =
      LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
  Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);

  BIND(&if_mapped);
  {
    CSA_ASSERT(this, TaggedIsSmi(mapped_index));
    mapped_index = SmiUntag(mapped_index);
    Node* the_context = LoadFixedArrayElement(elements, 0);
    // Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
    // methods for accessing Context.
    STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
    DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
              FixedArray::OffsetOfElementAt(0));
    if (is_load) {
      Node* result = LoadFixedArrayElement(the_context, mapped_index);
      CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
      var_result.Bind(result);
    } else {
      StoreFixedArrayElement(the_context, mapped_index, value);
    }
    Goto(&end);
  }

  BIND(&if_unmapped);
  {
    Node* backing_store = LoadFixedArrayElement(elements, 1);
    GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()),
           bailout);

    Node* backing_store_length =
        LoadAndUntagFixedArrayBaseLength(backing_store);
    GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);

    // The key falls into unmapped range.
    if (is_load) {
      Node* result = LoadFixedArrayElement(backing_store, key);
      GotoIf(WordEqual(result, TheHoleConstant()), bailout);
      var_result.Bind(result);
    } else {
      StoreFixedArrayElement(backing_store, key, value);
    }
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
7393 :
7394 17462 : Node* CodeStubAssembler::LoadScriptContext(Node* context, int context_index) {
7395 : Node* native_context = LoadNativeContext(context);
7396 : Node* script_context_table =
7397 34924 : LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX);
7398 :
7399 : int offset =
7400 17462 : ScriptContextTable::GetContextOffset(context_index) - kHeapObjectTag;
7401 : return Load(MachineType::AnyTagged(), script_context_table,
7402 34924 : IntPtrConstant(offset));
7403 : }
7404 :
7405 : namespace {
7406 :
7407 : // Converts typed array elements kind to a machine representations.
7408 3200 : MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
7409 3200 : switch (kind) {
7410 : case UINT8_CLAMPED_ELEMENTS:
7411 : case UINT8_ELEMENTS:
7412 : case INT8_ELEMENTS:
7413 : return MachineRepresentation::kWord8;
7414 : case UINT16_ELEMENTS:
7415 : case INT16_ELEMENTS:
7416 392 : return MachineRepresentation::kWord16;
7417 : case UINT32_ELEMENTS:
7418 : case INT32_ELEMENTS:
7419 590 : return MachineRepresentation::kWord32;
7420 : case FLOAT32_ELEMENTS:
7421 153 : return MachineRepresentation::kFloat32;
7422 : case FLOAT64_ELEMENTS:
7423 168 : return MachineRepresentation::kFloat64;
7424 : default:
7425 0 : UNREACHABLE();
7426 : }
7427 : }
7428 :
7429 : } // namespace
7430 :
7431 5060 : void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
7432 : Node* index, Node* value,
7433 : ParameterMode mode) {
7434 5060 : if (IsFixedTypedArrayElementsKind(kind)) {
7435 : if (kind == UINT8_CLAMPED_ELEMENTS) {
7436 : CSA_ASSERT(this,
7437 : Word32Equal(value, Word32And(Int32Constant(0xff), value)));
7438 : }
7439 3200 : Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
7440 3200 : MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
7441 3200 : StoreNoWriteBarrier(rep, elements, offset, value);
7442 8260 : return;
7443 : }
7444 :
7445 : WriteBarrierMode barrier_mode =
7446 1860 : IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
7447 1860 : if (IsDoubleElementsKind(kind)) {
7448 : // Make sure we do not store signalling NaNs into double arrays.
7449 802 : value = Float64SilenceNaN(value);
7450 401 : StoreFixedDoubleArrayElement(elements, index, value, mode);
7451 : } else {
7452 1459 : StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
7453 : }
7454 : }
7455 :
7456 141 : Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
7457 141 : Label done(this);
7458 282 : Node* int32_zero = Int32Constant(0);
7459 282 : Node* int32_255 = Int32Constant(255);
7460 282 : VARIABLE(var_value, MachineRepresentation::kWord32, int32_value);
7461 282 : GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
7462 141 : var_value.Bind(int32_zero);
7463 282 : GotoIf(Int32LessThan(int32_value, int32_zero), &done);
7464 141 : var_value.Bind(int32_255);
7465 141 : Goto(&done);
7466 : BIND(&done);
7467 282 : return var_value.value();
7468 : }
7469 :
7470 141 : Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
7471 141 : Label done(this);
7472 423 : VARIABLE(var_value, MachineRepresentation::kWord32, Int32Constant(0));
7473 423 : GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
7474 282 : var_value.Bind(Int32Constant(255));
7475 423 : GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
7476 : {
7477 282 : Node* rounded_value = Float64RoundToEven(float64_value);
7478 282 : var_value.Bind(TruncateFloat64ToWord32(rounded_value));
7479 141 : Goto(&done);
7480 : }
7481 : BIND(&done);
7482 282 : return var_value.value();
7483 : }
7484 :
// Converts a tagged {input} (Smi, HeapNumber, or Oddball) to the raw machine
// value required for a store into a typed array of {elements_kind}. Any other
// heap object jumps to {bailout}.
Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
    Node* input, ElementsKind elements_kind, Label* bailout) {
  DCHECK(IsFixedTypedArrayElementsKind(elements_kind));

  // Pick the machine representation of the result from the elements kind.
  MachineRepresentation rep;
  switch (elements_kind) {
    case UINT8_ELEMENTS:
    case INT8_ELEMENTS:
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
      rep = MachineRepresentation::kWord32;
      break;
    case FLOAT32_ELEMENTS:
      rep = MachineRepresentation::kFloat32;
      break;
    case FLOAT64_ELEMENTS:
      rep = MachineRepresentation::kFloat64;
      break;
    default:
      UNREACHABLE();
  }

  VARIABLE(var_result, rep);
  Label done(this, &var_result), if_smi(this), if_heapnumber(this);
  GotoIf(TaggedIsSmi(input), &if_smi);
  // We can handle both HeapNumber and Oddball here, since Oddball has the
  // same layout as the HeapNumber for the HeapNumber::value field. This
  // way we can also properly optimize stores of oddballs to typed arrays.
  GotoIf(IsHeapNumber(input), &if_heapnumber);
  Branch(HasInstanceType(input, ODDBALL_TYPE), &if_heapnumber, bailout);

  BIND(&if_heapnumber);
  {
    Node* value = LoadHeapNumberValue(input);
    if (rep == MachineRepresentation::kWord32) {
      if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
        value = Float64ToUint8Clamped(value);
      } else {
        value = TruncateFloat64ToWord32(value);
      }
    } else if (rep == MachineRepresentation::kFloat32) {
      value = TruncateFloat64ToFloat32(value);
    } else {
      // Float64 stores take the heap number's payload as is.
      DCHECK_EQ(MachineRepresentation::kFloat64, rep);
    }
    var_result.Bind(value);
    Goto(&done);
  }

  BIND(&if_smi);
  {
    Node* value = SmiToWord32(input);
    if (rep == MachineRepresentation::kFloat32) {
      value = RoundInt32ToFloat32(value);
    } else if (rep == MachineRepresentation::kFloat64) {
      value = ChangeInt32ToFloat64(value);
    } else {
      DCHECK_EQ(MachineRepresentation::kWord32, rep);
      if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
        value = Int32ToUint8Clamped(value);
      }
    }
    var_result.Bind(value);
    Goto(&done);
  }

  BIND(&done);
  return var_result.value();
}
7557 :
// Emits a keyed element store of {value} at {key} into {object}'s backing
// store, handling typed-array, Smi/object, and double elements kinds. Jumps
// to {bailout} whenever the fast path cannot complete the store (COW
// elements without COW handling, non-intptr key, detached buffer, wrong
// value type, or an out-of-bounds index the {store_mode} does not allow).
void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
                                         bool is_jsarray,
                                         ElementsKind elements_kind,
                                         KeyedAccessStoreMode store_mode,
                                         Label* bailout) {
  CSA_ASSERT(this, Word32BinaryNot(IsJSProxy(object)));
  Node* elements = LoadElements(object);
  if (IsSmiOrObjectElementsKind(elements_kind) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    // Bailout in case of COW elements.
    GotoIf(WordNotEqual(LoadMap(elements),
                        LoadRoot(Heap::kFixedArrayMapRootIndex)),
           bailout);
  }
  // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
  ParameterMode parameter_mode = INTPTR_PARAMETERS;
  key = TryToIntptr(key, bailout);

  if (IsFixedTypedArrayElementsKind(elements_kind)) {
    Label done(this);
    // TODO(ishell): call ToNumber() on value and don't bailout but be careful
    // to call it only once if we decide to bailout because of bounds checks.

    value = PrepareValueForWriteToTypedArray(value, elements_kind, bailout);

    // There must be no allocations between the buffer load and
    // and the actual store to backing store, because GC may decide that
    // the buffer is not alive or move the elements.
    // TODO(ishell): introduce DisallowHeapAllocationCode scope here.

    // Check if buffer has been neutered.
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), bailout);

    // Bounds check.
    Node* length = TaggedToParameter(
        CAST(LoadObjectField(object, JSTypedArray::kLengthOffset)),
        parameter_mode);

    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Skip the store if we write beyond the length.
      GotoIfNot(IntPtrLessThan(key, length), &done);
      // ... but bailout if the key is negative.
    } else {
      DCHECK_EQ(STANDARD_STORE, store_mode);
    }
    // Unsigned comparison catches negative keys as well.
    GotoIfNot(UintPtrLessThan(key, length), bailout);

    // Backing store = external_pointer + base_pointer.
    Node* external_pointer =
        LoadObjectField(elements, FixedTypedArrayBase::kExternalPointerOffset,
                        MachineType::Pointer());
    Node* base_pointer =
        LoadObjectField(elements, FixedTypedArrayBase::kBasePointerOffset);
    Node* backing_store =
        IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer));
    StoreElement(backing_store, elements_kind, key, value, parameter_mode);
    Goto(&done);

    BIND(&done);
    return;
  }
  DCHECK(IsSmiOrObjectElementsKind(elements_kind) ||
         IsDoubleElementsKind(elements_kind));

  // JSArrays track their length separately from the backing store length.
  Node* length = is_jsarray ? LoadJSArrayLength(object)
                            : LoadFixedArrayBaseLength(elements);
  length = TaggedToParameter(length, parameter_mode);

  // In case value is stored into a fast smi array, assure that the value is
  // a smi before manipulating the backing store. Otherwise the backing store
  // may be left in an invalid state.
  if (IsSmiElementsKind(elements_kind)) {
    GotoIfNot(TaggedIsSmi(value), bailout);
  } else if (IsDoubleElementsKind(elements_kind)) {
    value = TryTaggedToFloat64(value, bailout);
  }

  if (IsGrowStoreMode(store_mode)) {
    // May replace {elements} with a grown backing store.
    elements = CheckForCapacityGrow(object, elements, elements_kind, length,
                                    key, parameter_mode, is_jsarray, bailout);
  } else {
    GotoIfNot(UintPtrLessThan(key, length), bailout);

    if ((store_mode == STORE_NO_TRANSITION_HANDLE_COW) &&
        IsSmiOrObjectElementsKind(elements_kind)) {
      // May replace {elements} with a writable (non-COW) copy.
      elements = CopyElementsOnWrite(object, elements, elements_kind, length,
                                     parameter_mode, bailout);
    }
  }
  StoreElement(elements, elements_kind, key, value, parameter_mode);
}
7650 :
// For growing store modes: if {key} is exactly the current {length} (packed
// kinds) or at/past it (holey/dictionary kinds), grows the backing store as
// needed and, for JSArrays, bumps the length to {key} + 1. Returns the
// (possibly reallocated) elements. Jumps to {bailout} when the index is out
// of bounds without qualifying for growth, or when growing fails.
Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
                                              ElementsKind kind, Node* length,
                                              Node* key, ParameterMode mode,
                                              bool is_js_array,
                                              Label* bailout) {
  VARIABLE(checked_elements, MachineRepresentation::kTagged);
  Label grow_case(this), no_grow_case(this), done(this);

  // Holey kinds may grow past the end; packed kinds may only append at the
  // current length.
  Node* condition;
  if (IsHoleyOrDictionaryElementsKind(kind)) {
    condition = UintPtrGreaterThanOrEqual(key, length);
  } else {
    condition = WordEqual(key, length);
  }
  Branch(condition, &grow_case, &no_grow_case);

  BIND(&grow_case);
  {
    Node* current_capacity =
        TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);

    checked_elements.Bind(elements);

    Label fits_capacity(this);
    // No reallocation needed if the key still fits the existing capacity.
    GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
    {
      Node* new_elements = TryGrowElementsCapacity(
          object, elements, kind, key, current_capacity, mode, bailout);

      checked_elements.Bind(new_elements);
      Goto(&fits_capacity);
    }
    BIND(&fits_capacity);

    if (is_js_array) {
      Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
      StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
                                     ParameterToTagged(new_length, mode));
    }
    Goto(&done);
  }

  BIND(&no_grow_case);
  {
    GotoIfNot(UintPtrLessThan(key, length), bailout);
    checked_elements.Bind(elements);
    Goto(&done);
  }

  BIND(&done);
  return checked_elements.value();
}
7703 :
7704 471 : Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
7705 : ElementsKind kind, Node* length,
7706 : ParameterMode mode,
7707 : Label* bailout) {
7708 471 : VARIABLE(new_elements_var, MachineRepresentation::kTagged, elements);
7709 471 : Label done(this);
7710 :
7711 : GotoIfNot(
7712 1413 : WordEqual(LoadMap(elements), LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
7713 471 : &done);
7714 : {
7715 : Node* capacity =
7716 942 : TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
7717 : Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
7718 471 : length, capacity, mode, bailout);
7719 :
7720 471 : new_elements_var.Bind(new_elements);
7721 471 : Goto(&done);
7722 : }
7723 :
7724 : BIND(&done);
7725 942 : return new_elements_var.value();
7726 : }
7727 :
// Transitions {object} from {from_kind} to {to_kind} and installs the new
// {map}. Non-simple transitions migrate the existing backing store via
// GrowElementsCapacity. Jumps to {bailout} if an AllocationMemento is found
// (when transition tracking applies) or if migration fails.
void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
                                               ElementsKind from_kind,
                                               ElementsKind to_kind,
                                               bool is_jsarray,
                                               Label* bailout) {
  DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
  if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
    // Sites tracking this transition must observe it; defer to the runtime.
    TrapAllocationMemento(object, bailout);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    Comment("Non-simple map transition");
    Node* elements = LoadElements(object);

    Label done(this);
    // Nothing to migrate when the object has the canonical empty elements.
    GotoIf(WordEqual(elements, EmptyFixedArrayConstant()), &done);

    // TODO(ishell): Use OptimalParameterMode().
    ParameterMode mode = INTPTR_PARAMETERS;
    Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
    Node* array_length =
        is_jsarray ? SmiUntag(LoadFastJSArrayLength(object)) : elements_length;

    CSA_ASSERT(this, WordNotEqual(elements_length, IntPtrConstant(0)));

    GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
                         elements_length, mode, bailout);
    Goto(&done);
    BIND(&done);
  }

  // Install the target map last, after the backing store is in shape.
  StoreMap(object, map);
}
7761 :
// Jumps to {memento_found} if an AllocationMemento immediately follows
// {object} in new space; falls through otherwise. Takes care never to read
// past the allocation top or across a page boundary.
void CodeStubAssembler::TrapAllocationMemento(Node* object,
                                              Label* memento_found) {
  Comment("[ TrapAllocationMemento");
  Label no_memento_found(this);
  Label top_check(this), map_check(this);

  Node* new_space_top_address = ExternalConstant(
      ExternalReference::new_space_allocation_top_address(isolate()));
  // The memento, if any, sits directly after the JSArray.
  const int kMementoMapOffset = JSArray::kSize;
  const int kMementoLastWordOffset =
      kMementoMapOffset + AllocationMemento::kSize - kPointerSize;

  // Bail out if the object is not in new space.
  Node* object_word = BitcastTaggedToWord(object);
  Node* object_page = PageFromAddress(object_word);
  {
    Node* page_flags = Load(MachineType::IntPtr(), object_page,
                            IntPtrConstant(Page::kFlagsOffset));
    GotoIf(WordEqual(WordAnd(page_flags,
                             IntPtrConstant(MemoryChunk::kIsInNewSpaceMask)),
                     IntPtrConstant(0)),
           &no_memento_found);
  }

  Node* memento_last_word = IntPtrAdd(
      object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
  Node* memento_last_word_page = PageFromAddress(memento_last_word);

  Node* new_space_top = Load(MachineType::Pointer(), new_space_top_address);
  Node* new_space_top_page = PageFromAddress(new_space_top);

  // If the object is in new space, we need to check whether respective
  // potential memento object is on the same page as the current top.
  GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);

  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  Branch(WordEqual(object_page, memento_last_word_page), &map_check,
         &no_memento_found);

  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  BIND(&top_check);
  {
    Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
           &no_memento_found, &map_check);
  }

  // Memento map check.
  BIND(&map_check);
  {
    Node* memento_map = LoadObjectField(object, kMementoMapOffset);
    Branch(
        WordEqual(memento_map, LoadRoot(Heap::kAllocationMementoMapRootIndex)),
        memento_found, &no_memento_found);
  }
  BIND(&no_memento_found);
  Comment("] TrapAllocationMemento");
}
7822 :
7823 2934 : Node* CodeStubAssembler::PageFromAddress(Node* address) {
7824 8802 : return WordAnd(address, IntPtrConstant(~Page::kPageAlignmentMask));
7825 : }
7826 :
7827 217 : Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
7828 : Node* feedback_vector, Node* slot) {
7829 434 : Node* size = IntPtrConstant(AllocationSite::kSize);
7830 217 : Node* site = Allocate(size, CodeStubAssembler::kPretenured);
7831 217 : StoreMapNoWriteBarrier(site, Heap::kAllocationSiteMapRootIndex);
7832 : // Should match AllocationSite::Initialize.
7833 : Node* field = UpdateWord<AllocationSite::ElementsKindBits>(
7834 651 : IntPtrConstant(0), IntPtrConstant(GetInitialFastElementsKind()));
7835 : StoreObjectFieldNoWriteBarrier(
7836 434 : site, AllocationSite::kTransitionInfoOrBoilerplateOffset, SmiTag(field));
7837 :
7838 : // Unlike literals, constructed arrays don't have nested sites
7839 434 : Node* zero = SmiConstant(0);
7840 217 : StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
7841 :
7842 : // Pretenuring calculation field.
7843 : StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
7844 217 : zero);
7845 :
7846 : // Pretenuring memento creation count field.
7847 : StoreObjectFieldNoWriteBarrier(
7848 217 : site, AllocationSite::kPretenureCreateCountOffset, zero);
7849 :
7850 : // Store an empty fixed array for the code dependency.
7851 : StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
7852 217 : Heap::kEmptyFixedArrayRootIndex);
7853 :
7854 : // Link the object to the allocation site list
7855 : Node* site_list = ExternalConstant(
7856 434 : ExternalReference::allocation_sites_list_address(isolate()));
7857 217 : Node* next_site = LoadBufferObject(site_list, 0);
7858 :
7859 : // TODO(mvstanton): This is a store to a weak pointer, which we may want to
7860 : // mark as such in order to skip the write barrier, once we have a unified
7861 : // system for weakness. For now we decided to keep it like this because having
7862 : // an initial write barrier backed store makes this pointer strong until the
7863 : // next GC, and allocation sites are designed to survive several GCs anyway.
7864 217 : StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
7865 217 : StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);
7866 :
7867 : StoreFeedbackVectorSlot(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
7868 217 : CodeStubAssembler::SMI_PARAMETERS);
7869 217 : return site;
7870 : }
7871 :
7872 1116 : Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
7873 : Node* slot,
7874 : Node* value) {
7875 2232 : Node* size = IntPtrConstant(WeakCell::kSize);
7876 1116 : Node* cell = Allocate(size, CodeStubAssembler::kPretenured);
7877 :
7878 : // Initialize the WeakCell.
7879 : DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakCellMapRootIndex));
7880 1116 : StoreMapNoWriteBarrier(cell, Heap::kWeakCellMapRootIndex);
7881 1116 : StoreObjectField(cell, WeakCell::kValueOffset, value);
7882 :
7883 : // Store the WeakCell in the feedback vector.
7884 : StoreFeedbackVectorSlot(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0,
7885 1116 : CodeStubAssembler::SMI_PARAMETERS);
7886 1116 : return cell;
7887 : }
7888 :
// Initializes the in-object fields of a freshly allocated JSObject starting
// at {start_offset}, honoring the map's in-object slack-tracking state:
// - No tracking active: fill all remaining fields with undefined.
// - Tracking active: decrement the map's construction counter, fill used
//   fields with undefined and unused (slack) fields with one-pointer fillers.
// - Counter reaches zero: additionally call Runtime::kFinalizeInstanceSize
//   to shrink the map's instance size.
// NOTE(review): {object} is assumed to be freshly allocated (stores skip
// write barriers elsewhere in this flow) — confirm against callers.
void CodeStubAssembler::HandleSlackTracking(Node* context, Node* object,
                                            Node* initial_map,
                                            int start_offset) {
  // Instance size is stored in words in a byte-sized map field.
  Node* instance_size_words = ChangeUint32ToWord(LoadObjectField(
      initial_map, Map::kInstanceSizeOffset, MachineType::Uint8()));
  Node* instance_size = TimesPointerSize(instance_size_words);

  // Perform in-object slack tracking if requested.
  Node* bit_field3 = LoadMapBitField3(initial_map);
  Label end(this), slack_tracking(this), finalize(this, Label::kDeferred);
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  // A non-zero ConstructionCounter means slack tracking is still in progress.
  GotoIf(IsSetWord32<Map::ConstructionCounter>(bit_field3), &slack_tracking);

  // Initialize remaining fields.
  {
    Comment("no slack tracking");
    InitializeFieldsWithRoot(object, IntPtrConstant(start_offset),
                             instance_size, Heap::kUndefinedValueRootIndex);
    Goto(&end);
  }

  {
    BIND(&slack_tracking);

    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    Comment("update allocation count");
    // The counter occupies the topmost bits of bit_field3, so a plain
    // subtraction of (1 << kShift) decrements it without masking.
    Node* new_bit_field3 = Int32Sub(
        bit_field3, Int32Constant(1 << Map::ConstructionCounter::kShift));
    StoreObjectFieldNoWriteBarrier(initial_map, Map::kBitField3Offset,
                                   new_bit_field3,
                                   MachineRepresentation::kWord32);
    GotoIf(IsClearWord32<Map::ConstructionCounter>(new_bit_field3), &finalize);

    // used_size = instance_size - unused_property_fields * kPointerSize.
    Node* unused_fields = LoadObjectField(
        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
    Node* used_size = IntPtrSub(
        instance_size, TimesPointerSize(ChangeUint32ToWord(unused_fields)));

    Comment("initialize filler fields (no finalize)");
    InitializeFieldsWithRoot(object, used_size, instance_size,
                             Heap::kOnePointerFillerMapRootIndex);

    Comment("initialize undefined fields (no finalize)");
    InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
                             Heap::kUndefinedValueRootIndex);
    Goto(&end);
  }

  {
    // Finalize the instance size.
    BIND(&finalize);

    Node* unused_fields = LoadObjectField(
        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
    Node* used_size = IntPtrSub(
        instance_size, TimesPointerSize(ChangeUint32ToWord(unused_fields)));

    Comment("initialize filler fields (finalize)");
    InitializeFieldsWithRoot(object, used_size, instance_size,
                             Heap::kOnePointerFillerMapRootIndex);

    Comment("initialize undefined fields (finalize)");
    InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
                             Heap::kUndefinedValueRootIndex);

    // Shrink the map's instance size now that tracking has completed.
    CallRuntime(Runtime::kFinalizeInstanceSize, context, initial_map);
    Goto(&end);
  }

  BIND(&end);
}
7961 :
7962 17811 : Node* CodeStubAssembler::BuildFastLoop(
7963 : const CodeStubAssembler::VariableList& vars, Node* start_index,
7964 : Node* end_index, const FastLoopBody& body, int increment,
7965 : ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
7966 : CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
7967 : CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
7968 : MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
7969 : ? MachineType::PointerRepresentation()
7970 17811 : : MachineRepresentation::kTaggedSigned;
7971 17811 : VARIABLE(var, index_rep, start_index);
7972 17811 : VariableList vars_copy(vars, zone());
7973 17811 : vars_copy.Add(&var, zone());
7974 17811 : Label loop(this, vars_copy);
7975 17811 : Label after_loop(this);
7976 : // Introduce an explicit second check of the termination condition before the
7977 : // loop that helps turbofan generate better code. If there's only a single
7978 : // check, then the CodeStubAssembler forces it to be at the beginning of the
7979 : // loop requiring a backwards branch at the end of the loop (it's not possible
7980 : // to force the loop header check at the end of the loop and branch forward to
7981 : // it from the pre-header). The extra branch is slower in the case that the
7982 : // loop actually iterates.
7983 53433 : Branch(WordEqual(var.value(), end_index), &after_loop, &loop);
7984 : BIND(&loop);
7985 : {
7986 17811 : if (advance_mode == IndexAdvanceMode::kPre) {
7987 8058 : Increment(&var, increment, parameter_mode);
7988 : }
7989 17811 : body(var.value());
7990 17811 : if (advance_mode == IndexAdvanceMode::kPost) {
7991 9753 : Increment(&var, increment, parameter_mode);
7992 : }
7993 53433 : Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
7994 : }
7995 : BIND(&after_loop);
7996 35622 : return var.value();
7997 : }
7998 :
7999 5907 : void CodeStubAssembler::BuildFastFixedArrayForEach(
8000 : const CodeStubAssembler::VariableList& vars, Node* fixed_array,
8001 : ElementsKind kind, Node* first_element_inclusive,
8002 : Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
8003 : ParameterMode mode, ForEachDirection direction) {
8004 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
8005 : CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
8006 : CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
8007 : CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(fixed_array, kind),
8008 : IsPropertyArray(fixed_array)));
8009 : int32_t first_val;
8010 5907 : bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
8011 : int32_t last_val;
8012 5907 : bool constent_last = ToInt32Constant(last_element_exclusive, last_val);
8013 5907 : if (constant_first && constent_last) {
8014 496 : int delta = last_val - first_val;
8015 : DCHECK_GE(delta, 0);
8016 496 : if (delta <= kElementLoopUnrollThreshold) {
8017 465 : if (direction == ForEachDirection::kForward) {
8018 0 : for (int i = first_val; i < last_val; ++i) {
8019 0 : Node* index = IntPtrConstant(i);
8020 : Node* offset =
8021 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
8022 0 : FixedArray::kHeaderSize - kHeapObjectTag);
8023 0 : body(fixed_array, offset);
8024 : }
8025 : } else {
8026 2511 : for (int i = last_val - 1; i >= first_val; --i) {
8027 4092 : Node* index = IntPtrConstant(i);
8028 : Node* offset =
8029 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
8030 2046 : FixedArray::kHeaderSize - kHeapObjectTag);
8031 2046 : body(fixed_array, offset);
8032 : }
8033 : }
8034 465 : return;
8035 : }
8036 : }
8037 :
8038 : Node* start =
8039 : ElementOffsetFromIndex(first_element_inclusive, kind, mode,
8040 5442 : FixedArray::kHeaderSize - kHeapObjectTag);
8041 : Node* limit =
8042 : ElementOffsetFromIndex(last_element_exclusive, kind, mode,
8043 5442 : FixedArray::kHeaderSize - kHeapObjectTag);
8044 5442 : if (direction == ForEachDirection::kReverse) std::swap(start, limit);
8045 :
8046 : int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
8047 : BuildFastLoop(
8048 : vars, start, limit,
8049 5442 : [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
8050 : direction == ForEachDirection::kReverse ? -increment : increment,
8051 : INTPTR_PARAMETERS,
8052 : direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
8053 16326 : : IndexAdvanceMode::kPost);
8054 : }
8055 :
8056 1028 : void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
8057 : Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
8058 : GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size, mode),
8059 2056 : doesnt_fit);
8060 1028 : }
8061 :
8062 1395 : void CodeStubAssembler::InitializeFieldsWithRoot(
8063 : Node* object, Node* start_offset, Node* end_offset,
8064 : Heap::RootListIndex root_index) {
8065 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
8066 4185 : start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
8067 4185 : end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
8068 2790 : Node* root_value = LoadRoot(root_index);
8069 : BuildFastLoop(end_offset, start_offset,
8070 : [this, object, root_value](Node* current) {
8071 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
8072 1395 : current, root_value);
8073 : },
8074 : -kPointerSize, INTPTR_PARAMETERS,
8075 2790 : CodeStubAssembler::IndexAdvanceMode::kPre);
8076 1395 : }
8077 :
8078 290 : void CodeStubAssembler::BranchIfNumericRelationalComparison(
8079 : RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true,
8080 : Label* if_false) {
8081 : CSA_SLOW_ASSERT(this, IsNumber(lhs));
8082 : CSA_SLOW_ASSERT(this, IsNumber(rhs));
8083 :
8084 290 : Label end(this);
8085 580 : VARIABLE(result, MachineRepresentation::kTagged);
8086 :
8087 : // Shared entry for floating point comparison.
8088 290 : Label do_fcmp(this);
8089 580 : VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64);
8090 580 : VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64);
8091 :
8092 : // Check if the {lhs} is a Smi or a HeapObject.
8093 290 : Label if_lhsissmi(this), if_lhsisnotsmi(this);
8094 580 : Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
8095 :
8096 : BIND(&if_lhsissmi);
8097 : {
8098 : // Check if {rhs} is a Smi or a HeapObject.
8099 290 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
8100 580 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
8101 :
8102 : BIND(&if_rhsissmi);
8103 : {
8104 : // Both {lhs} and {rhs} are Smi, so just perform a fast Smi comparison.
8105 290 : switch (mode) {
8106 : case kLessThan:
8107 0 : BranchIfSmiLessThan(lhs, rhs, if_true, if_false);
8108 0 : break;
8109 : case kLessThanOrEqual:
8110 0 : BranchIfSmiLessThanOrEqual(lhs, rhs, if_true, if_false);
8111 0 : break;
8112 : case kGreaterThan:
8113 0 : BranchIfSmiLessThan(rhs, lhs, if_true, if_false);
8114 0 : break;
8115 : case kGreaterThanOrEqual:
8116 290 : BranchIfSmiLessThanOrEqual(rhs, lhs, if_true, if_false);
8117 290 : break;
8118 : }
8119 : }
8120 :
8121 : BIND(&if_rhsisnotsmi);
8122 : {
8123 : CSA_ASSERT(this, IsHeapNumber(rhs));
8124 : // Convert the {lhs} and {rhs} to floating point values, and
8125 : // perform a floating point comparison.
8126 580 : var_fcmp_lhs.Bind(SmiToFloat64(lhs));
8127 580 : var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
8128 290 : Goto(&do_fcmp);
8129 290 : }
8130 : }
8131 :
8132 : BIND(&if_lhsisnotsmi);
8133 : {
8134 : CSA_ASSERT(this, IsHeapNumber(lhs));
8135 :
8136 : // Check if {rhs} is a Smi or a HeapObject.
8137 290 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
8138 580 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
8139 :
8140 : BIND(&if_rhsissmi);
8141 : {
8142 : // Convert the {lhs} and {rhs} to floating point values, and
8143 : // perform a floating point comparison.
8144 580 : var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
8145 580 : var_fcmp_rhs.Bind(SmiToFloat64(rhs));
8146 290 : Goto(&do_fcmp);
8147 : }
8148 :
8149 : BIND(&if_rhsisnotsmi);
8150 : {
8151 : CSA_ASSERT(this, IsHeapNumber(rhs));
8152 :
8153 : // Convert the {lhs} and {rhs} to floating point values, and
8154 : // perform a floating point comparison.
8155 580 : var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
8156 580 : var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
8157 290 : Goto(&do_fcmp);
8158 290 : }
8159 : }
8160 :
8161 : BIND(&do_fcmp);
8162 : {
8163 : // Load the {lhs} and {rhs} floating point values.
8164 290 : Node* lhs = var_fcmp_lhs.value();
8165 290 : Node* rhs = var_fcmp_rhs.value();
8166 :
8167 : // Perform a fast floating point comparison.
8168 290 : switch (mode) {
8169 : case kLessThan:
8170 0 : Branch(Float64LessThan(lhs, rhs), if_true, if_false);
8171 0 : break;
8172 : case kLessThanOrEqual:
8173 0 : Branch(Float64LessThanOrEqual(lhs, rhs), if_true, if_false);
8174 0 : break;
8175 : case kGreaterThan:
8176 0 : Branch(Float64GreaterThan(lhs, rhs), if_true, if_false);
8177 0 : break;
8178 : case kGreaterThanOrEqual:
8179 580 : Branch(Float64GreaterThanOrEqual(lhs, rhs), if_true, if_false);
8180 290 : break;
8181 : }
8182 290 : }
8183 290 : }
8184 :
8185 284 : void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(Node* lhs, Node* rhs,
8186 : Label* if_true) {
8187 284 : Label if_false(this);
8188 : BranchIfNumericRelationalComparison(kGreaterThanOrEqual, lhs, rhs, if_true,
8189 284 : &if_false);
8190 284 : BIND(&if_false);
8191 284 : }
8192 :
// Implements the abstract relational comparison {lhs} <mode> {rhs}
// (cf. ES6 section 7.2.13), returning a Boolean Node. Non-number operands
// are converted via ToPrimitive/ToNumber, looping back until both sides are
// Numbers or both are Strings. If {var_type_feedback} is non-null, it is
// updated with CompareOperationFeedback describing the operand types seen.
Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode,
                                              Node* lhs, Node* rhs,
                                              Node* context,
                                              Variable* var_type_feedback) {
  Label return_true(this), return_false(this), end(this);
  VARIABLE(result, MachineRepresentation::kTagged);

  // Shared entry for floating point comparison.
  Label do_fcmp(this);
  VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64);
  VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64);

  // We might need to loop several times due to ToPrimitive and/or ToNumber
  // conversions.
  VARIABLE(var_lhs, MachineRepresentation::kTagged, lhs);
  VARIABLE(var_rhs, MachineRepresentation::kTagged, rhs);
  VariableList loop_variable_list({&var_lhs, &var_rhs}, zone());
  if (var_type_feedback != nullptr) {
    // Initialize the type feedback to None. The current feedback is combined
    // with the previous feedback.
    var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
    loop_variable_list.Add(var_type_feedback, zone());
  }
  Label loop(this, loop_variable_list);
  Goto(&loop);
  BIND(&loop);
  {
    // Load the current {lhs} and {rhs} values.
    lhs = var_lhs.value();
    rhs = var_rhs.value();

    // Check if the {lhs} is a Smi or a HeapObject.
    Label if_lhsissmi(this), if_lhsisnotsmi(this);
    Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);

    BIND(&if_lhsissmi);
    {
      // Check if {rhs} is a Smi or a HeapObject.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsissmi);
      {
        // Both {lhs} and {rhs} are Smi, so just perform a fast Smi comparison.
        if (var_type_feedback != nullptr) {
          CombineFeedback(var_type_feedback,
                          SmiConstant(CompareOperationFeedback::kSignedSmall));
        }
        // Greater-than modes reuse the less-than helpers with swapped
        // operands.
        switch (mode) {
          case kLessThan:
            BranchIfSmiLessThan(lhs, rhs, &return_true, &return_false);
            break;
          case kLessThanOrEqual:
            BranchIfSmiLessThanOrEqual(lhs, rhs, &return_true, &return_false);
            break;
          case kGreaterThan:
            BranchIfSmiLessThan(rhs, lhs, &return_true, &return_false);
            break;
          case kGreaterThanOrEqual:
            BranchIfSmiLessThanOrEqual(rhs, lhs, &return_true, &return_false);
            break;
        }
      }

      BIND(&if_rhsisnotsmi);
      {
        // Check if the {rhs} is a HeapNumber.
        Label if_rhsisnumber(this), if_rhsisnotnumber(this, Label::kDeferred);
        Branch(IsHeapNumber(rhs), &if_rhsisnumber, &if_rhsisnotnumber);

        BIND(&if_rhsisnumber);
        {
          // Convert the {lhs} and {rhs} to floating point values, and
          // perform a floating point comparison.
          if (var_type_feedback != nullptr) {
            CombineFeedback(var_type_feedback,
                            SmiConstant(CompareOperationFeedback::kNumber));
          }
          var_fcmp_lhs.Bind(SmiToFloat64(lhs));
          var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
          Goto(&do_fcmp);
        }

        BIND(&if_rhsisnotnumber);
        {
          // The {rhs} is not a HeapNumber and {lhs} is an Smi.
          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kAny));
          }
          // Convert the {rhs} to a Number; we don't need to perform the
          // dedicated ToPrimitive(rhs, hint Number) operation, as the
          // ToNumber(rhs) will by itself already invoke ToPrimitive with
          // a Number hint.
          var_rhs.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, rhs));
          Goto(&loop);
        }
      }
    }

    BIND(&if_lhsisnotsmi);
    {
      // Load the map of {lhs}.
      Node* lhs_map = LoadMap(lhs);

      // Check if {rhs} is a Smi or a HeapObject.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsissmi);
      {
        // Check if the {lhs} is a HeapNumber.
        Label if_lhsisnumber(this), if_lhsisnotnumber(this, Label::kDeferred);
        Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);

        BIND(&if_lhsisnumber);
        {
          // Convert the {lhs} and {rhs} to floating point values, and
          // perform a floating point comparison.
          if (var_type_feedback != nullptr) {
            CombineFeedback(var_type_feedback,
                            SmiConstant(CompareOperationFeedback::kNumber));
          }
          var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
          var_fcmp_rhs.Bind(SmiToFloat64(rhs));
          Goto(&do_fcmp);
        }

        BIND(&if_lhsisnotnumber);
        {
          // The {lhs} is not a HeapNumber and {rhs} is an Smi.
          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kAny));
          }
          // Convert the {lhs} to a Number; we don't need to perform the
          // dedicated ToPrimitive(lhs, hint Number) operation, as the
          // ToNumber(lhs) will by itself already invoke ToPrimitive with
          // a Number hint.
          var_lhs.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, lhs));
          Goto(&loop);
        }
      }

      BIND(&if_rhsisnotsmi);
      {
        // Load the map of {rhs}.
        Node* rhs_map = LoadMap(rhs);

        // Check if {lhs} is a HeapNumber.
        Label if_lhsisnumber(this), if_lhsisnotnumber(this);
        Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);

        BIND(&if_lhsisnumber);
        {
          // Check if {rhs} is also a HeapNumber.
          Label if_rhsisnumber(this), if_rhsisnotnumber(this, Label::kDeferred);
          Branch(WordEqual(lhs_map, rhs_map), &if_rhsisnumber,
                 &if_rhsisnotnumber);

          BIND(&if_rhsisnumber);
          {
            // Convert the {lhs} and {rhs} to floating point values, and
            // perform a floating point comparison.
            if (var_type_feedback != nullptr) {
              CombineFeedback(var_type_feedback,
                              SmiConstant(CompareOperationFeedback::kNumber));
            }
            var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
            var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
            Goto(&do_fcmp);
          }

          BIND(&if_rhsisnotnumber);
          {
            // The {rhs} is not a HeapNumber and {lhs} is a HeapNumber.
            if (var_type_feedback != nullptr) {
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kAny));
            }
            // Convert the {rhs} to a Number; we don't need to perform
            // dedicated ToPrimitive(rhs, hint Number) operation, as the
            // ToNumber(rhs) will by itself already invoke ToPrimitive with
            // a Number hint.
            var_rhs.Bind(
                CallBuiltin(Builtins::kNonNumberToNumber, context, rhs));
            Goto(&loop);
          }
        }

        BIND(&if_lhsisnotnumber);
        {
          // Load the instance type of {lhs}.
          Node* lhs_instance_type = LoadMapInstanceType(lhs_map);

          // Check if {lhs} is a String.
          Label if_lhsisstring(this), if_lhsisnotstring(this, Label::kDeferred);
          Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
                 &if_lhsisnotstring);

          BIND(&if_lhsisstring);
          {
            // Load the instance type of {rhs}.
            Node* rhs_instance_type = LoadMapInstanceType(rhs_map);

            // Check if {rhs} is also a String.
            Label if_rhsisstring(this, Label::kDeferred),
                if_rhsisnotstring(this, Label::kDeferred);
            Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
                   &if_rhsisnotstring);

            BIND(&if_rhsisstring);
            {
              // Both {lhs} and {rhs} are strings.
              if (var_type_feedback != nullptr) {
                CombineFeedback(var_type_feedback,
                                SmiConstant(CompareOperationFeedback::kString));
              }
              // String comparisons are delegated to dedicated builtins.
              switch (mode) {
                case kLessThan:
                  result.Bind(CallBuiltin(Builtins::kStringLessThan, context,
                                          lhs, rhs));
                  Goto(&end);
                  break;
                case kLessThanOrEqual:
                  result.Bind(CallBuiltin(Builtins::kStringLessThanOrEqual,
                                          context, lhs, rhs));
                  Goto(&end);
                  break;
                case kGreaterThan:
                  result.Bind(CallBuiltin(Builtins::kStringGreaterThan, context,
                                          lhs, rhs));
                  Goto(&end);
                  break;
                case kGreaterThanOrEqual:
                  result.Bind(CallBuiltin(Builtins::kStringGreaterThanOrEqual,
                                          context, lhs, rhs));
                  Goto(&end);
                  break;
              }
            }

            BIND(&if_rhsisnotstring);
            {
              // The {lhs} is a String and {rhs} is not a String.
              if (var_type_feedback != nullptr) {
                var_type_feedback->Bind(
                    SmiConstant(CompareOperationFeedback::kAny));
              }
              // The {lhs} is a String, while {rhs} is neither a Number nor a
              // String, so we need to call ToPrimitive(rhs, hint Number) if
              // {rhs} is a receiver or ToNumber(lhs) and ToNumber(rhs) in the
              // other cases.
              STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
              Label if_rhsisreceiver(this, Label::kDeferred),
                  if_rhsisnotreceiver(this, Label::kDeferred);
              Branch(IsJSReceiverInstanceType(rhs_instance_type),
                     &if_rhsisreceiver, &if_rhsisnotreceiver);

              BIND(&if_rhsisreceiver);
              {
                // Convert {rhs} to a primitive first passing Number hint.
                Callable callable = CodeFactory::NonPrimitiveToPrimitive(
                    isolate(), ToPrimitiveHint::kNumber);
                var_rhs.Bind(CallStub(callable, context, rhs));
                Goto(&loop);
              }

              BIND(&if_rhsisnotreceiver);
              {
                // Convert both {lhs} and {rhs} to Number.
                var_lhs.Bind(CallBuiltin(Builtins::kToNumber, context, lhs));
                var_rhs.Bind(CallBuiltin(Builtins::kToNumber, context, rhs));
                Goto(&loop);
              }
            }
          }

          BIND(&if_lhsisnotstring);
          {
            if (var_type_feedback != nullptr) {
              // The {lhs} is not an Smi, HeapNumber or String and {rhs} is not
              // an Smi: collect NumberOrOddball feedback if {lhs} is an Oddball
              // and {rhs} is either a HeapNumber or Oddball.
              Label collect_any_feedback(this), collect_oddball_feedback(this),
                  collect_feedback_done(this);
              GotoIfNot(InstanceTypeEqual(lhs_instance_type, ODDBALL_TYPE),
                        &collect_any_feedback);

              Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
              GotoIf(InstanceTypeEqual(rhs_instance_type, HEAP_NUMBER_TYPE),
                     &collect_oddball_feedback);
              Branch(InstanceTypeEqual(rhs_instance_type, ODDBALL_TYPE),
                     &collect_oddball_feedback, &collect_any_feedback);

              BIND(&collect_oddball_feedback);
              {
                CombineFeedback(
                    var_type_feedback,
                    SmiConstant(CompareOperationFeedback::kNumberOrOddball));
                Goto(&collect_feedback_done);
              }

              BIND(&collect_any_feedback);
              {
                var_type_feedback->Bind(
                    SmiConstant(CompareOperationFeedback::kAny));
                Goto(&collect_feedback_done);
              }

              BIND(&collect_feedback_done);
            }
            // The {lhs} is neither a Number nor a String, so we need to call
            // ToPrimitive(lhs, hint Number) if {lhs} is a receiver or
            // ToNumber(lhs) and ToNumber(rhs) in the other cases.
            STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
            Label if_lhsisreceiver(this, Label::kDeferred),
                if_lhsisnotreceiver(this, Label::kDeferred);
            Branch(IsJSReceiverInstanceType(lhs_instance_type),
                   &if_lhsisreceiver, &if_lhsisnotreceiver);

            BIND(&if_lhsisreceiver);
            {
              // Convert {lhs} to a primitive first passing Number hint.
              Callable callable = CodeFactory::NonPrimitiveToPrimitive(
                  isolate(), ToPrimitiveHint::kNumber);
              var_lhs.Bind(CallStub(callable, context, lhs));
              Goto(&loop);
            }

            BIND(&if_lhsisnotreceiver);
            {
              // Convert both {lhs} and {rhs} to Number.
              var_lhs.Bind(CallBuiltin(Builtins::kToNumber, context, lhs));
              var_rhs.Bind(CallBuiltin(Builtins::kToNumber, context, rhs));
              Goto(&loop);
            }
          }
        }
      }
    }
  }

  BIND(&do_fcmp);
  {
    // Load the {lhs} and {rhs} floating point values.
    Node* lhs = var_fcmp_lhs.value();
    Node* rhs = var_fcmp_rhs.value();

    // Perform a fast floating point comparison.
    switch (mode) {
      case kLessThan:
        Branch(Float64LessThan(lhs, rhs), &return_true, &return_false);
        break;
      case kLessThanOrEqual:
        Branch(Float64LessThanOrEqual(lhs, rhs), &return_true, &return_false);
        break;
      case kGreaterThan:
        Branch(Float64GreaterThan(lhs, rhs), &return_true, &return_false);
        break;
      case kGreaterThanOrEqual:
        Branch(Float64GreaterThanOrEqual(lhs, rhs), &return_true,
               &return_false);
        break;
    }
  }

  BIND(&return_true);
  {
    result.Bind(BooleanConstant(true));
    Goto(&end);
  }

  BIND(&return_false);
  {
    result.Bind(BooleanConstant(false));
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}
8575 :
8576 558 : Node* CodeStubAssembler::CollectFeedbackForString(Node* instance_type) {
8577 : Node* feedback = SelectSmiConstant(
8578 : Word32Equal(
8579 1116 : Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
8580 2232 : Int32Constant(kInternalizedTag)),
8581 : CompareOperationFeedback::kInternalizedString,
8582 1116 : CompareOperationFeedback::kString);
8583 558 : return feedback;
8584 : }
8585 :
// Handles the "both sides reference the same object" fast path of abstract
// and strict equality: everything compares equal to itself except NaN, so
// only HeapNumbers need a value check. When {var_type_feedback} is non-null,
// CompareOperationFeedback describing {value}'s type is also collected.
void CodeStubAssembler::GenerateEqual_Same(Node* value, Label* if_equal,
                                           Label* if_notequal,
                                           Variable* var_type_feedback) {
  // In case of abstract or strict equality checks, we need additional checks
  // for NaN values because they are not considered equal, even if both the
  // left and the right hand side reference exactly the same value.

  Label if_smi(this), if_heapnumber(this);
  GotoIf(TaggedIsSmi(value), &if_smi);

  Node* value_map = LoadMap(value);
  GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);

  // For non-HeapNumbers, all we do is collect type feedback.
  if (var_type_feedback != nullptr) {
    Node* instance_type = LoadMapInstanceType(value_map);

    Label if_string(this), if_receiver(this), if_symbol(this),
        if_other(this, Label::kDeferred);
    GotoIf(IsStringInstanceType(instance_type), &if_string);
    GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
    Branch(IsSymbolInstanceType(instance_type), &if_symbol, &if_other);

    BIND(&if_string);
    {
      // Distinguishes internalized from other strings for better feedback.
      CombineFeedback(var_type_feedback,
                      CollectFeedbackForString(instance_type));
      Goto(if_equal);
    }

    BIND(&if_symbol);
    {
      CombineFeedback(var_type_feedback,
                      SmiConstant(CompareOperationFeedback::kSymbol));
      Goto(if_equal);
    }

    BIND(&if_receiver);
    {
      CombineFeedback(var_type_feedback,
                      SmiConstant(CompareOperationFeedback::kReceiver));
      Goto(if_equal);
    }

    // TODO(neis): Introduce BigInt CompareOperationFeedback and collect here
    // and elsewhere?

    BIND(&if_other);
    {
      CombineFeedback(var_type_feedback,
                      SmiConstant(CompareOperationFeedback::kAny));
      Goto(if_equal);
    }
  } else {
    Goto(if_equal);
  }

  BIND(&if_heapnumber);
  {
    if (var_type_feedback != nullptr) {
      CombineFeedback(var_type_feedback,
                      SmiConstant(CompareOperationFeedback::kNumber));
    }
    // NaN != NaN, so a self-comparison of a NaN HeapNumber is not equal.
    Node* number_value = LoadHeapNumberValue(value);
    BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
  }

  BIND(&if_smi);
  {
    if (var_type_feedback != nullptr) {
      CombineFeedback(var_type_feedback,
                      SmiConstant(CompareOperationFeedback::kSignedSmall));
    }
    Goto(if_equal);
  }
}
8662 :
8663 : // ES6 section 7.2.12 Abstract Equality Comparison
8664 124 : Node* CodeStubAssembler::Equal(Node* left, Node* right, Node* context,
8665 : Variable* var_type_feedback) {
8666 : // This is a slightly optimized version of Object::Equals. Whenever you
8667 : // change something functionality wise in here, remember to update the
8668 : // Object::Equals method as well.
8669 :
8670 248 : Label if_equal(this), if_notequal(this), do_float_comparison(this),
8671 124 : do_right_stringtonumber(this, Label::kDeferred), end(this);
8672 248 : VARIABLE(result, MachineRepresentation::kTagged);
8673 : TVARIABLE(Float64T, var_left_float);
8674 : TVARIABLE(Float64T, var_right_float);
8675 :
8676 : // We can avoid code duplication by exploiting the fact that abstract equality
8677 : // is symmetric.
8678 124 : Label use_symmetry(this);
8679 :
8680 : // We might need to loop several times due to ToPrimitive and/or ToNumber
8681 : // conversions.
8682 248 : VARIABLE(var_left, MachineRepresentation::kTagged, left);
8683 248 : VARIABLE(var_right, MachineRepresentation::kTagged, right);
8684 124 : VariableList loop_variable_list({&var_left, &var_right}, zone());
8685 124 : if (var_type_feedback != nullptr) {
8686 : // Initialize the type feedback to None. The current feedback is combined
8687 : // with the previous feedback.
8688 186 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
8689 93 : loop_variable_list.Add(var_type_feedback, zone());
8690 : }
8691 124 : Label loop(this, loop_variable_list);
8692 124 : Goto(&loop);
8693 : BIND(&loop);
8694 : {
8695 124 : left = var_left.value();
8696 124 : right = var_right.value();
8697 :
8698 : Label if_notsame(this);
8699 248 : GotoIf(WordNotEqual(left, right), &if_notsame);
8700 : {
8701 : // {left} and {right} reference the exact same value, yet we need special
8702 : // treatment for HeapNumber, as NaN is not equal to NaN.
8703 124 : GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
8704 : }
8705 :
8706 : BIND(&if_notsame);
8707 124 : Label if_left_smi(this), if_left_not_smi(this);
8708 248 : Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
8709 :
8710 : BIND(&if_left_smi);
8711 : {
8712 124 : Label if_right_smi(this), if_right_not_smi(this);
8713 248 : Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
8714 :
8715 : BIND(&if_right_smi);
8716 : {
8717 : // We have already checked for {left} and {right} being the same value,
8718 : // so when we get here they must be different Smis.
8719 124 : if (var_type_feedback != nullptr) {
8720 : CombineFeedback(var_type_feedback,
8721 186 : SmiConstant(CompareOperationFeedback::kSignedSmall));
8722 : }
8723 124 : Goto(&if_notequal);
8724 : }
8725 :
8726 : BIND(&if_right_not_smi);
8727 248 : Node* right_map = LoadMap(right);
8728 124 : Label if_right_heapnumber(this), if_right_boolean(this),
8729 124 : if_right_bigint(this, Label::kDeferred),
8730 124 : if_right_receiver(this, Label::kDeferred);
8731 248 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
8732 : // {left} is Smi and {right} is not HeapNumber or Smi.
8733 124 : if (var_type_feedback != nullptr) {
8734 186 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
8735 : }
8736 248 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
8737 248 : Node* right_type = LoadMapInstanceType(right_map);
8738 248 : GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
8739 124 : GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
8740 : Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
8741 248 : &if_notequal);
8742 :
8743 : BIND(&if_right_heapnumber);
8744 : {
8745 248 : var_left_float = SmiToFloat64(left);
8746 248 : var_right_float = LoadHeapNumberValue(right);
8747 124 : if (var_type_feedback != nullptr) {
8748 : CombineFeedback(var_type_feedback,
8749 186 : SmiConstant(CompareOperationFeedback::kNumber));
8750 : }
8751 124 : Goto(&do_float_comparison);
8752 : }
8753 :
8754 : BIND(&if_right_boolean);
8755 : {
8756 124 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
8757 124 : Goto(&loop);
8758 : }
8759 :
8760 : BIND(&if_right_bigint);
8761 : {
8762 : result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
8763 124 : NoContextConstant(), right, left));
8764 124 : Goto(&end);
8765 : }
8766 :
8767 : BIND(&if_right_receiver);
8768 : {
8769 124 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
8770 124 : var_right.Bind(CallStub(callable, context, right));
8771 124 : Goto(&loop);
8772 124 : }
8773 : }
8774 :
8775 : BIND(&if_left_not_smi);
8776 : {
8777 248 : GotoIf(TaggedIsSmi(right), &use_symmetry);
8778 :
8779 124 : Label if_left_symbol(this), if_left_number(this), if_left_string(this),
8780 124 : if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
8781 124 : if_left_receiver(this);
8782 :
8783 248 : Node* left_map = LoadMap(left);
8784 248 : Node* right_map = LoadMap(right);
8785 248 : Node* left_type = LoadMapInstanceType(left_map);
8786 248 : Node* right_type = LoadMapInstanceType(right_map);
8787 :
8788 248 : GotoIf(Int32LessThan(left_type, Int32Constant(FIRST_NONSTRING_TYPE)),
8789 248 : &if_left_string);
8790 248 : GotoIf(InstanceTypeEqual(left_type, SYMBOL_TYPE), &if_left_symbol);
8791 248 : GotoIf(InstanceTypeEqual(left_type, HEAP_NUMBER_TYPE), &if_left_number);
8792 248 : GotoIf(InstanceTypeEqual(left_type, ODDBALL_TYPE), &if_left_oddball);
8793 248 : GotoIf(InstanceTypeEqual(left_type, BIGINT_TYPE), &if_left_bigint);
8794 124 : Goto(&if_left_receiver);
8795 :
8796 : BIND(&if_left_string);
8797 : {
8798 248 : GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
8799 124 : result.Bind(CallBuiltin(Builtins::kStringEqual, context, left, right));
8800 124 : if (var_type_feedback != nullptr) {
8801 : CombineFeedback(var_type_feedback,
8802 : SmiOr(CollectFeedbackForString(left_type),
8803 372 : CollectFeedbackForString(right_type)));
8804 : }
8805 124 : Goto(&end);
8806 : }
8807 :
8808 : BIND(&if_left_number);
8809 : {
8810 : Label if_right_not_number(this);
8811 248 : GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
8812 :
8813 248 : var_left_float = LoadHeapNumberValue(left);
8814 248 : var_right_float = LoadHeapNumberValue(right);
8815 124 : if (var_type_feedback != nullptr) {
8816 : CombineFeedback(var_type_feedback,
8817 186 : SmiConstant(CompareOperationFeedback::kNumber));
8818 : }
8819 124 : Goto(&do_float_comparison);
8820 :
8821 : BIND(&if_right_not_number);
8822 : {
8823 : Label if_right_boolean(this);
8824 124 : if (var_type_feedback != nullptr) {
8825 : var_type_feedback->Bind(
8826 186 : SmiConstant(CompareOperationFeedback::kAny));
8827 : }
8828 248 : GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
8829 248 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
8830 124 : GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
8831 : Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
8832 248 : &if_notequal);
8833 :
8834 : BIND(&if_right_boolean);
8835 : {
8836 124 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
8837 124 : Goto(&loop);
8838 124 : }
8839 124 : }
8840 : }
8841 :
8842 : BIND(&if_left_bigint);
8843 : {
8844 124 : if (var_type_feedback != nullptr) {
8845 186 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
8846 : }
8847 :
8848 124 : Label if_right_heapnumber(this), if_right_bigint(this),
8849 124 : if_right_string(this), if_right_boolean(this);
8850 248 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
8851 124 : GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
8852 248 : GotoIf(IsStringInstanceType(right_type), &if_right_string);
8853 248 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
8854 : Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
8855 248 : &if_notequal);
8856 :
8857 : BIND(&if_right_heapnumber);
8858 : {
8859 : result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
8860 124 : NoContextConstant(), left, right));
8861 124 : Goto(&end);
8862 : }
8863 :
8864 : BIND(&if_right_bigint);
8865 : {
8866 : result.Bind(CallRuntime(Runtime::kBigIntEqual, NoContextConstant(),
8867 124 : left, right));
8868 124 : Goto(&end);
8869 : }
8870 :
8871 : BIND(&if_right_string);
8872 : {
8873 : result.Bind(CallRuntime(Runtime::kBigIntEqualToString,
8874 124 : NoContextConstant(), left, right));
8875 124 : Goto(&end);
8876 : }
8877 :
8878 : BIND(&if_right_boolean);
8879 : {
8880 124 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
8881 124 : Goto(&loop);
8882 124 : }
8883 : }
8884 :
8885 : BIND(&if_left_oddball);
8886 : {
8887 124 : if (var_type_feedback != nullptr) {
8888 186 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
8889 : }
8890 :
8891 : Label if_left_boolean(this);
8892 248 : GotoIf(IsBooleanMap(left_map), &if_left_boolean);
8893 : // {left} is either Null or Undefined. Check if {right} is
8894 : // undetectable (which includes Null and Undefined).
8895 248 : Branch(IsUndetectableMap(right_map), &if_equal, &if_notequal);
8896 :
8897 : BIND(&if_left_boolean);
8898 : {
8899 : // If {right} is a Boolean too, it must be a different Boolean.
8900 248 : GotoIf(WordEqual(right_map, left_map), &if_notequal);
8901 : // Otherwise, convert {left} to number and try again.
8902 124 : var_left.Bind(LoadObjectField(left, Oddball::kToNumberOffset));
8903 124 : Goto(&loop);
8904 124 : }
8905 : }
8906 :
8907 : BIND(&if_left_symbol);
8908 : {
8909 : Label if_right_receiver(this);
8910 248 : GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
8911 : // {right} is not a JSReceiver and also not the same Symbol as {left},
8912 : // so the result is "not equal".
8913 124 : if (var_type_feedback != nullptr) {
8914 : Label if_right_symbol(this);
8915 93 : GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
8916 186 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
8917 93 : Goto(&if_notequal);
8918 :
8919 : BIND(&if_right_symbol);
8920 : {
8921 : CombineFeedback(var_type_feedback,
8922 186 : SmiConstant(CompareOperationFeedback::kSymbol));
8923 93 : Goto(&if_notequal);
8924 93 : }
8925 : } else {
8926 31 : Goto(&if_notequal);
8927 : }
8928 :
8929 : BIND(&if_right_receiver);
8930 : {
8931 : // {left} is a Primitive and {right} is a JSReceiver, so swapping
8932 : // the order is not observable.
8933 124 : if (var_type_feedback != nullptr) {
8934 : var_type_feedback->Bind(
8935 186 : SmiConstant(CompareOperationFeedback::kAny));
8936 : }
8937 124 : Goto(&use_symmetry);
8938 124 : }
8939 : }
8940 :
8941 : BIND(&if_left_receiver);
8942 : {
8943 : CSA_ASSERT(this, IsJSReceiverInstanceType(left_type));
8944 : Label if_right_not_receiver(this);
8945 248 : GotoIfNot(IsJSReceiverInstanceType(right_type), &if_right_not_receiver);
8946 :
8947 : // {left} and {right} are different JSReceiver references.
8948 124 : if (var_type_feedback != nullptr) {
8949 : CombineFeedback(var_type_feedback,
8950 186 : SmiConstant(CompareOperationFeedback::kReceiver));
8951 : }
8952 124 : Goto(&if_notequal);
8953 :
8954 : BIND(&if_right_not_receiver);
8955 : {
8956 124 : if (var_type_feedback != nullptr) {
8957 : var_type_feedback->Bind(
8958 186 : SmiConstant(CompareOperationFeedback::kAny));
8959 : }
8960 : Label if_right_null_or_undefined(this);
8961 248 : GotoIf(IsUndetectableMap(right_map), &if_right_null_or_undefined);
8962 :
8963 : // {right} is a Primitive; convert {left} to Primitive too.
8964 124 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
8965 124 : var_left.Bind(CallStub(callable, context, left));
8966 124 : Goto(&loop);
8967 :
8968 : BIND(&if_right_null_or_undefined);
8969 372 : Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
8970 124 : }
8971 124 : }
8972 : }
8973 :
8974 : BIND(&do_right_stringtonumber);
8975 : {
8976 124 : var_right.Bind(CallBuiltin(Builtins::kStringToNumber, context, right));
8977 124 : Goto(&loop);
8978 : }
8979 :
8980 : BIND(&use_symmetry);
8981 : {
8982 124 : var_left.Bind(right);
8983 124 : var_right.Bind(left);
8984 124 : Goto(&loop);
8985 124 : }
8986 : }
8987 :
8988 : BIND(&do_float_comparison);
8989 : {
8990 : Branch(Float64Equal(var_left_float, var_right_float), &if_equal,
8991 248 : &if_notequal);
8992 : }
8993 :
8994 : BIND(&if_equal);
8995 : {
8996 124 : result.Bind(TrueConstant());
8997 124 : Goto(&end);
8998 : }
8999 :
9000 : BIND(&if_notequal);
9001 : {
9002 124 : result.Bind(FalseConstant());
9003 124 : Goto(&end);
9004 : }
9005 :
9006 : BIND(&end);
9007 248 : return result.value();
9008 : }
9009 :
9010 217 : Node* CodeStubAssembler::StrictEqual(Node* lhs, Node* rhs,
9011 : Variable* var_type_feedback) {
            : // Emits CSA code computing the JavaScript strict-equality (===)
            : // comparison of {lhs} and {rhs}; returns a tagged True/False Node.
            : // If {var_type_feedback} is non-null it is bound to a Smi-encoded
            : // CompareOperationFeedback value describing the operand types that
            : // were observed, for the interpreter's feedback collection.
9012 : // Pseudo-code for the algorithm below:
9013 : //
9014 : // if (lhs == rhs) {
9015 : // if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
9016 : // return true;
9017 : // }
9018 : // if (!lhs->IsSmi()) {
9019 : // if (lhs->IsHeapNumber()) {
9020 : // if (rhs->IsSmi()) {
9021 : // return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
9022 : // } else if (rhs->IsHeapNumber()) {
9023 : // return HeapNumber::cast(rhs)->value() ==
9024 : // HeapNumber::cast(lhs)->value();
9025 : // } else {
9026 : // return false;
9027 : // }
9028 : // } else {
9029 : // if (rhs->IsSmi()) {
9030 : // return false;
9031 : // } else {
9032 : // if (lhs->IsString()) {
9033 : // if (rhs->IsString()) {
9034 : // return %StringEqual(lhs, rhs);
9035 : // } else {
9036 : // return false;
9037 : // }
9038 : // } else if (lhs->IsBigInt()) {
9039 : // if (rhs->IsBigInt()) {
9040 : // return %BigIntEqual(lhs, rhs);
9041 : // } else {
9042 : // return false;
9043 : // }
9044 : // } else {
9045 : // return false;
9046 : // }
9047 : // }
9048 : // }
9049 : // } else {
9050 : // if (rhs->IsSmi()) {
9051 : // return false;
9052 : // } else {
9053 : // if (rhs->IsHeapNumber()) {
9054 : // return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
9055 : // } else {
9056 : // return false;
9057 : // }
9058 : // }
9059 : // }
9060 :
9061 434 : Label if_equal(this), if_notequal(this), end(this);
9062 434 : VARIABLE(result, MachineRepresentation::kTagged);
9063 :
9064 : // Check if {lhs} and {rhs} refer to the same object.
9065 217 : Label if_same(this), if_notsame(this);
9066 434 : Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
9067 :
9068 : BIND(&if_same);
9069 : {
9070 : // The {lhs} and {rhs} reference the exact same value, yet we need special
9071 : // treatment for HeapNumber, as NaN is not equal to NaN.
9072 217 : if (var_type_feedback != nullptr) {
9073 186 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
9074 : }
9075 217 : GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
9076 : }
9077 :
9078 : BIND(&if_notsame);
9079 : {
9080 : // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
9081 : // BigInt and String they can still be considered equal.
9082 :
            : // Start with the widest feedback (kAny); the branches below narrow
            : // it when both operands turn out to share a more specific type.
9083 217 : if (var_type_feedback != nullptr) {
9084 186 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
9085 : }
9086 :
9087 : // Check if {lhs} is a Smi or a HeapObject.
9088 217 : Label if_lhsissmi(this), if_lhsisnotsmi(this);
9089 434 : Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
9090 :
9091 : BIND(&if_lhsisnotsmi);
9092 : {
9093 : // Load the map of {lhs}.
9094 434 : Node* lhs_map = LoadMap(lhs);
9095 :
9096 : // Check if {lhs} is a HeapNumber.
9097 217 : Label if_lhsisnumber(this), if_lhsisnotnumber(this);
9098 434 : Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
9099 :
9100 : BIND(&if_lhsisnumber);
9101 : {
9102 : // Check if {rhs} is a Smi or a HeapObject.
9103 217 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
9104 434 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
9105 :
9106 : BIND(&if_rhsissmi);
9107 : {
9108 : // Convert {lhs} and {rhs} to floating point values.
9109 434 : Node* lhs_value = LoadHeapNumberValue(lhs);
9110 434 : Node* rhs_value = SmiToFloat64(rhs);
9111 :
9112 217 : if (var_type_feedback != nullptr) {
9113 : var_type_feedback->Bind(
9114 186 : SmiConstant(CompareOperationFeedback::kNumber));
9115 : }
9116 :
9117 : // Perform a floating point comparison of {lhs} and {rhs}.
9118 434 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
9119 : }
9120 :
9121 : BIND(&if_rhsisnotsmi);
9122 : {
9123 : // Load the map of {rhs}.
9124 434 : Node* rhs_map = LoadMap(rhs);
9125 :
9126 : // Check if {rhs} is also a HeapNumber.
9127 217 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
9128 434 : Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
9129 :
9130 : BIND(&if_rhsisnumber);
9131 : {
9132 : // Convert {lhs} and {rhs} to floating point values.
9133 434 : Node* lhs_value = LoadHeapNumberValue(lhs);
9134 434 : Node* rhs_value = LoadHeapNumberValue(rhs);
9135 :
9136 217 : if (var_type_feedback != nullptr) {
9137 : var_type_feedback->Bind(
9138 186 : SmiConstant(CompareOperationFeedback::kNumber));
9139 : }
9140 :
9141 : // Perform a floating point comparison of {lhs} and {rhs}.
9142 434 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
9143 : }
9144 :
9145 : BIND(&if_rhsisnotnumber);
9146 434 : Goto(&if_notequal);
9147 217 : }
9148 : }
9149 :
9150 : BIND(&if_lhsisnotnumber);
9151 : {
9152 : // Check if {rhs} is a Smi or a HeapObject.
9153 217 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
9154 434 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
9155 :
9156 : BIND(&if_rhsissmi);
9157 217 : Goto(&if_notequal);
9158 :
9159 : BIND(&if_rhsisnotsmi);
9160 : {
9161 : // Load the instance type of {lhs}.
9162 434 : Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
9163 :
9164 : // Check if {lhs} is a String.
9165 217 : Label if_lhsisstring(this), if_lhsisnotstring(this);
9166 : Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
9167 434 : &if_lhsisnotstring);
9168 :
9169 : BIND(&if_lhsisstring);
9170 : {
9171 : // Load the instance type of {rhs}.
9172 434 : Node* rhs_instance_type = LoadInstanceType(rhs);
9173 :
9174 : // Check if {rhs} is also a String.
9175 : Label if_rhsisstring(this, Label::kDeferred),
9176 217 : if_rhsisnotstring(this);
9177 : Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
9178 434 : &if_rhsisnotstring);
9179 :
9180 : BIND(&if_rhsisstring);
9181 : {
9182 217 : if (var_type_feedback != nullptr) {
9183 : Node* lhs_feedback =
9184 93 : CollectFeedbackForString(lhs_instance_type);
9185 : Node* rhs_feedback =
9186 93 : CollectFeedbackForString(rhs_instance_type);
9187 186 : var_type_feedback->Bind(SmiOr(lhs_feedback, rhs_feedback));
9188 : }
9189 : result.Bind(CallBuiltin(Builtins::kStringEqual,
9190 217 : NoContextConstant(), lhs, rhs));
9191 217 : Goto(&end);
9192 : }
9193 :
9194 : BIND(&if_rhsisnotstring);
9195 434 : Goto(&if_notequal);
9196 : }
9197 :
9198 : BIND(&if_lhsisnotstring);
9199 :
9200 : // Check if {lhs} is a BigInt.
9201 217 : Label if_lhsisbigint(this), if_lhsisnotbigint(this);
9202 : Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
9203 217 : &if_lhsisnotbigint);
9204 :
9205 : BIND(&if_lhsisbigint);
9206 : {
9207 : // Load the instance type of {rhs}.
9208 434 : Node* rhs_instance_type = LoadInstanceType(rhs);
9209 :
9210 : // Check if {rhs} is also a BigInt.
9211 : Label if_rhsisbigint(this, Label::kDeferred),
9212 217 : if_rhsisnotbigint(this);
9213 : Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
9214 217 : &if_rhsisnotbigint);
9215 :
9216 : BIND(&if_rhsisbigint);
9217 : {
                      : // Feedback stays kAny for BigInt === BigInt; the assert
                      : // below documents that no narrower value was bound.
9218 : if (var_type_feedback != nullptr) {
9219 : CSA_ASSERT(
9220 : this,
9221 : WordEqual(var_type_feedback->value(),
9222 : SmiConstant(CompareOperationFeedback::kAny)));
9223 : }
9224 : result.Bind(CallRuntime(Runtime::kBigIntEqual,
9225 217 : NoContextConstant(), lhs, rhs));
9226 217 : Goto(&end);
9227 : }
9228 :
9229 : BIND(&if_rhsisnotbigint);
9230 434 : Goto(&if_notequal);
9231 : }
9232 :
9233 : BIND(&if_lhsisnotbigint);
9234 217 : if (var_type_feedback != nullptr) {
9235 : // Load the instance type of {rhs}.
9236 186 : Node* rhs_instance_type = LoadInstanceType(rhs);
9237 :
9238 93 : Label if_lhsissymbol(this), if_lhsisreceiver(this);
9239 : GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
9240 186 : &if_lhsisreceiver);
9241 : Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
9242 93 : &if_notequal);
9243 :
9244 : BIND(&if_lhsisreceiver);
9245 : {
9246 : GotoIfNot(IsJSReceiverInstanceType(rhs_instance_type),
9247 186 : &if_notequal);
9248 : var_type_feedback->Bind(
9249 186 : SmiConstant(CompareOperationFeedback::kReceiver));
9250 93 : Goto(&if_notequal);
9251 : }
9252 :
9253 : BIND(&if_lhsissymbol);
9254 : {
9255 93 : GotoIfNot(IsSymbolInstanceType(rhs_instance_type), &if_notequal);
9256 : var_type_feedback->Bind(
9257 186 : SmiConstant(CompareOperationFeedback::kSymbol));
9258 93 : Goto(&if_notequal);
9259 93 : }
9260 : } else {
9261 124 : Goto(&if_notequal);
9262 217 : }
9263 217 : }
9264 217 : }
9265 : }
9266 :
9267 : BIND(&if_lhsissmi);
9268 : {
9269 : // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
9270 : // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
9271 : // HeapNumber with an equal floating point value.
9272 :
9273 : // Check if {rhs} is a Smi or a HeapObject.
9274 217 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
9275 434 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
9276 :
9277 : BIND(&if_rhsissmi);
9278 217 : if (var_type_feedback != nullptr) {
9279 : var_type_feedback->Bind(
9280 186 : SmiConstant(CompareOperationFeedback::kSignedSmall));
9281 : }
9282 217 : Goto(&if_notequal);
9283 :
9284 : BIND(&if_rhsisnotsmi);
9285 : {
9286 : // Load the map of the {rhs}.
9287 434 : Node* rhs_map = LoadMap(rhs);
9288 :
9289 : // The {rhs} could be a HeapNumber with the same value as {lhs}.
9290 217 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
9291 434 : Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
9292 :
9293 : BIND(&if_rhsisnumber);
9294 : {
9295 : // Convert {lhs} and {rhs} to floating point values.
9296 434 : Node* lhs_value = SmiToFloat64(lhs);
9297 434 : Node* rhs_value = LoadHeapNumberValue(rhs);
9298 :
9299 217 : if (var_type_feedback != nullptr) {
9300 : var_type_feedback->Bind(
9301 186 : SmiConstant(CompareOperationFeedback::kNumber));
9302 : }
9303 :
9304 : // Perform a floating point comparison of {lhs} and {rhs}.
9305 434 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
9306 : }
9307 :
9308 : BIND(&if_rhsisnotnumber);
9309 434 : Goto(&if_notequal);
9310 217 : }
9311 217 : }
9312 : }
9313 :
9314 : BIND(&if_equal);
9315 : {
9316 217 : result.Bind(TrueConstant());
9317 217 : Goto(&end);
9318 : }
9319 :
9320 : BIND(&if_notequal);
9321 : {
9322 217 : result.Bind(FalseConstant());
9323 217 : Goto(&end);
9324 : }
9325 :
9326 : BIND(&end);
9327 434 : return result.value();
9328 : }
9329 :
9330 : // ECMA#sec-samevalue
9331 : // This algorithm differs from the Strict Equality Comparison Algorithm in its
9332 : // treatment of signed zeroes and NaNs.
9333 341 : void CodeStubAssembler::BranchIfSameValue(Node* lhs, Node* rhs, Label* if_true,
9334 : Label* if_false) {
            : // Branches to {if_true} if SameValue(lhs, rhs) holds, otherwise to
            : // {if_false}. Unlike StrictEqual: NaN is same-value as NaN, while
            : // +0 and -0 are NOT same-value as each other.
9335 341 : VARIABLE(var_lhs_value, MachineRepresentation::kFloat64);
9336 682 : VARIABLE(var_rhs_value, MachineRepresentation::kFloat64);
9337 341 : Label do_fcmp(this);
9338 :
9339 : // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
9340 : // StrictEqual - SameValue considers two NaNs to be equal.
9341 682 : GotoIf(WordEqual(lhs, rhs), if_true);
9342 :
9343 : // Check if the {lhs} is a Smi.
9344 341 : Label if_lhsissmi(this), if_lhsisheapobject(this);
9345 682 : Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);
9346 :
9347 : BIND(&if_lhsissmi);
9348 : {
9349 : // Since {lhs} is a Smi, the comparison can only yield true
9350 : // iff the {rhs} is a HeapNumber with the same float64 value.
9351 682 : GotoIf(TaggedIsSmi(rhs), if_false);
9352 682 : GotoIfNot(IsHeapNumber(rhs), if_false);
9353 682 : var_lhs_value.Bind(SmiToFloat64(lhs));
9354 682 : var_rhs_value.Bind(LoadHeapNumberValue(rhs));
9355 341 : Goto(&do_fcmp);
9356 : }
9357 :
9358 : BIND(&if_lhsisheapobject);
9359 : {
9360 : // Check if the {rhs} is a Smi.
9361 341 : Label if_rhsissmi(this), if_rhsisheapobject(this);
9362 682 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisheapobject);
9363 :
9364 : BIND(&if_rhsissmi);
9365 : {
9366 : // Since {rhs} is a Smi, the comparison can only yield true
9367 : // iff the {lhs} is a HeapNumber with the same float64 value.
9368 682 : GotoIfNot(IsHeapNumber(lhs), if_false);
9369 682 : var_lhs_value.Bind(LoadHeapNumberValue(lhs));
9370 682 : var_rhs_value.Bind(SmiToFloat64(rhs));
9371 341 : Goto(&do_fcmp);
9372 : }
9373 :
9374 : BIND(&if_rhsisheapobject);
9375 : {
9376 : // Now this can only yield true if either both {lhs} and {rhs} are
9377 : // HeapNumbers with the same value, or both are Strings with the same
9378 : // character sequence, or both are BigInts with the same value.
9379 341 : Label if_lhsisheapnumber(this), if_lhsisstring(this),
9380 341 : if_lhsisbigint(this);
9381 682 : Node* const lhs_map = LoadMap(lhs);
9382 682 : GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
9383 682 : Node* const lhs_instance_type = LoadMapInstanceType(lhs_map);
9384 682 : GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
9385 : Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
9386 341 : if_false);
9387 :
9388 : BIND(&if_lhsisheapnumber);
9389 : {
9390 682 : GotoIfNot(IsHeapNumber(rhs), if_false);
9391 682 : var_lhs_value.Bind(LoadHeapNumberValue(lhs));
9392 682 : var_rhs_value.Bind(LoadHeapNumberValue(rhs));
9393 341 : Goto(&do_fcmp);
9394 : }
9395 :
9396 : BIND(&if_lhsisstring);
9397 : {
9398 : // Now we can only yield true if {rhs} is also a String
9399 : // with the same sequence of characters.
9400 682 : GotoIfNot(IsString(rhs), if_false);
9401 : Node* const result =
9402 341 : CallBuiltin(Builtins::kStringEqual, NoContextConstant(), lhs, rhs);
9403 682 : Branch(IsTrue(result), if_true, if_false);
9404 : }
9405 :
9406 : BIND(&if_lhsisbigint);
9407 : {
9408 682 : GotoIfNot(IsBigInt(rhs), if_false);
9409 : Node* const result =
9410 : CallRuntime(Runtime::kBigIntEqual, NoContextConstant(), lhs, rhs);
9411 682 : Branch(IsTrue(result), if_true, if_false);
9412 341 : }
9413 341 : }
9414 : }
9415 :
9416 : BIND(&do_fcmp);
9417 : {
            : // Shared float64 tail: both operands were converted to doubles in
            : // {var_lhs_value}/{var_rhs_value} by one of the paths above.
9418 341 : Node* const lhs_value = var_lhs_value.value();
9419 341 : Node* const rhs_value = var_rhs_value.value();
9420 :
9421 341 : Label if_equal(this), if_notequal(this);
9422 682 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
9423 :
9424 : BIND(&if_equal);
9425 : {
9426 : // We still need to handle the case when {lhs} and {rhs} are -0.0 and
9427 : // 0.0 (or vice versa). Compare the high word to
9428 : // distinguish between the two.
9429 682 : Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_value);
9430 682 : Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_value);
9431 :
9432 : // If x is +0 and y is -0, return false.
9433 : // If x is -0 and y is +0, return false.
9434 682 : Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
9435 : }
9436 :
9437 : BIND(&if_notequal);
9438 : {
9439 : // Return true iff both {rhs} and {lhs} are NaN.
9440 682 : GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
9441 682 : Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
9442 341 : }
9443 341 : }
9444 341 : }
9445 :
9446 372 : Node* CodeStubAssembler::HasProperty(Node* object, Node* key, Node* context,
9447 : HasPropertyLookupMode mode) {
            : // Walks the prototype chain of {object} looking for {key} (as a named
            : // property or an element) and returns a tagged boolean Node. Proxies
            : // and other slow paths fall back to builtins/runtime; {mode} selects
            : // which runtime fallback is used (HasProperty vs. ForInHasProperty).
9448 744 : Label call_runtime(this, Label::kDeferred), return_true(this),
9449 372 : return_false(this), end(this), if_proxy(this, Label::kDeferred);
9450 :
            : // Per-holder callback for named-property lookup along the chain.
9451 : CodeStubAssembler::LookupInHolder lookup_property_in_holder =
9452 : [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
9453 : Node* holder_instance_type, Node* unique_name,
9454 : Label* next_holder, Label* if_bailout) {
9455 : TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
9456 372 : &return_true, next_holder, if_bailout);
9457 : };
9458 :
            : // Per-holder callback for indexed (element) lookup along the chain.
9459 : CodeStubAssembler::LookupInHolder lookup_element_in_holder =
9460 : [this, &return_true, &return_false](
9461 : Node* receiver, Node* holder, Node* holder_map,
9462 : Node* holder_instance_type, Node* index, Label* next_holder,
9463 : Label* if_bailout) {
9464 : TryLookupElement(holder, holder_map, holder_instance_type, index,
9465 372 : &return_true, &return_false, next_holder, if_bailout);
9466 : };
9467 :
9468 : TryPrototypeChainLookup(object, key, lookup_property_in_holder,
9469 : lookup_element_in_holder, &return_false,
9470 372 : &call_runtime, &if_proxy);
9471 :
9472 744 : VARIABLE(result, MachineRepresentation::kTagged);
9473 :
9474 : BIND(&if_proxy);
9475 : {
9476 372 : Node* name = ToName(context, key);
9477 372 : switch (mode) {
9478 : case kHasProperty:
                  : // Private symbols are never observable through a proxy trap.
9479 682 : GotoIf(IsPrivateSymbol(name), &return_false);
9480 :
9481 : result.Bind(
9482 341 : CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
9483 341 : Goto(&end);
9484 341 : break;
9485 : case kForInHasProperty:
                  : // For-in on proxies is handled entirely by the runtime fallback.
9486 31 : Goto(&call_runtime);
9487 31 : break;
9488 : }
9489 : }
9490 :
9491 : BIND(&return_true);
9492 : {
9493 744 : result.Bind(BooleanConstant(true));
9494 372 : Goto(&end);
9495 : }
9496 :
9497 : BIND(&return_false);
9498 : {
9499 744 : result.Bind(BooleanConstant(false));
9500 372 : Goto(&end);
9501 : }
9502 :
9503 : BIND(&call_runtime);
9504 : {
9505 : Runtime::FunctionId fallback_runtime_function_id;
9506 372 : switch (mode) {
9507 : case kHasProperty:
9508 : fallback_runtime_function_id = Runtime::kHasProperty;
9509 341 : break;
9510 : case kForInHasProperty:
9511 : fallback_runtime_function_id = Runtime::kForInHasProperty;
9512 31 : break;
9513 : }
9514 :
9515 : result.Bind(
9516 372 : CallRuntime(fallback_runtime_function_id, context, object, key));
9517 372 : Goto(&end);
9518 : }
9519 :
9520 : BIND(&end);
9521 744 : return result.value();
9522 : }
9523 :
9524 124 : Node* CodeStubAssembler::ClassOf(Node* value) {
            : // Computes a "class" string for {value}: "Function" for callables,
            : // the constructor's instance class name (or "Object" / the FTI class
            : // name / "" as fallbacks) for JS receivers, and Null for primitives
            : // (including Smis).
9525 124 : VARIABLE(var_result, MachineRepresentation::kTaggedPointer);
9526 124 : Label if_function_template_info(this, Label::kDeferred),
9527 124 : if_no_class_name(this, Label::kDeferred),
9528 124 : if_function(this, Label::kDeferred), if_object(this, Label::kDeferred),
9529 124 : if_primitive(this, Label::kDeferred), return_result(this);
9530 :
9531 : // Check if {value} is a Smi.
9532 248 : GotoIf(TaggedIsSmi(value), &if_primitive);
9533 :
9534 248 : Node* value_map = LoadMap(value);
9535 248 : Node* value_instance_type = LoadMapInstanceType(value_map);
9536 :
9537 : // Check if {value} is a JSFunction or JSBoundFunction.
9538 : STATIC_ASSERT(LAST_TYPE == LAST_FUNCTION_TYPE);
9539 : GotoIf(Uint32LessThanOrEqual(Int32Constant(FIRST_FUNCTION_TYPE),
9540 248 : value_instance_type),
9541 248 : &if_function);
9542 :
9543 : // Check if {value} is a primitive HeapObject.
9544 : STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
9545 248 : GotoIfNot(IsJSReceiverInstanceType(value_instance_type), &if_primitive);
9546 :
9547 : // Load the {value}s constructor, and check that it's a JSFunction.
9548 248 : Node* constructor = LoadMapConstructor(value_map);
9549 : GotoIf(HasInstanceType(constructor, FUNCTION_TEMPLATE_INFO_TYPE),
9550 248 : &if_function_template_info);
9551 248 : GotoIfNot(IsJSFunction(constructor), &if_object);
9552 :
9553 : // Return the instance class name for the {constructor}.
9554 : Node* shared_info =
9555 : LoadObjectField(constructor, JSFunction::kSharedFunctionInfoOffset);
9556 : Node* instance_class_name = LoadObjectField(
9557 : shared_info, SharedFunctionInfo::kInstanceClassNameOffset);
9558 124 : var_result.Bind(instance_class_name);
9559 124 : Goto(&return_result);
9560 :
9561 : // For remote objects the constructor might be given as FTI.
9562 : BIND(&if_function_template_info);
9563 : Node* class_name =
9564 : LoadObjectField(constructor, FunctionTemplateInfo::kClassNameOffset);
9565 248 : GotoIf(IsUndefined(class_name), &if_no_class_name);
9566 124 : var_result.Bind(class_name);
9567 124 : Goto(&return_result);
9568 :
            : // FTI without a class name falls back to the empty string.
9569 : BIND(&if_no_class_name);
9570 248 : var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
9571 124 : Goto(&return_result);
9572 :
9573 : BIND(&if_function);
9574 248 : var_result.Bind(LoadRoot(Heap::kFunction_stringRootIndex));
9575 124 : Goto(&return_result);
9576 :
9577 : BIND(&if_object);
9578 248 : var_result.Bind(LoadRoot(Heap::kObject_stringRootIndex));
9579 124 : Goto(&return_result);
9580 :
9581 : BIND(&if_primitive);
9582 124 : var_result.Bind(NullConstant());
9583 124 : Goto(&return_result);
9584 :
9585 : BIND(&return_result);
9586 248 : return var_result.value();
9587 : }
9588 :
9589 124 : Node* CodeStubAssembler::Typeof(Node* value) {
            : // Implements the JavaScript `typeof` operator: returns the interned
            : // type-name string for {value} ("number", "function", "undefined",
            : // "object", "string", "bigint", "symbol", or an Oddball's cached
            : // typeof string such as "boolean").
9590 124 : VARIABLE(result_var, MachineRepresentation::kTagged);
9591 :
9592 124 : Label return_number(this, Label::kDeferred), if_oddball(this),
9593 124 : return_function(this), return_undefined(this), return_object(this),
9594 124 : return_string(this), return_bigint(this), return_result(this);
9595 :
9596 248 : GotoIf(TaggedIsSmi(value), &return_number);
9597 :
9598 248 : Node* map = LoadMap(value);
9599 :
9600 248 : GotoIf(IsHeapNumberMap(map), &return_number);
9601 :
9602 248 : Node* instance_type = LoadMapInstanceType(map);
9603 :
9604 248 : GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);
9605 :
            : // A map that is callable but not undetectable is a "function";
            : // any undetectable map reports "undefined".
9606 : Node* callable_or_undetectable_mask = Word32And(
9607 124 : LoadMapBitField(map),
9608 620 : Int32Constant(1 << Map::kIsCallable | 1 << Map::kIsUndetectable));
9609 :
9610 : GotoIf(Word32Equal(callable_or_undetectable_mask,
9611 248 : Int32Constant(1 << Map::kIsCallable)),
9612 248 : &return_function);
9613 :
9614 248 : GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
9615 248 : &return_undefined);
9616 :
9617 248 : GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);
9618 :
9619 248 : GotoIf(IsStringInstanceType(instance_type), &return_string);
9620 :
9621 124 : GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);
9622 :
            : // All other cases exhausted: the only remaining type is Symbol.
9623 : CSA_ASSERT(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE));
9624 248 : result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
9625 124 : Goto(&return_result);
9626 :
9627 : BIND(&return_number);
9628 : {
9629 248 : result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
9630 124 : Goto(&return_result);
9631 : }
9632 :
9633 : BIND(&if_oddball);
9634 : {
            : // Oddballs (true/false/null/undefined) carry their typeof string.
9635 : Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
9636 124 : result_var.Bind(type);
9637 124 : Goto(&return_result);
9638 : }
9639 :
9640 : BIND(&return_function);
9641 : {
9642 248 : result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
9643 124 : Goto(&return_result);
9644 : }
9645 :
9646 : BIND(&return_undefined);
9647 : {
9648 248 : result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
9649 124 : Goto(&return_result);
9650 : }
9651 :
9652 : BIND(&return_object);
9653 : {
9654 248 : result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
9655 124 : Goto(&return_result);
9656 : }
9657 :
9658 : BIND(&return_string);
9659 : {
9660 248 : result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
9661 124 : Goto(&return_result);
9662 : }
9663 :
9664 : BIND(&return_bigint);
9665 : {
9666 248 : result_var.Bind(HeapConstant(isolate()->factory()->bigint_string()));
9667 124 : Goto(&return_result);
9668 : }
9669 :
9670 : BIND(&return_result);
9671 248 : return result_var.value();
9672 : }
9673 :
9674 124 : Node* CodeStubAssembler::GetSuperConstructor(Node* active_function,
9675 : Node* context) {
9676 : CSA_ASSERT(this, IsJSFunction(active_function));
9677 :
9678 248 : Label is_not_constructor(this, Label::kDeferred), out(this);
9679 248 : VARIABLE(result, MachineRepresentation::kTagged);
9680 :
9681 248 : Node* map = LoadMap(active_function);
9682 248 : Node* prototype = LoadMapPrototype(map);
9683 248 : Node* prototype_map = LoadMap(prototype);
9684 248 : GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);
9685 :
9686 124 : result.Bind(prototype);
9687 124 : Goto(&out);
9688 :
9689 : BIND(&is_not_constructor);
9690 : {
9691 : CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
9692 : active_function);
9693 124 : Unreachable();
9694 : }
9695 :
9696 : BIND(&out);
9697 248 : return result.value();
9698 : }
9699 :
// Implements the `instanceof` operator check of {object} against {callable}:
// loads {callable}[@@hasInstance] and dispatches to it, with a fast path for
// the unmodified builtin Function.prototype[@@hasInstance], a fallback to the
// OrdinaryHasInstance builtin when no handler is installed, and runtime
// throws for non-receiver or non-callable inputs.
Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
                                    Node* context) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label if_notcallable(this, Label::kDeferred),
      if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
      if_nohandler(this, Label::kDeferred), return_true(this),
      return_false(this), return_result(this, &var_result);

  // Ensure that the {callable} is actually a JSReceiver.
  GotoIf(TaggedIsSmi(callable), &if_notreceiver);
  GotoIfNot(IsJSReceiver(callable), &if_notreceiver);

  // Load the @@hasInstance property from {callable}.
  Node* inst_of_handler =
      GetProperty(context, callable, HasInstanceSymbolConstant());

  // Optimize for the likely case where {inst_of_handler} is the builtin
  // Function.prototype[@@hasInstance] method, and emit a direct call in
  // that case without any additional checking.
  Node* native_context = LoadNativeContext(context);
  Node* function_has_instance =
      LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
  GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
            &if_otherhandler);
  {
    // TODO(6786): A direct call to a TFJ builtin breaks the lazy
    // deserialization mechanism in two ways: first, we always pass in a
    // callable containing the DeserializeLazy code object (assuming that
    // FunctionPrototypeHasInstance is lazy). Second, a direct call (without
    // going through CodeFactory::Call) to DeserializeLazy will not initialize
    // new_target properly. For now we can avoid this by marking
    // FunctionPrototypeHasInstance as eager, but this should be fixed at some
    // point.
    //
    // Call to Function.prototype[@@hasInstance] directly.
    Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance),
                     CallTrampolineDescriptor(isolate()));
    Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_otherhandler);
  {
    // Check if there's actually an {inst_of_handler}.
    GotoIf(IsNull(inst_of_handler), &if_nohandler);
    GotoIf(IsUndefined(inst_of_handler), &if_nohandler);

    // Call the {inst_of_handler} for {callable} and {object}.
    Node* result = CallJS(
        CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
        context, inst_of_handler, callable, object);

    // Convert the {result} to a Boolean.
    BranchIfToBooleanIsTrue(result, &return_true, &return_false);
  }

  BIND(&if_nohandler);
  {
    // Ensure that the {callable} is actually Callable.
    GotoIfNot(IsCallable(callable), &if_notcallable);

    // Use the OrdinaryHasInstance algorithm.
    Node* result =
        CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_notcallable);
  {
    // Throws a TypeError; never falls through.
    CallRuntime(Runtime::kThrowNonCallableInInstanceOfCheck, context);
    Unreachable();
  }

  BIND(&if_notreceiver);
  {
    // Throws a TypeError; never falls through.
    CallRuntime(Runtime::kThrowNonObjectInInstanceOfCheck, context);
    Unreachable();
  }

  BIND(&return_true);
  var_result.Bind(TrueConstant());
  Goto(&return_result);

  BIND(&return_false);
  var_result.Bind(FalseConstant());
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
9792 :
// Increments {value} (a Smi or HeapNumber) by one and returns a Number.
// The Smi fast path uses overflow-checked word addition; on overflow, or for
// HeapNumber inputs, the increment is redone in float64 and a fresh
// HeapNumber is allocated for the result.
Node* CodeStubAssembler::NumberInc(Node* value) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_finc_value, MachineRepresentation::kFloat64);
  Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);

  BIND(&if_issmi);
  {
    // Try fast Smi addition first.
    Node* one = SmiConstant(1);
    Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value),
                                       BitcastTaggedToWord(one));
    Node* overflow = Projection(1, pair);

    // Check if the Smi addition overflowed.
    Label if_overflow(this), if_notoverflow(this);
    Branch(overflow, &if_overflow, &if_notoverflow);

    BIND(&if_notoverflow);
    var_result.Bind(BitcastWordToTaggedSigned(Projection(0, pair)));
    Goto(&end);

    BIND(&if_overflow);
    {
      // Overflowed the Smi range: redo the increment in float64.
      var_finc_value.Bind(SmiToFloat64(value));
      Goto(&do_finc);
    }
  }

  BIND(&if_isnotsmi);
  {
    CSA_ASSERT(this, IsHeapNumber(value));

    // Load the HeapNumber value.
    var_finc_value.Bind(LoadHeapNumberValue(value));
    Goto(&do_finc);
  }

  BIND(&do_finc);
  {
    Node* finc_value = var_finc_value.value();
    Node* one = Float64Constant(1.0);
    Node* finc_result = Float64Add(finc_value, one);
    // Box the float64 result in a newly allocated HeapNumber.
    var_result.Bind(AllocateHeapNumberWithValue(finc_result));
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
9843 :
// Decrements {value} (a Smi or HeapNumber) by one and returns a Number.
// Mirror image of NumberInc: overflow-checked Smi subtraction fast path,
// float64 slow path that allocates a fresh HeapNumber.
Node* CodeStubAssembler::NumberDec(Node* value) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_fdec_value, MachineRepresentation::kFloat64);
  Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);

  BIND(&if_issmi);
  {
    // Try fast Smi subtraction first.
    Node* one = SmiConstant(1);
    Node* pair = IntPtrSubWithOverflow(BitcastTaggedToWord(value),
                                       BitcastTaggedToWord(one));
    Node* overflow = Projection(1, pair);

    // Check if the Smi subtraction overflowed.
    Label if_overflow(this), if_notoverflow(this);
    Branch(overflow, &if_overflow, &if_notoverflow);

    BIND(&if_notoverflow);
    var_result.Bind(BitcastWordToTaggedSigned(Projection(0, pair)));
    Goto(&end);

    BIND(&if_overflow);
    {
      // Overflowed the Smi range: redo the decrement in float64.
      var_fdec_value.Bind(SmiToFloat64(value));
      Goto(&do_fdec);
    }
  }

  BIND(&if_isnotsmi);
  {
    CSA_ASSERT(this, IsHeapNumber(value));

    // Load the HeapNumber value.
    var_fdec_value.Bind(LoadHeapNumberValue(value));
    Goto(&do_fdec);
  }

  BIND(&do_fdec);
  {
    Node* fdec_value = var_fdec_value.value();
    // The decrement is expressed as adding -1.0.
    Node* minus_one = Float64Constant(-1.0);
    Node* fdec_result = Float64Add(fdec_value, minus_one);
    var_result.Bind(AllocateHeapNumberWithValue(fdec_result));
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
9894 :
9895 6 : Node* CodeStubAssembler::NumberAdd(Node* a, Node* b) {
9896 6 : VARIABLE(var_result, MachineRepresentation::kTagged);
9897 12 : VARIABLE(var_fadd_value, MachineRepresentation::kFloat64);
9898 6 : Label float_add(this, Label::kDeferred), end(this);
9899 12 : GotoIf(TaggedIsNotSmi(a), &float_add);
9900 12 : GotoIf(TaggedIsNotSmi(b), &float_add);
9901 :
9902 : // Try fast Smi addition first.
9903 : Node* pair =
9904 24 : IntPtrAddWithOverflow(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
9905 6 : Node* overflow = Projection(1, pair);
9906 :
9907 : // Check if the Smi addition overflowed.
9908 6 : Label if_overflow(this), if_notoverflow(this);
9909 6 : GotoIf(overflow, &float_add);
9910 :
9911 18 : var_result.Bind(BitcastWordToTaggedSigned(Projection(0, pair)));
9912 6 : Goto(&end);
9913 :
9914 : BIND(&float_add);
9915 : {
9916 : var_result.Bind(ChangeFloat64ToTagged(
9917 30 : Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b))));
9918 6 : Goto(&end);
9919 : }
9920 :
9921 : BIND(&end);
9922 12 : return var_result.value();
9923 : }
9924 :
9925 6 : Node* CodeStubAssembler::NumberSub(Node* a, Node* b) {
9926 6 : VARIABLE(var_result, MachineRepresentation::kTagged);
9927 12 : VARIABLE(var_fsub_value, MachineRepresentation::kFloat64);
9928 6 : Label float_sub(this, Label::kDeferred), end(this);
9929 12 : GotoIf(TaggedIsNotSmi(a), &float_sub);
9930 12 : GotoIf(TaggedIsNotSmi(b), &float_sub);
9931 :
9932 : // Try fast Smi subtraction first.
9933 : Node* pair =
9934 24 : IntPtrSubWithOverflow(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
9935 6 : Node* overflow = Projection(1, pair);
9936 :
9937 : // Check if the Smi subtraction overflowed.
9938 6 : Label if_overflow(this), if_notoverflow(this);
9939 6 : GotoIf(overflow, &float_sub);
9940 :
9941 18 : var_result.Bind(BitcastWordToTaggedSigned(Projection(0, pair)));
9942 6 : Goto(&end);
9943 :
9944 : BIND(&float_sub);
9945 : {
9946 : var_result.Bind(ChangeFloat64ToTagged(
9947 30 : Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b))));
9948 6 : Goto(&end);
9949 : }
9950 :
9951 : BIND(&end);
9952 12 : return var_result.value();
9953 : }
9954 :
9955 235 : void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
9956 235 : Label is_number(this);
9957 470 : GotoIf(TaggedIsSmi(input), &is_number);
9958 470 : Branch(IsHeapNumber(input), &is_number, is_not_number);
9959 235 : BIND(&is_number);
9960 235 : }
9961 :
// Jumps to {is_number} if {input} is a Smi or a HeapNumber; falls through
// otherwise.
void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
  GotoIf(TaggedIsSmi(input), is_number);
  GotoIf(IsHeapNumber(input), is_number);
}
9966 :
// Applies the 32-bit bitwise operation {bitwise_op} to the untagged word32
// inputs {left32} and {right32} and returns the result as a tagged Number.
// Shift counts are masked to the low 5 bits (& 0x1f). SHR re-tags the result
// as unsigned; all other operations re-tag it as signed.
Node* CodeStubAssembler::BitwiseOp(Node* left32, Node* right32,
                                   Token::Value bitwise_op) {
  switch (bitwise_op) {
    case Token::BIT_AND:
      return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
    case Token::BIT_OR:
      return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
    case Token::BIT_XOR:
      return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
    case Token::SHL:
      return ChangeInt32ToTagged(
          Signed(Word32Shl(left32, Word32And(right32, Int32Constant(0x1f)))));
    case Token::SAR:
      return ChangeInt32ToTagged(
          Signed(Word32Sar(left32, Word32And(right32, Int32Constant(0x1f)))));
    case Token::SHR:
      // Logical shift right produces an unsigned 32-bit result.
      return ChangeUint32ToTagged(
          Unsigned(Word32Shr(left32, Word32And(right32, Int32Constant(0x1f)))));
    default:
      break;
  }
  UNREACHABLE();
}
9990 :
// Creates a JSArrayIterator over {array} for the given iteration {mode}
// (keys / values / entries). Selects the iterator map from the native
// context by computing an index relative to a per-mode base: fast JSArrays
// get a map offset by their elements kind, typed arrays get a map offset by
// (elements kind - UINT8_ELEMENTS), and everything else gets the generic
// (slow) iterator map. {array_map} is remembered on the iterator only for
// the fast paths; slow/typed paths store undefined instead.
Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
                                             Node* array_type, Node* context,
                                             IterationKind mode) {
  int kBaseMapIndex = 0;
  switch (mode) {
    case IterationKind::kKeys:
      kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kValues:
      kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kEntries:
      kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX;
      break;
  }

  // Fast Array iterator map index:
  // (kBaseIndex + kFastIteratorOffset) + ElementsKind (for JSArrays)
  // kBaseIndex + (ElementsKind - UINT8_ELEMENTS) (for JSTypedArrays)
  const int kFastIteratorOffset =
      Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kFastIteratorOffset ==
                (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));

  // Slow Array iterator map index: (kBaseIndex + kSlowIteratorOffset)
  const int kSlowIteratorOffset =
      Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kSlowIteratorOffset ==
                (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));

  // Assert: Type(array) is Object
  CSA_ASSERT(this, IsJSReceiverInstanceType(array_type));

  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_map_index, MachineType::PointerRepresentation());
  VARIABLE(var_array_map, MachineRepresentation::kTagged);

  Label return_result(this);
  Label allocate_iterator(this);

  if (mode == IterationKind::kKeys) {
    // There are only two key iterator maps, branch depending on whether or not
    // the receiver is a TypedArray or not.

    Label if_istypedarray(this), if_isgeneric(this);

    Branch(InstanceTypeEqual(array_type, JS_TYPED_ARRAY_TYPE), &if_istypedarray,
           &if_isgeneric);

    BIND(&if_isgeneric);
    {
      Label if_isfast(this), if_isslow(this);
      BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);

      BIND(&if_isfast);
      {
        var_map_index.Bind(
            IntPtrConstant(Context::FAST_ARRAY_KEY_ITERATOR_MAP_INDEX));
        var_array_map.Bind(array_map);
        Goto(&allocate_iterator);
      }

      BIND(&if_isslow);
      {
        var_map_index.Bind(
            IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX));
        // Slow path: no map check on iteration, so no array map is recorded.
        var_array_map.Bind(UndefinedConstant());
        Goto(&allocate_iterator);
      }
    }

    BIND(&if_istypedarray);
    {
      var_map_index.Bind(
          IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX));
      var_array_map.Bind(UndefinedConstant());
      Goto(&allocate_iterator);
    }
  } else {
    Label if_istypedarray(this), if_isgeneric(this);
    Branch(InstanceTypeEqual(array_type, JS_TYPED_ARRAY_TYPE), &if_istypedarray,
           &if_isgeneric);

    BIND(&if_isgeneric);
    {
      Label if_isfast(this), if_isslow(this);
      BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);

      BIND(&if_isfast);
      {
        Label if_ispacked(this), if_isholey(this);
        Node* elements_kind = LoadMapElementsKind(array_map);
        Branch(IsHoleyFastElementsKind(elements_kind), &if_isholey,
               &if_ispacked);

        BIND(&if_isholey);
        {
          // Fast holey JSArrays can treat the hole as undefined if the
          // protector cell is valid, and the prototype chain is unchanged from
          // its initial state (because the protector cell is only tracked for
          // initial the Array and Object prototypes). Check these conditions
          // here, and take the slow path if any fail.
          GotoIf(IsArrayProtectorCellInvalid(), &if_isslow);

          Node* native_context = LoadNativeContext(context);

          Node* prototype = LoadMapPrototype(array_map);
          Node* array_prototype = LoadContextElement(
              native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
          GotoIfNot(WordEqual(prototype, array_prototype), &if_isslow);

          Node* map = LoadMap(prototype);
          prototype = LoadMapPrototype(map);
          Node* object_prototype = LoadContextElement(
              native_context, Context::INITIAL_OBJECT_PROTOTYPE_INDEX);
          GotoIfNot(WordEqual(prototype, object_prototype), &if_isslow);

          // The chain must terminate directly after Object.prototype.
          map = LoadMap(prototype);
          prototype = LoadMapPrototype(map);
          Branch(IsNull(prototype), &if_ispacked, &if_isslow);
        }
        BIND(&if_ispacked);
        {
          // Fast map index = base + fast offset + elements kind.
          Node* map_index =
              IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset),
                        ChangeUint32ToWord(LoadMapElementsKind(array_map)));
          CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
                               map_index, IntPtrConstant(kBaseMapIndex +
                                                         kFastIteratorOffset)));
          CSA_ASSERT(this, IntPtrLessThan(map_index,
                                          IntPtrConstant(kBaseMapIndex +
                                                         kSlowIteratorOffset)));

          var_map_index.Bind(map_index);
          var_array_map.Bind(array_map);
          Goto(&allocate_iterator);
        }
      }

      BIND(&if_isslow);
      {
        Node* map_index = IntPtrAdd(IntPtrConstant(kBaseMapIndex),
                                    IntPtrConstant(kSlowIteratorOffset));
        var_map_index.Bind(map_index);
        var_array_map.Bind(UndefinedConstant());
        Goto(&allocate_iterator);
      }
    }

    BIND(&if_istypedarray);
    {
      // Typed array map index = base + (elements kind - UINT8_ELEMENTS).
      Node* map_index =
          IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS),
                    ChangeUint32ToWord(LoadMapElementsKind(array_map)));
      CSA_ASSERT(
          this, IntPtrLessThan(map_index, IntPtrConstant(kBaseMapIndex +
                                                         kFastIteratorOffset)));
      CSA_ASSERT(this, IntPtrGreaterThanOrEqual(map_index,
                                                IntPtrConstant(kBaseMapIndex)));
      var_map_index.Bind(map_index);
      var_array_map.Bind(UndefinedConstant());
      Goto(&allocate_iterator);
    }
  }

  BIND(&allocate_iterator);
  {
    // Fetch the selected iterator map from the native context and allocate.
    Node* map = LoadFixedArrayElement(LoadNativeContext(context),
                                      var_map_index.value());
    var_result.Bind(AllocateJSArrayIterator(array, var_array_map.value(), map));
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
10171 :
// Allocates and initializes a JSArrayIterator over {array}, giving it {map}
// as its map and storing {array_map} in the iterated-object-map slot (used
// by fast-path iteration; may be undefined — see CreateArrayIterator).
// All stores use no-write-barrier variants on the freshly allocated object.
Node* CodeStubAssembler::AllocateJSArrayIterator(Node* array, Node* array_map,
                                                 Node* map) {
  Node* iterator = Allocate(JSArrayIterator::kSize);
  StoreMapNoWriteBarrier(iterator, map);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOrHashOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldNoWriteBarrier(iterator,
                                 JSArrayIterator::kIteratedObjectOffset, array);
  // Iteration starts at index 0.
  StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
                                 SmiConstant(0));
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map);
  return iterator;
}
10188 :
// Allocates a JSIteratorResult object ({value: value, done: done}) using the
// iterator-result map from the native context. {done} must be a Boolean.
Node* CodeStubAssembler::AllocateJSIteratorResult(Node* context, Node* value,
                                                  Node* done) {
  CSA_ASSERT(this, IsBoolean(done));
  Node* native_context = LoadNativeContext(context);
  Node* map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  Node* result = Allocate(JSIteratorResult::kSize);
  StoreMapNoWriteBarrier(result, map);
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
  return result;
}
10205 :
// Allocates a JSIteratorResult whose value is the two-element array
// [key, value] and whose done flag is false (the "entries" iterator shape).
// The elements fixed array, the JSArray, and the JSIteratorResult are
// allocated as one contiguous chunk and carved up with InnerAllocate.
Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
                                                          Node* key,
                                                          Node* value) {
  Node* native_context = LoadNativeContext(context);
  Node* length = SmiConstant(2);
  int const elements_size = FixedArray::SizeFor(2);
  // Single allocation covering elements + JSArray + JSIteratorResult.
  Node* elements =
      Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize);
  StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
                       Heap::kFixedArrayMapRootIndex);
  StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
  StoreFixedArrayElement(elements, 0, key);
  StoreFixedArrayElement(elements, 1, value);
  Node* array_map = LoadContextElement(
      native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX);
  // The JSArray lives directly after the elements in the same chunk.
  Node* array = InnerAllocate(elements, elements_size);
  StoreMapNoWriteBarrier(array, array_map);
  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
  Node* iterator_map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  // The iterator result lives directly after the JSArray.
  Node* result = InnerAllocate(array, JSArray::kSize);
  StoreMapNoWriteBarrier(result, iterator_map);
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
  StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
                       Heap::kFalseValueRootIndex);
  return result;
}
10240 :
10241 279 : Node* CodeStubAssembler::TypedArraySpeciesCreateByLength(Node* context,
10242 : Node* originalArray,
10243 : Node* len) {
10244 : // TODO(tebbi): Install a fast path as well, which avoids the runtime
10245 : // call.
10246 : return CallRuntime(Runtime::kTypedArraySpeciesCreateByLength, context,
10247 558 : originalArray, len);
10248 : }
10249 :
10250 5748 : Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
10251 : CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
10252 :
10253 : Node* buffer_bit_field = LoadObjectField(
10254 5748 : buffer, JSArrayBuffer::kBitFieldOffset, MachineType::Uint32());
10255 5748 : return IsSetWord32<JSArrayBuffer::WasNeutered>(buffer_bit_field);
10256 : }
10257 :
// Sets up access to the stub's stack arguments. Computes {arguments_} as a
// pointer derived from the frame pointer (or the current frame pointer when
// {fp} is null) plus an element offset based on the argument count {argc}
// and the fixed frame slots above fp.
CodeStubArguments::CodeStubArguments(
    CodeStubAssembler* assembler, SloppyTNode<IntPtrT> argc, Node* fp,
    CodeStubAssembler::ParameterMode param_mode, ReceiverMode receiver_mode)
    : assembler_(assembler),
      argc_mode_(param_mode),
      receiver_mode_(receiver_mode),
      argc_(argc),
      arguments_(),
      fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
  Node* offset = assembler_->ElementOffsetFromIndex(
      argc_, PACKED_ELEMENTS, param_mode,
      (StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
  arguments_ = assembler_->UncheckedCast<RawPtr<Object>>(
      assembler_->IntPtrAdd(fp_, offset));
}
10273 :
10274 1308 : TNode<Object> CodeStubArguments::GetReceiver() const {
10275 : DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
10276 : return assembler_->UncheckedCast<Object>(
10277 : assembler_->Load(MachineType::AnyTagged(), arguments_,
10278 2616 : assembler_->IntPtrConstant(kPointerSize)));
10279 : }
10280 :
// Computes the address of argument {index}. The index is negated before
// converting it to an element offset, i.e. arguments are addressed at
// decreasing addresses from the {arguments_} base pointer.
TNode<RawPtr<Object>> CodeStubArguments::AtIndexPtr(
    Node* index, CodeStubAssembler::ParameterMode mode) const {
  typedef compiler::Node Node;
  Node* negated_index = assembler_->IntPtrOrSmiSub(
      assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
  Node* offset = assembler_->ElementOffsetFromIndex(negated_index,
                                                    PACKED_ELEMENTS, mode, 0);
  return assembler_->UncheckedCast<RawPtr<Object>>(assembler_->IntPtrAdd(
      assembler_->UncheckedCast<IntPtrT>(arguments_), offset));
}
10291 :
// Loads argument {index} as a tagged value. {mode} must match the mode the
// argument count was given in; a CSA assert bounds-checks the index against
// the argument count.
TNode<Object> CodeStubArguments::AtIndex(
    Node* index, CodeStubAssembler::ParameterMode mode) const {
  DCHECK_EQ(argc_mode_, mode);
  CSA_ASSERT(assembler_,
             assembler_->UintPtrOrSmiLessThan(index, GetLength(), mode));
  return assembler_->UncheckedCast<Object>(
      assembler_->Load(MachineType::AnyTagged(), AtIndexPtr(index, mode)));
}
10300 :
10301 2511 : TNode<Object> CodeStubArguments::AtIndex(int index) const {
10302 5022 : return AtIndex(assembler_->IntPtrConstant(index));
10303 : }
10304 :
// Returns argument {index} if the caller passed at least {index} + 1
// arguments, or {default_value} otherwise. Emits a runtime branch on the
// argument count.
TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
    int index, SloppyTNode<Object> default_value) {
  CodeStubAssembler::TVariable<Object> result(assembler_);
  CodeStubAssembler::Label argument_missing(assembler_),
      argument_done(assembler_, &result);

  // index >= argc means the argument was not supplied.
  assembler_->GotoIf(assembler_->UintPtrOrSmiGreaterThanOrEqual(
                         assembler_->IntPtrOrSmiConstant(index, argc_mode_),
                         argc_, argc_mode_),
                     &argument_missing);
  result = AtIndex(index);
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_missing);
  result = default_value;
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_done);
  return result;
}
10325 :
// Emits a loop that invokes {body} for every argument in [{first}, {last}).
// A null {first} defaults to 0 and a null {last} defaults to the argument
// count. Offsets are subtracted from the {arguments_} base and the loop
// advances by -kPointerSize per iteration, i.e. it walks down in memory.
void CodeStubArguments::ForEach(
    const CodeStubAssembler::VariableList& vars,
    const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
    CodeStubAssembler::ParameterMode mode) {
  assembler_->Comment("CodeStubArguments::ForEach");
  if (first == nullptr) {
    first = assembler_->IntPtrOrSmiConstant(0, mode);
  }
  if (last == nullptr) {
    // Iterate through the whole argument list by default.
    DCHECK_EQ(mode, argc_mode_);
    last = argc_;
  }
  Node* start = assembler_->IntPtrSub(
      assembler_->UncheckedCast<IntPtrT>(arguments_),
      assembler_->ElementOffsetFromIndex(first, PACKED_ELEMENTS, mode));
  Node* end = assembler_->IntPtrSub(
      assembler_->UncheckedCast<IntPtrT>(arguments_),
      assembler_->ElementOffsetFromIndex(last, PACKED_ELEMENTS, mode));
  assembler_->BuildFastLoop(vars, start, end,
                            [this, &body](Node* current) {
                              Node* arg = assembler_->Load(
                                  MachineType::AnyTagged(), current);
                              body(arg);
                            },
                            -kPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
                            CodeStubAssembler::IndexAdvanceMode::kPost);
}
10353 :
10354 3472 : void CodeStubArguments::PopAndReturn(Node* value) {
10355 : Node* pop_count;
10356 3472 : if (receiver_mode_ == ReceiverMode::kHasReceiver) {
10357 : pop_count = assembler_->IntPtrOrSmiAdd(
10358 6944 : argc_, assembler_->IntPtrOrSmiConstant(1, argc_mode_), argc_mode_);
10359 : } else {
10360 : pop_count = argc_;
10361 : }
10362 3472 : assembler_->PopAndReturn(pop_count, value);
10363 3472 : }
10364 :
10365 899 : Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
10366 : return Uint32LessThanOrEqual(elements_kind,
10367 2697 : Int32Constant(LAST_FAST_ELEMENTS_KIND));
10368 : }
10369 :
// Returns a word32 boolean: is the fast {elements_kind} a holey variant?
// Relies on the encoding where each holey kind is its packed counterpart
// with bit 0 set (verified by the static asserts below), so testing bit 0
// suffices.
Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
  CSA_ASSERT(this, IsFastElementsKind(elements_kind));

  STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
  STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
  return IsSetWord32(elements_kind, 1);
}
10378 :
10379 806 : Node* CodeStubAssembler::IsElementsKindGreaterThan(
10380 : Node* target_kind, ElementsKind reference_kind) {
10381 2418 : return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
10382 : }
10383 :
10384 1401 : Node* CodeStubAssembler::IsDebugActive() {
10385 : Node* is_debug_active = Load(
10386 : MachineType::Uint8(),
10387 2802 : ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
10388 4203 : return Word32NotEqual(is_debug_active, Int32Constant(0));
10389 : }
10390 :
10391 1641 : Node* CodeStubAssembler::IsPromiseHookEnabledOrDebugIsActive() {
10392 : Node* const promise_hook_or_debug_is_active =
10393 : Load(MachineType::Uint8(),
10394 : ExternalConstant(
10395 : ExternalReference::promise_hook_or_debug_is_active_address(
10396 3282 : isolate())));
10397 4923 : return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0));
10398 : }
10399 :
// Allocates a JSFunction (without a prototype slot) with the given {map},
// {shared_info} and {context}. The function's code is taken from the shared
// info; the feedback vector slot is initialized to the undefined cell.
Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
                                                           Node* shared_info,
                                                           Node* context) {
  CSA_SLOW_ASSERT(this, IsMap(map));

  Node* const code =
      LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset);

  // TODO(ishell): All the callers of this function pass map loaded from
  // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove
  // map parameter.
  CSA_ASSERT(this, Word32BinaryNot(IsConstructorMap(map)));
  CSA_ASSERT(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)));
  Node* const fun = Allocate(JSFunction::kSizeWithoutPrototype);
  StoreMapNoWriteBarrier(fun, map);
  StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(fun, JSFunction::kFeedbackVectorOffset,
                       Heap::kUndefinedCellRootIndex);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
  return fun;
}
10427 :
// Allocates a PromiseReactionJobInfo and initializes all of its fields from
// the given values. All stores are no-write-barrier on the fresh object.
Node* CodeStubAssembler::AllocatePromiseReactionJobInfo(
    Node* value, Node* tasks, Node* deferred_promise, Node* deferred_on_resolve,
    Node* deferred_on_reject, Node* context) {
  Node* const result = Allocate(PromiseReactionJobInfo::kSize);
  StoreMapNoWriteBarrier(result, Heap::kPromiseReactionJobInfoMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kValueOffset,
                                 value);
  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kTasksOffset,
                                 tasks);
  StoreObjectFieldNoWriteBarrier(
      result, PromiseReactionJobInfo::kDeferredPromiseOffset, deferred_promise);
  StoreObjectFieldNoWriteBarrier(
      result, PromiseReactionJobInfo::kDeferredOnResolveOffset,
      deferred_on_resolve);
  StoreObjectFieldNoWriteBarrier(
      result, PromiseReactionJobInfo::kDeferredOnRejectOffset,
      deferred_on_reject);
  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kContextOffset,
                                 context);
  return result;
}
10449 :
10450 0 : Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
10451 : StackFrame::Type frame_type) {
10452 : return WordEqual(marker_or_function,
10453 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
10454 : }
10455 :
10456 93 : Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
10457 : StackFrame::Type frame_type) {
10458 : return WordNotEqual(marker_or_function,
10459 279 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
10460 : }
10461 :
// Walks the prototype chain of {receiver} verifying that neither the receiver
// nor any prototype carries elements, and that each prototype (but not the
// receiver itself) has an empty enum cache. Jumps to {if_fast} when the whole
// chain is clean (for-in can use the fast path) and to {if_slow} otherwise.
void CodeStubAssembler::CheckPrototypeEnumCache(Node* receiver,
                                                Node* receiver_map,
                                                Label* if_fast,
                                                Label* if_slow) {
  VARIABLE(var_object, MachineRepresentation::kTagged, receiver);
  VARIABLE(var_object_map, MachineRepresentation::kTagged, receiver_map);

  Label loop(this, {&var_object, &var_object_map}), done_loop(this);
  Goto(&loop);
  BIND(&loop);
  {
    // Check that there are no elements on the current {object}.
    Label if_no_elements(this);
    Node* object = var_object.value();
    Node* object_map = var_object_map.value();

    // The following relies on the elements only aliasing with JSProxy::target,
    // which is a Javascript value and hence cannot be confused with an elements
    // backing store.
    STATIC_ASSERT(JSObject::kElementsOffset == JSProxy::kTargetOffset);
    Node* object_elements = LoadObjectField(object, JSObject::kElementsOffset);
    // Both the empty FixedArray and the empty slow-element dictionary count
    // as "no elements".
    GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
    GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);

    // It might still be an empty JSArray.
    GotoIfNot(IsJSArrayMap(object_map), if_slow);
    Node* object_length = LoadJSArrayLength(object);
    Branch(WordEqual(object_length, SmiConstant(0)), &if_no_elements, if_slow);

    // Continue with the {object}s prototype.
    BIND(&if_no_elements);
    object = LoadMapPrototype(object_map);
    // Reaching the end of the prototype chain without finding elements means
    // the fast path is safe.
    GotoIf(IsNull(object), if_fast);

    // For all {object}s but the {receiver}, check that the cache is empty.
    var_object.Bind(object);
    object_map = LoadMap(object);
    var_object_map.Bind(object_map);
    Node* object_enum_length = LoadMapEnumLength(object_map);
    Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
  }
}
10504 :
// Decides how a for-in over {receiver} can be implemented: jumps to
// {if_empty} when the receiver (and its prototype chain) has nothing to
// enumerate, to {if_runtime} when a runtime call is required, and otherwise
// falls through returning the receiver's map (usable with the enum cache).
Node* CodeStubAssembler::CheckEnumCache(Node* receiver, Label* if_empty,
                                        Label* if_runtime) {
  Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
  Node* receiver_map = LoadMap(receiver);

  // Check if the enum length field of the {receiver} is properly initialized,
  // indicating that there is an enum cache.
  Node* receiver_enum_length = LoadMapEnumLength(receiver_map);
  Branch(WordEqual(receiver_enum_length,
                   IntPtrConstant(kInvalidEnumCacheSentinel)),
         &if_no_cache, &if_cache);

  BIND(&if_no_cache);
  {
    // Avoid runtime-call for empty dictionary receivers.
    GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
    Node* properties = LoadSlowProperties(receiver);
    Node* length = LoadFixedArrayElement(
        properties, NameDictionary::kNumberOfElementsIndex);
    // Any live entry in the dictionary forces the runtime path.
    GotoIfNot(WordEqual(length, SmiConstant(0)), if_runtime);
    // Check that there are no elements on the {receiver} and its prototype
    // chain. Given that we do not create an EnumCache for dict-mode objects,
    // directly jump to {if_empty} if there are no elements and no properties
    // on the {receiver}.
    CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
  }

  // Check that there are no elements on the fast {receiver} and its
  // prototype chain.
  BIND(&if_cache);
  CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);

  BIND(&if_fast);
  return receiver_map;
}
10540 :
10541 0 : void CodeStubAssembler::Print(const char* s) {
10542 0 : std::string formatted(s);
10543 : formatted += "\n";
10544 : CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
10545 0 : StringConstant(formatted.c_str()));
10546 0 : }
10547 :
10548 0 : void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
10549 0 : if (prefix != nullptr) {
10550 0 : std::string formatted(prefix);
10551 : formatted += ": ";
10552 : Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
10553 0 : formatted.c_str(), TENURED);
10554 : CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
10555 : HeapConstant(string));
10556 : }
10557 : CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
10558 0 : }
10559 :
10560 : } // namespace internal
10561 : } // namespace v8
|