Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 : #include "src/code-stub-assembler.h"
5 : #include "src/code-factory.h"
6 : #include "src/frames-inl.h"
7 : #include "src/frames.h"
8 :
9 : namespace v8 {
10 : namespace internal {
11 :
12 : using compiler::Node;
13 :
14 116711 : CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
15 : : compiler::CodeAssembler(state) {
16 : if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
17 : HandleBreakOnNode();
18 : }
19 116711 : }
20 :
21 0 : void CodeStubAssembler::HandleBreakOnNode() {
22 : // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
23 : // string specifying the name of a stub and NODE is number specifying node id.
24 0 : const char* name = state()->name();
25 0 : size_t name_length = strlen(name);
26 0 : if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
27 : // Different name.
28 0 : return;
29 : }
30 0 : size_t option_length = strlen(FLAG_csa_trap_on_node);
31 0 : if (option_length < name_length + 2 ||
32 0 : FLAG_csa_trap_on_node[name_length] != ',') {
33 : // Option is too short.
34 : return;
35 : }
36 0 : const char* start = &FLAG_csa_trap_on_node[name_length + 1];
37 : char* end;
38 0 : int node_id = static_cast<int>(strtol(start, &end, 10));
39 0 : if (start == end) {
40 : // Bad node id.
41 : return;
42 : }
43 0 : BreakOnNode(node_id);
44 : }
45 :
46 0 : void CodeStubAssembler::Assert(const NodeGenerator& codition_body,
47 : const char* message, const char* file,
48 : int line) {
49 : #if defined(DEBUG)
50 : if (FLAG_debug_code) {
51 : Label ok(this);
52 : Label not_ok(this, Label::kDeferred);
53 : if (message != nullptr && FLAG_code_comments) {
54 : Comment("[ Assert: %s", message);
55 : } else {
56 : Comment("[ Assert");
57 : }
58 : Node* condition = codition_body();
59 : DCHECK_NOT_NULL(condition);
60 : Branch(condition, &ok, ¬_ok);
61 : BIND(¬_ok);
62 : if (message != nullptr) {
63 : char chars[1024];
64 : Vector<char> buffer(chars);
65 : if (file != nullptr) {
66 : SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file,
67 : line);
68 : } else {
69 : SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
70 : }
71 : CallRuntime(
72 : Runtime::kGlobalPrint, SmiConstant(Smi::kZero),
73 : HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));
74 : }
75 : DebugBreak();
76 : Goto(&ok);
77 : BIND(&ok);
78 : Comment("] Assert");
79 : }
80 : #endif
81 0 : }
82 :
// Emits a branch diamond: evaluates {true_body} or {false_body} depending
// on {condition} and joins both results into a variable of representation
// {rep}, whose value is returned. Both generators are instantiated into
// the graph; only one side executes at runtime.
Node* CodeStubAssembler::Select(Node* condition, const NodeGenerator& true_body,
                                const NodeGenerator& false_body,
                                MachineRepresentation rep) {
  VARIABLE(value, rep);
  Label vtrue(this), vfalse(this), end(this);
  Branch(condition, &vtrue, &vfalse);

  BIND(&vtrue);
  {
    value.Bind(true_body());
    Goto(&end);
  }
  BIND(&vfalse);
  {
    value.Bind(false_body());
    Goto(&end);
  }

  BIND(&end);
  return value.value();
}
104 :
105 6657 : Node* CodeStubAssembler::SelectConstant(Node* condition, Node* true_value,
106 : Node* false_value,
107 : MachineRepresentation rep) {
108 6657 : return Select(condition, [=] { return true_value; },
109 26628 : [=] { return false_value; }, rep);
110 : }
111 :
112 0 : Node* CodeStubAssembler::SelectInt32Constant(Node* condition, int true_value,
113 : int false_value) {
114 : return SelectConstant(condition, Int32Constant(true_value),
115 : Int32Constant(false_value),
116 0 : MachineRepresentation::kWord32);
117 : }
118 :
119 344 : Node* CodeStubAssembler::SelectIntPtrConstant(Node* condition, int true_value,
120 : int false_value) {
121 : return SelectConstant(condition, IntPtrConstant(true_value),
122 : IntPtrConstant(false_value),
123 344 : MachineType::PointerRepresentation());
124 : }
125 :
126 2049 : Node* CodeStubAssembler::SelectBooleanConstant(Node* condition) {
127 : return SelectConstant(condition, TrueConstant(), FalseConstant(),
128 2049 : MachineRepresentation::kTagged);
129 : }
130 :
131 215 : Node* CodeStubAssembler::SelectTaggedConstant(Node* condition, Node* true_value,
132 : Node* false_value) {
133 : return SelectConstant(condition, true_value, false_value,
134 516 : MachineRepresentation::kTagged);
135 : }
136 :
137 2666 : Node* CodeStubAssembler::SelectSmiConstant(Node* condition, Smi* true_value,
138 : Smi* false_value) {
139 : return SelectConstant(condition, SmiConstant(true_value),
140 : SmiConstant(false_value),
141 2666 : MachineRepresentation::kTaggedSigned);
142 : }
143 :
144 102389 : Node* CodeStubAssembler::NoContextConstant() { return NumberConstant(0); }
145 :
// For each (rootName, name) pair in HEAP_CONSTANT_LIST, defines a
// name##Constant() accessor that loads the corresponding heap root.
#define HEAP_CONSTANT_ACCESSOR(rootName, name) \
  Node* CodeStubAssembler::name##Constant() {  \
    return LoadRoot(Heap::k##rootName##RootIndex); \
  }
HEAP_CONSTANT_LIST(HEAP_CONSTANT_ACCESSOR);
#undef HEAP_CONSTANT_ACCESSOR
152 :
// For each (rootName, name) pair in HEAP_CONSTANT_LIST, defines an
// Is##name(value) predicate comparing {value} against that root by
// pointer equality.
#define HEAP_CONSTANT_TEST(rootName, name) \
  Node* CodeStubAssembler::Is##name(Node* value) { \
    return WordEqual(value, name##Constant()); \
  }
HEAP_CONSTANT_LIST(HEAP_CONSTANT_TEST);
#undef HEAP_CONSTANT_TEST
159 :
// Loads the hash seed root untagged, as a raw Word32.
Node* CodeStubAssembler::HashSeed() {
  return LoadAndUntagToWord32Root(Heap::kHashSeedRootIndex);
}
163 :
// Loads the stale-register sentinel from the roots array.
Node* CodeStubAssembler::StaleRegisterConstant() {
  return LoadRoot(Heap::kStaleRegisterRootIndex);
}
167 :
168 56011 : Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
169 56011 : if (mode == SMI_PARAMETERS) {
170 2329 : return SmiConstant(Smi::FromInt(value));
171 : } else {
172 : DCHECK_EQ(INTPTR_PARAMETERS, mode);
173 53682 : return IntPtrConstant(value);
174 : }
175 : }
176 :
177 1677 : bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test) {
178 : int32_t constant_test;
179 : Smi* smi_test;
180 4687 : if ((ToInt32Constant(test, constant_test) && constant_test == 0) ||
181 1333 : (ToSmiConstant(test, smi_test) && smi_test->value() == 0)) {
182 : return true;
183 : }
184 1333 : return false;
185 : }
186 :
187 351 : Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) {
188 351 : Comment("IntPtrRoundUpToPowerOfTwo32");
189 : CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
190 351 : value = IntPtrSub(value, IntPtrConstant(1));
191 2106 : for (int i = 1; i <= 16; i *= 2) {
192 1755 : value = WordOr(value, WordShr(value, IntPtrConstant(i)));
193 : }
194 351 : return IntPtrAdd(value, IntPtrConstant(1));
195 : }
196 :
197 0 : Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) {
198 : // value && !(value & (value - 1))
199 : return WordEqual(
200 : Select(
201 : WordEqual(value, IntPtrConstant(0)),
202 0 : [=] { return IntPtrConstant(1); },
203 0 : [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); },
204 : MachineType::PointerRepresentation()),
205 0 : IntPtrConstant(0));
206 : }
207 :
// Rounds {x} to the nearest integer with ties towards +Infinity:
// computes ceil(x), then subtracts one when ceil(x) - 0.5 > x.
Node* CodeStubAssembler::Float64Round(Node* x) {
  Node* one = Float64Constant(1.0);
  Node* one_half = Float64Constant(0.5);

  Label return_x(this);

  // Round up {x} towards Infinity.
  VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));

  GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
         &return_x);
  var_x.Bind(Float64Sub(var_x.value(), one));
  Goto(&return_x);

  BIND(&return_x);
  return var_x.value();
}
225 :
// Computes ceil(x). Uses the Float64RoundUp machine instruction when
// available; otherwise emulates it with the 2^52 add/subtract trick
// (adding 2^52 forces rounding at integer granularity), handling
// negative inputs via negation.
Node* CodeStubAssembler::Float64Ceil(Node* x) {
  if (IsFloat64RoundUpSupported()) {
    return Float64RoundUp(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return var_x.value();
}
277 :
// Computes floor(x). Uses the Float64RoundDown machine instruction when
// available; otherwise emulates it with the 2^52 add/subtract trick,
// handling negative inputs via negation.
Node* CodeStubAssembler::Float64Floor(Node* x) {
  if (IsFloat64RoundDownSupported()) {
    return Float64RoundDown(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return var_x.value();
}
329 :
// Rounds {x} to the nearest integer with ties-to-even semantics. Uses the
// machine round-ties-even instruction when available; otherwise chooses
// between floor(x) and floor(x)+1, breaking exact .5 ties on the parity
// of floor(x).
Node* CodeStubAssembler::Float64RoundToEven(Node* x) {
  if (IsFloat64RoundTiesEvenSupported()) {
    return Float64RoundTiesEven(x);
  }
  // See ES#sec-touint8clamp for details.
  Node* f = Float64Floor(x);
  Node* f_and_half = Float64Add(f, Float64Constant(0.5));

  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label return_f(this), return_f_plus_one(this), done(this);

  GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
  GotoIf(Float64LessThan(x, f_and_half), &return_f);
  {
    // Exact tie: pick the even neighbour.
    Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
    Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
           &return_f_plus_one);
  }

  BIND(&return_f);
  var_result.Bind(f);
  Goto(&done);

  BIND(&return_f_plus_one);
  var_result.Bind(Float64Add(f, Float64Constant(1.0)));
  Goto(&done);

  BIND(&done);
  return var_result.value();
}
360 :
// Computes trunc(x) (round towards zero). Uses the machine truncate
// instruction when available; otherwise rounds down for positive inputs
// and up for non-positive ones, falling back to the 2^52 add/subtract
// trick where the directional round instructions are unsupported too.
Node* CodeStubAssembler::Float64Trunc(Node* x) {
  if (IsFloat64RoundTruncateSupported()) {
    return Float64RoundTruncate(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    if (IsFloat64RoundDownSupported()) {
      var_x.Bind(Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    if (IsFloat64RoundUpSupported()) {
      var_x.Bind(Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless its in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoIfNot(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return var_x.value();
}
421 :
// Number of bits a value is shifted left to become a tagged Smi
// (tag bits plus the platform's Smi shift size).
Node* CodeStubAssembler::SmiShiftBitsConstant() {
  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}
425 :
426 6005 : Node* CodeStubAssembler::SmiFromWord32(Node* value) {
427 6005 : value = ChangeInt32ToIntPtr(value);
428 6005 : return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
429 : }
430 :
431 192590 : Node* CodeStubAssembler::SmiTag(Node* value) {
432 : int32_t constant_value;
433 192590 : if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
434 172358 : return SmiConstant(Smi::FromInt(constant_value));
435 : }
436 106411 : return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
437 : }
438 :
439 76123 : Node* CodeStubAssembler::SmiUntag(Node* value) {
440 76123 : return WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant());
441 : }
442 :
443 29896 : Node* CodeStubAssembler::SmiToWord32(Node* value) {
444 29896 : Node* result = SmiUntag(value);
445 29896 : return TruncateWordToWord32(result);
446 : }
447 :
448 15581 : Node* CodeStubAssembler::SmiToFloat64(Node* value) {
449 15581 : return ChangeInt32ToFloat64(SmiToWord32(value));
450 : }
451 :
452 86 : Node* CodeStubAssembler::SmiMax(Node* a, Node* b) {
453 172 : return SelectTaggedConstant(SmiLessThan(a, b), b, a);
454 : }
455 :
456 215 : Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
457 430 : return SelectTaggedConstant(SmiLessThan(a, b), a, b);
458 : }
459 :
// Computes {a} % {b} for two Smi inputs with JS semantics: NaN for a zero
// divisor, and -0 when the remainder is zero but the left-hand side is
// negative. The result may therefore be a HeapNumber.
Node* CodeStubAssembler::SmiMod(Node* a, Node* b) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_result(this, &var_result),
      return_minuszero(this, Label::kDeferred),
      return_nan(this, Label::kDeferred);

  // Untag {a} and {b}.
  a = SmiToWord32(a);
  b = SmiToWord32(b);

  // Return NaN if {b} is zero.
  GotoIf(Word32Equal(b, Int32Constant(0)), &return_nan);

  // Check if {a} is non-negative.
  Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
  Branch(Int32LessThanOrEqual(Int32Constant(0), a), &if_aisnotnegative,
         &if_aisnegative);

  BIND(&if_aisnotnegative);
  {
    // Fast case, don't need to check any other edge cases.
    Node* r = Int32Mod(a, b);
    var_result.Bind(SmiFromWord32(r));
    Goto(&return_result);
  }

  BIND(&if_aisnegative);
  {
    if (SmiValuesAre32Bits()) {
      // Check if {a} is kMinInt and {b} is -1 (only relevant if the
      // kMinInt is actually representable as a Smi).
      Label join(this);
      GotoIfNot(Word32Equal(a, Int32Constant(kMinInt)), &join);
      GotoIf(Word32Equal(b, Int32Constant(-1)), &return_minuszero);
      Goto(&join);
      BIND(&join);
    }

    // Perform the integer modulus operation.
    Node* r = Int32Mod(a, b);

    // Check if {r} is zero, and if so return -0, because we have to
    // take the sign of the left hand side {a}, which is negative.
    GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);

    // The remainder {r} can be outside the valid Smi range on 32bit
    // architectures, so we cannot just say SmiFromWord32(r) here.
    var_result.Bind(ChangeInt32ToTagged(r));
    Goto(&return_result);
  }

  BIND(&return_minuszero);
  var_result.Bind(MinusZeroConstant());
  Goto(&return_result);

  BIND(&return_nan);
  var_result.Bind(NanConstant());
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
522 :
// Multiplies two Smis with JS semantics: on 32-bit overflow the product is
// recomputed in Float64 and boxed as a HeapNumber, and a zero product
// with a negative factor yields -0.
Node* CodeStubAssembler::SmiMul(Node* a, Node* b) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
  VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
  Label return_result(this, &var_result);

  // Both {a} and {b} are Smis. Convert them to integers and multiply.
  Node* lhs32 = SmiToWord32(a);
  Node* rhs32 = SmiToWord32(b);
  Node* pair = Int32MulWithOverflow(lhs32, rhs32);

  Node* overflow = Projection(1, pair);

  // Check if the multiplication overflowed.
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_notoverflow);
  {
    // If the answer is zero, we may need to return -0.0, depending on the
    // input.
    Label answer_zero(this), answer_not_zero(this);
    Node* answer = Projection(0, pair);
    Node* zero = Int32Constant(0);
    Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
    BIND(&answer_not_zero);
    {
      var_result.Bind(ChangeInt32ToTagged(answer));
      Goto(&return_result);
    }
    BIND(&answer_zero);
    {
      // A negative factor means the mathematical product was -0.
      Node* or_result = Word32Or(lhs32, rhs32);
      Label if_should_be_negative_zero(this), if_should_be_zero(this);
      Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
             &if_should_be_zero);
      BIND(&if_should_be_negative_zero);
      {
        var_result.Bind(MinusZeroConstant());
        Goto(&return_result);
      }
      BIND(&if_should_be_zero);
      {
        var_result.Bind(SmiConstant(0));
        Goto(&return_result);
      }
    }
  }
  BIND(&if_overflow);
  {
    var_lhs_float64.Bind(SmiToFloat64(a));
    var_rhs_float64.Bind(SmiToFloat64(b));
    Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
583 :
584 301 : Node* CodeStubAssembler::TrySmiDiv(Node* dividend, Node* divisor,
585 : Label* bailout) {
586 : // Both {a} and {b} are Smis. Bailout to floating point division if {divisor}
587 : // is zero.
588 301 : GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);
589 :
590 : // Do floating point division if {dividend} is zero and {divisor} is
591 : // negative.
592 301 : Label dividend_is_zero(this), dividend_is_not_zero(this);
593 : Branch(WordEqual(dividend, SmiConstant(0)), ÷nd_is_zero,
594 301 : ÷nd_is_not_zero);
595 :
596 301 : Bind(÷nd_is_zero);
597 : {
598 301 : GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
599 301 : Goto(÷nd_is_not_zero);
600 : }
601 301 : Bind(÷nd_is_not_zero);
602 :
603 301 : Node* untagged_divisor = SmiToWord32(divisor);
604 301 : Node* untagged_dividend = SmiToWord32(dividend);
605 :
606 : // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
607 : // if the Smi size is 31) and {divisor} is -1.
608 301 : Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
609 : Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
610 301 : &divisor_is_minus_one, &divisor_is_not_minus_one);
611 :
612 301 : Bind(&divisor_is_minus_one);
613 : {
614 : GotoIf(Word32Equal(
615 : untagged_dividend,
616 : Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
617 301 : bailout);
618 301 : Goto(&divisor_is_not_minus_one);
619 : }
620 301 : Bind(&divisor_is_not_minus_one);
621 :
622 301 : Node* untagged_result = Int32Div(untagged_dividend, untagged_divisor);
623 301 : Node* truncated = Int32Mul(untagged_result, untagged_divisor);
624 :
625 : // Do floating point division if the remainder is not 0.
626 301 : GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);
627 :
628 602 : return SmiFromWord32(untagged_result);
629 : }
630 :
631 35821 : Node* CodeStubAssembler::TruncateWordToWord32(Node* value) {
632 35821 : if (Is64()) {
633 35821 : return TruncateInt64ToInt32(value);
634 : }
635 : return value;
636 : }
637 :
638 127145 : Node* CodeStubAssembler::TaggedIsSmi(Node* a) {
639 : return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
640 127145 : IntPtrConstant(0));
641 : }
642 :
643 2350 : Node* CodeStubAssembler::TaggedIsNotSmi(Node* a) {
644 : return WordNotEqual(
645 : WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
646 2350 : IntPtrConstant(0));
647 : }
648 :
649 2662 : Node* CodeStubAssembler::TaggedIsPositiveSmi(Node* a) {
650 : return WordEqual(WordAnd(BitcastTaggedToWord(a),
651 : IntPtrConstant(kSmiTagMask | kSmiSignMask)),
652 2662 : IntPtrConstant(0));
653 : }
654 :
655 0 : Node* CodeStubAssembler::WordIsWordAligned(Node* word) {
656 : return WordEqual(IntPtrConstant(0),
657 0 : WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1)));
658 : }
659 :
// Walks the prototype chain starting at {receiver_map} and branches to
// {definitely_no_elements} when every prototype up to null has empty
// elements, or to {possibly_elements} as soon as a prototype might carry
// elements.
void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
    Node* receiver_map, Label* definitely_no_elements,
    Label* possibly_elements) {
  VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
  Label loop_body(this, &var_map);
  Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
  Goto(&loop_body);

  BIND(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    // Reached the end of the chain without finding elements.
    GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements);
    Node* prototype_map = LoadMap(prototype);
    // Pessimistically assume elements if a Proxy, Special API Object,
    // or JSValue wrapper is found on the prototype chain. After this
    // instance type check, it's not necessary to check for interceptors or
    // access checks.
    GotoIf(Int32LessThanOrEqual(LoadMapInstanceType(prototype_map),
                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
           possibly_elements);
    GotoIf(WordNotEqual(LoadElements(prototype), empty_elements),
           possibly_elements);
    var_map.Bind(prototype_map);
    Goto(&loop_body);
  }
}
687 :
// Branches to {if_true} when {object} is a JSReceiver (instance type at or
// above FIRST_JS_RECEIVER_TYPE), otherwise to {if_false}. Smis go to
// {if_false}.
void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
                                           Label* if_false) {
  GotoIf(TaggedIsSmi(object), if_false);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  Branch(Int32GreaterThanOrEqual(LoadInstanceType(object),
                                 Int32Constant(FIRST_JS_RECEIVER_TYPE)),
         if_true, if_false);
}
696 :
// Branches to {if_true} when {object} is a JSObject (instance type at or
// above FIRST_JS_OBJECT_TYPE), otherwise to {if_false}. Smis go to
// {if_false}.
void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true,
                                         Label* if_false) {
  GotoIf(TaggedIsSmi(object), if_false);
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  Branch(Int32GreaterThanOrEqual(LoadInstanceType(object),
                                 Int32Constant(FIRST_JS_OBJECT_TYPE)),
         if_true, if_false);
}
705 :
// Branches to {if_true} when {object} is a JSArray with fast elements
// suitable for {mode} access; otherwise to {if_false}. For INBOUNDS_READ,
// holey arrays additionally require an elements-free prototype chain.
void CodeStubAssembler::BranchIfFastJSArray(
    Node* object, Node* context, CodeStubAssembler::FastJSArrayAccessMode mode,
    Label* if_true, Label* if_false) {
  // Bailout if receiver is a Smi.
  GotoIf(TaggedIsSmi(object), if_false);

  Node* map = LoadMap(object);

  // Bailout if instance type is not JS_ARRAY_TYPE.
  GotoIf(Word32NotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)),
         if_false);

  Node* elements_kind = LoadMapElementsKind(map);

  // Bailout if receiver has slow elements.
  GotoIfNot(IsFastElementsKind(elements_kind), if_false);

  // Check prototype chain if receiver does not have packed elements.
  if (mode == FastJSArrayAccessMode::INBOUNDS_READ) {
    GotoIfNot(IsHoleyFastElementsKind(elements_kind), if_true);
  }
  BranchIfPrototypesHaveNoElements(map, if_true, if_false);
}
729 :
730 97229 : Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags,
731 : Node* top_address, Node* limit_address) {
732 97229 : Node* top = Load(MachineType::Pointer(), top_address);
733 97229 : Node* limit = Load(MachineType::Pointer(), limit_address);
734 :
735 : // If there's not enough space, call the runtime.
736 97229 : VARIABLE(result, MachineRepresentation::kTagged);
737 97229 : Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
738 97229 : Label merge_runtime(this, &result);
739 :
740 97229 : bool needs_double_alignment = flags & kDoubleAlignment;
741 :
742 97229 : if (flags & kAllowLargeObjectAllocation) {
743 : Label next(this);
744 1634 : GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
745 :
746 : Node* runtime_flags = SmiConstant(
747 1634 : Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
748 3268 : AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
749 : Node* const runtime_result =
750 : CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
751 3268 : SmiTag(size_in_bytes), runtime_flags);
752 1634 : result.Bind(runtime_result);
753 1634 : Goto(&merge_runtime);
754 :
755 1634 : BIND(&next);
756 : }
757 :
758 194458 : VARIABLE(adjusted_size, MachineType::PointerRepresentation(), size_in_bytes);
759 :
760 97229 : if (needs_double_alignment) {
761 0 : Label not_aligned(this), done_alignment(this, &adjusted_size);
762 :
763 : Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), ¬_aligned,
764 0 : &done_alignment);
765 :
766 0 : BIND(¬_aligned);
767 0 : Node* not_aligned_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
768 0 : adjusted_size.Bind(not_aligned_size);
769 0 : Goto(&done_alignment);
770 :
771 0 : BIND(&done_alignment);
772 : }
773 :
774 97229 : Node* new_top = IntPtrAdd(top, adjusted_size.value());
775 :
776 : Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
777 97229 : &no_runtime_call);
778 :
779 97229 : BIND(&runtime_call);
780 : Node* runtime_result;
781 97229 : if (flags & kPretenured) {
782 : Node* runtime_flags = SmiConstant(
783 57715 : Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
784 115430 : AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
785 : runtime_result =
786 : CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
787 115430 : SmiTag(size_in_bytes), runtime_flags);
788 : } else {
789 : runtime_result = CallRuntime(Runtime::kAllocateInNewSpace,
790 79028 : NoContextConstant(), SmiTag(size_in_bytes));
791 : }
792 97229 : result.Bind(runtime_result);
793 97229 : Goto(&merge_runtime);
794 :
795 : // When there is enough space, return `top' and bump it up.
796 97229 : BIND(&no_runtime_call);
797 : Node* no_runtime_result = top;
798 : StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
799 97229 : new_top);
800 :
801 194458 : VARIABLE(address, MachineType::PointerRepresentation(), no_runtime_result);
802 :
803 97229 : if (needs_double_alignment) {
804 0 : Label needs_filler(this), done_filling(this, &address);
805 : Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling,
806 0 : &needs_filler);
807 :
808 0 : BIND(&needs_filler);
809 : // Store a filler and increase the address by kPointerSize.
810 : StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
811 0 : LoadRoot(Heap::kOnePointerFillerMapRootIndex));
812 0 : address.Bind(IntPtrAdd(no_runtime_result, IntPtrConstant(4)));
813 :
814 0 : Goto(&done_filling);
815 :
816 0 : BIND(&done_filling);
817 : }
818 :
819 : no_runtime_result = BitcastWordToTagged(
820 97229 : IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
821 :
822 97229 : result.Bind(no_runtime_result);
823 97229 : Goto(&merge_runtime);
824 :
825 97229 : BIND(&merge_runtime);
826 194458 : return result.value();
827 : }
828 :
// Allocates {size_in_bytes} without double-alignment handling; {flags}
// must not request double alignment.
Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                              AllocationFlags flags,
                                              Node* top_address,
                                              Node* limit_address) {
  DCHECK((flags & kDoubleAlignment) == 0);
  return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
}
836 :
// Allocates {size_in_bytes} with double (8-byte) alignment. On 64-bit
// hosts every allocation is already double aligned, so the alignment
// request is stripped there.
Node* CodeStubAssembler::AllocateRawDoubleAligned(Node* size_in_bytes,
                                                  AllocationFlags flags,
                                                  Node* top_address,
                                                  Node* limit_address) {
#if defined(V8_HOST_ARCH_32_BIT)
  return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
                     limit_address);
#elif defined(V8_HOST_ARCH_64_BIT)
  // Allocation on 64 bit machine is naturally double aligned
  return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
                     limit_address);
#else
#error Architecture not supported
#endif
}
852 :
// Allocates {size_in_bytes} in new space. Only kNone or kDoubleAlignment
// may be requested, and the size must fit a regular page.
Node* CodeStubAssembler::AllocateInNewSpace(Node* size_in_bytes,
                                            AllocationFlags flags) {
  DCHECK(flags == kNone || flags == kDoubleAlignment);
  CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
  return Allocate(size_in_bytes, flags);
}
859 :
// Central allocation entry point: picks new vs. old space from the
// kPretenured flag and dispatches on the alignment request.
Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
  Comment("Allocate");
  bool const new_space = !(flags & kPretenured);
  Node* top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  // The limit address is derived below as top_address + kPointerSize, so
  // assert that both spaces really lay out {top, limit} adjacently.
  DCHECK_EQ(kPointerSize,
            ExternalReference::new_space_allocation_limit_address(isolate())
                .address() -
                ExternalReference::new_space_allocation_top_address(isolate())
                    .address());
  DCHECK_EQ(kPointerSize,
            ExternalReference::old_space_allocation_limit_address(isolate())
                .address() -
                ExternalReference::old_space_allocation_top_address(isolate())
                    .address());
  Node* limit_address = IntPtrAdd(top_address, IntPtrConstant(kPointerSize));

  if (flags & kDoubleAlignment) {
    return AllocateRawDoubleAligned(size_in_bytes, flags, top_address,
                                    limit_address);
  } else {
    return AllocateRawUnaligned(size_in_bytes, flags, top_address,
                                limit_address);
  }
}
887 :
888 0 : Node* CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
889 : AllocationFlags flags) {
890 0 : CHECK(flags == kNone || flags == kDoubleAlignment);
891 : DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
892 0 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
893 : }
894 :
895 21843 : Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
896 21843 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
897 : }
898 :
899 2279 : Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
900 2279 : return BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset));
901 : }
902 :
903 1763 : Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
904 1763 : return InnerAllocate(previous, IntPtrConstant(offset));
905 : }
906 :
907 1634 : Node* CodeStubAssembler::IsRegularHeapObjectSize(Node* size) {
908 : return UintPtrLessThanOrEqual(size,
909 1634 : IntPtrConstant(kMaxRegularHeapObjectSize));
910 : }
911 :
// Branches to |if_true|/|if_false| according to ES ToBoolean(value):
// false, Smi 0, the empty string, undetectable objects (null, undefined,
// document.all) and HeapNumbers equal to 0.0/-0.0/NaN are falsy; everything
// else is truthy.
void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
                                                Label* if_false) {
  Label if_valueissmi(this), if_valueisnotsmi(this),
      if_valueisheapnumber(this, Label::kDeferred);

  // Rule out false {value}.
  GotoIf(WordEqual(value, BooleanConstant(false)), if_false);

  // Check if {value} is a Smi or a HeapObject.
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  BIND(&if_valueissmi);
  {
    // The {value} is a Smi, only need to check against zero.
    BranchIfSmiEqual(value, SmiConstant(0), if_false, if_true);
  }

  BIND(&if_valueisnotsmi);
  {
    // Check if {value} is the empty string.
    GotoIf(IsEmptyString(value), if_false);

    // The {value} is a HeapObject, load its map.
    Node* value_map = LoadMap(value);

    // Only null, undefined and document.all have the undetectable bit set,
    // so we can return false immediately when that bit is set.
    Node* value_map_bitfield = LoadMapBitField(value_map);
    Node* value_map_undetectable =
        Word32And(value_map_bitfield, Int32Constant(1 << Map::kIsUndetectable));

    // Check if the {value} is undetectable.
    GotoIfNot(Word32Equal(value_map_undetectable, Int32Constant(0)), if_false);

    // We still need to handle numbers specially, but all other {value}s
    // that make it here yield true.
    Branch(IsHeapNumberMap(value_map), &if_valueisheapnumber, if_true);

    BIND(&if_valueisheapnumber);
    {
      // Load the floating point value of {value}.
      Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
                                          MachineType::Float64());

      // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
      // 0.0 < |value| is false for all three of those inputs.
      Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
             if_true, if_false);
    }
  }
}
962 :
963 430 : Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
964 430 : Node* frame_pointer = LoadFramePointer();
965 430 : return Load(rep, frame_pointer, IntPtrConstant(offset));
966 : }
967 :
968 26503 : Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
969 26503 : Node* frame_pointer = LoadParentFramePointer();
970 26503 : return Load(rep, frame_pointer, IntPtrConstant(offset));
971 : }
972 :
973 33176 : Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
974 : MachineType rep) {
975 33176 : return Load(rep, buffer, IntPtrConstant(offset));
976 : }
977 :
978 626557 : Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
979 : MachineType rep) {
980 626557 : return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
981 : }
982 :
983 6593 : Node* CodeStubAssembler::LoadObjectField(Node* object, Node* offset,
984 : MachineType rep) {
985 6593 : return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
986 : }
987 :
// Loads a Smi field and returns it untagged as a word-sized integer. On
// 64-bit targets the 32-bit payload half of the Smi is read directly
// (avoiding the full 64-bit load + shift); on 32-bit targets the whole
// tagged value is loaded and untagged.
Node* CodeStubAssembler::LoadAndUntagObjectField(Node* object, int offset) {
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the payload lives in the upper half of the word.
    offset += kPointerSize / 2;
#endif
    return ChangeInt32ToInt64(
        LoadObjectField(object, offset, MachineType::Int32()));
  } else {
    return SmiToWord(LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}
999 :
// Like LoadAndUntagObjectField, but produces a 32-bit result. On 64-bit
// targets the Smi payload half is loaded directly as int32.
Node* CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
                                                         int offset) {
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the payload lives in the upper half of the word.
    offset += kPointerSize / 2;
#endif
    return LoadObjectField(object, offset, MachineType::Int32());
  } else {
    return SmiToWord32(
        LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}
1012 :
// Loads a Smi stored at |base|+|index| (raw, untagged base) and returns it
// as an untagged word. Mirrors LoadAndUntagObjectField but for non-heap
// addresses.
Node* CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the Smi payload is the upper 32 bits of the word.
    index += kPointerSize / 2;
#endif
    return ChangeInt32ToInt64(
        Load(MachineType::Int32(), base, IntPtrConstant(index)));
  } else {
    return SmiToWord(
        Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
  }
}
1025 :
// Loads the Smi stored in the roots array at |root_index| and returns it as
// a raw 32-bit integer.
Node* CodeStubAssembler::LoadAndUntagToWord32Root(
    Heap::RootListIndex root_index) {
  Node* roots_array_start =
      ExternalConstant(ExternalReference::roots_array_start(isolate()));
  int index = root_index * kPointerSize;
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the Smi payload is the upper 32 bits of the word.
    index += kPointerSize / 2;
#endif
    return Load(MachineType::Int32(), roots_array_start, IntPtrConstant(index));
  } else {
    return SmiToWord32(Load(MachineType::AnyTagged(), roots_array_start,
                            IntPtrConstant(index)));
  }
}
1041 :
// Stores the untagged integer |value| at |base|+|offset| as a Smi. On
// 64-bit targets this writes the two 32-bit halves separately: zero into
// the tag half and the truncated payload into the payload half, avoiding a
// 64-bit shift.
Node* CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
  if (Is64()) {
    int zero_offset = offset + kPointerSize / 2;
    int payload_offset = offset;
#if V8_TARGET_LITTLE_ENDIAN
    // Little-endian puts the payload in the upper half; swap the targets.
    std::swap(zero_offset, payload_offset);
#endif
    StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                        IntPtrConstant(zero_offset), Int32Constant(0));
    return StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                               IntPtrConstant(payload_offset),
                               TruncateInt64ToInt32(value));
  } else {
    return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
                               IntPtrConstant(offset), SmiTag(value));
  }
}
1059 :
1060 10714 : Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
1061 : return LoadObjectField(object, HeapNumber::kValueOffset,
1062 39855 : MachineType::Float64());
1063 : }
1064 :
1065 57788 : Node* CodeStubAssembler::LoadMap(Node* object) {
1066 161943 : return LoadObjectField(object, HeapObject::kMapOffset);
1067 : }
1068 :
1069 58570 : Node* CodeStubAssembler::LoadInstanceType(Node* object) {
1070 58570 : return LoadMapInstanceType(LoadMap(object));
1071 : }
1072 :
1073 30341 : Node* CodeStubAssembler::HasInstanceType(Node* object,
1074 : InstanceType instance_type) {
1075 30341 : return Word32Equal(LoadInstanceType(object), Int32Constant(instance_type));
1076 : }
1077 :
1078 430 : Node* CodeStubAssembler::DoesntHaveInstanceType(Node* object,
1079 : InstanceType instance_type) {
1080 430 : return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1081 : }
1082 :
1083 8557 : Node* CodeStubAssembler::LoadProperties(Node* object) {
1084 10957 : return LoadObjectField(object, JSObject::kPropertiesOffset);
1085 : }
1086 :
1087 4738 : Node* CodeStubAssembler::LoadElements(Node* object) {
1088 15433 : return LoadObjectField(object, JSObject::kElementsOffset);
1089 : }
1090 :
1091 1935 : Node* CodeStubAssembler::LoadJSArrayLength(Node* array) {
1092 : CSA_ASSERT(this, IsJSArray(array));
1093 2706 : return LoadObjectField(array, JSArray::kLengthOffset);
1094 : }
1095 :
1096 3311 : Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
1097 7393 : return LoadObjectField(array, FixedArrayBase::kLengthOffset);
1098 : }
1099 :
1100 1806 : Node* CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(Node* array) {
1101 3724 : return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1102 : }
1103 :
1104 344 : Node* CodeStubAssembler::LoadMapBitField(Node* map) {
1105 : CSA_SLOW_ASSERT(this, IsMap(map));
1106 11659 : return LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8());
1107 : }
1108 :
1109 559 : Node* CodeStubAssembler::LoadMapBitField2(Node* map) {
1110 : CSA_SLOW_ASSERT(this, IsMap(map));
1111 3834 : return LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8());
1112 : }
1113 :
1114 1204 : Node* CodeStubAssembler::LoadMapBitField3(Node* map) {
1115 : CSA_SLOW_ASSERT(this, IsMap(map));
1116 3583 : return LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32());
1117 : }
1118 :
1119 5138 : Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
1120 78048 : return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8());
1121 : }
1122 :
1123 2974 : Node* CodeStubAssembler::LoadMapElementsKind(Node* map) {
1124 : CSA_SLOW_ASSERT(this, IsMap(map));
1125 : Node* bit_field2 = LoadMapBitField2(map);
1126 2974 : return DecodeWord32<Map::ElementsKindBits>(bit_field2);
1127 : }
1128 :
1129 4644 : Node* CodeStubAssembler::LoadMapDescriptors(Node* map) {
1130 : CSA_SLOW_ASSERT(this, IsMap(map));
1131 5862 : return LoadObjectField(map, Map::kDescriptorsOffset);
1132 : }
1133 :
1134 2709 : Node* CodeStubAssembler::LoadMapPrototype(Node* map) {
1135 : CSA_SLOW_ASSERT(this, IsMap(map));
1136 6278 : return LoadObjectField(map, Map::kPrototypeOffset);
1137 : }
1138 :
1139 43 : Node* CodeStubAssembler::LoadMapPrototypeInfo(Node* map,
1140 : Label* if_no_proto_info) {
1141 : CSA_ASSERT(this, IsMap(map));
1142 : Node* prototype_info =
1143 43 : LoadObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
1144 43 : GotoIf(TaggedIsSmi(prototype_info), if_no_proto_info);
1145 : GotoIfNot(WordEqual(LoadMap(prototype_info),
1146 : LoadRoot(Heap::kPrototypeInfoMapRootIndex)),
1147 86 : if_no_proto_info);
1148 43 : return prototype_info;
1149 : }
1150 :
1151 3581 : Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
1152 : CSA_SLOW_ASSERT(this, IsMap(map));
1153 : return ChangeUint32ToWord(
1154 3581 : LoadObjectField(map, Map::kInstanceSizeOffset, MachineType::Uint8()));
1155 : }
1156 :
1157 781 : Node* CodeStubAssembler::LoadMapInobjectProperties(Node* map) {
1158 : CSA_SLOW_ASSERT(this, IsMap(map));
1159 : // See Map::GetInObjectProperties() for details.
1160 : STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
1161 : CSA_ASSERT(this,
1162 : Int32GreaterThanOrEqual(LoadMapInstanceType(map),
1163 : Int32Constant(FIRST_JS_OBJECT_TYPE)));
1164 : return ChangeUint32ToWord(LoadObjectField(
1165 : map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
1166 781 : MachineType::Uint8()));
1167 : }
1168 :
1169 43 : Node* CodeStubAssembler::LoadMapConstructorFunctionIndex(Node* map) {
1170 : CSA_SLOW_ASSERT(this, IsMap(map));
1171 : // See Map::GetConstructorFunctionIndex() for details.
1172 : STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
1173 : CSA_ASSERT(this, Int32LessThanOrEqual(LoadMapInstanceType(map),
1174 : Int32Constant(LAST_PRIMITIVE_TYPE)));
1175 : return ChangeUint32ToWord(LoadObjectField(
1176 : map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
1177 43 : MachineType::Uint8()));
1178 : }
1179 :
// Returns the constructor of |map|. The constructor-or-back-pointer slot
// may hold a chain of back-pointer maps; follow it until a non-map,
// non-Smi value (the actual constructor) is reached.
Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  VARIABLE(result, MachineRepresentation::kTagged,
           LoadObjectField(map, Map::kConstructorOrBackPointerOffset));

  Label done(this), loop(this, &result);
  Goto(&loop);
  BIND(&loop);
  {
    // A Smi in the slot terminates the walk.
    GotoIf(TaggedIsSmi(result.value()), &done);
    Node* is_map_type =
        Word32Equal(LoadInstanceType(result.value()), Int32Constant(MAP_TYPE));
    GotoIfNot(is_map_type, &done);
    // Still a map: follow the back pointer one more step.
    result.Bind(
        LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset));
    Goto(&loop);
  }
  BIND(&done);
  return result.value();
}
1200 :
// Loads one of the SharedFunctionInfo fields that are stored as raw int32
// on 64-bit targets but as Smis on 32-bit targets, returning it in the
// representation selected by |mode| (Smi for SMI_PARAMETERS, untagged word
// otherwise).
Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField(
    Node* shared, int offset, ParameterMode mode) {
  if (Is64()) {
    // Field is a raw int32; tag or widen as requested.
    Node* result = LoadObjectField(shared, offset, MachineType::Int32());
    if (mode == SMI_PARAMETERS) {
      result = SmiTag(result);
    } else {
      result = ChangeUint32ToWord(result);
    }
    return result;
  } else {
    // Field is a Smi; untag unless the caller wants Smi representation.
    Node* result = LoadObjectField(shared, offset);
    if (mode != SMI_PARAMETERS) {
      result = SmiUntag(result);
    }
    return result;
  }
}
1219 :
1220 408 : Node* CodeStubAssembler::LoadNameHashField(Node* name) {
1221 : CSA_ASSERT(this, IsName(name));
1222 10912 : return LoadObjectField(name, Name::kHashFieldOffset, MachineType::Uint32());
1223 : }
1224 :
1225 5933 : Node* CodeStubAssembler::LoadNameHash(Node* name, Label* if_hash_not_computed) {
1226 : Node* hash_field = LoadNameHashField(name);
1227 5933 : if (if_hash_not_computed != nullptr) {
1228 : GotoIf(Word32Equal(
1229 : Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)),
1230 : Int32Constant(0)),
1231 0 : if_hash_not_computed);
1232 : }
1233 5933 : return Word32Shr(hash_field, Int32Constant(Name::kHashShift));
1234 : }
1235 :
1236 2054 : Node* CodeStubAssembler::LoadStringLength(Node* object) {
1237 : CSA_ASSERT(this, IsString(object));
1238 4433 : return LoadObjectField(object, String::kLengthOffset);
1239 : }
1240 :
1241 119 : Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
1242 : CSA_ASSERT(this, IsJSValue(object));
1243 1337 : return LoadObjectField(object, JSValue::kValueOffset);
1244 : }
1245 :
1246 29180 : Node* CodeStubAssembler::LoadWeakCellValueUnchecked(Node* weak_cell) {
1247 : // TODO(ishell): fix callers.
1248 35286 : return LoadObjectField(weak_cell, WeakCell::kValueOffset);
1249 : }
1250 :
1251 6106 : Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell, Label* if_cleared) {
1252 : CSA_ASSERT(this, IsWeakCell(weak_cell));
1253 : Node* value = LoadWeakCellValueUnchecked(weak_cell);
1254 6106 : if (if_cleared != nullptr) {
1255 3526 : GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared);
1256 : }
1257 6106 : return value;
1258 : }
1259 :
1260 157944 : Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node,
1261 : int additional_offset,
1262 : ParameterMode parameter_mode) {
1263 : int32_t header_size =
1264 157944 : FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
1265 : Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
1266 157944 : parameter_mode, header_size);
1267 157944 : return Load(MachineType::AnyTagged(), object, offset);
1268 : }
1269 :
// Loads the raw element at |index_node| from a typed array's backing store
// (|data_pointer| is the untagged data start), using the machine type
// dictated by |elements_kind|.
Node* CodeStubAssembler::LoadFixedTypedArrayElement(
    Node* data_pointer, Node* index_node, ElementsKind elements_kind,
    ParameterMode parameter_mode) {
  Node* offset =
      ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
  MachineType type;
  switch (elements_kind) {
    case UINT8_ELEMENTS: /* fall through */
    case UINT8_CLAMPED_ELEMENTS:
      type = MachineType::Uint8();
      break;
    case INT8_ELEMENTS:
      type = MachineType::Int8();
      break;
    case UINT16_ELEMENTS:
      type = MachineType::Uint16();
      break;
    case INT16_ELEMENTS:
      type = MachineType::Int16();
      break;
    case UINT32_ELEMENTS:
      type = MachineType::Uint32();
      break;
    case INT32_ELEMENTS:
      type = MachineType::Int32();
      break;
    case FLOAT32_ELEMENTS:
      type = MachineType::Float32();
      break;
    case FLOAT64_ELEMENTS:
      type = MachineType::Float64();
      break;
    default:
      // Non-typed-array elements kinds are invalid here.
      UNREACHABLE();
  }
  return Load(type, data_pointer, offset);
}
1307 :
// Loads a typed-array element and boxes it as a tagged JS number: small
// integer kinds become Smis, 32-bit integers become Smi-or-HeapNumber, and
// float kinds always allocate a HeapNumber.
Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
    Node* data_pointer, Node* index_node, ElementsKind elements_kind,
    ParameterMode parameter_mode) {
  Node* value = LoadFixedTypedArrayElement(data_pointer, index_node,
                                           elements_kind, parameter_mode);
  switch (elements_kind) {
    case ElementsKind::INT8_ELEMENTS:
    case ElementsKind::UINT8_CLAMPED_ELEMENTS:
    case ElementsKind::UINT8_ELEMENTS:
    case ElementsKind::INT16_ELEMENTS:
    case ElementsKind::UINT16_ELEMENTS:
      // <= 16 bits always fits in a Smi.
      return SmiFromWord32(value);
    case ElementsKind::INT32_ELEMENTS:
      return ChangeInt32ToTagged(value);
    case ElementsKind::UINT32_ELEMENTS:
      return ChangeUint32ToTagged(value);
    case ElementsKind::FLOAT32_ELEMENTS:
      // Widen to float64 before boxing; HeapNumbers store float64.
      return AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(value));
    case ElementsKind::FLOAT64_ELEMENTS:
      return AllocateHeapNumberWithValue(value);
    default:
      UNREACHABLE();
      return nullptr;
  }
}
1333 :
// Loads a Smi element of a FixedArray and returns its untagged value as a
// word32. On 64-bit targets only the 32-bit payload half is loaded.
Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
    Node* object, Node* index_node, int additional_offset,
    ParameterMode parameter_mode) {
  int32_t header_size =
      FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
#if V8_TARGET_LITTLE_ENDIAN
  if (Is64()) {
    // On little-endian the Smi payload is the upper half of the word.
    header_size += kPointerSize / 2;
  }
#endif
  Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  if (Is64()) {
    return Load(MachineType::Int32(), object, offset);
  } else {
    return SmiToWord32(Load(MachineType::AnyTagged(), object, offset));
  }
}
1352 :
1353 1211 : Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
1354 : Node* object, Node* index_node, MachineType machine_type,
1355 : int additional_offset, ParameterMode parameter_mode, Label* if_hole) {
1356 : CSA_ASSERT(this, IsFixedDoubleArray(object));
1357 : int32_t header_size =
1358 1211 : FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
1359 : Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS,
1360 1211 : parameter_mode, header_size);
1361 1211 : return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
1362 : }
1363 :
// Loads a float64 at |base|+|offset|. When |if_hole| is given, first checks
// the slot against the hole NaN bit pattern and branches there on a match.
// Passing MachineType::None() performs only the hole check and returns
// nullptr (the value itself is not needed).
Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset,
                                                 Label* if_hole,
                                                 MachineType machine_type) {
  if (if_hole) {
    // TODO(ishell): Compare only the upper part for the hole once the
    // compiler is able to fold addition of already complex |offset| with
    // |kIeeeDoubleExponentWordOffset| into one addressing mode.
    if (Is64()) {
      // Compare the full 64-bit pattern against kHoleNanInt64.
      Node* element = Load(MachineType::Uint64(), base, offset);
      GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
    } else {
      // 32-bit targets: the upper (exponent) word suffices to identify the
      // hole NaN.
      Node* element_upper = Load(
          MachineType::Uint32(), base,
          IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
      GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
             if_hole);
    }
  }
  if (machine_type.IsNone()) {
    // This means the actual value is not needed.
    return nullptr;
  }
  return Load(machine_type, base, offset);
}
1388 :
1389 235513 : Node* CodeStubAssembler::LoadContextElement(Node* context, int slot_index) {
1390 : int offset = Context::SlotOffset(slot_index);
1391 235513 : return Load(MachineType::AnyTagged(), context, IntPtrConstant(offset));
1392 : }
1393 :
1394 774 : Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) {
1395 : Node* offset =
1396 : IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
1397 774 : IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
1398 774 : return Load(MachineType::AnyTagged(), context, offset);
1399 : }
1400 :
1401 43 : Node* CodeStubAssembler::StoreContextElement(Node* context, int slot_index,
1402 : Node* value) {
1403 : int offset = Context::SlotOffset(slot_index);
1404 43 : return Store(context, IntPtrConstant(offset), value);
1405 : }
1406 :
1407 258 : Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index,
1408 : Node* value) {
1409 : Node* offset =
1410 : IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
1411 258 : IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
1412 258 : return Store(context, offset, value);
1413 : }
1414 :
1415 7838 : Node* CodeStubAssembler::StoreContextElementNoWriteBarrier(Node* context,
1416 : int slot_index,
1417 : Node* value) {
1418 : int offset = Context::SlotOffset(slot_index);
1419 : return StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
1420 7838 : IntPtrConstant(offset), value);
1421 : }
1422 :
1423 124321 : Node* CodeStubAssembler::LoadNativeContext(Node* context) {
1424 140087 : return LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX);
1425 : }
1426 :
1427 1075 : Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind,
1428 : Node* native_context) {
1429 : CSA_ASSERT(this, IsNativeContext(native_context));
1430 1075 : return LoadContextElement(native_context, Context::ArrayMapIndex(kind));
1431 : }
1432 :
1433 172 : Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
1434 : return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
1435 10384 : MachineRepresentation::kFloat64);
1436 : }
1437 :
1438 75193 : Node* CodeStubAssembler::StoreObjectField(
1439 : Node* object, int offset, Node* value) {
1440 : DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead.
1441 75193 : return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
1442 : }
1443 :
1444 2494 : Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
1445 : Node* value) {
1446 : int const_offset;
1447 2494 : if (ToInt32Constant(offset, const_offset)) {
1448 0 : return StoreObjectField(object, const_offset, value);
1449 : }
1450 : return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)),
1451 2494 : value);
1452 : }
1453 :
1454 282147 : Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
1455 : Node* object, int offset, Node* value, MachineRepresentation rep) {
1456 : return StoreNoWriteBarrier(rep, object,
1457 282147 : IntPtrConstant(offset - kHeapObjectTag), value);
1458 : }
1459 :
1460 2021 : Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
1461 : Node* object, Node* offset, Node* value, MachineRepresentation rep) {
1462 : int const_offset;
1463 2021 : if (ToInt32Constant(offset, const_offset)) {
1464 258 : return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
1465 : }
1466 : return StoreNoWriteBarrier(
1467 1763 : rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
1468 : }
1469 :
1470 34887 : Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
1471 : CSA_SLOW_ASSERT(this, IsMap(map));
1472 : return StoreWithMapWriteBarrier(
1473 34887 : object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
1474 : }
1475 :
1476 61603 : Node* CodeStubAssembler::StoreMapNoWriteBarrier(
1477 : Node* object, Heap::RootListIndex map_root_index) {
1478 61603 : return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
1479 : }
1480 :
1481 69984 : Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
1482 : CSA_SLOW_ASSERT(this, IsMap(map));
1483 : return StoreNoWriteBarrier(
1484 : MachineRepresentation::kTagged, object,
1485 69984 : IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
1486 : }
1487 :
1488 82692 : Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
1489 : Heap::RootListIndex root_index) {
1490 82692 : if (Heap::RootIsImmortalImmovable(root_index)) {
1491 82692 : return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
1492 : } else {
1493 0 : return StoreObjectField(object, offset, LoadRoot(root_index));
1494 : }
1495 : }
1496 :
1497 139523 : Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
1498 : Node* value,
1499 : WriteBarrierMode barrier_mode,
1500 : int additional_offset,
1501 : ParameterMode parameter_mode) {
1502 : DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
1503 : barrier_mode == UPDATE_WRITE_BARRIER);
1504 : int header_size =
1505 139523 : FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
1506 : Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
1507 139523 : parameter_mode, header_size);
1508 139523 : if (barrier_mode == SKIP_WRITE_BARRIER) {
1509 : return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
1510 70866 : value);
1511 : } else {
1512 68657 : return Store(object, offset, value);
1513 : }
1514 : }
1515 :
1516 767 : Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
1517 : Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
1518 : CSA_ASSERT(this, IsFixedDoubleArray(object));
1519 : Node* offset =
1520 : ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode,
1521 767 : FixedArray::kHeaderSize - kHeapObjectTag);
1522 : MachineRepresentation rep = MachineRepresentation::kFloat64;
1523 767 : return StoreNoWriteBarrier(rep, object, offset, value);
1524 : }
1525 :
// Verifies that fast-path element pushes onto |receiver| are allowed,
// jumping to |bailout| otherwise; returns the receiver's elements kind.
Node* CodeStubAssembler::EnsureArrayPushable(Node* receiver, Label* bailout) {
  // Disallow pushing onto prototypes. It might be the JSArray prototype.
  // Disallow pushing onto non-extensible objects.
  Comment("Disallow pushing onto prototypes");
  Node* map = LoadMap(receiver);
  Node* bit_field2 = LoadMapBitField2(map);
  int mask = static_cast<int>(Map::IsPrototypeMapBits::kMask) |
             (1 << Map::kIsExtensible);
  // Both bits are tested at once: the only acceptable pattern is
  // "extensible set, prototype-map clear".
  Node* test = Word32And(bit_field2, Int32Constant(mask));
  GotoIf(Word32NotEqual(test, Int32Constant(1 << Map::kIsExtensible)), bailout);

  // Disallow pushing onto arrays in dictionary named property mode. We need
  // to figure out whether the length property is still writable.
  Comment("Disallow pushing onto arrays in dictionary named property mode");
  GotoIf(IsDictionaryMap(map), bailout);

  // Check whether the length property is writable. The length property is the
  // only default named property on arrays. It's nonconfigurable, hence is
  // guaranteed to stay the first property.
  Node* descriptors = LoadMapDescriptors(map);
  Node* details =
      LoadFixedArrayElement(descriptors, DescriptorArray::ToDetailsIndex(0));
  GotoIf(IsSetSmi(details, PropertyDetails::kAttributesReadOnlyMask), bailout);

  Node* kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
  return kind;
}
1553 :
// Ensures |var_elements| has capacity for |length| + |growth| elements,
// growing (and rebinding |var_elements|) when needed. Jumps to |bailout|
// if growing fails. |length| and |growth| are already in |mode|
// representation.
void CodeStubAssembler::PossiblyGrowElementsCapacity(
    ParameterMode mode, ElementsKind kind, Node* array, Node* length,
    Variable* var_elements, Node* growth, Label* bailout) {
  Label fits(this, var_elements);
  Node* capacity =
      TaggedToParameter(LoadFixedArrayBaseLength(var_elements->value()), mode);
  // length and growth nodes are already in a ParameterMode appropriate
  // representation.
  Node* new_length = IntPtrOrSmiAdd(growth, length, mode);
  GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
  Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
  var_elements->Bind(GrowElementsCapacity(array, var_elements->value(), kind,
                                          kind, capacity, new_capacity, mode,
                                          bailout));
  Goto(&fits);
  BIND(&fits);
}
1571 :
// Appends the stub arguments starting at |arg_index| to |array|, growing
// the backing store as needed. Returns the new (tagged) array length. On
// failure mid-way, updates |arg_index| to the first unpushed argument,
// writes back the partial length, and jumps to |bailout|.
Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
                                            CodeStubArguments& args,
                                            Variable& arg_index,
                                            Label* bailout) {
  Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
  Label pre_bailout(this);
  Label success(this);
  VARIABLE(var_tagged_length, MachineRepresentation::kTagged);
  ParameterMode mode = OptimalParameterMode();
  VARIABLE(var_length, OptimalParameterRepresentation(),
           TaggedToParameter(LoadJSArrayLength(array), mode));
  VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));

  // Resize the capacity of the fixed array if it doesn't fit.
  Node* first = arg_index.value();
  Node* growth = WordToParameter(IntPtrSub(args.GetLength(), first), mode);
  PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
                               &var_elements, growth, &pre_bailout);

  // Push each argument onto the end of the array now that there is enough
  // capacity.
  CodeStubAssembler::VariableList push_vars({&var_length}, zone());
  Node* elements = var_elements.value();
  args.ForEach(
      push_vars,
      [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
        TryStoreArrayElement(kind, mode, &pre_bailout, elements,
                             var_length.value(), arg);
        Increment(var_length, 1, mode);
      },
      first, nullptr);
  {
    Node* length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length.Bind(length);
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    Goto(&success);
  }

  BIND(&pre_bailout);
  {
    // A push failed (wrong element representation): persist what was
    // pushed so far and advance |arg_index| past it for the slow path.
    Node* length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length.Bind(length);
    Node* diff = SmiSub(length, LoadJSArrayLength(array));
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    arg_index.Bind(IntPtrAdd(arg_index.value(), SmiUntag(diff)));
    Goto(bailout);
  }

  BIND(&success);
  return var_tagged_length.value();
}
1623 :
// Stores |value| at |index| in |elements| if its representation matches
// |kind| (Smi for fast-Smi kinds, Number for double kinds); otherwise jumps
// to |bailout| without storing.
void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
                                             ParameterMode mode, Label* bailout,
                                             Node* elements, Node* index,
                                             Node* value) {
  if (IsFastSmiElementsKind(kind)) {
    GotoIf(TaggedIsNotSmi(value), bailout);
  } else if (IsFastDoubleElementsKind(kind)) {
    GotoIfNotNumber(value, bailout);
  }
  if (IsFastDoubleElementsKind(kind)) {
    // Silence NaNs so the hole NaN pattern can never be stored as a value.
    Node* double_value = ChangeNumberToFloat64(value);
    StoreFixedDoubleArrayElement(elements, index,
                                 Float64SilenceNaN(double_value), mode);
  } else {
    // Smi stores cannot create old-to-new references; skip the barrier.
    WriteBarrierMode barrier_mode =
        IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
    StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
  }
}
1643 :
1644 387 : void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
1645 : Node* value, Label* bailout) {
1646 387 : Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
1647 : ParameterMode mode = OptimalParameterMode();
1648 387 : VARIABLE(var_length, OptimalParameterRepresentation(),
1649 : TaggedToParameter(LoadJSArrayLength(array), mode));
1650 774 : VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
1651 :
1652 : // Resize the capacity of the fixed array if it doesn't fit.
1653 387 : Node* growth = IntPtrOrSmiConstant(1, mode);
1654 : PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
1655 387 : &var_elements, growth, bailout);
1656 :
1657 : // Push each argument onto the end of the array now that there is enough
1658 : // capacity.
1659 : TryStoreArrayElement(kind, mode, bailout, var_elements.value(),
1660 387 : var_length.value(), value);
1661 387 : Increment(var_length, 1, mode);
1662 :
1663 387 : Node* length = ParameterToTagged(var_length.value(), mode);
1664 774 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
1665 387 : }
1666 :
1667 10810 : Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
1668 10810 : Node* result = Allocate(HeapNumber::kSize, kNone);
1669 : Heap::RootListIndex heap_map_index =
1670 : mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
1671 10810 : : Heap::kMutableHeapNumberMapRootIndex;
1672 10810 : StoreMapNoWriteBarrier(result, heap_map_index);
1673 10810 : return result;
1674 : }
1675 :
1676 10212 : Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value,
1677 : MutableMode mode) {
1678 10212 : Node* result = AllocateHeapNumber(mode);
1679 : StoreHeapNumberValue(result, value);
1680 10212 : return result;
1681 : }
1682 :
1683 1032 : Node* CodeStubAssembler::AllocateSeqOneByteString(int length,
1684 : AllocationFlags flags) {
1685 1032 : Comment("AllocateSeqOneByteString");
1686 1032 : if (length == 0) {
1687 0 : return LoadRoot(Heap::kempty_stringRootIndex);
1688 : }
1689 1032 : Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
1690 : DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
1691 1032 : StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
1692 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
1693 1032 : SmiConstant(Smi::FromInt(length)));
1694 : // Initialize both used and unused parts of hash field slot at once.
1695 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
1696 : IntPtrConstant(String::kEmptyHashField),
1697 1032 : MachineType::PointerRepresentation());
1698 1032 : return result;
1699 : }
1700 :
// Allocates a SeqOneByteString of runtime-variable |length| (interpreted per
// |mode|). Length zero returns the canonical empty string; sizes larger than
// a regular heap object fall back to the runtime (which can use large object
// space). The character payload is left uninitialized.
Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
                                                  ParameterMode mode,
                                                  AllocationFlags flags) {
  Comment("AllocateSeqOneByteString");
  VARIABLE(var_result, MachineRepresentation::kTagged);

  // Compute the SeqOneByteString size and check if it fits into new space.
  Label if_lengthiszero(this), if_sizeissmall(this),
      if_notsizeissmall(this, Label::kDeferred), if_join(this);
  GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);

  // Size = header + length bytes, rounded up to object alignment.
  Node* raw_size = GetArrayAllocationSize(
      length, UINT8_ELEMENTS, mode,
      SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
  Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
         &if_sizeissmall, &if_notsizeissmall);

  BIND(&if_sizeissmall);
  {
    // Just allocate the SeqOneByteString in new space.
    Node* result = AllocateInNewSpace(size, flags);
    DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
    StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                   ParameterToTagged(length, mode));
    // Initialize both used and unused parts of hash field slot at once.
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
                                   IntPtrConstant(String::kEmptyHashField),
                                   MachineType::PointerRepresentation());
    var_result.Bind(result);
    Goto(&if_join);
  }

  BIND(&if_notsizeissmall);
  {
    // We might need to allocate in large object space, go to the runtime.
    Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
                               ParameterToTagged(length, mode));
    var_result.Bind(result);
    Goto(&if_join);
  }

  BIND(&if_lengthiszero);
  {
    // The empty string is a canonical root; never allocate it.
    var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
    Goto(&if_join);
  }

  BIND(&if_join);
  return var_result.value();
}
1753 :
1754 1075 : Node* CodeStubAssembler::AllocateSeqTwoByteString(int length,
1755 : AllocationFlags flags) {
1756 1075 : Comment("AllocateSeqTwoByteString");
1757 1075 : if (length == 0) {
1758 0 : return LoadRoot(Heap::kempty_stringRootIndex);
1759 : }
1760 1075 : Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
1761 : DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
1762 1075 : StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
1763 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
1764 1075 : SmiConstant(Smi::FromInt(length)));
1765 : // Initialize both used and unused parts of hash field slot at once.
1766 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
1767 : IntPtrConstant(String::kEmptyHashField),
1768 1075 : MachineType::PointerRepresentation());
1769 1075 : return result;
1770 : }
1771 :
1772 1849 : Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
1773 : ParameterMode mode,
1774 : AllocationFlags flags) {
1775 1849 : Comment("AllocateSeqTwoByteString");
1776 1849 : VARIABLE(var_result, MachineRepresentation::kTagged);
1777 :
1778 : // Compute the SeqTwoByteString size and check if it fits into new space.
1779 1849 : Label if_lengthiszero(this), if_sizeissmall(this),
1780 1849 : if_notsizeissmall(this, Label::kDeferred), if_join(this);
1781 1849 : GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);
1782 :
1783 : Node* raw_size = GetArrayAllocationSize(
1784 : length, UINT16_ELEMENTS, mode,
1785 : SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
1786 1849 : Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
1787 : Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
1788 1849 : &if_sizeissmall, &if_notsizeissmall);
1789 :
1790 1849 : BIND(&if_sizeissmall);
1791 : {
1792 : // Just allocate the SeqTwoByteString in new space.
1793 : Node* result = AllocateInNewSpace(size, flags);
1794 : DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
1795 1849 : StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
1796 : StoreObjectFieldNoWriteBarrier(
1797 : result, SeqTwoByteString::kLengthOffset,
1798 3698 : mode == SMI_PARAMETERS ? length : SmiFromWord(length));
1799 : // Initialize both used and unused parts of hash field slot at once.
1800 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
1801 : IntPtrConstant(String::kEmptyHashField),
1802 1849 : MachineType::PointerRepresentation());
1803 1849 : var_result.Bind(result);
1804 1849 : Goto(&if_join);
1805 : }
1806 :
1807 1849 : BIND(&if_notsizeissmall);
1808 : {
1809 : // We might need to allocate in large object space, go to the runtime.
1810 : Node* result =
1811 : CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
1812 3698 : mode == SMI_PARAMETERS ? length : SmiFromWord(length));
1813 1849 : var_result.Bind(result);
1814 1849 : Goto(&if_join);
1815 : }
1816 :
1817 1849 : BIND(&if_lengthiszero);
1818 : {
1819 1849 : var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
1820 1849 : Goto(&if_join);
1821 : }
1822 :
1823 1849 : BIND(&if_join);
1824 3698 : return var_result.value();
1825 : }
1826 :
1827 1634 : Node* CodeStubAssembler::AllocateSlicedString(
1828 : Heap::RootListIndex map_root_index, Node* length, Node* parent,
1829 : Node* offset) {
1830 : CSA_ASSERT(this, TaggedIsSmi(length));
1831 1634 : Node* result = Allocate(SlicedString::kSize);
1832 : DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
1833 1634 : StoreMapNoWriteBarrier(result, map_root_index);
1834 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
1835 1634 : MachineRepresentation::kTagged);
1836 : // Initialize both used and unused parts of hash field slot at once.
1837 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot,
1838 : IntPtrConstant(String::kEmptyHashField),
1839 1634 : MachineType::PointerRepresentation());
1840 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
1841 1634 : MachineRepresentation::kTagged);
1842 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
1843 1634 : MachineRepresentation::kTagged);
1844 1634 : return result;
1845 : }
1846 :
1847 0 : Node* CodeStubAssembler::AllocateSlicedOneByteString(Node* length, Node* parent,
1848 : Node* offset) {
1849 : return AllocateSlicedString(Heap::kSlicedOneByteStringMapRootIndex, length,
1850 817 : parent, offset);
1851 : }
1852 :
1853 0 : Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent,
1854 : Node* offset) {
1855 : return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
1856 817 : offset);
1857 : }
1858 :
1859 344 : Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
1860 : Node* length, Node* first,
1861 : Node* second,
1862 : AllocationFlags flags) {
1863 : CSA_ASSERT(this, TaggedIsSmi(length));
1864 344 : Node* result = Allocate(ConsString::kSize, flags);
1865 : DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
1866 344 : StoreMapNoWriteBarrier(result, map_root_index);
1867 : StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
1868 344 : MachineRepresentation::kTagged);
1869 : // Initialize both used and unused parts of hash field slot at once.
1870 : StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot,
1871 : IntPtrConstant(String::kEmptyHashField),
1872 344 : MachineType::PointerRepresentation());
1873 : bool const new_space = !(flags & kPretenured);
1874 344 : if (new_space) {
1875 : StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
1876 344 : MachineRepresentation::kTagged);
1877 : StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
1878 344 : MachineRepresentation::kTagged);
1879 : } else {
1880 0 : StoreObjectField(result, ConsString::kFirstOffset, first);
1881 0 : StoreObjectField(result, ConsString::kSecondOffset, second);
1882 : }
1883 344 : return result;
1884 : }
1885 :
1886 0 : Node* CodeStubAssembler::AllocateOneByteConsString(Node* length, Node* first,
1887 : Node* second,
1888 : AllocationFlags flags) {
1889 : return AllocateConsString(Heap::kConsOneByteStringMapRootIndex, length, first,
1890 172 : second, flags);
1891 : }
1892 :
1893 0 : Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first,
1894 : Node* second,
1895 : AllocationFlags flags) {
1896 : return AllocateConsString(Heap::kConsStringMapRootIndex, length, first,
1897 172 : second, flags);
1898 : }
1899 :
// Creates a ConsString joining |left| and |right| with combined length
// |length| (a Smi). The one-byte vs. two-byte cons map is chosen from the
// encoding bits and one-byte data hints of the two inputs' instance types.
Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
                                       Node* right, AllocationFlags flags) {
  CSA_ASSERT(this, TaggedIsSmi(length));
  // Added string can be a cons string.
  Comment("Allocating ConsString");
  Node* left_instance_type = LoadInstanceType(left);
  Node* right_instance_type = LoadInstanceType(right);

  // Compute intersection and difference of instance types.
  Node* anded_instance_types =
      Word32And(left_instance_type, right_instance_type);
  Node* xored_instance_types =
      Word32Xor(left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintTag != 0);
  Label one_byte_map(this);
  Label two_byte_map(this);
  VARIABLE(result, MachineRepresentation::kTagged);
  Label done(this, &result);
  // Check 1: any encoding/hint bit set in the AND of both instance types
  // means both strings are one-byte (or both carry the data hint).
  GotoIf(Word32NotEqual(Word32And(anded_instance_types,
                                  Int32Constant(kStringEncodingMask |
                                                kOneByteDataHintTag)),
                        Int32Constant(0)),
         &one_byte_map);
  // Check 2: the XOR detects the "one string one-byte, the other hinted"
  // combination; anything else takes the two-byte path.
  Branch(Word32NotEqual(Word32And(xored_instance_types,
                                  Int32Constant(kStringEncodingMask |
                                                kOneByteDataHintMask)),
                        Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
         &two_byte_map, &one_byte_map);

  BIND(&one_byte_map);
  Comment("One-byte ConsString");
  result.Bind(AllocateOneByteConsString(length, left, right, flags));
  Goto(&done);

  BIND(&two_byte_map);
  Comment("Two-byte ConsString");
  result.Bind(AllocateTwoByteConsString(length, left, right, flags));
  Goto(&done);

  BIND(&done);

  return result.value();
}
1954 :
// Allocates a JSRegExpResult (a JSArray with extra index/input fields) with
// |length| (a Smi) elements, all initialized to undefined. |length| must not
// exceed the initial max fast-element array size (asserted below).
Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length,
                                              Node* index, Node* input) {
  Node* const max_length =
      SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray));
  CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length));
  USE(max_length);

  // Allocate the JSRegExpResult.
  // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove
  // unneeded store of elements.
  Node* const result = Allocate(JSRegExpResult::kSize);

  // TODO(jgruber): Store map as Heap constant?
  Node* const native_context = LoadNativeContext(context);
  Node* const map =
      LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX);
  StoreMapNoWriteBarrier(result, map);

  // Initialize the header before allocating the elements.
  // The empty fixed array is stored as a placeholder into kElementsOffset
  // first and replaced with the real backing store further down (see the
  // TODO above about removing the duplicate store).
  Node* const empty_array = EmptyFixedArrayConstant();
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kEmptyFixedArrayRootIndex));
  StoreObjectFieldNoWriteBarrier(result, JSArray::kPropertiesOffset,
                                 empty_array);
  StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, empty_array);
  StoreObjectFieldNoWriteBarrier(result, JSArray::kLengthOffset, length);

  StoreObjectFieldNoWriteBarrier(result, JSRegExpResult::kIndexOffset, index);
  StoreObjectField(result, JSRegExpResult::kInputOffset, input);

  Node* const zero = IntPtrConstant(0);
  Node* const length_intptr = SmiUntag(length);
  const ElementsKind elements_kind = FAST_ELEMENTS;

  Node* const elements = AllocateFixedArray(elements_kind, length_intptr);
  StoreObjectField(result, JSArray::kElementsOffset, elements);

  // Fill in the elements with undefined.
  FillFixedArrayWithValue(elements_kind, elements, zero, length_intptr,
                          Heap::kUndefinedValueRootIndex);

  return result;
}
1997 :
1998 344 : Node* CodeStubAssembler::AllocateNameDictionary(int at_least_space_for) {
1999 344 : return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
2000 : }
2001 :
// Allocates and fully initializes a NameDictionary sized for at least
// |at_least_space_for| entries (an intptr). All entry slots are filled with
// undefined; header fields (counts, capacity, enumeration index) are set up.
Node* CodeStubAssembler::AllocateNameDictionary(Node* at_least_space_for) {
  CSA_ASSERT(this, UintPtrLessThanOrEqual(
                       at_least_space_for,
                       IntPtrConstant(NameDictionary::kMaxCapacity)));

  Node* capacity = HashTableComputeCapacity(at_least_space_for);
  CSA_ASSERT(this, WordIsPowerOfTwo(capacity));

  // Total size in bytes: header plus one pointer per backing-array index.
  Node* length = EntryToIndex<NameDictionary>(capacity);
  Node* store_size =
      IntPtrAdd(WordShl(length, IntPtrConstant(kPointerSizeLog2)),
                IntPtrConstant(NameDictionary::kHeaderSize));

  Node* result = AllocateInNewSpace(store_size);
  Comment("Initialize NameDictionary");
  // Initialize FixedArray fields.
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kHashTableMapRootIndex));
  StoreMapNoWriteBarrier(result, Heap::kHashTableMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
                                 SmiFromWord(length));
  // Initialize HashTable fields.
  Node* zero = SmiConstant(0);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
                         zero, SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
                         SmiTag(capacity), SKIP_WRITE_BARRIER);
  // Initialize Dictionary fields.
  Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
  StoreFixedArrayElement(result, NameDictionary::kMaxNumberKeyIndex, filler,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
                         SmiConstant(PropertyDetails::kInitialIndex),
                         SKIP_WRITE_BARRIER);

  // Initialize NameDictionary elements.
  // Bulk-fill [elements start, end of object) with undefined using raw
  // (untagged) addresses; safe without write barriers in new space.
  Node* result_word = BitcastTaggedToWord(result);
  Node* start_address = IntPtrAdd(
      result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
                                      NameDictionary::kElementsStartIndex) -
                                  kHeapObjectTag));
  Node* end_address = IntPtrAdd(
      result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
  return result;
}
2049 :
2050 2370 : Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties,
2051 : Node* elements,
2052 : AllocationFlags flags) {
2053 : CSA_ASSERT(this, IsMap(map));
2054 : Node* size =
2055 2370 : IntPtrMul(LoadMapInstanceSize(map), IntPtrConstant(kPointerSize));
2056 : Node* object = AllocateInNewSpace(size, flags);
2057 2370 : StoreMapNoWriteBarrier(object, map);
2058 2370 : InitializeJSObjectFromMap(object, map, size, properties, elements);
2059 2370 : return object;
2060 : }
2061 :
2062 2370 : void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
2063 : Node* size, Node* properties,
2064 : Node* elements) {
2065 : // This helper assumes that the object is in new-space, as guarded by the
2066 : // check in AllocatedJSObjectFromMap.
2067 2370 : if (properties == nullptr) {
2068 : CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
2069 : StoreObjectFieldRoot(object, JSObject::kPropertiesOffset,
2070 2019 : Heap::kEmptyFixedArrayRootIndex);
2071 : } else {
2072 : StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset,
2073 351 : properties);
2074 : }
2075 2370 : if (elements == nullptr) {
2076 : StoreObjectFieldRoot(object, JSObject::kElementsOffset,
2077 2363 : Heap::kEmptyFixedArrayRootIndex);
2078 : } else {
2079 7 : StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
2080 : }
2081 2370 : InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize);
2082 2370 : }
2083 :
2084 2370 : void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map,
2085 : Node* size, int start_offset) {
2086 : // TODO(cbruni): activate in-object slack tracking machinery.
2087 2370 : Comment("InitializeJSObjectBody");
2088 2370 : Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
2089 : // Calculate the untagged field addresses.
2090 2370 : object = BitcastTaggedToWord(object);
2091 : Node* start_address =
2092 2370 : IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
2093 : Node* end_address =
2094 2370 : IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
2095 2370 : StoreFieldsNoWriteBarrier(start_address, end_address, filler);
2096 2370 : }
2097 :
2098 2721 : void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
2099 : Node* end_address,
2100 : Node* value) {
2101 2721 : Comment("StoreFieldsNoWriteBarrier");
2102 : CSA_ASSERT(this, WordIsWordAligned(start_address));
2103 : CSA_ASSERT(this, WordIsWordAligned(end_address));
2104 : BuildFastLoop(start_address, end_address,
2105 : [this, value](Node* current) {
2106 : StoreNoWriteBarrier(MachineRepresentation::kTagged, current,
2107 2721 : value);
2108 : },
2109 5442 : kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
2110 2721 : }
2111 :
2112 430 : Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
2113 : ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) {
2114 430 : Comment("begin allocation of JSArray without elements");
2115 : int base_size = JSArray::kSize;
2116 430 : if (allocation_site != nullptr) {
2117 : base_size += AllocationMemento::kSize;
2118 : }
2119 :
2120 430 : Node* size = IntPtrConstant(base_size);
2121 : Node* array = AllocateUninitializedJSArray(kind, array_map, length,
2122 430 : allocation_site, size);
2123 430 : return array;
2124 : }
2125 :
2126 : std::pair<Node*, Node*>
2127 1419 : CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
2128 : ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
2129 : Node* capacity, ParameterMode capacity_mode) {
2130 1419 : Comment("begin allocation of JSArray with elements");
2131 : int base_size = JSArray::kSize;
2132 :
2133 1419 : if (allocation_site != nullptr) {
2134 : base_size += AllocationMemento::kSize;
2135 : }
2136 :
2137 : int elements_offset = base_size;
2138 :
2139 : // Compute space for elements
2140 1419 : base_size += FixedArray::kHeaderSize;
2141 1419 : Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
2142 :
2143 : Node* array = AllocateUninitializedJSArray(kind, array_map, length,
2144 1419 : allocation_site, size);
2145 :
2146 1419 : Node* elements = InnerAllocate(array, elements_offset);
2147 1419 : StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements);
2148 :
2149 1419 : return {array, elements};
2150 : }
2151 :
2152 1849 : Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
2153 : Node* array_map,
2154 : Node* length,
2155 : Node* allocation_site,
2156 : Node* size_in_bytes) {
2157 : // Allocate space for the JSArray and the elements FixedArray in one go.
2158 : Node* array = AllocateInNewSpace(size_in_bytes);
2159 :
2160 1849 : Comment("write JSArray headers");
2161 1849 : StoreMapNoWriteBarrier(array, array_map);
2162 :
2163 : CSA_ASSERT(this, TaggedIsSmi(length));
2164 1849 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2165 :
2166 : StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
2167 1849 : Heap::kEmptyFixedArrayRootIndex);
2168 :
2169 1849 : if (allocation_site != nullptr) {
2170 688 : InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
2171 : }
2172 1849 : return array;
2173 : }
2174 :
// Allocates a fully initialized JSArray of elements kind |kind| with the
// given |capacity| and Smi |length|; elements are filled with the hole.
// A constant-zero capacity reuses the shared empty FixedArray (note that in
// that case no allocation memento is created, even if |allocation_site| is
// provided).
Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
                                         Node* capacity, Node* length,
                                         Node* allocation_site,
                                         ParameterMode capacity_mode) {
  Node *array = nullptr, *elements = nullptr;
  if (IsIntPtrOrSmiConstantZero(capacity)) {
    // Array is empty. Use the shared empty fixed array instead of allocating a
    // new one.
    array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length,
                                                        nullptr);
    StoreObjectFieldRoot(array, JSArray::kElementsOffset,
                         Heap::kEmptyFixedArrayRootIndex);
  } else {
    // Allocate both array and elements object, and initialize the JSArray.
    std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
        kind, array_map, length, allocation_site, capacity, capacity_mode);
    // Setup elements object.
    Heap::RootListIndex elements_map_index =
        IsFastDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex
                                       : Heap::kFixedArrayMapRootIndex;
    DCHECK(Heap::RootIsImmortalImmovable(elements_map_index));
    StoreMapNoWriteBarrier(elements, elements_map_index);
    StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
                                   ParameterToTagged(capacity, capacity_mode));
    // Fill in the elements with holes.
    FillFixedArrayWithValue(kind, elements,
                            IntPtrOrSmiConstant(0, capacity_mode), capacity,
                            Heap::kTheHoleValueRootIndex, capacity_mode);
  }

  return array;
}
2207 :
2208 7855 : Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
2209 : Node* capacity_node,
2210 : ParameterMode mode,
2211 : AllocationFlags flags) {
2212 : CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
2213 : IntPtrOrSmiConstant(0, mode), mode));
2214 : Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);
2215 :
2216 : // Allocate both array and elements object, and initialize the JSArray.
2217 7855 : Node* array = Allocate(total_size, flags);
2218 : Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind)
2219 : ? Heap::kFixedDoubleArrayMapRootIndex
2220 7855 : : Heap::kFixedArrayMapRootIndex;
2221 : DCHECK(Heap::RootIsImmortalImmovable(map_index));
2222 7855 : StoreMapNoWriteBarrier(array, map_index);
2223 : StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
2224 7855 : ParameterToTagged(capacity_node, mode));
2225 7855 : return array;
2226 : }
2227 :
// Fills array slots [from_node, to_node) with the root given by
// |value_root_index| (the-hole or undefined, asserted below). For double
// arrays the hole is written as its raw NaN bit pattern via integer stores.
void CodeStubAssembler::FillFixedArrayWithValue(
    ElementsKind kind, Node* array, Node* from_node, Node* to_node,
    Heap::RootListIndex value_root_index, ParameterMode mode) {
  bool is_double = IsFastDoubleElementsKind(kind);
  DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
         value_root_index == Heap::kUndefinedValueRootIndex);
  DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex);
  STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
  Node* double_hole =
      Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
  Node* value = LoadRoot(value_root_index);

  BuildFastFixedArrayForEach(
      array, kind, from_node, to_node,
      [this, value, is_double, double_hole](Node* array, Node* offset) {
        if (is_double) {
          // Don't use doubles to store the hole double, since manipulating the
          // signaling NaN used for the hole in C++, e.g. with bit_cast, will
          // change its value on ia32 (the x87 stack is used to return values
          // and stores to the stack silently clear the signalling bit).
          //
          // TODO(danno): When we have a Float32/Float64 wrapper class that
          // preserves double bits during manipulation, remove this code/change
          // this to an indexed Float64 store.
          if (Is64()) {
            StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
                                double_hole);
          } else {
            // 32-bit target: write the hole pattern as two 32-bit halves.
            StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
                                double_hole);
            StoreNoWriteBarrier(MachineRepresentation::kWord32, array,
                                IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
                                double_hole);
          }
        } else {
          // Tagged store; roots are immortal, so no write barrier is needed.
          StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
                              value);
        }
      },
      mode);
}
2269 :
2270 6544 : void CodeStubAssembler::CopyFixedArrayElements(
2271 : ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
2272 : Node* to_array, Node* element_count, Node* capacity,
2273 : WriteBarrierMode barrier_mode, ParameterMode mode) {
2274 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
2275 : const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
2276 6544 : Comment("[ CopyFixedArrayElements");
2277 :
2278 : // Typed array elements are not supported.
2279 : DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
2280 : DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
2281 :
2282 : Label done(this);
2283 : bool from_double_elements = IsFastDoubleElementsKind(from_kind);
2284 : bool to_double_elements = IsFastDoubleElementsKind(to_kind);
2285 : bool element_size_matches =
2286 6544 : Is64() ||
2287 : IsFastDoubleElementsKind(from_kind) == IsFastDoubleElementsKind(to_kind);
2288 : bool doubles_to_objects_conversion =
2289 7552 : IsFastDoubleElementsKind(from_kind) && IsFastObjectElementsKind(to_kind);
2290 : bool needs_write_barrier =
2291 8221 : doubles_to_objects_conversion || (barrier_mode == UPDATE_WRITE_BARRIER &&
2292 : IsFastObjectElementsKind(to_kind));
2293 : Node* double_hole =
2294 6544 : Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
2295 :
2296 6544 : if (doubles_to_objects_conversion) {
2297 : // If the copy might trigger a GC, make sure that the FixedArray is
2298 : // pre-initialized with holes to make sure that it's always in a
2299 : // consistent state.
2300 : FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
2301 387 : capacity, Heap::kTheHoleValueRootIndex, mode);
2302 6157 : } else if (element_count != capacity) {
2303 : FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
2304 2918 : Heap::kTheHoleValueRootIndex, mode);
2305 : }
2306 :
2307 : Node* limit_offset = ElementOffsetFromIndex(
2308 6544 : IntPtrOrSmiConstant(0, mode), from_kind, mode, first_element_offset);
2309 13088 : VARIABLE(var_from_offset, MachineType::PointerRepresentation(),
2310 : ElementOffsetFromIndex(element_count, from_kind, mode,
2311 : first_element_offset));
2312 : // This second variable is used only when the element sizes of source and
2313 : // destination arrays do not match.
2314 13088 : VARIABLE(var_to_offset, MachineType::PointerRepresentation());
2315 6544 : if (element_size_matches) {
2316 6544 : var_to_offset.Bind(var_from_offset.value());
2317 : } else {
2318 : var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
2319 0 : first_element_offset));
2320 : }
2321 :
2322 6544 : Variable* vars[] = {&var_from_offset, &var_to_offset};
2323 13088 : Label decrement(this, 2, vars);
2324 :
2325 6544 : Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
2326 :
2327 6544 : BIND(&decrement);
2328 : {
2329 : Node* from_offset = IntPtrSub(
2330 : var_from_offset.value(),
2331 6544 : IntPtrConstant(from_double_elements ? kDoubleSize : kPointerSize));
2332 6544 : var_from_offset.Bind(from_offset);
2333 :
2334 : Node* to_offset;
2335 6544 : if (element_size_matches) {
2336 : to_offset = from_offset;
2337 : } else {
2338 : to_offset = IntPtrSub(
2339 : var_to_offset.value(),
2340 0 : IntPtrConstant(to_double_elements ? kDoubleSize : kPointerSize));
2341 0 : var_to_offset.Bind(to_offset);
2342 : }
2343 :
2344 6544 : Label next_iter(this), store_double_hole(this);
2345 : Label* if_hole;
2346 6544 : if (doubles_to_objects_conversion) {
2347 : // The target elements array is already preinitialized with holes, so we
2348 : // can just proceed with the next iteration.
2349 : if_hole = &next_iter;
2350 6157 : } else if (IsFastDoubleElementsKind(to_kind)) {
2351 : if_hole = &store_double_hole;
2352 : } else {
2353 : // In all the other cases don't check for holes and copy the data as is.
2354 : if_hole = nullptr;
2355 : }
2356 :
2357 : Node* value = LoadElementAndPrepareForStore(
2358 6544 : from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
2359 :
2360 6544 : if (needs_write_barrier) {
2361 2064 : Store(to_array, to_offset, value);
2362 4480 : } else if (to_double_elements) {
2363 : StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array, to_offset,
2364 1116 : value);
2365 : } else {
2366 : StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, to_offset,
2367 3364 : value);
2368 : }
2369 6544 : Goto(&next_iter);
2370 :
2371 6544 : if (if_hole == &store_double_hole) {
2372 1116 : BIND(&store_double_hole);
2373 : // Don't use doubles to store the hole double, since manipulating the
2374 : // signaling NaN used for the hole in C++, e.g. with bit_cast, will
2375 : // change its value on ia32 (the x87 stack is used to return values
2376 : // and stores to the stack silently clear the signalling bit).
2377 : //
2378 : // TODO(danno): When we have a Float32/Float64 wrapper class that
2379 : // preserves double bits during manipulation, remove this code/change
2380 : // this to an indexed Float64 store.
2381 1116 : if (Is64()) {
2382 : StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array, to_offset,
2383 1116 : double_hole);
2384 : } else {
2385 : StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array, to_offset,
2386 0 : double_hole);
2387 : StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array,
2388 : IntPtrAdd(to_offset, IntPtrConstant(kPointerSize)),
2389 0 : double_hole);
2390 : }
2391 1116 : Goto(&next_iter);
2392 : }
2393 :
2394 6544 : BIND(&next_iter);
2395 6544 : Node* compare = WordNotEqual(from_offset, limit_offset);
2396 13088 : Branch(compare, &decrement, &done);
2397 : }
2398 :
2399 6544 : BIND(&done);
2400 6544 : IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);
2401 13088 : Comment("] CopyFixedArrayElements");
2402 6544 : }
2403 :
2404 4027 : void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
2405 : Node* from_index, Node* to_index,
2406 : Node* character_count,
2407 : String::Encoding from_encoding,
2408 : String::Encoding to_encoding,
2409 : ParameterMode mode) {
2410 4027 : bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
2411 4027 : bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
2412 : DCHECK_IMPLIES(to_one_byte, from_one_byte);
2413 : Comment("CopyStringCharacters %s -> %s",
2414 : from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING",
2415 4027 : to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");
2416 :
2417 4027 : ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
2418 4027 : ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
2419 : STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
2420 : int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
2421 : Node* from_offset =
2422 4027 : ElementOffsetFromIndex(from_index, from_kind, mode, header_size);
2423 : Node* to_offset =
2424 4027 : ElementOffsetFromIndex(to_index, to_kind, mode, header_size);
2425 4027 : Node* byte_count = ElementOffsetFromIndex(character_count, from_kind, mode);
2426 4027 : Node* limit_offset = IntPtrAdd(from_offset, byte_count);
2427 :
2428 : // Prepare the fast loop
2429 : MachineType type =
2430 4027 : from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
2431 : MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
2432 4027 : : MachineRepresentation::kWord16;
2433 4027 : int from_increment = 1 << ElementsKindToShiftSize(from_kind);
2434 4027 : int to_increment = 1 << ElementsKindToShiftSize(to_kind);
2435 :
2436 4027 : VARIABLE(current_to_offset, MachineType::PointerRepresentation(), to_offset);
2437 4027 : VariableList vars({¤t_to_offset}, zone());
2438 4027 : int to_index_constant = 0, from_index_constant = 0;
2439 4027 : Smi* to_index_smi = nullptr;
2440 4027 : Smi* from_index_smi = nullptr;
2441 4027 : bool index_same = (from_encoding == to_encoding) &&
2442 3977 : (from_index == to_index ||
2443 4686 : (ToInt32Constant(from_index, from_index_constant) &&
2444 1067 : ToInt32Constant(to_index, to_index_constant) &&
2445 3977 : from_index_constant == to_index_constant) ||
2446 3619 : (ToSmiConstant(from_index, from_index_smi) &&
2447 0 : ToSmiConstant(to_index, to_index_smi) &&
2448 0 : to_index_smi == from_index_smi));
2449 : BuildFastLoop(vars, from_offset, limit_offset,
2450 : [this, from_string, to_string, ¤t_to_offset, to_increment,
2451 4027 : type, rep, index_same](Node* offset) {
2452 4027 : Node* value = Load(type, from_string, offset);
2453 : StoreNoWriteBarrier(
2454 : rep, to_string,
2455 4027 : index_same ? offset : current_to_offset.value(), value);
2456 4027 : if (!index_same) {
2457 3669 : Increment(current_to_offset, to_increment);
2458 : }
2459 4027 : },
2460 8054 : from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
2461 4027 : }
2462 :
// Loads an element of |array| at |offset| (interpreted per |from_kind|) and
// converts it into a value that can be stored into a |to_kind| array.
// If |if_hole| is non-null, jumps there when the slot holds the hole;
// if it is null, no hole check is performed.
Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
                                                       Node* offset,
                                                       ElementsKind from_kind,
                                                       ElementsKind to_kind,
                                                       Label* if_hole) {
  if (IsFastDoubleElementsKind(from_kind)) {
    // Raw float64 load; the hole check (if requested) happens inside.
    Node* value =
        LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
    if (!IsFastDoubleElementsKind(to_kind)) {
      // Double -> tagged transition: box the raw float64 in a HeapNumber.
      value = AllocateHeapNumberWithValue(value);
    }
    return value;

  } else {
    Node* value = Load(MachineType::AnyTagged(), array, offset);
    if (if_hole) {
      GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
    }
    if (IsFastDoubleElementsKind(to_kind)) {
      // Tagged -> double transition: unbox to a raw float64.
      if (IsFastSmiElementsKind(from_kind)) {
        value = SmiToFloat64(value);
      } else {
        value = LoadHeapNumberValue(value);
      }
    }
    return value;
  }
}
2491 :
2492 2040 : Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
2493 : ParameterMode mode) {
2494 2040 : Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
2495 2040 : Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
2496 2040 : Node* padding = IntPtrOrSmiConstant(16, mode);
2497 2040 : return IntPtrOrSmiAdd(new_capacity, padding, mode);
2498 : }
2499 :
2500 395 : Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
2501 : ElementsKind kind, Node* key,
2502 : Label* bailout) {
2503 : Node* capacity = LoadFixedArrayBaseLength(elements);
2504 :
2505 : ParameterMode mode = OptimalParameterMode();
2506 : capacity = TaggedToParameter(capacity, mode);
2507 : key = TaggedToParameter(key, mode);
2508 :
2509 : return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
2510 395 : bailout);
2511 : }
2512 :
// Grows |object|'s backing store so that index |key| fits, returning the new
// elements array. Bails out to |bailout| when |key| would leave a gap of
// JSObject::kMaxGap or more past the current |capacity| (such stores should
// go to the runtime, which may switch to dictionary elements).
// |key| and |capacity| are in |mode| representation.
Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
                                                 ElementsKind kind, Node* key,
                                                 Node* capacity,
                                                 ParameterMode mode,
                                                 Label* bailout) {
  Comment("TryGrowElementsCapacity");

  // If the gap growth is too big, fall back to the runtime.
  Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
  Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);

  // Calculate the capacity of the new backing store.
  Node* new_capacity = CalculateNewElementsCapacity(
      IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
  return GrowElementsCapacity(object, elements, kind, kind, capacity,
                              new_capacity, mode, bailout);
}
2531 :
// Allocates a new backing store of |new_capacity| elements (converting from
// |from_kind| to |to_kind| if they differ), copies over the existing
// |capacity| elements, installs the new store on |object|, and returns it.
// Bails out to |bailout| when the new store would be too large to
// bump-pointer allocate in new space.
Node* CodeStubAssembler::GrowElementsCapacity(
    Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
    Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
  Comment("[ GrowElementsCapacity");
  // If size of the allocation for the new capacity doesn't fit in a page
  // that we can bump-pointer allocate from, fall back to the runtime.
  int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(
             new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
         bailout);

  // Allocate the new backing store.
  Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);

  // Copy the elements from the old elements store to the new.
  // The size-check above guarantees that the |new_elements| is allocated
  // in new space so we can skip the write barrier.
  CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
                         new_capacity, SKIP_WRITE_BARRIER, mode);

  StoreObjectField(object, JSObject::kElementsOffset, new_elements);
  Comment("] GrowElementsCapacity");
  return new_elements;
}
2556 :
// Writes an AllocationMemento immediately after |base_allocation| (the
// memento's fields live at |base_allocation_size| bytes past the object's
// start), pointing it at |allocation_site|. When allocation-site pretenuring
// is enabled, also bumps the site's create count so the pretenuring
// heuristic can observe allocation frequency.
void CodeStubAssembler::InitializeAllocationMemento(Node* base_allocation,
                                                    int base_allocation_size,
                                                    Node* allocation_site) {
  StoreObjectFieldNoWriteBarrier(
      base_allocation, AllocationMemento::kMapOffset + base_allocation_size,
      HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map())));
  StoreObjectFieldNoWriteBarrier(
      base_allocation,
      AllocationMemento::kAllocationSiteOffset + base_allocation_size,
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    // count += 1 (stored as a Smi; no write barrier needed for Smis).
    Node* count = LoadObjectField(allocation_site,
                                  AllocationSite::kPretenureCreateCountOffset);
    Node* incremented_count = SmiAdd(count, SmiConstant(Smi::FromInt(1)));
    StoreObjectFieldNoWriteBarrier(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset,
                                   incremented_count);
  }
}
2576 :
// Returns the float64 value of {value} when it is a Smi or a HeapNumber.
// For any other HeapObject, control transfers to {if_valueisnotnumber} and
// no result is produced.
Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
                                            Label* if_valueisnotnumber) {
  Label out(this);
  VARIABLE(var_result, MachineRepresentation::kFloat64);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this), if_valueisnotsmi(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  BIND(&if_valueissmi);
  {
    // Convert the Smi {value}.
    var_result.Bind(SmiToFloat64(value));
    Goto(&out);
  }

  BIND(&if_valueisnotsmi);
  {
    // Check if {value} is a HeapNumber.
    Label if_valueisheapnumber(this);
    Branch(IsHeapNumberMap(LoadMap(value)), &if_valueisheapnumber,
           if_valueisnotnumber);

    BIND(&if_valueisheapnumber);
    {
      // Load the floating point value.
      var_result.Bind(LoadHeapNumberValue(value));
      Goto(&out);
    }
  }
  BIND(&out);
  return var_result.value();
}
2610 :
// Converts any tagged {value} to a float64. Non-number inputs are first run
// through NonNumberToNumber (which can invoke user code via valueOf /
// toString, hence the loop: the conversion result is fed back in).
Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged);
  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  BIND(&loop);
  {
    Label if_valueisnotnumber(this, Label::kDeferred);

    // Load the current {value}.
    value = var_value.value();

    // Convert {value} to Float64 if it is a number and convert it to a number
    // otherwise.
    Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
    var_result.Bind(result);
    Goto(&done_loop);

    BIND(&if_valueisnotnumber);
    {
      // Convert the {value} to a Number first.
      Callable callable = CodeFactory::NonNumberToNumber(isolate());
      var_value.Bind(CallStub(callable, context, value));
      Goto(&loop);
    }
  }
  BIND(&done_loop);
  return var_result.value();
}
2642 :
// Converts any tagged {value} to an int32 via ToNumber + float64 truncation.
// Non-number inputs are converted with NonNumberToNumber (which can run user
// code), then the loop retries with the conversion result.
Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  VARIABLE(var_result, MachineRepresentation::kWord32);
  Label loop(this, &var_value), done_loop(this, &var_result);
  Goto(&loop);
  BIND(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    BIND(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToWord32(value));
      Goto(&done_loop);
    }

    BIND(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(IsHeapNumberMap(LoadMap(value)), &if_valueisheapnumber,
             &if_valueisnotheapnumber);

      BIND(&if_valueisheapnumber);
      {
        // Truncate the floating point value.
        var_result.Bind(TruncateHeapNumberValueToWord32(value));
        Goto(&done_loop);
      }

      BIND(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  BIND(&done_loop);
  return var_result.value();
}
2692 :
2693 4320 : Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
2694 : Node* value = LoadHeapNumberValue(object);
2695 4320 : return TruncateFloat64ToWord32(value);
2696 : }
2697 :
// Tags a float64 {value} as a Smi when it is exactly representable as one,
// otherwise allocates a HeapNumber. A value that round-trips through int32
// is Smi-representable, except -0.0: when value32 == 0 the high word's sign
// bit is inspected so that -0.0 is kept as a HeapNumber (Smi cannot encode
// it).
Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
  Node* value32 = RoundFloat64ToInt32(value);
  Node* value64 = ChangeInt32ToFloat64(value32);

  Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);

  Label if_valueisequal(this), if_valueisnotequal(this);
  Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
  BIND(&if_valueisequal);
  {
    GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
    // value32 == 0: distinguish +0.0 from -0.0 via the sign bit of the
    // high word; -0.0 must become a HeapNumber.
    Branch(Int32LessThan(Float64ExtractHighWord32(value), Int32Constant(0)),
           &if_valueisheapnumber, &if_valueisint32);
  }
  BIND(&if_valueisnotequal);
  Goto(&if_valueisheapnumber);

  VARIABLE(var_result, MachineRepresentation::kTagged);
  BIND(&if_valueisint32);
  {
    if (Is64()) {
      Node* result = SmiTag(ChangeInt32ToInt64(value32));
      var_result.Bind(result);
      Goto(&if_join);
    } else {
      // 32-bit Smi tagging is a left shift by one, done here as a
      // self-addition so overflow (value outside Smi range) is detected.
      Node* pair = Int32AddWithOverflow(value32, value32);
      Node* overflow = Projection(1, pair);
      Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
      Branch(overflow, &if_overflow, &if_notoverflow);
      BIND(&if_overflow);
      Goto(&if_valueisheapnumber);
      BIND(&if_notoverflow);
      {
        Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
        var_result.Bind(result);
        Goto(&if_join);
      }
    }
  }
  BIND(&if_valueisheapnumber);
  {
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&if_join);
  }
  BIND(&if_join);
  return var_result.value();
}
2746 :
// Tags an int32 {value} as a Smi. On 64-bit targets every int32 fits in a
// Smi; on 32-bit targets an out-of-range value is boxed in a HeapNumber.
Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
  if (Is64()) {
    return SmiTag(ChangeInt32ToInt64(value));
  }
  VARIABLE(var_result, MachineRepresentation::kTagged);
  // 32-bit Smi tagging (shift left by one) implemented as a self-addition
  // so overflow can be detected.
  Node* pair = Int32AddWithOverflow(value, value);
  Node* overflow = Projection(1, pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_overflow);
  {
    Node* value64 = ChangeInt32ToFloat64(value);
    Node* result = AllocateHeapNumberWithValue(value64);
    var_result.Bind(result);
  }
  Goto(&if_join);
  BIND(&if_notoverflow);
  {
    Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
    var_result.Bind(result);
  }
  Goto(&if_join);
  BIND(&if_join);
  return var_result.value();
}
2773 :
// Tags a uint32 {value} as a Smi when it fits (<= Smi::kMaxValue), otherwise
// boxes it in a HeapNumber. Note the comparison is unsigned, so all values
// with the top bit set also take the HeapNumber path.
Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  VARIABLE(var_result, MachineRepresentation::kTagged);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow,
         &if_not_overflow);

  BIND(&if_not_overflow);
  {
    if (Is64()) {
      var_result.Bind(SmiTag(ChangeUint32ToUint64(value)));
    } else {
      // If tagging {value} results in an overflow, we need to use a HeapNumber
      // to represent it.
      Node* pair = Int32AddWithOverflow(value, value);
      Node* overflow = Projection(1, pair);
      GotoIf(overflow, &if_overflow);

      Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
      var_result.Bind(result);
    }
  }
  Goto(&if_join);

  BIND(&if_overflow);
  {
    Node* float64_value = ChangeUint32ToFloat64(value);
    var_result.Bind(AllocateHeapNumberWithValue(float64_value));
  }
  Goto(&if_join);

  BIND(&if_join);
  return var_result.value();
}
2809 :
// Implements the receiver coercion used by String.prototype methods:
// returns {value} if it is already a String; converts Smis with
// NumberToString and other objects with ToString; throws a TypeError
// (kThrowCalledOnNullOrUndefined, naming |method_name|) for null/undefined.
Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
                                      char const* method_name) {
  VARIABLE(var_value, MachineRepresentation::kTagged, value);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
      if_valueisstring(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
  BIND(&if_valueisnotsmi);
  {
    // Load the instance type of the {value}.
    Node* value_instance_type = LoadInstanceType(value);

    // Check if the {value} is already String.
    Label if_valueisnotstring(this, Label::kDeferred);
    Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
           &if_valueisnotstring);
    BIND(&if_valueisnotstring);
    {
      // Check if the {value} is null.
      Label if_valueisnullorundefined(this, Label::kDeferred),
          if_valueisnotnullorundefined(this, Label::kDeferred),
          if_valueisnotnull(this, Label::kDeferred);
      Branch(WordEqual(value, NullConstant()), &if_valueisnullorundefined,
             &if_valueisnotnull);
      BIND(&if_valueisnotnull);
      {
        // Check if the {value} is undefined.
        Branch(WordEqual(value, UndefinedConstant()),
               &if_valueisnullorundefined, &if_valueisnotnullorundefined);
        BIND(&if_valueisnotnullorundefined);
        {
          // Convert the {value} to a String.
          Callable callable = CodeFactory::ToString(isolate());
          var_value.Bind(CallStub(callable, context, value));
          Goto(&if_valueisstring);
        }
      }

      BIND(&if_valueisnullorundefined);
      {
        // The {value} is either null or undefined.
        CallRuntime(Runtime::kThrowCalledOnNullOrUndefined, context,
                    HeapConstant(factory()->NewStringFromAsciiChecked(
                        method_name, TENURED)));
        Unreachable();
      }
    }
  }
  BIND(&if_valueissmi);
  {
    // The {value} is a Smi, convert it to a String.
    Callable callable = CodeFactory::NumberToString(isolate());
    var_value.Bind(CallStub(callable, context, value));
    Goto(&if_valueisstring);
  }
  BIND(&if_valueisstring);
  return var_value.value();
}
2869 :
// Unboxes a Number (Smi or HeapNumber) to a raw float64.
// {value} must already be a Number; no conversion or type check is done.
Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) {
  VARIABLE(result, MachineRepresentation::kFloat64);
  Label smi(this);
  Label done(this, &result);
  GotoIf(TaggedIsSmi(value), &smi);
  // HeapNumber case: read the float64 payload directly.
  result.Bind(
      LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64()));
  Goto(&done);

  BIND(&smi);
  {
    result.Bind(SmiToFloat64(value));
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}
2888 :
// Converts a Number (Smi or HeapNumber) to an untagged IntPtr.
// NOTE(review): the HeapNumber path goes through ChangeFloat64ToUintPtr, so
// it presumably assumes a non-negative value — confirm against callers
// before reusing this for possibly-negative numbers.
Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) {
  VARIABLE(result, MachineType::PointerRepresentation());
  Label smi(this), done(this, &result);
  GotoIf(TaggedIsSmi(value), &smi);

  CSA_ASSERT(this, IsHeapNumber(value));
  result.Bind(ChangeFloat64ToUintPtr(LoadHeapNumberValue(value)));
  Goto(&done);

  BIND(&smi);
  result.Bind(SmiToWord(value));
  Goto(&done);

  BIND(&done);
  return result.value();
}
2905 :
// Implements the receiver coercion used by primitive wrapper prototype
// methods (Boolean/Number/String/Symbol.prototype): unwraps JSValue
// wrappers (looping, since the wrapped value could itself be a wrapper),
// accepts a value of the requested |primitive_type|, and otherwise throws
// a TypeError (kNotGeneric) naming |method_name|.
Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
                                     PrimitiveType primitive_type,
                                     char const* method_name) {
  // We might need to loop once due to JSValue unboxing.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  Label loop(this, &var_value), done_loop(this),
      done_throw(this, Label::kDeferred);
  Goto(&loop);
  BIND(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    // A Smi is only acceptable when a Number was requested.
    GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
                                   ? &done_loop
                                   : &done_throw);

    // Load the map of the {value}.
    Node* value_map = LoadMap(value);

    // Load the instance type of the {value}.
    Node* value_instance_type = LoadMapInstanceType(value_map);

    // Check if {value} is a JSValue.
    Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
    Branch(Word32Equal(value_instance_type, Int32Constant(JS_VALUE_TYPE)),
           &if_valueisvalue, &if_valueisnotvalue);

    BIND(&if_valueisvalue);
    {
      // Load the actual value from the {value} wrapper and retry.
      var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
      Goto(&loop);
    }

    BIND(&if_valueisnotvalue);
    {
      // Accept the value only when it matches the requested primitive type.
      switch (primitive_type) {
        case PrimitiveType::kBoolean:
          GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
          break;
        case PrimitiveType::kNumber:
          GotoIf(
              Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
              &done_loop);
          break;
        case PrimitiveType::kString:
          GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
          break;
        case PrimitiveType::kSymbol:
          GotoIf(Word32Equal(value_instance_type, Int32Constant(SYMBOL_TYPE)),
                 &done_loop);
          break;
      }
      Goto(&done_throw);
    }
  }

  BIND(&done_throw);
  {
    // Map the primitive type to its name for the error message.
    const char* primitive_name = nullptr;
    switch (primitive_type) {
      case PrimitiveType::kBoolean:
        primitive_name = "Boolean";
        break;
      case PrimitiveType::kNumber:
        primitive_name = "Number";
        break;
      case PrimitiveType::kString:
        primitive_name = "String";
        break;
      case PrimitiveType::kSymbol:
        primitive_name = "Symbol";
        break;
    }
    CHECK_NOT_NULL(primitive_name);

    // The {value} is not a compatible receiver for this method.
    CallRuntime(Runtime::kThrowTypeError, context,
                SmiConstant(MessageTemplate::kNotGeneric),
                CStringConstant(method_name), CStringConstant(primitive_name));
    Unreachable();
  }

  BIND(&done_loop);
  return var_value.value();
}
2994 :
// Throws kThrowIncompatibleMethodReceiver (naming |method_name|) unless
// {value} is a HeapObject of exactly |instance_type|; returns the value's
// map on success.
Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
                                                InstanceType instance_type,
                                                char const* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  VARIABLE(var_value_map, MachineRepresentation::kTagged);

  // Smis never have the requested instance type.
  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
         &throw_exception);

  // The {value} is not a compatible receiver for this method.
  BIND(&throw_exception);
  CallRuntime(
      Runtime::kThrowIncompatibleMethodReceiver, context,
      HeapConstant(factory()->NewStringFromAsciiChecked(method_name, TENURED)),
      value);
  Unreachable();

  BIND(&out);
  return var_value_map.value();
}
3021 :
3022 86 : Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) {
3023 86 : return Word32Equal(instance_type, Int32Constant(type));
3024 : }
3025 :
// Returns whether |map| describes a "special receiver" (a type requiring
// non-default property handling). In debug mode, additionally asserts the
// invariant that any map with an interceptor or access-check bit set is
// classified as special.
Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) {
  Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
  uint32_t mask =
      1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
  USE(mask);
  // Interceptors or access checks imply special receiver.
  CSA_ASSERT(this,
             SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special,
                            Int32Constant(1), MachineRepresentation::kWord32));
  return is_special;
}
3037 :
3038 817 : Node* CodeStubAssembler::IsDictionaryMap(Node* map) {
3039 : CSA_SLOW_ASSERT(this, IsMap(map));
3040 : Node* bit_field3 = LoadMapBitField3(map);
3041 : return Word32NotEqual(IsSetWord32<Map::DictionaryMap>(bit_field3),
3042 1634 : Int32Constant(0));
3043 : }
3044 :
3045 7001 : Node* CodeStubAssembler::IsCallableMap(Node* map) {
3046 : CSA_ASSERT(this, IsMap(map));
3047 : return Word32NotEqual(
3048 : Word32And(LoadMapBitField(map), Int32Constant(1 << Map::kIsCallable)),
3049 14002 : Int32Constant(0));
3050 : }
3051 :
3052 645 : Node* CodeStubAssembler::IsDeprecatedMap(Node* map) {
3053 : CSA_ASSERT(this, IsMap(map));
3054 645 : return IsSetWord32<Map::Deprecated>(LoadMapBitField3(map));
3055 : }
3056 :
3057 172 : Node* CodeStubAssembler::IsCallable(Node* object) {
3058 172 : return IsCallableMap(LoadMap(object));
3059 : }
3060 :
3061 172 : Node* CodeStubAssembler::IsConstructorMap(Node* map) {
3062 : CSA_ASSERT(this, IsMap(map));
3063 : return Word32NotEqual(
3064 : Word32And(LoadMapBitField(map), Int32Constant(1 << Map::kIsConstructor)),
3065 344 : Int32Constant(0));
3066 : }
3067 :
3068 43 : Node* CodeStubAssembler::IsSpecialReceiverInstanceType(Node* instance_type) {
3069 : STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
3070 : return Int32LessThanOrEqual(instance_type,
3071 43 : Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
3072 : }
3073 :
3074 8320 : Node* CodeStubAssembler::IsStringInstanceType(Node* instance_type) {
3075 : STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
3076 8320 : return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
3077 : }
3078 :
3079 4257 : Node* CodeStubAssembler::IsOneByteStringInstanceType(Node* instance_type) {
3080 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
3081 : return Word32Equal(
3082 : Word32And(instance_type, Int32Constant(kStringEncodingMask)),
3083 4257 : Int32Constant(kOneByteStringTag));
3084 : }
3085 :
3086 2795 : Node* CodeStubAssembler::IsSequentialStringInstanceType(Node* instance_type) {
3087 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
3088 : return Word32Equal(
3089 : Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
3090 2795 : Int32Constant(kSeqStringTag));
3091 : }
3092 :
3093 43 : Node* CodeStubAssembler::IsConsStringInstanceType(Node* instance_type) {
3094 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
3095 : return Word32Equal(
3096 : Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
3097 43 : Int32Constant(kConsStringTag));
3098 : }
3099 :
3100 0 : Node* CodeStubAssembler::IsExternalStringInstanceType(Node* instance_type) {
3101 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
3102 : return Word32Equal(
3103 : Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
3104 0 : Int32Constant(kExternalStringTag));
3105 : }
3106 :
3107 2795 : Node* CodeStubAssembler::IsShortExternalStringInstanceType(
3108 : Node* instance_type) {
3109 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
3110 : STATIC_ASSERT(kShortExternalStringTag != 0);
3111 : return Word32NotEqual(
3112 : Word32And(instance_type, Int32Constant(kShortExternalStringMask)),
3113 2795 : Int32Constant(0));
3114 : }
3115 :
3116 5038 : Node* CodeStubAssembler::IsJSReceiverInstanceType(Node* instance_type) {
3117 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3118 : return Int32GreaterThanOrEqual(instance_type,
3119 5038 : Int32Constant(FIRST_JS_RECEIVER_TYPE));
3120 : }
3121 :
3122 946 : Node* CodeStubAssembler::IsJSReceiver(Node* object) {
3123 : STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
3124 946 : return IsJSReceiverInstanceType(LoadInstanceType(object));
3125 : }
3126 :
3127 172 : Node* CodeStubAssembler::IsJSReceiverMap(Node* map) {
3128 : STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
3129 172 : return IsJSReceiverInstanceType(LoadMapInstanceType(map));
3130 : }
3131 :
3132 0 : Node* CodeStubAssembler::IsJSObject(Node* object) {
3133 : STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
3134 : return Int32GreaterThanOrEqual(LoadInstanceType(object),
3135 0 : Int32Constant(FIRST_JS_RECEIVER_TYPE));
3136 : }
3137 :
// Each of the predicates below returns a Word32 boolean classifying a
// HeapObject (or Map) by instance type. They all unconditionally load the
// map, so callers must have excluded Smis first.

Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) {
  return Word32Equal(LoadInstanceType(object),
                     Int32Constant(JS_GLOBAL_PROXY_TYPE));
}

Node* CodeStubAssembler::IsMap(Node* map) {
  return HasInstanceType(map, MAP_TYPE);
}

// NOTE(review): parameter is named "map" but is used as the object whose
// instance type is checked — presumably a historical naming slip.
Node* CodeStubAssembler::IsJSValue(Node* map) {
  return HasInstanceType(map, JS_VALUE_TYPE);
}

Node* CodeStubAssembler::IsJSArray(Node* object) {
  return HasInstanceType(object, JS_ARRAY_TYPE);
}

Node* CodeStubAssembler::IsWeakCell(Node* object) {
  return IsWeakCellMap(LoadMap(object));
}

Node* CodeStubAssembler::IsBoolean(Node* object) {
  return IsBooleanMap(LoadMap(object));
}

Node* CodeStubAssembler::IsPropertyCell(Node* object) {
  return IsPropertyCellMap(LoadMap(object));
}

Node* CodeStubAssembler::IsAccessorPair(Node* object) {
  return IsAccessorPairMap(LoadMap(object));
}

Node* CodeStubAssembler::IsHeapNumber(Node* object) {
  return IsHeapNumberMap(LoadMap(object));
}

// Names (strings and symbols) occupy the low end of the instance-type enum,
// so a single upper-bound compare suffices.
Node* CodeStubAssembler::IsName(Node* object) {
  return Int32LessThanOrEqual(LoadInstanceType(object),
                              Int32Constant(LAST_NAME_TYPE));
}

// String instance types are exactly those below FIRST_NONSTRING_TYPE.
Node* CodeStubAssembler::IsString(Node* object) {
  return Int32LessThan(LoadInstanceType(object),
                       Int32Constant(FIRST_NONSTRING_TYPE));
}

Node* CodeStubAssembler::IsSymbol(Node* object) {
  return IsSymbolMap(LoadMap(object));
}
3188 :
// Returns a Word32 boolean: is {object} a private Symbol? Non-symbols yield
// false. For symbols, the kPrivateBit of the Smi-encoded flags field is
// tested; Select keeps the flags load on the symbol-only path.
Node* CodeStubAssembler::IsPrivateSymbol(Node* object) {
  return Select(
      IsSymbol(object),
      [=] {
        Node* const flags =
            SmiToWord32(LoadObjectField(object, Symbol::kFlagsOffset));
        const int kPrivateMask = 1 << Symbol::kPrivateBit;
        return IsSetWord32(flags, kPrivateMask);
      },
      [=] { return Int32Constant(0); }, MachineRepresentation::kWord32);
}
3200 :
// More Word32-boolean type predicates. The map-comparison variants check
// identity against a canonical root map rather than the instance type.

Node* CodeStubAssembler::IsNativeContext(Node* object) {
  return WordEqual(LoadMap(object), LoadRoot(Heap::kNativeContextMapRootIndex));
}

Node* CodeStubAssembler::IsFixedDoubleArray(Node* object) {
  return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
}

Node* CodeStubAssembler::IsHashTable(Node* object) {
  return WordEqual(LoadMap(object), LoadRoot(Heap::kHashTableMapRootIndex));
}

// A "dictionary" here is either a generic hash table or an unseeded number
// dictionary (which has its own dedicated map).
Node* CodeStubAssembler::IsDictionary(Node* object) {
  return Word32Or(IsHashTable(object), IsUnseededNumberDictionary(object));
}

Node* CodeStubAssembler::IsUnseededNumberDictionary(Node* object) {
  return WordEqual(LoadMap(object),
                   LoadRoot(Heap::kUnseededNumberDictionaryMapRootIndex));
}

Node* CodeStubAssembler::IsJSFunction(Node* object) {
  return HasInstanceType(object, JS_FUNCTION_TYPE);
}

Node* CodeStubAssembler::IsJSTypedArray(Node* object) {
  return HasInstanceType(object, JS_TYPED_ARRAY_TYPE);
}

Node* CodeStubAssembler::IsJSArrayBuffer(Node* object) {
  return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
}

// Fixed typed arrays occupy a contiguous instance-type range, so membership
// is a pair of bound checks.
Node* CodeStubAssembler::IsFixedTypedArray(Node* object) {
  Node* instance_type = LoadInstanceType(object);
  return Word32And(
      Int32GreaterThanOrEqual(instance_type,
                              Int32Constant(FIRST_FIXED_TYPED_ARRAY_TYPE)),
      Int32LessThanOrEqual(instance_type,
                           Int32Constant(LAST_FIXED_TYPED_ARRAY_TYPE)));
}

Node* CodeStubAssembler::IsJSRegExp(Node* object) {
  return HasInstanceType(object, JS_REGEXP_TYPE);
}
3246 :
// Returns a Word32 boolean: is {object} a Number, i.e. either a Smi or a
// HeapNumber? The Select keeps the map load off the Smi path.
Node* CodeStubAssembler::IsNumber(Node* object) {
  return Select(TaggedIsSmi(object), [=] { return Int32Constant(1); },
                [=] { return IsHeapNumber(object); },
                MachineRepresentation::kWord32);
}
3252 :
// Returns a Word32 boolean: is {number} in canonical form? Smis are always
// normalized. A HeapNumber is normalized unless its value fits in the Smi
// range and is not NaN (such a value should have been stored as a Smi).
Node* CodeStubAssembler::IsNumberNormalized(Node* number) {
  CSA_ASSERT(this, IsNumber(number));

  // Default to "normalized"; only the in-Smi-range HeapNumber path below
  // overwrites this with false.
  VARIABLE(var_result, MachineRepresentation::kWord32, Int32Constant(1));
  Label out(this);

  GotoIf(TaggedIsSmi(number), &out);

  Node* const value = LoadHeapNumberValue(number);
  Node* const smi_min = Float64Constant(static_cast<double>(Smi::kMinValue));
  Node* const smi_max = Float64Constant(static_cast<double>(Smi::kMaxValue));

  // Out-of-Smi-range values legitimately live in a HeapNumber.
  GotoIf(Float64LessThan(value, smi_min), &out);
  GotoIf(Float64GreaterThan(value, smi_max), &out);
  GotoIfNot(Float64Equal(value, value), &out);  // NaN.

  // In-range, non-NaN HeapNumber: should have been a Smi, so not normalized.
  var_result.Bind(Int32Constant(0));
  Goto(&out);

  BIND(&out);
  return var_result.value();
}
3275 :
// Loads the character code (Word32) at {index} of {string}. {index} is
// interpreted per {parameter_mode} and must be in bounds. The string is
// first flattened via ToDirectStringAssembler; indirect strings that cannot
// be unwrapped fall back to %StringCharCodeAtRT, and short external strings
// (no cached data pointer) fall back to %ExternalStringGetChar.
Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index,
                                          ParameterMode parameter_mode) {
  if (parameter_mode == SMI_PARAMETERS) CSA_ASSERT(this, TaggedIsSmi(index));
  CSA_ASSERT(this, IsString(string));

  // Translate the {index} into a Word.
  Node* const int_index = ParameterToWord(index, parameter_mode);
  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(int_index, IntPtrConstant(0)));

  VARIABLE(var_result, MachineRepresentation::kWord32);

  Label out(this, &var_result), runtime_generic(this), runtime_external(this);

  ToDirectStringAssembler to_direct(state(), string);
  Node* const direct_string = to_direct.TryToDirect(&runtime_generic);
  // Sliced strings contribute a start offset into the parent string.
  Node* const offset = IntPtrAdd(int_index, to_direct.offset());
  Node* const instance_type = to_direct.instance_type();

  Node* const string_data = to_direct.PointerToData(&runtime_external);

  // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
  Label if_stringistwobyte(this), if_stringisonebyte(this);
  Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
         &if_stringistwobyte);

  BIND(&if_stringisonebyte);
  {
    var_result.Bind(Load(MachineType::Uint8(), string_data, offset));
    Goto(&out);
  }

  BIND(&if_stringistwobyte);
  {
    // Two-byte characters: scale the element offset by 2.
    var_result.Bind(Load(MachineType::Uint16(), string_data,
                         WordShl(offset, IntPtrConstant(1))));
    Goto(&out);
  }

  BIND(&runtime_generic);
  {
    // Could not reach a direct string; let the runtime flatten and index.
    Node* const smi_index = ParameterToTagged(index, parameter_mode);
    Node* const result = CallRuntime(Runtime::kStringCharCodeAtRT,
                                     NoContextConstant(), string, smi_index);
    var_result.Bind(SmiToWord32(result));
    Goto(&out);
  }

  BIND(&runtime_external);
  {
    // Short external string: data pointer is not cached in the object.
    Node* const result =
        CallRuntime(Runtime::kExternalStringGetChar, NoContextConstant(),
                    direct_string, SmiTag(offset));
    var_result.Bind(SmiToWord32(result));
    Goto(&out);
  }

  BIND(&out);
  return var_result.value();
}
3335 :
// Returns a single-character string for the Word32 char code {code}.
// One-byte codes are served from (and stored into) the isolate's single
// character string cache; two-byte codes always allocate a fresh
// SeqTwoByteString of length 1.
Node* CodeStubAssembler::StringFromCharCode(Node* code) {
  VARIABLE(var_result, MachineRepresentation::kTagged);

  // Check if the {code} is a one-byte char code.
  Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
      if_done(this);
  Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
         &if_codeisonebyte, &if_codeistwobyte);
  BIND(&if_codeisonebyte);
  {
    // Load the isolate wide single character string cache.
    Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);
    Node* code_index = ChangeUint32ToWord(code);

    // Check if we have an entry for the {code} in the single character string
    // cache already.
    Label if_entryisundefined(this, Label::kDeferred),
        if_entryisnotundefined(this);
    Node* entry = LoadFixedArrayElement(cache, code_index);
    Branch(WordEqual(entry, UndefinedConstant()), &if_entryisundefined,
           &if_entryisnotundefined);

    BIND(&if_entryisundefined);
    {
      // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
      Node* result = AllocateSeqOneByteString(1);
      StoreNoWriteBarrier(
          MachineRepresentation::kWord8, result,
          IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
      StoreFixedArrayElement(cache, code_index, result);
      var_result.Bind(result);
      Goto(&if_done);
    }

    BIND(&if_entryisnotundefined);
    {
      // Return the entry from the {cache}.
      var_result.Bind(entry);
      Goto(&if_done);
    }
  }

  BIND(&if_codeistwobyte);
  {
    // Allocate a new SeqTwoByteString for {code}.
    Node* result = AllocateSeqTwoByteString(1);
    StoreNoWriteBarrier(
        MachineRepresentation::kWord16, result,
        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
    var_result.Bind(result);
    Goto(&if_done);
  }

  BIND(&if_done);
  return var_result.value();
}
3392 :
3393 : namespace {
3394 :
// A wrapper around CopyStringCharacters which determines the correct string
// encoding, allocates a corresponding sequential string, and then copies the
// given character range using CopyStringCharacters.
// |from| must be a sequential string (or a "fake sequential" pointer into an
// external string's resource data). |from_index| and |character_count| must
// be Smis s.t.
// 0 <= |from_index| <= |from_index| + |character_count| < from.length.
// The result string uses the same encoding as |from| (per
// |from_instance_type|).
Node* AllocAndCopyStringCharacters(CodeStubAssembler* a, Node* context,
                                   Node* from, Node* from_instance_type,
                                   Node* from_index, Node* character_count) {
  typedef CodeStubAssembler::Label Label;
  typedef CodeStubAssembler::Variable Variable;

  Label end(a), one_byte_sequential(a), two_byte_sequential(a);
  Variable var_result(a, MachineRepresentation::kTagged);

  Node* const smi_zero = a->SmiConstant(Smi::kZero);

  a->Branch(a->IsOneByteStringInstanceType(from_instance_type),
            &one_byte_sequential, &two_byte_sequential);

  // The subject string is a sequential one-byte string.
  a->BIND(&one_byte_sequential);
  {
    Node* result =
        a->AllocateSeqOneByteString(context, a->SmiToWord(character_count));
    a->CopyStringCharacters(from, result, from_index, smi_zero, character_count,
                            String::ONE_BYTE_ENCODING,
                            String::ONE_BYTE_ENCODING,
                            CodeStubAssembler::SMI_PARAMETERS);
    var_result.Bind(result);

    a->Goto(&end);
  }

  // The subject string is a sequential two-byte string.
  a->BIND(&two_byte_sequential);
  {
    Node* result =
        a->AllocateSeqTwoByteString(context, a->SmiToWord(character_count));
    a->CopyStringCharacters(from, result, from_index, smi_zero, character_count,
                            String::TWO_BYTE_ENCODING,
                            String::TWO_BYTE_ENCODING,
                            CodeStubAssembler::SMI_PARAMETERS);
    var_result.Bind(result);

    a->Goto(&end);
  }

  a->BIND(&end);
  return var_result.value();
}
3446 :
3447 : } // namespace
3448 :
// Implements String.prototype.substring-style extraction: returns the
// substring of {string} in [from, to). {from} and {to} must be non-negative
// Smis or the code falls back to %SubString. Fast paths, in order:
//  - length 1: via StringCharCodeAt + StringFromCharCode (cache-backed),
//  - length >= SlicedString::kMinLength: allocate a SlicedString view,
//  - otherwise: copy characters into a fresh sequential string,
//  - full-range request ({0, length}): return {string} unchanged.
// Anything unexpected (bad indices, uncooperative string shapes) goes to
// the runtime.
Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
                                   Node* to) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  ToDirectStringAssembler to_direct(state(), string);
  Label end(this), runtime(this);

  // Make sure first argument is a string.
  CSA_ASSERT(this, TaggedIsNotSmi(string));
  CSA_ASSERT(this, IsString(string));

  // Make sure that both from and to are non-negative smis.

  GotoIfNot(TaggedIsPositiveSmi(from), &runtime);
  GotoIfNot(TaggedIsPositiveSmi(to), &runtime);

  Node* const substr_length = SmiSub(to, from);
  Node* const string_length = LoadStringLength(string);

  // Begin dispatching based on substring length.

  Label original_string_or_invalid_length(this);
  GotoIf(SmiAboveOrEqual(substr_length, string_length),
         &original_string_or_invalid_length);

  // A real substring (substr_length < string_length).

  Label single_char(this);
  GotoIf(SmiEqual(substr_length, SmiConstant(Smi::FromInt(1))), &single_char);

  // TODO(jgruber): Add an additional case for substring of length == 0?

  // Deal with different string types: update the index if necessary
  // and extract the underlying string.

  Node* const direct_string = to_direct.TryToDirect(&runtime);
  Node* const offset = SmiAdd(from, SmiTag(to_direct.offset()));
  Node* const instance_type = to_direct.instance_type();

  // The subject string can only be external or sequential string of either
  // encoding at this point.
  Label external_string(this);
  {
    if (FLAG_string_slices) {
      Label next(this);

      // Short slice. Copy instead of slicing.
      GotoIf(SmiLessThan(substr_length,
                         SmiConstant(Smi::FromInt(SlicedString::kMinLength))),
             &next);

      // Allocate new sliced string.

      Counters* counters = isolate()->counters();
      IncrementCounter(counters->sub_string_native(), 1);

      Label one_byte_slice(this), two_byte_slice(this);
      Branch(IsOneByteStringInstanceType(to_direct.instance_type()),
             &one_byte_slice, &two_byte_slice);

      BIND(&one_byte_slice);
      {
        var_result.Bind(
            AllocateSlicedOneByteString(substr_length, direct_string, offset));
        Goto(&end);
      }

      BIND(&two_byte_slice);
      {
        var_result.Bind(
            AllocateSlicedTwoByteString(substr_length, direct_string, offset));
        Goto(&end);
      }

      BIND(&next);
    }

    // The subject string can only be external or sequential string of either
    // encoding at this point.
    GotoIf(to_direct.is_external(), &external_string);

    var_result.Bind(AllocAndCopyStringCharacters(
        this, context, direct_string, instance_type, offset, substr_length));

    Counters* counters = isolate()->counters();
    IncrementCounter(counters->sub_string_native(), 1);

    Goto(&end);
  }

  // Handle external string.
  BIND(&external_string);
  {
    // PointerToString yields a pointer that indexes like a sequential
    // string; bails out for short external strings.
    Node* const fake_sequential_string = to_direct.PointerToString(&runtime);

    var_result.Bind(
        AllocAndCopyStringCharacters(this, context, fake_sequential_string,
                                     instance_type, offset, substr_length));

    Counters* counters = isolate()->counters();
    IncrementCounter(counters->sub_string_native(), 1);

    Goto(&end);
  }

  // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
  BIND(&single_char);
  {
    Node* char_code = StringCharCodeAt(string, from);
    var_result.Bind(StringFromCharCode(char_code));
    Goto(&end);
  }

  BIND(&original_string_or_invalid_length);
  {
    // Longer than original string's length or negative: unsafe arguments.
    GotoIf(SmiAbove(substr_length, string_length), &runtime);

    // Equal length - check if {from, to} == {0, str.length}.
    GotoIf(SmiAbove(from, SmiConstant(Smi::kZero)), &runtime);

    // Return the original string (substr_length == string_length).

    Counters* counters = isolate()->counters();
    IncrementCounter(counters->sub_string_native(), 1);

    var_result.Bind(string);
    Goto(&end);
  }

  // Fall back to a runtime call.
  BIND(&runtime);
  {
    var_result.Bind(
        CallRuntime(Runtime::kSubString, context, string, from, to));
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
3589 :
// Sets up the state used by TryToDirect/TryToSequential: the current
// (possibly indirect) string, its instance type, the accumulated slice
// offset (starts at 0), and whether the direct string turned out to be
// external (starts false).
ToDirectStringAssembler::ToDirectStringAssembler(
    compiler::CodeAssemblerState* state, Node* string)
    : CodeStubAssembler(state),
      var_string_(this, MachineRepresentation::kTagged, string),
      var_instance_type_(this, MachineRepresentation::kWord32),
      var_offset_(this, MachineType::PointerRepresentation()),
      var_is_external_(this, MachineRepresentation::kWord32) {
  CSA_ASSERT(this, TaggedIsNotSmi(string));
  CSA_ASSERT(this, IsString(string));

  var_string_.Bind(string);
  var_offset_.Bind(IntPtrConstant(0));
  var_instance_type_.Bind(LoadInstanceType(string));
  var_is_external_.Bind(Int32Constant(0));
}
3605 :
// Iteratively unwraps indirect strings (flat cons, sliced, thin) until a
// direct string (sequential or external) is reached, updating the tracked
// string, instance type, and slice offset along the way. Bails out to
// {if_bailout} for non-flat cons strings and unknown representations.
// Returns the direct string; callers query offset()/is_external() for the
// remaining adjustment.
Node* ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
  // These variables are merged at the loop header below.
  VariableList vars({&var_string_, &var_offset_, &var_instance_type_}, zone());
  Label dispatch(this, vars);
  Label if_iscons(this);
  Label if_isexternal(this);
  Label if_issliced(this);
  Label if_isthin(this);
  Label out(this);

  Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
         &dispatch);

  // Dispatch based on string representation.
  BIND(&dispatch);
  {
    int32_t values[] = {
        kSeqStringTag,    kConsStringTag, kExternalStringTag,
        kSlicedStringTag, kThinStringTag,
    };
    Label* labels[] = {
        &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
    };
    STATIC_ASSERT(arraysize(values) == arraysize(labels));

    Node* const representation = Word32And(
        var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
    Switch(representation, if_bailout, values, labels, arraysize(values));
  }

  // Cons string. Check whether it is flat, then fetch first part.
  // Flat cons strings have an empty second part.
  BIND(&if_iscons);
  {
    Node* const string = var_string_.value();
    GotoIfNot(IsEmptyString(LoadObjectField(string, ConsString::kSecondOffset)),
              if_bailout);

    Node* const lhs = LoadObjectField(string, ConsString::kFirstOffset);
    var_string_.Bind(lhs);
    var_instance_type_.Bind(LoadInstanceType(lhs));

    Goto(&dispatch);
  }

  // Sliced string. Fetch parent and correct start index by offset.
  BIND(&if_issliced);
  {
    Node* const string = var_string_.value();
    Node* const sliced_offset =
        LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
    // Offsets accumulate in case of nested indirection.
    var_offset_.Bind(IntPtrAdd(var_offset_.value(), sliced_offset));

    Node* const parent = LoadObjectField(string, SlicedString::kParentOffset);
    var_string_.Bind(parent);
    var_instance_type_.Bind(LoadInstanceType(parent));

    Goto(&dispatch);
  }

  // Thin string. Fetch the actual string.
  BIND(&if_isthin);
  {
    Node* const string = var_string_.value();
    Node* const actual_string =
        LoadObjectField(string, ThinString::kActualOffset);
    Node* const actual_instance_type = LoadInstanceType(actual_string);

    var_string_.Bind(actual_string);
    var_instance_type_.Bind(actual_instance_type);

    Goto(&dispatch);
  }

  // External string.
  BIND(&if_isexternal);
  var_is_external_.Bind(Int32Constant(1));
  Goto(&out);

  BIND(&out);
  return var_string_.value();
}
3687 :
// After TryToDirect, produces a raw pointer for the direct string:
//  - PTR_TO_DATA: pointer to the first character,
//  - PTR_TO_STRING: pointer that indexes like a tagged sequential string
//    (i.e. data pointer minus the sequential-string header size).
// For external strings the cached resource-data pointer is used; short
// external strings (no cached pointer) bail out to {if_bailout}.
Node* ToDirectStringAssembler::TryToSequential(StringPointerKind ptr_kind,
                                               Label* if_bailout) {
  CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);

  VARIABLE(var_result, MachineType::PointerRepresentation());
  Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
  Branch(is_external(), &if_isexternal, &if_issequential);

  BIND(&if_issequential);
  {
    // One- and two-byte sequential strings share a header layout, so a
    // single offset works for both.
    STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
                  SeqTwoByteString::kHeaderSize);
    Node* result = BitcastTaggedToWord(var_string_.value());
    if (ptr_kind == PTR_TO_DATA) {
      result = IntPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
                                                kHeapObjectTag));
    }
    var_result.Bind(result);
    Goto(&out);
  }

  BIND(&if_isexternal);
  {
    GotoIf(IsShortExternalStringInstanceType(var_instance_type_.value()),
           if_bailout);

    Node* const string = var_string_.value();
    Node* result = LoadObjectField(string, ExternalString::kResourceDataOffset,
                                   MachineType::Pointer());
    if (ptr_kind == PTR_TO_STRING) {
      // Rewind so the pointer behaves like a sequential string base.
      result = IntPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
                                                kHeapObjectTag));
    }
    var_result.Bind(result);
    Goto(&out);
  }

  BIND(&out);
  return var_result.value();
}
3728 :
3729 0 : Node* CodeStubAssembler::TryDerefExternalString(Node* const string,
3730 : Node* const instance_type,
3731 : Label* if_bailout) {
3732 0 : Label out(this);
3733 :
3734 : CSA_ASSERT(this, IsExternalStringInstanceType(instance_type));
3735 0 : GotoIf(IsShortExternalStringInstanceType(instance_type), if_bailout);
3736 :
3737 : // Move the pointer so that offset-wise, it looks like a sequential string.
3738 : STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
3739 :
3740 : Node* resource_data = LoadObjectField(
3741 0 : string, ExternalString::kResourceDataOffset, MachineType::Pointer());
3742 : Node* const fake_sequential_string =
3743 : IntPtrSub(resource_data,
3744 0 : IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3745 :
3746 0 : return fake_sequential_string;
3747 : }
3748 :
// If {var_string} holds a thin string or a flat cons string, replaces it
// with the underlying direct string and sets {var_did_something} to 1.
// Relies on ThinString::kActualOffset == ConsString::kFirstOffset so one
// load serves both cases. {instance_type} must match {var_string}'s value.
void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
                                                 Node* instance_type,
                                                 Variable* var_did_something) {
  Label deref(this), done(this, var_did_something);
  Node* representation =
      Word32And(instance_type, Int32Constant(kStringRepresentationMask));
  GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), &deref);
  GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), &done);
  // Cons string.
  Node* rhs = LoadObjectField(var_string->value(), ConsString::kSecondOffset);
  // Only flat cons strings (empty second part) can be dereferenced.
  GotoIf(WordEqual(rhs, EmptyStringConstant()), &deref);
  Goto(&done);

  BIND(&deref);
  STATIC_ASSERT(ThinString::kActualOffset == ConsString::kFirstOffset);
  var_string->Bind(
      LoadObjectField(var_string->value(), ThinString::kActualOffset));
  var_did_something->Bind(IntPtrConstant(1));
  Goto(&done);

  BIND(&done);
}
3771 :
// Applies MaybeDerefIndirectString to both operands and jumps to
// {did_something} if at least one of them was unwrapped, so the caller can
// retry its fast path with the direct strings.
void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
                                                  Node* left_instance_type,
                                                  Variable* var_right,
                                                  Node* right_instance_type,
                                                  Label* did_something) {
  VARIABLE(var_did_something, MachineType::PointerRepresentation(),
           IntPtrConstant(0));
  MaybeDerefIndirectString(var_left, left_instance_type, &var_did_something);
  MaybeDerefIndirectString(var_right, right_instance_type, &var_did_something);

  GotoIf(WordNotEqual(var_did_something.value(), IntPtrConstant(0)),
         did_something);
  // Fall through if neither string was an indirect string.
}
3786 :
// Concatenates {left} and {right}. Fast paths: return the other operand if
// one side is empty; build a ConsString when the result is long enough
// (>= ConsString::kMinLength); otherwise copy both inputs into a fresh
// sequential string when they are sequential and share an encoding. After
// unwrapping indirect operands the sequential attempt is retried once.
// Length overflow or mixed encodings fall back to %StringAdd.
Node* CodeStubAssembler::StringAdd(Node* context, Node* left, Node* right,
                                   AllocationFlags flags) {
  VARIABLE(result, MachineRepresentation::kTagged);
  Label check_right(this), runtime(this, Label::kDeferred), cons(this),
      done(this, &result), done_native(this, &result);
  Counters* counters = isolate()->counters();

  // Empty left operand: the result is {right}. (A zero Smi has the same
  // bit pattern as 0, so a plain word compare suffices.)
  Node* left_length = LoadStringLength(left);
  GotoIf(WordNotEqual(IntPtrConstant(0), left_length), &check_right);
  result.Bind(right);
  Goto(&done_native);

  BIND(&check_right);
  Node* right_length = LoadStringLength(right);
  GotoIf(WordNotEqual(IntPtrConstant(0), right_length), &cons);
  result.Bind(left);
  Goto(&done_native);

  BIND(&cons);
  {
    CSA_ASSERT(this, TaggedIsSmi(left_length));
    CSA_ASSERT(this, TaggedIsSmi(right_length));
    Node* new_length = SmiAdd(left_length, right_length);
    GotoIf(SmiAboveOrEqual(new_length, SmiConstant(String::kMaxLength)),
           &runtime);

    VARIABLE(var_left, MachineRepresentation::kTagged, left);
    VARIABLE(var_right, MachineRepresentation::kTagged, right);
    Variable* input_vars[2] = {&var_left, &var_right};
    Label non_cons(this, 2, input_vars);
    Label slow(this, Label::kDeferred);
    GotoIf(SmiLessThan(new_length, SmiConstant(ConsString::kMinLength)),
           &non_cons);

    result.Bind(NewConsString(context, new_length, var_left.value(),
                              var_right.value(), flags));
    Goto(&done_native);

    BIND(&non_cons);

    Comment("Full string concatenate");
    Node* left_instance_type = LoadInstanceType(var_left.value());
    Node* right_instance_type = LoadInstanceType(var_right.value());
    // Compute intersection and difference of instance types.

    Node* ored_instance_types =
        Word32Or(left_instance_type, right_instance_type);
    Node* xored_instance_types =
        Word32Xor(left_instance_type, right_instance_type);

    // Check if both strings have the same encoding and both are sequential.
    GotoIf(Word32NotEqual(Word32And(xored_instance_types,
                                    Int32Constant(kStringEncodingMask)),
                          Int32Constant(0)),
           &runtime);
    GotoIf(Word32NotEqual(Word32And(ored_instance_types,
                                    Int32Constant(kStringRepresentationMask)),
                          Int32Constant(0)),
           &slow);

    Label two_byte(this);
    GotoIf(Word32Equal(Word32And(ored_instance_types,
                                 Int32Constant(kStringEncodingMask)),
                       Int32Constant(kTwoByteStringTag)),
           &two_byte);
    // One-byte sequential string case
    Node* new_string =
        AllocateSeqOneByteString(context, new_length, SMI_PARAMETERS);
    CopyStringCharacters(var_left.value(), new_string, SmiConstant(Smi::kZero),
                         SmiConstant(Smi::kZero), left_length,
                         String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING,
                         SMI_PARAMETERS);
    CopyStringCharacters(var_right.value(), new_string, SmiConstant(Smi::kZero),
                         left_length, right_length, String::ONE_BYTE_ENCODING,
                         String::ONE_BYTE_ENCODING, SMI_PARAMETERS);
    result.Bind(new_string);
    Goto(&done_native);

    BIND(&two_byte);
    {
      // Two-byte sequential string case
      new_string =
          AllocateSeqTwoByteString(context, new_length, SMI_PARAMETERS);
      CopyStringCharacters(var_left.value(), new_string,
                           SmiConstant(Smi::kZero), SmiConstant(Smi::kZero),
                           left_length, String::TWO_BYTE_ENCODING,
                           String::TWO_BYTE_ENCODING, SMI_PARAMETERS);
      CopyStringCharacters(var_right.value(), new_string,
                           SmiConstant(Smi::kZero), left_length, right_length,
                           String::TWO_BYTE_ENCODING, String::TWO_BYTE_ENCODING,
                           SMI_PARAMETERS);
      result.Bind(new_string);
      Goto(&done_native);
    }

    BIND(&slow);
    {
      // Try to unwrap indirect strings, restart the above attempt on success.
      MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
                                right_instance_type, &non_cons);
      Goto(&runtime);
    }
  }
  BIND(&runtime);
  {
    result.Bind(CallRuntime(Runtime::kStringAdd, context, left, right));
    Goto(&done);
  }

  BIND(&done_native);
  {
    IncrementCounter(counters->string_add_native(), 1);
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}
3905 :
// Builds a string from the Word32 {codepoint}. Codepoints below 0x10000 are
// a single char code (cache-backed via StringFromCharCode). Larger values
// become a length-2 SeqTwoByteString whose two UTF-16 code units are stored
// with one 32-bit write; for UTF32 input the surrogate pair is computed
// first, for UTF16 input the value is assumed to already hold both units.
Node* CodeStubAssembler::StringFromCodePoint(Node* codepoint,
                                             UnicodeEncoding encoding) {
  VARIABLE(var_result, MachineRepresentation::kTagged, EmptyStringConstant());

  Label if_isword16(this), if_isword32(this), return_result(this);

  Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
         &if_isword32);

  BIND(&if_isword16);
  {
    var_result.Bind(StringFromCharCode(codepoint));
    Goto(&return_result);
  }

  BIND(&if_isword32);
  {
    switch (encoding) {
      case UnicodeEncoding::UTF16:
        break;
      case UnicodeEncoding::UTF32: {
        // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
        Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));

        // lead = (codepoint >> 10) + LEAD_OFFSET
        // NOTE(review): WordShr/WordShl operate on word-sized values here
        // while the surrounding ops are Word32 — presumably benign since the
        // inputs fit in 32 bits, but Word32Shr/Word32Shl would be the
        // matching ops; confirm against CodeAssembler conventions.
        Node* lead =
            Int32Add(WordShr(codepoint, Int32Constant(10)), lead_offset);

        // trail = (codepoint & 0x3FF) + 0xDC00;
        Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
                               Int32Constant(0xDC00));

        // codpoint = (trail << 16) | lead;
        codepoint = Word32Or(WordShl(trail, Int32Constant(16)), lead);
        break;
      }
    }

    Node* value = AllocateSeqTwoByteString(2);
    StoreNoWriteBarrier(
        MachineRepresentation::kWord32, value,
        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
        codepoint);
    var_result.Bind(value);
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
3956 :
// Converts the string {input} to a Number. Fast path: if the string's hash
// field caches an array index, the index is extracted and returned as a
// Smi; otherwise falls back to %StringToNumber.
Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) {
  Label runtime(this, Label::kDeferred);
  Label end(this);

  VARIABLE(var_result, MachineRepresentation::kTagged);

  // Check if string has a cached array index.
  Node* hash = LoadNameHashField(input);
  Node* bit =
      Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask));
  // A zero masked value means the hash field holds a cached array index.
  GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime);

  var_result.Bind(
      SmiTag(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
  Goto(&end);

  BIND(&runtime);
  {
    var_result.Bind(CallRuntime(Runtime::kStringToNumber, context, input));
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
3982 :
3983 996 : Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
: // Converts the Number {argument} to a String, consulting the
: // number-string cache first (separate probe schemes for Smis and
: // HeapNumbers); on a cache miss, falls back to %NumberToString.
3984 996 : VARIABLE(result, MachineRepresentation::kTagged);
3985 996 : Label runtime(this, Label::kDeferred), smi(this), done(this, &result);
3986 :
3987 : // Load the number string cache.
3988 996 : Node* number_string_cache = LoadRoot(Heap::kNumberStringCacheRootIndex);
3989 :
3990 : // Make the hash mask from the length of the number string cache. It
3991 : // contains two elements (number and string) for each cache entry.
3992 : // TODO(ishell): cleanup mask handling.
3993 : Node* mask =
3994 996 : BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
3995 996 : Node* one = IntPtrConstant(1);
3996 996 : mask = IntPtrSub(mask, one);
3997 :
3998 996 : GotoIf(TaggedIsSmi(argument), &smi);
3999 :
4000 : // Argument isn't smi, check to see if it's a heap-number.
4001 : Node* map = LoadMap(argument);
4002 996 : GotoIfNot(IsHeapNumberMap(map), &runtime);
4003 :
4004 : // Make a hash from the two 32-bit values of the double.
4005 : Node* low =
4006 996 : LoadObjectField(argument, HeapNumber::kValueOffset, MachineType::Int32());
4007 : Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize,
4008 996 : MachineType::Int32());
4009 996 : Node* hash = Word32Xor(low, high);
4010 996 : hash = ChangeInt32ToIntPtr(hash);
: // Shift left by one: each cache entry occupies two slots (key, value).
4011 996 : hash = WordShl(hash, one);
4012 996 : Node* index = WordAnd(hash, SmiUntag(BitcastWordToTagged(mask)));
4013 :
4014 : // Cache entry's key must be a heap number
4015 996 : Node* number_key = LoadFixedArrayElement(number_string_cache, index);
4016 996 : GotoIf(TaggedIsSmi(number_key), &runtime);
4017 : map = LoadMap(number_key);
4018 996 : GotoIfNot(IsHeapNumberMap(map), &runtime);
4019 :
4020 : // Cache entry's key must match the heap number value we're looking for.
4021 : Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
4022 996 : MachineType::Int32());
4023 : Node* high_compare = LoadObjectField(
4024 996 : number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
4025 996 : GotoIfNot(Word32Equal(low, low_compare), &runtime);
4026 996 : GotoIfNot(Word32Equal(high, high_compare), &runtime);
4027 :
4028 : // Heap number match, return value from cache entry.
4029 996 : IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
4030 996 : result.Bind(LoadFixedArrayElement(number_string_cache, index, kPointerSize));
4031 996 : Goto(&done);
4032 :
4033 996 : BIND(&runtime);
4034 : {
4035 : // No cache entry, go to the runtime.
4036 996 : result.Bind(CallRuntime(Runtime::kNumberToString, context, argument));
4037 : }
4038 996 : Goto(&done);
4039 :
4040 996 : BIND(&smi);
4041 : {
4042 : // Load the smi key, make sure it matches the smi we're looking for.
4043 : Node* smi_index = BitcastWordToTagged(
4044 996 : WordAnd(WordShl(BitcastTaggedToWord(argument), one), mask));
4045 : Node* smi_key = LoadFixedArrayElement(number_string_cache, smi_index, 0,
4046 996 : SMI_PARAMETERS);
4047 996 : GotoIf(WordNotEqual(smi_key, argument), &runtime);
4048 :
4049 : // Smi match, return value from cache entry.
4050 996 : IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
4051 : result.Bind(LoadFixedArrayElement(number_string_cache, smi_index,
4052 996 : kPointerSize, SMI_PARAMETERS));
4053 996 : Goto(&done);
4054 : }
4055 :
4056 996 : BIND(&done);
4057 1992 : return result.value();
4058 : }
4059 :
4060 172 : Node* CodeStubAssembler::ToName(Node* context, Node* value) {
: // ES ToPropertyKey-style conversion: returns {value} unchanged if it is
: // already a Name (String or Symbol); numbers go through NumberToString,
: // oddballs use their cached to_string; everything else hits %ToName.
: // NOTE: the `&not_name`/`&not_oddball` tokens below were corrupted into
: // `¬_name`/`¬_oddball` by HTML-entity decoding; restored here.
4061 172 : Label end(this);
4062 344 : VARIABLE(var_result, MachineRepresentation::kTagged);
4063 :
4064 172 : Label is_number(this);
4065 172 : GotoIf(TaggedIsSmi(value), &is_number);
4066 :
4067 172 : Label not_name(this);
4068 172 : Node* value_instance_type = LoadInstanceType(value);
4069 : STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
4070 : GotoIf(Int32GreaterThan(value_instance_type, Int32Constant(LAST_NAME_TYPE)),
4071 172 : &not_name);
4072 :
: // {value} is already a Name; return it as-is.
4073 172 : var_result.Bind(value);
4074 172 : Goto(&end);
4075 :
4076 172 : BIND(&is_number);
4077 : {
4078 172 : Callable callable = CodeFactory::NumberToString(isolate());
4079 172 : var_result.Bind(CallStub(callable, context, value));
4080 172 : Goto(&end);
4081 : }
4082 :
4083 172 : BIND(&not_name);
4084 : {
4085 : GotoIf(Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
4086 172 : &is_number);
4087 :
4088 : Label not_oddball(this);
4089 : GotoIf(Word32NotEqual(value_instance_type, Int32Constant(ODDBALL_TYPE)),
4090 172 : &not_oddball);
4091 :
: // Oddballs (undefined, null, true, false) cache their string form.
4092 172 : var_result.Bind(LoadObjectField(value, Oddball::kToStringOffset));
4093 172 : Goto(&end);
4094 :
4095 172 : BIND(&not_oddball);
4096 : {
4097 172 : var_result.Bind(CallRuntime(Runtime::kToName, context, value));
4098 172 : Goto(&end);
4099 172 : }
4100 : }
4101 :
4102 172 : BIND(&end);
4103 344 : return var_result.value();
4104 : }
4105 :
4106 179 : Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
: // Converts a non-numeric HeapObject {input} to a Number. Strings use the
: // fast StringToNumber path, Oddballs use their cached to_number value,
: // JSReceivers are reduced via ToPrimitive (looping back at most once),
: // and anything else (e.g. Symbol) throws via %ToNumber in the runtime.
4107 : // Assert input is a HeapObject (not smi or heap number)
4108 : CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
4109 : CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input))));
4110 :
4111 : // We might need to loop once here due to ToPrimitive conversions.
4112 179 : VARIABLE(var_input, MachineRepresentation::kTagged, input);
4113 358 : VARIABLE(var_result, MachineRepresentation::kTagged);
4114 179 : Label loop(this, &var_input);
4115 179 : Label end(this);
4116 179 : Goto(&loop);
4117 179 : BIND(&loop);
4118 : {
4119 : // Load the current {input} value (known to be a HeapObject).
4120 179 : Node* input = var_input.value();
4121 :
4122 : // Dispatch on the {input} instance type.
4123 179 : Node* input_instance_type = LoadInstanceType(input);
4124 179 : Label if_inputisstring(this), if_inputisoddball(this),
4125 179 : if_inputisreceiver(this, Label::kDeferred),
4126 179 : if_inputisother(this, Label::kDeferred);
4127 179 : GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
4128 : GotoIf(Word32Equal(input_instance_type, Int32Constant(ODDBALL_TYPE)),
4129 179 : &if_inputisoddball);
4130 : Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
4131 179 : &if_inputisother);
4132 :
4133 179 : BIND(&if_inputisstring);
4134 : {
4135 : // The {input} is a String, use the fast stub to convert it to a Number.
4136 179 : var_result.Bind(StringToNumber(context, input));
4137 179 : Goto(&end);
4138 : }
4139 :
4140 179 : BIND(&if_inputisoddball);
4141 : {
4142 : // The {input} is an Oddball, we just need to load the Number value of it.
4143 179 : var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
4144 179 : Goto(&end);
4145 : }
4146 :
4147 179 : BIND(&if_inputisreceiver);
4148 : {
4149 : // The {input} is a JSReceiver, we need to convert it to a Primitive first
4150 : // using the ToPrimitive type conversion, preferably yielding a Number.
4151 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
4152 179 : isolate(), ToPrimitiveHint::kNumber);
4153 179 : Node* result = CallStub(callable, context, input);
4154 :
4155 : // Check if the {result} is already a Number.
4156 179 : Label if_resultisnumber(this), if_resultisnotnumber(this);
4157 179 : GotoIf(TaggedIsSmi(result), &if_resultisnumber);
4158 : Node* result_map = LoadMap(result);
4159 : Branch(IsHeapNumberMap(result_map), &if_resultisnumber,
4160 179 : &if_resultisnotnumber);
4161 :
4162 179 : BIND(&if_resultisnumber);
4163 : {
4164 : // The ToPrimitive conversion already gave us a Number, so we're done.
4165 179 : var_result.Bind(result);
4166 179 : Goto(&end);
4167 : }
4168 :
4169 179 : BIND(&if_resultisnotnumber);
4170 : {
4171 : // We now have a Primitive {result}, but it's not yet a Number.
: // Loop around; the primitive cannot be a receiver again, so this
: // converges after one extra iteration.
4172 179 : var_input.Bind(result);
4173 179 : Goto(&loop);
4174 : }
4175 : }
4176 :
4177 179 : BIND(&if_inputisother);
4178 : {
4179 : // The {input} is something else (e.g. Symbol), let the runtime figure
4180 : // out the correct exception.
4181 : // Note: We cannot tail call to the runtime here, as js-to-wasm
4182 : // trampolines also use this code currently, and they declare all
4183 : // outgoing parameters as untagged, while we would push a tagged
4184 : // object here.
4185 179 : var_result.Bind(CallRuntime(Runtime::kToNumber, context, input));
4186 179 : Goto(&end);
4187 179 : }
4188 : }
4189 :
4190 179 : BIND(&end);
4191 358 : return var_result.value();
4192 : }
4193 :
4194 136 : Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
: // ES ToNumber: Smis and HeapNumbers are returned unchanged; every other
: // value is handled by NonNumberToNumber.
: // NOTE: the `&not_smi`/`&not_heap_number` tokens below were corrupted
: // into `¬_smi`/`¬_heap_number` by HTML-entity decoding; restored here.
4195 136 : VARIABLE(var_result, MachineRepresentation::kTagged);
4196 136 : Label end(this);
4197 :
4198 136 : Label not_smi(this, Label::kDeferred);
4199 136 : GotoIfNot(TaggedIsSmi(input), &not_smi);
4200 136 : var_result.Bind(input);
4201 136 : Goto(&end);
4202 :
4203 136 : BIND(&not_smi);
4204 : {
4205 : Label not_heap_number(this, Label::kDeferred);
4206 : Node* input_map = LoadMap(input);
4207 136 : GotoIfNot(IsHeapNumberMap(input_map), &not_heap_number);
4208 :
: // HeapNumbers are already Numbers; pass through unchanged.
4209 136 : var_result.Bind(input);
4210 136 : Goto(&end);
4211 :
4212 136 : BIND(&not_heap_number);
4213 : {
4214 136 : var_result.Bind(NonNumberToNumber(context, input));
4215 136 : Goto(&end);
4216 136 : }
4217 : }
4218 :
4219 136 : BIND(&end);
4220 272 : return var_result.value();
4221 : }
4222 :
4223 : // ES#sec-touint32
4224 93 : Node* CodeStubAssembler::ToUint32(Node* context, Node* input) {
: // Implements the ToUint32 abstract operation: convert {input} to a
: // Number, then reduce it modulo 2^32. Positive Smis pass through
: // untouched; +-0, NaN and both infinities map to +0.
4225 93 : Node* const float_zero = Float64Constant(0.0);
4226 93 : Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
4227 :
4228 : Label out(this);
4229 :
4230 186 : VARIABLE(var_result, MachineRepresentation::kTagged, input);
4231 :
4232 : // Early exit for positive smis.
4233 : {
4234 : // TODO(jgruber): This branch and the recheck below can be removed once we
4235 : // have a ToNumber with multiple exits.
4236 : Label next(this, Label::kDeferred);
4237 93 : Branch(TaggedIsPositiveSmi(input), &out, &next);
4238 93 : BIND(&next);
4239 : }
4240 :
4241 93 : Node* const number = ToNumber(context, input);
4242 93 : var_result.Bind(number);
4243 :
4244 : // Perhaps we have a positive smi now.
4245 : {
4246 : Label next(this, Label::kDeferred);
4247 93 : Branch(TaggedIsPositiveSmi(number), &out, &next);
4248 93 : BIND(&next);
4249 : }
4250 :
4251 93 : Label if_isnegativesmi(this), if_isheapnumber(this);
4252 93 : Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
4253 :
4254 93 : BIND(&if_isnegativesmi);
4255 : {
: // A negative Smi reinterpreted as uint32 wraps to a large positive
: // value, which may not fit a Smi, so box it in a HeapNumber.
4256 93 : Node* const uint32_value = SmiToWord32(number);
4257 93 : Node* float64_value = ChangeUint32ToFloat64(uint32_value);
4258 93 : var_result.Bind(AllocateHeapNumberWithValue(float64_value));
4259 93 : Goto(&out);
4260 : }
4261 :
4262 93 : BIND(&if_isheapnumber);
4263 : {
4264 : Label return_zero(this);
4265 : Node* const value = LoadHeapNumberValue(number);
4266 :
4267 : {
4268 : // +-0.
4269 : Label next(this);
4270 93 : Branch(Float64Equal(value, float_zero), &return_zero, &next);
4271 93 : BIND(&next);
4272 : }
4273 :
4274 : {
4275 : // NaN.
4276 : Label next(this);
4277 93 : Branch(Float64Equal(value, value), &next, &return_zero);
4278 93 : BIND(&next);
4279 : }
4280 :
4281 : {
4282 : // +Infinity.
4283 : Label next(this);
4284 : Node* const positive_infinity =
4285 93 : Float64Constant(std::numeric_limits<double>::infinity());
4286 93 : Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
4287 93 : BIND(&next);
4288 : }
4289 :
4290 : {
4291 : // -Infinity.
4292 : Label next(this);
4293 : Node* const negative_infinity =
4294 93 : Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
4295 93 : Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
4296 93 : BIND(&next);
4297 : }
4298 :
4299 : // * Let int be the mathematical value that is the same sign as number and
4300 : // whose magnitude is floor(abs(number)).
4301 : // * Let int32bit be int modulo 2^32.
4302 : // * Return int32bit.
4303 : {
: // The add of 2^32 before the second mod folds negative remainders
: // into the [0, 2^32) range.
4304 93 : Node* x = Float64Trunc(value);
4305 93 : x = Float64Mod(x, float_two_32);
4306 93 : x = Float64Add(x, float_two_32);
4307 93 : x = Float64Mod(x, float_two_32);
4308 :
4309 93 : Node* const result = ChangeFloat64ToTagged(x);
4310 93 : var_result.Bind(result);
4311 93 : Goto(&out);
4312 : }
4313 :
4314 93 : BIND(&return_zero);
4315 : {
4316 93 : var_result.Bind(SmiConstant(Smi::kZero));
4317 93 : Goto(&out);
4318 93 : }
4319 : }
4320 :
4321 93 : BIND(&out);
4322 186 : return var_result.value();
4323 : }
4324 :
4325 910 : Node* CodeStubAssembler::ToString(Node* context, Node* input) {
: // ES ToString: Strings pass through; Numbers go via NumberToString;
: // Oddballs use their cached to_string; everything else (receivers,
: // symbols) is handled by %ToString.
: // NOTE: the `&not_heap_number` tokens below were corrupted into
: // `¬_heap_number` by HTML-entity decoding; restored here.
4326 910 : Label is_number(this);
4327 910 : Label runtime(this, Label::kDeferred);
4328 1820 : VARIABLE(result, MachineRepresentation::kTagged);
4329 910 : Label done(this, &result);
4330 :
4331 910 : GotoIf(TaggedIsSmi(input), &is_number);
4332 :
4333 : Node* input_map = LoadMap(input);
4334 : Node* input_instance_type = LoadMapInstanceType(input_map);
4335 :
4336 910 : result.Bind(input);
4337 910 : GotoIf(IsStringInstanceType(input_instance_type), &done);
4338 :
4339 910 : Label not_heap_number(this);
4340 910 : Branch(IsHeapNumberMap(input_map), &is_number, &not_heap_number);
4341 :
4342 910 : BIND(&is_number);
4343 910 : result.Bind(NumberToString(context, input));
4344 910 : Goto(&done);
4345 :
4346 910 : BIND(&not_heap_number);
4347 : {
4348 : GotoIf(Word32NotEqual(input_instance_type, Int32Constant(ODDBALL_TYPE)),
4349 910 : &runtime);
4350 910 : result.Bind(LoadObjectField(input, Oddball::kToStringOffset));
4351 910 : Goto(&done);
4352 : }
4353 :
4354 910 : BIND(&runtime);
4355 : {
4356 910 : result.Bind(CallRuntime(Runtime::kToString, context, input));
4357 910 : Goto(&done);
4358 : }
4359 :
4360 910 : BIND(&done);
4361 1820 : return result.value();
4362 : }
4363 :
4364 86 : Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
: // If {input} is a JSReceiver, converts it to a primitive via the
: // NonPrimitiveToPrimitive stub (default hint); primitives pass through.
4365 172 : Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
4366 172 : VARIABLE(result, MachineRepresentation::kTagged);
4367 86 : Label done(this, &result);
4368 :
4369 86 : BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
4370 :
4371 86 : BIND(&if_isreceiver);
4372 : {
: // Convert {input} to a primitive. NOTE(review): despite the original
: // comment mentioning a Number hint, no explicit hint is passed here,
: // so the stub's default ToPrimitive hint applies — confirm intent.
4373 : // Convert {input} to a primitive first passing Number hint.
4374 86 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
4375 86 : result.Bind(CallStub(callable, context, input));
4376 86 : Goto(&done);
4377 : }
4378 :
4379 86 : BIND(&if_isnotreceiver);
4380 : {
4381 86 : result.Bind(input);
4382 86 : Goto(&done);
4383 : }
4384 :
4385 86 : BIND(&done);
4386 172 : return result.value();
4387 : }
4388 :
4389 473 : Node* CodeStubAssembler::ToSmiIndex(Node* const input, Node* const context,
4390 : Label* range_error) {
: // Converts {input} to a non-negative Smi index. undefined maps to 0;
: // other values go through ToInteger (truncating -0 to 0). Jumps to
: // {range_error} if the result is negative or does not fit in a Smi.
4391 473 : VARIABLE(result, MachineRepresentation::kTagged, input);
4392 473 : Label check_undefined(this), return_zero(this), defined(this),
4393 473 : negative_check(this), done(this);
4394 473 : Branch(TaggedIsSmi(result.value()), &negative_check, &check_undefined);
4395 :
4396 473 : BIND(&check_undefined);
4397 473 : Branch(IsUndefined(result.value()), &return_zero, &defined);
4398 :
4399 473 : BIND(&defined);
4400 : result.Bind(ToInteger(context, result.value(),
4401 473 : CodeStubAssembler::kTruncateMinusZero));
4402 473 : GotoIfNot(TaggedIsSmi(result.value()), range_error);
4403 : CSA_ASSERT(this, TaggedIsSmi(result.value()));
4404 473 : Goto(&negative_check);
4405 :
4406 473 : BIND(&negative_check);
4407 473 : Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);
4408 :
4409 473 : BIND(&return_zero);
4410 473 : result.Bind(SmiConstant(0));
4411 473 : Goto(&done);
4412 :
4413 473 : BIND(&done);
4414 946 : return result.value();
4415 : }
4416 :
4417 43 : Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context,
4418 : Label* range_error) {
: // Converts {input} to a Smi length. Unlike ToSmiIndex, negative values
: // are clamped to 0 rather than being range errors; only a non-Smi
: // ToInteger result jumps to {range_error}.
4419 43 : VARIABLE(result, MachineRepresentation::kTagged, input);
4420 43 : Label to_integer(this), negative_check(this), return_zero(this), done(this);
4421 43 : Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer);
4422 :
4423 43 : BIND(&to_integer);
4424 : result.Bind(ToInteger(context, result.value(),
4425 43 : CodeStubAssembler::kTruncateMinusZero));
4426 43 : GotoIfNot(TaggedIsSmi(result.value()), range_error);
4427 : CSA_ASSERT(this, TaggedIsSmi(result.value()));
4428 43 : Goto(&negative_check);
4429 :
4430 43 : BIND(&negative_check);
4431 43 : Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
4432 :
4433 43 : BIND(&return_zero);
4434 43 : result.Bind(SmiConstant(0));
4435 43 : Goto(&done);
4436 :
4437 43 : BIND(&done);
4438 86 : return result.value();
4439 : }
4440 :
4441 1290 : Node* CodeStubAssembler::ToInteger(Node* context, Node* input,
4442 : ToIntegerTruncationMode mode) {
: // ES ToInteger: truncates {input} towards zero after ToNumber. NaN maps
: // to 0; with kTruncateMinusZero, -0.0 also maps to Smi 0. Loops back at
: // most once after a NonNumberToNumber conversion.
4443 : // We might need to loop once for ToNumber conversion.
4444 1290 : VARIABLE(var_arg, MachineRepresentation::kTagged, input);
4445 1290 : Label loop(this, &var_arg), out(this);
4446 1290 : Goto(&loop);
4447 1290 : BIND(&loop);
4448 : {
4449 : // Shared entry points.
4450 : Label return_zero(this, Label::kDeferred);
4451 :
4452 : // Load the current {arg} value.
4453 1290 : Node* arg = var_arg.value();
4454 :
4455 : // Check if {arg} is a Smi.
4456 1290 : GotoIf(TaggedIsSmi(arg), &out);
4457 :
4458 : // Check if {arg} is a HeapNumber.
4459 1290 : Label if_argisheapnumber(this),
4460 1290 : if_argisnotheapnumber(this, Label::kDeferred);
4461 : Branch(IsHeapNumberMap(LoadMap(arg)), &if_argisheapnumber,
4462 1290 : &if_argisnotheapnumber);
4463 :
4464 1290 : BIND(&if_argisheapnumber);
4465 : {
4466 : // Load the floating-point value of {arg}.
4467 : Node* arg_value = LoadHeapNumberValue(arg);
4468 :
4469 : // Check if {arg} is NaN.
4470 1290 : GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);
4471 :
4472 : // Truncate {arg} towards zero.
4473 1290 : Node* value = Float64Trunc(arg_value);
4474 :
4475 1290 : if (mode == kTruncateMinusZero) {
4476 : // Truncate -0.0 to 0.
4477 860 : GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
4478 : }
4479 :
4480 1290 : var_arg.Bind(ChangeFloat64ToTagged(value));
4481 1290 : Goto(&out);
4482 : }
4483 :
4484 1290 : BIND(&if_argisnotheapnumber);
4485 : {
4486 : // Need to convert {arg} to a Number first.
4487 1290 : Callable callable = CodeFactory::NonNumberToNumber(isolate());
4488 1290 : var_arg.Bind(CallStub(callable, context, arg));
4489 1290 : Goto(&loop);
4490 : }
4491 :
4492 1290 : BIND(&return_zero);
4493 1290 : var_arg.Bind(SmiConstant(Smi::kZero));
4494 2580 : Goto(&out);
4495 : }
4496 :
4497 1290 : BIND(&out);
4498 2580 : return var_arg.value();
4499 : }
4500 :
4501 14432 : Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift,
4502 : uint32_t mask) {
4503 : return Word32Shr(Word32And(word32, Int32Constant(mask)),
4504 14432 : static_cast<int>(shift));
4505 : }
4506 :
4507 16282 : Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) {
4508 16282 : return WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift));
4509 : }
4510 :
4511 0 : void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
: // Emits code that stores {value} into the native stats counter, but
: // only when native code counters are enabled at stub-generation time.
4512 0 : if (FLAG_native_code_counters && counter->Enabled()) {
4513 0 : Node* counter_address = ExternalConstant(ExternalReference(counter));
4514 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
4515 0 : Int32Constant(value));
4516 : }
4517 0 : }
4518 :
4519 13796 : void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
: // Emits a load/add/store sequence that bumps the native stats counter
: // by {delta} (> 0). A no-op unless native code counters are enabled.
4520 : DCHECK(delta > 0);
4521 13796 : if (FLAG_native_code_counters && counter->Enabled()) {
4522 0 : Node* counter_address = ExternalConstant(ExternalReference(counter));
4523 0 : Node* value = Load(MachineType::Int32(), counter_address);
4524 0 : value = Int32Add(value, Int32Constant(delta));
4525 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
4526 : }
4527 13796 : }
4528 :
4529 0 : void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
: // Mirror of IncrementCounter: subtracts {delta} (> 0) from the native
: // stats counter when native code counters are enabled.
4530 : DCHECK(delta > 0);
4531 0 : if (FLAG_native_code_counters && counter->Enabled()) {
4532 0 : Node* counter_address = ExternalConstant(ExternalReference(counter));
4533 0 : Node* value = Load(MachineType::Int32(), counter_address);
4534 0 : value = Int32Sub(value, Int32Constant(delta));
4535 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
4536 : }
4537 0 : }
4538 :
4539 30186 : void CodeStubAssembler::Increment(Variable& variable, int value,
4540 : ParameterMode mode) {
: // Rebinds {variable} to variable + value, using Smi or intptr
: // arithmetic according to {mode}. The DCHECKs ensure the variable's
: // machine representation matches the requested parameter mode.
4541 : DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
4542 : variable.rep() == MachineType::PointerRepresentation());
4543 : DCHECK_IMPLIES(mode == SMI_PARAMETERS,
4544 : variable.rep() == MachineRepresentation::kTagged ||
4545 : variable.rep() == MachineRepresentation::kTaggedSigned);
4546 : variable.Bind(
4547 30186 : IntPtrOrSmiAdd(variable.value(), IntPtrOrSmiConstant(value, mode), mode));
4548 30186 : }
4549 :
4550 43 : void CodeStubAssembler::Use(Label* label) {
4551 43 : GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
4552 43 : }
4553 :
4554 824 : void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
4555 : Variable* var_index, Label* if_keyisunique,
4556 : Variable* var_unique, Label* if_bailout,
4557 : Label* if_notinternalized) {
: // Classifies a property {key}: jumps to {if_keyisindex} with the intptr
: // index in {var_index}, or to {if_keyisunique} with a unique name in
: // {var_unique}. ThinStrings are unwrapped to their actual string.
: // Non-internalized strings go to {if_notinternalized} (or {if_bailout}
: // if none given); uncacheable-index strings go to {if_bailout}.
4558 : DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
4559 : DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
4560 824 : Comment("TryToName");
4561 :
4562 824 : Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this);
4563 : // Handle Smi and HeapNumber keys.
4564 824 : var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
4565 824 : Goto(if_keyisindex);
4566 :
4567 824 : BIND(&if_keyisnotindex);
4568 : Node* key_map = LoadMap(key);
4569 824 : var_unique->Bind(key);
4570 : // Symbols are unique.
4571 824 : GotoIf(IsSymbolMap(key_map), if_keyisunique);
4572 : Node* key_instance_type = LoadMapInstanceType(key_map);
4573 : // Miss if |key| is not a String.
4574 : STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
4575 824 : GotoIfNot(IsStringInstanceType(key_instance_type), if_bailout);
4576 : // |key| is a String. Check if it has a cached array index.
4577 : Node* hash = LoadNameHashField(key);
4578 : Node* contains_index =
4579 824 : Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask));
4580 824 : GotoIf(Word32Equal(contains_index, Int32Constant(0)), &if_hascachedindex);
4581 : // No cached array index. If the string knows that it contains an index,
4582 : // then it must be an uncacheable index. Handle this case in the runtime.
4583 : Node* not_an_index =
4584 824 : Word32And(hash, Int32Constant(Name::kIsNotArrayIndexMask));
4585 824 : GotoIf(Word32Equal(not_an_index, Int32Constant(0)), if_bailout);
4586 : // Check if we have a ThinString.
4587 : GotoIf(Word32Equal(key_instance_type, Int32Constant(THIN_STRING_TYPE)),
4588 824 : &if_thinstring);
4589 : GotoIf(
4590 : Word32Equal(key_instance_type, Int32Constant(THIN_ONE_BYTE_STRING_TYPE)),
4591 824 : &if_thinstring);
4592 : // Finally, check if |key| is internalized.
4593 : STATIC_ASSERT(kNotInternalizedTag != 0);
4594 : Node* not_internalized =
4595 824 : Word32And(key_instance_type, Int32Constant(kIsNotInternalizedMask));
4596 : GotoIf(Word32NotEqual(not_internalized, Int32Constant(0)),
4597 824 : if_notinternalized != nullptr ? if_notinternalized : if_bailout);
4598 824 : Goto(if_keyisunique);
4599 :
4600 824 : BIND(&if_thinstring);
: // Unwrap the ThinString to the internalized string it points at.
4601 824 : var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
4602 824 : Goto(if_keyisunique);
4603 :
4604 824 : BIND(&if_hascachedindex);
4605 824 : var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
4606 1648 : Goto(if_keyisindex);
4607 824 : }
4608 :
4609 86 : void CodeStubAssembler::TryInternalizeString(
4610 : Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
4611 : Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
: // Calls the C++ try_internalize_string helper. A non-Smi result is the
: // internalized string; a Smi result is either an array index or one of
: // the ResultSentinel values (kNotFound / kUnsupported), dispatched to
: // the corresponding label.
4612 : DCHECK(var_index->rep() == MachineType::PointerRepresentation());
4613 : DCHECK(var_internalized->rep() == MachineRepresentation::kTagged);
4614 : Node* function = ExternalConstant(
4615 86 : ExternalReference::try_internalize_string_function(isolate()));
4616 : Node* result = CallCFunction1(MachineType::AnyTagged(),
4617 86 : MachineType::AnyTagged(), function, string);
4618 : Label internalized(this);
4619 86 : GotoIf(TaggedIsNotSmi(result), &internalized);
4620 86 : Node* word_result = SmiUntag(result);
4621 : GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
4622 86 : if_not_internalized);
4623 : GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
4624 86 : if_bailout);
4625 86 : var_index->Bind(word_result);
4626 86 : Goto(if_index);
4627 :
4628 86 : BIND(&internalized);
4629 86 : var_internalized->Bind(result);
4630 86 : Goto(if_internalized);
4631 86 : }
4632 :
4633 : template <typename Dictionary>
4634 29105 : Node* CodeStubAssembler::EntryToIndex(Node* entry, int field_index) {
: // Maps a dictionary entry number to its backing-store array index:
: // entries start at kElementsStartIndex and span kEntrySize slots each;
: // {field_index} selects a slot within the entry.
4635 29105 : Node* entry_index = IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
4636 : return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
4637 29105 : field_index));
4638 : }
4639 :
: // Explicit instantiations for the dictionary types used by callers in
: // other translation units.
4640 : template Node* CodeStubAssembler::EntryToIndex<NameDictionary>(Node*, int);
4641 : template Node* CodeStubAssembler::EntryToIndex<GlobalDictionary>(Node*, int);
4642 : template Node* CodeStubAssembler::EntryToIndex<SeededNumberDictionary>(Node*,
4643 : int);
4644 :
4645 : // This must be kept in sync with HashTableBase::ComputeCapacity().
4646 351 : Node* CodeStubAssembler::HashTableComputeCapacity(Node* at_least_space_for) {
4647 : Node* capacity = IntPtrRoundUpToPowerOfTwo32(IntPtrAdd(
4648 351 : at_least_space_for, WordShr(at_least_space_for, IntPtrConstant(1))));
4649 351 : return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
4650 : }
4651 :
4652 437 : Node* CodeStubAssembler::IntPtrMax(Node* left, Node* right) {
4653 : return SelectConstant(IntPtrGreaterThanOrEqual(left, right), left, right,
4654 437 : MachineType::PointerRepresentation());
4655 : }
4656 :
4657 172 : Node* CodeStubAssembler::IntPtrMin(Node* left, Node* right) {
4658 : return SelectConstant(IntPtrLessThanOrEqual(left, right), left, right,
4659 172 : MachineType::PointerRepresentation());
4660 : }
4661 :
4662 : template <class Dictionary>
4663 : Node* CodeStubAssembler::GetNextEnumerationIndex(Node* dictionary) {
: // Reads the dictionary's next-enumeration-index slot (a Smi).
4664 : return LoadFixedArrayElement(dictionary,
4665 430 : Dictionary::kNextEnumerationIndexIndex);
4666 : }
4667 :
4668 : template <class Dictionary>
4669 : void CodeStubAssembler::SetNextEnumerationIndex(Node* dictionary,
4670 : Node* next_enum_index_smi) {
: // Writes the dictionary's next-enumeration-index slot; the value is a
: // Smi, so no write barrier is needed.
4671 430 : StoreFixedArrayElement(dictionary, Dictionary::kNextEnumerationIndexIndex,
4672 : next_enum_index_smi, SKIP_WRITE_BARRIER);
4673 : }
4674 :
4675 : template <typename Dictionary>
4676 5933 : void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
4677 : Node* unique_name, Label* if_found,
4678 : Variable* var_name_index,
4679 : Label* if_not_found,
4680 : int inlined_probes,
4681 : LookupMode mode) {
: // Open-addressed probe over {dictionary} for {unique_name}. The first
: // {inlined_probes} probes are emitted straight-line; the remainder run
: // in a loop. In kFindExisting mode, jumps to {if_found} with the key's
: // array index in {var_name_index}; in kFindInsertionIndex mode, jumps
: // to {if_not_found} at the first free (undefined or the-hole) slot.
4682 : CSA_ASSERT(this, IsDictionary(dictionary));
4683 : DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
4684 : DCHECK_IMPLIES(mode == kFindInsertionIndex,
4685 : inlined_probes == 0 && if_found == nullptr);
4686 5933 : Comment("NameDictionaryLookup");
4687 :
4688 5933 : Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
4689 5933 : Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
4690 5933 : Node* hash = ChangeUint32ToWord(LoadNameHash(unique_name));
4691 :
4692 : // See Dictionary::FirstProbe().
4693 5933 : Node* count = IntPtrConstant(0);
4694 5933 : Node* entry = WordAnd(hash, mask);
4695 :
4696 33878 : for (int i = 0; i < inlined_probes; i++) {
4697 : Node* index = EntryToIndex<Dictionary>(entry);
4698 22012 : var_name_index->Bind(index);
4699 :
4700 22012 : Node* current = LoadFixedArrayElement(dictionary, index);
4701 22012 : GotoIf(WordEqual(current, unique_name), if_found);
4702 :
4703 : // See Dictionary::NextProbe().
4704 22012 : count = IntPtrConstant(i + 1);
4705 22012 : entry = WordAnd(IntPtrAdd(entry, count), mask);
4706 : }
4707 5933 : if (mode == kFindInsertionIndex) {
4708 : // Appease the variable merging algorithm for "Goto(&loop)" below.
4709 430 : var_name_index->Bind(IntPtrConstant(0));
4710 : }
4711 :
4712 : Node* undefined = UndefinedConstant();
4713 5933 : Node* the_hole = mode == kFindExisting ? nullptr : TheHoleConstant();
4714 :
4715 5933 : VARIABLE(var_count, MachineType::PointerRepresentation(), count);
4716 11866 : VARIABLE(var_entry, MachineType::PointerRepresentation(), entry);
4717 5933 : Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
4718 11866 : Label loop(this, 3, loop_vars);
4719 5933 : Goto(&loop);
4720 5933 : BIND(&loop);
4721 : {
4722 5933 : Node* entry = var_entry.value();
4723 :
4724 : Node* index = EntryToIndex<Dictionary>(entry);
4725 5933 : var_name_index->Bind(index);
4726 :
4727 5933 : Node* current = LoadFixedArrayElement(dictionary, index);
: // An undefined slot terminates the probe sequence in both modes.
4728 5933 : GotoIf(WordEqual(current, undefined), if_not_found);
4729 5933 : if (mode == kFindExisting) {
4730 5503 : GotoIf(WordEqual(current, unique_name), if_found);
4731 : } else {
4732 : DCHECK_EQ(kFindInsertionIndex, mode);
4733 430 : GotoIf(WordEqual(current, the_hole), if_not_found);
4734 : }
4735 :
4736 : // See Dictionary::NextProbe().
4737 5933 : Increment(var_count);
4738 5933 : entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
4739 :
4740 5933 : var_entry.Bind(entry);
4741 5933 : Goto(&loop);
4742 5933 : }
4743 5933 : }
4744 :
4745 : // Instantiate template methods to workaround GCC compilation issue.
4746 : template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
4747 : Node*, Node*, Label*, Variable*, Label*, int, LookupMode);
4748 : template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
4749 : Node*, Node*, Label*, Variable*, Label*, int, LookupMode);
4750 :
4751 716 : Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) {
: // Emits the same bit-mixing sequence as the C++ ComputeIntegerHash so
: // that generated code and runtime agree on dictionary hash values; the
: // result is masked to 30 bits. Do not change any step independently of
: // the C++ implementation.
4752 : // See v8::internal::ComputeIntegerHash()
4753 716 : Node* hash = TruncateWordToWord32(key);
4754 716 : hash = Word32Xor(hash, seed);
4755 : hash = Int32Add(Word32Xor(hash, Int32Constant(0xffffffff)),
4756 716 : Word32Shl(hash, Int32Constant(15)));
4757 716 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
4758 716 : hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
4759 716 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
4760 716 : hash = Int32Mul(hash, Int32Constant(2057));
4761 716 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
4762 716 : return Word32And(hash, Int32Constant(0x3fffffff));
4763 : }
4764 :
4765 : template <typename Dictionary>
4766 709 : void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
4767 : Node* intptr_index,
4768 : Label* if_found,
4769 : Variable* var_entry,
4770 : Label* if_not_found,
4771 : Label* if_not_found) {
: // Probes the number dictionary for the element key {intptr_index}.
: // Keys may be stored as Smis or HeapNumbers, so both representations
: // are compared; the-hole entries continue probing, undefined ends the
: // search. On success, {var_entry} holds the entry number.
4772 : CSA_ASSERT(this, IsDictionary(dictionary));
4773 : DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
4774 709 : Comment("NumberDictionaryLookup");
4775 :
4776 709 : Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
4777 709 : Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
4778 :
4779 : Node* int32_seed;
4780 : if (Dictionary::ShapeT::UsesSeed) {
4781 : int32_seed = HashSeed();
4782 : } else {
4783 7 : int32_seed = Int32Constant(kZeroHashSeed);
4784 : }
4785 709 : Node* hash = ChangeUint32ToWord(ComputeIntegerHash(intptr_index, int32_seed));
4786 709 : Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);
4787 :
4788 : // See Dictionary::FirstProbe().
4789 709 : Node* count = IntPtrConstant(0);
4790 709 : Node* entry = WordAnd(hash, mask);
4791 :
4792 : Node* undefined = UndefinedConstant();
4793 : Node* the_hole = TheHoleConstant();
4794 :
4795 709 : VARIABLE(var_count, MachineType::PointerRepresentation(), count);
4796 709 : Variable* loop_vars[] = {&var_count, var_entry};
4797 1418 : Label loop(this, 2, loop_vars);
4798 709 : var_entry->Bind(entry);
4799 709 : Goto(&loop);
4800 709 : BIND(&loop);
4801 : {
4802 709 : Node* entry = var_entry->value();
4803 :
4804 : Node* index = EntryToIndex<Dictionary>(entry);
4805 709 : Node* current = LoadFixedArrayElement(dictionary, index);
4806 709 : GotoIf(WordEqual(current, undefined), if_not_found);
4807 : Label next_probe(this);
4808 : {
4809 709 : Label if_currentissmi(this), if_currentisnotsmi(this);
4810 709 : Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
4811 709 : BIND(&if_currentissmi);
4812 : {
4813 709 : Node* current_value = SmiUntag(current);
4814 709 : Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
4815 : }
4816 709 : BIND(&if_currentisnotsmi);
4817 : {
4818 709 : GotoIf(WordEqual(current, the_hole), &next_probe);
4819 : // Current must be the Number.
4820 : Node* current_value = LoadHeapNumberValue(current);
4821 709 : Branch(Float64Equal(current_value, key_as_float64), if_found,
4822 709 : &next_probe);
4823 709 : }
4824 : }
4825 :
4826 709 : BIND(&next_probe);
4827 : // See Dictionary::NextProbe().
4828 709 : Increment(var_count);
4829 709 : entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
4830 :
4831 709 : var_entry->Bind(entry);
4832 709 : Goto(&loop);
4833 709 : }
4834 709 : }
4834 :
// Generic template is deliberately unreachable: only explicit
// specializations (NameDictionary below) are supported.
template <class Dictionary>
void CodeStubAssembler::FindInsertionEntry(Node* dictionary, Node* key,
                                           Variable* var_key_index) {
  UNREACHABLE();
}
4840 :
// Finds a free slot for |key| in |dictionary| and binds its key index to
// |var_key_index|. Runs NameDictionaryLookup in kFindInsertionIndex mode, so
// the if_found label is unused (nullptr) and |done| doubles as if_not_found.
template <>
void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
    Node* dictionary, Node* key, Variable* var_key_index) {
  Label done(this);
  NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
                                       &done, 0, kFindInsertionIndex);
  BIND(&done);
}
4849 :
// Generic template is deliberately unreachable.
template <class Dictionary>
void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value,
                                    Node* index, Node* enum_index) {
  UNREACHABLE();  // Use specializations instead.
}
4855 :
// Writes a new NameDictionary entry at |index|: the key, the value, and a
// details Smi combining default property attributes with |enum_index|.
// Private symbols additionally get DONT_ENUM folded into the details.
template <>
void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary,
                                                    Node* name, Node* value,
                                                    Node* index,
                                                    Node* enum_index) {
  // Store name and value.
  StoreFixedArrayElement(dictionary, index, name);
  StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);

  // Prepare details of the new property: data property, no attributes,
  // no property cell, enumeration index packed into its bitfield.
  const int kInitialIndex = 0;
  PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell);
  enum_index =
      SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
  // The OR below only works if the storage-field bits start out zero.
  STATIC_ASSERT(kInitialIndex == 0);
  VARIABLE(var_details, MachineRepresentation::kTaggedSigned,
           SmiOr(SmiConstant(d.AsSmi()), enum_index));

  // Private names must be marked non-enumerable.
  Label not_private(this, &var_details);
  // Only symbols can be private; plain names skip straight to the store.
  GotoIfNot(IsSymbolMap(LoadMap(name)), &not_private);
  Node* flags = SmiToWord32(LoadObjectField(name, Symbol::kFlagsOffset));
  const int kPrivateMask = 1 << Symbol::kPrivateBit;
  GotoIfNot(IsSetWord32(flags, kPrivateMask), &not_private);
  Node* dont_enum =
      SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
  var_details.Bind(SmiOr(var_details.value(), dont_enum));
  Goto(&not_private);
  BIND(&not_private);

  // Finally, store the details.
  StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
                                         var_details.value());
}
4890 :
// Global dictionary insertion is not implemented in CSA; callers must not
// reach this specialization.
template <>
void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary,
                                                      Node* key, Node* value,
                                                      Node* index,
                                                      Node* enum_index) {
  UNIMPLEMENTED();
}
4898 :
4899 : template <class Dictionary>
4900 430 : void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value,
4901 : Label* bailout) {
4902 : Node* capacity = GetCapacity<Dictionary>(dictionary);
4903 : Node* nof = GetNumberOfElements<Dictionary>(dictionary);
4904 430 : Node* new_nof = SmiAdd(nof, SmiConstant(1));
4905 : // Require 33% to still be free after adding additional_elements.
4906 : // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
4907 : // But that's OK here because it's only used for a comparison.
4908 430 : Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
4909 430 : GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
4910 : // Require rehashing if more than 50% of free elements are deleted elements.
4911 : Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
4912 : CSA_ASSERT(this, SmiAbove(capacity, new_nof));
4913 430 : Node* half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
4914 430 : GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
4915 : Node* enum_index = nullptr;
4916 : if (Dictionary::kIsEnumerable) {
4917 : enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
4918 430 : Node* new_enum_index = SmiAdd(enum_index, SmiConstant(1));
4919 : Node* max_enum_index =
4920 430 : SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
4921 430 : GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
4922 :
4923 : // No more bailouts after this point.
4924 : // Operations from here on can have side effects.
4925 :
4926 : SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
4927 : } else {
4928 : USE(enum_index);
4929 : }
4930 : SetNumberOfElements<Dictionary>(dictionary, new_nof);
4931 :
4932 430 : VARIABLE(var_key_index, MachineType::PointerRepresentation());
4933 430 : FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
4934 430 : InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
4935 860 : enum_index);
4936 430 : }
4937 :
// Explicit instantiation of Add for NameDictionary (the only dictionary type
// instantiated here).
template void CodeStubAssembler::Add<NameDictionary>(Node*, Node*, Node*,
                                                     Label*);
4940 :
// Linearly scans the descriptor array's key slots for |unique_name|,
// iterating from the last entry down to the first (note the negative
// kEntrySize increment). On a match, binds the key index (in array slots)
// to |var_name_index| and jumps to |if_found|; otherwise |if_not_found|.
// |nof| is the number of own descriptors, as an intptr.
void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name,
                                               Node* descriptors, Node* nof,
                                               Label* if_found,
                                               Variable* var_name_index,
                                               Label* if_not_found) {
  Comment("DescriptorLookupLinear");
  Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0));
  Node* factor = IntPtrConstant(DescriptorArray::kEntrySize);
  Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor));

  BuildFastLoop(last_exclusive, first_inclusive,
                [this, descriptors, unique_name, if_found,
                 var_name_index](Node* name_index) {
                  Node* candidate_name =
                      LoadFixedArrayElement(descriptors, name_index);
                  // Bind before the comparison so |var_name_index| is valid
                  // on the jump to |if_found|.
                  var_name_index->Bind(name_index);
                  GotoIf(WordEqual(candidate_name, unique_name), if_found);
                },
                -DescriptorArray::kEntrySize, INTPTR_PARAMETERS,
                IndexAdvanceMode::kPre);
  Goto(if_not_found);
}
4963 :
4964 1175 : Node* CodeStubAssembler::DescriptorArrayNumberOfEntries(Node* descriptors) {
4965 : return LoadAndUntagToWord32FixedArrayElement(
4966 1175 : descriptors, IntPtrConstant(DescriptorArray::kDescriptorLengthIndex));
4967 : }
4968 :
4969 : namespace {
4970 :
4971 5918 : Node* DescriptorNumberToIndex(CodeStubAssembler* a, Node* descriptor_number) {
4972 5918 : Node* descriptor_size = a->Int32Constant(DescriptorArray::kEntrySize);
4973 5918 : Node* index = a->Int32Mul(descriptor_number, descriptor_size);
4974 5918 : return a->ChangeInt32ToIntPtr(index);
4975 : }
4976 :
4977 : } // namespace
4978 :
4979 1218 : Node* CodeStubAssembler::DescriptorArrayToKeyIndex(Node* descriptor_number) {
4980 : return IntPtrAdd(IntPtrConstant(DescriptorArray::ToKeyIndex(0)),
4981 1218 : DescriptorNumberToIndex(this, descriptor_number));
4982 : }
4983 :
4984 2350 : Node* CodeStubAssembler::DescriptorArrayGetSortedKeyIndex(
4985 : Node* descriptors, Node* descriptor_number) {
4986 : const int details_offset = DescriptorArray::ToDetailsIndex(0) * kPointerSize;
4987 : Node* details = LoadAndUntagToWord32FixedArrayElement(
4988 : descriptors, DescriptorNumberToIndex(this, descriptor_number),
4989 2350 : details_offset);
4990 2350 : return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
4991 : }
4992 :
4993 2350 : Node* CodeStubAssembler::DescriptorArrayGetKey(Node* descriptors,
4994 : Node* descriptor_number) {
4995 : const int key_offset = DescriptorArray::ToKeyIndex(0) * kPointerSize;
4996 : return LoadFixedArrayElement(descriptors,
4997 : DescriptorNumberToIndex(this, descriptor_number),
4998 2350 : key_offset);
4999 : }
5000 :
// Binary-searches the descriptor array for |unique_name| by hash, visiting
// entries in hash-sorted order via the GetSortedKey indirection. Phase 1
// narrows [low, high] to the first entry whose hash >= the target hash;
// phase 2 linearly scans the run of equal-hash entries for a pointer-equal
// name. Entries with sorted index >= |nof| (not own descriptors) are
// rejected. Binds the key's array index to |var_name_index| on success.
void CodeStubAssembler::DescriptorLookupBinary(Node* unique_name,
                                               Node* descriptors, Node* nof,
                                               Label* if_found,
                                               Variable* var_name_index,
                                               Label* if_not_found) {
  Comment("DescriptorLookupBinary");
  VARIABLE(var_low, MachineRepresentation::kWord32, Int32Constant(0));
  // Search over ALL descriptors in the array, not just the own ones; the
  // nof check below filters afterwards.
  Node* limit =
      Int32Sub(DescriptorArrayNumberOfEntries(descriptors), Int32Constant(1));
  VARIABLE(var_high, MachineRepresentation::kWord32, limit);
  Node* hash = LoadNameHashField(unique_name);
  CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));

  // Assume non-empty array.
  CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));

  Variable* loop_vars[] = {&var_high, &var_low};
  Label binary_loop(this, 2, loop_vars);
  Goto(&binary_loop);
  BIND(&binary_loop);
  {
    // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
    Node* mid =
        Int32Add(var_low.value(),
                 Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1));
    // mid_name = descriptors->GetSortedKey(mid).
    Node* sorted_key_index = DescriptorArrayGetSortedKeyIndex(descriptors, mid);
    Node* mid_name = DescriptorArrayGetKey(descriptors, sorted_key_index);

    Node* mid_hash = LoadNameHashField(mid_name);

    Label mid_greater(this), mid_less(this), merge(this);
    Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
    BIND(&mid_greater);
    {
      var_high.Bind(mid);
      Goto(&merge);
    }
    BIND(&mid_less);
    {
      var_low.Bind(Int32Add(mid, Int32Constant(1)));
      Goto(&merge);
    }
    BIND(&merge);
    // Loop until the range collapses; var_low then points at the first
    // entry with hash >= |hash|.
    GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
  }

  // Linear scan over the (possibly colliding) equal-hash run.
  Label scan_loop(this, &var_low);
  Goto(&scan_loop);
  BIND(&scan_loop);
  {
    GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);

    Node* sort_index =
        DescriptorArrayGetSortedKeyIndex(descriptors, var_low.value());
    Node* current_name = DescriptorArrayGetKey(descriptors, sort_index);
    Node* current_hash = LoadNameHashField(current_name);
    // Once the hash no longer matches we have left the collision run.
    GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
    Label next(this);
    GotoIf(WordNotEqual(current_name, unique_name), &next);
    // Matching name but not an own descriptor: treat as not found.
    GotoIf(Int32GreaterThanOrEqual(sort_index, nof), if_not_found);
    var_name_index->Bind(DescriptorArrayToKeyIndex(sort_index));
    Goto(if_found);

    BIND(&next);
    var_low.Bind(Int32Add(var_low.value(), Int32Constant(1)));
    Goto(&scan_loop);
  }
}
5070 :
// Looks up |unique_name| among the own descriptors encoded in |bitfield3|:
// empty maps go straight to |if_not_found|; small maps (<= 32 descriptors)
// use the linear scan, larger ones the hash-based binary search.
void CodeStubAssembler::DescriptorLookup(Node* unique_name, Node* descriptors,
                                         Node* bitfield3, Label* if_found,
                                         Variable* var_name_index,
                                         Label* if_not_found) {
  Comment("DescriptorArrayLookup");
  Node* nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
  GotoIf(Word32Equal(nof, Int32Constant(0)), if_not_found);
  Label linear_search(this), binary_search(this);
  const int kMaxElementsForLinearSearch = 32;
  Branch(Int32LessThanOrEqual(nof, Int32Constant(kMaxElementsForLinearSearch)),
         &linear_search, &binary_search);
  BIND(&linear_search);
  {
    // Linear lookup takes the descriptor count as an intptr.
    DescriptorLookupLinear(unique_name, descriptors, ChangeInt32ToIntPtr(nof),
                           if_found, var_name_index, if_not_found);
  }
  BIND(&binary_search);
  {
    DescriptorLookupBinary(unique_name, descriptors, nof, if_found,
                           var_name_index, if_not_found);
  }
}
5093 :
// Looks up |unique_name| as an own property of |object|, dispatching on the
// map mode: fast maps search the descriptor array (-> |if_found_fast|),
// dictionary maps search the NameDictionary (-> |if_found_dict|), and the
// global object searches its GlobalDictionary (-> |if_found_global|).
// Other special receivers, interceptors, and access checks go to
// |if_bailout| to be handled in the runtime. On success,
// |var_meta_storage| holds the container (descriptors or dictionary) and
// |var_name_index| the entry's key index within it.
void CodeStubAssembler::TryLookupProperty(
    Node* object, Node* map, Node* instance_type, Node* unique_name,
    Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
    Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found,
    Label* if_bailout) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep());
  DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());

  Label if_objectisspecial(this);
  STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
         &if_objectisspecial);

  // Non-special receivers must have neither interceptors nor access checks.
  uint32_t mask =
      1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
  CSA_ASSERT(this, Word32BinaryNot(IsSetWord32(LoadMapBitField(map), mask)));
  USE(mask);

  Node* bit_field3 = LoadMapBitField3(map);
  Label if_isfastmap(this), if_isslowmap(this);
  Branch(IsSetWord32<Map::DictionaryMap>(bit_field3), &if_isslowmap,
         &if_isfastmap);
  BIND(&if_isfastmap);
  {
    Node* descriptors = LoadMapDescriptors(map);
    var_meta_storage->Bind(descriptors);

    DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
                     var_name_index, if_not_found);
  }
  BIND(&if_isslowmap);
  {
    Node* dictionary = LoadProperties(object);
    var_meta_storage->Bind(dictionary);

    NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
                                         var_name_index, if_not_found);
  }
  BIND(&if_objectisspecial);
  {
    // Handle global object here and other special objects in runtime.
    GotoIfNot(Word32Equal(instance_type, Int32Constant(JS_GLOBAL_OBJECT_TYPE)),
              if_bailout);

    // Handle interceptors and access checks in runtime.
    Node* bit_field = LoadMapBitField(map);
    Node* mask = Int32Constant(1 << Map::kHasNamedInterceptor |
                               1 << Map::kIsAccessCheckNeeded);
    GotoIf(Word32NotEqual(Word32And(bit_field, mask), Int32Constant(0)),
           if_bailout);

    Node* dictionary = LoadProperties(object);
    var_meta_storage->Bind(dictionary);

    NameDictionaryLookup<GlobalDictionary>(
        dictionary, unique_name, if_found_global, var_name_index, if_not_found);
  }
}
5153 :
5154 609 : void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
5155 : Node* instance_type,
5156 : Node* unique_name, Label* if_found,
5157 : Label* if_not_found,
5158 : Label* if_bailout) {
5159 609 : Comment("TryHasOwnProperty");
5160 609 : VARIABLE(var_meta_storage, MachineRepresentation::kTagged);
5161 1218 : VARIABLE(var_name_index, MachineType::PointerRepresentation());
5162 :
5163 609 : Label if_found_global(this);
5164 : TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
5165 : &if_found_global, &var_meta_storage, &var_name_index,
5166 609 : if_not_found, if_bailout);
5167 609 : BIND(&if_found_global);
5168 : {
5169 609 : VARIABLE(var_value, MachineRepresentation::kTagged);
5170 1218 : VARIABLE(var_details, MachineRepresentation::kWord32);
5171 : // Check if the property cell is not deleted.
5172 : LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
5173 : var_name_index.value(), &var_value,
5174 609 : &var_details, if_not_found);
5175 1218 : Goto(if_found);
5176 609 : }
5177 609 : }
5178 :
// Loads the value and details of the fast-mode property at |name_index| in
// |descriptors|, binding them into |var_value| / |var_details|. Field
// properties may live in-object or in the properties backing store, and
// double-representation fields are reboxed into a fresh HeapNumber so the
// result is always tagged. Descriptor-held values are loaded directly.
void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
                                                   Node* descriptors,
                                                   Node* name_index,
                                                   Variable* var_details,
                                                   Variable* var_value) {
  DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
  Comment("[ LoadPropertyFromFastObject");

  Node* details =
      LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
  var_details->Bind(details);

  // Dispatch on where the property is stored: a field or the descriptor.
  Node* location = DecodeWord32<PropertyDetails::LocationField>(details);

  Label if_in_field(this), if_in_descriptor(this), done(this);
  Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
         &if_in_descriptor);
  BIND(&if_in_field);
  {
    Node* field_index =
        DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
    Node* representation =
        DecodeWord32<PropertyDetails::RepresentationField>(details);

    Node* inobject_properties = LoadMapInobjectProperties(map);

    Label if_inobject(this), if_backing_store(this);
    VARIABLE(var_double_value, MachineRepresentation::kFloat64);
    Label rebox_double(this, &var_double_value);
    // Field indices below the in-object count live inside the object itself.
    Branch(UintPtrLessThan(field_index, inobject_properties), &if_inobject,
           &if_backing_store);
    BIND(&if_inobject);
    {
      Comment("if_inobject");
      // In-object fields are laid out at the end of the instance; compute
      // the byte offset from the instance size backwards.
      Node* field_offset =
          IntPtrMul(IntPtrSub(LoadMapInstanceSize(map),
                              IntPtrSub(inobject_properties, field_index)),
                    IntPtrConstant(kPointerSize));

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        var_value->Bind(LoadObjectField(object, field_offset));
        Goto(&done);
      }
      BIND(&if_double);
      {
        // With unboxed double fields the raw float64 sits in the object;
        // otherwise a MutableHeapNumber holds it.
        if (FLAG_unbox_double_fields) {
          var_double_value.Bind(
              LoadObjectField(object, field_offset, MachineType::Float64()));
        } else {
          Node* mutable_heap_number = LoadObjectField(object, field_offset);
          var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
        }
        Goto(&rebox_double);
      }
    }
    BIND(&if_backing_store);
    {
      Comment("if_backing_store");
      Node* properties = LoadProperties(object);
      // Backing-store slots are indexed after the in-object portion.
      field_index = IntPtrSub(field_index, inobject_properties);
      Node* value = LoadFixedArrayElement(properties, field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        var_value->Bind(value);
        Goto(&done);
      }
      BIND(&if_double);
      {
        var_double_value.Bind(LoadHeapNumberValue(value));
        Goto(&rebox_double);
      }
    }
    BIND(&rebox_double);
    {
      Comment("rebox_double");
      // Allocate a fresh HeapNumber so callers always see a tagged value.
      Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
      var_value->Bind(heap_number);
      Goto(&done);
    }
  }
  BIND(&if_in_descriptor);
  {
    var_value->Bind(
        LoadValueByKeyIndex<DescriptorArray>(descriptors, name_index));
    Goto(&done);
  }
  BIND(&done);

  Comment("] LoadPropertyFromFastObject");
}
5280 :
5281 2716 : void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
5282 : Node* name_index,
5283 : Variable* var_details,
5284 : Variable* var_value) {
5285 2716 : Comment("LoadPropertyFromNameDictionary");
5286 : CSA_ASSERT(this, IsDictionary(dictionary));
5287 :
5288 : var_details->Bind(
5289 2716 : LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
5290 2716 : var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
5291 :
5292 2716 : Comment("] LoadPropertyFromNameDictionary");
5293 2716 : }
5294 :
// Loads the value and details of the global property at |name_index|: the
// dictionary entry holds a PropertyCell, whose value and details are read.
// Jumps to |if_deleted| when the cell's value is the hole (deleted
// property) without binding either output variable.
void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
                                                         Node* name_index,
                                                         Variable* var_details,
                                                         Variable* var_value,
                                                         Label* if_deleted) {
  Comment("[ LoadPropertyFromGlobalDictionary");
  CSA_ASSERT(this, IsDictionary(dictionary));

  Node* property_cell =
      LoadValueByKeyIndex<GlobalDictionary>(dictionary, name_index);

  Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
  // The hole marks a deleted property cell.
  GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);

  var_value->Bind(value);

  Node* details = LoadAndUntagToWord32ObjectField(property_cell,
                                                  PropertyCell::kDetailsOffset);
  var_details->Bind(details);

  Comment("] LoadPropertyFromGlobalDictionary");
}
5317 :

// |value| is the property backing store's contents, which is either a value
// or an accessor pair, as specified by |details|.
// Returns either the original value, or the result of the getter call.
// Bails out to |if_bailout| for AccessorInfo and FunctionTemplateInfo
// getters, which are not handled here; a non-callable getter yields
// undefined.
Node* CodeStubAssembler::CallGetterIfAccessor(Node* value, Node* details,
                                              Node* context, Node* receiver,
                                              Label* if_bailout) {
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  Label done(this);

  // Data properties are returned as-is.
  Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
  GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);

  // Accessor case.
  {
    Node* accessor_pair = value;
    // AccessorInfo (API accessors) must be handled in the runtime.
    GotoIf(Word32Equal(LoadInstanceType(accessor_pair),
                       Int32Constant(ACCESSOR_INFO_TYPE)),
           if_bailout);
    CSA_ASSERT(this, IsAccessorPair(accessor_pair));
    Node* getter = LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
    Node* getter_map = LoadMap(getter);
    Node* instance_type = LoadMapInstanceType(getter_map);
    // FunctionTemplateInfo getters are not supported yet.
    GotoIf(
        Word32Equal(instance_type, Int32Constant(FUNCTION_TEMPLATE_INFO_TYPE)),
        if_bailout);

    // Return undefined if the {getter} is not callable.
    var_value.Bind(UndefinedConstant());
    GotoIfNot(IsCallableMap(getter_map), &done);

    // Call the accessor.
    Callable callable = CodeFactory::Call(isolate());
    Node* result = CallJS(callable, context, getter, receiver);
    var_value.Bind(result);
    Goto(&done);
  }

  BIND(&done);
  return var_value.value();
}
5359 :
// Loads the value of |object|'s own property |unique_name| into
// |var_value| and jumps to |if_found_value|, invoking the getter (with
// |receiver| as this-value) if the property is an accessor. Jumps to
// |if_not_found| when absent and to |if_bailout| for cases that need the
// runtime (special receivers, API accessors, template getters).
void CodeStubAssembler::TryGetOwnProperty(
    Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
    Node* unique_name, Label* if_found_value, Variable* var_value,
    Label* if_not_found, Label* if_bailout) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
  Comment("TryGetOwnProperty");

  VARIABLE(var_meta_storage, MachineRepresentation::kTagged);
  VARIABLE(var_entry, MachineType::PointerRepresentation());

  Label if_found_fast(this), if_found_dict(this), if_found_global(this);

  VARIABLE(var_details, MachineRepresentation::kWord32);
  // |if_found| merges value+details from the three storage kinds.
  Variable* vars[] = {var_value, &var_details};
  Label if_found(this, 2, vars);

  TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
                    &if_found_dict, &if_found_global, &var_meta_storage,
                    &var_entry, if_not_found, if_bailout);
  BIND(&if_found_fast);
  {
    Node* descriptors = var_meta_storage.value();
    Node* name_index = var_entry.value();

    LoadPropertyFromFastObject(object, map, descriptors, name_index,
                               &var_details, var_value);
    Goto(&if_found);
  }
  BIND(&if_found_dict);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();
    LoadPropertyFromNameDictionary(dictionary, entry, &var_details, var_value);
    Goto(&if_found);
  }
  BIND(&if_found_global);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();

    // Deleted property cells count as not found.
    LoadPropertyFromGlobalDictionary(dictionary, entry, &var_details, var_value,
                                     if_not_found);
    Goto(&if_found);
  }
  // Here we have details and value which could be an accessor.
  BIND(&if_found);
  {
    Node* value = CallGetterIfAccessor(var_value->value(), var_details.value(),
                                       context, receiver, if_bailout);
    var_value->Bind(value);
    Goto(if_found_value);
  }
}
5413 :
// Determines whether |object| has an own element at |intptr_index|:
// |if_found| when present, |if_not_found| when a plain miss (or a hole),
// |if_absent| for typed arrays whose buffer is detached or whose index is
// out of bounds, and |if_bailout| when the runtime must handle it (special
// receivers, unsupported elements kinds, negative indices). The values[]
// and labels[] tables below must stay in 1:1 correspondence.
void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
                                         Node* instance_type,
                                         Node* intptr_index, Label* if_found,
                                         Label* if_absent, Label* if_not_found,
                                         Label* if_bailout) {
  // Handle special objects in runtime.
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
         if_bailout);

  Node* elements_kind = LoadMapElementsKind(map);

  // TODO(verwaest): Support other elements kinds as well.
  Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
      if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
      if_typedarray(this);
  // clang-format off
  int32_t values[] = {
      // Handled by {if_isobjectorsmi}.
      FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS, FAST_ELEMENTS,
          FAST_HOLEY_ELEMENTS,
      // Handled by {if_isdouble}.
      FAST_DOUBLE_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS,
      // Handled by {if_isdictionary}.
      DICTIONARY_ELEMENTS,
      // Handled by {if_isfaststringwrapper}.
      FAST_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_isslowstringwrapper}.
      SLOW_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_not_found}.
      NO_ELEMENTS,
      // Handled by {if_typed_array}.
      UINT8_ELEMENTS,
      INT8_ELEMENTS,
      UINT16_ELEMENTS,
      INT16_ELEMENTS,
      UINT32_ELEMENTS,
      INT32_ELEMENTS,
      FLOAT32_ELEMENTS,
      FLOAT64_ELEMENTS,
      UINT8_CLAMPED_ELEMENTS,
  };
  Label* labels[] = {
      &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
          &if_isobjectorsmi,
      &if_isdouble, &if_isdouble,
      &if_isdictionary,
      &if_isfaststringwrapper,
      &if_isslowstringwrapper,
      if_not_found,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
  };
  // clang-format on
  STATIC_ASSERT(arraysize(values) == arraysize(labels));
  Switch(elements_kind, if_bailout, values, labels, arraysize(values));

  BIND(&if_isobjectorsmi);
  {
    Node* elements = LoadElements(object);
    Node* length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    // A hole means the element is absent despite being in bounds.
    Node* element = LoadFixedArrayElement(elements, intptr_index);
    Node* the_hole = TheHoleConstant();
    Branch(WordEqual(element, the_hole), if_not_found, if_found);
  }
  BIND(&if_isdouble);
  {
    Node* elements = LoadElements(object);
    Node* length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    // Check if the element is a double hole, but don't load it.
    LoadFixedDoubleArrayElement(elements, intptr_index, MachineType::None(), 0,
                                INTPTR_PARAMETERS, if_not_found);
    Goto(if_found);
  }
  BIND(&if_isdictionary);
  {
    // Negative keys must be converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);

    VARIABLE(var_entry, MachineType::PointerRepresentation());
    Node* elements = LoadElements(object);
    NumberDictionaryLookup<SeededNumberDictionary>(
        elements, intptr_index, if_found, &var_entry, if_not_found);
  }
  BIND(&if_isfaststringwrapper);
  {
    // String wrappers: indices within the string are always found;
    // beyond the string length, fall back to the regular elements.
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsStringInstanceType(LoadInstanceType(string)));
    Node* length = LoadStringLength(string);
    GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
    Goto(&if_isobjectorsmi);
  }
  BIND(&if_isslowstringwrapper);
  {
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsStringInstanceType(LoadInstanceType(string)));
    Node* length = LoadStringLength(string);
    GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
    Goto(&if_isdictionary);
  }
  BIND(&if_typedarray);
  {
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), if_absent);

    Node* length = TryToIntptr(
        LoadObjectField(object, JSTypedArray::kLengthOffset), if_bailout);
    Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
  }
  BIND(&if_oob);
  {
    // Positive OOB indices mean "not found", negative indices must be
    // converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
    Goto(if_not_found);
  }
}
5546 :
// Instantiate template methods to workaround GCC compilation issue.
// (Explicit instantiations for both number-dictionary flavors used by
// callers in other translation units.)
template void CodeStubAssembler::NumberDictionaryLookup<SeededNumberDictionary>(
    Node*, Node*, Label*, Variable*, Label*);
template void CodeStubAssembler::NumberDictionaryLookup<
    UnseededNumberDictionary>(Node*, Node*, Label*, Variable*, Label*);
5552 :
// Walks |receiver|'s prototype chain, invoking |lookup_property_in_holder|
// (for unique-name keys) or |lookup_element_in_holder| (for integer index
// keys) on each holder until a callback jumps away, the chain ends in null
// (-> |if_end|), or a bailout condition is hit (Smi receiver, non-receiver,
// unconvertible key, typed-array holder on the named path). The two loops
// are structurally parallel; only the callback and the typed-array check
// differ.
void CodeStubAssembler::TryPrototypeChainLookup(
    Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
    const LookupInHolder& lookup_element_in_holder, Label* if_end,
    Label* if_bailout) {
  // Ensure receiver is JSReceiver, otherwise bailout.
  Label if_objectisnotsmi(this);
  Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
  BIND(&if_objectisnotsmi);

  Node* map = LoadMap(receiver);
  Node* instance_type = LoadMapInstanceType(map);
  {
    Label if_objectisreceiver(this);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
    // Note: strictly greater than FIRST_JS_RECEIVER_TYPE, so proxies
    // themselves bail out.
    Branch(
        Int32GreaterThan(instance_type, Int32Constant(FIRST_JS_RECEIVER_TYPE)),
        &if_objectisreceiver, if_bailout);
    BIND(&if_objectisreceiver);
  }

  VARIABLE(var_index, MachineType::PointerRepresentation());
  VARIABLE(var_unique, MachineRepresentation::kTagged);

  // Classify the key as array index vs unique name.
  Label if_keyisindex(this), if_iskeyunique(this);
  TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
            if_bailout);

  BIND(&if_iskeyunique);
  {
    VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
    VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
    VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
             instance_type);

    // Holder, its map and instance type advance together each iteration.
    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    BIND(&loop);
    {
      Node* holder_map = var_holder_map.value();
      Node* holder_instance_type = var_holder_instance_type.value();

      Label next_proto(this);
      lookup_property_in_holder(receiver, var_holder.value(), holder_map,
                                holder_instance_type, var_unique.value(),
                                &next_proto, if_bailout);
      BIND(&next_proto);

      // Bailout if it can be an integer indexed exotic case.
      GotoIf(
          Word32Equal(holder_instance_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
          if_bailout);

      Node* proto = LoadMapPrototype(holder_map);

      Label if_not_null(this);
      Branch(WordEqual(proto, NullConstant()), if_end, &if_not_null);
      BIND(&if_not_null);

      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
  BIND(&if_keyisindex);
  {
    VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
    VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
    VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
             instance_type);

    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    BIND(&loop);
    {
      Label next_proto(this);
      lookup_element_in_holder(receiver, var_holder.value(),
                               var_holder_map.value(),
                               var_holder_instance_type.value(),
                               var_index.value(), &next_proto, if_bailout);
      BIND(&next_proto);

      Node* proto = LoadMapPrototype(var_holder_map.value());

      Label if_not_null(this);
      Branch(WordEqual(proto, NullConstant()), if_end, &if_not_null);
      BIND(&if_not_null);

      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
}
5659 :
// Implements the OrdinaryHasInstance operation (the default `instanceof`
// semantics) for a JSFunction {callable}: walks {object}'s prototype chain
// looking for {callable}'s "prototype" object. Smis, proxies, access-checked
// objects, non-constructors and non-JSFunction callables are deferred to the
// runtime. Results are memoized in the global instanceof cache roots.
Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
                                             Node* object) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_false(this), return_true(this),
      return_runtime(this, Label::kDeferred), return_result(this);

  // Goto runtime if {object} is a Smi.
  GotoIf(TaggedIsSmi(object), &return_runtime);

  // Load map of {object}.
  Node* object_map = LoadMap(object);

  // Lookup the {callable} and {object} map in the global instanceof cache.
  // Note: This is safe because we clear the global instanceof cache whenever
  // we change the prototype of any object.
  Node* instanceof_cache_function =
      LoadRoot(Heap::kInstanceofCacheFunctionRootIndex);
  Node* instanceof_cache_map = LoadRoot(Heap::kInstanceofCacheMapRootIndex);
  {
    Label instanceof_cache_miss(this);
    GotoIfNot(WordEqual(instanceof_cache_function, callable),
              &instanceof_cache_miss);
    GotoIfNot(WordEqual(instanceof_cache_map, object_map),
              &instanceof_cache_miss);
    // Cache hit: both function and map match, so the cached answer applies.
    var_result.Bind(LoadRoot(Heap::kInstanceofCacheAnswerRootIndex));
    Goto(&return_result);
    BIND(&instanceof_cache_miss);
  }

  // Goto runtime if {callable} is a Smi.
  GotoIf(TaggedIsSmi(callable), &return_runtime);

  // Load map of {callable}.
  Node* callable_map = LoadMap(callable);

  // Goto runtime if {callable} is not a JSFunction.
  Node* callable_instance_type = LoadMapInstanceType(callable_map);
  GotoIfNot(
      Word32Equal(callable_instance_type, Int32Constant(JS_FUNCTION_TYPE)),
      &return_runtime);

  // Goto runtime if {callable} is not a constructor or has
  // a non-instance "prototype". The single comparison checks both bits:
  // kIsConstructor must be set and kHasNonInstancePrototype must be clear.
  Node* callable_bitfield = LoadMapBitField(callable_map);
  GotoIfNot(
      Word32Equal(Word32And(callable_bitfield,
                            Int32Constant((1 << Map::kHasNonInstancePrototype) |
                                          (1 << Map::kIsConstructor))),
                  Int32Constant(1 << Map::kIsConstructor)),
      &return_runtime);

  // Get the "prototype" (or initial map) of the {callable}.
  Node* callable_prototype =
      LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
  {
    Label callable_prototype_valid(this);
    VARIABLE(var_callable_prototype, MachineRepresentation::kTagged,
             callable_prototype);

    // Resolve the "prototype" if the {callable} has an initial map. Afterwards
    // the {callable_prototype} will be either the JSReceiver prototype object
    // or the hole value, which means that no instances of the {callable} were
    // created so far and hence we should return false.
    Node* callable_prototype_instance_type =
        LoadInstanceType(callable_prototype);
    GotoIfNot(
        Word32Equal(callable_prototype_instance_type, Int32Constant(MAP_TYPE)),
        &callable_prototype_valid);
    var_callable_prototype.Bind(
        LoadObjectField(callable_prototype, Map::kPrototypeOffset));
    Goto(&callable_prototype_valid);
    BIND(&callable_prototype_valid);
    callable_prototype = var_callable_prototype.value();
  }

  // Update the global instanceof cache with the current {object} map and
  // {callable}. The cached answer will be set when it is known below.
  StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, callable);
  StoreRoot(Heap::kInstanceofCacheMapRootIndex, object_map);

  // Loop through the prototype chain looking for the {callable} prototype.
  VARIABLE(var_object_map, MachineRepresentation::kTagged, object_map);
  Label loop(this, &var_object_map);
  Goto(&loop);
  BIND(&loop);
  {
    Node* object_map = var_object_map.value();

    // Check if the current {object} needs to be access checked.
    Node* object_bitfield = LoadMapBitField(object_map);
    GotoIfNot(
        Word32Equal(Word32And(object_bitfield,
                              Int32Constant(1 << Map::kIsAccessCheckNeeded)),
                    Int32Constant(0)),
        &return_runtime);

    // Check if the current {object} is a proxy.
    Node* object_instance_type = LoadMapInstanceType(object_map);
    GotoIf(Word32Equal(object_instance_type, Int32Constant(JS_PROXY_TYPE)),
           &return_runtime);

    // Check the current {object} prototype. Reaching null means the end of
    // the chain without a match.
    Node* object_prototype = LoadMapPrototype(object_map);
    GotoIf(WordEqual(object_prototype, NullConstant()), &return_false);
    GotoIf(WordEqual(object_prototype, callable_prototype), &return_true);

    // Continue with the prototype.
    var_object_map.Bind(LoadMap(object_prototype));
    Goto(&loop);
  }

  BIND(&return_true);
  StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(true));
  var_result.Bind(BooleanConstant(true));
  Goto(&return_result);

  BIND(&return_false);
  StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(false));
  var_result.Bind(BooleanConstant(false));
  Goto(&return_result);

  BIND(&return_runtime);
  {
    // Invalidate the global instanceof cache (the function/map were already
    // stored above but no answer was recorded).
    StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, SmiConstant(0));
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
5794 :
5795 374209 : Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
5796 : ElementsKind kind,
5797 : ParameterMode mode,
5798 : int base_size) {
5799 374209 : int element_size_shift = ElementsKindToShiftSize(kind);
5800 374209 : int element_size = 1 << element_size_shift;
5801 : int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
5802 374209 : intptr_t index = 0;
5803 : bool constant_index = false;
5804 374209 : if (mode == SMI_PARAMETERS) {
5805 79019 : element_size_shift -= kSmiShiftBits;
5806 : Smi* smi_index;
5807 79019 : constant_index = ToSmiConstant(index_node, smi_index);
5808 79019 : if (constant_index) index = smi_index->value();
5809 79019 : index_node = BitcastTaggedToWord(index_node);
5810 : } else {
5811 : DCHECK(mode == INTPTR_PARAMETERS);
5812 295190 : constant_index = ToIntPtrConstant(index_node, index);
5813 : }
5814 374209 : if (constant_index) {
5815 56792 : return IntPtrConstant(base_size + element_size * index);
5816 : }
5817 :
5818 : Node* shifted_index =
5819 : (element_size_shift == 0)
5820 : ? index_node
5821 : : ((element_size_shift > 0)
5822 233851 : ? WordShl(index_node, IntPtrConstant(element_size_shift))
5823 551268 : : WordShr(index_node, IntPtrConstant(-element_size_shift)));
5824 317417 : return IntPtrAdd(IntPtrConstant(base_size), shifted_index);
5825 : }
5826 :
5827 26503 : Node* CodeStubAssembler::LoadFeedbackVectorForStub() {
5828 : Node* function =
5829 26503 : LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset);
5830 26504 : Node* cell = LoadObjectField(function, JSFunction::kFeedbackVectorOffset);
5831 26504 : return LoadObjectField(cell, Cell::kValueOffset);
5832 : }
5833 :
5834 5762 : void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* feedback_vector,
5835 : Node* slot_id) {
5836 : // This method is used for binary op and compare feedback. These
5837 : // vector nodes are initialized with a smi 0, so we can simply OR
5838 : // our new feedback in place.
5839 5762 : Node* previous_feedback = LoadFixedArrayElement(feedback_vector, slot_id);
5840 5762 : Node* combined_feedback = SmiOr(previous_feedback, feedback);
5841 : StoreFixedArrayElement(feedback_vector, slot_id, combined_feedback,
5842 5762 : SKIP_WRITE_BARRIER);
5843 5762 : }
5844 :
5845 559 : void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
5846 : Label* if_protector) {
5847 : // This list must be kept in sync with LookupIterator::UpdateProtector!
5848 : // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
5849 : GotoIf(WordEqual(name, LoadRoot(Heap::kconstructor_stringRootIndex)),
5850 559 : if_protector);
5851 : GotoIf(WordEqual(name, LoadRoot(Heap::kiterator_symbolRootIndex)),
5852 559 : if_protector);
5853 : GotoIf(WordEqual(name, LoadRoot(Heap::kspecies_symbolRootIndex)),
5854 559 : if_protector);
5855 : GotoIf(WordEqual(name, LoadRoot(Heap::kis_concat_spreadable_symbolRootIndex)),
5856 559 : if_protector);
5857 : // Fall through if no case matched.
5858 559 : }
5859 :
5860 516 : Node* CodeStubAssembler::LoadReceiverMap(Node* receiver) {
5861 : return Select(TaggedIsSmi(receiver),
5862 516 : [=] { return LoadRoot(Heap::kHeapNumberMapRootIndex); },
5863 1032 : [=] { return LoadMap(receiver); },
5864 1548 : MachineRepresentation::kTagged);
5865 : }
5866 :
5867 7501 : Node* CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
5868 7501 : VARIABLE(var_intptr_key, MachineType::PointerRepresentation());
5869 7501 : Label done(this, &var_intptr_key), key_is_smi(this);
5870 7501 : GotoIf(TaggedIsSmi(key), &key_is_smi);
5871 : // Try to convert a heap number to a Smi.
5872 7501 : GotoIfNot(IsHeapNumberMap(LoadMap(key)), miss);
5873 : {
5874 : Node* value = LoadHeapNumberValue(key);
5875 7501 : Node* int_value = RoundFloat64ToInt32(value);
5876 7501 : GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
5877 7501 : var_intptr_key.Bind(ChangeInt32ToIntPtr(int_value));
5878 7501 : Goto(&done);
5879 : }
5880 :
5881 7501 : BIND(&key_is_smi);
5882 : {
5883 7501 : var_intptr_key.Bind(SmiUntag(key));
5884 7501 : Goto(&done);
5885 : }
5886 :
5887 7501 : BIND(&done);
5888 15002 : return var_intptr_key.value();
5889 : }
5890 :
// Emits a keyed load (when {value} == nullptr) or store (otherwise) on a
// JSObject backed by sloppy-arguments elements. Returns the loaded value (or
// the stored {value}). Jumps to {bailout} for anything the fast path cannot
// handle: non-Smi or negative keys, a non-FixedArray unmapped backing store,
// out-of-range keys, or a hole in the unmapped arguments on load.
Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
                                                  Node* value, Label* bailout) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  // A nullptr {value} selects the load path; anything else is stored.
  bool is_load = value == nullptr;

  // Only non-negative Smi keys are handled on this fast path.
  GotoIfNot(TaggedIsSmi(key), bailout);
  key = SmiUntag(key);
  GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);

  Node* elements = LoadElements(receiver);
  Node* elements_length = LoadAndUntagFixedArrayBaseLength(elements);

  VARIABLE(var_result, MachineRepresentation::kTagged);
  if (!is_load) {
    // For stores the result is the value being stored.
    var_result.Bind(value);
  }
  Label if_mapped(this), if_unmapped(this), end(this, &var_result);
  Node* intptr_two = IntPtrConstant(2);
  // The first two slots are the context and the unmapped arguments array, so
  // only elements.length - 2 entries can be mapped.
  Node* adjusted_length = IntPtrSub(elements_length, intptr_two);

  GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);

  Node* mapped_index =
      LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
  // The hole marks an entry that is no longer context-mapped (see the format
  // description above); such keys go through the unmapped path.
  Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);

  BIND(&if_mapped);
  {
    CSA_ASSERT(this, TaggedIsSmi(mapped_index));
    mapped_index = SmiUntag(mapped_index);
    Node* the_context = LoadFixedArrayElement(elements, 0);
    // Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
    // methods for accessing Context.
    STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
    DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
              FixedArray::OffsetOfElementAt(0));
    if (is_load) {
      Node* result = LoadFixedArrayElement(the_context, mapped_index);
      CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
      var_result.Bind(result);
    } else {
      StoreFixedArrayElement(the_context, mapped_index, value);
    }
    Goto(&end);
  }

  BIND(&if_unmapped);
  {
    Node* backing_store = LoadFixedArrayElement(elements, 1);
    // Bail out when the unmapped backing store is not a plain FixedArray.
    GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()),
           bailout);

    Node* backing_store_length =
        LoadAndUntagFixedArrayBaseLength(backing_store);
    GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);

    // The key falls into unmapped range.
    if (is_load) {
      Node* result = LoadFixedArrayElement(backing_store, key);
      GotoIf(WordEqual(result, TheHoleConstant()), bailout);
      var_result.Bind(result);
    } else {
      StoreFixedArrayElement(backing_store, key, value);
    }
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
5986 :
5987 14992 : Node* CodeStubAssembler::LoadScriptContext(Node* context, int context_index) {
5988 : Node* native_context = LoadNativeContext(context);
5989 : Node* script_context_table =
5990 14992 : LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX);
5991 :
5992 : int offset =
5993 14992 : ScriptContextTable::GetContextOffset(context_index) - kHeapObjectTag;
5994 : return Load(MachineType::AnyTagged(), script_context_table,
5995 14992 : IntPtrConstant(offset));
5996 : }
5997 :
5998 : namespace {
5999 :
6000 : // Converts typed array elements kind to a machine representations.
6001 2693 : MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
6002 2693 : switch (kind) {
6003 : case UINT8_CLAMPED_ELEMENTS:
6004 : case UINT8_ELEMENTS:
6005 : case INT8_ELEMENTS:
6006 : return MachineRepresentation::kWord8;
6007 : case UINT16_ELEMENTS:
6008 : case INT16_ELEMENTS:
6009 369 : return MachineRepresentation::kWord16;
6010 : case UINT32_ELEMENTS:
6011 : case INT32_ELEMENTS:
6012 587 : return MachineRepresentation::kWord32;
6013 : case FLOAT32_ELEMENTS:
6014 154 : return MachineRepresentation::kFloat32;
6015 : case FLOAT64_ELEMENTS:
6016 175 : return MachineRepresentation::kFloat64;
6017 : default:
6018 0 : UNREACHABLE();
6019 : return MachineRepresentation::kNone;
6020 : }
6021 : }
6022 :
6023 : } // namespace
6024 :
6025 5982 : void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
6026 : Node* index, Node* value,
6027 : ParameterMode mode) {
6028 5982 : if (IsFixedTypedArrayElementsKind(kind)) {
6029 : if (kind == UINT8_CLAMPED_ELEMENTS) {
6030 : CSA_ASSERT(this,
6031 : Word32Equal(value, Word32And(Int32Constant(0xff), value)));
6032 : }
6033 2693 : Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
6034 2693 : MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
6035 2693 : StoreNoWriteBarrier(rep, elements, offset, value);
6036 8675 : return;
6037 : }
6038 :
6039 : WriteBarrierMode barrier_mode =
6040 3289 : IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
6041 3289 : if (IsFastDoubleElementsKind(kind)) {
6042 : // Make sure we do not store signalling NaNs into double arrays.
6043 445 : value = Float64SilenceNaN(value);
6044 445 : StoreFixedDoubleArrayElement(elements, index, value, mode);
6045 : } else {
6046 2844 : StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
6047 : }
6048 : }
6049 :
6050 123 : Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
6051 123 : Label done(this);
6052 123 : Node* int32_zero = Int32Constant(0);
6053 123 : Node* int32_255 = Int32Constant(255);
6054 246 : VARIABLE(var_value, MachineRepresentation::kWord32, int32_value);
6055 123 : GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
6056 123 : var_value.Bind(int32_zero);
6057 123 : GotoIf(Int32LessThan(int32_value, int32_zero), &done);
6058 123 : var_value.Bind(int32_255);
6059 123 : Goto(&done);
6060 123 : BIND(&done);
6061 246 : return var_value.value();
6062 : }
6063 :
6064 123 : Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
6065 123 : Label done(this);
6066 246 : VARIABLE(var_value, MachineRepresentation::kWord32, Int32Constant(0));
6067 123 : GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
6068 123 : var_value.Bind(Int32Constant(255));
6069 123 : GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
6070 : {
6071 123 : Node* rounded_value = Float64RoundToEven(float64_value);
6072 123 : var_value.Bind(TruncateFloat64ToWord32(rounded_value));
6073 123 : Goto(&done);
6074 : }
6075 123 : BIND(&done);
6076 246 : return var_value.value();
6077 : }
6078 :
// Converts {input} (a Smi or a HeapNumber) to the raw machine value that a
// typed array of {elements_kind} stores. Any other heap object jumps to
// {bailout}.
Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
    Node* input, ElementsKind elements_kind, Label* bailout) {
  DCHECK(IsFixedTypedArrayElementsKind(elements_kind));

  // Select the machine representation the backing store requires.
  MachineRepresentation rep;
  switch (elements_kind) {
    case UINT8_ELEMENTS:
    case INT8_ELEMENTS:
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
      rep = MachineRepresentation::kWord32;
      break;
    case FLOAT32_ELEMENTS:
      rep = MachineRepresentation::kFloat32;
      break;
    case FLOAT64_ELEMENTS:
      rep = MachineRepresentation::kFloat64;
      break;
    default:
      UNREACHABLE();
      return nullptr;
  }

  VARIABLE(var_result, rep);
  Label done(this, &var_result), if_smi(this);
  GotoIf(TaggedIsSmi(input), &if_smi);
  // Besides Smis, only HeapNumbers are supported; other objects bail out.
  GotoIfNot(IsHeapNumberMap(LoadMap(input)), bailout);
  {
    // HeapNumber path: convert the float64 payload to the target rep.
    Node* value = LoadHeapNumberValue(input);
    if (rep == MachineRepresentation::kWord32) {
      if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
        // Uint8Clamped requires clamping to [0, 255] (with rounding).
        value = Float64ToUint8Clamped(value);
      } else {
        value = TruncateFloat64ToWord32(value);
      }
    } else if (rep == MachineRepresentation::kFloat32) {
      value = TruncateFloat64ToFloat32(value);
    } else {
      // Float64 backing stores take the value as-is.
      DCHECK_EQ(MachineRepresentation::kFloat64, rep);
    }
    var_result.Bind(value);
    Goto(&done);
  }

  BIND(&if_smi);
  {
    // Smi path: widen/convert the untagged int32 to the target rep.
    Node* value = SmiToWord32(input);
    if (rep == MachineRepresentation::kFloat32) {
      value = RoundInt32ToFloat32(value);
    } else if (rep == MachineRepresentation::kFloat64) {
      value = ChangeInt32ToFloat64(value);
    } else {
      DCHECK_EQ(MachineRepresentation::kWord32, rep);
      if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
        value = Int32ToUint8Clamped(value);
      }
    }
    var_result.Bind(value);
    Goto(&done);
  }

  BIND(&done);
  return var_result.value();
}
6147 :
// Stores {value} at {key} into {object}'s elements, handling fixed typed
// array kinds as well as fast Smi/object and fast double kinds. Growing,
// copy-on-write handling and out-of-bounds policy are selected by
// {store_mode}; anything the fast path cannot handle jumps to {bailout}.
void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
                                         bool is_jsarray,
                                         ElementsKind elements_kind,
                                         KeyedAccessStoreMode store_mode,
                                         Label* bailout) {
  Node* elements = LoadElements(object);
  if (IsFastSmiOrObjectElementsKind(elements_kind) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    // Bailout in case of COW elements.
    GotoIf(WordNotEqual(LoadMap(elements),
                        LoadRoot(Heap::kFixedArrayMapRootIndex)),
           bailout);
  }
  // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
  ParameterMode parameter_mode = INTPTR_PARAMETERS;
  key = TryToIntptr(key, bailout);

  if (IsFixedTypedArrayElementsKind(elements_kind)) {
    Label done(this);
    // TODO(ishell): call ToNumber() on value and don't bailout but be careful
    // to call it only once if we decide to bailout because of bounds checks.

    value = PrepareValueForWriteToTypedArray(value, elements_kind, bailout);

    // There must be no allocations between the buffer load and
    // the actual store to backing store, because GC may decide that
    // the buffer is not alive or move the elements.
    // TODO(ishell): introduce DisallowHeapAllocationCode scope here.

    // Check if buffer has been neutered.
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), bailout);

    // Bounds check.
    Node* length = TaggedToParameter(
        LoadObjectField(object, JSTypedArray::kLengthOffset), parameter_mode);

    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Skip the store if we write beyond the length.
      GotoIfNot(IntPtrLessThan(key, length), &done);
      // ... but bailout if the key is negative.
    } else {
      DCHECK_EQ(STANDARD_STORE, store_mode);
    }
    // Unsigned compare also rejects negative keys.
    GotoIfNot(UintPtrLessThan(key, length), bailout);

    // Backing store = external_pointer + base_pointer.
    Node* external_pointer =
        LoadObjectField(elements, FixedTypedArrayBase::kExternalPointerOffset,
                        MachineType::Pointer());
    Node* base_pointer =
        LoadObjectField(elements, FixedTypedArrayBase::kBasePointerOffset);
    Node* backing_store =
        IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer));
    StoreElement(backing_store, elements_kind, key, value, parameter_mode);
    Goto(&done);

    BIND(&done);
    return;
  }
  DCHECK(IsFastSmiOrObjectElementsKind(elements_kind) ||
         IsFastDoubleElementsKind(elements_kind));

  // JSArrays track their own length; other receivers use the backing store's.
  Node* length = is_jsarray ? LoadObjectField(object, JSArray::kLengthOffset)
                            : LoadFixedArrayBaseLength(elements);
  length = TaggedToParameter(length, parameter_mode);

  // In case value is stored into a fast smi array, assure that the value is
  // a smi before manipulating the backing store. Otherwise the backing store
  // may be left in an invalid state.
  if (IsFastSmiElementsKind(elements_kind)) {
    GotoIfNot(TaggedIsSmi(value), bailout);
  } else if (IsFastDoubleElementsKind(elements_kind)) {
    value = TryTaggedToFloat64(value, bailout);
  }

  if (IsGrowStoreMode(store_mode)) {
    elements = CheckForCapacityGrow(object, elements, elements_kind, length,
                                    key, parameter_mode, is_jsarray, bailout);
  } else {
    GotoIfNot(UintPtrLessThan(key, length), bailout);

    if ((store_mode == STORE_NO_TRANSITION_HANDLE_COW) &&
        IsFastSmiOrObjectElementsKind(elements_kind)) {
      elements = CopyElementsOnWrite(object, elements, elements_kind, length,
                                     parameter_mode, bailout);
    }
  }
  StoreElement(elements, elements_kind, key, value, parameter_mode);
}
6238 :
6239 1052 : Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
6240 : ElementsKind kind, Node* length,
6241 : Node* key, ParameterMode mode,
6242 : bool is_js_array,
6243 : Label* bailout) {
6244 1052 : VARIABLE(checked_elements, MachineRepresentation::kTagged);
6245 1052 : Label grow_case(this), no_grow_case(this), done(this);
6246 :
6247 : Node* condition;
6248 1052 : if (IsHoleyElementsKind(kind)) {
6249 242 : condition = UintPtrGreaterThanOrEqual(key, length);
6250 : } else {
6251 810 : condition = WordEqual(key, length);
6252 : }
6253 1052 : Branch(condition, &grow_case, &no_grow_case);
6254 :
6255 1052 : BIND(&grow_case);
6256 : {
6257 : Node* current_capacity =
6258 : TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
6259 :
6260 1052 : checked_elements.Bind(elements);
6261 :
6262 : Label fits_capacity(this);
6263 1052 : GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
6264 : {
6265 : Node* new_elements = TryGrowElementsCapacity(
6266 1052 : object, elements, kind, key, current_capacity, mode, bailout);
6267 :
6268 1052 : checked_elements.Bind(new_elements);
6269 1052 : Goto(&fits_capacity);
6270 : }
6271 1052 : BIND(&fits_capacity);
6272 :
6273 1052 : if (is_js_array) {
6274 1002 : Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
6275 : StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
6276 1002 : ParameterToTagged(new_length, mode));
6277 : }
6278 1052 : Goto(&done);
6279 : }
6280 :
6281 1052 : BIND(&no_grow_case);
6282 : {
6283 1052 : GotoIfNot(UintPtrLessThan(key, length), bailout);
6284 1052 : checked_elements.Bind(elements);
6285 1052 : Goto(&done);
6286 : }
6287 :
6288 1052 : BIND(&done);
6289 2104 : return checked_elements.value();
6290 : }
6291 :
6292 454 : Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
6293 : ElementsKind kind, Node* length,
6294 : ParameterMode mode,
6295 : Label* bailout) {
6296 454 : VARIABLE(new_elements_var, MachineRepresentation::kTagged, elements);
6297 454 : Label done(this);
6298 :
6299 : GotoIfNot(
6300 : WordEqual(LoadMap(elements), LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
6301 908 : &done);
6302 : {
6303 : Node* capacity =
6304 : TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
6305 : Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
6306 454 : length, capacity, mode, bailout);
6307 :
6308 454 : new_elements_var.Bind(new_elements);
6309 454 : Goto(&done);
6310 : }
6311 :
6312 454 : BIND(&done);
6313 908 : return new_elements_var.value();
6314 : }
6315 :
6316 2138 : void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
6317 : ElementsKind from_kind,
6318 : ElementsKind to_kind,
6319 : bool is_jsarray,
6320 : Label* bailout) {
6321 : DCHECK(!IsFastHoleyElementsKind(from_kind) ||
6322 : IsFastHoleyElementsKind(to_kind));
6323 2138 : if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
6324 1977 : TrapAllocationMemento(object, bailout);
6325 : }
6326 :
6327 2138 : if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
6328 366 : Comment("Non-simple map transition");
6329 : Node* elements = LoadElements(object);
6330 :
6331 : Node* empty_fixed_array =
6332 732 : HeapConstant(isolate()->factory()->empty_fixed_array());
6333 :
6334 : Label done(this);
6335 366 : GotoIf(WordEqual(elements, empty_fixed_array), &done);
6336 :
6337 : // TODO(ishell): Use OptimalParameterMode().
6338 : ParameterMode mode = INTPTR_PARAMETERS;
6339 366 : Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
6340 : Node* array_length =
6341 352 : is_jsarray ? SmiUntag(LoadObjectField(object, JSArray::kLengthOffset))
6342 718 : : elements_length;
6343 :
6344 : GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
6345 366 : elements_length, mode, bailout);
6346 366 : Goto(&done);
6347 366 : BIND(&done);
6348 : }
6349 :
6350 2138 : StoreMap(object, map);
6351 2138 : }
6352 :
// Branches to {memento_found} when an AllocationMemento directly follows
// {object} (probed at offset JSArray::kSize); falls through otherwise.
// Only objects in new space are checked — anything else falls through.
void CodeStubAssembler::TrapAllocationMemento(Node* object,
                                              Label* memento_found) {
  Comment("[ TrapAllocationMemento");
  Label no_memento_found(this);
  Label top_check(this), map_check(this);

  Node* new_space_top_address = ExternalConstant(
      ExternalReference::new_space_allocation_top_address(isolate()));
  // The candidate memento sits right after the JSArray header.
  const int kMementoMapOffset = JSArray::kSize;
  const int kMementoLastWordOffset =
      kMementoMapOffset + AllocationMemento::kSize - kPointerSize;

  // Bail out if the object is not in new space.
  Node* object_word = BitcastTaggedToWord(object);
  Node* object_page = PageFromAddress(object_word);
  {
    Node* page_flags = Load(MachineType::IntPtr(), object_page,
                            IntPtrConstant(Page::kFlagsOffset));
    GotoIf(WordEqual(WordAnd(page_flags,
                             IntPtrConstant(MemoryChunk::kIsInNewSpaceMask)),
                     IntPtrConstant(0)),
           &no_memento_found);
  }

  Node* memento_last_word = IntPtrAdd(
      object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
  Node* memento_last_word_page = PageFromAddress(memento_last_word);

  Node* new_space_top = Load(MachineType::Pointer(), new_space_top_address);
  Node* new_space_top_page = PageFromAddress(new_space_top);

  // If the object is in new space, we need to check whether respective
  // potential memento object is on the same page as the current top.
  GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);

  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  Branch(WordEqual(object_page, memento_last_word_page), &map_check,
         &no_memento_found);

  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  BIND(&top_check);
  {
    // At or above top means the memento slot is unallocated memory.
    Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
           &no_memento_found, &map_check);
  }

  // Memento map check.
  BIND(&map_check);
  {
    Node* memento_map = LoadObjectField(object, kMementoMapOffset);
    Branch(
        WordEqual(memento_map, LoadRoot(Heap::kAllocationMementoMapRootIndex)),
        memento_found, &no_memento_found);
  }
  BIND(&no_memento_found);
  Comment("] TrapAllocationMemento");
}
6413 :
6414 7737 : Node* CodeStubAssembler::PageFromAddress(Node* address) {
6415 7737 : return WordAnd(address, IntPtrConstant(~Page::kPageAlignmentMask));
6416 : }
6417 :
// Allocates a fresh AllocationSite, initializes all of its fields, links it
// into the isolate's allocation-site list, and stores it into
// {feedback_vector} at {slot} (a Smi index). Returns the new site.
Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
    Node* feedback_vector, Node* slot) {
  Node* size = IntPtrConstant(AllocationSite::kSize);
  // Allocation sites must survive scavenges, hence the pretenured allocation.
  Node* site = Allocate(size, CodeStubAssembler::kPretenured);

  StoreMap(site, AllocationSiteMapConstant());
  // Initial transition info records the initial fast elements kind.
  Node* kind = SmiConstant(GetInitialFastElementsKind());
  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kTransitionInfoOffset,
                                 kind);

  // Unlike literals, constructed arrays don't have nested sites
  Node* zero = SmiConstant(0);
  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);

  // Pretenuring calculation field.
  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
                                 zero);

  // Pretenuring memento creation count field.
  StoreObjectFieldNoWriteBarrier(
      site, AllocationSite::kPretenureCreateCountOffset, zero);

  // Store an empty fixed array for the code dependency.
  StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
                       Heap::kEmptyFixedArrayRootIndex);

  // Link the object to the allocation site list
  Node* site_list = ExternalConstant(
      ExternalReference::allocation_sites_list_address(isolate()));
  Node* next_site = LoadBufferObject(site_list, 0);

  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
  StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);

  // Publish the site in the feedback vector slot (needs the write barrier:
  // the vector may be old-space while the site reference must stay traced).
  StoreFixedArrayElement(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
                         CodeStubAssembler::SMI_PARAMETERS);
  return site;
}
6461 :
6462 28836 : Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
6463 : Node* slot,
6464 : Node* value) {
6465 28836 : Node* size = IntPtrConstant(WeakCell::kSize);
6466 28836 : Node* cell = Allocate(size, CodeStubAssembler::kPretenured);
6467 :
6468 : // Initialize the WeakCell.
6469 : DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakCellMapRootIndex));
6470 28836 : StoreMapNoWriteBarrier(cell, Heap::kWeakCellMapRootIndex);
6471 28836 : StoreObjectField(cell, WeakCell::kValueOffset, value);
6472 : StoreObjectFieldRoot(cell, WeakCell::kNextOffset,
6473 28836 : Heap::kTheHoleValueRootIndex);
6474 :
6475 : // Store the WeakCell in the feedback vector.
6476 : StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0,
6477 28836 : CodeStubAssembler::SMI_PARAMETERS);
6478 28836 : return cell;
6479 : }
6480 :
// Emits a counted loop invoking {body} for indices from {start_index} towards
// {end_index}, advancing by {increment} each iteration (before the body for
// kPre, after for kPost). {vars} lists additional variables live across
// iterations. Returns the final index value. The loop does not run at all
// when start_index == end_index.
Node* CodeStubAssembler::BuildFastLoop(
    const CodeStubAssembler::VariableList& vars, Node* start_index,
    Node* end_index, const FastLoopBody& body, int increment,
    ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
  // The index is either a raw machine word or a Smi, matching the mode in
  // which {start_index}/{end_index} are expressed.
  MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
                                        ? MachineType::PointerRepresentation()
                                        : MachineRepresentation::kTaggedSigned;
  VARIABLE(var, index_rep, start_index);
  // The index variable must be part of the loop's variable set so its value
  // is merged across back edges.
  VariableList vars_copy(vars, zone());
  vars_copy.Add(&var, zone());
  Label loop(this, vars_copy);
  Label after_loop(this);
  // Introduce an explicit second check of the termination condition before the
  // loop that helps turbofan generate better code. If there's only a single
  // check, then the CodeStubAssembler forces it to be at the beginning of the
  // loop requiring a backwards branch at the end of the loop (it's not possible
  // to force the loop header check at the end of the loop and branch forward to
  // it from the pre-header). The extra branch is slower in the case that the
  // loop actually iterates.
  Branch(WordEqual(var.value(), end_index), &after_loop, &loop);
  BIND(&loop);
  {
    if (advance_mode == IndexAdvanceMode::kPre) {
      Increment(var, increment, parameter_mode);
    }
    body(var.value());
    if (advance_mode == IndexAdvanceMode::kPost) {
      Increment(var, increment, parameter_mode);
    }
    // Equality (not <) is the exit condition, so {increment} must evenly
    // divide the distance between start and end.
    Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
  }
  BIND(&after_loop);
  return var.value();
}
6515 :
6516 5971 : void CodeStubAssembler::BuildFastFixedArrayForEach(
6517 : const CodeStubAssembler::VariableList& vars, Node* fixed_array,
6518 : ElementsKind kind, Node* first_element_inclusive,
6519 : Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
6520 : ParameterMode mode, ForEachDirection direction) {
6521 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
6522 : int32_t first_val;
6523 5971 : bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
6524 : int32_t last_val;
6525 5971 : bool constent_last = ToInt32Constant(last_element_exclusive, last_val);
6526 5971 : if (constant_first && constent_last) {
6527 688 : int delta = last_val - first_val;
6528 : DCHECK(delta >= 0);
6529 688 : if (delta <= kElementLoopUnrollThreshold) {
6530 645 : if (direction == ForEachDirection::kForward) {
6531 0 : for (int i = first_val; i < last_val; ++i) {
6532 0 : Node* index = IntPtrConstant(i);
6533 : Node* offset =
6534 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
6535 0 : FixedArray::kHeaderSize - kHeapObjectTag);
6536 0 : body(fixed_array, offset);
6537 : }
6538 : } else {
6539 3483 : for (int i = last_val - 1; i >= first_val; --i) {
6540 2838 : Node* index = IntPtrConstant(i);
6541 : Node* offset =
6542 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
6543 2838 : FixedArray::kHeaderSize - kHeapObjectTag);
6544 2838 : body(fixed_array, offset);
6545 : }
6546 : }
6547 645 : return;
6548 : }
6549 : }
6550 :
6551 : Node* start =
6552 : ElementOffsetFromIndex(first_element_inclusive, kind, mode,
6553 5326 : FixedArray::kHeaderSize - kHeapObjectTag);
6554 : Node* limit =
6555 : ElementOffsetFromIndex(last_element_exclusive, kind, mode,
6556 5326 : FixedArray::kHeaderSize - kHeapObjectTag);
6557 5326 : if (direction == ForEachDirection::kReverse) std::swap(start, limit);
6558 :
6559 : int increment = IsFastDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
6560 : BuildFastLoop(
6561 : vars, start, limit,
6562 5326 : [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
6563 : direction == ForEachDirection::kReverse ? -increment : increment,
6564 : INTPTR_PARAMETERS,
6565 : direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
6566 15978 : : IndexAdvanceMode::kPost);
6567 : }
6568 :
6569 344 : void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
6570 : Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
6571 : int max_newspace_parameters =
6572 344 : (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
6573 : GotoIf(IntPtrOrSmiGreaterThan(
6574 : element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode),
6575 : mode),
6576 344 : doesnt_fit);
6577 344 : }
6578 :
6579 645 : void CodeStubAssembler::InitializeFieldsWithRoot(
6580 : Node* object, Node* start_offset, Node* end_offset,
6581 : Heap::RootListIndex root_index) {
6582 645 : start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
6583 645 : end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
6584 645 : Node* root_value = LoadRoot(root_index);
6585 : BuildFastLoop(end_offset, start_offset,
6586 : [this, object, root_value](Node* current) {
6587 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
6588 645 : current, root_value);
6589 : },
6590 : -kPointerSize, INTPTR_PARAMETERS,
6591 1290 : CodeStubAssembler::IndexAdvanceMode::kPre);
6592 645 : }
6593 :
6594 344 : void CodeStubAssembler::BranchIfNumericRelationalComparison(
6595 : RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true,
6596 : Label* if_false) {
6597 344 : Label end(this);
6598 688 : VARIABLE(result, MachineRepresentation::kTagged);
6599 :
6600 : // Shared entry for floating point comparison.
6601 344 : Label do_fcmp(this);
6602 688 : VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64);
6603 688 : VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64);
6604 :
6605 : // Check if the {lhs} is a Smi or a HeapObject.
6606 344 : Label if_lhsissmi(this), if_lhsisnotsmi(this);
6607 344 : Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
6608 :
6609 344 : BIND(&if_lhsissmi);
6610 : {
6611 : // Check if {rhs} is a Smi or a HeapObject.
6612 344 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
6613 344 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
6614 :
6615 344 : BIND(&if_rhsissmi);
6616 : {
6617 : // Both {lhs} and {rhs} are Smi, so just perform a fast Smi comparison.
6618 344 : switch (mode) {
6619 : case kLessThan:
6620 344 : BranchIfSmiLessThan(lhs, rhs, if_true, if_false);
6621 344 : break;
6622 : case kLessThanOrEqual:
6623 0 : BranchIfSmiLessThanOrEqual(lhs, rhs, if_true, if_false);
6624 0 : break;
6625 : case kGreaterThan:
6626 0 : BranchIfSmiLessThan(rhs, lhs, if_true, if_false);
6627 0 : break;
6628 : case kGreaterThanOrEqual:
6629 0 : BranchIfSmiLessThanOrEqual(rhs, lhs, if_true, if_false);
6630 0 : break;
6631 : }
6632 : }
6633 :
6634 344 : BIND(&if_rhsisnotsmi);
6635 : {
6636 : CSA_ASSERT(this, IsHeapNumberMap(LoadMap(rhs)));
6637 : // Convert the {lhs} and {rhs} to floating point values, and
6638 : // perform a floating point comparison.
6639 344 : var_fcmp_lhs.Bind(SmiToFloat64(lhs));
6640 344 : var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
6641 344 : Goto(&do_fcmp);
6642 344 : }
6643 : }
6644 :
6645 344 : BIND(&if_lhsisnotsmi);
6646 : {
6647 : CSA_ASSERT(this, IsHeapNumberMap(LoadMap(lhs)));
6648 :
6649 : // Check if {rhs} is a Smi or a HeapObject.
6650 344 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
6651 344 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
6652 :
6653 344 : BIND(&if_rhsissmi);
6654 : {
6655 : // Convert the {lhs} and {rhs} to floating point values, and
6656 : // perform a floating point comparison.
6657 344 : var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
6658 344 : var_fcmp_rhs.Bind(SmiToFloat64(rhs));
6659 344 : Goto(&do_fcmp);
6660 : }
6661 :
6662 344 : BIND(&if_rhsisnotsmi);
6663 : {
6664 : CSA_ASSERT(this, IsHeapNumberMap(LoadMap(rhs)));
6665 :
6666 : // Convert the {lhs} and {rhs} to floating point values, and
6667 : // perform a floating point comparison.
6668 344 : var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
6669 344 : var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
6670 344 : Goto(&do_fcmp);
6671 344 : }
6672 : }
6673 :
6674 344 : BIND(&do_fcmp);
6675 : {
6676 : // Load the {lhs} and {rhs} floating point values.
6677 344 : Node* lhs = var_fcmp_lhs.value();
6678 344 : Node* rhs = var_fcmp_rhs.value();
6679 :
6680 : // Perform a fast floating point comparison.
6681 344 : switch (mode) {
6682 : case kLessThan:
6683 344 : Branch(Float64LessThan(lhs, rhs), if_true, if_false);
6684 344 : break;
6685 : case kLessThanOrEqual:
6686 0 : Branch(Float64LessThanOrEqual(lhs, rhs), if_true, if_false);
6687 0 : break;
6688 : case kGreaterThan:
6689 0 : Branch(Float64GreaterThan(lhs, rhs), if_true, if_false);
6690 0 : break;
6691 : case kGreaterThanOrEqual:
6692 0 : Branch(Float64GreaterThanOrEqual(lhs, rhs), if_true, if_false);
6693 0 : break;
6694 : }
6695 344 : }
6696 344 : }
6697 :
// Jumps to {if_false} unless {lhs} < {rhs}; falls through otherwise. Both
// inputs must already be Numbers (Smi or HeapNumber) — no conversions are
// performed (delegates to BranchIfNumericRelationalComparison).
void CodeStubAssembler::GotoUnlessNumberLessThan(Node* lhs, Node* rhs,
                                                 Label* if_false) {
  Label if_true(this);
  BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false);
  // Binding {if_true} here makes the true case the fall-through path.
  BIND(&if_true);
}
6704 :
// Implements the abstract relational comparison (ES6 7.2.11-style semantics,
// specialized per {mode}) for arbitrary {lhs}/{rhs} values. Performs
// ToPrimitive/ToNumber conversions by looping back to {loop} after each
// conversion, dispatches string-vs-string comparisons to the String*
// stubs, and otherwise compares as Float64. Returns the resulting Boolean.
Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode,
                                              Node* lhs, Node* rhs,
                                              Node* context) {
  Label return_true(this), return_false(this), end(this);
  VARIABLE(result, MachineRepresentation::kTagged);

  // Shared entry for floating point comparison.
  Label do_fcmp(this);
  VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64);
  VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64);

  // We might need to loop several times due to ToPrimitive and/or ToNumber
  // conversions.
  VARIABLE(var_lhs, MachineRepresentation::kTagged, lhs);
  VARIABLE(var_rhs, MachineRepresentation::kTagged, rhs);
  Variable* loop_vars[2] = {&var_lhs, &var_rhs};
  Label loop(this, 2, loop_vars);
  Goto(&loop);
  BIND(&loop);
  {
    // Load the current {lhs} and {rhs} values.
    lhs = var_lhs.value();
    rhs = var_rhs.value();

    // Check if the {lhs} is a Smi or a HeapObject.
    Label if_lhsissmi(this), if_lhsisnotsmi(this);
    Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);

    BIND(&if_lhsissmi);
    {
      // Check if {rhs} is a Smi or a HeapObject.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsissmi);
      {
        // Both {lhs} and {rhs} are Smi, so just perform a fast Smi comparison.
        switch (mode) {
          case kLessThan:
            BranchIfSmiLessThan(lhs, rhs, &return_true, &return_false);
            break;
          case kLessThanOrEqual:
            BranchIfSmiLessThanOrEqual(lhs, rhs, &return_true, &return_false);
            break;
          case kGreaterThan:
            // a > b  <=>  b < a, reusing the less-than helper.
            BranchIfSmiLessThan(rhs, lhs, &return_true, &return_false);
            break;
          case kGreaterThanOrEqual:
            BranchIfSmiLessThanOrEqual(rhs, lhs, &return_true, &return_false);
            break;
        }
      }

      BIND(&if_rhsisnotsmi);
      {
        // Load the map of {rhs}.
        Node* rhs_map = LoadMap(rhs);

        // Check if the {rhs} is a HeapNumber.
        Label if_rhsisnumber(this), if_rhsisnotnumber(this, Label::kDeferred);
        Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);

        BIND(&if_rhsisnumber);
        {
          // Convert the {lhs} and {rhs} to floating point values, and
          // perform a floating point comparison.
          var_fcmp_lhs.Bind(SmiToFloat64(lhs));
          var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
          Goto(&do_fcmp);
        }

        BIND(&if_rhsisnotnumber);
        {
          // Convert the {rhs} to a Number; we don't need to perform the
          // dedicated ToPrimitive(rhs, hint Number) operation, as the
          // ToNumber(rhs) will by itself already invoke ToPrimitive with
          // a Number hint.
          Callable callable = CodeFactory::NonNumberToNumber(isolate());
          var_rhs.Bind(CallStub(callable, context, rhs));
          Goto(&loop);
        }
      }
    }

    BIND(&if_lhsisnotsmi);
    {
      // Load the map of {lhs}.
      Node* lhs_map = LoadMap(lhs);

      // Check if {rhs} is a Smi or a HeapObject.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsissmi);
      {
        // Check if the {lhs} is a HeapNumber.
        Label if_lhsisnumber(this), if_lhsisnotnumber(this, Label::kDeferred);
        Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);

        BIND(&if_lhsisnumber);
        {
          // Convert the {lhs} and {rhs} to floating point values, and
          // perform a floating point comparison.
          var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
          var_fcmp_rhs.Bind(SmiToFloat64(rhs));
          Goto(&do_fcmp);
        }

        BIND(&if_lhsisnotnumber);
        {
          // Convert the {lhs} to a Number; we don't need to perform the
          // dedicated ToPrimitive(lhs, hint Number) operation, as the
          // ToNumber(lhs) will by itself already invoke ToPrimitive with
          // a Number hint.
          Callable callable = CodeFactory::NonNumberToNumber(isolate());
          var_lhs.Bind(CallStub(callable, context, lhs));
          Goto(&loop);
        }
      }

      BIND(&if_rhsisnotsmi);
      {
        // Load the map of {rhs}.
        Node* rhs_map = LoadMap(rhs);

        // Check if {lhs} is a HeapNumber.
        Label if_lhsisnumber(this), if_lhsisnotnumber(this);
        Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);

        BIND(&if_lhsisnumber);
        {
          // Check if {rhs} is also a HeapNumber.
          Label if_rhsisnumber(this), if_rhsisnotnumber(this, Label::kDeferred);
          Branch(WordEqual(lhs_map, rhs_map), &if_rhsisnumber,
                 &if_rhsisnotnumber);

          BIND(&if_rhsisnumber);
          {
            // Convert the {lhs} and {rhs} to floating point values, and
            // perform a floating point comparison.
            var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
            var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
            Goto(&do_fcmp);
          }

          BIND(&if_rhsisnotnumber);
          {
            // Convert the {rhs} to a Number; we don't need to perform
            // dedicated ToPrimitive(rhs, hint Number) operation, as the
            // ToNumber(rhs) will by itself already invoke ToPrimitive with
            // a Number hint.
            Callable callable = CodeFactory::NonNumberToNumber(isolate());
            var_rhs.Bind(CallStub(callable, context, rhs));
            Goto(&loop);
          }
        }

        BIND(&if_lhsisnotnumber);
        {
          // Load the instance type of {lhs}.
          Node* lhs_instance_type = LoadMapInstanceType(lhs_map);

          // Check if {lhs} is a String.
          Label if_lhsisstring(this), if_lhsisnotstring(this, Label::kDeferred);
          Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
                 &if_lhsisnotstring);

          BIND(&if_lhsisstring);
          {
            // Load the instance type of {rhs}.
            Node* rhs_instance_type = LoadMapInstanceType(rhs_map);

            // Check if {rhs} is also a String.
            Label if_rhsisstring(this, Label::kDeferred),
                if_rhsisnotstring(this, Label::kDeferred);
            Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
                   &if_rhsisnotstring);

            BIND(&if_rhsisstring);
            {
              // Both {lhs} and {rhs} are strings.
              switch (mode) {
                case kLessThan:
                  result.Bind(CallStub(CodeFactory::StringLessThan(isolate()),
                                       context, lhs, rhs));
                  Goto(&end);
                  break;
                case kLessThanOrEqual:
                  result.Bind(
                      CallStub(CodeFactory::StringLessThanOrEqual(isolate()),
                               context, lhs, rhs));
                  Goto(&end);
                  break;
                case kGreaterThan:
                  result.Bind(
                      CallStub(CodeFactory::StringGreaterThan(isolate()),
                               context, lhs, rhs));
                  Goto(&end);
                  break;
                case kGreaterThanOrEqual:
                  result.Bind(
                      CallStub(CodeFactory::StringGreaterThanOrEqual(isolate()),
                               context, lhs, rhs));
                  Goto(&end);
                  break;
              }
            }

            BIND(&if_rhsisnotstring);
            {
              // The {lhs} is a String, while {rhs} is neither a Number nor a
              // String, so we need to call ToPrimitive(rhs, hint Number) if
              // {rhs} is a receiver or ToNumber(lhs) and ToNumber(rhs) in the
              // other cases.
              STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
              Label if_rhsisreceiver(this, Label::kDeferred),
                  if_rhsisnotreceiver(this, Label::kDeferred);
              Branch(IsJSReceiverInstanceType(rhs_instance_type),
                     &if_rhsisreceiver, &if_rhsisnotreceiver);

              BIND(&if_rhsisreceiver);
              {
                // Convert {rhs} to a primitive first passing Number hint.
                Callable callable = CodeFactory::NonPrimitiveToPrimitive(
                    isolate(), ToPrimitiveHint::kNumber);
                var_rhs.Bind(CallStub(callable, context, rhs));
                Goto(&loop);
              }

              BIND(&if_rhsisnotreceiver);
              {
                // Convert both {lhs} and {rhs} to Number.
                Callable callable = CodeFactory::ToNumber(isolate());
                var_lhs.Bind(CallStub(callable, context, lhs));
                var_rhs.Bind(CallStub(callable, context, rhs));
                Goto(&loop);
              }
            }
          }

          BIND(&if_lhsisnotstring);
          {
            // The {lhs} is neither a Number nor a String, so we need to call
            // ToPrimitive(lhs, hint Number) if {lhs} is a receiver or
            // ToNumber(lhs) and ToNumber(rhs) in the other cases.
            STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
            Label if_lhsisreceiver(this, Label::kDeferred),
                if_lhsisnotreceiver(this, Label::kDeferred);
            Branch(IsJSReceiverInstanceType(lhs_instance_type),
                   &if_lhsisreceiver, &if_lhsisnotreceiver);

            BIND(&if_lhsisreceiver);
            {
              // Convert {lhs} to a primitive first passing Number hint.
              Callable callable = CodeFactory::NonPrimitiveToPrimitive(
                  isolate(), ToPrimitiveHint::kNumber);
              var_lhs.Bind(CallStub(callable, context, lhs));
              Goto(&loop);
            }

            BIND(&if_lhsisnotreceiver);
            {
              // Convert both {lhs} and {rhs} to Number.
              Callable callable = CodeFactory::ToNumber(isolate());
              var_lhs.Bind(CallStub(callable, context, lhs));
              var_rhs.Bind(CallStub(callable, context, rhs));
              Goto(&loop);
            }
          }
        }
      }
    }
  }

  BIND(&do_fcmp);
  {
    // Load the {lhs} and {rhs} floating point values.
    Node* lhs = var_fcmp_lhs.value();
    Node* rhs = var_fcmp_rhs.value();

    // Perform a fast floating point comparison.
    switch (mode) {
      case kLessThan:
        Branch(Float64LessThan(lhs, rhs), &return_true, &return_false);
        break;
      case kLessThanOrEqual:
        Branch(Float64LessThanOrEqual(lhs, rhs), &return_true, &return_false);
        break;
      case kGreaterThan:
        Branch(Float64GreaterThan(lhs, rhs), &return_true, &return_false);
        break;
      case kGreaterThanOrEqual:
        Branch(Float64GreaterThanOrEqual(lhs, rhs), &return_true,
               &return_false);
        break;
    }
  }

  BIND(&return_true);
  {
    result.Bind(BooleanConstant(true));
    Goto(&end);
  }

  BIND(&return_false);
  {
    result.Bind(BooleanConstant(false));
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}
7018 :
7019 : namespace {
7020 :
7021 903 : void GenerateEqual_Same(CodeStubAssembler* assembler, Node* value,
7022 : CodeStubAssembler::Label* if_equal,
7023 : CodeStubAssembler::Label* if_notequal) {
7024 : // In case of abstract or strict equality checks, we need additional checks
7025 : // for NaN values because they are not considered equal, even if both the
7026 : // left and the right hand side reference exactly the same value.
7027 :
7028 : typedef CodeStubAssembler::Label Label;
7029 :
7030 : // Check if {value} is a Smi or a HeapObject.
7031 903 : Label if_valueissmi(assembler), if_valueisnotsmi(assembler);
7032 : assembler->Branch(assembler->TaggedIsSmi(value), &if_valueissmi,
7033 903 : &if_valueisnotsmi);
7034 :
7035 903 : assembler->BIND(&if_valueisnotsmi);
7036 : {
7037 : // Load the map of {value}.
7038 : Node* value_map = assembler->LoadMap(value);
7039 :
7040 : // Check if {value} (and therefore {rhs}) is a HeapNumber.
7041 903 : Label if_valueisnumber(assembler), if_valueisnotnumber(assembler);
7042 : assembler->Branch(assembler->IsHeapNumberMap(value_map), &if_valueisnumber,
7043 903 : &if_valueisnotnumber);
7044 :
7045 903 : assembler->BIND(&if_valueisnumber);
7046 : {
7047 : // Convert {value} (and therefore {rhs}) to floating point value.
7048 : Node* value_value = assembler->LoadHeapNumberValue(value);
7049 :
7050 : // Check if the HeapNumber value is a NaN.
7051 903 : assembler->BranchIfFloat64IsNaN(value_value, if_notequal, if_equal);
7052 : }
7053 :
7054 903 : assembler->BIND(&if_valueisnotnumber);
7055 1806 : assembler->Goto(if_equal);
7056 : }
7057 :
7058 903 : assembler->BIND(&if_valueissmi);
7059 1806 : assembler->Goto(if_equal);
7060 903 : }
7061 : } // namespace
7062 :
7063 : // ES6 section 7.2.12 Abstract Equality Comparison
7064 172 : Node* CodeStubAssembler::Equal(Node* lhs, Node* rhs, Node* context) {
7065 : // This is a slightly optimized version of Object::Equals represented as
7066 : // scheduled TurboFan graph utilizing the CodeStubAssembler. Whenever you
7067 : // change something functionality wise in here, remember to update the
7068 : // Object::Equals method as well.
7069 :
7070 344 : Label if_equal(this), if_notequal(this),
7071 172 : do_rhsstringtonumber(this, Label::kDeferred), end(this);
7072 344 : VARIABLE(result, MachineRepresentation::kTagged);
7073 :
7074 : // Shared entry for floating point comparison.
7075 172 : Label do_fcmp(this);
7076 344 : VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64);
7077 344 : VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64);
7078 :
7079 : // We might need to loop several times due to ToPrimitive and/or ToNumber
7080 : // conversions.
7081 344 : VARIABLE(var_lhs, MachineRepresentation::kTagged, lhs);
7082 344 : VARIABLE(var_rhs, MachineRepresentation::kTagged, rhs);
7083 172 : Variable* loop_vars[2] = {&var_lhs, &var_rhs};
7084 344 : Label loop(this, 2, loop_vars);
7085 172 : Goto(&loop);
7086 172 : BIND(&loop);
7087 : {
7088 : // Load the current {lhs} and {rhs} values.
7089 172 : lhs = var_lhs.value();
7090 172 : rhs = var_rhs.value();
7091 :
7092 : // Check if {lhs} and {rhs} refer to the same object.
7093 172 : Label if_same(this), if_notsame(this);
7094 172 : Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
7095 :
7096 172 : BIND(&if_same);
7097 : {
7098 : // The {lhs} and {rhs} reference the exact same value, yet we need special
7099 : // treatment for HeapNumber, as NaN is not equal to NaN.
7100 172 : GenerateEqual_Same(this, lhs, &if_equal, &if_notequal);
7101 : }
7102 :
7103 172 : BIND(&if_notsame);
7104 : {
7105 : // Check if {lhs} is a Smi or a HeapObject.
7106 172 : Label if_lhsissmi(this), if_lhsisnotsmi(this);
7107 172 : Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
7108 :
7109 172 : BIND(&if_lhsissmi);
7110 : {
7111 : // Check if {rhs} is a Smi or a HeapObject.
7112 172 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
7113 172 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7114 :
7115 172 : BIND(&if_rhsissmi);
7116 : // We have already checked for {lhs} and {rhs} being the same value, so
7117 : // if both are Smis when we get here they must not be equal.
7118 172 : Goto(&if_notequal);
7119 :
7120 172 : BIND(&if_rhsisnotsmi);
7121 : {
7122 : // Load the map of {rhs}.
7123 : Node* rhs_map = LoadMap(rhs);
7124 :
7125 : // Check if {rhs} is a HeapNumber.
7126 172 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
7127 172 : Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
7128 :
7129 172 : BIND(&if_rhsisnumber);
7130 : {
7131 : // Convert {lhs} and {rhs} to floating point values, and
7132 : // perform a floating point comparison.
7133 172 : var_fcmp_lhs.Bind(SmiToFloat64(lhs));
7134 172 : var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
7135 172 : Goto(&do_fcmp);
7136 : }
7137 :
7138 172 : BIND(&if_rhsisnotnumber);
7139 : {
7140 : // Load the instance type of the {rhs}.
7141 : Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
7142 :
7143 : // Check if the {rhs} is a String.
7144 : Label if_rhsisstring(this, Label::kDeferred),
7145 172 : if_rhsisnotstring(this);
7146 : Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
7147 172 : &if_rhsisnotstring);
7148 :
7149 172 : BIND(&if_rhsisstring);
7150 : {
7151 : // The {rhs} is a String and the {lhs} is a Smi; we need
7152 : // to convert the {rhs} to a Number and compare the output to
7153 : // the Number on the {lhs}.
7154 172 : Goto(&do_rhsstringtonumber);
7155 : }
7156 :
7157 172 : BIND(&if_rhsisnotstring);
7158 : {
7159 : // Check if the {rhs} is a Boolean.
7160 172 : Label if_rhsisboolean(this), if_rhsisnotboolean(this);
7161 : Branch(IsBooleanMap(rhs_map), &if_rhsisboolean,
7162 172 : &if_rhsisnotboolean);
7163 :
7164 172 : BIND(&if_rhsisboolean);
7165 : {
7166 : // The {rhs} is a Boolean, load its number value.
7167 172 : var_rhs.Bind(LoadObjectField(rhs, Oddball::kToNumberOffset));
7168 172 : Goto(&loop);
7169 : }
7170 :
7171 172 : BIND(&if_rhsisnotboolean);
7172 : {
7173 : // Check if the {rhs} is a Receiver.
7174 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
7175 : Label if_rhsisreceiver(this, Label::kDeferred),
7176 172 : if_rhsisnotreceiver(this);
7177 : Branch(IsJSReceiverInstanceType(rhs_instance_type),
7178 172 : &if_rhsisreceiver, &if_rhsisnotreceiver);
7179 :
7180 172 : BIND(&if_rhsisreceiver);
7181 : {
7182 : // Convert {rhs} to a primitive first (passing no hint).
7183 : Callable callable =
7184 172 : CodeFactory::NonPrimitiveToPrimitive(isolate());
7185 172 : var_rhs.Bind(CallStub(callable, context, rhs));
7186 172 : Goto(&loop);
7187 : }
7188 :
7189 172 : BIND(&if_rhsisnotreceiver);
7190 344 : Goto(&if_notequal);
7191 172 : }
7192 172 : }
7193 172 : }
7194 172 : }
7195 : }
7196 :
7197 172 : BIND(&if_lhsisnotsmi);
7198 : {
7199 : // Check if {rhs} is a Smi or a HeapObject.
7200 172 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
7201 172 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7202 :
7203 172 : BIND(&if_rhsissmi);
7204 : {
7205 : // The {lhs} is a HeapObject and the {rhs} is a Smi; swapping {lhs}
7206 : // and {rhs} is not observable and doesn't matter for the result, so
7207 : // we can just swap them and use the Smi handling above (for {lhs}
7208 : // being a Smi).
7209 172 : var_lhs.Bind(rhs);
7210 172 : var_rhs.Bind(lhs);
7211 172 : Goto(&loop);
7212 : }
7213 :
7214 172 : BIND(&if_rhsisnotsmi);
7215 : {
7216 172 : Label if_lhsisstring(this), if_lhsisnumber(this),
7217 172 : if_lhsissymbol(this), if_lhsisoddball(this),
7218 172 : if_lhsisreceiver(this);
7219 :
7220 : // Both {lhs} and {rhs} are HeapObjects, load their maps
7221 : // and their instance types.
7222 : Node* lhs_map = LoadMap(lhs);
7223 : Node* rhs_map = LoadMap(rhs);
7224 :
7225 : // Load the instance types of {lhs} and {rhs}.
7226 : Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
7227 : Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
7228 :
7229 : // Dispatch based on the instance type of {lhs}.
7230 : size_t const kNumCases = FIRST_NONSTRING_TYPE + 3;
7231 : Label* case_labels[kNumCases];
7232 : int32_t case_values[kNumCases];
7233 22188 : for (int32_t i = 0; i < FIRST_NONSTRING_TYPE; ++i) {
7234 44032 : case_labels[i] = new Label(this);
7235 22016 : case_values[i] = i;
7236 : }
7237 172 : case_labels[FIRST_NONSTRING_TYPE + 0] = &if_lhsisnumber;
7238 172 : case_values[FIRST_NONSTRING_TYPE + 0] = HEAP_NUMBER_TYPE;
7239 172 : case_labels[FIRST_NONSTRING_TYPE + 1] = &if_lhsissymbol;
7240 172 : case_values[FIRST_NONSTRING_TYPE + 1] = SYMBOL_TYPE;
7241 172 : case_labels[FIRST_NONSTRING_TYPE + 2] = &if_lhsisoddball;
7242 172 : case_values[FIRST_NONSTRING_TYPE + 2] = ODDBALL_TYPE;
7243 : Switch(lhs_instance_type, &if_lhsisreceiver, case_values, case_labels,
7244 172 : arraysize(case_values));
7245 22188 : for (int32_t i = 0; i < FIRST_NONSTRING_TYPE; ++i) {
7246 22016 : Bind(case_labels[i]);
7247 22016 : Goto(&if_lhsisstring);
7248 22016 : delete case_labels[i];
7249 : }
7250 :
7251 172 : BIND(&if_lhsisstring);
7252 : {
7253 : // Check if {rhs} is also a String.
7254 : Label if_rhsisstring(this, Label::kDeferred),
7255 172 : if_rhsisnotstring(this);
7256 : Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
7257 172 : &if_rhsisnotstring);
7258 :
7259 172 : BIND(&if_rhsisstring);
7260 : {
7261 : // Both {lhs} and {rhs} are of type String, just do the
7262 : // string comparison then.
7263 172 : Callable callable = CodeFactory::StringEqual(isolate());
7264 172 : result.Bind(CallStub(callable, context, lhs, rhs));
7265 172 : Goto(&end);
7266 : }
7267 :
7268 172 : BIND(&if_rhsisnotstring);
7269 : {
7270 : // The {lhs} is a String and the {rhs} is some other HeapObject.
7271 : // Swapping {lhs} and {rhs} is not observable and doesn't matter
7272 : // for the result, so we can just swap them and use the String
7273 : // handling below (for {rhs} being a String).
7274 172 : var_lhs.Bind(rhs);
7275 172 : var_rhs.Bind(lhs);
7276 172 : Goto(&loop);
7277 172 : }
7278 : }
7279 :
7280 172 : BIND(&if_lhsisnumber);
7281 : {
7282 : // Check if {rhs} is also a HeapNumber.
7283 172 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
7284 : Branch(Word32Equal(lhs_instance_type, rhs_instance_type),
7285 172 : &if_rhsisnumber, &if_rhsisnotnumber);
7286 :
7287 172 : BIND(&if_rhsisnumber);
7288 : {
7289 : // Convert {lhs} and {rhs} to floating point values, and
7290 : // perform a floating point comparison.
7291 172 : var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
7292 172 : var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
7293 172 : Goto(&do_fcmp);
7294 : }
7295 :
7296 172 : BIND(&if_rhsisnotnumber);
7297 : {
7298 : // The {lhs} is a Number, the {rhs} is some other HeapObject.
7299 : Label if_rhsisstring(this, Label::kDeferred),
7300 172 : if_rhsisnotstring(this);
7301 : Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
7302 172 : &if_rhsisnotstring);
7303 :
7304 172 : BIND(&if_rhsisstring);
7305 : {
7306 : // The {rhs} is a String and the {lhs} is a HeapNumber; we need
7307 : // to convert the {rhs} to a Number and compare the output to
7308 : // the Number on the {lhs}.
7309 172 : Goto(&do_rhsstringtonumber);
7310 : }
7311 :
7312 172 : BIND(&if_rhsisnotstring);
7313 : {
7314 : // Check if the {rhs} is a JSReceiver.
7315 172 : Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
7316 : STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
7317 : Branch(IsJSReceiverInstanceType(rhs_instance_type),
7318 172 : &if_rhsisreceiver, &if_rhsisnotreceiver);
7319 :
7320 172 : BIND(&if_rhsisreceiver);
7321 : {
7322 : // The {lhs} is a Primitive and the {rhs} is a JSReceiver.
7323 : // Swapping {lhs} and {rhs} is not observable and doesn't
7324 : // matter for the result, so we can just swap them and use
7325 : // the JSReceiver handling below (for {lhs} being a
7326 : // JSReceiver).
7327 172 : var_lhs.Bind(rhs);
7328 172 : var_rhs.Bind(lhs);
7329 172 : Goto(&loop);
7330 : }
7331 :
7332 172 : BIND(&if_rhsisnotreceiver);
7333 : {
7334 : // Check if {rhs} is a Boolean.
7335 172 : Label if_rhsisboolean(this), if_rhsisnotboolean(this);
7336 : Branch(IsBooleanMap(rhs_map), &if_rhsisboolean,
7337 172 : &if_rhsisnotboolean);
7338 :
7339 172 : BIND(&if_rhsisboolean);
7340 : {
7341 : // The {rhs} is a Boolean, convert it to a Smi first.
7342 : var_rhs.Bind(
7343 172 : LoadObjectField(rhs, Oddball::kToNumberOffset));
7344 172 : Goto(&loop);
7345 : }
7346 :
7347 172 : BIND(&if_rhsisnotboolean);
7348 344 : Goto(&if_notequal);
7349 172 : }
7350 172 : }
7351 172 : }
7352 : }
7353 :
7354 172 : BIND(&if_lhsisoddball);
7355 : {
7356 : // The {lhs} is an Oddball and {rhs} is some other HeapObject.
7357 172 : Label if_lhsisboolean(this), if_lhsisnotboolean(this);
7358 : Node* boolean_map = BooleanMapConstant();
7359 : Branch(WordEqual(lhs_map, boolean_map), &if_lhsisboolean,
7360 172 : &if_lhsisnotboolean);
7361 :
7362 172 : BIND(&if_lhsisboolean);
7363 : {
7364 : // The {lhs} is a Boolean, check if {rhs} is also a Boolean.
7365 172 : Label if_rhsisboolean(this), if_rhsisnotboolean(this);
7366 : Branch(WordEqual(rhs_map, boolean_map), &if_rhsisboolean,
7367 172 : &if_rhsisnotboolean);
7368 :
7369 172 : BIND(&if_rhsisboolean);
7370 : {
7371 : // Both {lhs} and {rhs} are distinct Boolean values.
7372 172 : Goto(&if_notequal);
7373 : }
7374 :
7375 172 : BIND(&if_rhsisnotboolean);
7376 : {
7377 : // Convert the {lhs} to a Number first.
7378 172 : var_lhs.Bind(LoadObjectField(lhs, Oddball::kToNumberOffset));
7379 172 : Goto(&loop);
7380 172 : }
7381 : }
7382 :
7383 172 : BIND(&if_lhsisnotboolean);
7384 : {
7385 : // The {lhs} is either Null or Undefined; check if the {rhs} is
7386 : // undetectable (i.e. either also Null or Undefined or some
7387 : // undetectable JSReceiver).
7388 : Node* rhs_bitfield = LoadMapBitField(rhs_map);
7389 : Branch(Word32Equal(
7390 : Word32And(rhs_bitfield,
7391 : Int32Constant(1 << Map::kIsUndetectable)),
7392 : Int32Constant(0)),
7393 172 : &if_notequal, &if_equal);
7394 172 : }
7395 : }
7396 :
7397 172 : BIND(&if_lhsissymbol);
7398 : {
7399 : // Check if the {rhs} is a JSReceiver.
7400 172 : Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
7401 : STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
7402 : Branch(IsJSReceiverInstanceType(rhs_instance_type),
7403 172 : &if_rhsisreceiver, &if_rhsisnotreceiver);
7404 :
7405 172 : BIND(&if_rhsisreceiver);
7406 : {
7407 : // The {lhs} is a Primitive and the {rhs} is a JSReceiver.
7408 : // Swapping {lhs} and {rhs} is not observable and doesn't
7409 : // matter for the result, so we can just swap them and use
7410 : // the JSReceiver handling below (for {lhs} being a JSReceiver).
7411 172 : var_lhs.Bind(rhs);
7412 172 : var_rhs.Bind(lhs);
7413 172 : Goto(&loop);
7414 : }
7415 :
7416 172 : BIND(&if_rhsisnotreceiver);
7417 : {
7418 : // The {rhs} is not a JSReceiver and also not the same Symbol
7419 :                 // as the {lhs}, so this equality check is considered false.
7420 172 : Goto(&if_notequal);
7421 172 : }
7422 : }
7423 :
7424 172 : BIND(&if_lhsisreceiver);
7425 : {
7426 : // Check if the {rhs} is also a JSReceiver.
7427 172 : Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
7428 : STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
7429 : Branch(IsJSReceiverInstanceType(rhs_instance_type),
7430 172 : &if_rhsisreceiver, &if_rhsisnotreceiver);
7431 :
7432 172 : BIND(&if_rhsisreceiver);
7433 : {
7434 : // Both {lhs} and {rhs} are different JSReceiver references, so
7435 : // this cannot be considered equal.
7436 172 : Goto(&if_notequal);
7437 : }
7438 :
7439 172 : BIND(&if_rhsisnotreceiver);
7440 : {
7441 : // Check if {rhs} is Null or Undefined (an undetectable check
7442 : // is sufficient here, since we already know that {rhs} is not
7443 : // a JSReceiver).
7444 : Label if_rhsisundetectable(this),
7445 172 : if_rhsisnotundetectable(this, Label::kDeferred);
7446 : Node* rhs_bitfield = LoadMapBitField(rhs_map);
7447 : Branch(Word32Equal(
7448 : Word32And(rhs_bitfield,
7449 : Int32Constant(1 << Map::kIsUndetectable)),
7450 : Int32Constant(0)),
7451 172 : &if_rhsisnotundetectable, &if_rhsisundetectable);
7452 :
7453 172 : BIND(&if_rhsisundetectable);
7454 : {
7455 : // Check if {lhs} is an undetectable JSReceiver.
7456 : Node* lhs_bitfield = LoadMapBitField(lhs_map);
7457 : Branch(Word32Equal(
7458 : Word32And(lhs_bitfield,
7459 : Int32Constant(1 << Map::kIsUndetectable)),
7460 : Int32Constant(0)),
7461 172 : &if_notequal, &if_equal);
7462 : }
7463 :
7464 172 : BIND(&if_rhsisnotundetectable);
7465 : {
7466 : // The {rhs} is some Primitive different from Null and
7467 : // Undefined, need to convert {lhs} to Primitive first.
7468 : Callable callable =
7469 172 : CodeFactory::NonPrimitiveToPrimitive(isolate());
7470 172 : var_lhs.Bind(CallStub(callable, context, lhs));
7471 172 : Goto(&loop);
7472 172 : }
7473 172 : }
7474 172 : }
7475 172 : }
7476 172 : }
7477 : }
7478 :
7479 172 : BIND(&do_rhsstringtonumber);
7480 : {
7481 172 : Callable callable = CodeFactory::StringToNumber(isolate());
7482 172 : var_rhs.Bind(CallStub(callable, context, rhs));
7483 172 : Goto(&loop);
7484 172 : }
7485 : }
7486 :
7487 172 : BIND(&do_fcmp);
7488 : {
7489 : // Load the {lhs} and {rhs} floating point values.
7490 172 : Node* lhs = var_fcmp_lhs.value();
7491 172 : Node* rhs = var_fcmp_rhs.value();
7492 :
7493 : // Perform a fast floating point comparison.
7494 172 : Branch(Float64Equal(lhs, rhs), &if_equal, &if_notequal);
7495 : }
7496 :
7497 172 : BIND(&if_equal);
7498 : {
7499 172 : result.Bind(TrueConstant());
7500 172 : Goto(&end);
7501 : }
7502 :
7503 172 : BIND(&if_notequal);
7504 : {
7505 172 : result.Bind(FalseConstant());
7506 172 : Goto(&end);
7507 : }
7508 :
7509 172 : BIND(&end);
7510 344 : return result.value();
7511 : }
7512 :
7513 731 : Node* CodeStubAssembler::StrictEqual(Node* lhs, Node* rhs) {
7514 : // Here's pseudo-code for the algorithm below in case of kDontNegateResult
7515 : // mode; for kNegateResult mode we properly negate the result.
7516 : //
7517 : // if (lhs == rhs) {
7518 : // if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
7519 : // return true;
7520 : // }
7521 : // if (!lhs->IsSmi()) {
7522 : // if (lhs->IsHeapNumber()) {
7523 : // if (rhs->IsSmi()) {
7524 : // return Smi::cast(rhs)->value() == HeapNumber::cast(lhs)->value();
7525 : // } else if (rhs->IsHeapNumber()) {
7526 : // return HeapNumber::cast(rhs)->value() ==
7527 : // HeapNumber::cast(lhs)->value();
7528 : // } else {
7529 : // return false;
7530 : // }
7531 : // } else {
7532 : // if (rhs->IsSmi()) {
7533 : // return false;
7534 : // } else {
7535 : // if (lhs->IsString()) {
7536 : // if (rhs->IsString()) {
7537 : // return %StringEqual(lhs, rhs);
7538 : // } else {
7539 : // return false;
7540 : // }
7541 : // } else {
7542 : // return false;
7543 : // }
7544 : // }
7545 : // }
7546 : // } else {
7547 : // if (rhs->IsSmi()) {
7548 : // return false;
7549 : // } else {
7550 : // if (rhs->IsHeapNumber()) {
7551 : // return Smi::cast(lhs)->value() == HeapNumber::cast(rhs)->value();
7552 : // } else {
7553 : // return false;
7554 : // }
7555 : // }
7556 : // }
7557 :
7558 1462 : Label if_equal(this), if_notequal(this), end(this);
7559 1462 : VARIABLE(result, MachineRepresentation::kTagged);
7560 :
7561 : // Check if {lhs} and {rhs} refer to the same object.
7562 731 : Label if_same(this), if_notsame(this);
7563 731 : Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
7564 :
7565 731 : BIND(&if_same);
7566 : {
7567 : // The {lhs} and {rhs} reference the exact same value, yet we need special
7568 : // treatment for HeapNumber, as NaN is not equal to NaN.
7569 731 : GenerateEqual_Same(this, lhs, &if_equal, &if_notequal);
7570 : }
7571 :
7572 731 : BIND(&if_notsame);
7573 : {
7574 : // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber
7575 : // and String they can still be considered equal.
7576 :
7577 : // Check if {lhs} is a Smi or a HeapObject.
7578 731 : Label if_lhsissmi(this), if_lhsisnotsmi(this);
7579 731 : Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
7580 :
7581 731 : BIND(&if_lhsisnotsmi);
7582 : {
7583 : // Load the map of {lhs}.
7584 : Node* lhs_map = LoadMap(lhs);
7585 :
7586 : // Check if {lhs} is a HeapNumber.
7587 731 : Label if_lhsisnumber(this), if_lhsisnotnumber(this);
7588 731 : Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
7589 :
7590 731 : BIND(&if_lhsisnumber);
7591 : {
7592 : // Check if {rhs} is a Smi or a HeapObject.
7593 731 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
7594 731 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7595 :
7596 731 : BIND(&if_rhsissmi);
7597 : {
7598 : // Convert {lhs} and {rhs} to floating point values.
7599 : Node* lhs_value = LoadHeapNumberValue(lhs);
7600 731 : Node* rhs_value = SmiToFloat64(rhs);
7601 :
7602 : // Perform a floating point comparison of {lhs} and {rhs}.
7603 731 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
7604 : }
7605 :
7606 731 : BIND(&if_rhsisnotsmi);
7607 : {
7608 : // Load the map of {rhs}.
7609 : Node* rhs_map = LoadMap(rhs);
7610 :
7611 : // Check if {rhs} is also a HeapNumber.
7612 731 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
7613 731 : Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
7614 :
7615 731 : BIND(&if_rhsisnumber);
7616 : {
7617 : // Convert {lhs} and {rhs} to floating point values.
7618 : Node* lhs_value = LoadHeapNumberValue(lhs);
7619 : Node* rhs_value = LoadHeapNumberValue(rhs);
7620 :
7621 : // Perform a floating point comparison of {lhs} and {rhs}.
7622 731 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
7623 : }
7624 :
7625 731 : BIND(&if_rhsisnotnumber);
7626 1462 : Goto(&if_notequal);
7627 731 : }
7628 : }
7629 :
7630 731 : BIND(&if_lhsisnotnumber);
7631 : {
7632 : // Check if {rhs} is a Smi or a HeapObject.
7633 731 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
7634 731 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7635 :
7636 731 : BIND(&if_rhsissmi);
7637 731 : Goto(&if_notequal);
7638 :
7639 731 : BIND(&if_rhsisnotsmi);
7640 : {
7641 : // Load the instance type of {lhs}.
7642 : Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
7643 :
7644 : // Check if {lhs} is a String.
7645 731 : Label if_lhsisstring(this), if_lhsisnotstring(this);
7646 : Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
7647 731 : &if_lhsisnotstring);
7648 :
7649 731 : BIND(&if_lhsisstring);
7650 : {
7651 : // Load the instance type of {rhs}.
7652 731 : Node* rhs_instance_type = LoadInstanceType(rhs);
7653 :
7654 : // Check if {rhs} is also a String.
7655 : Label if_rhsisstring(this, Label::kDeferred),
7656 731 : if_rhsisnotstring(this);
7657 : Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
7658 731 : &if_rhsisnotstring);
7659 :
7660 731 : BIND(&if_rhsisstring);
7661 : {
7662 731 : Callable callable = CodeFactory::StringEqual(isolate());
7663 731 : result.Bind(CallStub(callable, NoContextConstant(), lhs, rhs));
7664 731 : Goto(&end);
7665 : }
7666 :
7667 731 : BIND(&if_rhsisnotstring);
7668 1462 : Goto(&if_notequal);
7669 : }
7670 :
7671 731 : BIND(&if_lhsisnotstring);
7672 1462 : Goto(&if_notequal);
7673 731 : }
7674 731 : }
7675 : }
7676 :
7677 731 : BIND(&if_lhsissmi);
7678 : {
7679 : // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
7680 : // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
7681 : // HeapNumber with an equal floating point value.
7682 :
7683 : // Check if {rhs} is a Smi or a HeapObject.
7684 731 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
7685 731 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7686 :
7687 731 : BIND(&if_rhsissmi);
7688 731 : Goto(&if_notequal);
7689 :
7690 731 : BIND(&if_rhsisnotsmi);
7691 : {
7692 : // Load the map of the {rhs}.
7693 : Node* rhs_map = LoadMap(rhs);
7694 :
7695 : // The {rhs} could be a HeapNumber with the same value as {lhs}.
7696 731 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
7697 731 : Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
7698 :
7699 731 : BIND(&if_rhsisnumber);
7700 : {
7701 : // Convert {lhs} and {rhs} to floating point values.
7702 731 : Node* lhs_value = SmiToFloat64(lhs);
7703 : Node* rhs_value = LoadHeapNumberValue(rhs);
7704 :
7705 : // Perform a floating point comparison of {lhs} and {rhs}.
7706 731 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
7707 : }
7708 :
7709 731 : BIND(&if_rhsisnotnumber);
7710 1462 : Goto(&if_notequal);
7711 731 : }
7712 731 : }
7713 : }
7714 :
7715 731 : BIND(&if_equal);
7716 : {
7717 731 : result.Bind(TrueConstant());
7718 731 : Goto(&end);
7719 : }
7720 :
7721 731 : BIND(&if_notequal);
7722 : {
7723 731 : result.Bind(FalseConstant());
7724 731 : Goto(&end);
7725 : }
7726 :
7727 731 : BIND(&end);
7728 1462 : return result.value();
7729 : }
7730 :
7731 : // ECMA#sec-samevalue
7732 : // This algorithm differs from the Strict Equality Comparison Algorithm in its
7733 : // treatment of signed zeroes and NaNs.
            : // Takes tagged {lhs}/{rhs} and returns a Word32 Node holding 1 (equal)
            : // or 0 (not equal) — note: NOT a tagged Boolean. Numbers are handled
            : // inline; all other cases defer to StrictEqual via &strict_equal.
7734 430 : Node* CodeStubAssembler::SameValue(Node* lhs, Node* rhs) {
7735 430 : VARIABLE(var_result, MachineRepresentation::kWord32);
7736 430 : Label strict_equal(this), out(this);
7737 :
7738 430 : Node* const int_false = Int32Constant(0);
7739 430 : Node* const int_true = Int32Constant(1);
7740 :
7741 430 : Label if_equal(this), if_notequal(this);
7742 430 : Branch(WordEqual(lhs, rhs), &if_equal, &if_notequal);
7743 :
7744 430 : BIND(&if_equal);
7745 : {
7746 : // This covers the case when {lhs} == {rhs}. We can simply return true
7747 : // because SameValue considers two NaNs to be equal.
7748 :
7749 430 : var_result.Bind(int_true);
7750 430 : Goto(&out);
7751 : }
7752 :
7753 430 : BIND(&if_notequal);
7754 : {
7755 : // This covers the case when {lhs} != {rhs}. We only handle numbers here
7756 : // and defer to StrictEqual for the rest.
7757 :
            : // TryTaggedToFloat64 jumps to &strict_equal when the value is not a
            : // Number, so past this point both operands are known numeric.
7758 430 : Node* const lhs_float = TryTaggedToFloat64(lhs, &strict_equal);
7759 430 : Node* const rhs_float = TryTaggedToFloat64(rhs, &strict_equal);
7760 :
7761 430 : Label if_lhsisnan(this), if_lhsnotnan(this);
7762 430 : BranchIfFloat64IsNaN(lhs_float, &if_lhsisnan, &if_lhsnotnan);
7763 :
7764 430 : BIND(&if_lhsisnan);
7765 : {
7766 : // Return true iff {rhs} is NaN.
7767 :
            : // NaN != NaN in Float64Equal, so a self-compare detects NaN.
7768 : Node* const result =
7769 : SelectConstant(Float64Equal(rhs_float, rhs_float), int_false,
7770 430 : int_true, MachineRepresentation::kWord32);
7771 430 : var_result.Bind(result);
7772 430 : Goto(&out);
7773 : }
7774 :
7775 430 : BIND(&if_lhsnotnan);
7776 : {
7777 430 : Label if_floatisequal(this), if_floatnotequal(this);
7778 : Branch(Float64Equal(lhs_float, rhs_float), &if_floatisequal,
7779 430 : &if_floatnotequal);
7780 :
7781 430 : BIND(&if_floatisequal);
7782 : {
7783 : // We still need to handle the case when {lhs} and {rhs} are -0.0 and
7784 : // 0.0 (or vice versa). Compare the high word to
7785 : // distinguish between the two.
7786 :
7787 430 : Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_float);
7788 430 : Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_float);
7789 :
7790 : // If x is +0 and y is -0, return false.
7791 : // If x is -0 and y is +0, return false.
7792 :
7793 430 : Node* const result = Word32Equal(lhs_hi_word, rhs_hi_word);
7794 430 : var_result.Bind(result);
7795 430 : Goto(&out);
7796 : }
7797 :
7798 430 : BIND(&if_floatnotequal);
7799 : {
7800 430 : var_result.Bind(int_false);
7801 430 : Goto(&out);
7802 430 : }
7803 430 : }
7804 : }
7805 :
7806 430 : BIND(&strict_equal);
7807 : {
            : // Non-number path: StrictEqual returns a tagged Boolean, so compare
            : // it against TrueConstant() to produce the Word32 result.
7808 430 : Node* const is_equal = StrictEqual(lhs, rhs);
7809 430 : Node* const result = WordEqual(is_equal, TrueConstant());
7810 430 : var_result.Bind(result);
7811 430 : Goto(&out);
7812 : }
7813 :
7814 430 : BIND(&out);
7815 860 : return var_result.value();
7816 : }
7817 :
            : // Checks whether {object} has a property {key} by walking the prototype
            : // chain with TryPrototypeChainLookup. Returns a tagged Boolean; any case
            : // the fast lookup cannot handle bails out to the (deferred) runtime call
            : // identified by {fallback_runtime_function_id}.
7818 559 : Node* CodeStubAssembler::HasProperty(
7819 : Node* object, Node* key, Node* context,
7820 : Runtime::FunctionId fallback_runtime_function_id) {
7821 1118 : Label call_runtime(this, Label::kDeferred), return_true(this),
7822 559 : return_false(this), end(this);
7823 :
            : // Named-property probe for a single holder on the prototype chain;
            : // jumps to return_true on a hit, next_holder to continue the walk.
7824 : CodeStubAssembler::LookupInHolder lookup_property_in_holder =
7825 : [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
7826 : Node* holder_instance_type, Node* unique_name,
7827 : Label* next_holder, Label* if_bailout) {
7828 : TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
7829 559 : &return_true, next_holder, if_bailout);
7830 : };
7831 :
            : // Indexed-element probe for a single holder; can conclude either way.
7832 : CodeStubAssembler::LookupInHolder lookup_element_in_holder =
7833 : [this, &return_true, &return_false](
7834 : Node* receiver, Node* holder, Node* holder_map,
7835 : Node* holder_instance_type, Node* index, Label* next_holder,
7836 : Label* if_bailout) {
7837 : TryLookupElement(holder, holder_map, holder_instance_type, index,
7838 559 : &return_true, &return_false, next_holder, if_bailout);
7839 : };
7840 :
7841 : TryPrototypeChainLookup(object, key, lookup_property_in_holder,
7842 : lookup_element_in_holder, &return_false,
7843 559 : &call_runtime);
7844 :
7845 1118 : VARIABLE(result, MachineRepresentation::kTagged);
7846 559 : BIND(&return_true);
7847 : {
7848 559 : result.Bind(BooleanConstant(true));
7849 559 : Goto(&end);
7850 : }
7851 :
7852 559 : BIND(&return_false);
7853 : {
7854 559 : result.Bind(BooleanConstant(false));
7855 559 : Goto(&end);
7856 : }
7857 :
7858 559 : BIND(&call_runtime);
7859 : {
7860 : result.Bind(
7861 559 : CallRuntime(fallback_runtime_function_id, context, object, key));
7862 559 : Goto(&end);
7863 : }
7864 :
7865 559 : BIND(&end);
7866 1118 : return result.value();
7867 : }
7868 :
            : // Computes a class-name string for {value}: "Function" for functions,
            : // the constructor's instance class name (or FunctionTemplateInfo class
            : // name) for other receivers, "Object" as fallback, and Null for
            : // primitives (including Smis).
7869 172 : Node* CodeStubAssembler::ClassOf(Node* value) {
7870 172 : VARIABLE(var_result, MachineRepresentation::kTaggedPointer);
7871 172 : Label if_function_template_info(this, Label::kDeferred),
7872 172 : if_no_class_name(this, Label::kDeferred),
7873 172 : if_function(this, Label::kDeferred), if_object(this, Label::kDeferred),
7874 172 : if_primitive(this, Label::kDeferred), return_result(this);
7875 :
7876 : // Check if {value} is a Smi.
7877 172 : GotoIf(TaggedIsSmi(value), &if_primitive);
7878 :
7879 : Node* value_map = LoadMap(value);
7880 : Node* value_instance_type = LoadMapInstanceType(value_map);
7881 :
7882 : // Check if {value} is a JSFunction or JSBoundFunction.
7883 : STATIC_ASSERT(LAST_TYPE == LAST_FUNCTION_TYPE);
7884 : GotoIf(Uint32LessThanOrEqual(Int32Constant(FIRST_FUNCTION_TYPE),
7885 : value_instance_type),
7886 172 : &if_function);
7887 :
7888 : // Check if {value} is a primitive HeapObject.
7889 : STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
7890 : GotoIf(Uint32LessThan(value_instance_type,
7891 : Int32Constant(FIRST_JS_RECEIVER_TYPE)),
7892 172 : &if_primitive);
7893 :
7894 : // Load the {value}s constructor, and check that it's a JSFunction.
7895 172 : Node* constructor = LoadMapConstructor(value_map);
7896 : GotoIf(HasInstanceType(constructor, FUNCTION_TEMPLATE_INFO_TYPE),
7897 172 : &if_function_template_info);
7898 172 : GotoIfNot(IsJSFunction(constructor), &if_object);
7899 :
7900 : // Return the instance class name for the {constructor}.
7901 : Node* shared_info =
7902 172 : LoadObjectField(constructor, JSFunction::kSharedFunctionInfoOffset);
7903 : Node* instance_class_name = LoadObjectField(
7904 172 : shared_info, SharedFunctionInfo::kInstanceClassNameOffset);
7905 172 : var_result.Bind(instance_class_name);
7906 172 : Goto(&return_result);
7907 :
7908 : // For remote objects the constructor might be given as FTI.
7909 172 : BIND(&if_function_template_info);
7910 : Node* class_name =
7911 172 : LoadObjectField(constructor, FunctionTemplateInfo::kClassNameOffset);
7912 172 : GotoIf(IsUndefined(class_name), &if_no_class_name);
7913 172 : var_result.Bind(class_name);
7914 172 : Goto(&return_result);
7915 :
            : // FTI present but without a class name: fall back to the empty string.
7916 172 : BIND(&if_no_class_name);
7917 172 : var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
7918 172 : Goto(&return_result);
7919 :
7920 172 : BIND(&if_function);
7921 172 : var_result.Bind(LoadRoot(Heap::kFunction_stringRootIndex));
7922 172 : Goto(&return_result);
7923 :
7924 172 : BIND(&if_object);
7925 172 : var_result.Bind(LoadRoot(Heap::kObject_stringRootIndex));
7926 172 : Goto(&return_result);
7927 :
7928 172 : BIND(&if_primitive);
7929 172 : var_result.Bind(NullConstant());
7930 172 : Goto(&return_result);
7931 :
7932 172 : BIND(&return_result);
7933 344 : return var_result.value();
7934 : }
7935 :
            : // Implements the JavaScript `typeof` operator for {value}, returning the
            : // corresponding type-name string ("number", "function", "undefined",
            : // "object", "string", "symbol", or the Oddball's cached typeof string).
7936 172 : Node* CodeStubAssembler::Typeof(Node* value) {
7937 172 : VARIABLE(result_var, MachineRepresentation::kTagged);
7938 :
7939 172 : Label return_number(this, Label::kDeferred), if_oddball(this),
7940 172 : return_function(this), return_undefined(this), return_object(this),
7941 172 : return_string(this), return_result(this);
7942 :
7943 172 : GotoIf(TaggedIsSmi(value), &return_number);
7944 :
7945 : Node* map = LoadMap(value);
7946 :
7947 172 : GotoIf(IsHeapNumberMap(map), &return_number);
7948 :
7949 : Node* instance_type = LoadMapInstanceType(map);
7950 :
            : // Oddballs (true/false/null/undefined/...) carry their typeof string in
            : // the Oddball::kTypeOfOffset field, handled in if_oddball below.
7951 172 : GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
7952 :
            : // Test both map bits at once: callable-and-not-undetectable means
            : // "function"; any undetectable bit means "undefined" (e.g.
            : // document.all-style objects).
7953 : Node* callable_or_undetectable_mask = Word32And(
7954 : LoadMapBitField(map),
7955 344 : Int32Constant(1 << Map::kIsCallable | 1 << Map::kIsUndetectable));
7956 :
7957 : GotoIf(Word32Equal(callable_or_undetectable_mask,
7958 : Int32Constant(1 << Map::kIsCallable)),
7959 172 : &return_function);
7960 :
7961 : GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
7962 172 : &return_undefined);
7963 :
7964 172 : GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);
7965 :
7966 172 : GotoIf(IsStringInstanceType(instance_type), &return_string);
7967 :
            : // Only Symbol remains at this point (asserted in debug builds).
7968 : CSA_ASSERT(this, Word32Equal(instance_type, Int32Constant(SYMBOL_TYPE)));
7969 344 : result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
7970 172 : Goto(&return_result);
7971 :
7972 172 : BIND(&return_number);
7973 : {
7974 344 : result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
7975 172 : Goto(&return_result);
7976 : }
7977 :
7978 172 : BIND(&if_oddball);
7979 : {
7980 172 : Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
7981 172 : result_var.Bind(type);
7982 172 : Goto(&return_result);
7983 : }
7984 :
7985 172 : BIND(&return_function);
7986 : {
7987 344 : result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
7988 172 : Goto(&return_result);
7989 : }
7990 :
7991 172 : BIND(&return_undefined);
7992 : {
7993 344 : result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
7994 172 : Goto(&return_result);
7995 : }
7996 :
7997 172 : BIND(&return_object);
7998 : {
7999 344 : result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
8000 172 : Goto(&return_result);
8001 : }
8002 :
8003 172 : BIND(&return_string);
8004 : {
8005 344 : result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
8006 172 : Goto(&return_result);
8007 : }
8008 :
8009 172 : BIND(&return_result);
8010 344 : return result_var.value();
8011 : }
8012 :
            : // Returns the super constructor of {active_function}, i.e. the prototype
            : // of its map. If that prototype's map is not marked as a constructor,
            : // throws ThrowNotSuperConstructor via the runtime (does not return).
8013 172 : Node* CodeStubAssembler::GetSuperConstructor(Node* active_function,
8014 : Node* context) {
8015 : CSA_ASSERT(this, IsJSFunction(active_function));
8016 :
8017 344 : Label is_not_constructor(this, Label::kDeferred), out(this);
8018 344 : VARIABLE(result, MachineRepresentation::kTagged);
8019 :
8020 : Node* map = LoadMap(active_function);
8021 : Node* prototype = LoadMapPrototype(map);
8022 : Node* prototype_map = LoadMap(prototype);
8023 172 : GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);
8024 :
8025 172 : result.Bind(prototype);
8026 172 : Goto(&out);
8027 :
8028 172 : BIND(&is_not_constructor);
8029 : {
8030 : CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
8031 172 : active_function);
8032 172 : Unreachable();
8033 : }
8034 :
8035 172 : BIND(&out);
8036 344 : return result.value();
8037 : }
8038 :
            : // Implements the `instanceof` operator: `object instanceof callable`.
            : // Looks up callable[@@hasInstance]; fast-paths the default
            : // Function.prototype[@@hasInstance], otherwise calls the custom handler
            : // or falls back to OrdinaryHasInstance. Throws (via runtime, no return)
            : // when {callable} is not a receiver or not callable. Returns a tagged
            : // Boolean.
8039 172 : Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
8040 : Node* context) {
8041 172 : VARIABLE(var_result, MachineRepresentation::kTagged);
8042 172 : Label if_notcallable(this, Label::kDeferred),
8043 172 : if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
8044 172 : if_nohandler(this, Label::kDeferred), return_true(this),
8045 172 : return_false(this), return_result(this, &var_result);
8046 :
8047 : // Ensure that the {callable} is actually a JSReceiver.
8048 172 : GotoIf(TaggedIsSmi(callable), &if_notreceiver);
8049 172 : GotoIfNot(IsJSReceiver(callable), &if_notreceiver);
8050 :
8051 : // Load the @@hasInstance property from {callable}.
8052 : Node* inst_of_handler =
8053 172 : GetProperty(context, callable, HasInstanceSymbolConstant());
8054 :
8055 : // Optimize for the likely case where {inst_of_handler} is the builtin
8056 : // Function.prototype[@@hasInstance] method, and emit a direct call in
8057 : // that case without any additional checking.
8058 : Node* native_context = LoadNativeContext(context);
8059 : Node* function_has_instance =
8060 172 : LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
8061 : GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
8062 172 : &if_otherhandler);
8063 : {
8064 : // Call to Function.prototype[@@hasInstance] directly.
8065 : Callable builtin(isolate()->builtins()->FunctionPrototypeHasInstance(),
8066 516 : CallTrampolineDescriptor(isolate()));
8067 172 : Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
8068 172 : var_result.Bind(result);
8069 172 : Goto(&return_result);
8070 : }
8071 :
8072 172 : BIND(&if_otherhandler);
8073 : {
8074 : // Check if there's actually an {inst_of_handler}.
8075 172 : GotoIf(IsNull(inst_of_handler), &if_nohandler);
8076 172 : GotoIf(IsUndefined(inst_of_handler), &if_nohandler);
8077 :
8078 : // Call the {inst_of_handler} for {callable} and {object}.
8079 : Node* result = CallJS(
8080 : CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
8081 344 : context, inst_of_handler, callable, object);
8082 :
8083 : // Convert the {result} to a Boolean.
8084 172 : BranchIfToBooleanIsTrue(result, &return_true, &return_false);
8085 : }
8086 :
8087 172 : BIND(&if_nohandler);
8088 : {
8089 : // Ensure that the {callable} is actually Callable.
8090 172 : GotoIfNot(IsCallable(callable), &if_notcallable);
8091 :
8092 : // Use the OrdinaryHasInstance algorithm.
8093 : Node* result = CallStub(CodeFactory::OrdinaryHasInstance(isolate()),
8094 344 : context, callable, object);
8095 172 : var_result.Bind(result);
8096 172 : Goto(&return_result);
8097 : }
8098 :
8099 172 : BIND(&if_notcallable);
8100 : {
8101 172 : CallRuntime(Runtime::kThrowNonCallableInInstanceOfCheck, context);
8102 172 : Unreachable();
8103 : }
8104 :
8105 172 : BIND(&if_notreceiver);
8106 : {
8107 172 : CallRuntime(Runtime::kThrowNonObjectInInstanceOfCheck, context);
8108 172 : Unreachable();
8109 : }
8110 :
8111 172 : BIND(&return_true);
8112 172 : var_result.Bind(TrueConstant());
8113 172 : Goto(&return_result);
8114 :
8115 172 : BIND(&return_false);
8116 172 : var_result.Bind(FalseConstant());
8117 172 : Goto(&return_result);
8118 :
8119 172 : BIND(&return_result);
8120 344 : return var_result.value();
8121 : }
8122 :
            : // Returns {value} + 1, where {value} is a Number (Smi or HeapNumber).
            : // Fast path: raw word addition of the Smi bit patterns with overflow
            : // check; on overflow or a HeapNumber input, falls back to Float64
            : // addition and allocates a fresh HeapNumber for the result.
8123 688 : Node* CodeStubAssembler::NumberInc(Node* value) {
8124 688 : VARIABLE(var_result, MachineRepresentation::kTagged);
8125 1376 : VARIABLE(var_finc_value, MachineRepresentation::kFloat64);
8126 688 : Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
8127 688 : Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
8128 :
8129 688 : BIND(&if_issmi);
8130 : {
8131 : // Try fast Smi addition first.
            : // Adding the raw tagged words is equivalent to Smi addition because
            : // both operands carry the same Smi tag encoding.
8132 688 : Node* one = SmiConstant(Smi::FromInt(1));
8133 : Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value),
8134 688 : BitcastTaggedToWord(one));
8135 688 : Node* overflow = Projection(1, pair);
8136 :
8137 : // Check if the Smi addition overflowed.
8138 688 : Label if_overflow(this), if_notoverflow(this);
8139 688 : Branch(overflow, &if_overflow, &if_notoverflow);
8140 :
8141 688 : BIND(&if_notoverflow);
8142 688 : var_result.Bind(BitcastWordToTaggedSigned(Projection(0, pair)));
8143 688 : Goto(&end);
8144 :
8145 688 : BIND(&if_overflow);
8146 : {
8147 688 : var_finc_value.Bind(SmiToFloat64(value));
8148 688 : Goto(&do_finc);
8149 688 : }
8150 : }
8151 :
8152 688 : BIND(&if_isnotsmi);
8153 : {
8154 : // Check if the value is a HeapNumber.
8155 : CSA_ASSERT(this, IsHeapNumberMap(LoadMap(value)));
8156 :
8157 : // Load the HeapNumber value.
8158 688 : var_finc_value.Bind(LoadHeapNumberValue(value));
8159 688 : Goto(&do_finc);
8160 : }
8161 :
8162 688 : BIND(&do_finc);
8163 : {
8164 688 : Node* finc_value = var_finc_value.value();
8165 688 : Node* one = Float64Constant(1.0);
8166 688 : Node* finc_result = Float64Add(finc_value, one);
8167 688 : var_result.Bind(AllocateHeapNumberWithValue(finc_result));
8168 688 : Goto(&end);
8169 : }
8170 :
8171 688 : BIND(&end);
8172 1376 : return var_result.value();
8173 : }
8174 :
Node* CodeStubAssembler::NumberDec(Node* value) {
  // Decrements a Number (Smi or HeapNumber) by one and returns the result as
  // a new Number. Smi overflow falls back to a float64 decrement.
  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_fdec_value, MachineRepresentation::kFloat64);
  Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);

  BIND(&if_issmi);
  {
    // Try fast Smi subtraction first.
    Node* one = SmiConstant(Smi::FromInt(1));
    Node* pair = IntPtrSubWithOverflow(BitcastTaggedToWord(value),
                                       BitcastTaggedToWord(one));
    Node* overflow = Projection(1, pair);

    // Check if the Smi subtraction overflowed.
    Label if_overflow(this), if_notoverflow(this);
    Branch(overflow, &if_overflow, &if_notoverflow);

    BIND(&if_notoverflow);
    var_result.Bind(BitcastWordToTaggedSigned(Projection(0, pair)));
    Goto(&end);

    BIND(&if_overflow);
    {
      // Smi range exceeded; redo the decrement in float64.
      var_fdec_value.Bind(SmiToFloat64(value));
      Goto(&do_fdec);
    }
  }

  BIND(&if_isnotsmi);
  {
    // Check if the value is a HeapNumber.
    CSA_ASSERT(this, IsHeapNumberMap(LoadMap(value)));

    // Load the HeapNumber value.
    var_fdec_value.Bind(LoadHeapNumberValue(value));
    Goto(&do_fdec);
  }

  BIND(&do_fdec);
  {
    // Decrement by adding -1.0 and box the result in a fresh HeapNumber.
    Node* fdec_value = var_fdec_value.value();
    Node* minus_one = Float64Constant(-1.0);
    Node* fdec_result = Float64Add(fdec_value, minus_one);
    var_result.Bind(AllocateHeapNumberWithValue(fdec_result));
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
8226 :
8227 365 : void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
8228 365 : Label is_number(this);
8229 365 : GotoIf(TaggedIsSmi(input), &is_number);
8230 : Node* input_map = LoadMap(input);
8231 365 : Branch(IsHeapNumberMap(input_map), &is_number, is_not_number);
8232 365 : BIND(&is_number);
8233 365 : }
8234 :
8235 86 : void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
8236 86 : GotoIf(TaggedIsSmi(input), is_number);
8237 : Node* input_map = LoadMap(input);
8238 86 : GotoIf(IsHeapNumberMap(input_map), is_number);
8239 86 : }
8240 :
Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
                                             Node* array_type, Node* context,
                                             IterationKind mode) {
  // Creates a JSArrayIterator over {array}, selecting the iterator map from
  // the native context based on the iteration kind ({mode}) and the array's
  // elements kind. The map-index arithmetic below relies on the fixed layout
  // of the iterator map slots in Context.
  int kBaseMapIndex = 0;
  switch (mode) {
    case IterationKind::kKeys:
      kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kValues:
      kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kEntries:
      kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX;
      break;
  }

  // Fast Array iterator map index:
  // (kBaseIndex + kFastIteratorOffset) + ElementsKind (for JSArrays)
  // kBaseIndex + (ElementsKind - UINT8_ELEMENTS) (for JSTypedArrays)
  const int kFastIteratorOffset =
      Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kFastIteratorOffset ==
                (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));

  // Slow Array iterator map index: (kBaseIndex + kSlowIteratorOffset)
  const int kSlowIteratorOffset =
      Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kSlowIteratorOffset ==
                (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));

  // Assert: Type(array) is Object
  CSA_ASSERT(this, IsJSReceiverInstanceType(array_type));

  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_map_index, MachineType::PointerRepresentation());
  VARIABLE(var_array_map, MachineRepresentation::kTagged);

  Label return_result(this);
  Label allocate_iterator(this);

  if (mode == IterationKind::kKeys) {
    // There are only two key iterator maps, branch depending on whether or not
    // the receiver is a TypedArray or not.

    Label if_istypedarray(this), if_isgeneric(this);

    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
           &if_istypedarray, &if_isgeneric);

    BIND(&if_isgeneric);
    {
      Label if_isfast(this), if_isslow(this);
      BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
                          &if_isfast, &if_isslow);

      BIND(&if_isfast);
      {
        // Fast JSArray: remember the array map so the iterator can later
        // detect map changes of the iterated object.
        var_map_index.Bind(
            IntPtrConstant(Context::FAST_ARRAY_KEY_ITERATOR_MAP_INDEX));
        var_array_map.Bind(array_map);
        Goto(&allocate_iterator);
      }

      BIND(&if_isslow);
      {
        var_map_index.Bind(
            IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX));
        var_array_map.Bind(UndefinedConstant());
        Goto(&allocate_iterator);
      }
    }

    BIND(&if_istypedarray);
    {
      var_map_index.Bind(
          IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX));
      var_array_map.Bind(UndefinedConstant());
      Goto(&allocate_iterator);
    }
  } else {
    // Value and entry iterators: map index depends on the elements kind.
    Label if_istypedarray(this), if_isgeneric(this);
    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
           &if_istypedarray, &if_isgeneric);

    BIND(&if_isgeneric);
    {
      Label if_isfast(this), if_isslow(this);
      BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
                          &if_isfast, &if_isslow);

      BIND(&if_isfast);
      {
        Label if_ispacked(this), if_isholey(this);
        Node* elements_kind = LoadMapElementsKind(array_map);
        Branch(IsHoleyFastElementsKind(elements_kind), &if_isholey,
               &if_ispacked);

        BIND(&if_isholey);
        {
          // Fast holey JSArrays can treat the hole as undefined if the
          // protector cell is valid, and the prototype chain is unchanged from
          // its initial state (because the protector cell is only tracked for
          // the initial Array and Object prototypes). Check these conditions
          // here, and take the slow path if any fail.
          Node* protector_cell = LoadRoot(Heap::kArrayProtectorRootIndex);
          DCHECK(isolate()->heap()->array_protector()->IsPropertyCell());
          GotoIfNot(
              WordEqual(
                  LoadObjectField(protector_cell, PropertyCell::kValueOffset),
                  SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
              &if_isslow);

          Node* native_context = LoadNativeContext(context);

          // The array's prototype must still be the initial Array prototype.
          Node* prototype = LoadMapPrototype(array_map);
          Node* array_prototype = LoadContextElement(
              native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
          GotoIfNot(WordEqual(prototype, array_prototype), &if_isslow);

          // ... whose prototype must still be the initial Object prototype.
          Node* map = LoadMap(prototype);
          prototype = LoadMapPrototype(map);
          Node* object_prototype = LoadContextElement(
              native_context, Context::INITIAL_OBJECT_PROTOTYPE_INDEX);
          GotoIfNot(WordEqual(prototype, object_prototype), &if_isslow);

          // ... which must terminate the chain (prototype == null).
          map = LoadMap(prototype);
          prototype = LoadMapPrototype(map);
          Branch(IsNull(prototype), &if_ispacked, &if_isslow);
        }
        BIND(&if_ispacked);
        {
          // Select the fast iterator map for this specific elements kind.
          Node* map_index =
              IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset),
                        ChangeUint32ToWord(LoadMapElementsKind(array_map)));
          CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
                               map_index, IntPtrConstant(kBaseMapIndex +
                                                         kFastIteratorOffset)));
          CSA_ASSERT(this, IntPtrLessThan(map_index,
                                          IntPtrConstant(kBaseMapIndex +
                                                         kSlowIteratorOffset)));

          var_map_index.Bind(map_index);
          var_array_map.Bind(array_map);
          Goto(&allocate_iterator);
        }
      }

      BIND(&if_isslow);
      {
        Node* map_index = IntPtrAdd(IntPtrConstant(kBaseMapIndex),
                                    IntPtrConstant(kSlowIteratorOffset));
        var_map_index.Bind(map_index);
        var_array_map.Bind(UndefinedConstant());
        Goto(&allocate_iterator);
      }
    }

    BIND(&if_istypedarray);
    {
      // TypedArrays index directly off the base map by elements kind.
      Node* map_index =
          IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS),
                    ChangeUint32ToWord(LoadMapElementsKind(array_map)));
      CSA_ASSERT(
          this, IntPtrLessThan(map_index, IntPtrConstant(kBaseMapIndex +
                                                         kFastIteratorOffset)));
      CSA_ASSERT(this, IntPtrGreaterThanOrEqual(map_index,
                                                IntPtrConstant(kBaseMapIndex)));
      var_map_index.Bind(map_index);
      var_array_map.Bind(UndefinedConstant());
      Goto(&allocate_iterator);
    }
  }

  BIND(&allocate_iterator);
  {
    // Fetch the selected iterator map from the native context and allocate.
    Node* map = LoadFixedArrayElement(LoadNativeContext(context),
                                      var_map_index.value());
    var_result.Bind(AllocateJSArrayIterator(array, var_array_map.value(), map));
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
8429 :
8430 258 : Node* CodeStubAssembler::AllocateJSArrayIterator(Node* array, Node* array_map,
8431 : Node* map) {
8432 258 : Node* iterator = Allocate(JSArrayIterator::kSize);
8433 258 : StoreMapNoWriteBarrier(iterator, map);
8434 : StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset,
8435 258 : Heap::kEmptyFixedArrayRootIndex);
8436 : StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
8437 258 : Heap::kEmptyFixedArrayRootIndex);
8438 : StoreObjectFieldNoWriteBarrier(iterator,
8439 258 : JSArrayIterator::kIteratedObjectOffset, array);
8440 : StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
8441 258 : SmiConstant(Smi::FromInt(0)));
8442 : StoreObjectFieldNoWriteBarrier(
8443 258 : iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map);
8444 258 : return iterator;
8445 : }
8446 :
8447 86 : Node* CodeStubAssembler::ArraySpeciesCreate(Node* context, Node* originalArray,
8448 : Node* len) {
8449 : // TODO(mvstanton): Install a fast path as well, which avoids the runtime
8450 : // call.
8451 : Node* constructor =
8452 86 : CallRuntime(Runtime::kArraySpeciesConstructor, context, originalArray);
8453 : return ConstructJS(CodeFactory::Construct(isolate()), context, constructor,
8454 86 : len);
8455 : }
8456 :
8457 5409 : Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
8458 : CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
8459 :
8460 : Node* buffer_bit_field = LoadObjectField(
8461 5409 : buffer, JSArrayBuffer::kBitFieldOffset, MachineType::Uint32());
8462 5409 : return IsSetWord32<JSArrayBuffer::WasNeutered>(buffer_bit_field);
8463 : }
8464 :
CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler, Node* argc,
                                     Node* fp,
                                     CodeStubAssembler::ParameterMode mode)
    : assembler_(assembler),
      argc_mode_(mode),
      argc_(argc),
      arguments_(nullptr),
      // When no frame pointer is supplied, use the current frame's.
      fp_(fp != nullptr ? fp : assembler->LoadFramePointer()) {
  // arguments_ points at the first (index 0) argument: it sits {argc} slots
  // above the fixed frame slots, so compute that byte offset from {argc_}.
  Node* offset = assembler->ElementOffsetFromIndex(
      argc_, FAST_ELEMENTS, mode,
      (StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
  arguments_ = assembler_->IntPtrAdd(fp_, offset);
}
8478 :
8479 179 : Node* CodeStubArguments::GetReceiver() const {
8480 : return assembler_->Load(MachineType::AnyTagged(), arguments_,
8481 179 : assembler_->IntPtrConstant(kPointerSize));
8482 : }
8483 :
8484 430 : Node* CodeStubArguments::AtIndexPtr(
8485 : Node* index, CodeStubAssembler::ParameterMode mode) const {
8486 : typedef compiler::Node Node;
8487 : Node* negated_index = assembler_->IntPtrOrSmiSub(
8488 430 : assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
8489 : Node* offset =
8490 430 : assembler_->ElementOffsetFromIndex(negated_index, FAST_ELEMENTS, mode, 0);
8491 430 : return assembler_->IntPtrAdd(arguments_, offset);
8492 : }
8493 :
Node* CodeStubArguments::AtIndex(Node* index,
                                 CodeStubAssembler::ParameterMode mode) const {
  // Loads the argument at {index}. {mode} must match the mode argc_ was
  // supplied in, and {index} must be in bounds.
  DCHECK_EQ(argc_mode_, mode);
  CSA_ASSERT(assembler_,
             assembler_->UintPtrOrSmiLessThan(index, GetLength(), mode));
  return assembler_->Load(MachineType::AnyTagged(), AtIndexPtr(index, mode));
}
8501 :
Node* CodeStubArguments::AtIndex(int index) const {
  // Convenience overload for a compile-time-constant argument index.
  return AtIndex(assembler_->IntPtrConstant(index));
}
8505 :
void CodeStubArguments::ForEach(
    const CodeStubAssembler::VariableList& vars,
    const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
    CodeStubAssembler::ParameterMode mode) {
  // Invokes {body} for each argument in [first, last). A null {first}
  // defaults to 0; a null {last} defaults to argc_ (i.e. all arguments).
  assembler_->Comment("CodeStubArguments::ForEach");
  if (first == nullptr) {
    first = assembler_->IntPtrOrSmiConstant(0, mode);
  }
  if (last == nullptr) {
    DCHECK_EQ(mode, argc_mode_);
    last = argc_;
  }
  // Arguments live at decreasing addresses from arguments_, so the element
  // offsets are subtracted and the loop below steps by -kPointerSize.
  Node* start = assembler_->IntPtrSub(
      arguments_,
      assembler_->ElementOffsetFromIndex(first, FAST_ELEMENTS, mode));
  Node* end = assembler_->IntPtrSub(
      arguments_,
      assembler_->ElementOffsetFromIndex(last, FAST_ELEMENTS, mode));
  assembler_->BuildFastLoop(vars, start, end,
                            [this, &body](Node* current) {
                              Node* arg = assembler_->Load(
                                  MachineType::AnyTagged(), current);
                              body(arg);
                            },
                            -kPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
                            CodeStubAssembler::IndexAdvanceMode::kPost);
}
8533 :
8534 903 : void CodeStubArguments::PopAndReturn(Node* value) {
8535 : assembler_->PopAndReturn(
8536 903 : assembler_->IntPtrAdd(argc_, assembler_->IntPtrConstant(1)), value);
8537 903 : }
8538 :
8539 1548 : Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
8540 : return Uint32LessThanOrEqual(elements_kind,
8541 1548 : Int32Constant(LAST_FAST_ELEMENTS_KIND));
8542 : }
8543 :
Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
  // Returns whether {elements_kind} is one of the holey fast kinds. Relies on
  // the enum layout placing each holey kind at (packed kind | 1).
  CSA_ASSERT(this, IsFastElementsKind(elements_kind));

  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == (FAST_SMI_ELEMENTS | 1));
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == (FAST_ELEMENTS | 1));
  STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == (FAST_DOUBLE_ELEMENTS | 1));

  // Test the low bit, which distinguishes holey from packed kinds.
  Node* holey_elements = Word32And(elements_kind, Int32Constant(1));
  return Word32Equal(holey_elements, Int32Constant(1));
}
8555 :
8556 1204 : Node* CodeStubAssembler::IsElementsKindGreaterThan(
8557 : Node* target_kind, ElementsKind reference_kind) {
8558 1204 : return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
8559 : }
8560 :
8561 2200 : Node* CodeStubAssembler::IsDebugActive() {
8562 : Node* is_debug_active = Load(
8563 : MachineType::Uint8(),
8564 2200 : ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
8565 2200 : return Word32NotEqual(is_debug_active, Int32Constant(0));
8566 : }
8567 :
8568 2779 : Node* CodeStubAssembler::IsPromiseHookEnabledOrDebugIsActive() {
8569 : Node* const promise_hook_or_debug_is_active =
8570 : Load(MachineType::Uint8(),
8571 : ExternalConstant(
8572 : ExternalReference::promise_hook_or_debug_is_active_address(
8573 2779 : isolate())));
8574 2779 : return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0));
8575 : }
8576 :
Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
                                                           Node* shared_info,
                                                           Node* context) {
  // Allocates a JSFunction with the given {map} and {context}, wired to
  // {shared_info} and to the code object recorded on {shared_info}.
  // Compute the raw entry point of the shared info's code object.
  Node* const code = BitcastTaggedToWord(
      LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset));
  Node* const code_entry =
      IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));

  Node* const fun = Allocate(JSFunction::kSize);
  StoreMapNoWriteBarrier(fun, map);
  StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(fun, JSFunction::kFeedbackVectorOffset,
                       Heap::kUndefinedCellRootIndex);
  StoreObjectFieldRoot(fun, JSFunction::kPrototypeOrInitialMapOffset,
                       Heap::kTheHoleValueRootIndex);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
  // The code entry is a raw pointer, stored with pointer representation.
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeEntryOffset, code_entry,
                                 MachineType::PointerRepresentation());
  StoreObjectFieldRoot(fun, JSFunction::kNextFunctionLinkOffset,
                       Heap::kUndefinedValueRootIndex);

  return fun;
}
8605 :
Node* CodeStubAssembler::AllocatePromiseReactionJobInfo(
    Node* value, Node* tasks, Node* deferred_promise, Node* deferred_on_resolve,
    Node* deferred_on_reject, Node* context) {
  // Allocates a PromiseReactionJobInfo and initializes all of its fields
  // from the given values. The object is fresh, so no write barriers are
  // needed for the stores below.
  Node* const result = Allocate(PromiseReactionJobInfo::kSize);
  StoreMapNoWriteBarrier(result, Heap::kPromiseReactionJobInfoMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kValueOffset,
                                 value);
  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kTasksOffset,
                                 tasks);
  StoreObjectFieldNoWriteBarrier(
      result, PromiseReactionJobInfo::kDeferredPromiseOffset, deferred_promise);
  StoreObjectFieldNoWriteBarrier(
      result, PromiseReactionJobInfo::kDeferredOnResolveOffset,
      deferred_on_resolve);
  StoreObjectFieldNoWriteBarrier(
      result, PromiseReactionJobInfo::kDeferredOnRejectOffset,
      deferred_on_reject);
  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kContextOffset,
                                 context);
  return result;
}
8627 :
8628 0 : Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
8629 : StackFrame::Type frame_type) {
8630 : return WordEqual(marker_or_function,
8631 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
8632 : }
8633 :
8634 430 : Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
8635 : StackFrame::Type frame_type) {
8636 : return WordNotEqual(marker_or_function,
8637 430 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
8638 : }
8639 :
void CodeStubAssembler::Print(const char* s) {
// Debug helper: emits code that prints {s} (plus a newline) at stub runtime
// via Runtime::kGlobalPrint. Compiles to nothing in release builds.
#ifdef DEBUG
  std::string formatted(s);
  formatted += "\n";
  Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
      formatted.c_str(), TENURED);
  CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), HeapConstant(string));
#endif
}
8649 :
void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
// Debug helper: emits code that prints an optional "{prefix}: " label and
// then {tagged_value} via Runtime::kDebugPrint. Compiles to nothing in
// release builds.
#ifdef DEBUG
  if (prefix != nullptr) {
    std::string formatted(prefix);
    formatted += ": ";
    Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
        formatted.c_str(), TENURED);
    CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
                HeapConstant(string));
  }
  CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
#endif
}
8663 :
8664 : } // namespace internal
8665 : } // namespace v8
|