Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/code-stub-assembler.h"
6 :
7 : #include "src/code-factory.h"
8 : #include "src/counters.h"
9 : #include "src/frames-inl.h"
10 : #include "src/frames.h"
11 : #include "src/function-kind.h"
12 : #include "src/heap/heap-inl.h" // For Page/MemoryChunk. TODO(jkummerow): Drop.
13 : #include "src/objects/api-callbacks.h"
14 : #include "src/objects/cell.h"
15 : #include "src/objects/descriptor-array.h"
16 : #include "src/objects/heap-number.h"
17 : #include "src/objects/oddball.h"
18 : #include "src/objects/ordered-hash-table-inl.h"
19 : #include "src/objects/property-cell.h"
20 : #include "src/wasm/wasm-objects.h"
21 :
22 : namespace v8 {
23 : namespace internal {
24 :
25 : using compiler::Node;
26 : template <class T>
27 : using TNode = compiler::TNode<T>;
28 : template <class T>
29 : using SloppyTNode = compiler::SloppyTNode<T>;
30 :
31 484441 : CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
32 484441 : : compiler::CodeAssembler(state), BaseBuiltinsFromDSLAssembler(state) {
33 : if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
34 : HandleBreakOnNode();
35 : }
36 484441 : }
37 :
38 0 : void CodeStubAssembler::HandleBreakOnNode() {
39 : // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
40 : // string specifying the name of a stub and NODE is number specifying node id.
41 0 : const char* name = state()->name();
42 0 : size_t name_length = strlen(name);
43 0 : if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
44 : // Different name.
45 0 : return;
46 : }
47 0 : size_t option_length = strlen(FLAG_csa_trap_on_node);
48 0 : if (option_length < name_length + 2 ||
49 0 : FLAG_csa_trap_on_node[name_length] != ',') {
50 : // Option is too short.
51 0 : return;
52 : }
53 0 : const char* start = &FLAG_csa_trap_on_node[name_length + 1];
54 : char* end;
55 0 : int node_id = static_cast<int>(strtol(start, &end, 10));
56 0 : if (start == end) {
57 : // Bad node id.
58 0 : return;
59 : }
60 0 : BreakOnNode(node_id);
61 : }
62 :
63 0 : void CodeStubAssembler::Assert(const BranchGenerator& branch,
64 : const char* message, const char* file, int line,
65 : Node* extra_node1, const char* extra_node1_name,
66 : Node* extra_node2, const char* extra_node2_name,
67 : Node* extra_node3, const char* extra_node3_name,
68 : Node* extra_node4, const char* extra_node4_name,
69 : Node* extra_node5,
70 : const char* extra_node5_name) {
71 : #if defined(DEBUG)
72 : if (FLAG_debug_code) {
73 : Check(branch, message, file, line, extra_node1, extra_node1_name,
74 : extra_node2, extra_node2_name, extra_node3, extra_node3_name,
75 : extra_node4, extra_node4_name, extra_node5, extra_node5_name);
76 : }
77 : #endif
78 0 : }
79 :
80 0 : void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
81 : const char* message, const char* file, int line,
82 : Node* extra_node1, const char* extra_node1_name,
83 : Node* extra_node2, const char* extra_node2_name,
84 : Node* extra_node3, const char* extra_node3_name,
85 : Node* extra_node4, const char* extra_node4_name,
86 : Node* extra_node5,
87 : const char* extra_node5_name) {
88 : #if defined(DEBUG)
89 : if (FLAG_debug_code) {
90 : Check(condition_body, message, file, line, extra_node1, extra_node1_name,
91 : extra_node2, extra_node2_name, extra_node3, extra_node3_name,
92 : extra_node4, extra_node4_name, extra_node5, extra_node5_name);
93 : }
94 : #endif
95 0 : }
96 :
#ifdef DEBUG
namespace {
// Debug helper: when |node| is non-null, emit a runtime call that prints the
// node's value labelled with |node_name| as part of an assert failure.
void MaybePrintNodeWithName(CodeStubAssembler* csa, Node* node,
                            const char* node_name) {
  if (node == nullptr) return;
  csa->CallRuntime(Runtime::kPrintWithNameForAssert, csa->SmiConstant(0),
                   csa->StringConstant(node_name), node);
}
}  // namespace
#endif
108 :
109 0 : void CodeStubAssembler::Check(const BranchGenerator& branch,
110 : const char* message, const char* file, int line,
111 : Node* extra_node1, const char* extra_node1_name,
112 : Node* extra_node2, const char* extra_node2_name,
113 : Node* extra_node3, const char* extra_node3_name,
114 : Node* extra_node4, const char* extra_node4_name,
115 : Node* extra_node5, const char* extra_node5_name) {
116 0 : Label ok(this);
117 0 : Label not_ok(this, Label::kDeferred);
118 0 : if (message != nullptr && FLAG_code_comments) {
119 0 : Comment("[ Assert: ", message);
120 : } else {
121 0 : Comment("[ Assert");
122 : }
123 0 : branch(&ok, ¬_ok);
124 :
125 0 : BIND(¬_ok);
126 : FailAssert(message, file, line, extra_node1, extra_node1_name, extra_node2,
127 : extra_node2_name, extra_node3, extra_node3_name, extra_node4,
128 0 : extra_node4_name, extra_node5, extra_node5_name);
129 :
130 0 : BIND(&ok);
131 0 : Comment("] Assert");
132 0 : }
133 :
134 0 : void CodeStubAssembler::Check(const NodeGenerator& condition_body,
135 : const char* message, const char* file, int line,
136 : Node* extra_node1, const char* extra_node1_name,
137 : Node* extra_node2, const char* extra_node2_name,
138 : Node* extra_node3, const char* extra_node3_name,
139 : Node* extra_node4, const char* extra_node4_name,
140 : Node* extra_node5, const char* extra_node5_name) {
141 0 : BranchGenerator branch = [=](Label* ok, Label* not_ok) {
142 0 : Node* condition = condition_body();
143 : DCHECK_NOT_NULL(condition);
144 0 : Branch(condition, ok, not_ok);
145 0 : };
146 :
147 : Check(branch, message, file, line, extra_node1, extra_node1_name, extra_node2,
148 : extra_node2_name, extra_node3, extra_node3_name, extra_node4,
149 0 : extra_node4_name, extra_node5, extra_node5_name);
150 0 : }
151 :
152 120660 : void CodeStubAssembler::FastCheck(TNode<BoolT> condition) {
153 120660 : Label ok(this);
154 120660 : GotoIf(condition, &ok);
155 120660 : DebugBreak();
156 120660 : Goto(&ok);
157 120660 : BIND(&ok);
158 120660 : }
159 :
160 432 : void CodeStubAssembler::FailAssert(
161 : const char* message, const char* file, int line, Node* extra_node1,
162 : const char* extra_node1_name, Node* extra_node2,
163 : const char* extra_node2_name, Node* extra_node3,
164 : const char* extra_node3_name, Node* extra_node4,
165 : const char* extra_node4_name, Node* extra_node5,
166 : const char* extra_node5_name) {
167 : DCHECK_NOT_NULL(message);
168 : char chars[1024];
169 432 : Vector<char> buffer(chars);
170 432 : if (file != nullptr) {
171 432 : SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
172 : } else {
173 0 : SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
174 : }
175 432 : Node* message_node = StringConstant(&(buffer[0]));
176 :
177 : #ifdef DEBUG
178 : // Only print the extra nodes in debug builds.
179 : MaybePrintNodeWithName(this, extra_node1, extra_node1_name);
180 : MaybePrintNodeWithName(this, extra_node2, extra_node2_name);
181 : MaybePrintNodeWithName(this, extra_node3, extra_node3_name);
182 : MaybePrintNodeWithName(this, extra_node4, extra_node4_name);
183 : MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
184 : #endif
185 :
186 432 : DebugAbort(message_node);
187 432 : Unreachable();
188 432 : }
189 :
190 45468 : Node* CodeStubAssembler::SelectImpl(TNode<BoolT> condition,
191 : const NodeGenerator& true_body,
192 : const NodeGenerator& false_body,
193 : MachineRepresentation rep) {
194 45468 : VARIABLE(value, rep);
195 90936 : Label vtrue(this), vfalse(this), end(this);
196 45468 : Branch(condition, &vtrue, &vfalse);
197 :
198 45468 : BIND(&vtrue);
199 : {
200 45468 : value.Bind(true_body());
201 45468 : Goto(&end);
202 : }
203 45468 : BIND(&vfalse);
204 : {
205 45468 : value.Bind(false_body());
206 45468 : Goto(&end);
207 : }
208 :
209 45468 : BIND(&end);
210 90936 : return value.value();
211 : }
212 :
213 224 : TNode<Int32T> CodeStubAssembler::SelectInt32Constant(
214 : SloppyTNode<BoolT> condition, int true_value, int false_value) {
215 : return SelectConstant<Int32T>(condition, Int32Constant(true_value),
216 224 : Int32Constant(false_value));
217 : }
218 :
219 0 : TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(
220 : SloppyTNode<BoolT> condition, int true_value, int false_value) {
221 : return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
222 0 : IntPtrConstant(false_value));
223 : }
224 :
225 2860 : TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
226 : SloppyTNode<BoolT> condition) {
227 2860 : return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
228 : }
229 :
230 4144 : TNode<Smi> CodeStubAssembler::SelectSmiConstant(SloppyTNode<BoolT> condition,
231 : Smi true_value,
232 : Smi false_value) {
233 : return SelectConstant<Smi>(condition, SmiConstant(true_value),
234 4144 : SmiConstant(false_value));
235 : }
236 :
// Returns the "no context" sentinel passed to builtins that do not need a
// context; it is encoded as the Smi Context::kNoContext.
TNode<Object> CodeStubAssembler::NoContextConstant() {
  return SmiConstant(Context::kNoContext);
}
240 :
// Generates typed accessors (name##Constant()) for mutable immovable heap
// roots; the return type is derived from the corresponding Heap accessor.
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)        \
  compiler::TNode<std::remove_pointer<std::remove_reference<decltype(        \
      std::declval<Heap>().rootAccessorName())>::type>::type>                \
      CodeStubAssembler::name##Constant() {                                  \
    return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
        std::declval<Heap>().rootAccessorName())>::type>::type>(             \
        LoadRoot(RootIndex::k##rootIndexName));                              \
  }
HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
#undef HEAP_CONSTANT_ACCESSOR

// Same as above for immutable roots, whose types come from ReadOnlyRoots.
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)        \
  compiler::TNode<std::remove_pointer<std::remove_reference<decltype(        \
      std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>       \
      CodeStubAssembler::name##Constant() {                                  \
    return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
        std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>(    \
        LoadRoot(RootIndex::k##rootIndexName));                              \
  }
HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
#undef HEAP_CONSTANT_ACCESSOR

// Generates Is##name / IsNot##name predicates comparing a value against an
// immovable root by pointer equality (valid because the root never moves).
#define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name) \
  compiler::TNode<BoolT> CodeStubAssembler::Is##name(             \
      SloppyTNode<Object> value) {                                \
    return WordEqual(value, name##Constant());                    \
  }                                                               \
  compiler::TNode<BoolT> CodeStubAssembler::IsNot##name(          \
      SloppyTNode<Object> value) {                                \
    return WordNotEqual(value, name##Constant());                 \
  }
HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST)
#undef HEAP_CONSTANT_TEST
274 :
275 119900 : Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
276 119900 : if (mode == SMI_PARAMETERS) {
277 7736 : return SmiConstant(value);
278 : } else {
279 : DCHECK_EQ(INTPTR_PARAMETERS, mode);
280 112164 : return IntPtrConstant(value);
281 : }
282 : }
283 :
284 2744 : bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
285 : ParameterMode mode) {
286 : int32_t constant_test;
287 2744 : Smi smi_test;
288 2744 : if (mode == INTPTR_PARAMETERS) {
289 1512 : if (ToInt32Constant(test, constant_test) && constant_test == 0) {
290 448 : return true;
291 : }
292 : } else {
293 : DCHECK_EQ(mode, SMI_PARAMETERS);
294 1232 : if (ToSmiConstant(test, &smi_test) && smi_test->value() == 0) {
295 504 : return true;
296 : }
297 : }
298 1792 : return false;
299 : }
300 :
301 4012 : bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
302 : int* value,
303 : ParameterMode mode) {
304 : int32_t int32_constant;
305 4012 : if (mode == INTPTR_PARAMETERS) {
306 2880 : if (ToInt32Constant(maybe_constant, int32_constant)) {
307 680 : *value = int32_constant;
308 680 : return true;
309 : }
310 : } else {
311 : DCHECK_EQ(mode, SMI_PARAMETERS);
312 1132 : Smi smi_constant;
313 1132 : if (ToSmiConstant(maybe_constant, &smi_constant)) {
314 4 : *value = Smi::ToInt(smi_constant);
315 4 : return true;
316 : }
317 : }
318 3328 : return false;
319 : }
320 :
321 956 : TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
322 : TNode<IntPtrT> value) {
323 956 : Comment("IntPtrRoundUpToPowerOfTwo32");
324 : CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
325 956 : value = Signed(IntPtrSub(value, IntPtrConstant(1)));
326 5736 : for (int i = 1; i <= 16; i *= 2) {
327 4780 : value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
328 : }
329 956 : return Signed(IntPtrAdd(value, IntPtrConstant(1)));
330 : }
331 :
332 0 : Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
333 0 : if (mode == SMI_PARAMETERS) {
334 0 : return TaggedIsSmi(value);
335 : } else {
336 0 : return Int32Constant(1);
337 : }
338 : }
339 :
340 0 : TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
341 : // value && !(value & (value - 1))
342 : return WordEqual(
343 : Select<IntPtrT>(
344 0 : WordEqual(value, IntPtrConstant(0)),
345 0 : [=] { return IntPtrConstant(1); },
346 0 : [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
347 0 : IntPtrConstant(0));
348 : }
349 :
350 56 : TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
351 56 : Node* one = Float64Constant(1.0);
352 56 : Node* one_half = Float64Constant(0.5);
353 :
354 56 : Label return_x(this);
355 :
356 : // Round up {x} towards Infinity.
357 112 : VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));
358 :
359 112 : GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
360 56 : &return_x);
361 56 : var_x.Bind(Float64Sub(var_x.value(), one));
362 56 : Goto(&return_x);
363 :
364 56 : BIND(&return_x);
365 112 : return TNode<Float64T>::UncheckedCast(var_x.value());
366 : }
367 :
// Rounds {x} towards +Infinity. Uses the hardware instruction when available;
// otherwise emulates via the 2^52 addition trick (adding and subtracting 2^52
// forces rounding to integer precision for doubles in ]-2^52, 2^52[).
TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundUpSupported()) {
    return Float64RoundUp(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
    // The round-to-nearest rounded down; correct upwards by one.
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  // Undo the negation applied before rounding.
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
419 :
// Rounds {x} towards -Infinity. Uses the hardware instruction when available;
// otherwise emulates via the 2^52 addition trick (see Float64Ceil).
TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundDownSupported()) {
    return Float64RoundDown(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
    // The round-to-nearest rounded up; correct downwards by one.
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  // Undo the negation applied before rounding.
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
471 :
// Rounds {x} to the nearest integer with ties going to the even neighbour
// (banker's rounding). Hardware instruction when available, otherwise a
// floor-based emulation.
TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundTiesEvenSupported()) {
    return Float64RoundTiesEven(x);
  }
  // See ES#sec-touint8clamp for details.
  Node* f = Float64Floor(x);
  Node* f_and_half = Float64Add(f, Float64Constant(0.5));

  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label return_f(this), return_f_plus_one(this), done(this);

  // Strictly above the midpoint -> round up; strictly below -> round down.
  GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
  GotoIf(Float64LessThan(x, f_and_half), &return_f);
  {
    // Exactly on the midpoint: choose the even neighbour.
    Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
    Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
           &return_f_plus_one);
  }

  BIND(&return_f);
  var_result.Bind(f);
  Goto(&done);

  BIND(&return_f_plus_one);
  var_result.Bind(Float64Add(f, Float64Constant(1.0)));
  Goto(&done);

  BIND(&done);
  return TNode<Float64T>::UncheckedCast(var_result.value());
}
502 :
// Rounds {x} towards zero. Uses the hardware truncate when available; else
// combines round-down (positive values) and round-up (negative values),
// falling back to the 2^52 trick when those are not supported either.
TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundTruncateSupported()) {
    return Float64RoundTruncate(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    if (IsFloat64RoundDownSupported()) {
      // trunc(x) == floor(x) for positive x.
      var_x.Bind(Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    if (IsFloat64RoundUpSupported()) {
      // trunc(x) == ceil(x) for non-positive x.
      var_x.Bind(Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless its in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoIfNot(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  BIND(&return_minus_x);
  // Undo the negation applied before rounding.
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
563 :
564 0 : TNode<BoolT> CodeStubAssembler::IsValidSmi(TNode<Smi> smi) {
565 0 : if (SmiValuesAre31Bits() && kSystemPointerSize == kInt64Size) {
566 : // Check that the Smi value is properly sign-extended.
567 0 : TNode<IntPtrT> value = Signed(BitcastTaggedToWord(smi));
568 0 : return WordEqual(value, ChangeInt32ToIntPtr(TruncateIntPtrToInt32(value)));
569 : }
570 0 : return Int32TrueConstant();
571 : }
572 :
573 218416 : Node* CodeStubAssembler::SmiShiftBitsConstant() {
574 218416 : return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
575 : }
576 :
577 12896 : TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
578 12896 : TNode<IntPtrT> value_intptr = ChangeInt32ToIntPtr(value);
579 : TNode<Smi> smi =
580 12896 : BitcastWordToTaggedSigned(WordShl(value_intptr, SmiShiftBitsConstant()));
581 12896 : return smi;
582 : }
583 :
584 7332 : TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
585 : intptr_t constant_value;
586 7332 : if (ToIntPtrConstant(value, constant_value)) {
587 44 : return (static_cast<uintptr_t>(constant_value) <=
588 : static_cast<uintptr_t>(Smi::kMaxValue))
589 20 : ? Int32TrueConstant()
590 64 : : Int32FalseConstant();
591 : }
592 :
593 7288 : return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
594 : }
595 :
596 77605 : TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
597 : int32_t constant_value;
598 77605 : if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
599 5329 : return SmiConstant(constant_value);
600 : }
601 : TNode<Smi> smi =
602 72276 : BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
603 72276 : return smi;
604 : }
605 :
606 133377 : TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
607 : intptr_t constant_value;
608 133377 : if (ToIntPtrConstant(value, constant_value)) {
609 473 : return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
610 : }
611 132904 : return Signed(WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()));
612 : }
613 :
614 67508 : TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
615 67508 : TNode<IntPtrT> result = SmiUntag(value);
616 67508 : return TruncateIntPtrToInt32(result);
617 : }
618 :
619 47972 : TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
620 47972 : return ChangeInt32ToFloat64(SmiToInt32(value));
621 : }
622 :
623 2072 : TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
624 2072 : return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
625 : }
626 :
627 224 : TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
628 224 : return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
629 : }
630 :
631 4820 : TNode<IntPtrT> CodeStubAssembler::TryIntPtrAdd(TNode<IntPtrT> a,
632 : TNode<IntPtrT> b,
633 : Label* if_overflow) {
634 4820 : TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(a, b);
635 4820 : TNode<BoolT> overflow = Projection<1>(pair);
636 4820 : GotoIf(overflow, if_overflow);
637 4820 : return Projection<0>(pair);
638 : }
639 :
640 4148 : TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
641 : Label* if_overflow) {
642 4148 : if (SmiValuesAre32Bits()) {
643 : return BitcastWordToTaggedSigned(TryIntPtrAdd(
644 4148 : BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs), if_overflow));
645 : } else {
646 : DCHECK(SmiValuesAre31Bits());
647 : TNode<PairT<Int32T, BoolT>> pair =
648 0 : Int32AddWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
649 0 : TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
650 0 : TNode<BoolT> overflow = Projection<1>(pair);
651 0 : GotoIf(overflow, if_overflow);
652 0 : TNode<Int32T> result = Projection<0>(pair);
653 0 : return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
654 : }
655 : }
656 :
657 2748 : TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
658 : Label* if_overflow) {
659 2748 : if (SmiValuesAre32Bits()) {
660 : TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(
661 2748 : BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
662 2748 : TNode<BoolT> overflow = Projection<1>(pair);
663 2748 : GotoIf(overflow, if_overflow);
664 2748 : TNode<IntPtrT> result = Projection<0>(pair);
665 2748 : return BitcastWordToTaggedSigned(result);
666 : } else {
667 : DCHECK(SmiValuesAre31Bits());
668 : TNode<PairT<Int32T, BoolT>> pair =
669 0 : Int32SubWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
670 0 : TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
671 0 : TNode<BoolT> overflow = Projection<1>(pair);
672 0 : GotoIf(overflow, if_overflow);
673 0 : TNode<Int32T> result = Projection<0>(pair);
674 0 : return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
675 : }
676 : }
677 :
678 564 : TNode<Number> CodeStubAssembler::NumberMax(SloppyTNode<Number> a,
679 : SloppyTNode<Number> b) {
680 : // TODO(danno): This could be optimized by specifically handling smi cases.
681 564 : TVARIABLE(Number, result);
682 1128 : Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
683 564 : GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
684 564 : GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
685 564 : result = NanConstant();
686 564 : Goto(&done);
687 564 : BIND(&greater_than_equal_a);
688 564 : result = a;
689 564 : Goto(&done);
690 564 : BIND(&greater_than_equal_b);
691 564 : result = b;
692 564 : Goto(&done);
693 564 : BIND(&done);
694 1128 : return result.value();
695 : }
696 :
697 620 : TNode<Number> CodeStubAssembler::NumberMin(SloppyTNode<Number> a,
698 : SloppyTNode<Number> b) {
699 : // TODO(danno): This could be optimized by specifically handling smi cases.
700 620 : TVARIABLE(Number, result);
701 1240 : Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
702 620 : GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
703 620 : GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
704 620 : result = NanConstant();
705 620 : Goto(&done);
706 620 : BIND(&greater_than_equal_a);
707 620 : result = b;
708 620 : Goto(&done);
709 620 : BIND(&greater_than_equal_b);
710 620 : result = a;
711 620 : Goto(&done);
712 620 : BIND(&done);
713 1240 : return result.value();
714 : }
715 :
// Converts a JS relative index (e.g. the argument of Array.prototype.slice)
// to an absolute index clamped to [0, length]: negative indices count back
// from {length}; out-of-range values are clamped.
TNode<IntPtrT> CodeStubAssembler::ConvertToRelativeIndex(
    TNode<Context> context, TNode<Object> index, TNode<IntPtrT> length) {
  TVARIABLE(IntPtrT, result);

  // ToInteger with -0 truncated to +0, per the spec's index conversion.
  TNode<Number> const index_int =
      ToInteger_Inline(context, index, CodeStubAssembler::kTruncateMinusZero);
  TNode<IntPtrT> zero = IntPtrConstant(0);

  Label done(this);
  Label if_issmi(this), if_isheapnumber(this, Label::kDeferred);
  Branch(TaggedIsSmi(index_int), &if_issmi, &if_isheapnumber);

  BIND(&if_issmi);
  {
    TNode<Smi> const index_smi = CAST(index_int);
    // Negative: clamp max(length + index, 0); non-negative: min(index, length).
    result = Select<IntPtrT>(
        IntPtrLessThan(SmiUntag(index_smi), zero),
        [=] { return IntPtrMax(IntPtrAdd(length, SmiUntag(index_smi)), zero); },
        [=] { return IntPtrMin(SmiUntag(index_smi), length); });
    Goto(&done);
  }

  BIND(&if_isheapnumber);
  {
    // If {index} is a heap number, it is definitely out of bounds. If it is
    // negative, {index} = max({length} + {index}),0) = 0'. If it is positive,
    // set {index} to {length}.
    TNode<HeapNumber> const index_hn = CAST(index_int);
    TNode<Float64T> const float_zero = Float64Constant(0.);
    TNode<Float64T> const index_float = LoadHeapNumberValue(index_hn);
    result = SelectConstant<IntPtrT>(Float64LessThan(index_float, float_zero),
                                     zero, length);
    Goto(&done);
  }
  BIND(&done);
  return result.value();
}
753 :
// Computes {a} % {b} for Smi operands with full JS semantics: the result has
// the sign of {a}, b == 0 yields NaN, and a negative zero result is returned
// as the -0.0 heap number (which is why the return type is Number, not Smi).
TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
  TVARIABLE(Number, var_result);
  Label return_result(this, &var_result),
      return_minuszero(this, Label::kDeferred),
      return_nan(this, Label::kDeferred);

  // Untag {a} and {b}.
  TNode<Int32T> int_a = SmiToInt32(a);
  TNode<Int32T> int_b = SmiToInt32(b);

  // Return NaN if {b} is zero.
  GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);

  // Check if {a} is non-negative.
  Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
  Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
         &if_aisnegative);

  BIND(&if_aisnotnegative);
  {
    // Fast case, don't need to check any other edge cases.
    TNode<Int32T> r = Int32Mod(int_a, int_b);
    var_result = SmiFromInt32(r);
    Goto(&return_result);
  }

  BIND(&if_aisnegative);
  {
    if (SmiValuesAre32Bits()) {
      // Check if {a} is kMinInt and {b} is -1 (only relevant if the
      // kMinInt is actually representable as a Smi).
      Label join(this);
      GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
      GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
      Goto(&join);
      BIND(&join);
    }

    // Perform the integer modulus operation.
    TNode<Int32T> r = Int32Mod(int_a, int_b);

    // Check if {r} is zero, and if so return -0, because we have to
    // take the sign of the left hand side {a}, which is negative.
    GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);

    // The remainder {r} can be outside the valid Smi range on 32bit
    // architectures, so we cannot just say SmiFromInt32(r) here.
    var_result = ChangeInt32ToTagged(r);
    Goto(&return_result);
  }

  BIND(&return_minuszero);
  var_result = MinusZeroConstant();
  Goto(&return_result);

  BIND(&return_nan);
  var_result = NanConstant();
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
816 :
// Multiplies two Smis with full JS semantics: on 32-bit overflow the product
// is recomputed in float64 and boxed as a heap number, and a zero result
// whose operands' signs differ is returned as -0.0.
TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
  TVARIABLE(Number, var_result);
  VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
  VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
  Label return_result(this, &var_result);

  // Both {a} and {b} are Smis. Convert them to integers and multiply.
  Node* lhs32 = SmiToInt32(a);
  Node* rhs32 = SmiToInt32(b);
  Node* pair = Int32MulWithOverflow(lhs32, rhs32);

  Node* overflow = Projection(1, pair);

  // Check if the multiplication overflowed.
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_notoverflow);
  {
    // If the answer is zero, we may need to return -0.0, depending on the
    // input.
    Label answer_zero(this), answer_not_zero(this);
    Node* answer = Projection(0, pair);
    Node* zero = Int32Constant(0);
    Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
    BIND(&answer_not_zero);
    {
      var_result = ChangeInt32ToTagged(answer);
      Goto(&return_result);
    }
    BIND(&answer_zero);
    {
      // A zero product needs -0.0 iff exactly one operand was negative,
      // i.e. the OR of the operands has its sign bit set.
      Node* or_result = Word32Or(lhs32, rhs32);
      Label if_should_be_negative_zero(this), if_should_be_zero(this);
      Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
             &if_should_be_zero);
      BIND(&if_should_be_negative_zero);
      {
        var_result = MinusZeroConstant();
        Goto(&return_result);
      }
      BIND(&if_should_be_zero);
      {
        var_result = SmiConstant(0);
        Goto(&return_result);
      }
    }
  }
  BIND(&if_overflow);
  {
    // Overflowed: redo the multiplication in double precision and box.
    var_lhs_float64.Bind(SmiToFloat64(a));
    var_rhs_float64.Bind(SmiToFloat64(b));
    Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
    var_result = AllocateHeapNumberWithValue(value);
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
876 :
// Divides {dividend} by {divisor} and returns the exact quotient as a Smi.
// Jumps to {bailout} whenever Smi division cannot produce the correct
// result: division by zero, 0 / negative (which is -0.0), minimum-Smi / -1
// (overflow), or a non-zero remainder.
TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
                                        Label* bailout) {
  // Both {dividend} and {divisor} are Smis. Bailout to floating point
  // division if {divisor} is zero.
  GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);

  // Do floating point division if {dividend} is zero and {divisor} is
  // negative.
  Label dividend_is_zero(this), dividend_is_not_zero(this);
  Branch(WordEqual(dividend, SmiConstant(0)), &dividend_is_zero,
         &dividend_is_not_zero);

  BIND(&dividend_is_zero);
  {
    GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
    Goto(&dividend_is_not_zero);
  }
  BIND(&dividend_is_not_zero);

  TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
  TNode<Int32T> untagged_dividend = SmiToInt32(dividend);

  // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
  // if the Smi size is 31) and {divisor} is -1.
  Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
  Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
         &divisor_is_minus_one, &divisor_is_not_minus_one);

  BIND(&divisor_is_minus_one);
  {
    GotoIf(Word32Equal(
               untagged_dividend,
               Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
           bailout);
    Goto(&divisor_is_not_minus_one);
  }
  BIND(&divisor_is_not_minus_one);

  TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
  TNode<Int32T> truncated = Signed(Int32Mul(untagged_result, untagged_divisor));

  // Do floating point division if the remainder is not 0.
  GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);

  return SmiFromInt32(untagged_result);
}
923 :
924 56 : TNode<Smi> CodeStubAssembler::SmiLexicographicCompare(TNode<Smi> x,
925 : TNode<Smi> y) {
926 : TNode<ExternalReference> smi_lexicographic_compare =
927 56 : ExternalConstant(ExternalReference::smi_lexicographic_compare_function());
928 : TNode<ExternalReference> isolate_ptr =
929 56 : ExternalConstant(ExternalReference::isolate_address(isolate()));
930 56 : return CAST(CallCFunction3(MachineType::AnyTagged(), MachineType::Pointer(),
931 : MachineType::AnyTagged(), MachineType::AnyTagged(),
932 : smi_lexicographic_compare, isolate_ptr, x, y));
933 : }
934 :
935 92876 : TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
936 : SloppyTNode<IntPtrT> value) {
937 92876 : if (Is64()) {
938 92876 : return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
939 : }
940 0 : return ReinterpretCast<Int32T>(value);
941 : }
942 :
943 164976 : TNode<BoolT> CodeStubAssembler::TaggedIsSmi(SloppyTNode<Object> a) {
944 164976 : return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
945 329952 : IntPtrConstant(0));
946 : }
947 :
948 1624 : TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
949 : return WordEqual(
950 3248 : WordAnd(BitcastMaybeObjectToWord(a), IntPtrConstant(kSmiTagMask)),
951 4872 : IntPtrConstant(0));
952 : }
953 :
954 23284 : TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(SloppyTNode<Object> a) {
955 : return WordNotEqual(
956 23284 : WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
957 46568 : IntPtrConstant(0));
958 : }
959 :
960 1860 : TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
961 : return WordEqual(WordAnd(BitcastTaggedToWord(a),
962 1860 : IntPtrConstant(kSmiTagMask | kSmiSignMask)),
963 3720 : IntPtrConstant(0));
964 : }
965 :
966 0 : TNode<BoolT> CodeStubAssembler::WordIsAligned(SloppyTNode<WordT> word,
967 : size_t alignment) {
968 : DCHECK(base::bits::IsPowerOfTwo(alignment));
969 0 : return WordEqual(IntPtrConstant(0),
970 0 : WordAnd(word, IntPtrConstant(alignment - 1)));
971 : }
972 :
#if DEBUG
// Debug-only overload that forwards source-location information with the
// label bind, for better assembler diagnostics.
void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
  CodeAssembler::Bind(label, debug_info);
}
#endif  // DEBUG
978 :
979 1469865 : void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
980 :
981 1232 : TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
982 : TNode<FixedDoubleArray> array, TNode<Smi> index, Label* if_hole) {
983 : return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
984 1232 : SMI_PARAMETERS, if_hole);
985 : }
986 :
987 0 : TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
988 : TNode<FixedDoubleArray> array, TNode<IntPtrT> index, Label* if_hole) {
989 : return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
990 0 : INTPTR_PARAMETERS, if_hole);
991 : }
992 :
// Walks the prototype chain of {receiver_map} and branches to
// {definitely_no_elements} if every prototype up to null has empty elements
// (the empty fixed array or the empty slow-element dictionary), or to
// {possibly_elements} as soon as elements might exist. Proxies, special API
// objects and JSValue wrappers (other than wrappers of the empty string)
// conservatively count as "possibly elements".
void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
    Node* receiver_map, Label* definitely_no_elements,
    Label* possibly_elements) {
  CSA_SLOW_ASSERT(this, IsMap(receiver_map));
  VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
  Label loop_body(this, &var_map);
  Node* empty_fixed_array = LoadRoot(RootIndex::kEmptyFixedArray);
  Node* empty_slow_element_dictionary =
      LoadRoot(RootIndex::kEmptySlowElementDictionary);
  Goto(&loop_body);

  BIND(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    // Reaching null means the whole chain was element-free.
    GotoIf(IsNull(prototype), definitely_no_elements);
    Node* prototype_map = LoadMap(prototype);
    TNode<Int32T> prototype_instance_type = LoadMapInstanceType(prototype_map);

    // Pessimistically assume elements if a Proxy, Special API Object,
    // or JSValue wrapper is found on the prototype chain. After this
    // instance type check, it's not necessary to check for interceptors or
    // access checks.
    Label if_custom(this, Label::kDeferred), if_notcustom(this);
    Branch(IsCustomElementsReceiverInstanceType(prototype_instance_type),
           &if_custom, &if_notcustom);

    BIND(&if_custom);
    {
      // For string JSValue wrappers we still support the checks as long
      // as they wrap the empty string.
      GotoIfNot(InstanceTypeEqual(prototype_instance_type, JS_VALUE_TYPE),
                possibly_elements);
      Node* prototype_value = LoadJSValueValue(prototype);
      Branch(IsEmptyString(prototype_value), &if_notcustom, possibly_elements);
    }

    BIND(&if_notcustom);
    {
      Node* prototype_elements = LoadElements(prototype);
      var_map.Bind(prototype_map);
      // Continue the walk only while the elements store is one of the two
      // known-empty sentinels.
      GotoIf(WordEqual(prototype_elements, empty_fixed_array), &loop_body);
      Branch(WordEqual(prototype_elements, empty_slow_element_dictionary),
             &loop_body, possibly_elements);
    }
  }
}
1040 :
1041 1848 : void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
1042 : Label* if_false) {
1043 1848 : GotoIf(TaggedIsSmi(object), if_false);
1044 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1045 1848 : Branch(IsJSReceiver(object), if_true, if_false);
1046 1848 : }
1047 :
// Jumps to {if_true} when the --force-slow-path runtime flag is set.
// Compiles to a no-op unless V8_ENABLE_FORCE_SLOW_PATH is defined.
void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
#ifdef V8_ENABLE_FORCE_SLOW_PATH
  Node* const force_slow_path_addr =
      ExternalConstant(ExternalReference::force_slow_path(isolate()));
  Node* const force_slow = Load(MachineType::Uint8(), force_slow_path_addr);

  GotoIf(force_slow, if_true);
#endif
}
1057 :
1058 4 : void CodeStubAssembler::GotoIfDebugExecutionModeChecksSideEffects(
1059 : Label* if_true) {
1060 : STATIC_ASSERT(sizeof(DebugInfo::ExecutionMode) >= sizeof(int32_t));
1061 :
1062 : TNode<ExternalReference> execution_mode_address = ExternalConstant(
1063 4 : ExternalReference::debug_execution_mode_address(isolate()));
1064 : TNode<Int32T> execution_mode =
1065 4 : UncheckedCast<Int32T>(Load(MachineType::Int32(), execution_mode_address));
1066 :
1067 8 : GotoIf(Word32Equal(execution_mode, Int32Constant(DebugInfo::kSideEffects)),
1068 4 : if_true);
1069 4 : }
1070 :
// Low-level bump-pointer allocation of {size_in_bytes} against the space
// described by {top_address}/{limit_address}. Falls back to the runtime
// when the bump area is exhausted, when the size exceeds the regular-object
// limit (with kAllowLargeObjectAllocation), or when kPretenured is set.
// With kDoubleAlignment the result is 8-byte aligned by inserting a
// one-word filler when {top} is unaligned.
TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
                                                 AllocationFlags flags,
                                                 TNode<RawPtrT> top_address,
                                                 TNode<RawPtrT> limit_address) {
  Label if_out_of_memory(this, Label::kDeferred);

  // TODO(jgruber,jkummerow): Extract the slow paths (= probably everything
  // but bump pointer allocation) into a builtin to save code space. The
  // size_in_bytes check may be moved there as well since a non-smi
  // size_in_bytes probably doesn't fit into the bump pointer region
  // (double-check that).

  intptr_t size_in_bytes_constant;
  bool size_in_bytes_is_constant = false;
  if (ToIntPtrConstant(size_in_bytes, size_in_bytes_constant)) {
    size_in_bytes_is_constant = true;
    CHECK(Internals::IsValidSmi(size_in_bytes_constant));
    CHECK_GT(size_in_bytes_constant, 0);
  } else {
    // A dynamic size must be a positive Smi; anything else indicates the
    // caller computed a bogus (e.g. overflowed) size.
    GotoIfNot(IsValidPositiveSmi(size_in_bytes), &if_out_of_memory);
  }

  TNode<RawPtrT> top =
      UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), top_address));
  TNode<RawPtrT> limit =
      UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), limit_address));

  // If there's not enough space, call the runtime.
  TVARIABLE(Object, result);
  Label runtime_call(this, Label::kDeferred), no_runtime_call(this), out(this);

  bool needs_double_alignment = flags & kDoubleAlignment;

  if (flags & kAllowLargeObjectAllocation) {
    // Sizes above the regular-object limit go straight to LO_SPACE via the
    // runtime.
    Label next(this);
    GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);

    TNode<Smi> runtime_flags = SmiConstant(
        Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
                     AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
    result = CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                         SmiTag(size_in_bytes), runtime_flags);
    Goto(&out);

    BIND(&next);
  }

  TVARIABLE(IntPtrT, adjusted_size, size_in_bytes);

  if (needs_double_alignment) {
    // If {top} is not double-aligned, reserve one extra word for a filler.
    Label next(this);
    GotoIfNot(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &next);

    adjusted_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
    Goto(&next);

    BIND(&next);
  }

  TNode<IntPtrT> new_top =
      IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value());

  Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
         &no_runtime_call);

  BIND(&runtime_call);
  {
    if (flags & kPretenured) {
      TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
          AllocateDoubleAlignFlag::encode(needs_double_alignment) |
          AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
      result = CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                           SmiTag(size_in_bytes), runtime_flags);
    } else {
      result = CallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
                           SmiTag(size_in_bytes));
    }
    Goto(&out);
  }

  // When there is enough space, return `top' and bump it up.
  BIND(&no_runtime_call);
  {
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                        new_top);

    TVARIABLE(IntPtrT, address, UncheckedCast<IntPtrT>(top));

    if (needs_double_alignment) {
      // Only pad when the extra word was actually reserved above.
      Label next(this);
      GotoIf(IntPtrEqual(adjusted_size.value(), size_in_bytes), &next);

      // Store a filler and increase the address by 4.
      StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
                          LoadRoot(RootIndex::kOnePointerFillerMap));
      address = IntPtrAdd(UncheckedCast<IntPtrT>(top), IntPtrConstant(4));
      Goto(&next);

      BIND(&next);
    }

    result = BitcastWordToTagged(
        IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
    Goto(&out);
  }

  if (!size_in_bytes_is_constant) {
    BIND(&if_out_of_memory);
    CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
                NoContextConstant());
    Unreachable();
  }

  BIND(&out);
  return UncheckedCast<HeapObject>(result.value());
}
1187 :
1188 5076 : TNode<HeapObject> CodeStubAssembler::AllocateRawUnaligned(
1189 : TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1190 : TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1191 : DCHECK_EQ(flags & kDoubleAlignment, 0);
1192 5076 : return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
1193 : }
1194 :
// Like AllocateRaw, but guarantees 8-byte alignment of the result. On
// 64-bit hosts every allocation is naturally double-aligned, so the flag is
// stripped instead of forcing the alignment path.
TNode<HeapObject> CodeStubAssembler::AllocateRawDoubleAligned(
    TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
    TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
#if defined(V8_HOST_ARCH_32_BIT)
  return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
                     limit_address);
#elif defined(V8_HOST_ARCH_64_BIT)
  // Allocation on 64 bit machine is naturally double aligned
  return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
                     limit_address);
#else
#error Architecture not supported
#endif
}
1209 :
1210 11552 : TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(
1211 : TNode<IntPtrT> size_in_bytes, AllocationFlags flags) {
1212 : DCHECK(flags == kNone || flags == kDoubleAlignment);
1213 : CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
1214 11552 : return Allocate(size_in_bytes, flags);
1215 : }
1216 :
// Central allocation entry point. Uses the Turbofan-optimized allocation
// node when neither double alignment nor large-object support is requested;
// otherwise performs explicit bump-pointer allocation against the new- or
// old-space top/limit, chosen by the kPretenured flag.
TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
                                              AllocationFlags flags) {
  Comment("Allocate");
  bool const new_space = !(flags & kPretenured);
  if (!(flags & kAllowLargeObjectAllocation)) {
    intptr_t size_constant;
    if (ToIntPtrConstant(size_in_bytes, size_constant)) {
      CHECK_LE(size_constant, kMaxRegularHeapObjectSize);
    }
  }
  if (!(flags & kDoubleAlignment) && !(flags & kAllowLargeObjectAllocation)) {
    return OptimizedAllocate(size_in_bytes, new_space
                                                ? PretenureFlag::NOT_TENURED
                                                : PretenureFlag::TENURED);
  }
  TNode<ExternalReference> top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  // The limit address is derived from the top address below; these DCHECKs
  // guarantee the two external references are laid out adjacently.
  DCHECK_EQ(kTaggedSize,
            ExternalReference::new_space_allocation_limit_address(isolate())
                    .address() -
                ExternalReference::new_space_allocation_top_address(isolate())
                    .address());
  DCHECK_EQ(kTaggedSize,
            ExternalReference::old_space_allocation_limit_address(isolate())
                    .address() -
                ExternalReference::old_space_allocation_top_address(isolate())
                    .address());
  TNode<IntPtrT> limit_address = IntPtrAdd(
      ReinterpretCast<IntPtrT>(top_address), IntPtrConstant(kTaggedSize));

  if (flags & kDoubleAlignment) {
    return AllocateRawDoubleAligned(size_in_bytes, flags,
                                    ReinterpretCast<RawPtrT>(top_address),
                                    ReinterpretCast<RawPtrT>(limit_address));
  } else {
    return AllocateRawUnaligned(size_in_bytes, flags,
                                ReinterpretCast<RawPtrT>(top_address),
                                ReinterpretCast<RawPtrT>(limit_address));
  }
}
1259 :
1260 1584 : TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
1261 : AllocationFlags flags) {
1262 1584 : CHECK(flags == kNone || flags == kDoubleAlignment);
1263 : DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
1264 1584 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1265 : }
1266 :
1267 35160 : TNode<HeapObject> CodeStubAssembler::Allocate(int size_in_bytes,
1268 : AllocationFlags flags) {
1269 35160 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1270 : }
1271 :
1272 5320 : TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1273 : TNode<IntPtrT> offset) {
1274 : return UncheckedCast<HeapObject>(
1275 5320 : BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset)));
1276 : }
1277 :
1278 4312 : TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1279 : int offset) {
1280 4312 : return InnerAllocate(previous, IntPtrConstant(offset));
1281 : }
1282 :
1283 6060 : TNode<BoolT> CodeStubAssembler::IsRegularHeapObjectSize(TNode<IntPtrT> size) {
1284 : return UintPtrLessThanOrEqual(size,
1285 6060 : IntPtrConstant(kMaxRegularHeapObjectSize));
1286 : }
1287 :
// Branches on the ECMAScript ToBoolean conversion of {value}: false, Smi 0,
// the empty string, undetectable objects (undefined/null/document.all),
// HeapNumbers that are 0.0/-0.0/NaN, and BigInt zero go to {if_false};
// everything else goes to {if_true}.
void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
                                                Label* if_false) {
  Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
      if_bigint(this, Label::kDeferred);
  // Rule out false {value}.
  GotoIf(WordEqual(value, FalseConstant()), if_false);

  // Check if {value} is a Smi or a HeapObject.
  Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);

  BIND(&if_smi);
  {
    // The {value} is a Smi, only need to check against zero.
    BranchIfSmiEqual(CAST(value), SmiConstant(0), if_false, if_true);
  }

  BIND(&if_notsmi);
  {
    // Check if {value} is the empty string.
    GotoIf(IsEmptyString(value), if_false);

    // The {value} is a HeapObject, load its map.
    Node* value_map = LoadMap(value);

    // Only null, undefined and document.all have the undetectable bit set,
    // so we can return false immediately when that bit is set.
    GotoIf(IsUndetectableMap(value_map), if_false);

    // We still need to handle numbers specially, but all other {value}s
    // that make it here yield true.
    GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
    Branch(IsBigInt(value), &if_bigint, if_true);

    BIND(&if_heapnumber);
    {
      // Load the floating point value of {value}.
      Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
                                          MachineType::Float64());

      // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
      Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
             if_true, if_false);
    }

    BIND(&if_bigint);
    {
      // BigInt truthiness (zero vs. non-zero digits) is decided by the
      // runtime.
      Node* result =
          CallRuntime(Runtime::kBigIntToBoolean, NoContextConstant(), value);
      CSA_ASSERT(this, IsBoolean(result));
      Branch(WordEqual(result, TrueConstant()), if_true, if_false);
    }
  }
}
1341 :
1342 2016 : Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
1343 2016 : Node* frame_pointer = LoadParentFramePointer();
1344 2016 : return Load(rep, frame_pointer, IntPtrConstant(offset));
1345 : }
1346 :
1347 3204 : Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
1348 : MachineType rep) {
1349 3204 : return Load(rep, buffer, IntPtrConstant(offset));
1350 : }
1351 :
1352 1014444 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1353 : int offset, MachineType rep) {
1354 : CSA_ASSERT(this, IsStrong(object));
1355 1014444 : return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
1356 : }
1357 :
1358 9024 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1359 : SloppyTNode<IntPtrT> offset,
1360 : MachineType rep) {
1361 : CSA_ASSERT(this, IsStrong(object));
1362 9024 : return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
1363 : }
1364 :
// Loads a Smi-valued field and returns its untagged value as an IntPtrT.
// With 32-bit Smis the payload occupies one half of the 8-byte slot, so
// only that 32-bit half is loaded (the +4 selects the payload half on
// little-endian targets) instead of loading and shifting a full word.
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
    SloppyTNode<HeapObject> object, int offset) {
  if (SmiValuesAre32Bits()) {
#if V8_TARGET_LITTLE_ENDIAN
    offset += 4;
#endif
    return ChangeInt32ToIntPtr(
        LoadObjectField(object, offset, MachineType::Int32()));
  } else {
    return SmiToIntPtr(
        LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}
1378 :
// Loads a Smi-valued field and returns its untagged value as an Int32T.
// With 32-bit Smis only the payload half-word of the slot is loaded (the +4
// selects it on little-endian targets); otherwise the Smi is untagged.
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
                                                                 int offset) {
  if (SmiValuesAre32Bits()) {
#if V8_TARGET_LITTLE_ENDIAN
    offset += 4;
#endif
    return UncheckedCast<Int32T>(
        LoadObjectField(object, offset, MachineType::Int32()));
  } else {
    return SmiToInt32(
        LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}
1392 :
// Loads a Smi at {base} + {index} (an untagged address) and returns it
// untagged. With 32-bit Smis only the payload half-word is loaded (the +4
// selects it on little-endian targets).
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
  if (SmiValuesAre32Bits()) {
#if V8_TARGET_LITTLE_ENDIAN
    index += 4;
#endif
    return ChangeInt32ToIntPtr(
        Load(MachineType::Int32(), base, IntPtrConstant(index)));
  } else {
    return SmiToIntPtr(
        Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
  }
}
1405 :
// Stores the untagged word {value} at {base} + {offset} as a Smi. With
// 32-bit Smis this writes the truncated 32-bit payload into the payload
// half of the slot and zero (the Smi tag) into the other half, with the two
// half-word offsets swapped on little-endian targets; otherwise it stores a
// normally tagged Smi.
void CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
  if (SmiValuesAre32Bits()) {
    int zero_offset = offset + 4;
    int payload_offset = offset;
#if V8_TARGET_LITTLE_ENDIAN
    std::swap(zero_offset, payload_offset);
#endif
    StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                        IntPtrConstant(zero_offset), Int32Constant(0));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                        IntPtrConstant(payload_offset),
                        TruncateInt64ToInt32(value));
  } else {
    StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
                        IntPtrConstant(offset), SmiTag(value));
  }
}
1423 :
1424 86196 : TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1425 : SloppyTNode<HeapNumber> object) {
1426 : return TNode<Float64T>::UncheckedCast(LoadObjectField(
1427 86196 : object, HeapNumber::kValueOffset, MachineType::Float64()));
1428 : }
1429 :
1430 207032 : TNode<Map> CodeStubAssembler::LoadMap(SloppyTNode<HeapObject> object) {
1431 : return UncheckedCast<Map>(LoadObjectField(object, HeapObject::kMapOffset,
1432 207032 : MachineType::TaggedPointer()));
1433 : }
1434 :
1435 69076 : TNode<Int32T> CodeStubAssembler::LoadInstanceType(
1436 : SloppyTNode<HeapObject> object) {
1437 69076 : return LoadMapInstanceType(LoadMap(object));
1438 : }
1439 :
1440 10024 : TNode<BoolT> CodeStubAssembler::HasInstanceType(SloppyTNode<HeapObject> object,
1441 : InstanceType instance_type) {
1442 10024 : return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1443 : }
1444 :
1445 728 : TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
1446 : SloppyTNode<HeapObject> object, InstanceType instance_type) {
1447 728 : return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1448 : }
1449 :
1450 0 : TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
1451 : SloppyTNode<HeapObject> any_tagged, InstanceType type) {
1452 : /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
1453 0 : TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
1454 : return Select<BoolT>(
1455 0 : tagged_is_smi, [=]() { return tagged_is_smi; },
1456 0 : [=]() { return DoesntHaveInstanceType(any_tagged, type); });
1457 : }
1458 :
1459 3756 : TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
1460 : SloppyTNode<JSObject> object) {
1461 : CSA_SLOW_ASSERT(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))));
1462 3756 : TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
1463 7512 : return Select<HeapObject>(TaggedIsSmi(properties),
1464 3756 : [=] { return EmptyFixedArrayConstant(); },
1465 15024 : [=] { return CAST(properties); });
1466 : }
1467 :
1468 6344 : TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
1469 : SloppyTNode<JSObject> object) {
1470 : CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
1471 6344 : TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
1472 12688 : return Select<HeapObject>(TaggedIsSmi(properties),
1473 6344 : [=] { return EmptyPropertyDictionaryConstant(); },
1474 25376 : [=] { return CAST(properties); });
1475 : }
1476 :
1477 14188 : TNode<Number> CodeStubAssembler::LoadJSArrayLength(SloppyTNode<JSArray> array) {
1478 : CSA_ASSERT(this, IsJSArray(array));
1479 14188 : return CAST(LoadObjectField(array, JSArray::kLengthOffset));
1480 : }
1481 :
1482 0 : TNode<Object> CodeStubAssembler::LoadJSArgumentsObjectWithLength(
1483 : SloppyTNode<JSArgumentsObjectWithLength> array) {
1484 0 : return LoadObjectField(array, JSArgumentsObjectWithLength::kLengthOffset);
1485 : }
1486 :
1487 4496 : TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(
1488 : SloppyTNode<JSArray> array) {
1489 4496 : TNode<Object> length = LoadJSArrayLength(array);
1490 : CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(array)));
1491 : // JSArray length is always a positive Smi for fast arrays.
1492 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
1493 4496 : return UncheckedCast<Smi>(length);
1494 : }
1495 :
1496 20164 : TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
1497 : SloppyTNode<FixedArrayBase> array) {
1498 : CSA_SLOW_ASSERT(this, IsNotWeakFixedArraySubclass(array));
1499 20164 : return CAST(LoadObjectField(array, FixedArrayBase::kLengthOffset));
1500 : }
1501 :
1502 114112 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
1503 : SloppyTNode<FixedArrayBase> array) {
1504 114112 : return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1505 : }
1506 :
1507 0 : TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
1508 : TNode<FeedbackVector> vector) {
1509 : return ChangeInt32ToIntPtr(
1510 0 : LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
1511 : }
1512 :
1513 0 : TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
1514 : TNode<WeakFixedArray> array) {
1515 0 : return CAST(LoadObjectField(array, WeakFixedArray::kLengthOffset));
1516 : }
1517 :
1518 1184 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
1519 : SloppyTNode<WeakFixedArray> array) {
1520 1184 : return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
1521 : }
1522 :
1523 2136 : TNode<Int32T> CodeStubAssembler::LoadNumberOfDescriptors(
1524 : TNode<DescriptorArray> array) {
1525 : return UncheckedCast<Int32T>(
1526 : LoadObjectField(array, DescriptorArray::kNumberOfDescriptorsOffset,
1527 2136 : MachineType::Int16()));
1528 : }
1529 :
1530 28804 : TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
1531 : CSA_SLOW_ASSERT(this, IsMap(map));
1532 : return UncheckedCast<Int32T>(
1533 28804 : LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8()));
1534 : }
1535 :
1536 9972 : TNode<Int32T> CodeStubAssembler::LoadMapBitField2(SloppyTNode<Map> map) {
1537 : CSA_SLOW_ASSERT(this, IsMap(map));
1538 : return UncheckedCast<Int32T>(
1539 9972 : LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()));
1540 : }
1541 :
1542 7008 : TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(SloppyTNode<Map> map) {
1543 : CSA_SLOW_ASSERT(this, IsMap(map));
1544 : return UncheckedCast<Uint32T>(
1545 7008 : LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()));
1546 : }
1547 :
1548 124828 : TNode<Int32T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
1549 : return UncheckedCast<Int32T>(
1550 124828 : LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint16()));
1551 : }
1552 :
1553 9132 : TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
1554 : CSA_SLOW_ASSERT(this, IsMap(map));
1555 9132 : Node* bit_field2 = LoadMapBitField2(map);
1556 9132 : return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
1557 : }
1558 :
1559 2352 : TNode<Int32T> CodeStubAssembler::LoadElementsKind(
1560 : SloppyTNode<HeapObject> object) {
1561 2352 : return LoadMapElementsKind(LoadMap(object));
1562 : }
1563 :
1564 9640 : TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
1565 : SloppyTNode<Map> map) {
1566 : CSA_SLOW_ASSERT(this, IsMap(map));
1567 9640 : return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
1568 : }
1569 :
1570 16804 : TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {
1571 : CSA_SLOW_ASSERT(this, IsMap(map));
1572 16804 : return CAST(LoadObjectField(map, Map::kPrototypeOffset));
1573 : }
1574 :
// Returns the PrototypeInfo attached to {map}, or jumps to
// {if_no_proto_info} when the transitions-or-prototype-info slot holds
// anything else: a Smi, a cleared or weak reference, or a strong heap
// object whose map is not the PrototypeInfo map.
TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
    SloppyTNode<Map> map, Label* if_no_proto_info) {
  Label if_strong_heap_object(this);
  CSA_ASSERT(this, IsMap(map));
  TNode<MaybeObject> maybe_prototype_info =
      LoadMaybeWeakObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
  TVARIABLE(Object, prototype_info);
  DispatchMaybeObject(maybe_prototype_info, if_no_proto_info, if_no_proto_info,
                      if_no_proto_info, &if_strong_heap_object,
                      &prototype_info);

  BIND(&if_strong_heap_object);
  // A strong object in this slot may still be e.g. a transition array; only
  // accept it when its map is the PrototypeInfo map.
  GotoIfNot(WordEqual(LoadMap(CAST(prototype_info.value())),
                      LoadRoot(RootIndex::kPrototypeInfoMap)),
            if_no_proto_info);
  return CAST(prototype_info.value());
}
1592 :
1593 4608 : TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(
1594 : SloppyTNode<Map> map) {
1595 : CSA_SLOW_ASSERT(this, IsMap(map));
1596 : return ChangeInt32ToIntPtr(LoadObjectField(
1597 4608 : map, Map::kInstanceSizeInWordsOffset, MachineType::Uint8()));
1598 : }
1599 :
 : // Reads the in-object-properties-start field (valid for JSObject maps only;
 : // the byte is overloaded with the constructor-function index for primitives).
1600 2132 : TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
1601 : SloppyTNode<Map> map) {
1602 : CSA_SLOW_ASSERT(this, IsMap(map));
1603 : // See Map::GetInObjectPropertiesStartInWords() for details.
1604 : CSA_ASSERT(this, IsJSObjectMap(map));
1605 : return ChangeInt32ToIntPtr(LoadObjectField(
1606 : map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1607 2132 : MachineType::Uint8()));
1608 : }
1609 :
 : // Reads the constructor-function index (valid for primitive-instance maps
 : // only; the same byte stores the in-object-properties start for JSObjects).
1610 56 : TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1611 : SloppyTNode<Map> map) {
1612 : CSA_SLOW_ASSERT(this, IsMap(map));
1613 : // See Map::GetConstructorFunctionIndex() for details.
1614 : CSA_ASSERT(this, IsPrimitiveInstanceType(LoadMapInstanceType(map)));
1615 : return ChangeInt32ToIntPtr(LoadObjectField(
1616 : map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1617 56 : MachineType::Uint8()));
1618 : }
1619 :
 : // Returns the map's constructor. The kConstructorOrBackPointer slot may hold
 : // a back pointer (another Map) instead, so follow the chain of maps until a
 : // non-map value (Smi or non-Map heap object) is reached.
1620 0 : TNode<Object> CodeStubAssembler::LoadMapConstructor(SloppyTNode<Map> map) {
1621 : CSA_SLOW_ASSERT(this, IsMap(map));
1622 0 : TVARIABLE(Object, result,
1623 : LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1624 :
1625 0 : Label done(this), loop(this, &result);
1626 0 : Goto(&loop);
1627 0 : BIND(&loop);
1628 : {
 : // A Smi or a non-Map heap object terminates the back-pointer chain.
1629 0 : GotoIf(TaggedIsSmi(result.value()), &done);
1630 : Node* is_map_type =
1631 0 : InstanceTypeEqual(LoadInstanceType(CAST(result.value())), MAP_TYPE);
1632 0 : GotoIfNot(is_map_type, &done);
1633 0 : result = LoadObjectField(CAST(result.value()),
1634 0 : Map::kConstructorOrBackPointerOffset);
1635 0 : Goto(&loop);
1636 : }
1637 0 : BIND(&done);
1638 0 : return result.value();
1639 : }
1640 :
 : // Decodes the enum-cache length from the map's bit_field3.
1641 840 : Node* CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
1642 : CSA_SLOW_ASSERT(this, IsMap(map));
1643 840 : Node* bit_field3 = LoadMapBitField3(map);
1644 840 : return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
1645 : }
1646 :
 : // Returns the map's back pointer, or undefined when the overloaded
 : // constructor-or-back-pointer slot does not hold a Map.
1647 0 : TNode<Object> CodeStubAssembler::LoadMapBackPointer(SloppyTNode<Map> map) {
1648 : TNode<HeapObject> object =
1649 0 : CAST(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1650 0 : return Select<Object>(IsMap(object), [=] { return object; },
1651 0 : [=] { return UndefinedConstant(); });
1652 : }
1653 :
 : // Jumps to |bailout| unless the receiver has only "simple" properties:
 : // not a custom-elements receiver, not a dictionary-mode map, and no hidden
 : // prototype. On the fall-through path returns the map's bit_field3 so the
 : // caller can reuse it without reloading.
1654 112 : TNode<Uint32T> CodeStubAssembler::EnsureOnlyHasSimpleProperties(
1655 : TNode<Map> map, TNode<Int32T> instance_type, Label* bailout) {
1656 : // This check can have false positives, since it applies to any JSValueType.
1657 112 : GotoIf(IsCustomElementsReceiverInstanceType(instance_type), bailout);
1658 :
1659 112 : TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
 : // Test both bits with a single masked compare.
1660 : GotoIf(IsSetWord32(bit_field3, Map::IsDictionaryMapBit::kMask |
1661 224 : Map::HasHiddenPrototypeBit::kMask),
1662 112 : bailout);
1663 :
1664 112 : return bit_field3;
1665 : }
1666 :
 : // Extracts the identity hash from a JSReceiver's kPropertiesOrHashOffset
 : // slot, which is overloaded four ways:
 : //   - Smi: the hash itself;
 : //   - PropertyArray: hash packed into the length-and-hash field;
 : //   - NameDictionary: hash stored at kObjectHashIndex;
 : //   - any other FixedArray (e.g. empty properties): no hash yet.
 : // If |if_no_hash| is provided, jumps there when the result is the
 : // kNoHashSentinel; otherwise the sentinel is returned to the caller.
1667 560 : TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
1668 : SloppyTNode<Object> receiver, Label* if_no_hash) {
1669 560 : TVARIABLE(IntPtrT, var_hash);
1670 1120 : Label done(this), if_smi(this), if_property_array(this),
1671 1120 : if_property_dictionary(this), if_fixed_array(this);
1672 :
1673 : TNode<Object> properties_or_hash =
1674 560 : LoadObjectField(TNode<HeapObject>::UncheckedCast(receiver),
1675 560 : JSReceiver::kPropertiesOrHashOffset);
1676 560 : GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);
1677 :
1678 : TNode<HeapObject> properties =
1679 560 : TNode<HeapObject>::UncheckedCast(properties_or_hash);
1680 560 : TNode<Int32T> properties_instance_type = LoadInstanceType(properties);
1681 :
1682 1120 : GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
1683 560 : &if_property_array);
1684 1120 : Branch(InstanceTypeEqual(properties_instance_type, NAME_DICTIONARY_TYPE),
1685 560 : &if_property_dictionary, &if_fixed_array);
1686 :
1687 560 : BIND(&if_fixed_array);
1688 : {
 : // Plain FixedArray backing store: no identity hash has been set.
1689 560 : var_hash = IntPtrConstant(PropertyArray::kNoHashSentinel);
1690 560 : Goto(&done);
1691 : }
1692 :
1693 560 : BIND(&if_smi);
1694 : {
 : // The slot stores the hash directly as a Smi.
1695 560 : var_hash = SmiUntag(TNode<Smi>::UncheckedCast(properties_or_hash));
1696 560 : Goto(&done);
1697 : }
1698 :
1699 560 : BIND(&if_property_array);
1700 : {
 : // Hash is bit-packed together with the length; decode it.
1701 : TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
1702 560 : properties, PropertyArray::kLengthAndHashOffset);
1703 1120 : var_hash = TNode<IntPtrT>::UncheckedCast(
1704 1680 : DecodeWord<PropertyArray::HashField>(length_and_hash));
1705 560 : Goto(&done);
1706 : }
1707 :
1708 560 : BIND(&if_property_dictionary);
1709 : {
1710 1120 : var_hash = SmiUntag(CAST(LoadFixedArrayElement(
1711 560 : CAST(properties), NameDictionary::kObjectHashIndex)));
1712 560 : Goto(&done);
1713 : }
1714 :
1715 560 : BIND(&done);
1716 560 : if (if_no_hash != nullptr) {
1717 : GotoIf(IntPtrEqual(var_hash.value(),
1718 448 : IntPtrConstant(PropertyArray::kNoHashSentinel)),
1719 224 : if_no_hash);
1720 : }
1721 1120 : return var_hash.value();
1722 : }
1723 :
 : // Returns the raw 32-bit hash field of a Name (flags still included).
1724 16992 : TNode<Uint32T> CodeStubAssembler::LoadNameHashField(SloppyTNode<Name> name) {
1725 : CSA_ASSERT(this, IsName(name));
1726 16992 : return LoadObjectField<Uint32T>(name, Name::kHashFieldOffset);
1727 : }
1728 :
 : // Returns the Name's hash value with the flag bits shifted out. When
 : // |if_hash_not_computed| is given, jumps there if the hash has not been
 : // computed yet; otherwise the caller receives whatever the shifted raw
 : // field contains.
1729 7080 : TNode<Uint32T> CodeStubAssembler::LoadNameHash(SloppyTNode<Name> name,
1730 : Label* if_hash_not_computed) {
1731 7080 : TNode<Uint32T> hash_field = LoadNameHashField(name);
1732 7080 : if (if_hash_not_computed != nullptr) {
1733 672 : GotoIf(IsSetWord32(hash_field, Name::kHashNotComputedMask),
1734 336 : if_hash_not_computed);
1735 : }
1736 7080 : return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
1737 : }
1738 :
 : // Returns the string length as a Smi (tagged form of the word variant).
1739 6220 : TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(
1740 : SloppyTNode<String> string) {
1741 6220 : return SmiFromIntPtr(LoadStringLengthAsWord(string));
1742 : }
1743 :
 : // Returns the string length zero-extended from uint32 to a signed word.
1744 12052 : TNode<IntPtrT> CodeStubAssembler::LoadStringLengthAsWord(
1745 : SloppyTNode<String> string) {
1746 12052 : return Signed(ChangeUint32ToWord(LoadStringLengthAsWord32(string)));
1747 : }
1748 :
 : // Reads the raw 32-bit length field of a String.
1749 12388 : TNode<Uint32T> CodeStubAssembler::LoadStringLengthAsWord32(
1750 : SloppyTNode<String> string) {
1751 : CSA_ASSERT(this, IsString(string));
1752 12388 : return LoadObjectField<Uint32T>(string, String::kLengthOffset);
1753 : }
1754 :
 : // Returns an untagged interior pointer to the first character of a
 : // sequential string. Valid for both one- and two-byte strings because
 : // their headers have the same size (static-asserted below).
1755 56 : Node* CodeStubAssembler::PointerToSeqStringData(Node* seq_string) {
1756 : CSA_ASSERT(this, IsString(seq_string));
1757 : CSA_ASSERT(this,
1758 : IsSequentialStringInstanceType(LoadInstanceType(seq_string)));
1759 : STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
1760 : return IntPtrAdd(
1761 : BitcastTaggedToWord(seq_string),
1762 56 : IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag));
1763 : }
1764 :
 : // Returns the wrapped primitive of a JSValue wrapper object.
1765 5724 : Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
1766 : CSA_ASSERT(this, IsJSValue(object));
1767 5724 : return LoadObjectField(object, JSValue::kValueOffset);
1768 : }
1769 :
 : // Four-way dispatch on a MaybeObject:
 : //   - Smi -> |if_smi|, *extracted = the Smi;
 : //   - cleared weak reference -> |if_cleared| (*extracted untouched);
 : //   - strong heap object -> |if_strong|, *extracted = the object;
 : //   - weak heap object -> |if_weak|, *extracted = object with the weak
 : //     tag bit stripped.
1770 392 : void CodeStubAssembler::DispatchMaybeObject(TNode<MaybeObject> maybe_object,
1771 : Label* if_smi, Label* if_cleared,
1772 : Label* if_weak, Label* if_strong,
1773 : TVariable<Object>* extracted) {
1774 784 : Label inner_if_smi(this), inner_if_strong(this);
1775 :
1776 392 : GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);
1777 :
1778 392 : GotoIf(IsCleared(maybe_object), if_cleared);
1779 :
 : // Strong references carry kHeapObjectTag in the low tag bits.
1780 : GotoIf(Word32Equal(Word32And(TruncateIntPtrToInt32(
1781 784 : BitcastMaybeObjectToWord(maybe_object)),
1782 1568 : Int32Constant(kHeapObjectTagMask)),
1783 1568 : Int32Constant(kHeapObjectTag)),
1784 392 : &inner_if_strong);
1785 :
 : // Weak case: clear the weak bit to recover the plain heap-object pointer.
1786 784 : *extracted =
1787 : BitcastWordToTagged(WordAnd(BitcastMaybeObjectToWord(maybe_object),
1788 1176 : IntPtrConstant(~kWeakHeapObjectMask)));
1789 392 : Goto(if_weak);
1790 :
1791 392 : BIND(&inner_if_smi);
1792 392 : *extracted = CAST(maybe_object);
1793 392 : Goto(if_smi);
1794 :
1795 392 : BIND(&inner_if_strong);
1796 392 : *extracted = CAST(maybe_object);
1797 784 : Goto(if_strong);
1798 392 : }
1799 :
 : // True iff the MaybeObject's low tag bits equal kHeapObjectTag, i.e. it is
 : // a strong heap-object reference (not a Smi, weak, or cleared value).
1800 504 : TNode<BoolT> CodeStubAssembler::IsStrong(TNode<MaybeObject> value) {
1801 : return WordEqual(WordAnd(BitcastMaybeObjectToWord(value),
1802 1008 : IntPtrConstant(kHeapObjectTagMask)),
1803 1512 : IntPtrConstant(kHeapObjectTag));
1804 : }
1805 :
 : // Casts the MaybeObject to HeapObject; jumps to |if_not_strong| otherwise.
1806 504 : TNode<HeapObject> CodeStubAssembler::GetHeapObjectIfStrong(
1807 : TNode<MaybeObject> value, Label* if_not_strong) {
1808 504 : GotoIfNot(IsStrong(value), if_not_strong);
1809 504 : return CAST(value);
1810 : }
1811 :
 : // True iff the MaybeObject carries the weak tag (covers both live weak
 : // references and the cleared-weak sentinel). Only the low 32 bits are
 : // inspected since the tag lives there.
1812 504 : TNode<BoolT> CodeStubAssembler::IsWeakOrCleared(TNode<MaybeObject> value) {
1813 : return Word32Equal(
1814 1008 : Word32And(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1815 2016 : Int32Constant(kHeapObjectTagMask)),
1816 1512 : Int32Constant(kWeakHeapObjectTag));
1817 : }
1818 :
 : // True iff the MaybeObject is the cleared-weak-reference sentinel
 : // (comparison against the low 32 bits of the cleared value suffices).
1819 7000 : TNode<BoolT> CodeStubAssembler::IsCleared(TNode<MaybeObject> value) {
1820 14000 : return Word32Equal(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1821 21000 : Int32Constant(kClearedWeakHeapObjectLower32));
1822 : }
1823 :
 : // Negation of IsCleared, expressed directly to save a Word32Not.
1824 1288 : TNode<BoolT> CodeStubAssembler::IsNotCleared(TNode<MaybeObject> value) {
1825 2576 : return Word32NotEqual(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1826 3864 : Int32Constant(kClearedWeakHeapObjectLower32));
1827 : }
1828 :
 : // Unwraps a weak (and not cleared) MaybeObject by masking off the weak tag
 : // bit. The asserts document the precondition; callers must guarantee it.
1829 5992 : TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
1830 : TNode<MaybeObject> value) {
1831 : CSA_ASSERT(this, IsWeakOrCleared(value));
1832 : CSA_ASSERT(this, IsNotCleared(value));
1833 : return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
1834 5992 : BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
1835 : }
1836 :
 : // As above, but branches to |if_cleared| instead of assuming liveness.
1837 4256 : TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
1838 : TNode<MaybeObject> value, Label* if_cleared) {
1839 4256 : GotoIf(IsCleared(value), if_cleared);
1840 4256 : return GetHeapObjectAssumeWeak(value);
1841 : }
1842 :
 : // True iff |object|, with its weak tag bit stripped, points at |value|.
 : // Note: also matches a strong reference to |value| since stripping the
 : // mask leaves strong words unchanged modulo the tag bits.
1843 2184 : TNode<BoolT> CodeStubAssembler::IsWeakReferenceTo(TNode<MaybeObject> object,
1844 : TNode<Object> value) {
1845 : return WordEqual(WordAnd(BitcastMaybeObjectToWord(object),
1846 4368 : IntPtrConstant(~kWeakHeapObjectMask)),
1847 6552 : BitcastTaggedToWord(value));
1848 : }
1849 :
 : // True iff the MaybeObject is bit-identical to |value| (strong reference).
1850 1400 : TNode<BoolT> CodeStubAssembler::IsStrongReferenceTo(TNode<MaybeObject> object,
1851 : TNode<Object> value) {
1852 2800 : return WordEqual(BitcastMaybeObjectToWord(object),
1853 4200 : BitcastTaggedToWord(value));
1854 : }
1855 :
 : // Negation of IsWeakReferenceTo, expressed directly.
1856 1120 : TNode<BoolT> CodeStubAssembler::IsNotWeakReferenceTo(TNode<MaybeObject> object,
1857 : TNode<Object> value) {
1858 : return WordNotEqual(WordAnd(BitcastMaybeObjectToWord(object),
1859 2240 : IntPtrConstant(~kWeakHeapObjectMask)),
1860 3360 : BitcastTaggedToWord(value));
1861 : }
1862 :
 : // Produces the weak-reference encoding of |value| by OR-ing in the weak tag.
1863 2240 : TNode<MaybeObject> CodeStubAssembler::MakeWeak(TNode<HeapObject> value) {
1864 : return ReinterpretCast<MaybeObject>(BitcastWordToTagged(
1865 2240 : WordOr(BitcastTaggedToWord(value), IntPtrConstant(kWeakHeapObjectTag))));
1866 : }
1867 :
 : // LoadArrayLength specializations: return the element count (in slots) of
 : // each array-like type, used by LoadArrayElement's bounds CSA_ASSERT.
1868 : template <>
1869 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<FixedArray> array) {
1870 0 : return LoadAndUntagFixedArrayBaseLength(array);
1871 : }
1872 :
1873 : template <>
1874 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<WeakFixedArray> array) {
1875 0 : return LoadAndUntagWeakFixedArrayLength(array);
1876 : }
1877 :
1878 : template <>
1879 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<PropertyArray> array) {
1880 0 : return LoadPropertyArrayLength(array);
1881 : }
1882 :
1883 : template <>
1884 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
1885 : TNode<DescriptorArray> array) {
 : // DescriptorArray length = number of descriptors * slots per entry.
1886 : return IntPtrMul(ChangeInt32ToIntPtr(LoadNumberOfDescriptors(array)),
1887 0 : IntPtrConstant(DescriptorArray::kEntrySize));
1888 : }
1889 :
1890 : template <>
1891 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
1892 : TNode<TransitionArray> array) {
 : // TransitionArray is WeakFixedArray-shaped, so reuse its length accessor.
1893 0 : return LoadAndUntagWeakFixedArrayLength(array);
1894 : }
1895 :
 : // Loads element |index_node| from any array-like object whose header size
 : // is |array_header_size|. |additional_offset| (bytes, tagged-aligned) is
 : // folded into the offset. The element is returned as a MaybeObject so
 : // callers decide how to treat weak references; |needs_poisoning| controls
 : // Spectre load poisoning. Bounds are checked only via CSA_ASSERT.
1896 : template <typename Array>
1897 117920 : TNode<MaybeObject> CodeStubAssembler::LoadArrayElement(
1898 : TNode<Array> array, int array_header_size, Node* index_node,
1899 : int additional_offset, ParameterMode parameter_mode,
1900 : LoadSensitivity needs_poisoning) {
1901 : CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
1902 : ParameterToIntPtr(index_node, parameter_mode),
1903 : IntPtrConstant(0)));
1904 : DCHECK(IsAligned(additional_offset, kTaggedSize));
1905 117920 : int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
1906 : TNode<IntPtrT> offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
1907 117920 : parameter_mode, header_size);
1908 : CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(array),
1909 : array_header_size));
1910 : return UncheckedCast<MaybeObject>(
1911 117920 : Load(MachineType::AnyTagged(), array, offset, needs_poisoning));
1912 : }
1913 :
 : // Explicit instantiations for the array types used from other
 : // translation units.
1914 : template TNode<MaybeObject>
1915 : CodeStubAssembler::LoadArrayElement<TransitionArray>(TNode<TransitionArray>,
1916 : int, Node*, int,
1917 : ParameterMode,
1918 : LoadSensitivity);
1919 :
1920 : template TNode<MaybeObject>
1921 : CodeStubAssembler::LoadArrayElement<DescriptorArray>(TNode<DescriptorArray>,
1922 : int, Node*, int,
1923 : ParameterMode,
1924 : LoadSensitivity);
1925 :
 : // Emits a CSA_CHECK that |index| + |additional_offset| (converted to an
 : // element count) is below the array length. A no-op when
 : // FLAG_fixed_array_bounds_checks is off. Handles both Smi- and
 : // intptr-indexed callers.
1926 118756 : void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
1927 : Node* index,
1928 : int additional_offset,
1929 : ParameterMode parameter_mode) {
1930 118756 : if (!FLAG_fixed_array_bounds_checks) return;
1931 : DCHECK(IsAligned(additional_offset, kTaggedSize));
1932 118756 : if (parameter_mode == ParameterMode::SMI_PARAMETERS) {
1933 8572 : TNode<Smi> effective_index;
1934 8572 : Smi constant_index;
1935 8572 : bool index_is_constant = ToSmiConstant(index, &constant_index);
1936 8572 : if (index_is_constant) {
 : // Fold the offset into the constant at graph-build time.
1937 8 : effective_index = SmiConstant(Smi::ToInt(constant_index) +
1938 8 : additional_offset / kTaggedSize);
1939 8568 : } else if (additional_offset != 0) {
1940 0 : effective_index =
1941 0 : SmiAdd(CAST(index), SmiConstant(additional_offset / kTaggedSize));
1942 : } else {
1943 8568 : effective_index = CAST(index);
1944 : }
 : // SmiBelow is an unsigned compare, so negative indices also fail.
1945 8572 : CSA_CHECK(this, SmiBelow(effective_index, LoadFixedArrayBaseLength(array)));
1946 : } else {
1947 : // IntPtrAdd does constant-folding automatically.
1948 : TNode<IntPtrT> effective_index =
1949 : IntPtrAdd(UncheckedCast<IntPtrT>(index),
1950 110184 : IntPtrConstant(additional_offset / kTaggedSize));
1951 220368 : CSA_CHECK(this, UintPtrLessThan(effective_index,
1952 110184 : LoadAndUntagFixedArrayBaseLength(array)));
1953 : }
1954 : }
1955 :
 : // Loads a (strong) element from a FixedArray, with an optional runtime
 : // bounds check (|check_bounds|) in front of the shared LoadArrayElement
 : // machinery. Weak arrays are excluded by assert, so the CAST to Object
 : // is safe.
1956 99284 : TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
1957 : TNode<FixedArray> object, Node* index_node, int additional_offset,
1958 : ParameterMode parameter_mode, LoadSensitivity needs_poisoning,
1959 : CheckBounds check_bounds) {
1960 : CSA_ASSERT(this, IsFixedArraySubclass(object));
1961 : CSA_ASSERT(this, IsNotWeakFixedArraySubclass(object));
1962 99284 : if (NeedsBoundsCheck(check_bounds)) {
1963 : FixedArrayBoundsCheck(object, index_node, additional_offset,
1964 82452 : parameter_mode);
1965 : }
1966 : TNode<MaybeObject> element =
1967 : LoadArrayElement(object, FixedArray::kHeaderSize, index_node,
1968 99284 : additional_offset, parameter_mode, needs_poisoning);
1969 99284 : return CAST(element);
1970 : }
1971 :
 : // Loads an element from a PropertyArray (intptr index, no extra offset,
 : // no poisoning). PropertyArray only holds strong references, hence CAST.
1972 1852 : TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
1973 : TNode<PropertyArray> object, SloppyTNode<IntPtrT> index) {
1974 1852 : int additional_offset = 0;
1975 1852 : ParameterMode parameter_mode = INTPTR_PARAMETERS;
1976 1852 : LoadSensitivity needs_poisoning = LoadSensitivity::kSafe;
1977 1852 : return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
1978 : additional_offset, parameter_mode,
1979 : needs_poisoning));
1980 : }
1981 :
 : // Decodes the length from the PropertyArray's packed length-and-hash field.
1982 56 : TNode<IntPtrT> CodeStubAssembler::LoadPropertyArrayLength(
1983 : TNode<PropertyArray> object) {
1984 : TNode<IntPtrT> value =
1985 56 : LoadAndUntagObjectField(object, PropertyArray::kLengthAndHashOffset);
1986 56 : return Signed(DecodeWord<PropertyArray::LengthField>(value));
1987 : }
1988 :
 : // Computes the data pointer of a typed array's elements as
 : // external_pointer + base_pointer; this works for both on-heap and
 : // off-heap backing stores (one of the two addends encodes the other case).
1989 7224 : TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayBackingStore(
1990 : TNode<FixedTypedArrayBase> typed_array) {
1991 : // Backing store = external_pointer + base_pointer.
1992 : Node* external_pointer =
1993 : LoadObjectField(typed_array, FixedTypedArrayBase::kExternalPointerOffset,
1994 7224 : MachineType::Pointer());
1995 : Node* base_pointer =
1996 7224 : LoadObjectField(typed_array, FixedTypedArrayBase::kBasePointerOffset);
1997 : return UncheckedCast<RawPtrT>(
1998 7224 : IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer)));
1999 : }
2000 :
 : // Fast path for on-heap typed-array buffers: the data is at a fixed offset
 : // from the elements object itself, so no field loads are needed. A debug
 : // check compares against the general computation.
2001 56 : TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayOnHeapBackingStore(
2002 : TNode<FixedTypedArrayBase> typed_array) {
2003 : // This is specialized method of retrieving the backing store pointer for on
2004 : // heap allocated typed array buffer. On heap allocated buffer's backing
2005 : // stores are a fixed offset from the pointer to a typed array's elements. See
2006 : // TypedArrayBuiltinsAssembler::AllocateOnHeapElements().
2007 : static const intptr_t fta_base_data_offset =
2008 : FixedTypedArrayBase::kDataOffset - kHeapObjectTag;
2009 :
2010 : TNode<WordT> backing_store = IntPtrAdd(BitcastTaggedToWord(typed_array),
2011 56 : IntPtrConstant(fta_base_data_offset));
2012 :
2013 : #ifdef DEBUG
2014 : // Verify that this is an on heap backing store.
2015 : TNode<RawPtrT> expected_backing_store_pointer =
2016 : LoadFixedTypedArrayBackingStore(typed_array);
2017 : CSA_ASSERT(this, WordEqual(backing_store, expected_backing_store_pointer));
2018 : #endif
2019 :
2020 56 : return UncheckedCast<RawPtrT>(backing_store);
2021 : }
2022 :
 : // Loads a signed 64-bit element from a BigInt64Array's backing store and
 : // boxes it as a BigInt. On 64-bit targets this is a single load; on 32-bit
 : // targets two word loads are combined, with the load order chosen by
 : // target endianness.
2023 784 : Node* CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
2024 : Node* data_pointer, Node* offset) {
2025 784 : if (Is64()) {
2026 : TNode<IntPtrT> value = UncheckedCast<IntPtrT>(
2027 784 : Load(MachineType::IntPtr(), data_pointer, offset));
2028 784 : return BigIntFromInt64(value);
2029 : } else {
2030 : DCHECK(!Is64());
2031 : #if defined(V8_TARGET_BIG_ENDIAN)
2032 : TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
2033 : Load(MachineType::UintPtr(), data_pointer, offset));
2034 : TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
2035 : Load(MachineType::UintPtr(), data_pointer,
2036 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2037 : #else
2038 : TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
2039 0 : Load(MachineType::UintPtr(), data_pointer, offset));
2040 : TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
2041 : Load(MachineType::UintPtr(), data_pointer,
2042 0 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2043 : #endif
2044 0 : return BigIntFromInt32Pair(low, high);
2045 : }
2046 : }
2047 :
 : // (32-bit targets only.) Builds a BigInt from the (low, high) halves of a
 : // signed 64-bit value. Negative inputs are negated via a manual two-word
 : // "0 - x" subtraction with carry so the digits store the magnitude, and
 : // the sign is recorded in the BigInt bitfield. Allocates one digit when
 : // the (possibly negated) high word is zero, two otherwise; zero input
 : // yields the canonical zero-length BigInt.
2048 0 : TNode<BigInt> CodeStubAssembler::BigIntFromInt32Pair(TNode<IntPtrT> low,
2049 : TNode<IntPtrT> high) {
2050 : DCHECK(!Is64());
2051 0 : TVARIABLE(BigInt, var_result);
2052 0 : TVARIABLE(Word32T, var_sign, Int32Constant(BigInt::SignBits::encode(false)));
2053 0 : TVARIABLE(IntPtrT, var_high, high);
2054 0 : TVARIABLE(IntPtrT, var_low, low);
2055 0 : Label high_zero(this), negative(this), allocate_one_digit(this),
2056 0 : allocate_two_digits(this), if_zero(this), done(this);
2057 :
2058 0 : GotoIf(WordEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
2059 0 : Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
2060 0 : &allocate_two_digits);
2061 :
2062 0 : BIND(&high_zero);
2063 0 : Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
2064 0 : &allocate_one_digit);
2065 :
2066 0 : BIND(&negative);
2067 : {
2068 0 : var_sign = Int32Constant(BigInt::SignBits::encode(true));
2069 : // We must negate the value by computing "0 - (high|low)", performing
2070 : // both parts of the subtraction separately and manually taking care
2071 : // of the carry bit (which is 1 iff low != 0).
2072 0 : var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
2073 0 : Label carry(this), no_carry(this);
2074 0 : Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
2075 0 : BIND(&carry);
2076 0 : var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
2077 0 : Goto(&no_carry);
2078 0 : BIND(&no_carry);
2079 0 : var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
2080 : // var_high was non-zero going into this block, but subtracting the
2081 : // carry bit from it could bring us back onto the "one digit" path.
2082 0 : Branch(WordEqual(var_high.value(), IntPtrConstant(0)), &allocate_one_digit,
2083 0 : &allocate_two_digits);
2084 : }
2085 :
2086 0 : BIND(&allocate_one_digit);
2087 : {
2088 0 : var_result = AllocateRawBigInt(IntPtrConstant(1));
2089 : StoreBigIntBitfield(var_result.value(),
2090 : Word32Or(var_sign.value(),
2091 0 : Int32Constant(BigInt::LengthBits::encode(1))));
2092 0 : StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2093 0 : Goto(&done);
2094 : }
2095 :
2096 0 : BIND(&allocate_two_digits);
2097 : {
2098 0 : var_result = AllocateRawBigInt(IntPtrConstant(2));
2099 : StoreBigIntBitfield(var_result.value(),
2100 : Word32Or(var_sign.value(),
2101 0 : Int32Constant(BigInt::LengthBits::encode(2))));
2102 0 : StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2103 0 : StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
2104 0 : Goto(&done);
2105 : }
2106 :
2107 0 : BIND(&if_zero);
2108 0 : var_result = AllocateBigInt(IntPtrConstant(0));
2109 0 : Goto(&done);
2110 :
2111 0 : BIND(&done);
2112 0 : return var_result.value();
2113 : }
2114 :
 : // (64-bit targets only.) Boxes a signed word as a one-digit BigInt: the
 : // digit stores the magnitude (negated for negative inputs) and the sign
 : // lives in the bitfield. Zero maps to the canonical zero-length BigInt.
2115 1288 : TNode<BigInt> CodeStubAssembler::BigIntFromInt64(TNode<IntPtrT> value) {
2116 : DCHECK(Is64());
2117 1288 : TVARIABLE(BigInt, var_result);
2118 2576 : Label done(this), if_positive(this), if_negative(this), if_zero(this);
2119 1288 : GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
 : // Raw allocation: the bitfield is filled in on the taken branch below.
2120 1288 : var_result = AllocateRawBigInt(IntPtrConstant(1));
2121 2576 : Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
2122 1288 : &if_negative);
2123 :
2124 1288 : BIND(&if_positive);
2125 : {
2126 : StoreBigIntBitfield(var_result.value(),
2127 2576 : Int32Constant(BigInt::SignBits::encode(false) |
2128 2576 : BigInt::LengthBits::encode(1)));
2129 1288 : StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
2130 1288 : Goto(&done);
2131 : }
2132 :
2133 1288 : BIND(&if_negative);
2134 : {
2135 : StoreBigIntBitfield(var_result.value(),
2136 2576 : Int32Constant(BigInt::SignBits::encode(true) |
2137 2576 : BigInt::LengthBits::encode(1)));
 : // Digit holds the magnitude: negate via 0 - value.
2138 : StoreBigIntDigit(var_result.value(), 0,
2139 1288 : Unsigned(IntPtrSub(IntPtrConstant(0), value)));
2140 1288 : Goto(&done);
2141 : }
2142 :
2143 1288 : BIND(&if_zero);
2144 : {
2145 1288 : var_result = AllocateBigInt(IntPtrConstant(0));
2146 1288 : Goto(&done);
2147 : }
2148 :
2149 1288 : BIND(&done);
2150 2576 : return var_result.value();
2151 : }
2152 :
 : // Unsigned counterpart of LoadFixedBigInt64ArrayElementAsTagged: loads a
 : // 64-bit element from a BigUint64Array backing store and boxes it as a
 : // BigInt, splitting into two word loads on 32-bit targets.
2153 784 : Node* CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
2154 : Node* data_pointer, Node* offset) {
 : // NOTE(review): these labels appear unused on the 64-bit path — the
 : // function returns before binding them; presumably kept for the
 : // constructor/destructor bookkeeping of Label. Confirm before relying on
 : // them.
2155 1568 : Label if_zero(this), done(this);
2156 784 : if (Is64()) {
2157 : TNode<UintPtrT> value = UncheckedCast<UintPtrT>(
2158 784 : Load(MachineType::UintPtr(), data_pointer, offset));
2159 784 : return BigIntFromUint64(value);
2160 : } else {
2161 : DCHECK(!Is64());
2162 : #if defined(V8_TARGET_BIG_ENDIAN)
2163 : TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2164 : Load(MachineType::UintPtr(), data_pointer, offset));
2165 : TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2166 : Load(MachineType::UintPtr(), data_pointer,
2167 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2168 : #else
2169 : TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2170 0 : Load(MachineType::UintPtr(), data_pointer, offset));
2171 : TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2172 : Load(MachineType::UintPtr(), data_pointer,
2173 0 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2174 : #endif
2175 0 : return BigIntFromUint32Pair(low, high);
2176 784 : }
2177 : }
2178 :
 : // (32-bit targets only.) Builds a non-negative BigInt from the (low, high)
 : // halves of an unsigned 64-bit value: two digits when high != 0, one digit
 : // when only low is set, canonical zero otherwise.
2179 0 : TNode<BigInt> CodeStubAssembler::BigIntFromUint32Pair(TNode<UintPtrT> low,
2180 : TNode<UintPtrT> high) {
2181 : DCHECK(!Is64());
2182 0 : TVARIABLE(BigInt, var_result);
2183 0 : Label high_zero(this), if_zero(this), done(this);
2184 :
2185 0 : GotoIf(WordEqual(high, IntPtrConstant(0)), &high_zero);
2186 0 : var_result = AllocateBigInt(IntPtrConstant(2));
2187 0 : StoreBigIntDigit(var_result.value(), 0, low);
2188 0 : StoreBigIntDigit(var_result.value(), 1, high);
2189 0 : Goto(&done);
2190 :
2191 0 : BIND(&high_zero);
2192 0 : GotoIf(WordEqual(low, IntPtrConstant(0)), &if_zero);
2193 0 : var_result = AllocateBigInt(IntPtrConstant(1));
2194 0 : StoreBigIntDigit(var_result.value(), 0, low);
2195 0 : Goto(&done);
2196 :
2197 0 : BIND(&if_zero);
2198 0 : var_result = AllocateBigInt(IntPtrConstant(0));
2199 0 : Goto(&done);
2200 :
2201 0 : BIND(&done);
2202 0 : return var_result.value();
2203 : }
2204 :
 : // (64-bit targets only.) Boxes an unsigned word as a one-digit BigInt;
 : // zero maps to the canonical zero-length BigInt.
2205 1232 : TNode<BigInt> CodeStubAssembler::BigIntFromUint64(TNode<UintPtrT> value) {
2206 : DCHECK(Is64());
2207 1232 : TVARIABLE(BigInt, var_result);
2208 2464 : Label done(this), if_zero(this);
2209 1232 : GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
2210 1232 : var_result = AllocateBigInt(IntPtrConstant(1));
2211 1232 : StoreBigIntDigit(var_result.value(), 0, value);
2212 1232 : Goto(&done);
2213 :
2214 1232 : BIND(&if_zero);
2215 1232 : var_result = AllocateBigInt(IntPtrConstant(0));
2216 1232 : Goto(&done);
2217 1232 : BIND(&done);
2218 2464 : return var_result.value();
2219 : }
2220 :
 : // Loads one element of a typed array (statically-known kind) and converts
 : // it to a tagged value: small integers become Smis, 32-bit integers get
 : // Smi-or-HeapNumber tagging, floats are boxed as HeapNumbers, and 64-bit
 : // kinds become BigInts.
2221 7616 : Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2222 : Node* data_pointer, Node* index_node, ElementsKind elements_kind,
2223 : ParameterMode parameter_mode) {
2224 : Node* offset =
2225 7616 : ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2226 7616 : switch (elements_kind) {
2227 : case UINT8_ELEMENTS: /* fall through */
2228 : case UINT8_CLAMPED_ELEMENTS:
 : // 8/16-bit values always fit in a Smi.
2229 1344 : return SmiFromInt32(Load(MachineType::Uint8(), data_pointer, offset));
2230 : case INT8_ELEMENTS:
2231 672 : return SmiFromInt32(Load(MachineType::Int8(), data_pointer, offset));
2232 : case UINT16_ELEMENTS:
2233 672 : return SmiFromInt32(Load(MachineType::Uint16(), data_pointer, offset));
2234 : case INT16_ELEMENTS:
2235 672 : return SmiFromInt32(Load(MachineType::Int16(), data_pointer, offset));
2236 : case UINT32_ELEMENTS:
 : // May exceed Smi range, so a HeapNumber can be allocated.
2237 : return ChangeUint32ToTagged(
2238 672 : Load(MachineType::Uint32(), data_pointer, offset));
2239 : case INT32_ELEMENTS:
2240 : return ChangeInt32ToTagged(
2241 672 : Load(MachineType::Int32(), data_pointer, offset));
2242 : case FLOAT32_ELEMENTS:
2243 : return AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(
2244 672 : Load(MachineType::Float32(), data_pointer, offset)));
2245 : case FLOAT64_ELEMENTS:
2246 : return AllocateHeapNumberWithValue(
2247 672 : Load(MachineType::Float64(), data_pointer, offset));
2248 : case BIGINT64_ELEMENTS:
2249 784 : return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
2250 : case BIGUINT64_ELEMENTS:
2251 784 : return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
2252 : default:
2253 0 : UNREACHABLE();
2254 : }
2255 : }
2256 :
 : // Overload for a runtime-determined elements kind: emits a Switch over all
 : // typed-array kinds (labels generated via the TYPED_ARRAYS macro) and
 : // dispatches each case to the statically-typed loader above. An unknown
 : // kind is unreachable.
2257 56 : TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2258 : TNode<WordT> data_pointer, TNode<Smi> index, TNode<Int32T> elements_kind) {
2259 56 : TVARIABLE(Numeric, var_result);
2260 112 : Label done(this), if_unknown_type(this, Label::kDeferred);
2261 : int32_t elements_kinds[] = {
2262 : #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) TYPE##_ELEMENTS,
2263 : TYPED_ARRAYS(TYPED_ARRAY_CASE)
2264 : #undef TYPED_ARRAY_CASE
2265 56 : };
2266 :
2267 : #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) Label if_##type##array(this);
2268 112 : TYPED_ARRAYS(TYPED_ARRAY_CASE)
2269 : #undef TYPED_ARRAY_CASE
2270 :
2271 : Label* elements_kind_labels[] = {
2272 : #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) &if_##type##array,
2273 : TYPED_ARRAYS(TYPED_ARRAY_CASE)
2274 : #undef TYPED_ARRAY_CASE
2275 56 : };
2276 : STATIC_ASSERT(arraysize(elements_kinds) == arraysize(elements_kind_labels));
2277 :
2278 : Switch(elements_kind, &if_unknown_type, elements_kinds, elements_kind_labels,
2279 56 : arraysize(elements_kinds));
2280 :
2281 56 : BIND(&if_unknown_type);
2282 56 : Unreachable();
2283 :
2284 : #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
2285 : BIND(&if_##type##array); \
2286 : { \
2287 : var_result = CAST(LoadFixedTypedArrayElementAsTagged( \
2288 : data_pointer, index, TYPE##_ELEMENTS, SMI_PARAMETERS)); \
2289 : Goto(&done); \
2290 : }
2291 56 : TYPED_ARRAYS(TYPED_ARRAY_CASE)
2292 : #undef TYPED_ARRAY_CASE
2293 :
2294 56 : BIND(&done);
2295 112 : return var_result.value();
2296 : }
2297 :
 : // Stores a tagged |value| into a typed array's backing store, converting
 : // per elements kind: Smi untag for small ints, tagged->word32 truncation
 : // for 32-bit ints, HeapNumber unboxing (with float32 narrowing) for
 : // floats, and the dedicated BigInt store path for 64-bit kinds.
2298 616 : void CodeStubAssembler::StoreFixedTypedArrayElementFromTagged(
2299 : TNode<Context> context, TNode<FixedTypedArrayBase> elements,
2300 : TNode<Object> index_node, TNode<Object> value, ElementsKind elements_kind,
2301 : ParameterMode parameter_mode) {
2302 616 : TNode<RawPtrT> data_pointer = LoadFixedTypedArrayBackingStore(elements);
2303 616 : switch (elements_kind) {
2304 : case UINT8_ELEMENTS:
2305 : case UINT8_CLAMPED_ELEMENTS:
2306 : case INT8_ELEMENTS:
2307 : case UINT16_ELEMENTS:
2308 : case INT16_ELEMENTS:
2309 : StoreElement(data_pointer, elements_kind, index_node,
2310 280 : SmiToInt32(CAST(value)), parameter_mode);
2311 280 : break;
2312 : case UINT32_ELEMENTS:
2313 : case INT32_ELEMENTS:
 : // NOTE(review): TruncateTaggedToWord32 takes |context|, suggesting it
 : // may call out for conversion — confirm callers pass a valid context.
2314 : StoreElement(data_pointer, elements_kind, index_node,
2315 112 : TruncateTaggedToWord32(context, value), parameter_mode);
2316 112 : break;
2317 : case FLOAT32_ELEMENTS:
2318 : StoreElement(data_pointer, elements_kind, index_node,
2319 112 : TruncateFloat64ToFloat32(LoadHeapNumberValue(CAST(value))),
2320 56 : parameter_mode);
2321 56 : break;
2322 : case FLOAT64_ELEMENTS:
2323 : StoreElement(data_pointer, elements_kind, index_node,
2324 56 : LoadHeapNumberValue(CAST(value)), parameter_mode);
2325 56 : break;
2326 : case BIGUINT64_ELEMENTS:
2327 : case BIGINT64_ELEMENTS: {
2328 : TNode<IntPtrT> offset =
2329 112 : ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2330 : EmitBigTypedArrayElementStore(elements, data_pointer, offset,
2331 112 : CAST(value));
2332 112 : break;
2333 : }
2334 : default:
2335 0 : UNREACHABLE();
2336 : }
2337 616 : }
2338 :
 : // Loads a feedback slot from a FeedbackVector as a MaybeObject (slots may
 : // hold weak references). Bounds are only CSA_SLOW_ASSERT-checked.
2339 16016 : TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2340 : Node* object, Node* slot_index_node, int additional_offset,
2341 : ParameterMode parameter_mode) {
2342 : CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2343 : CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2344 : int32_t header_size =
2345 16016 : FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2346 : Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2347 16016 : parameter_mode, header_size);
2348 : CSA_SLOW_ASSERT(
2349 : this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2350 : FeedbackVector::kHeaderSize));
2351 : return UncheckedCast<MaybeObject>(
2352 16016 : Load(MachineType::AnyTagged(), object, offset));
2353 : }
2354 :
 : // Loads a Smi array element directly as an int32. With 32-bit Smis the
 : // tagged value is loaded and untagged; with 31-bit Smis in the upper half
 : // of a 64-bit word, only the payload half-word is loaded, with a +4 byte
 : // endian correction on little-endian targets.
2355 : template <typename Array>
2356 15200 : TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
2357 : TNode<Array> object, int array_header_size, Node* index_node,
2358 : int additional_offset, ParameterMode parameter_mode) {
2359 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2360 : DCHECK(IsAligned(additional_offset, kTaggedSize));
2361 15200 : int endian_correction = 0;
2362 : #if V8_TARGET_LITTLE_ENDIAN
2363 15200 : if (SmiValuesAre32Bits()) endian_correction = 4;
2364 : #endif
2365 : int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
2366 15200 : endian_correction;
2367 : Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2368 15200 : parameter_mode, header_size);
2369 : CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(object),
2370 : array_header_size + endian_correction));
2371 15200 : if (SmiValuesAre32Bits()) {
2372 15200 : return UncheckedCast<Int32T>(Load(MachineType::Int32(), object, offset));
2373 : } else {
2374 0 : return SmiToInt32(Load(MachineType::AnyTagged(), object, offset));
2375 : }
2376 : }
2377 :
 : // FixedArray front-end for LoadAndUntagToWord32ArrayElement.
2378 5988 : TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
2379 : TNode<FixedArray> object, Node* index_node, int additional_offset,
2380 : ParameterMode parameter_mode) {
2381 : CSA_SLOW_ASSERT(this, IsFixedArraySubclass(object));
2382 : return LoadAndUntagToWord32ArrayElement(object, FixedArray::kHeaderSize,
2383 : index_node, additional_offset,
2384 5988 : parameter_mode);
2385 : }
2386 :
2387 1232 : TNode<MaybeObject> CodeStubAssembler::LoadWeakFixedArrayElement(
2388 : TNode<WeakFixedArray> object, Node* index, int additional_offset,
2389 : ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
2390 : return LoadArrayElement(object, WeakFixedArray::kHeaderSize, index,
2391 1232 : additional_offset, parameter_mode, needs_poisoning);
2392 : }
2393 :
// Loads a float64 element from a FixedDoubleArray. When |if_hole| is
// non-null, control jumps there if the element is the hole NaN instead of
// producing a value.
TNode<Float64T> CodeStubAssembler::LoadFixedDoubleArrayElement(
    SloppyTNode<FixedDoubleArray> object, Node* index_node,
    MachineType machine_type, int additional_offset,
    ParameterMode parameter_mode, Label* if_hole) {
  CSA_ASSERT(this, IsFixedDoubleArray(object));
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  // Byte offset of element 0 relative to the untagged object address.
  int32_t header_size =
      FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
  TNode<IntPtrT> offset = ElementOffsetFromIndex(
      index_node, HOLEY_DOUBLE_ELEMENTS, parameter_mode, header_size);
  CSA_ASSERT(this, IsOffsetInBounds(
                       offset, LoadAndUntagFixedArrayBaseLength(object),
                       FixedDoubleArray::kHeaderSize, HOLEY_DOUBLE_ELEMENTS));
  return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
}
2410 :
// Loads an element from a FixedArrayBase whose elements kind is only known
// at runtime, returning it as a tagged value (boxing doubles into fresh
// HeapNumbers). Jumps to |if_hole| for holes and to |if_accessor| when a
// dictionary element turns out to be an accessor pair.
TNode<Object> CodeStubAssembler::LoadFixedArrayBaseElementAsTagged(
    TNode<FixedArrayBase> elements, TNode<IntPtrT> index,
    TNode<Int32T> elements_kind, Label* if_accessor, Label* if_hole) {
  TVARIABLE(Object, var_result);
  Label done(this), if_packed(this), if_holey(this), if_packed_double(this),
      if_holey_double(this), if_dictionary(this, Label::kDeferred);

  // The kinds and labels arrays must stay in matching order for Switch().
  int32_t kinds[] = {// Handled by if_packed.
                     PACKED_SMI_ELEMENTS, PACKED_ELEMENTS,
                     // Handled by if_holey.
                     HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS,
                     // Handled by if_packed_double.
                     PACKED_DOUBLE_ELEMENTS,
                     // Handled by if_holey_double.
                     HOLEY_DOUBLE_ELEMENTS};
  Label* labels[] = {// PACKED_{SMI,}_ELEMENTS
                     &if_packed, &if_packed,
                     // HOLEY_{SMI,}_ELEMENTS
                     &if_holey, &if_holey,
                     // PACKED_DOUBLE_ELEMENTS
                     &if_packed_double,
                     // HOLEY_DOUBLE_ELEMENTS
                     &if_holey_double};
  // Any other kind is treated as dictionary-mode elements (deferred path).
  Switch(elements_kind, &if_dictionary, kinds, labels, arraysize(kinds));

  BIND(&if_packed);
  {
    // Packed arrays cannot contain holes; plain load suffices.
    var_result = LoadFixedArrayElement(CAST(elements), index, 0);
    Goto(&done);
  }

  BIND(&if_holey);
  {
    // Holey arrays may contain the hole sentinel; detect it by identity.
    var_result = LoadFixedArrayElement(CAST(elements), index);
    Branch(WordEqual(var_result.value(), TheHoleConstant()), if_hole, &done);
  }

  BIND(&if_packed_double);
  {
    // Box the raw double into a new HeapNumber.
    var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
        CAST(elements), index, MachineType::Float64()));
    Goto(&done);
  }

  BIND(&if_holey_double);
  {
    // Same as above, but the load itself branches to |if_hole| on hole NaN.
    var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
        CAST(elements), index, MachineType::Float64(), 0, INTPTR_PARAMETERS,
        if_hole));
    Goto(&done);
  }

  BIND(&if_dictionary);
  {
    CSA_ASSERT(this, IsDictionaryElementsKind(elements_kind));
    var_result = BasicLoadNumberDictionaryElement(CAST(elements), index,
                                                  if_accessor, if_hole);
    Goto(&done);
  }

  BIND(&done);
  return var_result.value();
}
2474 :
// Loads a float64 at |base|+|offset|, optionally jumping to |if_hole| when
// the bit pattern is the hole NaN. Passing MachineType::None() performs only
// the hole check and returns no value.
TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
    SloppyTNode<Object> base, SloppyTNode<IntPtrT> offset, Label* if_hole,
    MachineType machine_type) {
  if (if_hole) {
    // TODO(ishell): Compare only the upper part for the hole once the
    // compiler is able to fold addition of already complex |offset| with
    // |kIeeeDoubleExponentWordOffset| into one addressing mode.
    if (Is64()) {
      // Compare the full 64-bit pattern against the hole NaN.
      Node* element = Load(MachineType::Uint64(), base, offset);
      GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
    } else {
      // 32-bit targets: the upper (exponent) word alone identifies the hole.
      Node* element_upper = Load(
          MachineType::Uint32(), base,
          IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
      GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
             if_hole);
    }
  }
  if (machine_type.IsNone()) {
    // This means the actual value is not needed.
    return TNode<Float64T>();
  }
  return UncheckedCast<Float64T>(Load(machine_type, base, offset));
}
2499 :
2500 75268 : TNode<Object> CodeStubAssembler::LoadContextElement(
2501 : SloppyTNode<Context> context, int slot_index) {
2502 75268 : int offset = Context::SlotOffset(slot_index);
2503 : return UncheckedCast<Object>(
2504 75268 : Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)));
2505 : }
2506 :
2507 5776 : TNode<Object> CodeStubAssembler::LoadContextElement(
2508 : SloppyTNode<Context> context, SloppyTNode<IntPtrT> slot_index) {
2509 : Node* offset = ElementOffsetFromIndex(
2510 5776 : slot_index, PACKED_ELEMENTS, INTPTR_PARAMETERS, Context::SlotOffset(0));
2511 5776 : return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2512 : }
2513 :
2514 56 : TNode<Object> CodeStubAssembler::LoadContextElement(TNode<Context> context,
2515 : TNode<Smi> slot_index) {
2516 : Node* offset = ElementOffsetFromIndex(slot_index, PACKED_ELEMENTS,
2517 56 : SMI_PARAMETERS, Context::SlotOffset(0));
2518 56 : return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2519 : }
2520 :
2521 112 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2522 : int slot_index,
2523 : SloppyTNode<Object> value) {
2524 112 : int offset = Context::SlotOffset(slot_index);
2525 112 : Store(context, IntPtrConstant(offset), value);
2526 112 : }
2527 :
2528 1008 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2529 : SloppyTNode<IntPtrT> slot_index,
2530 : SloppyTNode<Object> value) {
2531 : Node* offset = IntPtrAdd(TimesTaggedSize(slot_index),
2532 1008 : IntPtrConstant(Context::SlotOffset(0)));
2533 1008 : Store(context, offset, value);
2534 1008 : }
2535 :
2536 6880 : void CodeStubAssembler::StoreContextElementNoWriteBarrier(
2537 : SloppyTNode<Context> context, int slot_index, SloppyTNode<Object> value) {
2538 6880 : int offset = Context::SlotOffset(slot_index);
2539 : StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
2540 6880 : IntPtrConstant(offset), value);
2541 6880 : }
2542 :
2543 30852 : TNode<Context> CodeStubAssembler::LoadNativeContext(
2544 : SloppyTNode<Context> context) {
2545 : return UncheckedCast<Context>(
2546 30852 : LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX));
2547 : }
2548 :
// Walks the context chain upwards from |context| until a module context is
// found (identified by its map) and returns it. The caller must guarantee a
// module context exists on the chain; the CSA_ASSERT documents that the
// native context is never reached during the walk.
TNode<Context> CodeStubAssembler::LoadModuleContext(
    SloppyTNode<Context> context) {
  Node* module_map = LoadRoot(RootIndex::kModuleContextMap);
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Label context_found(this);

  Variable* context_search_loop_variables[1] = {&cur_context};
  Label context_search(this, 1, context_search_loop_variables);

  // Loop until cur_context->map() is module_map.
  Goto(&context_search);
  BIND(&context_search);
  {
    CSA_ASSERT(this, Word32BinaryNot(IsNativeContext(cur_context.value())));
    GotoIf(WordEqual(LoadMap(cur_context.value()), module_map), &context_found);

    // Follow the PREVIOUS link one level up the context chain.
    cur_context.Bind(
        LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
    Goto(&context_search);
  }

  BIND(&context_found);
  return UncheckedCast<Context>(cur_context.value());
}
2575 :
2576 788 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2577 : SloppyTNode<Int32T> kind, SloppyTNode<Context> native_context) {
2578 : CSA_ASSERT(this, IsFastElementsKind(kind));
2579 : CSA_ASSERT(this, IsNativeContext(native_context));
2580 : Node* offset = IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
2581 788 : ChangeInt32ToIntPtr(kind));
2582 788 : return UncheckedCast<Map>(LoadContextElement(native_context, offset));
2583 : }
2584 :
2585 4760 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2586 : ElementsKind kind, SloppyTNode<Context> native_context) {
2587 : CSA_ASSERT(this, IsNativeContext(native_context));
2588 : return UncheckedCast<Map>(
2589 4760 : LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
2590 : }
2591 :
// Returns true if |function|'s SharedFunctionInfo describes any flavor of
// generator: (async) generator function or (async) concise generator method.
TNode<BoolT> CodeStubAssembler::IsGeneratorFunction(
    TNode<JSFunction> function) {
  TNode<SharedFunctionInfo> const shared_function_info =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));

  // The function kind is packed into the SFI's flags word.
  TNode<Uint32T> const function_kind =
      DecodeWord32<SharedFunctionInfo::FunctionKindBits>(LoadObjectField(
          shared_function_info, SharedFunctionInfo::kFlagsOffset,
          MachineType::Uint32()));

  // OR together equality checks against the four generator-ish kinds.
  return TNode<BoolT>::UncheckedCast(Word32Or(
      Word32Or(
          Word32Or(
              Word32Equal(function_kind,
                          Int32Constant(FunctionKind::kAsyncGeneratorFunction)),
              Word32Equal(
                  function_kind,
                  Int32Constant(FunctionKind::kAsyncConciseGeneratorMethod))),
          Word32Equal(function_kind,
                      Int32Constant(FunctionKind::kGeneratorFunction))),
      Word32Equal(function_kind,
                  Int32Constant(FunctionKind::kConciseGeneratorMethod))));
}
2615 :
// Returns true if |function| exposes a "prototype" property:
// (has_prototype_slot() && IsConstructor()) ||
// IsGeneratorFunction(shared()->kind())
TNode<BoolT> CodeStubAssembler::HasPrototypeProperty(TNode<JSFunction> function,
                                                     TNode<Map> map) {
  // Both bits must be set for the constructor case, hence IsAllSetWord32.
  uint32_t mask =
      Map::HasPrototypeSlotBit::kMask | Map::IsConstructorBit::kMask;
  return TNode<BoolT>::UncheckedCast(
      Word32Or(IsAllSetWord32(LoadMapBitField(map), mask),
               IsGeneratorFunction(function)));
}
2626 :
// Jumps to |runtime| when the function's prototype cannot be read directly:
// !has_prototype_property() || has_non_instance_prototype()
void CodeStubAssembler::GotoIfPrototypeRequiresRuntimeLookup(
    TNode<JSFunction> function, TNode<Map> map, Label* runtime) {
  GotoIfNot(HasPrototypeProperty(function, map), runtime);
  GotoIf(IsSetWord32<Map::HasNonInstancePrototypeBit>(LoadMapBitField(map)),
         runtime);
}
2634 :
// Loads the "prototype" of a JSFunction from its prototype-or-initial-map
// slot. Bails out to |if_bailout| when the slot holds the hole (prototype
// not yet materialized). When the slot holds a map, the prototype is that
// map's prototype.
Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function,
                                                 Label* if_bailout) {
  CSA_ASSERT(this, TaggedIsNotSmi(function));
  CSA_ASSERT(this, IsJSFunction(function));
  CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
  CSA_ASSERT(this, IsClearWord32<Map::HasNonInstancePrototypeBit>(
                       LoadMapBitField(LoadMap(function))));
  Node* proto_or_map =
      LoadObjectField(function, JSFunction::kPrototypeOrInitialMapOffset);
  GotoIf(IsTheHole(proto_or_map), if_bailout);

  VARIABLE(var_result, MachineRepresentation::kTagged, proto_or_map);
  Label done(this, &var_result);
  // If the slot holds the prototype directly we are done already.
  GotoIfNot(IsMap(proto_or_map), &done);

  // Otherwise it holds the initial map; the prototype hangs off of it.
  var_result.Bind(LoadMapPrototype(proto_or_map));
  Goto(&done);

  BIND(&done);
  return var_result.value();
}
2656 :
// Returns the BytecodeArray of |shared|. The SFI's function-data slot holds
// either the BytecodeArray directly or an InterpreterData wrapper that
// points at it (used when an interpreter trampoline is installed).
TNode<BytecodeArray> CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray(
    SloppyTNode<SharedFunctionInfo> shared) {
  Node* function_data =
      LoadObjectField(shared, SharedFunctionInfo::kFunctionDataOffset);

  VARIABLE(var_result, MachineRepresentation::kTagged, function_data);
  Label done(this, &var_result);

  // Not an InterpreterData wrapper: function_data is the bytecode itself.
  GotoIfNot(HasInstanceType(function_data, INTERPRETER_DATA_TYPE), &done);
  Node* bytecode_array =
      LoadObjectField(function_data, InterpreterData::kBytecodeArrayOffset);
  var_result.Bind(bytecode_array);
  Goto(&done);

  BIND(&done);
  return CAST(var_result.value());
}
2674 :
2675 24 : void CodeStubAssembler::StoreObjectByteNoWriteBarrier(TNode<HeapObject> object,
2676 : int offset,
2677 : TNode<Word32T> value) {
2678 : StoreNoWriteBarrier(MachineRepresentation::kWord8, object,
2679 24 : IntPtrConstant(offset - kHeapObjectTag), value);
2680 24 : }
2681 :
2682 25180 : void CodeStubAssembler::StoreHeapNumberValue(SloppyTNode<HeapNumber> object,
2683 : SloppyTNode<Float64T> value) {
2684 : StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
2685 25180 : MachineRepresentation::kFloat64);
2686 25180 : }
2687 :
2688 504 : void CodeStubAssembler::StoreMutableHeapNumberValue(
2689 : SloppyTNode<MutableHeapNumber> object, SloppyTNode<Float64T> value) {
2690 : StoreObjectFieldNoWriteBarrier(object, MutableHeapNumber::kValueOffset, value,
2691 504 : MachineRepresentation::kFloat64);
2692 504 : }
2693 :
// Stores a tagged |value| into |object| at constant field |offset| with a
// full write barrier. The map field must be written via StoreMap instead.
void CodeStubAssembler::StoreObjectField(Node* object, int offset,
                                         Node* value) {
  DCHECK_NE(HeapObject::kMapOffset, offset);  // Use StoreMap instead.

  OptimizedStoreField(MachineRepresentation::kTagged,
                      UncheckedCast<HeapObject>(object), offset, value,
                      WriteBarrierKind::kFullWriteBarrier);
}
2702 :
// Variable-offset field store; reduces to the constant-offset overload when
// |offset| folds to a compile-time constant.
void CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
                                         Node* value) {
  int const_offset;
  if (ToInt32Constant(offset, const_offset)) {
    StoreObjectField(object, const_offset, value);
  } else {
    // |offset| is a tagged field offset; strip the tag for the raw address.
    Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
  }
}
2712 :
2713 161160 : void CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2714 : Node* object, int offset, Node* value, MachineRepresentation rep) {
2715 : OptimizedStoreField(rep, UncheckedCast<HeapObject>(object), offset, value,
2716 161160 : WriteBarrierKind::kNoWriteBarrier);
2717 161160 : }
2718 :
// Variable-offset barrier-free field store; reduces to the constant-offset
// overload when |offset| folds to a compile-time constant.
void CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
    Node* object, Node* offset, Node* value, MachineRepresentation rep) {
  int const_offset;
  if (ToInt32Constant(offset, const_offset)) {
    return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
  }
  // |offset| is a tagged field offset; strip the tag for the raw address.
  StoreNoWriteBarrier(rep, object,
                      IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
}
2728 :
2729 10812 : void CodeStubAssembler::StoreMap(Node* object, Node* map) {
2730 10812 : OptimizedStoreMap(UncheckedCast<HeapObject>(object), CAST(map));
2731 10812 : }
2732 :
2733 54576 : void CodeStubAssembler::StoreMapNoWriteBarrier(Node* object,
2734 : RootIndex map_root_index) {
2735 54576 : StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
2736 54576 : }
2737 :
// Installs |map| on |object| without a write barrier; the caller guarantees
// the barrier is unnecessary (e.g. freshly allocated object).
void CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  OptimizedStoreField(MachineRepresentation::kTaggedPointer,
                      UncheckedCast<HeapObject>(object), HeapObject::kMapOffset,
                      map, WriteBarrierKind::kNoWriteBarrier);
}
2744 :
2745 21756 : void CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
2746 : RootIndex root_index) {
2747 21756 : if (RootsTable::IsImmortalImmovable(root_index)) {
2748 21756 : return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
2749 : } else {
2750 0 : return StoreObjectField(object, offset, LoadRoot(root_index));
2751 : }
2752 : }
2753 :
2754 0 : void CodeStubAssembler::StoreJSArrayLength(TNode<JSArray> array,
2755 : TNode<Smi> length) {
2756 0 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2757 0 : }
2758 :
2759 0 : void CodeStubAssembler::StoreElements(TNode<Object> object,
2760 : TNode<FixedArrayBase> elements) {
2761 0 : StoreObjectField(object, JSObject::kElementsOffset, elements);
2762 0 : }
2763 :
// Stores a tagged |value| at |index_node| into either a FixedArray or a
// PropertyArray. The two types share header layout, so one offset
// computation serves both; the length, however, is encoded differently and
// the bounds assert selects the right decoding at runtime.
void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
    Node* object, Node* index_node, Node* value, WriteBarrierMode barrier_mode,
    int additional_offset, ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(
      this, Word32Or(IsFixedArraySubclass(object), IsPropertyArray(object)));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
         barrier_mode == UPDATE_WRITE_BARRIER);
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  STATIC_ASSERT(static_cast<int>(FixedArray::kHeaderSize) ==
                static_cast<int>(PropertyArray::kHeaderSize));
  int header_size =
      FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
                static_cast<int>(WeakFixedArray::kLengthOffset));
  STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
                static_cast<int>(PropertyArray::kLengthAndHashOffset));
  // Check that index_node + additional_offset <= object.length.
  // TODO(cbruni): Use proper LoadXXLength helpers
  CSA_ASSERT(
      this,
      IsOffsetInBounds(
          offset,
          Select<IntPtrT>(
              IsPropertyArray(object),
              [=] {
                // PropertyArray packs length and hash into one field.
                TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
                    object, PropertyArray::kLengthAndHashOffset);
                return TNode<IntPtrT>::UncheckedCast(
                    DecodeWord<PropertyArray::LengthField>(length_and_hash));
              },
              [=] {
                return LoadAndUntagObjectField(object,
                                               FixedArrayBase::kLengthOffset);
              }),
          FixedArray::kHeaderSize));
  if (barrier_mode == SKIP_WRITE_BARRIER) {
    StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
  } else {
    Store(object, offset, value);
  }
}
2808 :
// Stores a raw float64 |value| into a FixedDoubleArray, optionally emitting
// a bounds check first. Raw doubles never require a write barrier.
void CodeStubAssembler::StoreFixedDoubleArrayElement(
    TNode<FixedDoubleArray> object, Node* index_node, TNode<Float64T> value,
    ParameterMode parameter_mode, CheckBounds check_bounds) {
  CSA_ASSERT(this, IsFixedDoubleArray(object));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  if (NeedsBoundsCheck(check_bounds)) {
    FixedArrayBoundsCheck(object, index_node, 0, parameter_mode);
  }
  Node* offset =
      ElementOffsetFromIndex(index_node, PACKED_DOUBLE_ELEMENTS, parameter_mode,
                             FixedArray::kHeaderSize - kHeapObjectTag);
  MachineRepresentation rep = MachineRepresentation::kFloat64;
  StoreNoWriteBarrier(rep, object, offset, value);
}
2823 :
// Stores |value| into the feedback slot at |slot_index_node| of a
// FeedbackVector, choosing between a plain store and a write-barriered
// store based on |barrier_mode|.
void CodeStubAssembler::StoreFeedbackVectorSlot(Node* object,
                                                Node* slot_index_node,
                                                Node* value,
                                                WriteBarrierMode barrier_mode,
                                                int additional_offset,
                                                ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
         barrier_mode == UPDATE_WRITE_BARRIER);
  // Byte offset of slot 0 relative to the untagged object address.
  int header_size =
      FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  // Check that slot_index_node <= object.length.
  CSA_ASSERT(this,
             IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
                              FeedbackVector::kHeaderSize));
  if (barrier_mode == SKIP_WRITE_BARRIER) {
    StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
  } else {
    Store(object, offset, value);
  }
}
2849 :
// Jumps to |bailout| unless the "length" property of an array with map
// |map| is writable. Relies on "length" being the first descriptor, which
// is guaranteed because it is nonconfigurable.
void CodeStubAssembler::EnsureArrayLengthWritable(TNode<Map> map,
                                                  Label* bailout) {
  // Don't support arrays in dictionary named property mode.
  GotoIf(IsDictionaryMap(map), bailout);

  // Check whether the length property is writable. The length property is the
  // only default named property on arrays. It's nonconfigurable, hence is
  // guaranteed to stay the first property.
  TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);

  int length_index = JSArray::kLengthDescriptorIndex;
#ifdef DEBUG
  // Debug-only sanity check that descriptor 0 really is "length".
  TNode<Name> maybe_length =
      LoadKeyByDescriptorEntry(descriptors, length_index);
  CSA_ASSERT(this,
             WordEqual(maybe_length, LoadRoot(RootIndex::klength_string)));
#endif

  TNode<Uint32T> details =
      LoadDetailsByDescriptorEntry(descriptors, length_index);
  GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
         bailout);
}
2873 :
// Jumps to |bailout| unless elements can be appended to an array with map
// |map| (not a prototype, extensible, writable length); otherwise returns
// the array's elements kind.
TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Map> map,
                                                     Label* bailout) {
  // Disallow pushing onto prototypes. It might be the JSArray prototype.
  // Disallow pushing onto non-extensible objects.
  Comment("Disallow pushing onto prototypes");
  Node* bit_field2 = LoadMapBitField2(map);
  int mask = Map::IsPrototypeMapBit::kMask | Map::IsExtensibleBit::kMask;
  Node* test = Word32And(bit_field2, Int32Constant(mask));
  // Require exactly: extensible bit set, prototype bit clear.
  GotoIf(Word32NotEqual(test, Int32Constant(Map::IsExtensibleBit::kMask)),
         bailout);

  EnsureArrayLengthWritable(map, bailout);

  TNode<Uint32T> kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
  return Signed(kind);
}
2890 :
// Ensures |var_elements| has capacity for |length| + |growth| elements,
// growing (and rebinding the variable to the new backing store) when
// needed. Jumps to |bailout| if growing fails.
void CodeStubAssembler::PossiblyGrowElementsCapacity(
    ParameterMode mode, ElementsKind kind, Node* array, Node* length,
    Variable* var_elements, Node* growth, Label* bailout) {
  Label fits(this, var_elements);
  Node* capacity =
      TaggedToParameter(LoadFixedArrayBaseLength(var_elements->value()), mode);
  // length and growth nodes are already in a ParameterMode appropriate
  // representation.
  Node* new_length = IntPtrOrSmiAdd(growth, length, mode);
  GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
  Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
  // Grow copies the elements; rebind the variable to the new store.
  var_elements->Bind(GrowElementsCapacity(array, var_elements->value(), kind,
                                          kind, capacity, new_capacity, mode,
                                          bailout));
  Goto(&fits);
  BIND(&fits);
}
2908 :
// Appends the stub arguments starting at |*arg_index| to |array|, growing
// its elements store as necessary, and returns the new length. On failure
// (wrong element type mid-way, or capacity growth bailout) it first commits
// the elements already pushed, updates |*arg_index| to the first unpushed
// argument, and jumps to |bailout| so a slower path can continue.
TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
                                                 SloppyTNode<JSArray> array,
                                                 CodeStubArguments* args,
                                                 TVariable<IntPtrT>* arg_index,
                                                 Label* bailout) {
  CSA_SLOW_ASSERT(this, IsJSArray(array));
  Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
  Label pre_bailout(this);
  Label success(this);
  TVARIABLE(Smi, var_tagged_length);
  ParameterMode mode = OptimalParameterMode();
  VARIABLE(var_length, OptimalParameterRepresentation(),
           TaggedToParameter(LoadFastJSArrayLength(array), mode));
  VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));

  // Resize the capacity of the fixed array if it doesn't fit.
  TNode<IntPtrT> first = arg_index->value();
  // Number of arguments still to be appended.
  Node* growth = IntPtrToParameter(
      IntPtrSub(UncheckedCast<IntPtrT>(args->GetLength(INTPTR_PARAMETERS)),
                first),
      mode);
  PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
                               &var_elements, growth, &pre_bailout);

  // Push each argument onto the end of the array now that there is enough
  // capacity.
  CodeStubAssembler::VariableList push_vars({&var_length}, zone());
  Node* elements = var_elements.value();
  args->ForEach(
      push_vars,
      [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
        // TryStoreArrayElement jumps to pre_bailout on a type mismatch.
        TryStoreArrayElement(kind, mode, &pre_bailout, elements,
                             var_length.value(), arg);
        Increment(&var_length, 1, mode);
      },
      first, nullptr);
  {
    TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length = length;
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    Goto(&success);
  }

  BIND(&pre_bailout);
  {
    // Commit what was pushed so far and report progress via |arg_index|.
    TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length = length;
    Node* diff = SmiSub(length, LoadFastJSArrayLength(array));
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
    Goto(bailout);
  }

  BIND(&success);
  return var_tagged_length.value();
}
2965 :
// Stores |value| into |elements| at |index| if it is compatible with the
// elements |kind| (Smi for SMI kinds, any Number for DOUBLE kinds);
// otherwise jumps to |bailout| without storing.
void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
                                             ParameterMode mode, Label* bailout,
                                             Node* elements, Node* index,
                                             Node* value) {
  if (IsSmiElementsKind(kind)) {
    GotoIf(TaggedIsNotSmi(value), bailout);
  } else if (IsDoubleElementsKind(kind)) {
    GotoIfNotNumber(value, bailout);
  }
  // Double kinds store the raw float64 rather than the tagged number.
  if (IsDoubleElementsKind(kind)) value = ChangeNumberToFloat64(value);
  StoreElement(elements, kind, index, value, mode);
}
2978 :
// Appends a single |value| to |array|, growing its elements store by one if
// necessary. Jumps to |bailout| when the value does not fit the elements
// kind or growing fails; in that case nothing is committed.
void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
                                           Node* value, Label* bailout) {
  CSA_SLOW_ASSERT(this, IsJSArray(array));
  Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
  ParameterMode mode = OptimalParameterMode();
  VARIABLE(var_length, OptimalParameterRepresentation(),
           TaggedToParameter(LoadFastJSArrayLength(array), mode));
  VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));

  // Resize the capacity of the fixed array if it doesn't fit.
  Node* growth = IntPtrOrSmiConstant(1, mode);
  PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
                               &var_elements, growth, bailout);

  // Push each argument onto the end of the array now that there is enough
  // capacity.
  TryStoreArrayElement(kind, mode, bailout, var_elements.value(),
                       var_length.value(), value);
  Increment(&var_length, 1, mode);

  Node* length = ParameterToTagged(var_length.value(), mode);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
}
3002 :
3003 0 : Node* CodeStubAssembler::AllocateCellWithValue(Node* value,
3004 : WriteBarrierMode mode) {
3005 0 : Node* result = Allocate(Cell::kSize, kNone);
3006 0 : StoreMapNoWriteBarrier(result, RootIndex::kCellMap);
3007 0 : StoreCellValue(result, value, mode);
3008 0 : return result;
3009 : }
3010 :
3011 1288 : Node* CodeStubAssembler::LoadCellValue(Node* cell) {
3012 : CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
3013 1288 : return LoadObjectField(cell, Cell::kValueOffset);
3014 : }
3015 :
3016 0 : void CodeStubAssembler::StoreCellValue(Node* cell, Node* value,
3017 : WriteBarrierMode mode) {
3018 : CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
3019 : DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER);
3020 :
3021 0 : if (mode == UPDATE_WRITE_BARRIER) {
3022 0 : StoreObjectField(cell, Cell::kValueOffset, value);
3023 : } else {
3024 0 : StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
3025 : }
3026 0 : }
3027 :
3028 27620 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber() {
3029 27620 : Node* result = Allocate(HeapNumber::kSize, kNone);
3030 27620 : RootIndex heap_map_index = RootIndex::kHeapNumberMap;
3031 27620 : StoreMapNoWriteBarrier(result, heap_map_index);
3032 27620 : return UncheckedCast<HeapNumber>(result);
3033 : }
3034 :
3035 24396 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
3036 : SloppyTNode<Float64T> value) {
3037 24396 : TNode<HeapNumber> result = AllocateHeapNumber();
3038 24396 : StoreHeapNumberValue(result, value);
3039 24396 : return result;
3040 : }
3041 :
3042 504 : TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumber() {
3043 504 : Node* result = Allocate(MutableHeapNumber::kSize, kNone);
3044 504 : RootIndex heap_map_index = RootIndex::kMutableHeapNumberMap;
3045 504 : StoreMapNoWriteBarrier(result, heap_map_index);
3046 504 : return UncheckedCast<MutableHeapNumber>(result);
3047 : }
3048 :
// Returns |object| unchanged unless it is a MutableHeapNumber, in which
// case a freshly allocated clone is returned (so the caller can share the
// value without aliasing the mutable box).
TNode<Object> CodeStubAssembler::CloneIfMutablePrimitive(TNode<Object> object) {
  TVARIABLE(Object, result, object);
  Label done(this);

  // Smis are immutable values; nothing to clone.
  GotoIf(TaggedIsSmi(object), &done);
  GotoIfNot(IsMutableHeapNumber(UncheckedCast<HeapObject>(object)), &done);
  {
    // Mutable heap number found --- allocate a clone.
    TNode<Float64T> value =
        LoadHeapNumberValue(UncheckedCast<HeapNumber>(object));
    result = AllocateMutableHeapNumberWithValue(value);
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}
3066 :
3067 504 : TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumberWithValue(
3068 : SloppyTNode<Float64T> value) {
3069 504 : TNode<MutableHeapNumber> result = AllocateMutableHeapNumber();
3070 504 : StoreMutableHeapNumberValue(result, value);
3071 504 : return result;
3072 : }
3073 :
// Allocates a BigInt with |length| digits and initializes its bitfield
// (length encoded in the LengthBits field, sign bit zero / positive).
TNode<BigInt> CodeStubAssembler::AllocateBigInt(TNode<IntPtrT> length) {
  TNode<BigInt> result = AllocateRawBigInt(length);
  StoreBigIntBitfield(result,
                      Word32Shl(TruncateIntPtrToInt32(length),
                                Int32Constant(BigInt::LengthBits::kShift)));
  return result;
}
3081 :
// Allocates BigInt storage for |length| digits with the map installed but
// the bitfield and digits left uninitialized. Zeroes the optional padding
// field when the layout has one.
TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
  // This is currently used only for 64-bit wide BigInts. If more general
  // applicability is required, a large-object check must be added.
  CSA_ASSERT(this, UintPtrLessThan(length, IntPtrConstant(3)));

  // Object size: header plus one pointer-sized word per digit.
  TNode<IntPtrT> size =
      IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
                Signed(WordShl(length, kSystemPointerSizeLog2)));
  Node* raw_result = Allocate(size, kNone);
  StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
  if (FIELD_SIZE(BigInt::kOptionalPaddingOffset) != 0) {
    DCHECK_EQ(4, FIELD_SIZE(BigInt::kOptionalPaddingOffset));
    StoreObjectFieldNoWriteBarrier(raw_result, BigInt::kOptionalPaddingOffset,
                                   Int32Constant(0),
                                   MachineRepresentation::kWord32);
  }
  return UncheckedCast<BigInt>(raw_result);
}
3100 :
3101 6776 : void CodeStubAssembler::StoreBigIntBitfield(TNode<BigInt> bigint,
3102 : TNode<Word32T> bitfield) {
3103 : StoreObjectFieldNoWriteBarrier(bigint, BigInt::kBitfieldOffset, bitfield,
3104 6776 : MachineRepresentation::kWord32);
3105 6776 : }
3106 :
// Stores |digit| into the slot at compile-time constant |digit_index|.
void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint, int digit_index,
                                         TNode<UintPtrT> digit) {
  StoreObjectFieldNoWriteBarrier(
      bigint, BigInt::kDigitsOffset + digit_index * kSystemPointerSize, digit,
      UintPtrT::kMachineRepresentation);
}
3113 :
// Reads the raw bitfield word (sign and length bits) of |bigint|.
TNode<Word32T> CodeStubAssembler::LoadBigIntBitfield(TNode<BigInt> bigint) {
  return UncheckedCast<Word32T>(
      LoadObjectField(bigint, BigInt::kBitfieldOffset, MachineType::Uint32()));
}
3118 :
// Reads the digit at compile-time constant |digit_index| from |bigint|.
TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
                                                   int digit_index) {
  return UncheckedCast<UintPtrT>(LoadObjectField(
      bigint, BigInt::kDigitsOffset + digit_index * kSystemPointerSize,
      MachineType::UintPtr()));
}
3125 :
3126 784 : TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3127 : uint32_t length, AllocationFlags flags) {
3128 784 : Comment("AllocateSeqOneByteString");
3129 784 : if (length == 0) {
3130 0 : return CAST(LoadRoot(RootIndex::kempty_string));
3131 : }
3132 784 : Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
3133 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
3134 784 : StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
3135 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3136 784 : Uint32Constant(length),
3137 784 : MachineRepresentation::kWord32);
3138 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
3139 784 : Int32Constant(String::kEmptyHashField),
3140 784 : MachineRepresentation::kWord32);
3141 784 : return CAST(result);
3142 : }
3143 :
// Returns true if |object| is Smi zero (used as a "no context" marker) or a
// Context. Used to assert optional context parameters.
TNode<BoolT> CodeStubAssembler::IsZeroOrContext(SloppyTNode<Object> object) {
  return Select<BoolT>(WordEqual(object, SmiConstant(0)),
                       [=] { return Int32TrueConstant(); },
                       [=] { return IsContext(CAST(object)); });
}
3149 :
// Allocates a SeqOneByteString of dynamic |length|. Small strings are
// allocated inline in new space; strings whose aligned size exceeds
// kMaxRegularHeapObjectSize go through the runtime (deferred path).
// |context| may be Smi zero when no context is available (asserted below).
TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
    Node* context, TNode<Uint32T> length, AllocationFlags flags) {
  Comment("AllocateSeqOneByteString");
  CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
  VARIABLE(var_result, MachineRepresentation::kTagged);

  // Compute the SeqOneByteString size and check if it fits into new space.
  Label if_lengthiszero(this), if_sizeissmall(this),
      if_notsizeissmall(this, Label::kDeferred), if_join(this);
  GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);

  Node* raw_size = GetArrayAllocationSize(
      Signed(ChangeUint32ToWord(length)), UINT8_ELEMENTS, INTPTR_PARAMETERS,
      SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
  // raw_size includes the alignment mask as slack; mask it off to round the
  // allocation size down to object alignment.
  TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
         &if_sizeissmall, &if_notsizeissmall);

  BIND(&if_sizeissmall);
  {
    // Just allocate the SeqOneByteString in new space.
    TNode<Object> result =
        AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
    DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
    StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                   length, MachineRepresentation::kWord32);
    // Hash is computed lazily; start from the "empty" hash field.
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
                                   Int32Constant(String::kEmptyHashField),
                                   MachineRepresentation::kWord32);
    var_result.Bind(result);
    Goto(&if_join);
  }

  BIND(&if_notsizeissmall);
  {
    // We might need to allocate in large object space, go to the runtime.
    Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
                               ChangeUint32ToTagged(length));
    var_result.Bind(result);
    Goto(&if_join);
  }

  BIND(&if_lengthiszero);
  {
    // Zero-length strings are canonicalized to the empty_string root.
    var_result.Bind(LoadRoot(RootIndex::kempty_string));
    Goto(&if_join);
  }

  BIND(&if_join);
  return CAST(var_result.value());
}
3202 :
3203 896 : TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3204 : uint32_t length, AllocationFlags flags) {
3205 896 : Comment("AllocateSeqTwoByteString");
3206 896 : if (length == 0) {
3207 0 : return CAST(LoadRoot(RootIndex::kempty_string));
3208 : }
3209 896 : Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
3210 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3211 896 : StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3212 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3213 896 : Uint32Constant(length),
3214 896 : MachineRepresentation::kWord32);
3215 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3216 896 : Int32Constant(String::kEmptyHashField),
3217 896 : MachineRepresentation::kWord32);
3218 896 : return CAST(result);
3219 : }
3220 :
3221 1232 : TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3222 : Node* context, TNode<Uint32T> length, AllocationFlags flags) {
3223 : CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3224 1232 : Comment("AllocateSeqTwoByteString");
3225 1232 : VARIABLE(var_result, MachineRepresentation::kTagged);
3226 :
3227 : // Compute the SeqTwoByteString size and check if it fits into new space.
3228 2464 : Label if_lengthiszero(this), if_sizeissmall(this),
3229 2464 : if_notsizeissmall(this, Label::kDeferred), if_join(this);
3230 1232 : GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);
3231 :
3232 : Node* raw_size = GetArrayAllocationSize(
3233 2464 : Signed(ChangeUint32ToWord(length)), UINT16_ELEMENTS, INTPTR_PARAMETERS,
3234 1232 : SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
3235 1232 : TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3236 2464 : Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3237 1232 : &if_sizeissmall, &if_notsizeissmall);
3238 :
3239 1232 : BIND(&if_sizeissmall);
3240 : {
3241 : // Just allocate the SeqTwoByteString in new space.
3242 : TNode<Object> result =
3243 1232 : AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3244 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3245 1232 : StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3246 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3247 1232 : length, MachineRepresentation::kWord32);
3248 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3249 1232 : Int32Constant(String::kEmptyHashField),
3250 1232 : MachineRepresentation::kWord32);
3251 1232 : var_result.Bind(result);
3252 1232 : Goto(&if_join);
3253 : }
3254 :
3255 1232 : BIND(&if_notsizeissmall);
3256 : {
3257 : // We might need to allocate in large object space, go to the runtime.
3258 : Node* result = CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
3259 1232 : ChangeUint32ToTagged(length));
3260 1232 : var_result.Bind(result);
3261 1232 : Goto(&if_join);
3262 : }
3263 :
3264 1232 : BIND(&if_lengthiszero);
3265 : {
3266 1232 : var_result.Bind(LoadRoot(RootIndex::kempty_string));
3267 1232 : Goto(&if_join);
3268 : }
3269 :
3270 1232 : BIND(&if_join);
3271 2464 : return CAST(var_result.value());
3272 : }
3273 :
// Allocates a SlicedString with the given |map_root_index| (one-byte or
// two-byte variant) representing |length| characters of |parent| starting
// at |offset|.
TNode<String> CodeStubAssembler::AllocateSlicedString(RootIndex map_root_index,
                                                      TNode<Uint32T> length,
                                                      TNode<String> parent,
                                                      TNode<Smi> offset) {
  DCHECK(map_root_index == RootIndex::kSlicedOneByteStringMap ||
         map_root_index == RootIndex::kSlicedStringMap);
  Node* result = Allocate(SlicedString::kSize);
  DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
  StoreMapNoWriteBarrier(result, map_root_index);
  // Hash is computed lazily; start from the "empty" hash field. All stores
  // target the just-allocated object, so write barriers are skipped.
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
                                 Int32Constant(String::kEmptyHashField),
                                 MachineRepresentation::kWord32);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
                                 MachineRepresentation::kWord32);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
                                 MachineRepresentation::kTagged);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
                                 MachineRepresentation::kTagged);
  return CAST(result);
}
3294 :
// Allocates a one-byte SlicedString over |parent| (see AllocateSlicedString).
TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
    TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
  return AllocateSlicedString(RootIndex::kSlicedOneByteStringMap, length,
                              parent, offset);
}
3300 :
// Allocates a two-byte SlicedString over |parent| (see AllocateSlicedString).
TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
    TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
  return AllocateSlicedString(RootIndex::kSlicedStringMap, length, parent,
                              offset);
}
3306 :
// Allocates a ConsString with the given |map_root_index| (one-byte or
// two-byte variant) concatenating |first| and |second|. When the string is
// pretenured (old space), the child pointers are stored with a write
// barrier since the children may live in new space.
TNode<String> CodeStubAssembler::AllocateConsString(RootIndex map_root_index,
                                                    TNode<Uint32T> length,
                                                    TNode<String> first,
                                                    TNode<String> second,
                                                    AllocationFlags flags) {
  DCHECK(map_root_index == RootIndex::kConsOneByteStringMap ||
         map_root_index == RootIndex::kConsStringMap);
  Node* result = Allocate(ConsString::kSize, flags);
  DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
  StoreMapNoWriteBarrier(result, map_root_index);
  StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
                                 MachineRepresentation::kWord32);
  // Hash is computed lazily; start from the "empty" hash field.
  StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
                                 Int32Constant(String::kEmptyHashField),
                                 MachineRepresentation::kWord32);
  bool const new_space = !(flags & kPretenured);
  if (new_space) {
    StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
                                   MachineRepresentation::kTagged);
    StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
                                   MachineRepresentation::kTagged);
  } else {
    StoreObjectField(result, ConsString::kFirstOffset, first);
    StoreObjectField(result, ConsString::kSecondOffset, second);
  }
  return CAST(result);
}
3334 :
// Allocates a one-byte ConsString of |first| + |second|.
TNode<String> CodeStubAssembler::AllocateOneByteConsString(
    TNode<Uint32T> length, TNode<String> first, TNode<String> second,
    AllocationFlags flags) {
  return AllocateConsString(RootIndex::kConsOneByteStringMap, length, first,
                            second, flags);
}
3341 :
// Allocates a two-byte ConsString of |first| + |second|.
TNode<String> CodeStubAssembler::AllocateTwoByteConsString(
    TNode<Uint32T> length, TNode<String> first, TNode<String> second,
    AllocationFlags flags) {
  return AllocateConsString(RootIndex::kConsStringMap, length, first, second,
                            flags);
}
3348 :
// Allocates a ConsString for |left| + |right| with total |length|, picking
// the one-byte or two-byte cons map based on the operands' instance types
// (see the encoding-selection logic below).
TNode<String> CodeStubAssembler::NewConsString(TNode<Uint32T> length,
                                               TNode<String> left,
                                               TNode<String> right,
                                               AllocationFlags flags) {
  // Added string can be a cons string.
  Comment("Allocating ConsString");
  Node* left_instance_type = LoadInstanceType(left);
  Node* right_instance_type = LoadInstanceType(right);

  // Compute intersection and difference of instance types.
  Node* anded_instance_types =
      Word32And(left_instance_type, right_instance_type);
  Node* xored_instance_types =
      Word32Xor(left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintTag != 0);
  Label one_byte_map(this);
  Label two_byte_map(this);
  TVARIABLE(String, result);
  Label done(this, &result);
  GotoIf(IsSetWord32(anded_instance_types,
                     kStringEncodingMask | kOneByteDataHintTag),
         &one_byte_map);
  Branch(Word32NotEqual(Word32And(xored_instance_types,
                                  Int32Constant(kStringEncodingMask |
                                                kOneByteDataHintMask)),
                        Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
         &two_byte_map, &one_byte_map);

  BIND(&one_byte_map);
  Comment("One-byte ConsString");
  result = AllocateOneByteConsString(length, left, right, flags);
  Goto(&done);

  BIND(&two_byte_map);
  Comment("Two-byte ConsString");
  result = AllocateTwoByteConsString(length, left, right, flags);
  Goto(&done);

  BIND(&done);

  return result.value();
}
3402 :
// Convenience overload taking a compile-time constant minimum capacity.
TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
    int at_least_space_for) {
  return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
}
3407 :
// Allocates a NameDictionary large enough to hold |at_least_space_for|
// entries; the actual capacity is derived by HashTableComputeCapacity.
TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
    TNode<IntPtrT> at_least_space_for) {
  CSA_ASSERT(this, UintPtrLessThanOrEqual(
                       at_least_space_for,
                       IntPtrConstant(NameDictionary::kMaxCapacity)));
  TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
  return AllocateNameDictionaryWithCapacity(capacity);
}
3416 :
// Allocates and fully initializes a NameDictionary with exactly |capacity|
// entry slots. |capacity| must be a non-zero power of two. All element
// slots are filled with undefined.
TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
    TNode<IntPtrT> capacity) {
  CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
  CSA_ASSERT(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
  TNode<IntPtrT> length = EntryToIndex<NameDictionary>(capacity);
  TNode<IntPtrT> store_size = IntPtrAdd(
      TimesTaggedSize(length), IntPtrConstant(NameDictionary::kHeaderSize));

  TNode<NameDictionary> result =
      UncheckedCast<NameDictionary>(AllocateInNewSpace(store_size));
  Comment("Initialize NameDictionary");
  // Initialize FixedArray fields.
  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kNameDictionaryMap));
  StoreMapNoWriteBarrier(result, RootIndex::kNameDictionaryMap);
  StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
                                 SmiFromIntPtr(length));
  // Initialized HashTable fields.
  TNode<Smi> zero = SmiConstant(0);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
                         zero, SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
                         SmiTag(capacity), SKIP_WRITE_BARRIER);
  // Initialize Dictionary fields.
  TNode<HeapObject> filler = UndefinedConstant();
  StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
                         SmiConstant(PropertyDetails::kInitialIndex),
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
                         SmiConstant(PropertyArray::kNoHashSentinel),
                         SKIP_WRITE_BARRIER);

  // Initialize NameDictionary elements.
  // Compute the untagged address range of the element area and bulk-fill it
  // with undefined.
  TNode<WordT> result_word = BitcastTaggedToWord(result);
  TNode<WordT> start_address = IntPtrAdd(
      result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
                                      NameDictionary::kElementsStartIndex) -
                                  kHeapObjectTag));
  TNode<WordT> end_address = IntPtrAdd(
      result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
  return result;
}
3461 :
// Returns a copy of |dictionary|. Jumps to |large_object_fallback| if the
// dictionary's capacity exceeds the maximum for a regular-space allocation.
TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
    TNode<NameDictionary> dictionary, Label* large_object_fallback) {
  Comment("Copy boilerplate property dict");
  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)));
  GotoIf(UintPtrGreaterThan(
             capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
         large_object_fallback);
  TNode<NameDictionary> properties =
      AllocateNameDictionaryWithCapacity(capacity);
  TNode<IntPtrT> length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
  CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
                         SKIP_WRITE_BARRIER, INTPTR_PARAMETERS);
  return properties;
}
3477 :
// Allocates an OrderedHashMap/OrderedHashSet with the minimum capacity:
// bookkeeping fields zeroed, buckets set to kNotFound, and the data table
// filled with undefined. All sizes are compile-time constants.
template <typename CollectionType>
Node* CodeStubAssembler::AllocateOrderedHashTable() {
  static const int kCapacity = CollectionType::kMinCapacity;
  static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
  static const int kDataTableLength = kCapacity * CollectionType::kEntrySize;
  static const int kFixedArrayLength =
      CollectionType::HashTableStartIndex() + kBucketCount + kDataTableLength;
  static const int kDataTableStartIndex =
      CollectionType::HashTableStartIndex() + kBucketCount;

  STATIC_ASSERT(base::bits::IsPowerOfTwo(kCapacity));
  STATIC_ASSERT(kCapacity <= CollectionType::MaxCapacity());

  // Allocate the table and add the proper map.
  const ElementsKind elements_kind = HOLEY_ELEMENTS;
  TNode<IntPtrT> length_intptr = IntPtrConstant(kFixedArrayLength);
  TNode<Map> fixed_array_map =
      CAST(LoadRoot(CollectionType::GetMapRootIndex()));
  TNode<FixedArray> table =
      CAST(AllocateFixedArray(elements_kind, length_intptr,
                              kAllowLargeObjectAllocation, fixed_array_map));

  // Initialize the OrderedHashTable fields.
  const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
  StoreFixedArrayElement(table, CollectionType::NumberOfElementsIndex(),
                         SmiConstant(0), barrier_mode);
  StoreFixedArrayElement(table, CollectionType::NumberOfDeletedElementsIndex(),
                         SmiConstant(0), barrier_mode);
  StoreFixedArrayElement(table, CollectionType::NumberOfBucketsIndex(),
                         SmiConstant(kBucketCount), barrier_mode);

  // Fill the buckets with kNotFound.
  TNode<Smi> not_found = SmiConstant(CollectionType::kNotFound);
  STATIC_ASSERT(CollectionType::HashTableStartIndex() ==
                CollectionType::NumberOfBucketsIndex() + 1);
  STATIC_ASSERT((CollectionType::HashTableStartIndex() + kBucketCount) ==
                kDataTableStartIndex);
  for (int i = 0; i < kBucketCount; i++) {
    StoreFixedArrayElement(table, CollectionType::HashTableStartIndex() + i,
                           not_found, barrier_mode);
  }

  // Fill the data table with undefined.
  STATIC_ASSERT(kDataTableStartIndex + kDataTableLength == kFixedArrayLength);
  for (int i = 0; i < kDataTableLength; i++) {
    StoreFixedArrayElement(table, kDataTableStartIndex + i, UndefinedConstant(),
                           barrier_mode);
  }

  return table;
}

template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashMap>();
template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashSet>();
3532 :
// Allocates a SmallOrderedHashMap/SmallOrderedHashSet with the given
// |capacity| (a power of two below the type's maximum). The layout is:
// header, data table (capacity * kEntrySize tagged slots), then the byte
// hash table and chain table, rounded up to tagged-size alignment. The
// hash/chain bytes are memset to 0xFF; the data table is filled with
// the-hole.
template <typename CollectionType>
TNode<CollectionType> CodeStubAssembler::AllocateSmallOrderedHashTable(
    TNode<IntPtrT> capacity) {
  CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
  CSA_ASSERT(this, IntPtrLessThan(
                       capacity, IntPtrConstant(CollectionType::kMaxCapacity)));

  TNode<IntPtrT> data_table_start_offset =
      IntPtrConstant(CollectionType::DataTableStartOffset());

  TNode<IntPtrT> data_table_size = IntPtrMul(
      capacity, IntPtrConstant(CollectionType::kEntrySize * kTaggedSize));

  // One hash-table byte per kLoadFactor entries.
  TNode<Int32T> hash_table_size =
      Int32Div(TruncateIntPtrToInt32(capacity),
               Int32Constant(CollectionType::kLoadFactor));

  TNode<IntPtrT> hash_table_start_offset =
      IntPtrAdd(data_table_start_offset, data_table_size);

  TNode<IntPtrT> hash_table_and_chain_table_size =
      IntPtrAdd(ChangeInt32ToIntPtr(hash_table_size), capacity);

  TNode<IntPtrT> total_size =
      IntPtrAdd(hash_table_start_offset, hash_table_and_chain_table_size);

  // Round total_size up to a multiple of kTaggedSize.
  TNode<IntPtrT> total_size_word_aligned =
      IntPtrAdd(total_size, IntPtrConstant(kTaggedSize - 1));
  total_size_word_aligned = ChangeInt32ToIntPtr(
      Int32Div(TruncateIntPtrToInt32(total_size_word_aligned),
               Int32Constant(kTaggedSize)));
  total_size_word_aligned =
      UncheckedCast<IntPtrT>(TimesTaggedSize(total_size_word_aligned));

  // Allocate the table and add the proper map.
  TNode<Map> small_ordered_hash_map =
      CAST(LoadRoot(CollectionType::GetMapRootIndex()));
  TNode<Object> table_obj = AllocateInNewSpace(total_size_word_aligned);
  StoreMapNoWriteBarrier(table_obj, small_ordered_hash_map);
  TNode<CollectionType> table = UncheckedCast<CollectionType>(table_obj);

  // Initialize the SmallOrderedHashTable fields.
  StoreObjectByteNoWriteBarrier(
      table, CollectionType::NumberOfBucketsOffset(),
      Word32And(Int32Constant(0xFF), hash_table_size));
  StoreObjectByteNoWriteBarrier(table, CollectionType::NumberOfElementsOffset(),
                                Int32Constant(0));
  StoreObjectByteNoWriteBarrier(
      table, CollectionType::NumberOfDeletedElementsOffset(), Int32Constant(0));

  TNode<IntPtrT> table_address =
      IntPtrSub(BitcastTaggedToWord(table), IntPtrConstant(kHeapObjectTag));
  TNode<IntPtrT> hash_table_start_address =
      IntPtrAdd(table_address, hash_table_start_offset);

  // Initialize the HashTable part.
  Node* memset = ExternalConstant(ExternalReference::libc_memset_function());
  CallCFunction3(MachineType::AnyTagged(), MachineType::Pointer(),
                 MachineType::IntPtr(), MachineType::UintPtr(), memset,
                 hash_table_start_address, IntPtrConstant(0xFF),
                 hash_table_and_chain_table_size);

  // Initialize the DataTable part.
  TNode<HeapObject> filler = TheHoleConstant();
  TNode<WordT> data_table_start_address =
      IntPtrAdd(table_address, data_table_start_offset);
  TNode<WordT> data_table_end_address =
      IntPtrAdd(data_table_start_address, data_table_size);
  StoreFieldsNoWriteBarrier(data_table_start_address, data_table_end_address,
                            filler);

  return table;
}

template TNode<SmallOrderedHashMap>
CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
    TNode<IntPtrT> capacity);
template TNode<SmallOrderedHashSet>
CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
    TNode<IntPtrT> capacity);
3613 :
// Searches |table| for an entry whose key matches per |key_compare|,
// starting from the bucket derived from |hash|. On a match, the entry's
// index (relative to HashTableStartIndex) is stored in
// |entry_start_position| and control jumps to |entry_found|; if the bucket
// chain is exhausted, control jumps to |not_found|.
template <typename CollectionType>
void CodeStubAssembler::FindOrderedHashTableEntry(
    Node* table, Node* hash,
    const std::function<void(Node*, Label*, Label*)>& key_compare,
    Variable* entry_start_position, Label* entry_found, Label* not_found) {
  // Get the index of the bucket.
  Node* const number_of_buckets = SmiUntag(CAST(UnsafeLoadFixedArrayElement(
      CAST(table), CollectionType::NumberOfBucketsIndex())));
  // number_of_buckets is a power of two, so this masks hash into range.
  Node* const bucket =
      WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
  Node* const first_entry = SmiUntag(CAST(UnsafeLoadFixedArrayElement(
      CAST(table), bucket,
      CollectionType::HashTableStartIndex() * kTaggedSize)));

  // Walk the bucket chain.
  Node* entry_start;
  Label if_key_found(this);
  {
    VARIABLE(var_entry, MachineType::PointerRepresentation(), first_entry);
    Label loop(this, {&var_entry, entry_start_position}),
        continue_next_entry(this);
    Goto(&loop);
    BIND(&loop);

    // If the entry index is the not-found sentinel, we are done.
    GotoIf(
        WordEqual(var_entry.value(), IntPtrConstant(CollectionType::kNotFound)),
        not_found);

    // Make sure the entry index is within range.
    CSA_ASSERT(
        this,
        UintPtrLessThan(
            var_entry.value(),
            SmiUntag(SmiAdd(
                CAST(UnsafeLoadFixedArrayElement(
                    CAST(table), CollectionType::NumberOfElementsIndex())),
                CAST(UnsafeLoadFixedArrayElement(
                    CAST(table),
                    CollectionType::NumberOfDeletedElementsIndex()))))));

    // Compute the index of the entry relative to kHashTableStartIndex.
    entry_start =
        IntPtrAdd(IntPtrMul(var_entry.value(),
                            IntPtrConstant(CollectionType::kEntrySize)),
                  number_of_buckets);

    // Load the key from the entry.
    Node* const candidate_key = UnsafeLoadFixedArrayElement(
        CAST(table), entry_start,
        CollectionType::HashTableStartIndex() * kTaggedSize);

    key_compare(candidate_key, &if_key_found, &continue_next_entry);

    BIND(&continue_next_entry);
    // Load the index of the next entry in the bucket chain.
    var_entry.Bind(SmiUntag(CAST(UnsafeLoadFixedArrayElement(
        CAST(table), entry_start,
        (CollectionType::HashTableStartIndex() + CollectionType::kChainOffset) *
            kTaggedSize))));

    Goto(&loop);
  }

  BIND(&if_key_found);
  entry_start_position->Bind(entry_start);
  Goto(entry_found);
}

template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashMap>(
    Node* table, Node* hash,
    const std::function<void(Node*, Label*, Label*)>& key_compare,
    Variable* entry_start_position, Label* entry_found, Label* not_found);
template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashSet>(
    Node* table, Node* hash,
    const std::function<void(Node*, Label*, Label*)>& key_compare,
    Variable* entry_start_position, Label* entry_found, Label* not_found);
3691 :
// Allocates a Struct instance from |map|, sized by the map's instance size,
// with all fields after the header initialized to undefined.
Node* CodeStubAssembler::AllocateStruct(Node* map, AllocationFlags flags) {
  Comment("AllocateStruct");
  CSA_ASSERT(this, IsMap(map));
  TNode<IntPtrT> size = TimesTaggedSize(LoadMapInstanceSizeInWords(map));
  TNode<Object> object = Allocate(size, flags);
  StoreMapNoWriteBarrier(object, map);
  InitializeStructBody(object, map, size, Struct::kHeaderSize);
  return object;
}
3701 :
3702 8 : void CodeStubAssembler::InitializeStructBody(Node* object, Node* map,
3703 : Node* size, int start_offset) {
3704 : CSA_SLOW_ASSERT(this, IsMap(map));
3705 8 : Comment("InitializeStructBody");
3706 8 : Node* filler = UndefinedConstant();
3707 : // Calculate the untagged field addresses.
3708 8 : object = BitcastTaggedToWord(object);
3709 : Node* start_address =
3710 8 : IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
3711 : Node* end_address =
3712 8 : IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
3713 8 : StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3714 8 : }
3715 :
// Allocates a JSObject in new space from |map|. |properties| and
// |elements| may be nullptr, in which case the corresponding field gets the
// empty fixed array. Must not be used with JSFunction or global-object
// maps (asserted).
Node* CodeStubAssembler::AllocateJSObjectFromMap(
    Node* map, Node* properties, Node* elements, AllocationFlags flags,
    SlackTrackingMode slack_tracking_mode) {
  CSA_ASSERT(this, IsMap(map));
  CSA_ASSERT(this, Word32BinaryNot(IsJSFunctionMap(map)));
  CSA_ASSERT(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
                                                     JS_GLOBAL_OBJECT_TYPE)));
  TNode<IntPtrT> instance_size =
      TimesTaggedSize(LoadMapInstanceSizeInWords(map));
  TNode<Object> object = AllocateInNewSpace(instance_size, flags);
  StoreMapNoWriteBarrier(object, map);
  InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
                            slack_tracking_mode);
  return object;
}
3731 :
// Initializes the properties, elements, and body of a freshly allocated
// JSObject. nullptr for |properties|/|elements| means "use the empty fixed
// array". Delegates body initialization to the slack-tracking or
// no-slack-tracking variant per |slack_tracking_mode|.
void CodeStubAssembler::InitializeJSObjectFromMap(
    Node* object, Node* map, Node* instance_size, Node* properties,
    Node* elements, SlackTrackingMode slack_tracking_mode) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  // This helper assumes that the object is in new-space, as guarded by the
  // check in AllocateJSObjectFromMap.
  if (properties == nullptr) {
    CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
    StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
                         RootIndex::kEmptyFixedArray);
  } else {
    CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(properties),
                                       IsNameDictionary(properties)),
                              IsEmptyFixedArray(properties)));
    StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
                                   properties);
  }
  if (elements == nullptr) {
    StoreObjectFieldRoot(object, JSObject::kElementsOffset,
                         RootIndex::kEmptyFixedArray);
  } else {
    CSA_ASSERT(this, IsFixedArray(elements));
    StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
  }
  if (slack_tracking_mode == kNoSlackTracking) {
    InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
  } else {
    DCHECK_EQ(slack_tracking_mode, kWithSlackTracking);
    InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
  }
}
3763 :
// Fills the object body from |start_offset| up to |instance_size| with
// undefined. Asserts that the map's construction counter indicates slack
// tracking is not in progress.
void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
    Node* object, Node* map, Node* instance_size, int start_offset) {
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  CSA_ASSERT(
      this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
  InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
                           RootIndex::kUndefinedValue);
}
3772 :
3773 504 : void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
3774 : Node* object, Node* map, Node* instance_size) {
3775 : CSA_SLOW_ASSERT(this, IsMap(map));
3776 504 : Comment("InitializeJSObjectBodyNoSlackTracking");
3777 :
3778 : // Perform in-object slack tracking if requested.
3779 504 : int start_offset = JSObject::kHeaderSize;
3780 504 : Node* bit_field3 = LoadMapBitField3(map);
3781 1008 : Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
3782 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3783 1008 : GotoIf(IsSetWord32<Map::ConstructionCounterBits>(bit_field3),
3784 504 : &slack_tracking);
3785 504 : Comment("No slack tracking");
3786 504 : InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3787 504 : Goto(&end);
3788 :
3789 504 : BIND(&slack_tracking);
3790 : {
3791 504 : Comment("Decrease construction counter");
3792 : // Slack tracking is only done on initial maps.
3793 : CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
3794 : STATIC_ASSERT(Map::ConstructionCounterBits::kNext == 32);
3795 : Node* new_bit_field3 = Int32Sub(
3796 504 : bit_field3, Int32Constant(1 << Map::ConstructionCounterBits::kShift));
3797 : StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3,
3798 504 : MachineRepresentation::kWord32);
3799 : STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3800 :
3801 : // The object still has in-object slack therefore the |unsed_or_unused|
3802 : // field contain the "used" value.
3803 : Node* used_size = TimesTaggedSize(ChangeUint32ToWord(
3804 : LoadObjectField(map, Map::kUsedOrUnusedInstanceSizeInWordsOffset,
3805 504 : MachineType::Uint8())));
3806 :
3807 504 : Comment("iInitialize filler fields");
3808 : InitializeFieldsWithRoot(object, used_size, instance_size,
3809 504 : RootIndex::kOnePointerFillerMap);
3810 :
3811 504 : Comment("Initialize undefined fields");
3812 504 : InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
3813 504 : RootIndex::kUndefinedValue);
3814 :
3815 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3816 1008 : GotoIf(IsClearWord32<Map::ConstructionCounterBits>(new_bit_field3),
3817 504 : &complete);
3818 504 : Goto(&end);
3819 : }
3820 :
3821 : // Finalize the instance size.
3822 504 : BIND(&complete);
3823 : {
3824 : // ComplextInobjectSlackTracking doesn't allocate and thus doesn't need a
3825 : // context.
3826 : CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
3827 504 : NoContextConstant(), map);
3828 504 : Goto(&end);
3829 : }
3830 :
3831 1008 : BIND(&end);
3832 504 : }
3833 :
// Stores |value| into every tagged-size slot in the half-open range
// [start_address, end_address), skipping the write barrier. The caller is
// responsible for barrier safety (e.g. the target lives in new space).
void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
                                                  Node* end_address,
                                                  Node* value) {
  Comment("StoreFieldsNoWriteBarrier");
  // Both bounds must be slot-aligned since the loop advances by kTaggedSize.
  CSA_ASSERT(this, WordIsAligned(start_address, kTaggedSize));
  CSA_ASSERT(this, WordIsAligned(end_address, kTaggedSize));
  BuildFastLoop(
      start_address, end_address,
      [this, value](Node* current) {
        StoreNoWriteBarrier(MachineRepresentation::kTagged, current, value);
      },
      kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
}
3847 :
3848 168 : TNode<BoolT> CodeStubAssembler::IsValidFastJSArrayCapacity(
3849 : Node* capacity, ParameterMode capacity_mode) {
3850 : return UncheckedCast<BoolT>(
3851 : UintPtrLessThanOrEqual(ParameterToIntPtr(capacity, capacity_mode),
3852 168 : IntPtrConstant(JSArray::kMaxFastArrayLength)));
3853 : }
3854 :
3855 2912 : TNode<JSArray> CodeStubAssembler::AllocateJSArray(
3856 : TNode<Map> array_map, TNode<FixedArrayBase> elements, TNode<Smi> length,
3857 : Node* allocation_site) {
3858 2912 : Comment("begin allocation of JSArray passing in elements");
3859 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3860 :
3861 2912 : int base_size = JSArray::kSize;
3862 2912 : if (allocation_site != nullptr) {
3863 504 : base_size += AllocationMemento::kSize;
3864 : }
3865 :
3866 2912 : TNode<IntPtrT> size = IntPtrConstant(base_size);
3867 : TNode<JSArray> result =
3868 2912 : AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3869 2912 : StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, elements);
3870 2912 : return result;
3871 : }
3872 :
// Allocates a JSArray together with its backing elements store, normally
// folded into a single new-space allocation (JSArray [+ memento] followed by
// the FixedArray(Base)). For capacities that exceed the regular heap-object
// size limit (and when kAllowLargeObjectAllocation is set), the elements are
// instead allocated separately in large-object space and fully initialized
// before the JSArray is allocated. Returns {array, elements}; the elements of
// the folded allocation are left uninitialized and must be filled by the
// caller.
std::pair<TNode<JSArray>, TNode<FixedArrayBase>>
CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
    ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
    Node* allocation_site, Node* capacity, ParameterMode capacity_mode,
    AllocationFlags allocation_flags) {
  Comment("begin allocation of JSArray with elements");
  CHECK_EQ(allocation_flags & ~kAllowLargeObjectAllocation, 0);
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));

  int base_size = JSArray::kSize;
  if (allocation_site != nullptr) base_size += AllocationMemento::kSize;

  // The elements store begins right after the JSArray (and memento, if any).
  const int elements_offset = base_size;

  // Compute space for elements
  base_size += FixedArray::kHeaderSize;
  TNode<IntPtrT> size =
      ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);

  TVARIABLE(JSArray, array);
  TVARIABLE(FixedArrayBase, elements);

  Label out(this);

  // For very large arrays in which the requested allocation exceeds the
  // maximal size of a regular heap object, we cannot use the allocation
  // folding trick. Instead, we first allocate the elements in large object
  // space, and then allocate the JSArray (and possibly the allocation memento)
  // in new space.
  if (allocation_flags & kAllowLargeObjectAllocation) {
    Label next(this);
    GotoIf(IsRegularHeapObjectSize(size), &next);

    CSA_CHECK(this, IsValidFastJSArrayCapacity(capacity, capacity_mode));

    // Allocate and initialize the elements first. Full initialization is needed
    // because the upcoming JSArray allocation could trigger GC.
    elements =
        AllocateFixedArray(kind, capacity, capacity_mode, allocation_flags);

    if (IsDoubleElementsKind(kind)) {
      FillFixedDoubleArrayWithZero(CAST(elements.value()),
                                   ParameterToIntPtr(capacity, capacity_mode));
    } else {
      FillFixedArrayWithSmiZero(CAST(elements.value()),
                                ParameterToIntPtr(capacity, capacity_mode));
    }

    // The JSArray and possibly allocation memento next. Note that
    // allocation_flags are *not* passed on here and the resulting JSArray will
    // always be in new space.
    array =
        AllocateJSArray(array_map, elements.value(), length, allocation_site);

    Goto(&out);

    BIND(&next);
  }

  // Fold all objects into a single new space allocation.
  array =
      AllocateUninitializedJSArray(array_map, length, allocation_site, size);
  elements = UncheckedCast<FixedArrayBase>(
      InnerAllocate(array.value(), elements_offset));

  StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
                                 elements.value());

  // Setup elements object.
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kTaggedSize);
  RootIndex elements_map_index = IsDoubleElementsKind(kind)
                                     ? RootIndex::kFixedDoubleArrayMap
                                     : RootIndex::kFixedArrayMap;
  DCHECK(RootsTable::IsImmortalImmovable(elements_map_index));
  StoreMapNoWriteBarrier(elements.value(), elements_map_index);

  TNode<Smi> capacity_smi = ParameterToTagged(capacity, capacity_mode);
  CSA_ASSERT(this, SmiGreaterThan(capacity_smi, SmiConstant(0)));
  StoreObjectFieldNoWriteBarrier(elements.value(), FixedArray::kLengthOffset,
                                 capacity_smi);
  Goto(&out);

  BIND(&out);
  return {array.value(), elements.value()};
}
3958 :
// Allocates |size_in_bytes| in new space and initializes only the JSArray
// header (map, length, empty properties) plus the AllocationMemento when an
// |allocation_site| is given. The elements field and any trailing space are
// left uninitialized — the caller must set them before the next potential GC.
TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArray(
    TNode<Map> array_map, TNode<Smi> length, Node* allocation_site,
    TNode<IntPtrT> size_in_bytes) {
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));

  // Allocate space for the JSArray and the elements FixedArray in one go.
  TNode<Object> array = AllocateInNewSpace(size_in_bytes);

  StoreMapNoWriteBarrier(array, array_map);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);

  if (allocation_site != nullptr) {
    // The memento sits immediately after the JSArray header.
    InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
                                allocation_site);
  }

  return CAST(array);
}
3979 :
// Allocates a JSArray of elements kind |kind| with the given |capacity| and
// |length|, filling the elements with the hole. Three cases are handled:
// a statically-zero capacity uses the shared empty FixedArray; a non-zero
// constant capacity allocates directly; a dynamic capacity branches at
// runtime between the empty-array and allocating paths.
TNode<JSArray> CodeStubAssembler::AllocateJSArray(
    ElementsKind kind, TNode<Map> array_map, Node* capacity, TNode<Smi> length,
    Node* allocation_site, ParameterMode capacity_mode,
    AllocationFlags allocation_flags) {
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));

  TNode<JSArray> array;
  TNode<FixedArrayBase> elements;
  int capacity_as_constant;

  if (IsIntPtrOrSmiConstantZero(capacity, capacity_mode)) {
    // Array is empty. Use the shared empty fixed array instead of allocating a
    // new one.
    TNode<FixedArrayBase> empty_fixed_array =
        CAST(LoadRoot(RootIndex::kEmptyFixedArray));
    array =
        AllocateJSArray(array_map, empty_fixed_array, length, allocation_site);
  } else if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_as_constant,
                                            capacity_mode)) {
    CHECK_GT(capacity_as_constant, 0);
    // Allocate both array and elements object, and initialize the JSArray.
    std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
        kind, array_map, length, allocation_site, capacity, capacity_mode,
        allocation_flags);
    // Fill in the elements with holes.
    FillFixedArrayWithValue(kind, elements,
                            IntPtrOrSmiConstant(0, capacity_mode), capacity,
                            RootIndex::kTheHoleValue, capacity_mode);
  } else {
    // Capacity is only known at runtime: branch between the two cases above.
    Label out(this), empty(this), nonempty(this);
    TVARIABLE(JSArray, var_array);

    Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
           &empty, &nonempty);

    BIND(&empty);
    {
      // Array is empty. Use the shared empty fixed array instead of allocating
      // a new one.
      TNode<FixedArrayBase> empty_fixed_array =
          CAST(LoadRoot(RootIndex::kEmptyFixedArray));
      var_array = AllocateJSArray(array_map, empty_fixed_array, length,
                                  allocation_site);
      Goto(&out);
    }

    BIND(&nonempty);
    {
      // Allocate both array and elements object, and initialize the JSArray.
      TNode<JSArray> array;
      std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
          kind, array_map, length, allocation_site, capacity, capacity_mode,
          allocation_flags);
      var_array = array;
      // Fill in the elements with holes.
      FillFixedArrayWithValue(kind, elements,
                              IntPtrOrSmiConstant(0, capacity_mode), capacity,
                              RootIndex::kTheHoleValue, capacity_mode);
      Goto(&out);
    }

    BIND(&out);
    array = var_array.value();
  }

  return array;
}
4048 :
// Creates a new JSArray containing |count| elements of |array| starting at
// |begin|, copying the elements store and preserving the source's elements
// kind. |capacity| may be nullptr (derived from |count| by ExtractFixedArray).
Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
                                            Node* begin, Node* count,
                                            ParameterMode mode, Node* capacity,
                                            Node* allocation_site) {
  Node* original_array_map = LoadMap(array);
  Node* elements_kind = LoadMapElementsKind(original_array_map);

  // Use the canonical map for the Array's ElementsKind.
  Node* native_context = LoadNativeContext(context);
  TNode<Map> array_map = LoadJSArrayElementsMap(elements_kind, native_context);

  TNode<FixedArrayBase> new_elements = ExtractFixedArray(
      LoadElements(array), begin, count, capacity,
      ExtractFixedArrayFlag::kAllFixedArrays, mode, nullptr, elements_kind);

  TNode<Object> result = AllocateJSArray(
      array_map, new_elements, ParameterToTagged(count, mode), allocation_site);
  return result;
}
4068 :
// Clones |array| (elements store and length) into a new JSArray. When
// |convert_holes| requests it and the source has a holey elements kind, holes
// are replaced with undefined; if any hole was actually converted the result
// uses PACKED_ELEMENTS, otherwise the source's elements kind is kept.
Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
                                          ParameterMode mode,
                                          Node* allocation_site,
                                          HoleConversionMode convert_holes) {
  // TODO(dhai): we should be able to assert IsFastJSArray(array) here, but this
  // function is also used to copy boilerplates even when the no-elements
  // protector is invalid. This function should be renamed to reflect its uses.
  CSA_ASSERT(this, IsJSArray(array));

  Node* length = LoadJSArrayLength(array);
  Node* new_elements = nullptr;
  VARIABLE(var_new_elements, MachineRepresentation::kTagged);
  TVARIABLE(Int32T, var_elements_kind, LoadMapElementsKind(LoadMap(array)));

  Label allocate_jsarray(this), holey_extract(this);

  bool need_conversion =
      convert_holes == HoleConversionMode::kConvertToUndefined;
  if (need_conversion) {
    // We need to take care of holes, if the array is of holey elements kind.
    GotoIf(IsHoleyFastElementsKind(var_elements_kind.value()), &holey_extract);
  }

  // Simple extraction that preserves holes.
  new_elements =
      ExtractFixedArray(LoadElements(array), IntPtrOrSmiConstant(0, mode),
                        TaggedToParameter(length, mode), nullptr,
                        ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, mode,
                        nullptr, var_elements_kind.value());
  var_new_elements.Bind(new_elements);
  Goto(&allocate_jsarray);

  if (need_conversion) {
    BIND(&holey_extract);
    // Convert holes to undefined.
    TVARIABLE(BoolT, var_holes_converted, Int32FalseConstant());
    // Copy |array|'s elements store. The copy will be compatible with the
    // original elements kind unless there are holes in the source. Any holes
    // get converted to undefined, hence in that case the copy is compatible
    // only with PACKED_ELEMENTS and HOLEY_ELEMENTS, and we will choose
    // PACKED_ELEMENTS. Also, if we want to replace holes, we must not use
    // ExtractFixedArrayFlag::kDontCopyCOW.
    new_elements = ExtractFixedArray(
        LoadElements(array), IntPtrOrSmiConstant(0, mode),
        TaggedToParameter(length, mode), nullptr,
        ExtractFixedArrayFlag::kAllFixedArrays, mode, &var_holes_converted);
    var_new_elements.Bind(new_elements);
    // If the array type didn't change, use the original elements kind.
    GotoIfNot(var_holes_converted.value(), &allocate_jsarray);
    // Otherwise use PACKED_ELEMENTS for the target's elements kind.
    var_elements_kind = Int32Constant(PACKED_ELEMENTS);
    Goto(&allocate_jsarray);
  }

  BIND(&allocate_jsarray);
  // Use the canonical map for the chosen elements kind.
  Node* native_context = LoadNativeContext(context);
  TNode<Map> array_map =
      LoadJSArrayElementsMap(var_elements_kind.value(), native_context);

  TNode<Object> result = AllocateJSArray(
      array_map, CAST(var_new_elements.value()), CAST(length), allocation_site);
  return result;
}
4133 :
4134 14484 : TNode<FixedArrayBase> CodeStubAssembler::AllocateFixedArray(
4135 : ElementsKind kind, Node* capacity, ParameterMode mode,
4136 : AllocationFlags flags, SloppyTNode<Map> fixed_array_map) {
4137 14484 : Comment("AllocateFixedArray");
4138 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4139 : CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity,
4140 : IntPtrOrSmiConstant(0, mode), mode));
4141 :
4142 14484 : const intptr_t kMaxLength = IsDoubleElementsKind(kind)
4143 : ? FixedDoubleArray::kMaxLength
4144 14484 : : FixedArray::kMaxLength;
4145 : intptr_t capacity_constant;
4146 14484 : if (ToParameterConstant(capacity, &capacity_constant, mode)) {
4147 1416 : CHECK_LE(capacity_constant, kMaxLength);
4148 : } else {
4149 27552 : Label if_out_of_memory(this, Label::kDeferred), next(this);
4150 : Branch(IntPtrOrSmiGreaterThan(
4151 : capacity,
4152 : IntPtrOrSmiConstant(static_cast<int>(kMaxLength), mode), mode),
4153 13776 : &if_out_of_memory, &next);
4154 :
4155 13776 : BIND(&if_out_of_memory);
4156 : CallRuntime(Runtime::kFatalProcessOutOfMemoryInvalidArrayLength,
4157 13776 : NoContextConstant());
4158 13776 : Unreachable();
4159 :
4160 27552 : BIND(&next);
4161 : }
4162 :
4163 14484 : TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind, mode);
4164 :
4165 14484 : if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
4166 : // Allocate both array and elements object, and initialize the JSArray.
4167 14484 : Node* array = Allocate(total_size, flags);
4168 14484 : if (fixed_array_map != nullptr) {
4169 : // Conservatively only skip the write barrier if there are no allocation
4170 : // flags, this ensures that the object hasn't ended up in LOS. Note that the
4171 : // fixed array map is currently always immortal and technically wouldn't
4172 : // need the write barrier even in LOS, but it's better to not take chances
4173 : // in case this invariant changes later, since it's difficult to enforce
4174 : // locally here.
4175 6476 : if (flags == CodeStubAssembler::kNone) {
4176 2668 : StoreMapNoWriteBarrier(array, fixed_array_map);
4177 : } else {
4178 3808 : StoreMap(array, fixed_array_map);
4179 : }
4180 : } else {
4181 8008 : RootIndex map_index = IsDoubleElementsKind(kind)
4182 : ? RootIndex::kFixedDoubleArrayMap
4183 8008 : : RootIndex::kFixedArrayMap;
4184 : DCHECK(RootsTable::IsImmortalImmovable(map_index));
4185 8008 : StoreMapNoWriteBarrier(array, map_index);
4186 : }
4187 : StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
4188 14484 : ParameterToTagged(capacity, mode));
4189 14484 : return UncheckedCast<FixedArray>(array);
4190 : }
4191 :
// Copies |count| elements of |source| starting at |first| into a freshly
// allocated FixedArray of size |capacity|. Handles three situations:
// - a COW source may be returned as-is when kDontCopyCOW allows it and the
//   whole array is requested;
// - a copy that fits in new space uses a fast memcpy-style path;
// - larger copies fall back to a deferred old-space path that copies element
//   by element (or via memcpy for smi sources) with write barriers as needed.
// |var_holes_converted|, when non-null, is updated by CopyFixedArrayElements
// if holes were replaced with undefined.
TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
    Node* source, Node* first, Node* count, Node* capacity, Node* source_map,
    ElementsKind from_kind, AllocationFlags allocation_flags,
    ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
    HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted,
    Node* source_elements_kind) {
  DCHECK_NE(first, nullptr);
  DCHECK_NE(count, nullptr);
  DCHECK_NE(capacity, nullptr);
  DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
  CSA_ASSERT(this,
             WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity));
  CSA_ASSERT(this, WordEqual(source_map, LoadMap(source)));

  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_target_map, MachineRepresentation::kTagged, source_map);

  Label done(this, {&var_result}), is_cow(this),
      new_space_check(this, {&var_target_map});

  // If source_map is either FixedDoubleArrayMap, or FixedCOWArrayMap but
  // we can't just use COW, use FixedArrayMap as the target map. Otherwise, use
  // source_map as the target map.
  if (IsDoubleElementsKind(from_kind)) {
    CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
    var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
    Goto(&new_space_check);
  } else {
    CSA_ASSERT(this, Word32BinaryNot(IsFixedDoubleArrayMap(source_map)));
    Branch(WordEqual(var_target_map.value(),
                     LoadRoot(RootIndex::kFixedCOWArrayMap)),
           &is_cow, &new_space_check);

    BIND(&is_cow);
    {
      // |source| is a COW array, so we don't actually need to allocate a new
      // array unless:
      // 1) |extract_flags| forces us to, or
      // 2) we're asked to extract only part of the |source| (|first| != 0).
      if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
        Branch(WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), first),
               &new_space_check, [&] {
                 var_result.Bind(source);
                 Goto(&done);
               });
      } else {
        var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
        Goto(&new_space_check);
      }
    }
  }

  BIND(&new_space_check);
  {
    // Decide at stub-compile time whether an old-space fallback is needed:
    // it can be skipped when the caller guarantees new-space-only allocation
    // or when a constant count provably fits in new space.
    bool handle_old_space = true;
    if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
      handle_old_space = false;
      CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
                           count, FixedArray::kHeaderSize, parameter_mode)));
    } else {
      int constant_count;
      handle_old_space =
          !TryGetIntPtrOrSmiConstantValue(count, &constant_count,
                                          parameter_mode) ||
          (constant_count >
           FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
    }

    Label old_space(this, Label::kDeferred);
    if (handle_old_space) {
      GotoIfFixedArraySizeDoesntFitInNewSpace(
          capacity, &old_space, FixedArray::kHeaderSize, parameter_mode);
    }

    Comment("Copy FixedArray new space");
    // We use PACKED_ELEMENTS to tell AllocateFixedArray and
    // CopyFixedArrayElements that we want a FixedArray.
    const ElementsKind to_kind = PACKED_ELEMENTS;
    TNode<FixedArrayBase> to_elements =
        AllocateFixedArray(to_kind, capacity, parameter_mode,
                           AllocationFlag::kNone, var_target_map.value());
    var_result.Bind(to_elements);

    if (convert_holes == HoleConversionMode::kDontConvert &&
        !IsDoubleElementsKind(from_kind)) {
      // We can use CopyElements (memcpy) because we don't need to replace or
      // convert any values. Since {to_elements} is in new-space, CopyElements
      // will efficiently use memcpy.
      FillFixedArrayWithValue(to_kind, to_elements, count, capacity,
                              RootIndex::kTheHoleValue, parameter_mode);
      CopyElements(to_kind, to_elements, IntPtrConstant(0), CAST(source),
                   ParameterToIntPtr(first, parameter_mode),
                   ParameterToIntPtr(count, parameter_mode),
                   SKIP_WRITE_BARRIER);
    } else {
      CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
                             count, capacity, SKIP_WRITE_BARRIER,
                             parameter_mode, convert_holes,
                             var_holes_converted);
    }
    Goto(&done);

    if (handle_old_space) {
      BIND(&old_space);
      {
        Comment("Copy FixedArray old space");
        Label copy_one_by_one(this);

        // Try to use memcpy if we don't need to convert holes to undefined.
        if (convert_holes == HoleConversionMode::kDontConvert &&
            source_elements_kind != nullptr) {
          // Only try memcpy if we're not copying object pointers.
          GotoIfNot(IsFastSmiElementsKind(source_elements_kind),
                    &copy_one_by_one);

          const ElementsKind to_smi_kind = PACKED_SMI_ELEMENTS;
          to_elements =
              AllocateFixedArray(to_smi_kind, capacity, parameter_mode,
                                 allocation_flags, var_target_map.value());
          var_result.Bind(to_elements);

          FillFixedArrayWithValue(to_smi_kind, to_elements, count, capacity,
                                  RootIndex::kTheHoleValue, parameter_mode);
          // CopyElements will try to use memcpy if it's not conflicting with
          // GC. Otherwise it will copy elements by elements, but skip write
          // barriers (since we're copying smis to smis).
          CopyElements(to_smi_kind, to_elements, IntPtrConstant(0),
                       CAST(source), ParameterToIntPtr(first, parameter_mode),
                       ParameterToIntPtr(count, parameter_mode),
                       SKIP_WRITE_BARRIER);
          Goto(&done);
        } else {
          Goto(&copy_one_by_one);
        }

        BIND(&copy_one_by_one);
        {
          to_elements =
              AllocateFixedArray(to_kind, capacity, parameter_mode,
                                 allocation_flags, var_target_map.value());
          var_result.Bind(to_elements);
          // Old-space target: full write barriers are required here.
          CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
                                 count, capacity, UPDATE_WRITE_BARRIER,
                                 parameter_mode, convert_holes,
                                 var_holes_converted);
          Goto(&done);
        }
      }
    }
  }

  BIND(&done);
  return UncheckedCast<FixedArray>(var_result.value());
}
4346 :
// Copies |count| doubles of |from_array| starting at |first| into a new
// FixedDoubleArray, scanning backwards by raw byte offsets. If a hole is
// encountered mid-copy, the whole extraction is restarted through
// ExtractToFixedArray with hole-to-undefined conversion (yielding a tagged
// FixedArray) and |var_holes_converted| is set to true.
TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedDoubleArrayFillingHoles(
    Node* from_array, Node* first, Node* count, Node* capacity,
    Node* fixed_array_map, TVariable<BoolT>* var_holes_converted,
    AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags,
    ParameterMode mode) {
  DCHECK_NE(first, nullptr);
  DCHECK_NE(count, nullptr);
  DCHECK_NE(capacity, nullptr);
  DCHECK_NE(var_holes_converted, nullptr);
  CSA_ASSERT(this, IsFixedDoubleArrayMap(fixed_array_map));

  VARIABLE(var_result, MachineRepresentation::kTagged);
  const ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
  Node* to_elements = AllocateFixedArray(kind, capacity, mode, allocation_flags,
                                         fixed_array_map);
  var_result.Bind(to_elements);
  // We first try to copy the FixedDoubleArray to a new FixedDoubleArray.
  // |var_holes_converted| is set to False preliminarily.
  *var_holes_converted = Int32FalseConstant();

  // The construction of the loop and the offsets for double elements is
  // extracted from CopyFixedArrayElements.
  CSA_SLOW_ASSERT(this, MatchesParameterMode(count, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, kind));
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);

  Comment("[ ExtractFixedDoubleArrayFillingHoles");

  // This copy can trigger GC, so we pre-initialize the array with holes.
  FillFixedArrayWithValue(kind, to_elements, IntPtrOrSmiConstant(0, mode),
                          capacity, RootIndex::kTheHoleValue, mode);

  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
  Node* first_from_element_offset =
      ElementOffsetFromIndex(first, kind, mode, 0);
  // |limit_offset| is the (untagged) offset of element |first|; the loop
  // walks from the end of the range back down to it.
  Node* limit_offset = IntPtrAdd(first_from_element_offset,
                                 IntPtrConstant(first_element_offset));
  VARIABLE(var_from_offset, MachineType::PointerRepresentation(),
           ElementOffsetFromIndex(IntPtrOrSmiAdd(first, count, mode), kind,
                                  mode, first_element_offset));

  Label decrement(this, {&var_from_offset}), done(this);
  // Bias the destination pointer so source and destination share one offset.
  Node* to_array_adjusted =
      IntPtrSub(BitcastTaggedToWord(to_elements), first_from_element_offset);

  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);

  BIND(&decrement);
  {
    Node* from_offset =
        IntPtrSub(var_from_offset.value(), IntPtrConstant(kDoubleSize));
    var_from_offset.Bind(from_offset);

    Node* to_offset = from_offset;

    Label if_hole(this);

    Node* value = LoadElementAndPrepareForStore(
        from_array, var_from_offset.value(), kind, kind, &if_hole);

    StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
                        to_offset, value);

    Node* compare = WordNotEqual(from_offset, limit_offset);
    Branch(compare, &decrement, &done);

    BIND(&if_hole);
    // We are unlucky: there are holes! We need to restart the copy, this time
    // we will copy the FixedDoubleArray to a new FixedArray with undefined
    // replacing holes. We signal this to the caller through
    // |var_holes_converted|.
    *var_holes_converted = Int32TrueConstant();
    to_elements =
        ExtractToFixedArray(from_array, first, count, capacity, fixed_array_map,
                            kind, allocation_flags, extract_flags, mode,
                            HoleConversionMode::kConvertToUndefined);
    var_result.Bind(to_elements);
    Goto(&done);
  }

  BIND(&done);
  Comment("] ExtractFixedDoubleArrayFillingHoles");
  return UncheckedCast<FixedArrayBase>(var_result.value());
}
4432 :
// Top-level extraction entry point: copies a slice of |source| (FixedArray or
// FixedDoubleArray, as permitted by |extract_flags|) into a new backing store.
// |first|, |count| and |capacity| may be nullptr and are then defaulted to
// 0, length-first, and count respectively. A zero capacity returns the shared
// empty FixedArray. Requesting hole conversion (non-null
// |var_holes_converted|) routes double sources through the hole-filling path.
TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
    Node* source, Node* first, Node* count, Node* capacity,
    ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
    TVariable<BoolT>* var_holes_converted, Node* source_runtime_kind) {
  DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays ||
         extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays);
  // If we want to replace holes, ExtractFixedArrayFlag::kDontCopyCOW should not
  // be used, because that disables the iteration which detects holes.
  DCHECK_IMPLIES(var_holes_converted != nullptr,
                 !(extract_flags & ExtractFixedArrayFlag::kDontCopyCOW));
  HoleConversionMode convert_holes =
      var_holes_converted != nullptr ? HoleConversionMode::kConvertToUndefined
                                     : HoleConversionMode::kDontConvert;
  VARIABLE(var_result, MachineRepresentation::kTagged);
  // Large-object allocation is only permitted when new-space-only was not
  // requested.
  const AllocationFlags allocation_flags =
      (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
          ? CodeStubAssembler::kNone
          : CodeStubAssembler::kAllowLargeObjectAllocation;
  if (first == nullptr) {
    first = IntPtrOrSmiConstant(0, parameter_mode);
  }
  if (count == nullptr) {
    // Default: copy everything from |first| to the end of |source|.
    count = IntPtrOrSmiSub(
        TaggedToParameter(LoadFixedArrayBaseLength(source), parameter_mode),
        first, parameter_mode);

    CSA_ASSERT(
        this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant(0, parameter_mode),
                                         count, parameter_mode));
  }
  if (capacity == nullptr) {
    capacity = count;
  } else {
    CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
                         IntPtrOrSmiAdd(first, count, parameter_mode), capacity,
                         parameter_mode)));
  }

  Label if_fixed_double_array(this), empty(this), done(this, {&var_result});
  Node* source_map = LoadMap(source);
  GotoIf(WordEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity), &empty);

  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
    if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
      GotoIf(IsFixedDoubleArrayMap(source_map), &if_fixed_double_array);
    } else {
      CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
    }
  }

  if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
    // Here we can only get |source| as FixedArray, never FixedDoubleArray.
    // PACKED_ELEMENTS is used to signify that the source is a FixedArray.
    Node* to_elements = ExtractToFixedArray(
        source, first, count, capacity, source_map, PACKED_ELEMENTS,
        allocation_flags, extract_flags, parameter_mode, convert_holes,
        var_holes_converted, source_runtime_kind);
    var_result.Bind(to_elements);
    Goto(&done);
  }

  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
    BIND(&if_fixed_double_array);
    Comment("Copy FixedDoubleArray");

    if (convert_holes == HoleConversionMode::kConvertToUndefined) {
      Node* to_elements = ExtractFixedDoubleArrayFillingHoles(
          source, first, count, capacity, source_map, var_holes_converted,
          allocation_flags, extract_flags, parameter_mode);
      var_result.Bind(to_elements);
    } else {
      // We use PACKED_DOUBLE_ELEMENTS to signify that both the source and
      // the target are FixedDoubleArray. That it is PACKED or HOLEY does not
      // matter.
      ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
      TNode<FixedArrayBase> to_elements = AllocateFixedArray(
          kind, capacity, parameter_mode, allocation_flags, source_map);
      FillFixedArrayWithValue(kind, to_elements, count, capacity,
                              RootIndex::kTheHoleValue, parameter_mode);
      CopyElements(kind, to_elements, IntPtrConstant(0), CAST(source),
                   ParameterToIntPtr(first, parameter_mode),
                   ParameterToIntPtr(count, parameter_mode));
      var_result.Bind(to_elements);
    }

    Goto(&done);
  }

  BIND(&empty);
  {
    Comment("Copy empty array");

    var_result.Bind(EmptyFixedArrayConstant());
    Goto(&done);
  }

  BIND(&done);
  return UncheckedCast<FixedArray>(var_result.value());
}
4532 :
 : // Stores |length| into the combined length-and-hash field of
 : // |property_array|. No write barrier is needed because the stored value is
 : // a tagged Smi. Asserts 0 < length <= PropertyArray::LengthField::kMax.
4533 504 : void CodeStubAssembler::InitializePropertyArrayLength(Node* property_array,
4534 : Node* length,
4535 : ParameterMode mode) {
4536 : CSA_SLOW_ASSERT(this, IsPropertyArray(property_array));
4537 : CSA_ASSERT(
4538 : this, IntPtrOrSmiGreaterThan(length, IntPtrOrSmiConstant(0, mode), mode));
4539 : CSA_ASSERT(
4540 : this,
4541 : IntPtrOrSmiLessThanOrEqual(
4542 : length, IntPtrOrSmiConstant(PropertyArray::LengthField::kMax, mode),
4543 : mode));
 : // kTaggedSigned: the length is written as a Smi into the whole field,
 : // implicitly zeroing the hash part.
4544 : StoreObjectFieldNoWriteBarrier(
4545 : property_array, PropertyArray::kLengthAndHashOffset,
4546 504 : ParameterToTagged(length, mode), MachineRepresentation::kTaggedSigned);
4547 504 : }
4548 :
 : // Allocates a PropertyArray with room for |capacity_node| elements
 : // (must be > 0), installs its map and length field, and returns it.
 : // Element slots are NOT initialized here; callers must fill them.
4549 504 : Node* CodeStubAssembler::AllocatePropertyArray(Node* capacity_node,
4550 : ParameterMode mode,
4551 : AllocationFlags flags) {
4552 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
4553 : CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
4554 : IntPtrOrSmiConstant(0, mode), mode));
4555 : TNode<IntPtrT> total_size =
4556 504 : GetPropertyArrayAllocationSize(capacity_node, mode);
4557 :
4558 504 : TNode<Object> array = Allocate(total_size, flags);
4559 504 : RootIndex map_index = RootIndex::kPropertyArrayMap;
 : // The map is immortal/immovable, so storing it without a barrier is safe.
4560 : DCHECK(RootsTable::IsImmortalImmovable(map_index));
4561 504 : StoreMapNoWriteBarrier(array, map_index);
4562 504 : InitializePropertyArrayLength(array, capacity_node, mode);
4563 504 : return array;
4564 : }
4565 :
 : // Overwrites the slots [from_node, to_node) of a PropertyArray with the
 : // undefined sentinel. Undefined is immortal, so no write barrier is needed.
4566 504 : void CodeStubAssembler::FillPropertyArrayWithUndefined(Node* array,
4567 : Node* from_node,
4568 : Node* to_node,
4569 : ParameterMode mode) {
4570 : CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
4571 : CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
4572 : CSA_SLOW_ASSERT(this, IsPropertyArray(array));
 : // PACKED_ELEMENTS is only used to drive per-slot offset computation in
 : // the iteration helper; the array is a PropertyArray, not a FixedArray.
4573 504 : ElementsKind kind = PACKED_ELEMENTS;
4574 504 : Node* value = UndefinedConstant();
4575 : BuildFastFixedArrayForEach(array, kind, from_node, to_node,
4576 504 : [this, value](Node* array, Node* offset) {
4577 : StoreNoWriteBarrier(
4578 : MachineRepresentation::kTagged, array,
4579 504 : offset, value);
4580 504 : },
4581 504 : mode);
4582 504 : }
4583 :
 : // Fills slots [from_node, to_node) of |array| with the root value named by
 : // |value_root_index| (TheHole or Undefined only). For double-elements
 : // arrays the raw float64 payload of the root HeapNumber is stored instead
 : // of the tagged pointer.
4584 13152 : void CodeStubAssembler::FillFixedArrayWithValue(ElementsKind kind, Node* array,
4585 : Node* from_node, Node* to_node,
4586 : RootIndex value_root_index,
4587 : ParameterMode mode) {
4588 : CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
4589 : CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
4590 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
4591 : DCHECK(value_root_index == RootIndex::kTheHoleValue ||
4592 : value_root_index == RootIndex::kUndefinedValue);
4593 :
4594 : // Determine the value to initialize the {array} based
4595 : // on the {value_root_index} and the elements {kind}.
4596 13152 : Node* value = LoadRoot(value_root_index);
4597 13152 : if (IsDoubleElementsKind(kind)) {
 : // Unbox: FixedDoubleArray slots hold raw float64 bits, not pointers.
4598 2396 : value = LoadHeapNumberValue(value);
4599 : }
4600 :
 : // Roots are immortal (and doubles are raw bits), so all stores below can
 : // skip the write barrier.
4601 : BuildFastFixedArrayForEach(
4602 : array, kind, from_node, to_node,
4603 14644 : [this, value, kind](Node* array, Node* offset) {
4604 14644 : if (IsDoubleElementsKind(kind)) {
4605 : StoreNoWriteBarrier(MachineRepresentation::kFloat64, array, offset,
4606 2724 : value);
4607 : } else {
4608 : StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
4609 11920 : value);
4610 : }
4611 14644 : },
4612 13152 : mode);
4613 13152 : }
4614 :
 : // Writes the "hole" NaN bit pattern into slot |index| of a
 : // FixedDoubleArray. The hole is stored as raw integer words (not as a
 : // float64) to keep its signaling-NaN bit pattern intact.
4615 168 : void CodeStubAssembler::StoreFixedDoubleArrayHole(
4616 : TNode<FixedDoubleArray> array, Node* index, ParameterMode parameter_mode) {
4617 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index, parameter_mode));
4618 : Node* offset =
4619 : ElementOffsetFromIndex(index, PACKED_DOUBLE_ELEMENTS, parameter_mode,
4620 168 : FixedArray::kHeaderSize - kHeapObjectTag);
4621 : CSA_ASSERT(this, IsOffsetInBounds(
4622 : offset, LoadAndUntagFixedArrayBaseLength(array),
4623 : FixedDoubleArray::kHeaderSize, PACKED_DOUBLE_ELEMENTS));
4624 : Node* double_hole =
4625 840 : Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
4626 504 : : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
4627 : // TODO(danno): When we have a Float32/Float64 wrapper class that
4628 : // preserves double bits during manipulation, remove this code/change
4629 : // this to an indexed Float64 store.
4630 168 : if (Is64()) {
4631 : StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
4632 168 : double_hole);
4633 : } else {
 : // 32-bit targets: write the 8-byte hole as two 4-byte word stores.
4634 : StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
4635 0 : double_hole);
4636 : StoreNoWriteBarrier(MachineRepresentation::kWord32, array,
4637 0 : IntPtrAdd(offset, IntPtrConstant(kInt32Size)),
4638 0 : double_hole);
4639 : }
4640 168 : }
4641 :
 : // Zero-fills the entire payload of |array| via libc memset. Smi zero is
 : // all-zero bits, so memset(0) yields a valid array of Smi 0s with no
 : // write barriers. |length| must equal the array's length.
4642 1300 : void CodeStubAssembler::FillFixedArrayWithSmiZero(TNode<FixedArray> array,
4643 : TNode<IntPtrT> length) {
4644 : CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));
4645 :
4646 1300 : TNode<IntPtrT> byte_length = TimesTaggedSize(length);
4647 : CSA_ASSERT(this, UintPtrLessThan(length, byte_length));
4648 :
4649 : static const int32_t fa_base_data_offset =
4650 : FixedArray::kHeaderSize - kHeapObjectTag;
 : // Untagged pointer to the first element slot.
4651 : TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4652 1300 : IntPtrConstant(fa_base_data_offset));
4653 :
4654 : // Call out to memset to perform initialization.
4655 : TNode<ExternalReference> memset =
4656 1300 : ExternalConstant(ExternalReference::libc_memset_function());
4657 : STATIC_ASSERT(kSizetSize == kIntptrSize);
4658 : CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4659 : MachineType::IntPtr(), MachineType::UintPtr(), memset,
4660 1300 : backing_store, IntPtrConstant(0), byte_length);
4661 1300 : }
4662 :
 : // Zero-fills a FixedDoubleArray's payload via libc memset; all-zero bits
 : // are the float64 value +0.0, so no per-element stores are needed.
 : // |length| must equal the array's length.
4663 56 : void CodeStubAssembler::FillFixedDoubleArrayWithZero(
4664 : TNode<FixedDoubleArray> array, TNode<IntPtrT> length) {
4665 : CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));
4666 :
4667 56 : TNode<IntPtrT> byte_length = TimesDoubleSize(length);
4668 : CSA_ASSERT(this, UintPtrLessThan(length, byte_length));
4669 :
4670 : static const int32_t fa_base_data_offset =
4671 : FixedDoubleArray::kHeaderSize - kHeapObjectTag;
 : // Untagged pointer to the first element slot.
4672 : TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4673 56 : IntPtrConstant(fa_base_data_offset));
4674 :
4675 : // Call out to memset to perform initialization.
4676 : TNode<ExternalReference> memset =
4677 56 : ExternalConstant(ExternalReference::libc_memset_function());
4678 : STATIC_ASSERT(kSizetSize == kIntptrSize);
4679 : CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4680 : MachineType::IntPtr(), MachineType::UintPtr(), memset,
4681 56 : backing_store, IntPtrConstant(0), byte_length);
4682 56 : }
4683 :
 : // Branches to |interesting| if the page containing |object| has the
 : // "pointers from here are interesting" flag set (i.e. stores into the
 : // object may need a write barrier); otherwise falls through.
4684 3340 : void CodeStubAssembler::JumpIfPointersFromHereAreInteresting(
4685 : TNode<Object> object, Label* interesting) {
4686 3340 : Label finished(this);
4687 3340 : TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
4688 3340 : TNode<IntPtrT> object_page = PageFromAddress(object_word);
 : // Read the MemoryChunk flag word for the page the object lives on.
4689 : TNode<IntPtrT> page_flags = UncheckedCast<IntPtrT>(Load(
4690 3340 : MachineType::IntPtr(), object_page, IntPtrConstant(Page::kFlagsOffset)));
4691 : Branch(
4692 : WordEqual(WordAnd(page_flags,
4693 : IntPtrConstant(
4694 3340 : MemoryChunk::kPointersFromHereAreInterestingMask)),
4695 10020 : IntPtrConstant(0)),
4696 3340 : &finished, interesting);
4697 3340 : BIND(&finished);
4698 3340 : }
4699 :
 : // Moves |length| elements within a single backing store from |src_index|
 : // to |dst_index| (ranges may overlap). Fast path uses libc memmove; if the
 : // elements are tagged and the array's page requires write barriers, falls
 : // back to an element-wise copy with full barriers, walking forward or
 : // backward so overlapping ranges are handled correctly.
4700 392 : void CodeStubAssembler::MoveElements(ElementsKind kind,
4701 : TNode<FixedArrayBase> elements,
4702 : TNode<IntPtrT> dst_index,
4703 : TNode<IntPtrT> src_index,
4704 : TNode<IntPtrT> length) {
4705 392 : Label finished(this);
4706 784 : Label needs_barrier(this);
 : // Double arrays hold raw bits, so they can never need a write barrier.
4707 392 : const bool needs_barrier_check = !IsDoubleElementsKind(kind);
4708 :
4709 : DCHECK(IsFastElementsKind(kind));
4710 : CSA_ASSERT(this, IsFixedArrayWithKind(elements, kind));
4711 : CSA_ASSERT(this,
4712 : IntPtrLessThanOrEqual(IntPtrAdd(dst_index, length),
4713 : LoadAndUntagFixedArrayBaseLength(elements)));
4714 : CSA_ASSERT(this,
4715 : IntPtrLessThanOrEqual(IntPtrAdd(src_index, length),
4716 : LoadAndUntagFixedArrayBaseLength(elements)));
4717 :
4718 : // The write barrier can be ignored if {dst_elements} is in new space, or if
4719 : // the elements pointer is FixedDoubleArray.
4720 392 : if (needs_barrier_check) {
4721 224 : JumpIfPointersFromHereAreInteresting(elements, &needs_barrier);
4722 : }
4723 :
 : // Barrier-free fast path: a single memmove of the byte range.
4724 : const TNode<IntPtrT> source_byte_length =
4725 392 : IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
4726 : static const int32_t fa_base_data_offset =
4727 : FixedArrayBase::kHeaderSize - kHeapObjectTag;
4728 392 : TNode<IntPtrT> elements_intptr = BitcastTaggedToWord(elements);
4729 : TNode<IntPtrT> target_data_ptr =
4730 : IntPtrAdd(elements_intptr,
4731 : ElementOffsetFromIndex(dst_index, kind, INTPTR_PARAMETERS,
4732 392 : fa_base_data_offset));
4733 : TNode<IntPtrT> source_data_ptr =
4734 : IntPtrAdd(elements_intptr,
4735 : ElementOffsetFromIndex(src_index, kind, INTPTR_PARAMETERS,
4736 392 : fa_base_data_offset));
4737 : TNode<ExternalReference> memmove =
4738 392 : ExternalConstant(ExternalReference::libc_memmove_function());
4739 : CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4740 : MachineType::Pointer(), MachineType::UintPtr(), memmove,
4741 392 : target_data_ptr, source_data_ptr, source_byte_length);
4742 :
4743 392 : if (needs_barrier_check) {
4744 224 : Goto(&finished);
4745 :
4746 224 : BIND(&needs_barrier);
4747 : {
4748 224 : const TNode<IntPtrT> begin = src_index;
4749 224 : const TNode<IntPtrT> end = IntPtrAdd(begin, length);
4750 :
4751 : // If dst_index is less than src_index, then walk forward.
 : // {delta} is the byte distance from each source slot to its
 : // destination slot; the loop body re-stores each element at
 : // offset + delta with a full write barrier.
4752 : const TNode<IntPtrT> delta =
4753 : IntPtrMul(IntPtrSub(dst_index, begin),
4754 224 : IntPtrConstant(ElementsKindToByteSize(kind)));
4755 448 : auto loop_body = [&](Node* array, Node* offset) {
4756 448 : Node* const element = Load(MachineType::AnyTagged(), array, offset);
4757 448 : Node* const delta_offset = IntPtrAdd(offset, delta);
4758 448 : Store(array, delta_offset, element);
4759 672 : };
4760 :
4761 224 : Label iterate_forward(this);
4762 448 : Label iterate_backward(this);
 : // Direction choice prevents clobbering not-yet-copied elements in
 : // the overlapping region.
4763 448 : Branch(IntPtrLessThan(delta, IntPtrConstant(0)), &iterate_forward,
4764 224 : &iterate_backward);
4765 224 : BIND(&iterate_forward);
4766 : {
4767 : // Make a loop for the stores.
4768 : BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
4769 : INTPTR_PARAMETERS,
4770 224 : ForEachDirection::kForward);
4771 224 : Goto(&finished);
4772 : }
4773 :
4774 224 : BIND(&iterate_backward);
4775 : {
4776 : BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
4777 : INTPTR_PARAMETERS,
4778 224 : ForEachDirection::kReverse);
4779 224 : Goto(&finished);
4780 224 : }
4781 : }
4782 224 : BIND(&finished);
4783 392 : }
4784 392 : }
4785 :
 : // Copies |length| elements from |src_elements| (at |src_index|) into the
 : // distinct array |dst_elements| (at |dst_index|). Fast path is a libc
 : // memcpy; if the destination's page requires write barriers for tagged
 : // elements, falls back to an element-wise loop honoring |write_barrier|.
4786 3988 : void CodeStubAssembler::CopyElements(ElementsKind kind,
4787 : TNode<FixedArrayBase> dst_elements,
4788 : TNode<IntPtrT> dst_index,
4789 : TNode<FixedArrayBase> src_elements,
4790 : TNode<IntPtrT> src_index,
4791 : TNode<IntPtrT> length,
4792 : WriteBarrierMode write_barrier) {
4793 3988 : Label finished(this);
4794 7976 : Label needs_barrier(this);
 : // Double arrays hold raw bits, so they can never need a write barrier.
4795 3988 : const bool needs_barrier_check = !IsDoubleElementsKind(kind);
4796 :
4797 : DCHECK(IsFastElementsKind(kind));
4798 : CSA_ASSERT(this, IsFixedArrayWithKind(dst_elements, kind));
4799 : CSA_ASSERT(this, IsFixedArrayWithKind(src_elements, kind));
4800 : CSA_ASSERT(this, IntPtrLessThanOrEqual(
4801 : IntPtrAdd(dst_index, length),
4802 : LoadAndUntagFixedArrayBaseLength(dst_elements)));
4803 : CSA_ASSERT(this, IntPtrLessThanOrEqual(
4804 : IntPtrAdd(src_index, length),
4805 : LoadAndUntagFixedArrayBaseLength(src_elements)));
 : // Source and destination must not alias unless nothing is copied
 : // (memcpy on overlapping ranges would be undefined).
4806 : CSA_ASSERT(this, Word32Or(WordNotEqual(dst_elements, src_elements),
4807 : WordEqual(length, IntPtrConstant(0))));
4808 :
4809 : // The write barrier can be ignored if {dst_elements} is in new space, or if
4810 : // the elements pointer is FixedDoubleArray.
4811 3988 : if (needs_barrier_check) {
4812 3116 : JumpIfPointersFromHereAreInteresting(dst_elements, &needs_barrier);
4813 : }
4814 :
 : // Barrier-free fast path: one memcpy of the byte range.
4815 : TNode<IntPtrT> source_byte_length =
4816 3988 : IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
4817 : static const int32_t fa_base_data_offset =
4818 : FixedArrayBase::kHeaderSize - kHeapObjectTag;
4819 : TNode<IntPtrT> src_offset_start = ElementOffsetFromIndex(
4820 3988 : src_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
4821 : TNode<IntPtrT> dst_offset_start = ElementOffsetFromIndex(
4822 3988 : dst_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
4823 3988 : TNode<IntPtrT> src_elements_intptr = BitcastTaggedToWord(src_elements);
4824 : TNode<IntPtrT> source_data_ptr =
4825 3988 : IntPtrAdd(src_elements_intptr, src_offset_start);
4826 3988 : TNode<IntPtrT> dst_elements_intptr = BitcastTaggedToWord(dst_elements);
4827 : TNode<IntPtrT> dst_data_ptr =
4828 3988 : IntPtrAdd(dst_elements_intptr, dst_offset_start);
4829 : TNode<ExternalReference> memcpy =
4830 3988 : ExternalConstant(ExternalReference::libc_memcpy_function());
4831 : CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4832 : MachineType::Pointer(), MachineType::UintPtr(), memcpy,
4833 3988 : dst_data_ptr, source_data_ptr, source_byte_length);
4834 :
4835 3988 : if (needs_barrier_check) {
4836 3116 : Goto(&finished);
4837 :
4838 3116 : BIND(&needs_barrier);
4839 : {
4840 3116 : const TNode<IntPtrT> begin = src_index;
4841 3116 : const TNode<IntPtrT> end = IntPtrAdd(begin, length);
 : // Byte distance from a source slot to its destination slot.
4842 : const TNode<IntPtrT> delta =
4843 : IntPtrMul(IntPtrSub(dst_index, src_index),
4844 3116 : IntPtrConstant(ElementsKindToByteSize(kind)));
4845 : BuildFastFixedArrayForEach(
4846 : src_elements, kind, begin, end,
4847 3128 : [&](Node* array, Node* offset) {
4848 3128 : Node* const element = Load(MachineType::AnyTagged(), array, offset);
4849 3128 : Node* const delta_offset = IntPtrAdd(offset, delta);
4850 3128 : if (write_barrier == SKIP_WRITE_BARRIER) {
4851 : StoreNoWriteBarrier(MachineRepresentation::kTagged, dst_elements,
4852 2960 : delta_offset, element);
4853 : } else {
4854 168 : Store(dst_elements, delta_offset, element);
4855 : }
4856 3128 : },
4857 3116 : INTPTR_PARAMETERS, ForEachDirection::kForward);
4858 3116 : Goto(&finished);
4859 : }
4860 3116 : BIND(&finished);
4861 3988 : }
4862 3988 : }
4863 :
 : // Copies |element_count| elements starting at |first_element| from
 : // |from_array| into |to_array|, converting between element kinds
 : // (Smi/object/double) as needed. Iterates from the last element down to
 : // the first. Hole handling depends on the kinds and |convert_holes|:
 : // holes may be skipped (target pre-filled with holes), rewritten as
 : // double-holes, or left as the pre-filled undefined (with
 : // |var_holes_converted| signaled). Slots in [element_count, capacity) of
 : // the target are filled with holes.
4864 8404 : void CodeStubAssembler::CopyFixedArrayElements(
4865 : ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
4866 : Node* to_array, Node* first_element, Node* element_count, Node* capacity,
4867 : WriteBarrierMode barrier_mode, ParameterMode mode,
4868 : HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted) {
4869 : DCHECK_IMPLIES(var_holes_converted != nullptr,
4870 : convert_holes == HoleConversionMode::kConvertToUndefined);
4871 : CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
4872 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4873 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
4874 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
4875 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
4876 8404 : const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
4877 8404 : Comment("[ CopyFixedArrayElements");
4878 :
4879 : // Typed array elements are not supported.
4880 : DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
4881 : DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
4882 :
4883 8404 : Label done(this);
4884 8404 : bool from_double_elements = IsDoubleElementsKind(from_kind);
4885 8404 : bool to_double_elements = IsDoubleElementsKind(to_kind);
4886 : bool doubles_to_objects_conversion =
4887 8404 : IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
 : // double->object conversion allocates HeapNumbers, so barriers are
 : // mandatory there regardless of |barrier_mode|.
4888 : bool needs_write_barrier =
4889 12404 : doubles_to_objects_conversion ||
4890 10892 : (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
 : // When source and target slot sizes agree we can reuse the source
 : // offsets for the target (see {to_array_adjusted} below).
4891 : bool element_offset_matches =
4892 8404 : !needs_write_barrier && (Is64() || IsDoubleElementsKind(from_kind) ==
4893 8404 : IsDoubleElementsKind(to_kind));
4894 : Node* double_hole =
4895 42020 : Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
4896 25212 : : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
4897 :
4898 : // If copying might trigger a GC, we pre-initialize the FixedArray such that
4899 : // it's always in a consistent state.
4900 8404 : if (convert_holes == HoleConversionMode::kConvertToUndefined) {
4901 : DCHECK(IsObjectElementsKind(to_kind));
4902 : // Use undefined for the part that we copy and holes for the rest.
4903 : // Later if we run into a hole in the source we can just skip the writing
4904 : // to the target and are still guaranteed that we get an undefined.
4905 : FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
4906 224 : element_count, RootIndex::kUndefinedValue, mode);
4907 : FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
4908 224 : RootIndex::kTheHoleValue, mode);
4909 8180 : } else if (doubles_to_objects_conversion) {
4910 : // Pre-initialized the target with holes so later if we run into a hole in
4911 : // the source we can just skip the writing to the target.
4912 : FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
4913 1400 : capacity, RootIndex::kTheHoleValue, mode);
4914 6780 : } else if (element_count != capacity) {
4915 : FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
4916 4852 : RootIndex::kTheHoleValue, mode);
4917 : }
4918 :
4919 : Node* first_from_element_offset =
4920 8404 : ElementOffsetFromIndex(first_element, from_kind, mode, 0);
 : // {limit_offset} is the source offset one slot *below* the first copied
 : // element; the loop below runs until {var_from_offset} reaches it.
4921 : Node* limit_offset = IntPtrAdd(first_from_element_offset,
4922 8404 : IntPtrConstant(first_element_offset));
4923 16808 : VARIABLE(
4924 : var_from_offset, MachineType::PointerRepresentation(),
4925 : ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count, mode),
4926 : from_kind, mode, first_element_offset));
4927 : // This second variable is used only when the element sizes of source and
4928 : // destination arrays do not match.
4929 16808 : VARIABLE(var_to_offset, MachineType::PointerRepresentation());
4930 8404 : if (element_offset_matches) {
4931 4404 : var_to_offset.Bind(var_from_offset.value());
4932 : } else {
4933 : var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
4934 4000 : first_element_offset));
4935 : }
4936 :
4937 8404 : Variable* vars[] = {&var_from_offset, &var_to_offset, var_holes_converted};
4938 : int num_vars =
4939 8404 : var_holes_converted != nullptr ? arraysize(vars) : arraysize(vars) - 1;
4940 16808 : Label decrement(this, num_vars, vars);
4941 :
 : // With matching offsets, pre-biasing the target pointer by the first
 : // source offset lets source offsets address target slots directly.
4942 : Node* to_array_adjusted =
4943 : element_offset_matches
4944 26020 : ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
4945 25616 : : to_array;
4946 :
4947 8404 : Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
4948 :
4949 8404 : BIND(&decrement);
4950 : {
 : // Step one slot backwards in the source (and, if sizes differ, in
 : // the target as well).
4951 : Node* from_offset = IntPtrSub(
4952 : var_from_offset.value(),
4953 8404 : IntPtrConstant(from_double_elements ? kDoubleSize : kTaggedSize));
4954 8404 : var_from_offset.Bind(from_offset);
4955 :
4956 : Node* to_offset;
4957 8404 : if (element_offset_matches) {
4958 4404 : to_offset = from_offset;
4959 : } else {
4960 8000 : to_offset = IntPtrSub(
4961 : var_to_offset.value(),
4962 12000 : IntPtrConstant(to_double_elements ? kDoubleSize : kTaggedSize));
4963 4000 : var_to_offset.Bind(to_offset);
4964 : }
4965 :
4966 16808 : Label next_iter(this), store_double_hole(this), signal_hole(this);
4967 : Label* if_hole;
4968 8404 : if (convert_holes == HoleConversionMode::kConvertToUndefined) {
4969 : // The target elements array is already preinitialized with undefined
4970 : // so we only need to signal that a hole was found and continue the loop.
4971 224 : if_hole = &signal_hole;
4972 8180 : } else if (doubles_to_objects_conversion) {
4973 : // The target elements array is already preinitialized with holes, so we
4974 : // can just proceed with the next iteration.
4975 1400 : if_hole = &next_iter;
4976 6780 : } else if (IsDoubleElementsKind(to_kind)) {
4977 1692 : if_hole = &store_double_hole;
4978 : } else {
4979 : // In all the other cases don't check for holes and copy the data as is.
4980 5088 : if_hole = nullptr;
4981 : }
4982 :
4983 : Node* value = LoadElementAndPrepareForStore(
4984 8404 : from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
4985 :
4986 8404 : if (needs_write_barrier) {
4987 4000 : CHECK_EQ(to_array, to_array_adjusted);
4988 4000 : Store(to_array_adjusted, to_offset, value);
4989 4404 : } else if (to_double_elements) {
4990 : StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
4991 1692 : to_offset, value);
4992 : } else {
4993 : StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array_adjusted,
4994 2712 : to_offset, value);
4995 : }
4996 8404 : Goto(&next_iter);
4997 :
4998 8404 : if (if_hole == &store_double_hole) {
4999 1692 : BIND(&store_double_hole);
5000 : // Don't use doubles to store the hole double, since manipulating the
5001 : // signaling NaN used for the hole in C++, e.g. with bit_cast, will
5002 : // change its value on ia32 (the x87 stack is used to return values
5003 : // and stores to the stack silently clear the signalling bit).
5004 : //
5005 : // TODO(danno): When we have a Float32/Float64 wrapper class that
5006 : // preserves double bits during manipulation, remove this code/change
5007 : // this to an indexed Float64 store.
5008 1692 : if (Is64()) {
5009 : StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
5010 1692 : to_offset, double_hole);
5011 : } else {
5012 : StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
5013 0 : to_offset, double_hole);
5014 : StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
5015 0 : IntPtrAdd(to_offset, IntPtrConstant(kInt32Size)),
5016 0 : double_hole);
5017 : }
5018 1692 : Goto(&next_iter);
5019 6712 : } else if (if_hole == &signal_hole) {
5020 : // This case happens only when IsObjectElementsKind(to_kind).
5021 224 : BIND(&signal_hole);
5022 224 : if (var_holes_converted != nullptr) {
5023 112 : *var_holes_converted = Int32TrueConstant();
5024 : }
5025 224 : Goto(&next_iter);
5026 : }
5027 :
5028 8404 : BIND(&next_iter);
5029 8404 : Node* compare = WordNotEqual(from_offset, limit_offset);
5030 16808 : Branch(compare, &decrement, &done);
5031 : }
5032 :
5033 8404 : BIND(&done);
5034 16808 : Comment("] CopyFixedArrayElements");
5035 8404 : }
5036 :
 : // Casts |base| to FixedArray if its map is the FixedArray or FixedCOWArray
 : // map; otherwise jumps to |cast_fail|.
5037 976 : TNode<FixedArray> CodeStubAssembler::HeapObjectToFixedArray(
5038 : TNode<HeapObject> base, Label* cast_fail) {
5039 976 : Label fixed_array(this);
5040 976 : TNode<Map> map = LoadMap(base);
5041 976 : GotoIf(WordEqual(map, LoadRoot(RootIndex::kFixedArrayMap)), &fixed_array);
5042 976 : GotoIf(WordNotEqual(map, LoadRoot(RootIndex::kFixedCOWArrayMap)), cast_fail);
5043 976 : Goto(&fixed_array);
5044 976 : BIND(&fixed_array);
5045 976 : return UncheckedCast<FixedArray>(base);
5046 : }
5047 :
 : // Copies |property_count| values from |from_array| (a PropertyArray or the
 : // empty fixed array) into |to_array|. With DestroySource::kNo, mutable
 : // heap numbers are cloned (forcing write barriers); with kYes the source
 : // is consumed and, in debug builds, zapped with undefined afterwards.
5048 504 : void CodeStubAssembler::CopyPropertyArrayValues(Node* from_array,
5049 : Node* to_array,
5050 : Node* property_count,
5051 : WriteBarrierMode barrier_mode,
5052 : ParameterMode mode,
5053 : DestroySource destroy_source) {
5054 : CSA_SLOW_ASSERT(this, MatchesParameterMode(property_count, mode));
5055 : CSA_SLOW_ASSERT(this, Word32Or(IsPropertyArray(from_array),
5056 : IsEmptyFixedArray(from_array)));
5057 : CSA_SLOW_ASSERT(this, IsPropertyArray(to_array));
5058 504 : Comment("[ CopyPropertyArrayValues");
5059 :
5060 504 : bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;
5061 :
5062 504 : if (destroy_source == DestroySource::kNo) {
5063 : // PropertyArray may contain MutableHeapNumbers, which will be cloned on the
5064 : // heap, requiring a write barrier.
5065 56 : needs_write_barrier = true;
5066 : }
5067 :
5068 504 : Node* start = IntPtrOrSmiConstant(0, mode);
 : // PACKED_ELEMENTS only drives slot-size/offset math in the loop helper.
5069 504 : ElementsKind kind = PACKED_ELEMENTS;
5070 : BuildFastFixedArrayForEach(
5071 : from_array, kind, start, property_count,
5072 : [this, to_array, needs_write_barrier, destroy_source](Node* array,
5073 504 : Node* offset) {
5074 504 : Node* value = Load(MachineType::AnyTagged(), array, offset);
5075 :
5076 504 : if (destroy_source == DestroySource::kNo) {
5077 56 : value = CloneIfMutablePrimitive(CAST(value));
5078 : }
5079 :
5080 504 : if (needs_write_barrier) {
5081 56 : Store(to_array, offset, value);
5082 : } else {
5083 : StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
5084 448 : value);
5085 : }
5086 504 : },
5087 504 : mode);
5088 :
5089 : #ifdef DEBUG
5090 : // Zap {from_array} if the copying above has made it invalid.
5091 : if (destroy_source == DestroySource::kYes) {
5092 : Label did_zap(this);
5093 : GotoIf(IsEmptyFixedArray(from_array), &did_zap);
5094 : FillPropertyArrayWithUndefined(from_array, start, property_count, mode);
5095 :
5096 : Goto(&did_zap);
5097 : BIND(&did_zap);
5098 : }
5099 : #endif
5100 504 : Comment("] CopyPropertyArrayValues");
5101 504 : }
5102 :
 : // Copies |character_count| characters from |from_string| (starting at
 : // |from_index|) into |to_string| (starting at |to_index|), converting
 : // between one- and two-byte encodings. Only one-byte -> two-byte widening
 : // is allowed (never narrowing).
5103 2088 : void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
5104 : TNode<IntPtrT> from_index,
5105 : TNode<IntPtrT> to_index,
5106 : TNode<IntPtrT> character_count,
5107 : String::Encoding from_encoding,
5108 : String::Encoding to_encoding) {
5109 : // Cannot assert IsString(from_string) and IsString(to_string) here because
5110 : // CSA::SubString can pass in faked sequential strings when handling external
5111 : // subject strings.
5112 2088 : bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
5113 2088 : bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
5114 : DCHECK_IMPLIES(to_one_byte, from_one_byte);
5115 : Comment("CopyStringCharacters ",
5116 : from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", " -> ",
5117 2088 : to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");
5118 :
 : // Reuse the typed-array element kinds to get per-character sizes.
5119 2088 : ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
5120 2088 : ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
5121 : STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
5122 2088 : int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
5123 : Node* from_offset = ElementOffsetFromIndex(from_index, from_kind,
5124 2088 : INTPTR_PARAMETERS, header_size);
5125 : Node* to_offset =
5126 2088 : ElementOffsetFromIndex(to_index, to_kind, INTPTR_PARAMETERS, header_size);
5127 : Node* byte_count =
5128 2088 : ElementOffsetFromIndex(character_count, from_kind, INTPTR_PARAMETERS);
5129 2088 : Node* limit_offset = IntPtrAdd(from_offset, byte_count);
5130 :
5131 : // Prepare the fast loop
5132 : MachineType type =
5133 2088 : from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
5134 : MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
5135 2088 : : MachineRepresentation::kWord16;
5136 2088 : int from_increment = 1 << ElementsKindToShiftSize(from_kind);
5137 2088 : int to_increment = 1 << ElementsKindToShiftSize(to_kind);
5138 :
5139 2088 : VARIABLE(current_to_offset, MachineType::PointerRepresentation(), to_offset);
5140 4176 : VariableList vars({&current_to_offset}, zone());
5141 2088 : int to_index_constant = 0, from_index_constant = 0;
 : // When encodings match and the (constant) start indices coincide, the
 : // same offset can address both strings, saving a loop variable update.
5142 4236 : bool index_same = (from_encoding == to_encoding) &&
5143 4056 : (from_index == to_index ||
5144 2264 : (ToInt32Constant(from_index, from_index_constant) &&
5145 360 : ToInt32Constant(to_index, to_index_constant) &&
5146 2212 : from_index_constant == to_index_constant));
5147 : BuildFastLoop(vars, from_offset, limit_offset,
5148 : [this, from_string, to_string, &current_to_offset, to_increment,
5149 2088 : type, rep, index_same](Node* offset) {
5150 2088 : Node* value = Load(type, from_string, offset);
5151 : StoreNoWriteBarrier(
5152 : rep, to_string,
5153 2088 : index_same ? offset : current_to_offset.value(), value);
5154 2088 : if (!index_same) {
5155 1968 : Increment(&current_to_offset, to_increment);
5156 : }
5157 2088 : },
5158 4176 : from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
5159 2088 : }
5160 :
 : // Loads the element of |array| at byte |offset| (interpreted per
 : // |from_kind|) and converts it to a value storable in a |to_kind| array:
 : // double -> object boxes into a new HeapNumber, Smi -> double untags to
 : // float64. If |if_hole| is non-null, jumps there when the element is a
 : // hole instead of returning.
5161 8460 : Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
5162 : Node* offset,
5163 : ElementsKind from_kind,
5164 : ElementsKind to_kind,
5165 : Label* if_hole) {
5166 : CSA_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
5167 8460 : if (IsDoubleElementsKind(from_kind)) {
5168 : Node* value =
5169 2084 : LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
5170 2084 : if (!IsDoubleElementsKind(to_kind)) {
 : // Boxing may allocate, i.e. can trigger a GC.
5171 1512 : value = AllocateHeapNumberWithValue(value);
5172 : }
5173 2084 : return value;
5174 :
5175 : } else {
5176 6376 : Node* value = Load(MachineType::AnyTagged(), array, offset);
5177 6376 : if (if_hole) {
5178 1288 : GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
5179 : }
5180 6376 : if (IsDoubleElementsKind(to_kind)) {
5181 1176 : if (IsSmiElementsKind(from_kind)) {
5182 1176 : value = SmiToFloat64(value);
5183 : } else {
5184 0 : value = LoadHeapNumberValue(value);
5185 : }
5186 : }
5187 6376 : return value;
5188 : }
5189 : }
5190 :
 : // Computes the grown backing-store capacity:
 : // old_capacity + old_capacity / 2 + JSObject::kMinAddedElementsCapacity.
5191 2396 : Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
5192 : ParameterMode mode) {
5193 : CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
5194 2396 : Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
5195 2396 : Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
5196 : Node* padding =
5197 2396 : IntPtrOrSmiConstant(JSObject::kMinAddedElementsCapacity, mode);
5198 2396 : return IntPtrOrSmiAdd(new_capacity, padding, mode);
5199 : }
5200 :
 : // Convenience overload: reads the current capacity from |elements|,
 : // normalizes |key| and the capacity to the optimal parameter mode, and
 : // delegates to the main TryGrowElementsCapacity below.
5201 112 : Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
5202 : ElementsKind kind, Node* key,
5203 : Label* bailout) {
5204 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
5205 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
5206 : CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
5207 112 : Node* capacity = LoadFixedArrayBaseLength(elements);
5208 :
5209 112 : ParameterMode mode = OptimalParameterMode();
5210 112 : capacity = TaggedToParameter(capacity, mode);
5211 112 : key = TaggedToParameter(key, mode);
5212 :
5213 : return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
5214 112 : bailout);
5215 : }
5216 :
 : // Grows |object|'s elements so that index |key| fits, returning the new
 : // backing store. Bails out to |bailout| when the gap between |key| and the
 : // current capacity exceeds JSObject::kMaxGap (runtime must normalize to
 : // dictionary elements instead).
5217 1120 : Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
5218 : ElementsKind kind, Node* key,
5219 : Node* capacity,
5220 : ParameterMode mode,
5221 : Label* bailout) {
5222 1120 : Comment("TryGrowElementsCapacity");
5223 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
5224 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
5225 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
5226 : CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));
5227 :
5228 : // If the gap growth is too big, fall back to the runtime.
5229 1120 : Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
5230 1120 : Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
5231 1120 : GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);
5232 :
5233 : // Calculate the capacity of the new backing store.
 : // key + 1 is the minimum length that makes |key| a valid index.
5234 : Node* new_capacity = CalculateNewElementsCapacity(
5235 1120 : IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
5236 : return GrowElementsCapacity(object, elements, kind, kind, capacity,
5237 1120 : new_capacity, mode, bailout);
5238 : }
5239 :
 : // Allocates a new backing store of |new_capacity| elements of |to_kind|,
 : // copies the existing |capacity| elements over (converting from
 : // |from_kind| if needed), installs it on |object|, and returns it. Bails
 : // out to |bailout| when the allocation would not fit in a new-space page.
5240 5188 : Node* CodeStubAssembler::GrowElementsCapacity(
5241 : Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
5242 : Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
5243 5188 : Comment("[ GrowElementsCapacity");
5244 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
5245 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
5246 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
5247 : CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));
5248 :
5249 : // If size of the allocation for the new capacity doesn't fit in a page
5250 : // that we can bump-pointer allocate from, fall back to the runtime.
5251 5188 : int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
5252 : GotoIf(UintPtrOrSmiGreaterThanOrEqual(
5253 : new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
5254 5188 : bailout);
5255 :
5256 : // Allocate the new backing store.
5257 5188 : Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);
5258 :
5259 : // Copy the elements from the old elements store to the new.
5260 : // The size-check above guarantees that the |new_elements| is allocated
5261 : // in new space so we can skip the write barrier.
5262 : CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
5263 5188 : new_capacity, SKIP_WRITE_BARRIER, mode);
5264 :
 : // StoreObjectField emits the write barrier needed for the new pointer.
5265 5188 : StoreObjectField(object, JSObject::kElementsOffset, new_elements);
5266 5188 : Comment("] GrowElementsCapacity");
5267 5188 : return new_elements;
5268 : }
5269 :
// Writes an AllocationMemento immediately after |base| (which occupies
// |base_allocation_size| bytes) pointing at |allocation_site|, and bumps
// the site's pretenure create-count when pretenuring is enabled.
void CodeStubAssembler::InitializeAllocationMemento(Node* base,
                                                    Node* base_allocation_size,
                                                    Node* allocation_site) {
  Comment("[Initialize AllocationMemento");
  // The memento lives directly behind the object it tracks, inside the same
  // allocation.
  TNode<Object> memento =
      InnerAllocate(CAST(base), UncheckedCast<IntPtrT>(base_allocation_size));
  StoreMapNoWriteBarrier(memento, RootIndex::kAllocationMementoMap);
  StoreObjectFieldNoWriteBarrier(
      memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    // Count this allocation towards the site's pretenuring statistics.
    TNode<Int32T> count = UncheckedCast<Int32T>(LoadObjectField(
        allocation_site, AllocationSite::kPretenureCreateCountOffset,
        MachineType::Int32()));

    TNode<Int32T> incremented_count = Int32Add(count, Int32Constant(1));
    StoreObjectFieldNoWriteBarrier(
        allocation_site, AllocationSite::kPretenureCreateCountOffset,
        incremented_count, MachineRepresentation::kWord32);
  }
  Comment("]");
}
5291 :
// Returns |value| converted to a Float64 when it is a Smi or HeapNumber;
// jumps to |if_valueisnotnumber| for any other HeapObject without
// performing a conversion.
Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
                                            Label* if_valueisnotnumber) {
  Label out(this);
  VARIABLE(var_result, MachineRepresentation::kFloat64);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this), if_valueisnotsmi(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  BIND(&if_valueissmi);
  {
    // Convert the Smi {value}.
    var_result.Bind(SmiToFloat64(value));
    Goto(&out);
  }

  BIND(&if_valueisnotsmi);
  {
    // Check if {value} is a HeapNumber.
    Label if_valueisheapnumber(this);
    Branch(IsHeapNumber(value), &if_valueisheapnumber, if_valueisnotnumber);

    BIND(&if_valueisheapnumber);
    {
      // Load the floating point value.
      var_result.Bind(LoadHeapNumberValue(value));
      Goto(&out);
    }
  }
  BIND(&out);
  return var_result.value();
}
5324 :
// Converts an arbitrary tagged |value| to a Float64, calling the
// NonNumberToNumber builtin (which may invoke user code via valueOf etc.)
// for non-Number inputs and retrying until a Number is produced.
Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged);
  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  BIND(&loop);
  {
    Label if_valueisnotnumber(this, Label::kDeferred);

    // Load the current {value}.
    value = var_value.value();

    // Convert {value} to Float64 if it is a number and convert it to a number
    // otherwise.
    Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
    var_result.Bind(result);
    Goto(&done_loop);

    BIND(&if_valueisnotnumber);
    {
      // Convert the {value} to a Number first.
      var_value.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, value));
      Goto(&loop);
    }
  }
  BIND(&done_loop);
  return var_result.value();
}
5355 :
// Converts an arbitrary tagged |value| to a word32 via the ToNumber
// conversion (BigInts are not accepted; see TaggedToWord32OrBigInt for the
// ToNumeric variant).
Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
  VARIABLE(var_result, MachineRepresentation::kWord32);
  Label done(this);
  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumber>(context, value,
                                                            &done, &var_result);
  BIND(&done);
  return var_result.value();
}
5364 :
// Truncate {value} to word32 and jump to {if_number} if it is a Number
// (storing the result in {var_word32}), or find that it is a BigInt and
// jump to {if_bigint} (storing the BigInt in {var_bigint}). Performs the
// ToNumeric conversion on other inputs first.
void CodeStubAssembler::TaggedToWord32OrBigInt(Node* context, Node* value,
                                               Label* if_number,
                                               Variable* var_word32,
                                               Label* if_bigint,
                                               Variable* var_bigint) {
  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
      context, value, if_number, var_word32, if_bigint, var_bigint);
}
5375 :
// Truncate {value} to word32 and jump to {if_number} if it is a Number,
// or find that it is a BigInt and jump to {if_bigint}. In either case,
// store the accumulated binary-operation type feedback in {var_feedback}.
void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
    Node* context, Node* value, Label* if_number, Variable* var_word32,
    Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
      context, value, if_number, var_word32, if_bigint, var_bigint,
      var_feedback);
}
5386 :
// Shared implementation behind the Tagged->word32/BigInt helpers above.
// Loops until {value} is a Smi, HeapNumber or Oddball (all truncated into
// *var_word32, then jump to {if_number}) or — when converting to Numeric —
// a BigInt (stored in *var_bigint, then jump to {if_bigint}); any other
// input is first converted via the NonNumberToNumber/NonNumberToNumeric
// builtins and the loop repeats. Type feedback is accumulated in
// {var_feedback} when it is non-null.
template <Object::Conversion conversion>
void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
    Node* context, Node* value, Label* if_number, Variable* var_word32,
    Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
  DCHECK(var_word32->rep() == MachineRepresentation::kWord32);
  DCHECK(var_bigint == nullptr ||
         var_bigint->rep() == MachineRepresentation::kTagged);
  DCHECK(var_feedback == nullptr ||
         var_feedback->rep() == MachineRepresentation::kTaggedSigned);

  // We might need to loop after conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNone);
  Variable* loop_vars[] = {&var_value, var_feedback};
  // Only register {var_feedback} as a loop variable when the caller asked
  // for feedback.
  int num_vars =
      var_feedback != nullptr ? arraysize(loop_vars) : arraysize(loop_vars) - 1;
  Label loop(this, num_vars, loop_vars);
  Goto(&loop);
  BIND(&loop);
  {
    value = var_value.value();
    Label not_smi(this), is_heap_number(this), is_oddball(this),
        is_bigint(this);
    GotoIf(TaggedIsNotSmi(value), &not_smi);

    // {value} is a Smi.
    var_word32->Bind(SmiToInt32(value));
    CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
    Goto(if_number);

    BIND(&not_smi);
    Node* map = LoadMap(value);
    GotoIf(IsHeapNumberMap(map), &is_heap_number);
    Node* instance_type = LoadMapInstanceType(map);
    if (conversion == Object::Conversion::kToNumeric) {
      GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
    }

    // Not HeapNumber (or BigInt if conversion == kToNumeric).
    {
      if (var_feedback != nullptr) {
        // We do not require an Or with earlier feedback here because once we
        // convert the value to a Numeric, we cannot reach this path. We can
        // only reach this path on the first pass when the feedback is kNone.
        CSA_ASSERT(this, SmiEqual(CAST(var_feedback->value()),
                                  SmiConstant(BinaryOperationFeedback::kNone)));
      }
      GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
      // Not an oddball either -> convert.
      auto builtin = conversion == Object::Conversion::kToNumeric
                         ? Builtins::kNonNumberToNumeric
                         : Builtins::kNonNumberToNumber;
      var_value.Bind(CallBuiltin(builtin, context, value));
      OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
      Goto(&loop);

      BIND(&is_oddball);
      // Oddballs cache their numeric value in a field; no builtin call
      // needed.
      var_value.Bind(LoadObjectField(value, Oddball::kToNumberOffset));
      OverwriteFeedback(var_feedback,
                        BinaryOperationFeedback::kNumberOrOddball);
      Goto(&loop);
    }

    BIND(&is_heap_number);
    var_word32->Bind(TruncateHeapNumberValueToWord32(value));
    CombineFeedback(var_feedback, BinaryOperationFeedback::kNumber);
    Goto(if_number);

    if (conversion == Object::Conversion::kToNumeric) {
      BIND(&is_bigint);
      var_bigint->Bind(value);
      CombineFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
      Goto(if_bigint);
    }
  }
}
5463 :
5464 5320 : Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
5465 5320 : Node* value = LoadHeapNumberValue(object);
5466 5320 : return TruncateFloat64ToWord32(value);
5467 : }
5468 :
// Attempts to convert |number|'s float64 payload to a Smi; on success
// stores the Smi in |var_result_smi| and jumps to |if_smi|, otherwise
// falls through.
void CodeStubAssembler::TryHeapNumberToSmi(TNode<HeapNumber> number,
                                           TVariable<Smi>& var_result_smi,
                                           Label* if_smi) {
  TNode<Float64T> value = LoadHeapNumberValue(number);
  TryFloat64ToSmi(value, var_result_smi, if_smi);
}
5475 :
5476 5168 : void CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value,
5477 : TVariable<Smi>& var_result_smi,
5478 : Label* if_smi) {
5479 5168 : TNode<Int32T> value32 = RoundFloat64ToInt32(value);
5480 5168 : TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
5481 :
5482 10336 : Label if_int32(this), if_heap_number(this, Label::kDeferred);
5483 :
5484 5168 : GotoIfNot(Float64Equal(value, value64), &if_heap_number);
5485 5168 : GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
5486 10336 : Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
5487 20672 : Int32Constant(0)),
5488 5168 : &if_heap_number, &if_int32);
5489 :
5490 10336 : TVARIABLE(Number, var_result);
5491 5168 : BIND(&if_int32);
5492 : {
5493 5168 : if (SmiValuesAre32Bits()) {
5494 5168 : var_result_smi = SmiTag(ChangeInt32ToIntPtr(value32));
5495 : } else {
5496 : DCHECK(SmiValuesAre31Bits());
5497 0 : TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
5498 0 : TNode<BoolT> overflow = Projection<1>(pair);
5499 0 : GotoIf(overflow, &if_heap_number);
5500 0 : var_result_smi =
5501 0 : BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
5502 : }
5503 5168 : Goto(if_smi);
5504 : }
5505 10336 : BIND(&if_heap_number);
5506 5168 : }
5507 :
// Boxes |value| as a Number: a Smi when the float64 is exactly
// Smi-representable (per TryFloat64ToSmi), otherwise a fresh HeapNumber.
TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
    SloppyTNode<Float64T> value) {
  Label if_smi(this), done(this);
  TVARIABLE(Smi, var_smi_result);
  TVARIABLE(Number, var_result);
  TryFloat64ToSmi(value, var_smi_result, &if_smi);

  // Fall-through from TryFloat64ToSmi: not Smi-representable.
  var_result = AllocateHeapNumberWithValue(value);
  Goto(&done);

  BIND(&if_smi);
  {
    var_result = var_smi_result.value();
    Goto(&done);
  }
  BIND(&done);
  return var_result.value();
}
5526 :
// Boxes an int32 |value| as a Number. With 32-bit Smis every int32 fits in
// a Smi; with 31-bit Smis an overflow during Smi-tagging (doubling) falls
// back to allocating a HeapNumber.
TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(
    SloppyTNode<Int32T> value) {
  if (SmiValuesAre32Bits()) {
    return SmiTag(ChangeInt32ToIntPtr(value));
  }
  DCHECK(SmiValuesAre31Bits());
  TVARIABLE(Number, var_result);
  // Smi-tag by doubling; overflow means the value needs a HeapNumber.
  TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
  TNode<BoolT> overflow = Projection<1>(pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_overflow);
  {
    TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
    TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
    var_result = result;
    Goto(&if_join);
  }
  BIND(&if_notoverflow);
  {
    TNode<IntPtrT> almost_tagged_value =
        ChangeInt32ToIntPtr(Projection<0>(pair));
    TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
    var_result = result;
    Goto(&if_join);
  }
  BIND(&if_join);
  return var_result.value();
}
5557 :
// Boxes a uint32 |value| as a Number: a Smi when it fits in Smi::kMaxValue,
// otherwise a HeapNumber holding the (non-negative) float64 value.
TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(
    SloppyTNode<Uint32T> value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  TVARIABLE(Number, var_result);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(Uint32LessThan(Uint32Constant(Smi::kMaxValue), value), &if_overflow,
         &if_not_overflow);

  BIND(&if_not_overflow);
  {
    // The {value} is definitely in valid Smi range.
    var_result = SmiTag(Signed(ChangeUint32ToWord(value)));
  }
  Goto(&if_join);

  BIND(&if_overflow);
  {
    TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
    var_result = AllocateHeapNumberWithValue(float64_value);
  }
  Goto(&if_join);

  BIND(&if_join);
  return var_result.value();
}
5584 :
// Boxes an unsigned word-sized |value| as a Number: a Smi when it fits in
// Smi::kMaxValue, otherwise a HeapNumber.
TNode<Number> CodeStubAssembler::ChangeUintPtrToTagged(TNode<UintPtrT> value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  TVARIABLE(Number, var_result);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(UintPtrLessThan(UintPtrConstant(Smi::kMaxValue), value), &if_overflow,
         &if_not_overflow);

  BIND(&if_not_overflow);
  {
    // The {value} is definitely in valid Smi range.
    var_result = SmiTag(Signed(value));
  }
  Goto(&if_join);

  BIND(&if_overflow);
  {
    TNode<Float64T> float64_value = ChangeUintPtrToFloat64(value);
    var_result = AllocateHeapNumberWithValue(float64_value);
  }
  Goto(&if_join);

  BIND(&if_join);
  return var_result.value();
}
5610 :
// Implements the "coerce receiver to String" step used by String.prototype
// methods: returns |value| as a String, converting Smis and non-String
// HeapObjects via the NumberToString/ToString builtins, and throwing a
// TypeError naming |method_name| when |value| is null or undefined.
TNode<String> CodeStubAssembler::ToThisString(Node* context, Node* value,
                                              char const* method_name) {
  VARIABLE(var_value, MachineRepresentation::kTagged, value);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
      if_valueisstring(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
  BIND(&if_valueisnotsmi);
  {
    // Load the instance type of the {value}.
    Node* value_instance_type = LoadInstanceType(value);

    // Check if the {value} is already String.
    Label if_valueisnotstring(this, Label::kDeferred);
    Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
           &if_valueisnotstring);
    BIND(&if_valueisnotstring);
    {
      // Check if the {value} is null.
      Label if_valueisnullorundefined(this, Label::kDeferred);
      GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
      // Convert the {value} to a String.
      var_value.Bind(CallBuiltin(Builtins::kToString, context, value));
      Goto(&if_valueisstring);

      BIND(&if_valueisnullorundefined);
      {
        // The {value} is either null or undefined.
        ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
                       method_name);
      }
    }
  }
  BIND(&if_valueissmi);
  {
    // The {value} is a Smi, convert it to a String.
    var_value.Bind(CallBuiltin(Builtins::kNumberToString, context, value));
    Goto(&if_valueisstring);
  }
  BIND(&if_valueisstring);
  return CAST(var_value.value());
}
5654 :
// Converts a Number |value| to a uint32: Smis are untagged directly,
// HeapNumbers go through the float64 -> uint32 change operator.
TNode<Uint32T> CodeStubAssembler::ChangeNumberToUint32(TNode<Number> value) {
  TVARIABLE(Uint32T, var_result);
  Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this);
  Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber);
  BIND(&if_smi);
  {
    var_result = Unsigned(SmiToInt32(CAST(value)));
    Goto(&done);
  }
  BIND(&if_heapnumber);
  {
    var_result = ChangeFloat64ToUint32(LoadHeapNumberValue(CAST(value)));
    Goto(&done);
  }
  BIND(&done);
  return var_result.value();
}
5672 :
// Converts a Number |value| to a Float64: Smis are untagged and widened,
// HeapNumbers yield their float64 payload.
TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(
    SloppyTNode<Number> value) {
  // TODO(tebbi): Remove assert once argument is TNode instead of SloppyTNode.
  CSA_SLOW_ASSERT(this, IsNumber(value));
  TVARIABLE(Float64T, result);
  Label smi(this);
  Label done(this, &result);
  GotoIf(TaggedIsSmi(value), &smi);
  result = LoadHeapNumberValue(CAST(value));
  Goto(&done);

  BIND(&smi);
  {
    result = SmiToFloat64(CAST(value));
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}
5693 :
// Converts a non-negative Number |value| to an unsigned word. Negative Smi
// inputs are rejected by a slow-mode assert only; HeapNumber inputs go
// through the float64 -> uintptr change operator.
TNode<UintPtrT> CodeStubAssembler::ChangeNonnegativeNumberToUintPtr(
    TNode<Number> value) {
  TVARIABLE(UintPtrT, result);
  Label done(this, &result);
  Branch(TaggedIsSmi(value),
         [&] {
           TNode<Smi> value_smi = CAST(value);
           CSA_SLOW_ASSERT(this, SmiLessThan(SmiConstant(-1), value_smi));
           result = UncheckedCast<UintPtrT>(SmiToIntPtr(value_smi));
           Goto(&done);
         },
         [&] {
           TNode<HeapNumber> value_hn = CAST(value);
           result = ChangeFloat64ToUintPtr(LoadHeapNumberValue(value_hn));
           Goto(&done);
         });

  BIND(&done);
  return result.value();
}
5714 :
// Multiplies |value| by the system pointer size (shift by
// kSystemPointerSizeLog2).
TNode<WordT> CodeStubAssembler::TimesSystemPointerSize(
    SloppyTNode<WordT> value) {
  return WordShl(value, kSystemPointerSizeLog2);
}
5719 :
// Multiplies |value| by the tagged value size (shift by kTaggedSizeLog2).
TNode<WordT> CodeStubAssembler::TimesTaggedSize(SloppyTNode<WordT> value) {
  return WordShl(value, kTaggedSizeLog2);
}
5723 :
// Multiplies |value| by the size of a double (shift by kDoubleSizeLog2).
TNode<WordT> CodeStubAssembler::TimesDoubleSize(SloppyTNode<WordT> value) {
  return WordShl(value, kDoubleSizeLog2);
}
5727 :
// Implements the receiver coercion used by primitive wrapper methods
// (e.g. Symbol.prototype.toString): unwraps JSValue wrappers in a loop
// until |value| is a primitive of |primitive_type| and returns it;
// otherwise throws a "not generic" TypeError naming |method_name|.
Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
                                     PrimitiveType primitive_type,
                                     char const* method_name) {
  // We might need to loop once due to JSValue unboxing.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  Label loop(this, &var_value), done_loop(this),
      done_throw(this, Label::kDeferred);
  Goto(&loop);
  BIND(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject. Only kNumber accepts
    // Smi receivers; all other primitive types reject them.
    GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
                                   ? &done_loop
                                   : &done_throw);

    // Load the map of the {value}.
    Node* value_map = LoadMap(value);

    // Load the instance type of the {value}.
    Node* value_instance_type = LoadMapInstanceType(value_map);

    // Check if {value} is a JSValue.
    Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
    Branch(InstanceTypeEqual(value_instance_type, JS_VALUE_TYPE),
           &if_valueisvalue, &if_valueisnotvalue);

    BIND(&if_valueisvalue);
    {
      // Load the actual value from the {value} and retry.
      var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
      Goto(&loop);
    }

    BIND(&if_valueisnotvalue);
    {
      // Accept only the primitive matching {primitive_type}.
      switch (primitive_type) {
        case PrimitiveType::kBoolean:
          GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
          break;
        case PrimitiveType::kNumber:
          GotoIf(WordEqual(value_map, HeapNumberMapConstant()), &done_loop);
          break;
        case PrimitiveType::kString:
          GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
          break;
        case PrimitiveType::kSymbol:
          GotoIf(WordEqual(value_map, SymbolMapConstant()), &done_loop);
          break;
      }
      Goto(&done_throw);
    }
  }

  BIND(&done_throw);
  {
    const char* primitive_name = nullptr;
    switch (primitive_type) {
      case PrimitiveType::kBoolean:
        primitive_name = "Boolean";
        break;
      case PrimitiveType::kNumber:
        primitive_name = "Number";
        break;
      case PrimitiveType::kString:
        primitive_name = "String";
        break;
      case PrimitiveType::kSymbol:
        primitive_name = "Symbol";
        break;
    }
    CHECK_NOT_NULL(primitive_name);

    // The {value} is not a compatible receiver for this method.
    ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
                   primitive_name);
  }

  BIND(&done_loop);
  return var_value.value();
}
5811 :
// Throws an "incompatible method receiver" TypeError unless |value| is a
// HeapObject of exactly |instance_type|. Returns |value|'s map on success.
Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
                                                InstanceType instance_type,
                                                char const* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  VARIABLE(var_value_map, MachineRepresentation::kTagged);

  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
         &throw_exception);

  // The {value} is not a compatible receiver for this method.
  BIND(&throw_exception);
  ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
                 StringConstant(method_name), value);

  BIND(&out);
  return var_value_map.value();
}
5835 :
// Throws a TypeError with |msg_template| and |method_name| unless |value|
// is a JSReceiver. Returns |value|'s map on success.
Node* CodeStubAssembler::ThrowIfNotJSReceiver(Node* context, Node* value,
                                              MessageTemplate msg_template,
                                              const char* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  VARIABLE(var_value_map, MachineRepresentation::kTagged);

  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(IsJSReceiverInstanceType(value_instance_type), &out, &throw_exception);

  // The {value} is not a compatible receiver for this method.
  BIND(&throw_exception);
  ThrowTypeError(context, msg_template, method_name);

  BIND(&out);
  return var_value_map.value();
}
5857 :
// Calls the ThrowRangeError runtime function with |message| and up to three
// optional message arguments, then marks this code path as unreachable.
void CodeStubAssembler::ThrowRangeError(Node* context, MessageTemplate message,
                                        Node* arg0, Node* arg1, Node* arg2) {
  Node* template_index = SmiConstant(static_cast<int>(message));
  // Dispatch on how many arguments were actually supplied; the runtime call
  // is variadic but we only pass the non-null prefix.
  if (arg0 == nullptr) {
    CallRuntime(Runtime::kThrowRangeError, context, template_index);
  } else if (arg1 == nullptr) {
    CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0);
  } else if (arg2 == nullptr) {
    CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1);
  } else {
    CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1,
                arg2);
  }
  Unreachable();
}
5873 :
// Convenience overload: wraps the C-string arguments in String constants
// and forwards to the Node*-based ThrowTypeError below.
void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
                                       char const* arg0, char const* arg1) {
  Node* arg0_node = nullptr;
  if (arg0) arg0_node = StringConstant(arg0);
  Node* arg1_node = nullptr;
  if (arg1) arg1_node = StringConstant(arg1);
  ThrowTypeError(context, message, arg0_node, arg1_node);
}
5882 :
// Calls the ThrowTypeError runtime function with |message| and up to three
// optional message arguments, then marks this code path as unreachable.
void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
                                       Node* arg0, Node* arg1, Node* arg2) {
  Node* template_index = SmiConstant(static_cast<int>(message));
  // Dispatch on how many arguments were actually supplied; the runtime call
  // is variadic but we only pass the non-null prefix.
  if (arg0 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index);
  } else if (arg1 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0);
  } else if (arg2 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1);
  } else {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1,
                arg2);
  }
  Unreachable();
}
5898 :
// True iff |instance_type| equals the given InstanceType constant |type|.
TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(
    SloppyTNode<Int32T> instance_type, int type) {
  return Word32Equal(instance_type, Int32Constant(type));
}
5903 :
5904 1568 : TNode<BoolT> CodeStubAssembler::IsDictionaryMap(SloppyTNode<Map> map) {
5905 : CSA_SLOW_ASSERT(this, IsMap(map));
5906 1568 : Node* bit_field3 = LoadMapBitField3(map);
5907 1568 : return IsSetWord32<Map::IsDictionaryMapBit>(bit_field3);
5908 : }
5909 :
// True iff |map|'s extensible bit (in bit field 2) is set.
TNode<BoolT> CodeStubAssembler::IsExtensibleMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsExtensibleBit>(LoadMapBitField2(map));
}
5914 :
5915 0 : TNode<BoolT> CodeStubAssembler::IsExtensibleNonPrototypeMap(TNode<Map> map) {
5916 0 : int kMask = Map::IsExtensibleBit::kMask | Map::IsPrototypeMapBit::kMask;
5917 0 : int kExpected = Map::IsExtensibleBit::kMask;
5918 0 : return Word32Equal(Word32And(LoadMapBitField2(map), Int32Constant(kMask)),
5919 0 : Int32Constant(kExpected));
5920 : }
5921 :
// True iff |map|'s callable bit (in the main bit field) is set.
TNode<BoolT> CodeStubAssembler::IsCallableMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsCallableBit>(LoadMapBitField(map));
}
5926 :
// True iff |map| has been deprecated (bit in bit field 3).
TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsDeprecatedBit>(LoadMapBitField3(map));
}
5931 :
// True iff |map|'s undetectable bit (in the main bit field) is set.
TNode<BoolT> CodeStubAssembler::IsUndetectableMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsUndetectableBit>(LoadMapBitField(map));
}
5936 :
5937 4592 : TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
5938 4592 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5939 4592 : Node* cell = LoadRoot(RootIndex::kNoElementsProtector);
5940 4592 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5941 4592 : return WordEqual(cell_value, invalid);
5942 : }
5943 :
5944 448 : TNode<BoolT> CodeStubAssembler::IsArrayIteratorProtectorCellInvalid() {
5945 448 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5946 448 : Node* cell = LoadRoot(RootIndex::kArrayIteratorProtector);
5947 448 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5948 448 : return WordEqual(cell_value, invalid);
5949 : }
5950 :
5951 168 : TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
5952 168 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5953 168 : Node* cell = LoadRoot(RootIndex::kPromiseResolveProtector);
5954 168 : Node* cell_value = LoadObjectField(cell, Cell::kValueOffset);
5955 168 : return WordEqual(cell_value, invalid);
5956 : }
5957 :
5958 392 : TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
5959 392 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5960 392 : Node* cell = LoadRoot(RootIndex::kPromiseThenProtector);
5961 392 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5962 392 : return WordEqual(cell_value, invalid);
5963 : }
5964 :
5965 280 : TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
5966 280 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5967 280 : Node* cell = LoadRoot(RootIndex::kArraySpeciesProtector);
5968 280 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5969 280 : return WordEqual(cell_value, invalid);
5970 : }
5971 :
5972 224 : TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
5973 224 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5974 224 : Node* cell = LoadRoot(RootIndex::kTypedArraySpeciesProtector);
5975 224 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5976 224 : return WordEqual(cell_value, invalid);
5977 : }
5978 :
5979 952 : TNode<BoolT> CodeStubAssembler::IsRegExpSpeciesProtectorCellInvalid() {
5980 952 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5981 952 : Node* cell = LoadRoot(RootIndex::kRegExpSpeciesProtector);
5982 952 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5983 952 : return WordEqual(cell_value, invalid);
5984 : }
5985 :
5986 616 : TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
5987 616 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5988 616 : Node* cell = LoadRoot(RootIndex::kPromiseSpeciesProtector);
5989 616 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5990 616 : return WordEqual(cell_value, invalid);
5991 : }
5992 :
5993 3304 : TNode<BoolT> CodeStubAssembler::IsPrototypeInitialArrayPrototype(
5994 : SloppyTNode<Context> context, SloppyTNode<Map> map) {
5995 3304 : Node* const native_context = LoadNativeContext(context);
5996 : Node* const initial_array_prototype = LoadContextElement(
5997 3304 : native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
5998 3304 : Node* proto = LoadMapPrototype(map);
5999 3304 : return WordEqual(proto, initial_array_prototype);
6000 : }
6001 :
// True iff the prototype of |map|'s prototype is the %TypedArray%.prototype
// of the current native context (each typed-array kind has its own
// prototype whose parent is the shared %TypedArray%.prototype).
TNode<BoolT> CodeStubAssembler::IsPrototypeTypedArrayPrototype(
    SloppyTNode<Context> context, SloppyTNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const typed_array_prototype =
      LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
  TNode<HeapObject> proto = LoadMapPrototype(map);
  // A non-JSObject prototype (e.g. null) cannot have the right parent; use
  // null as a stand-in so the final comparison fails.
  TNode<HeapObject> proto_of_proto = Select<HeapObject>(
      IsJSObject(proto), [=] { return LoadMapPrototype(LoadMap(proto)); },
      [=] { return NullConstant(); });
  return WordEqual(proto_of_proto, typed_array_prototype);
}
6013 :
// The four predicates below identify the distinct arguments-object maps by
// identity comparison against the corresponding native-context slot.

// True iff |map| is the fast aliased (sloppy, mapped) arguments map.
TNode<BoolT> CodeStubAssembler::IsFastAliasedArgumentsMap(
    TNode<Context> context, TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map = LoadContextElement(
      native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}

// True iff |map| is the slow (dictionary-mode) aliased arguments map.
TNode<BoolT> CodeStubAssembler::IsSlowAliasedArgumentsMap(
    TNode<Context> context, TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map = LoadContextElement(
      native_context, Context::SLOW_ALIASED_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}

// True iff |map| is the unmapped sloppy arguments map.
TNode<BoolT> CodeStubAssembler::IsSloppyArgumentsMap(TNode<Context> context,
                                                     TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map =
      LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}

// True iff |map| is the strict-mode arguments map.
TNode<BoolT> CodeStubAssembler::IsStrictArgumentsMap(TNode<Context> context,
                                                     TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map =
      LoadContextElement(native_context, Context::STRICT_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}
6045 :
// True iff |object| is callable. Unlike IsCallable below, this accepts any
// tagged value: Smis are never callable, heap objects are checked via their
// map's callable bit.
TNode<BoolT> CodeStubAssembler::TaggedIsCallable(TNode<Object> object) {
  return Select<BoolT>(
      TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
      [=] {
        return IsCallableMap(LoadMap(UncheckedCast<HeapObject>(object)));
      });
}

// True iff the heap object's map has the callable bit set.
TNode<BoolT> CodeStubAssembler::IsCallable(SloppyTNode<HeapObject> object) {
  return IsCallableMap(LoadMap(object));
}

// True iff |object| is a Cell (map identity check against the cell map root).
TNode<BoolT> CodeStubAssembler::IsCell(SloppyTNode<HeapObject> object) {
  return WordEqual(LoadMap(object), LoadRoot(RootIndex::kCellMap));
}

// True iff |object| is a Code object.
TNode<BoolT> CodeStubAssembler::IsCode(SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, CODE_TYPE);
}

// True iff |map| has the is-constructor bit set.
TNode<BoolT> CodeStubAssembler::IsConstructorMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsConstructorBit>(LoadMapBitField(map));
}

// True iff the heap object's map has the is-constructor bit set.
TNode<BoolT> CodeStubAssembler::IsConstructor(SloppyTNode<HeapObject> object) {
  return IsConstructorMap(LoadMap(object));
}

// True iff |map| describes a function that carries a prototype slot
// (Map::HasPrototypeSlotBit).
TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(
    SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::HasPrototypeSlotBit>(LoadMapBitField(map));
}
6080 :
// The range checks below rely on the global ordering of instance types in
// the InstanceType enum.

// True iff |instance_type| is a "special receiver" (requires non-standard
// property handling); these types are laid out at or below
// LAST_SPECIAL_RECEIVER_TYPE.
TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
    TNode<Int32T> instance_type) {
  STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
  return Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
}

// True iff |instance_type| denotes a receiver with custom elements behavior.
TNode<BoolT> CodeStubAssembler::IsCustomElementsReceiverInstanceType(
    TNode<Int32T> instance_type) {
  return Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER));
}

// True iff |instance_type| is any string type; string types occupy the
// lowest range of the enum, starting at FIRST_TYPE.
TNode<BoolT> CodeStubAssembler::IsStringInstanceType(
    SloppyTNode<Int32T> instance_type) {
  STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
  return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
}
6099 :
// String instance types encode representation (seq/cons/external/...) and
// encoding (one/two byte) as bit fields; the predicates below mask and
// compare those fields. All of them require a string instance type.

// True iff the string's stored encoding is one-byte.
TNode<BoolT> CodeStubAssembler::IsOneByteStringInstanceType(
    SloppyTNode<Int32T> instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  return Word32Equal(
      Word32And(instance_type, Int32Constant(kStringEncodingMask)),
      Int32Constant(kOneByteStringTag));
}

// True iff the string is one-byte, or is two-byte but flagged via the
// one-byte data hint as containing only one-byte characters.
TNode<BoolT> CodeStubAssembler::HasOnlyOneByteChars(
    TNode<Int32T> instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  return IsSetWord32(instance_type, kStringEncodingMask | kOneByteDataHintMask);
}

// True iff the string representation is sequential (flat in-heap storage).
TNode<BoolT> CodeStubAssembler::IsSequentialStringInstanceType(
    SloppyTNode<Int32T> instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  return Word32Equal(
      Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
      Int32Constant(kSeqStringTag));
}

// True iff the string representation is a cons string (rope of two parts).
TNode<BoolT> CodeStubAssembler::IsConsStringInstanceType(
    SloppyTNode<Int32T> instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  return Word32Equal(
      Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
      Int32Constant(kConsStringTag));
}

// True iff the string is indirect (cons or sliced); these share a single
// tag bit, so a mask suffices.
TNode<BoolT> CodeStubAssembler::IsIndirectStringInstanceType(
    SloppyTNode<Int32T> instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  STATIC_ASSERT(kIsIndirectStringMask == 0x1);
  STATIC_ASSERT(kIsIndirectStringTag == 0x1);
  return UncheckedCast<BoolT>(
      Word32And(instance_type, Int32Constant(kIsIndirectStringMask)));
}

// True iff the string's backing store lives outside the V8 heap.
TNode<BoolT> CodeStubAssembler::IsExternalStringInstanceType(
    SloppyTNode<Int32T> instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  return Word32Equal(
      Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
      Int32Constant(kExternalStringTag));
}

// True iff the string is an external string without a cached data pointer.
TNode<BoolT> CodeStubAssembler::IsUncachedExternalStringInstanceType(
    SloppyTNode<Int32T> instance_type) {
  CSA_ASSERT(this, IsStringInstanceType(instance_type));
  STATIC_ASSERT(kUncachedExternalStringTag != 0);
  return IsSetWord32(instance_type, kUncachedExternalStringMask);
}
6153 :
// True iff |instance_type| is a JSReceiver (object or proxy); receivers
// occupy the top of the instance-type enum, so one comparison suffices.
TNode<BoolT> CodeStubAssembler::IsJSReceiverInstanceType(
    SloppyTNode<Int32T> instance_type) {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return Int32GreaterThanOrEqual(instance_type,
                                 Int32Constant(FIRST_JS_RECEIVER_TYPE));
}

// Map-level variant of the above.
TNode<BoolT> CodeStubAssembler::IsJSReceiverMap(SloppyTNode<Map> map) {
  return IsJSReceiverInstanceType(LoadMapInstanceType(map));
}

// Object-level variant of the above.
TNode<BoolT> CodeStubAssembler::IsJSReceiver(SloppyTNode<HeapObject> object) {
  return IsJSReceiverMap(LoadMap(object));
}

// True iff |object| is null or a JSReceiver (the valid prototype values).
TNode<BoolT> CodeStubAssembler::IsNullOrJSReceiver(
    SloppyTNode<HeapObject> object) {
  return UncheckedCast<BoolT>(Word32Or(IsJSReceiver(object), IsNull(object)));
}

// True iff |value| is the null or undefined oddball.
TNode<BoolT> CodeStubAssembler::IsNullOrUndefined(SloppyTNode<Object> value) {
  return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
}
6177 :
// True iff |instance_type| is JS_GLOBAL_PROXY_TYPE.
TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
}

// True iff |instance_type| is a JSObject (the subrange of receivers at the
// top of the enum; excludes JSProxy).
TNode<BoolT> CodeStubAssembler::IsJSObjectInstanceType(
    SloppyTNode<Int32T> instance_type) {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return Int32GreaterThanOrEqual(instance_type,
                                 Int32Constant(FIRST_JS_OBJECT_TYPE));
}

// Map-level variant of IsJSObjectInstanceType.
TNode<BoolT> CodeStubAssembler::IsJSObjectMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsJSObjectInstanceType(LoadMapInstanceType(map));
}

// Object-level variant of IsJSObjectInstanceType.
TNode<BoolT> CodeStubAssembler::IsJSObject(SloppyTNode<HeapObject> object) {
  return IsJSObjectMap(LoadMap(object));
}

// True iff |map|'s instance type is JS_PROMISE_TYPE.
TNode<BoolT> CodeStubAssembler::IsJSPromiseMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return InstanceTypeEqual(LoadMapInstanceType(map), JS_PROMISE_TYPE);
}

// True iff |object| is a JSPromise.
TNode<BoolT> CodeStubAssembler::IsJSPromise(SloppyTNode<HeapObject> object) {
  return IsJSPromiseMap(LoadMap(object));
}

// True iff |object| is a JSProxy.
TNode<BoolT> CodeStubAssembler::IsJSProxy(SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, JS_PROXY_TYPE);
}

// True iff |object| is the global proxy object.
TNode<BoolT> CodeStubAssembler::IsJSGlobalProxy(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, JS_GLOBAL_PROXY_TYPE);
}
6216 :
// True iff |map| is itself a Map, i.e. its map is the meta map.
TNode<BoolT> CodeStubAssembler::IsMap(SloppyTNode<HeapObject> map) {
  return IsMetaMap(LoadMap(map));
}

// True iff |instance_type| is JS_VALUE_TYPE (a primitive wrapper object).
TNode<BoolT> CodeStubAssembler::IsJSValueInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return InstanceTypeEqual(instance_type, JS_VALUE_TYPE);
}

// Object-level variant of IsJSValueInstanceType.
TNode<BoolT> CodeStubAssembler::IsJSValue(SloppyTNode<HeapObject> object) {
  return IsJSValueMap(LoadMap(object));
}

// Map-level variant of IsJSValueInstanceType.
TNode<BoolT> CodeStubAssembler::IsJSValueMap(SloppyTNode<Map> map) {
  return IsJSValueInstanceType(LoadMapInstanceType(map));
}

// True iff |instance_type| is JS_ARRAY_TYPE.
TNode<BoolT> CodeStubAssembler::IsJSArrayInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
}

// Object-level variant of IsJSArrayInstanceType.
TNode<BoolT> CodeStubAssembler::IsJSArray(SloppyTNode<HeapObject> object) {
  return IsJSArrayMap(LoadMap(object));
}

// Map-level variant of IsJSArrayInstanceType.
TNode<BoolT> CodeStubAssembler::IsJSArrayMap(SloppyTNode<Map> map) {
  return IsJSArrayInstanceType(LoadMapInstanceType(map));
}

// True iff |object| is a JSArrayIterator.
TNode<BoolT> CodeStubAssembler::IsJSArrayIterator(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
}

// True iff |object| is a JSAsyncGeneratorObject.
TNode<BoolT> CodeStubAssembler::IsJSAsyncGeneratorObject(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
}

// True iff |object|'s instance type lies within the Context type range.
TNode<BoolT> CodeStubAssembler::IsContext(SloppyTNode<HeapObject> object) {
  Node* instance_type = LoadInstanceType(object);
  return UncheckedCast<BoolT>(Word32And(
      Int32GreaterThanOrEqual(instance_type, Int32Constant(FIRST_CONTEXT_TYPE)),
      Int32LessThanOrEqual(instance_type, Int32Constant(LAST_CONTEXT_TYPE))));
}
6263 :
// True iff |object| is exactly a FixedArray (not any of its subclasses).
TNode<BoolT> CodeStubAssembler::IsFixedArray(SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, FIXED_ARRAY_TYPE);
}

// True iff |object|'s instance type lies within the FixedArray subclass
// range of the instance-type enum.
TNode<BoolT> CodeStubAssembler::IsFixedArraySubclass(
    SloppyTNode<HeapObject> object) {
  Node* instance_type = LoadInstanceType(object);
  return UncheckedCast<BoolT>(
      Word32And(Int32GreaterThanOrEqual(instance_type,
                                        Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
                Int32LessThanOrEqual(instance_type,
                                     Int32Constant(LAST_FIXED_ARRAY_TYPE))));
}

// True iff |object|'s instance type lies outside the WeakFixedArray
// subclass range (i.e. it is NOT a WeakFixedArray subclass).
TNode<BoolT> CodeStubAssembler::IsNotWeakFixedArraySubclass(
    SloppyTNode<HeapObject> object) {
  Node* instance_type = LoadInstanceType(object);
  return UncheckedCast<BoolT>(Word32Or(
      Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
      Int32GreaterThan(instance_type,
                       Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
}

// True iff |object| is a PromiseCapability.
TNode<BoolT> CodeStubAssembler::IsPromiseCapability(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, PROMISE_CAPABILITY_TYPE);
}

// True iff |object| is a PropertyArray.
TNode<BoolT> CodeStubAssembler::IsPropertyArray(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
}
6296 :
// This complicated check is due to elements oddities. If a smi array is empty
// after Array.p.shift, it is replaced by the empty array constant. If it is
// later filled with a double element, we try to grow it but pass in a double
// elements kind. Usually this would cause a size mismatch (since the source
// fixed array has HOLEY_ELEMENTS and destination has
// HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
// source array is empty.
// TODO(jgruber): It might be worth creating an empty_double_array constant to
// simplify this case.
TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKindOrEmpty(
    SloppyTNode<HeapObject> object, ElementsKind kind) {
  Label out(this);
  // Assume true; fall through to false only if both checks below fail.
  TVARIABLE(BoolT, var_result, Int32TrueConstant());

  GotoIf(IsFixedArrayWithKind(object, kind), &out);

  // An empty array is acceptable regardless of its actual kind (see above).
  TNode<Smi> const length = LoadFixedArrayBaseLength(CAST(object));
  GotoIf(SmiEqual(length, SmiConstant(0)), &out);

  var_result = Int32FalseConstant();
  Goto(&out);

  BIND(&out);
  return var_result.value();
}

// True iff |object| is a fixed array whose backing-store class matches
// |kind|: FixedDoubleArray for double kinds, any FixedArray subclass for
// smi/object kinds. |kind| is a C++-time constant, so this compiles to a
// single type check.
TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKind(
    SloppyTNode<HeapObject> object, ElementsKind kind) {
  if (IsDoubleElementsKind(kind)) {
    return IsFixedDoubleArray(object);
  } else {
    DCHECK(IsSmiOrObjectElementsKind(kind));
    return IsFixedArraySubclass(object);
  }
}
6332 :
// The predicates below are simple map/instance-type dispatches on a heap
// object; each delegates to the corresponding map-level check.

// True iff |object| is the true or false oddball (boolean map check).
TNode<BoolT> CodeStubAssembler::IsBoolean(SloppyTNode<HeapObject> object) {
  return IsBooleanMap(LoadMap(object));
}

// True iff |object| is a PropertyCell.
TNode<BoolT> CodeStubAssembler::IsPropertyCell(SloppyTNode<HeapObject> object) {
  return IsPropertyCellMap(LoadMap(object));
}

// True iff |object| is an AccessorInfo.
TNode<BoolT> CodeStubAssembler::IsAccessorInfo(SloppyTNode<HeapObject> object) {
  return IsAccessorInfoMap(LoadMap(object));
}

// True iff |object| is an AccessorPair.
TNode<BoolT> CodeStubAssembler::IsAccessorPair(SloppyTNode<HeapObject> object) {
  return IsAccessorPairMap(LoadMap(object));
}

// True iff |object| is an AllocationSite.
TNode<BoolT> CodeStubAssembler::IsAllocationSite(
    SloppyTNode<HeapObject> object) {
  return IsAllocationSiteInstanceType(LoadInstanceType(object));
}

// True iff |object| is a HeapNumber or a MutableHeapNumber.
TNode<BoolT> CodeStubAssembler::IsAnyHeapNumber(
    SloppyTNode<HeapObject> object) {
  return UncheckedCast<BoolT>(
      Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object)));
}

// True iff |object| is an (immutable) HeapNumber.
TNode<BoolT> CodeStubAssembler::IsHeapNumber(SloppyTNode<HeapObject> object) {
  return IsHeapNumberMap(LoadMap(object));
}

// True iff |instance_type| is HEAP_NUMBER_TYPE.
TNode<BoolT> CodeStubAssembler::IsHeapNumberInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return InstanceTypeEqual(instance_type, HEAP_NUMBER_TYPE);
}

// True iff |object| is an Oddball (undefined, null, true, false, etc.).
TNode<BoolT> CodeStubAssembler::IsOddball(SloppyTNode<HeapObject> object) {
  return IsOddballInstanceType(LoadInstanceType(object));
}

// True iff |instance_type| is ODDBALL_TYPE.
TNode<BoolT> CodeStubAssembler::IsOddballInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return InstanceTypeEqual(instance_type, ODDBALL_TYPE);
}

// True iff |object| is a MutableHeapNumber (in-place updatable number box).
TNode<BoolT> CodeStubAssembler::IsMutableHeapNumber(
    SloppyTNode<HeapObject> object) {
  return IsMutableHeapNumberMap(LoadMap(object));
}

// True iff |object| is a FeedbackCell.
TNode<BoolT> CodeStubAssembler::IsFeedbackCell(SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, FEEDBACK_CELL_TYPE);
}

// True iff |object| is a FeedbackVector.
TNode<BoolT> CodeStubAssembler::IsFeedbackVector(
    SloppyTNode<HeapObject> object) {
  return IsFeedbackVectorMap(LoadMap(object));
}

// True iff |object| is a Name (string or symbol).
TNode<BoolT> CodeStubAssembler::IsName(SloppyTNode<HeapObject> object) {
  return IsNameInstanceType(LoadInstanceType(object));
}

// True iff |instance_type| is in the Name range (<= LAST_NAME_TYPE).
TNode<BoolT> CodeStubAssembler::IsNameInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return Int32LessThanOrEqual(instance_type, Int32Constant(LAST_NAME_TYPE));
}
6400 :
// True iff |object| is any kind of String.
TNode<BoolT> CodeStubAssembler::IsString(SloppyTNode<HeapObject> object) {
  return IsStringInstanceType(LoadInstanceType(object));
}

// True iff |instance_type| is SYMBOL_TYPE.
TNode<BoolT> CodeStubAssembler::IsSymbolInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
}

// True iff |object| is a Symbol.
TNode<BoolT> CodeStubAssembler::IsSymbol(SloppyTNode<HeapObject> object) {
  return IsSymbolMap(LoadMap(object));
}

// True iff |instance_type| is BIGINT_TYPE.
TNode<BoolT> CodeStubAssembler::IsBigIntInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return InstanceTypeEqual(instance_type, BIGINT_TYPE);
}

// True iff |object| is a BigInt.
TNode<BoolT> CodeStubAssembler::IsBigInt(SloppyTNode<HeapObject> object) {
  return IsBigIntInstanceType(LoadInstanceType(object));
}

// True iff |instance_type| falls in the primitive range of the enum.
TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_PRIMITIVE_TYPE));
}

// True iff |object| is a private Symbol: must be a Symbol, and its flags
// word must have the IsPrivateBit set. Non-symbols yield false.
TNode<BoolT> CodeStubAssembler::IsPrivateSymbol(
    SloppyTNode<HeapObject> object) {
  return Select<BoolT>(IsSymbol(object),
                       [=] {
                         TNode<Symbol> symbol = CAST(object);
                         TNode<Uint32T> flags = LoadObjectField<Uint32T>(
                             symbol, Symbol::kFlagsOffset);
                         return IsSetWord32<Symbol::IsPrivateBit>(flags);
                       },
                       [=] { return Int32FalseConstant(); });
}

// True iff |object| is a NativeContext (map identity against the root).
TNode<BoolT> CodeStubAssembler::IsNativeContext(
    SloppyTNode<HeapObject> object) {
  return WordEqual(LoadMap(object), LoadRoot(RootIndex::kNativeContextMap));
}
6445 :
// True iff |object| is a FixedDoubleArray (map identity check).
TNode<BoolT> CodeStubAssembler::IsFixedDoubleArray(
    SloppyTNode<HeapObject> object) {
  return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
}

// True iff |object|'s instance type lies within the hash-table range.
TNode<BoolT> CodeStubAssembler::IsHashTable(SloppyTNode<HeapObject> object) {
  Node* instance_type = LoadInstanceType(object);
  return UncheckedCast<BoolT>(
      Word32And(Int32GreaterThanOrEqual(instance_type,
                                        Int32Constant(FIRST_HASH_TABLE_TYPE)),
                Int32LessThanOrEqual(instance_type,
                                     Int32Constant(LAST_HASH_TABLE_TYPE))));
}

// True iff |object| is an EphemeronHashTable (WeakMap/WeakSet backing store).
TNode<BoolT> CodeStubAssembler::IsEphemeronHashTable(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, EPHEMERON_HASH_TABLE_TYPE);
}

// True iff |object| is a NameDictionary.
TNode<BoolT> CodeStubAssembler::IsNameDictionary(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, NAME_DICTIONARY_TYPE);
}

// True iff |object| is a GlobalDictionary.
TNode<BoolT> CodeStubAssembler::IsGlobalDictionary(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, GLOBAL_DICTIONARY_TYPE);
}

// True iff |object| is a NumberDictionary.
TNode<BoolT> CodeStubAssembler::IsNumberDictionary(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, NUMBER_DICTIONARY_TYPE);
}
6484 :
// True iff |object| is a JSGeneratorObject.
TNode<BoolT> CodeStubAssembler::IsJSGeneratorObject(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
}

// True iff |instance_type| is JS_FUNCTION_TYPE.
TNode<BoolT> CodeStubAssembler::IsJSFunctionInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return InstanceTypeEqual(instance_type, JS_FUNCTION_TYPE);
}

// True iff |instance_type| is ALLOCATION_SITE_TYPE.
TNode<BoolT> CodeStubAssembler::IsAllocationSiteInstanceType(
    SloppyTNode<Int32T> instance_type) {
  return InstanceTypeEqual(instance_type, ALLOCATION_SITE_TYPE);
}

// True iff |object| is a JSFunction.
TNode<BoolT> CodeStubAssembler::IsJSFunction(SloppyTNode<HeapObject> object) {
  return IsJSFunctionMap(LoadMap(object));
}

// Map-level variant of IsJSFunction.
TNode<BoolT> CodeStubAssembler::IsJSFunctionMap(SloppyTNode<Map> map) {
  return IsJSFunctionInstanceType(LoadMapInstanceType(map));
}

// True iff |object| is a JSTypedArray.
TNode<BoolT> CodeStubAssembler::IsJSTypedArray(SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, JS_TYPED_ARRAY_TYPE);
}

// True iff |object| is a JSArrayBuffer.
TNode<BoolT> CodeStubAssembler::IsJSArrayBuffer(
    SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
}

// True iff |object| is a JSDataView.
TNode<BoolT> CodeStubAssembler::IsJSDataView(TNode<HeapObject> object) {
  return HasInstanceType(object, JS_DATA_VIEW_TYPE);
}

// True iff |object|'s instance type lies in the FixedTypedArray range
// (the on-heap backing stores of typed arrays).
TNode<BoolT> CodeStubAssembler::IsFixedTypedArray(
    SloppyTNode<HeapObject> object) {
  TNode<Int32T> instance_type = LoadInstanceType(object);
  return UncheckedCast<BoolT>(Word32And(
      Int32GreaterThanOrEqual(instance_type,
                              Int32Constant(FIRST_FIXED_TYPED_ARRAY_TYPE)),
      Int32LessThanOrEqual(instance_type,
                           Int32Constant(LAST_FIXED_TYPED_ARRAY_TYPE))));
}

// True iff |object| is a JSRegExp.
TNode<BoolT> CodeStubAssembler::IsJSRegExp(SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, JS_REGEXP_TYPE);
}

// True iff |object| is a Number: a Smi, or a HeapNumber.
TNode<BoolT> CodeStubAssembler::IsNumber(SloppyTNode<Object> object) {
  return Select<BoolT>(TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
                       [=] { return IsHeapNumber(CAST(object)); });
}
6534 :
// True iff |object| is numeric in the ES sense: a Smi, a HeapNumber, or a
// BigInt.
TNode<BoolT> CodeStubAssembler::IsNumeric(SloppyTNode<Object> object) {
  return Select<BoolT>(
      TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
      [=] {
        return UncheckedCast<BoolT>(
            Word32Or(IsHeapNumber(CAST(object)), IsBigInt(CAST(object))));
      });
}
6543 :
// True iff |number| is in canonical form: Smis are always normalized; a
// HeapNumber is normalized only if its value could NOT have been stored as
// a Smi (it is outside the Smi range, or it is NaN).
TNode<BoolT> CodeStubAssembler::IsNumberNormalized(SloppyTNode<Number> number) {
  TVARIABLE(BoolT, var_result, Int32TrueConstant());
  Label out(this);

  // Smis are normalized by definition.
  GotoIf(TaggedIsSmi(number), &out);

  TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
  TNode<Float64T> smi_min =
      Float64Constant(static_cast<double>(Smi::kMinValue));
  TNode<Float64T> smi_max =
      Float64Constant(static_cast<double>(Smi::kMaxValue));

  // Out-of-Smi-range values cannot be represented as Smis: normalized.
  GotoIf(Float64LessThan(value, smi_min), &out);
  GotoIf(Float64GreaterThan(value, smi_max), &out);
  // value != value detects NaN, which also cannot be a Smi.
  GotoIfNot(Float64Equal(value, value), &out);  // NaN.

  // In-range, non-NaN HeapNumber: should have been a Smi, so not normalized.
  var_result = Int32FalseConstant();
  Goto(&out);

  BIND(&out);
  return var_result.value();
}
6566 :
// True iff |number| >= 0 (dispatches on Smi vs HeapNumber representation).
TNode<BoolT> CodeStubAssembler::IsNumberPositive(SloppyTNode<Number> number) {
  return Select<BoolT>(TaggedIsSmi(number),
                       [=] { return TaggedIsPositiveSmi(number); },
                       [=] { return IsHeapNumberPositive(CAST(number)); });
}

// TODO(cbruni): Use TNode<HeapNumber> instead of custom name.
// True iff the HeapNumber's float64 value is >= 0.0.
TNode<BoolT> CodeStubAssembler::IsHeapNumberPositive(TNode<HeapNumber> number) {
  TNode<Float64T> value = LoadHeapNumberValue(number);
  TNode<Float64T> float_zero = Float64Constant(0.);
  return Float64GreaterThanOrEqual(value, float_zero);
}

// True iff |number| is a non-negative integer: positive Smi, or a
// HeapNumber that is both an integer and >= 0.
TNode<BoolT> CodeStubAssembler::IsNumberNonNegativeSafeInteger(
    TNode<Number> number) {
  return Select<BoolT>(
      // TODO(cbruni): Introduce TaggedIsNonNegateSmi to avoid confusion.
      TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
      [=] {
        TNode<HeapNumber> heap_number = CAST(number);
        return Select<BoolT>(IsInteger(heap_number),
                             [=] { return IsHeapNumberPositive(heap_number); },
                             [=] { return Int32FalseConstant(); });
      });
}
6592 :
// True iff |number| is a safe integer: a Smi, or a HeapNumber holding an
// integral value with |value| <= 2^53 - 1. Non-numbers yield false.
TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<Object> number) {
  return Select<BoolT>(
      TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
      [=] {
        return Select<BoolT>(
            IsHeapNumber(CAST(number)),
            [=] { return IsSafeInteger(UncheckedCast<HeapNumber>(number)); },
            [=] { return Int32FalseConstant(); });
      });
}

// HeapNumber overload: checks the float64 value is an integer within the
// safe-integer range.
TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<HeapNumber> number) {
  // Load the actual value of {number}.
  TNode<Float64T> number_value = LoadHeapNumberValue(number);
  // Truncate the value of {number} to an integer (or an infinity).
  TNode<Float64T> integer = Float64Trunc(number_value);

  return Select<BoolT>(
      // Check if {number}s value matches the integer (ruling out the
      // infinities).
      Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0)),
      [=] {
        // Check if the {integer} value is in safe integer range.
        return Float64LessThanOrEqual(Float64Abs(integer),
                                      Float64Constant(kMaxSafeInteger));
      },
      [=] { return Int32FalseConstant(); });
}

// True iff |number| holds an integral value: a Smi, or a HeapNumber whose
// float64 value is an integer. Non-numbers yield false.
TNode<BoolT> CodeStubAssembler::IsInteger(TNode<Object> number) {
  return Select<BoolT>(
      TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
      [=] {
        return Select<BoolT>(
            IsHeapNumber(CAST(number)),
            [=] { return IsInteger(UncheckedCast<HeapNumber>(number)); },
            [=] { return Int32FalseConstant(); });
      });
}

// HeapNumber overload: value equals its own truncation (rules out
// fractional values; infinities fail because inf - inf is NaN).
TNode<BoolT> CodeStubAssembler::IsInteger(TNode<HeapNumber> number) {
  TNode<Float64T> number_value = LoadHeapNumberValue(number);
  // Truncate the value of {number} to an integer (or an infinity).
  TNode<Float64T> integer = Float64Trunc(number_value);
  // Check if {number}s value matches the integer (ruling out the infinities).
  return Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0));
}
6640 :
// True iff the HeapNumber holds a value exactly representable as uint32:
// non-negative, and round-tripping through a 32-bit truncation preserves it.
TNode<BoolT> CodeStubAssembler::IsHeapNumberUint32(TNode<HeapNumber> number) {
  // Check that the HeapNumber is a valid uint32
  return Select<BoolT>(
      IsHeapNumberPositive(number),
      [=] {
        TNode<Float64T> value = LoadHeapNumberValue(number);
        TNode<Uint32T> int_value = Unsigned(TruncateFloat64ToWord32(value));
        return Float64Equal(value, ChangeUint32ToFloat64(int_value));
      },
      [=] { return Int32FalseConstant(); });
}

// True iff |number| is usable as an array index: a positive Smi, or a
// HeapNumber that is a valid uint32.
TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(TNode<Number> number) {
  return Select<BoolT>(TaggedIsSmi(number),
                       [=] { return TaggedIsPositiveSmi(number); },
                       [=] { return IsHeapNumberUint32(CAST(number)); });
}
6658 :
// Returns true iff a fixed array with |element_count| tagged elements plus
// |base_size| header bytes would exceed the regular new-space object size
// limit (kMaxRegularHeapObjectSize) and thus cannot be allocated there.
// The element limit is computed at C++ compile time.
Node* CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(Node* element_count,
                                                           int base_size,
                                                           ParameterMode mode) {
  int max_newspace_elements =
      (kMaxRegularHeapObjectSize - base_size) / kTaggedSize;
  return IntPtrOrSmiGreaterThan(
      element_count, IntPtrOrSmiConstant(max_newspace_elements, mode), mode);
}
6667 :
// Returns the char code of the character at |index| in |string|.
// Flattens/unpacks the string via ToDirectStringAssembler so the character
// can be read with a raw load; falls back to Runtime::kStringCharCodeAt for
// representations that cannot be accessed directly (e.g. uncached external
// strings). |index| must be in-bounds (asserted below).
TNode<Int32T> CodeStubAssembler::StringCharCodeAt(SloppyTNode<String> string,
                                                  SloppyTNode<IntPtrT> index) {
  CSA_ASSERT(this, IsString(string));

  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(index, IntPtrConstant(0)));
  CSA_ASSERT(this, IntPtrLessThan(index, LoadStringLengthAsWord(string)));

  TVARIABLE(Int32T, var_result);

  Label return_result(this), if_runtime(this, Label::kDeferred),
      if_stringistwobyte(this), if_stringisonebyte(this);

  // Resolve indirections (cons/sliced/thin) to get at the flat data;
  // bails out to the runtime if that is not possible.
  ToDirectStringAssembler to_direct(state(), string);
  to_direct.TryToDirect(&if_runtime);
  // For sliced strings the direct string starts at a non-zero offset.
  Node* const offset = IntPtrAdd(index, to_direct.offset());
  Node* const instance_type = to_direct.instance_type();

  Node* const string_data = to_direct.PointerToData(&if_runtime);

  // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
  Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
         &if_stringistwobyte);

  BIND(&if_stringisonebyte);
  {
    // One byte per character: offset is the byte offset directly.
    var_result =
        UncheckedCast<Int32T>(Load(MachineType::Uint8(), string_data, offset));
    Goto(&return_result);
  }

  BIND(&if_stringistwobyte);
  {
    // Two bytes per character: scale the offset by 2 (shift left by 1).
    var_result =
        UncheckedCast<Int32T>(Load(MachineType::Uint16(), string_data,
                                   WordShl(offset, IntPtrConstant(1))));
    Goto(&return_result);
  }

  BIND(&if_runtime);
  {
    Node* result = CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(),
                               string, SmiTag(index));
    var_result = SmiToInt32(result);
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
6717 :
// Creates a one-character string for |code|. One-byte char codes
// (<= String::kMaxOneByteCharCode) are served from (and cached in) the
// isolate-wide single-character string cache; two-byte codes always
// allocate a fresh SeqTwoByteString.
TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
  VARIABLE(var_result, MachineRepresentation::kTagged);

  // Check if the {code} is a one-byte char code.
  Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
      if_done(this);
  Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
         &if_codeisonebyte, &if_codeistwobyte);
  BIND(&if_codeisonebyte);
  {
    // Load the isolate wide single character string cache.
    TNode<FixedArray> cache =
        CAST(LoadRoot(RootIndex::kSingleCharacterStringCache));
    TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));

    // Check if we have an entry for the {code} in the single character string
    // cache already.
    Label if_entryisundefined(this, Label::kDeferred),
        if_entryisnotundefined(this);
    Node* entry = LoadFixedArrayElement(cache, code_index);
    Branch(IsUndefined(entry), &if_entryisundefined, &if_entryisnotundefined);

    BIND(&if_entryisundefined);
    {
      // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
      TNode<String> result = AllocateSeqOneByteString(1);
      StoreNoWriteBarrier(
          MachineRepresentation::kWord8, result,
          IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
      StoreFixedArrayElement(cache, code_index, result);
      var_result.Bind(result);
      Goto(&if_done);
    }

    BIND(&if_entryisnotundefined);
    {
      // Return the entry from the {cache}.
      var_result.Bind(entry);
      Goto(&if_done);
    }
  }

  BIND(&if_codeistwobyte);
  {
    // Allocate a new SeqTwoByteString for {code}.
    Node* result = AllocateSeqTwoByteString(1);
    StoreNoWriteBarrier(
        MachineRepresentation::kWord16, result,
        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
    var_result.Bind(result);
    Goto(&if_done);
  }

  BIND(&if_done);
  CSA_ASSERT(this, IsString(var_result.value()));
  return CAST(var_result.value());
}
6775 :
// A wrapper around CopyStringCharacters which determines the correct string
// encoding, allocates a corresponding sequential string, and then copies the
// given character range using CopyStringCharacters.
// |from_string| must be a sequential string.
// 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
TNode<String> CodeStubAssembler::AllocAndCopyStringCharacters(
    Node* from, Node* from_instance_type, TNode<IntPtrT> from_index,
    TNode<IntPtrT> character_count) {
  Label end(this), one_byte_sequential(this), two_byte_sequential(this);
  TVARIABLE(String, var_result);

  // Dispatch on the source encoding; the result string uses the same one.
  Branch(IsOneByteStringInstanceType(from_instance_type), &one_byte_sequential,
         &two_byte_sequential);

  // The subject string is a sequential one-byte string.
  BIND(&one_byte_sequential);
  {
    TNode<String> result = AllocateSeqOneByteString(
        NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
    CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
                         character_count, String::ONE_BYTE_ENCODING,
                         String::ONE_BYTE_ENCODING);
    var_result = result;
    Goto(&end);
  }

  // The subject string is a sequential two-byte string.
  BIND(&two_byte_sequential);
  {
    TNode<String> result = AllocateSeqTwoByteString(
        NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
    CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
                         character_count, String::TWO_BYTE_ENCODING,
                         String::TWO_BYTE_ENCODING);
    var_result = result;
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
6817 :
          : // Returns the substring of |string| covering the half-open character
          : // range [from, to). Fast paths, in dispatch order:
          : //   - substr_length >= string_length: return the original string when
          : //     {from, to} == {0, length}, otherwise go to the runtime;
          : //   - empty substring: return the canonical empty string;
          : //   - single character: go through StringFromSingleCharCode;
          : //   - long enough substring of a direct string: allocate a SlicedString;
          : //   - otherwise: copy characters out of the (sequential or external)
          : //     underlying string.
          : // Falls back to Runtime::kStringSubstring when the string cannot be
          : // flattened here or the range is invalid.
6818 448 : TNode<String> CodeStubAssembler::SubString(TNode<String> string,
6819 : TNode<IntPtrT> from,
6820 : TNode<IntPtrT> to) {
6821 448 : TVARIABLE(String, var_result);
6822 896 : ToDirectStringAssembler to_direct(state(), string);
6823 896 : Label end(this), runtime(this);
6824 :
6825 448 : TNode<IntPtrT> const substr_length = IntPtrSub(to, from);
6826 448 : TNode<IntPtrT> const string_length = LoadStringLengthAsWord(string);
6827 :
6828 : // Begin dispatching based on substring length.
6829 :
          : // Unsigned comparison also catches a negative substr_length (from > to).
6830 896 : Label original_string_or_invalid_length(this);
6831 896 : GotoIf(UintPtrGreaterThanOrEqual(substr_length, string_length),
6832 448 : &original_string_or_invalid_length);
6833 :
6834 : // A real substring (substr_length < string_length).
6835 896 : Label empty(this);
6836 448 : GotoIf(IntPtrEqual(substr_length, IntPtrConstant(0)), &empty);
6837 :
6838 896 : Label single_char(this);
6839 448 : GotoIf(IntPtrEqual(substr_length, IntPtrConstant(1)), &single_char);
6840 :
6841 : // Deal with different string types: update the index if necessary
6842 : // and extract the underlying string.
6843 :
6844 448 : TNode<String> direct_string = to_direct.TryToDirect(&runtime);
6845 448 : TNode<IntPtrT> offset = IntPtrAdd(from, to_direct.offset());
6846 448 : Node* const instance_type = to_direct.instance_type();
6847 :
6848 : // The subject string can only be external or sequential string of either
6849 : // encoding at this point.
6850 896 : Label external_string(this);
6851 : {
6852 : if (FLAG_string_slices) {
6853 448 : Label next(this);
6854 :
6855 : // Short slice. Copy instead of slicing.
6856 : GotoIf(IntPtrLessThan(substr_length,
6857 896 : IntPtrConstant(SlicedString::kMinLength)),
6858 448 : &next);
6859 :
6860 : // Allocate new sliced string.
6861 :
6862 448 : Counters* counters = isolate()->counters();
6863 448 : IncrementCounter(counters->sub_string_native(), 1);
6864 :
6865 896 : Label one_byte_slice(this), two_byte_slice(this);
6866 896 : Branch(IsOneByteStringInstanceType(to_direct.instance_type()),
6867 448 : &one_byte_slice, &two_byte_slice);
6868 :
6869 448 : BIND(&one_byte_slice);
6870 : {
6871 1792 : var_result = AllocateSlicedOneByteString(
6872 896 : Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6873 448 : SmiTag(offset));
6874 448 : Goto(&end);
6875 : }
6876 :
6877 448 : BIND(&two_byte_slice);
6878 : {
6879 1792 : var_result = AllocateSlicedTwoByteString(
6880 896 : Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6881 448 : SmiTag(offset));
6882 448 : Goto(&end);
6883 : }
6884 :
6885 896 : BIND(&next);
6886 : }
6887 :
6888 : // The subject string can only be external or sequential string of either
6889 : // encoding at this point.
6890 448 : GotoIf(to_direct.is_external(), &external_string);
6891 :
6892 448 : var_result = AllocAndCopyStringCharacters(direct_string, instance_type,
6893 448 : offset, substr_length);
6894 :
6895 448 : Counters* counters = isolate()->counters();
6896 448 : IncrementCounter(counters->sub_string_native(), 1);
6897 :
6898 448 : Goto(&end);
6899 : }
6900 :
6901 : // Handle external string.
6902 448 : BIND(&external_string);
6903 : {
          : // PointerToString gives a pointer that can be treated like a
          : // sequential string for the purposes of the character copy; bails
          : // out to the runtime for uncached external strings.
6904 448 : Node* const fake_sequential_string = to_direct.PointerToString(&runtime);
6905 :
6906 448 : var_result = AllocAndCopyStringCharacters(
6907 448 : fake_sequential_string, instance_type, offset, substr_length);
6908 :
6909 448 : Counters* counters = isolate()->counters();
6910 448 : IncrementCounter(counters->sub_string_native(), 1);
6911 :
6912 448 : Goto(&end);
6913 : }
6914 :
6915 448 : BIND(&empty);
6916 : {
6917 448 : var_result = EmptyStringConstant();
6918 448 : Goto(&end);
6919 : }
6920 :
6921 : // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
6922 448 : BIND(&single_char);
6923 : {
6924 448 : TNode<Int32T> char_code = StringCharCodeAt(string, from);
6925 448 : var_result = StringFromSingleCharCode(char_code);
6926 448 : Goto(&end);
6927 : }
6928 :
6929 448 : BIND(&original_string_or_invalid_length);
6930 : {
6931 : CSA_ASSERT(this, IntPtrEqual(substr_length, string_length));
6932 :
6933 : // Equal length - check if {from, to} == {0, str.length}.
6934 448 : GotoIf(UintPtrGreaterThan(from, IntPtrConstant(0)), &runtime);
6935 :
6936 : // Return the original string (substr_length == string_length).
6937 :
6938 448 : Counters* counters = isolate()->counters();
6939 448 : IncrementCounter(counters->sub_string_native(), 1);
6940 :
6941 448 : var_result = string;
6942 448 : Goto(&end);
6943 : }
6944 :
6945 : // Fall back to a runtime call.
6946 448 : BIND(&runtime);
6947 : {
6948 896 : var_result =
6949 896 : CAST(CallRuntime(Runtime::kStringSubstring, NoContextConstant(), string,
6950 448 : SmiTag(from), SmiTag(to)));
6951 448 : Goto(&end);
6952 : }
6953 :
6954 448 : BIND(&end);
6955 896 : return var_result.value();
6956 : }
6957 :
          : // Helper assembler that unwraps indirect strings (cons/sliced/thin) to a
          : // "direct" string, i.e. a sequential or external string. State variables:
          : //   var_string_        - the string unwrapped so far;
          : //   var_instance_type_ - its instance type (kept in sync with var_string_);
          : //   var_offset_        - accumulated character offset into var_string_;
          : //   var_is_external_   - non-zero once an external string is reached.
6958 4816 : ToDirectStringAssembler::ToDirectStringAssembler(
6959 : compiler::CodeAssemblerState* state, Node* string, Flags flags)
6960 : : CodeStubAssembler(state),
6961 : var_string_(this, MachineRepresentation::kTagged, string),
6962 : var_instance_type_(this, MachineRepresentation::kWord32),
6963 : var_offset_(this, MachineType::PointerRepresentation()),
6964 : var_is_external_(this, MachineRepresentation::kWord32),
6965 4816 : flags_(flags) {
6966 : CSA_ASSERT(this, TaggedIsNotSmi(string));
6967 : CSA_ASSERT(this, IsString(string));
6968 :
          : // Initial state: no unwrapping done yet, offset 0, not external.
6969 4816 : var_string_.Bind(string);
6970 4816 : var_offset_.Bind(IntPtrConstant(0));
6971 4816 : var_instance_type_.Bind(LoadInstanceType(string));
6972 4816 : var_is_external_.Bind(Int32Constant(0));
6973 4816 : }
6974 :
          : // Iteratively unwraps the string until a sequential or external string is
          : // reached, following flat cons strings (first part), sliced strings
          : // (parent, accumulating the slice offset) and thin strings (actual
          : // string). Jumps to |if_bailout| for non-flat cons strings, for sliced
          : // strings when slice unpacking is disabled, and for unexpected
          : // representations.
6975 4816 : TNode<String> ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
6976 4816 : VariableList vars({&var_string_, &var_offset_, &var_instance_type_}, zone());
6977 9632 : Label dispatch(this, vars);
6978 9632 : Label if_iscons(this);
6979 9632 : Label if_isexternal(this);
6980 9632 : Label if_issliced(this);
6981 9632 : Label if_isthin(this);
6982 9632 : Label out(this);
6983 :
          : // Fast path: the string is already sequential, nothing to unwrap.
6984 9632 : Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
6985 4816 : &dispatch);
6986 :
6987 : // Dispatch based on string representation.
6988 4816 : BIND(&dispatch);
6989 : {
6990 : int32_t values[] = {
6991 : kSeqStringTag, kConsStringTag, kExternalStringTag,
6992 : kSlicedStringTag, kThinStringTag,
6993 4816 : };
6994 : Label* labels[] = {
6995 : &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
6996 4816 : };
6997 : STATIC_ASSERT(arraysize(values) == arraysize(labels));
6998 :
6999 : Node* const representation = Word32And(
7000 4816 : var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
7001 4816 : Switch(representation, if_bailout, values, labels, arraysize(values));
7002 : }
7003 :
7004 : // Cons string. Check whether it is flat, then fetch first part.
7005 : // Flat cons strings have an empty second part.
7006 4816 : BIND(&if_iscons);
7007 : {
7008 4816 : Node* const string = var_string_.value();
          : // Non-flat cons strings cannot be unwrapped without flattening.
7009 9632 : GotoIfNot(IsEmptyString(LoadObjectField(string, ConsString::kSecondOffset)),
7010 4816 : if_bailout);
7011 :
7012 4816 : Node* const lhs = LoadObjectField(string, ConsString::kFirstOffset);
7013 4816 : var_string_.Bind(lhs);
7014 4816 : var_instance_type_.Bind(LoadInstanceType(lhs));
7015 :
7016 4816 : Goto(&dispatch);
7017 : }
7018 :
7019 : // Sliced string. Fetch parent and correct start index by offset.
7020 4816 : BIND(&if_issliced);
7021 : {
7022 4816 : if (!FLAG_string_slices || (flags_ & kDontUnpackSlicedStrings)) {
7023 56 : Goto(if_bailout);
7024 : } else {
          : // Accumulate the slice offset so that offset() addresses the
          : // correct character range in the parent string.
7025 4760 : Node* const string = var_string_.value();
7026 : Node* const sliced_offset =
7027 4760 : LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
7028 4760 : var_offset_.Bind(IntPtrAdd(var_offset_.value(), sliced_offset));
7029 :
7030 4760 : Node* const parent = LoadObjectField(string, SlicedString::kParentOffset);
7031 4760 : var_string_.Bind(parent);
7032 4760 : var_instance_type_.Bind(LoadInstanceType(parent));
7033 :
7034 4760 : Goto(&dispatch);
7035 : }
7036 : }
7037 :
7038 : // Thin string. Fetch the actual string.
7039 4816 : BIND(&if_isthin);
7040 : {
7041 4816 : Node* const string = var_string_.value();
7042 : Node* const actual_string =
7043 4816 : LoadObjectField(string, ThinString::kActualOffset);
7044 4816 : Node* const actual_instance_type = LoadInstanceType(actual_string);
7045 :
7046 4816 : var_string_.Bind(actual_string);
7047 4816 : var_instance_type_.Bind(actual_instance_type);
7048 :
7049 4816 : Goto(&dispatch);
7050 : }
7051 :
7052 : // External string.
7053 4816 : BIND(&if_isexternal);
7054 4816 : var_is_external_.Bind(Int32Constant(1));
7055 4816 : Goto(&out);
7056 :
7057 4816 : BIND(&out);
7058 9632 : return CAST(var_string_.value());
7059 : }
7060 :
          : // After TryToDirect, returns a raw pointer either to the character data
          : // (PTR_TO_DATA) or to a pointer that can be treated as a sequential
          : // String object (PTR_TO_STRING). Jumps to |if_bailout| for uncached
          : // external strings, whose data is not accessible without a runtime call.
7061 4816 : TNode<RawPtrT> ToDirectStringAssembler::TryToSequential(
7062 : StringPointerKind ptr_kind, Label* if_bailout) {
7063 4816 : CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
7064 :
7065 4816 : TVARIABLE(RawPtrT, var_result);
7066 9632 : Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
7067 4816 : Branch(is_external(), &if_isexternal, &if_issequential);
7068 :
7069 4816 : BIND(&if_issequential);
7070 : {
          : // One-byte and two-byte sequential strings share the same header
          : // size, so a single offset works for both encodings.
7071 : STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
7072 : SeqTwoByteString::kHeaderSize);
7073 4816 : TNode<IntPtrT> result = BitcastTaggedToWord(var_string_.value());
7074 4816 : if (ptr_kind == PTR_TO_DATA) {
7075 4368 : result = IntPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7076 4368 : kHeapObjectTag));
7077 : }
7078 4816 : var_result = ReinterpretCast<RawPtrT>(result);
7079 4816 : Goto(&out);
7080 : }
7081 :
7082 4816 : BIND(&if_isexternal);
7083 : {
7084 9632 : GotoIf(IsUncachedExternalStringInstanceType(var_instance_type_.value()),
7085 4816 : if_bailout);
7086 :
7087 4816 : TNode<String> string = CAST(var_string_.value());
7088 : TNode<IntPtrT> result =
7089 4816 : LoadObjectField<IntPtrT>(string, ExternalString::kResourceDataOffset);
          : // For PTR_TO_STRING, back up by the header size so that the result
          : // can be indexed like a sequential string's payload.
7090 4816 : if (ptr_kind == PTR_TO_STRING) {
7091 448 : result = IntPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7092 448 : kHeapObjectTag));
7093 : }
7094 4816 : var_result = ReinterpretCast<RawPtrT>(result);
7095 4816 : Goto(&out);
7096 : }
7097 :
7098 4816 : BIND(&out);
7099 9632 : return var_result.value();
7100 : }
7101 :
          : // Branches to |can_deref| if |string| is an indirect string that can be
          : // dereferenced in one step: a thin string (always), or a cons string
          : // whose second part is empty (flat cons). All other representations go
          : // to |cannot_deref|.
7102 1008 : void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string,
7103 : Node* instance_type,
7104 : Label* can_deref,
7105 : Label* cannot_deref) {
7106 : CSA_ASSERT(this, IsString(string));
7107 : Node* representation =
7108 1008 : Word32And(instance_type, Int32Constant(kStringRepresentationMask));
7109 1008 : GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref);
7110 2016 : GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)),
7111 1008 : cannot_deref);
7112 : // Cons string.
          : // A cons string is dereferenceable only when it is flat, i.e. its
          : // second part is the empty string.
7113 1008 : Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
7114 1008 : GotoIf(IsEmptyString(rhs), can_deref);
7115 1008 : Goto(cannot_deref);
7116 1008 : }
7117 :
          : // Returns the directly-referenced string behind a thin or flat cons
          : // string, or jumps to |cannot_deref| if |string| is not dereferenceable.
7118 0 : Node* CodeStubAssembler::DerefIndirectString(TNode<String> string,
7119 : TNode<Int32T> instance_type,
7120 : Label* cannot_deref) {
7121 0 : Label deref(this);
7122 0 : BranchIfCanDerefIndirectString(string, instance_type, &deref, cannot_deref);
7123 0 : BIND(&deref);
          : // ThinString::kActualOffset == ConsString::kFirstOffset, so one load
          : // handles both representations.
7124 : STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7125 : static_cast<int>(ConsString::kFirstOffset));
7126 0 : return LoadObjectField(string, ThinString::kActualOffset);
7127 : }
7128 :
          : // Unconditionally dereferences |var_string| in place. The caller must
          : // have already established that the string is a thin or flat cons
          : // string; in debug builds this precondition is verified with DebugBreak.
7129 1008 : void CodeStubAssembler::DerefIndirectString(Variable* var_string,
7130 : Node* instance_type) {
7131 : #ifdef DEBUG
7132 : Label can_deref(this), cannot_deref(this);
7133 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &can_deref,
7134 : &cannot_deref);
7135 : BIND(&cannot_deref);
7136 : DebugBreak(); // Should be able to dereference string.
7137 : Goto(&can_deref);
7138 : BIND(&can_deref);
7139 : #endif // DEBUG
7140 :
          : // ThinString::kActualOffset == ConsString::kFirstOffset, so one load
          : // handles both representations.
7141 : STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7142 : static_cast<int>(ConsString::kFirstOffset));
7143 : var_string->Bind(
7144 1008 : LoadObjectField(var_string->value(), ThinString::kActualOffset));
7145 1008 : }
7146 :
          : // Dereferences |var_string| if possible and jumps to |did_deref|;
          : // otherwise jumps to |cannot_deref| leaving |var_string| unchanged.
7147 1008 : void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
7148 : Node* instance_type,
7149 : Label* did_deref,
7150 : Label* cannot_deref) {
7151 1008 : Label deref(this);
7152 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &deref,
7153 1008 : cannot_deref);
7154 :
7155 1008 : BIND(&deref);
7156 : {
7157 1008 : DerefIndirectString(var_string, instance_type);
7158 1008 : Goto(did_deref);
7159 1008 : }
7160 1008 : }
7161 :
          : // Attempts to dereference both operands of a binary string operation.
          : // Jumps to |did_something| if at least one of the two was dereferenced
          : // (so the caller can retry its fast path); falls through if neither
          : // string was an indirect string.
7162 336 : void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
7163 : Node* left_instance_type,
7164 : Variable* var_right,
7165 : Node* right_instance_type,
7166 : Label* did_something) {
7167 672 : Label did_nothing_left(this), did_something_left(this),
7168 672 : didnt_do_anything(this);
7169 : MaybeDerefIndirectString(var_left, left_instance_type, &did_something_left,
7170 336 : &did_nothing_left);
7171 :
          : // Left was dereferenced: still try the right side, but the outcome is
          : // |did_something| either way.
7172 336 : BIND(&did_something_left);
7173 : {
7174 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7175 336 : did_something);
7176 : }
7177 :
7178 336 : BIND(&did_nothing_left);
7179 : {
7180 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7181 336 : &didnt_do_anything);
7182 : }
7183 :
7184 672 : BIND(&didnt_do_anything);
7185 : // Fall through if neither string was an indirect string.
7186 336 : }
7187 :
          : // Concatenates |left| + |right|. Fast paths:
          : //   - either operand empty: return the other operand unchanged;
          : //   - combined length >= ConsString::kMinLength: allocate a ConsString;
          : //   - both operands sequential with the same encoding: allocate a new
          : //     sequential string and copy both character ranges into it.
          : // If the operands are indirect (thin/flat cons), they are unwrapped and
          : // the flat-concatenation attempt is retried once. Everything else —
          : // including a result longer than String::kMaxLength, which must throw
          : // via the runtime — falls back to Runtime::kStringAdd.
7188 56 : TNode<String> CodeStubAssembler::StringAdd(Node* context, TNode<String> left,
7189 : TNode<String> right,
7190 : AllocationFlags flags) {
7191 56 : TVARIABLE(String, result);
7192 112 : Label check_right(this), runtime(this, Label::kDeferred), cons(this),
7193 112 : done(this, &result), done_native(this, &result);
7194 56 : Counters* counters = isolate()->counters();
7195 :
7196 56 : TNode<Uint32T> left_length = LoadStringLengthAsWord32(left);
7197 56 : GotoIfNot(Word32Equal(left_length, Uint32Constant(0)), &check_right);
7198 56 : result = right;
7199 56 : Goto(&done_native);
7200 :
7201 56 : BIND(&check_right);
7202 56 : TNode<Uint32T> right_length = LoadStringLengthAsWord32(right);
7203 56 : GotoIfNot(Word32Equal(right_length, Uint32Constant(0)), &cons);
7204 56 : result = left;
7205 56 : Goto(&done_native);
7206 :
7207 56 : BIND(&cons);
7208 : {
7209 56 : TNode<Uint32T> new_length = Uint32Add(left_length, right_length);
7210 :
7211 : // If new length is greater than String::kMaxLength, goto runtime to
7212 : // throw. Note: we also need to invalidate the string length protector, so
7213 : // can't just throw here directly.
7214 112 : GotoIf(Uint32GreaterThan(new_length, Uint32Constant(String::kMaxLength)),
7215 56 : &runtime);
7216 :
7217 56 : TVARIABLE(String, var_left, left);
7218 112 : TVARIABLE(String, var_right, right);
7219 56 : Variable* input_vars[2] = {&var_left, &var_right};
7220 112 : Label non_cons(this, 2, input_vars);
7221 112 : Label slow(this, Label::kDeferred);
          : // Short results are flattened eagerly instead of building a cons.
7222 112 : GotoIf(Uint32LessThan(new_length, Uint32Constant(ConsString::kMinLength)),
7223 56 : &non_cons);
7224 :
7225 56 : result =
7226 56 : NewConsString(new_length, var_left.value(), var_right.value(), flags);
7227 56 : Goto(&done_native);
7228 :
7229 56 : BIND(&non_cons);
7230 :
7231 56 : Comment("Full string concatenate");
7232 56 : Node* left_instance_type = LoadInstanceType(var_left.value());
7233 56 : Node* right_instance_type = LoadInstanceType(var_right.value());
7234 : // Compute intersection and difference of instance types.
7235 :
7236 : Node* ored_instance_types =
7237 56 : Word32Or(left_instance_type, right_instance_type);
7238 : Node* xored_instance_types =
7239 56 : Word32Xor(left_instance_type, right_instance_type);
7240 :
7241 : // Check if both strings have the same encoding and both are sequential.
          : // Mixed encodings go straight to the runtime; non-sequential operands
          : // go to the slow path, which tries to unwrap them first.
7242 56 : GotoIf(IsSetWord32(xored_instance_types, kStringEncodingMask), &runtime);
7243 56 : GotoIf(IsSetWord32(ored_instance_types, kStringRepresentationMask), &slow);
7244 :
7245 56 : TNode<IntPtrT> word_left_length = Signed(ChangeUint32ToWord(left_length));
7246 56 : TNode<IntPtrT> word_right_length = Signed(ChangeUint32ToWord(right_length));
7247 :
7248 112 : Label two_byte(this);
7249 : GotoIf(Word32Equal(Word32And(ored_instance_types,
7250 112 : Int32Constant(kStringEncodingMask)),
7251 224 : Int32Constant(kTwoByteStringTag)),
7252 56 : &two_byte);
7253 : // One-byte sequential string case
7254 56 : result = AllocateSeqOneByteString(context, new_length);
7255 : CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7256 : IntPtrConstant(0), word_left_length,
7257 56 : String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7258 : CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7259 : word_left_length, word_right_length,
7260 56 : String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7261 56 : Goto(&done_native);
7262 :
7263 56 : BIND(&two_byte);
7264 : {
7265 : // Two-byte sequential string case
7266 56 : result = AllocateSeqTwoByteString(context, new_length);
7267 : CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7268 : IntPtrConstant(0), word_left_length,
7269 : String::TWO_BYTE_ENCODING,
7270 56 : String::TWO_BYTE_ENCODING);
7271 : CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7272 : word_left_length, word_right_length,
7273 : String::TWO_BYTE_ENCODING,
7274 56 : String::TWO_BYTE_ENCODING);
7275 56 : Goto(&done_native);
7276 : }
7277 :
7278 56 : BIND(&slow);
7279 : {
7280 : // Try to unwrap indirect strings, restart the above attempt on success.
7281 : MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
7282 56 : right_instance_type, &non_cons);
7283 56 : Goto(&runtime);
7284 56 : }
7285 : }
7286 56 : BIND(&runtime);
7287 : {
7288 56 : result = CAST(CallRuntime(Runtime::kStringAdd, context, left, right));
7289 56 : Goto(&done);
7290 : }
7291 :
7292 56 : BIND(&done_native);
7293 : {
          : // Only fast-path results count towards the native-string-add counter.
7294 56 : IncrementCounter(counters->string_add_native(), 1);
7295 56 : Goto(&done);
7296 : }
7297 :
7298 56 : BIND(&done);
7299 112 : return result.value();
7300 : }
7301 :
          : // Creates a one-character (BMP code point) or two-character (supplementary
          : // code point, stored as a surrogate pair) string from |codepoint|. For
          : // UTF32 input the surrogate pair is computed here; for UTF16 input the
          : // caller is expected to have packed trail<<16|lead already — TODO(review)
          : // confirm that expectation against callers.
7302 112 : TNode<String> CodeStubAssembler::StringFromSingleCodePoint(
7303 : TNode<Int32T> codepoint, UnicodeEncoding encoding) {
7304 112 : VARIABLE(var_result, MachineRepresentation::kTagged, EmptyStringConstant());
7305 :
7306 224 : Label if_isword16(this), if_isword32(this), return_result(this);
7307 :
          : // Code points below 0x10000 fit in a single UTF-16 code unit.
7308 224 : Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
7309 112 : &if_isword32);
7310 :
7311 112 : BIND(&if_isword16);
7312 : {
7313 112 : var_result.Bind(StringFromSingleCharCode(codepoint));
7314 112 : Goto(&return_result);
7315 : }
7316 :
7317 112 : BIND(&if_isword32);
7318 : {
7319 112 : switch (encoding) {
7320 : case UnicodeEncoding::UTF16:
7321 112 : break;
7322 : case UnicodeEncoding::UTF32: {
7323 : // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
7324 0 : Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
7325 :
7326 : // lead = (codepoint >> 10) + LEAD_OFFSET
7327 : Node* lead =
7328 0 : Int32Add(Word32Shr(codepoint, Int32Constant(10)), lead_offset);
7329 :
7330 : // trail = (codepoint & 0x3FF) + 0xDC00;
7331 0 : Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
7332 0 : Int32Constant(0xDC00));
7333 :
7334 : // codpoint = (trail << 16) | lead;
7335 0 : codepoint = Signed(Word32Or(Word32Shl(trail, Int32Constant(16)), lead));
7336 0 : break;
7337 : }
7338 : }
7339 :
          : // Store both surrogate code units with a single 32-bit write.
7340 112 : Node* value = AllocateSeqTwoByteString(2);
7341 : StoreNoWriteBarrier(
7342 : MachineRepresentation::kWord32, value,
7343 112 : IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
7344 224 : codepoint);
7345 112 : var_result.Bind(value);
7346 112 : Goto(&return_result);
7347 : }
7348 :
7349 112 : BIND(&return_result);
7350 224 : return CAST(var_result.value());
7351 : }
7352 :
          : // Converts a string to a Number. Fast path: strings that cache their
          : // array index in the hash field are converted by decoding that index
          : // into a Smi; everything else calls Runtime::kStringToNumber.
7353 900 : TNode<Number> CodeStubAssembler::StringToNumber(TNode<String> input) {
7354 900 : Label runtime(this, Label::kDeferred);
7355 1800 : Label end(this);
7356 :
7357 1800 : TVARIABLE(Number, var_result);
7358 :
7359 : // Check if string has a cached array index.
7360 900 : TNode<Uint32T> hash = LoadNameHashField(input);
7361 1800 : GotoIf(IsSetWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
7362 900 : &runtime);
7363 :
          : // The cached array index always fits in a Smi.
7364 1800 : var_result =
7365 2700 : SmiTag(Signed(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
7366 900 : Goto(&end);
7367 :
7368 900 : BIND(&runtime);
7369 : {
7370 1800 : var_result =
7371 2700 : CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input));
7372 900 : Goto(&end);
7373 : }
7374 :
7375 900 : BIND(&end);
7376 1800 : return var_result.value();
7377 : }
7378 :
          : // Converts a Number to its String representation via the isolate's
          : // number-string cache (pairs of {number, string} entries, indexed by a
          : // hash of the numeric value). HeapNumbers that fit in a Smi are first
          : // normalized so they hit the Smi cache slot. Any cache miss falls back
          : // to Runtime::kNumberToString, which also populates the cache.
7379 340 : TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
7380 340 : TVARIABLE(String, result);
7381 680 : TVARIABLE(Smi, smi_input);
7382 680 : Label runtime(this, Label::kDeferred), if_smi(this), if_heap_number(this),
7383 680 : done(this, &result);
7384 :
7385 : // Load the number string cache.
7386 340 : Node* number_string_cache = LoadRoot(RootIndex::kNumberStringCache);
7387 :
7388 : // Make the hash mask from the length of the number string cache. It
7389 : // contains two elements (number and string) for each cache entry.
7390 : // TODO(ishell): cleanup mask handling.
7391 : Node* mask =
7392 340 : BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
7393 340 : TNode<IntPtrT> one = IntPtrConstant(1);
7394 340 : mask = IntPtrSub(mask, one);
7395 :
7396 340 : GotoIfNot(TaggedIsSmi(input), &if_heap_number);
7397 340 : smi_input = CAST(input);
7398 340 : Goto(&if_smi);
7399 :
7400 340 : BIND(&if_heap_number);
7401 : {
7402 340 : TNode<HeapNumber> heap_number_input = CAST(input);
7403 : // Try normalizing the HeapNumber.
7404 340 : TryHeapNumberToSmi(heap_number_input, smi_input, &if_smi);
7405 :
7406 : // Make a hash from the two 32-bit values of the double.
7407 : TNode<Int32T> low =
7408 340 : LoadObjectField<Int32T>(heap_number_input, HeapNumber::kValueOffset);
7409 : TNode<Int32T> high = LoadObjectField<Int32T>(
7410 340 : heap_number_input, HeapNumber::kValueOffset + kIntSize);
7411 340 : TNode<Word32T> hash = Word32Xor(low, high);
          : // Shift left by one because each cache entry spans two array slots.
7412 340 : TNode<WordT> word_hash = WordShl(ChangeInt32ToIntPtr(hash), one);
7413 : TNode<WordT> index =
7414 340 : WordAnd(word_hash, WordSar(mask, SmiShiftBitsConstant()));
7415 :
7416 : // Cache entry's key must be a heap number
7417 : Node* number_key =
7418 340 : UnsafeLoadFixedArrayElement(CAST(number_string_cache), index);
7419 340 : GotoIf(TaggedIsSmi(number_key), &runtime);
7420 340 : GotoIfNot(IsHeapNumber(number_key), &runtime);
7421 :
7422 : // Cache entry's key must match the heap number value we're looking for.
          : // Comparing the raw 32-bit halves avoids floating-point equality
          : // pitfalls and matches how the hash was computed.
7423 : Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
7424 340 : MachineType::Int32());
7425 : Node* high_compare = LoadObjectField(
7426 340 : number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
7427 340 : GotoIfNot(Word32Equal(low, low_compare), &runtime);
7428 340 : GotoIfNot(Word32Equal(high, high_compare), &runtime);
7429 :
7430 : // Heap number match, return value from cache entry.
7431 680 : result = CAST(UnsafeLoadFixedArrayElement(CAST(number_string_cache), index,
7432 340 : kTaggedSize));
7433 340 : Goto(&done);
7434 : }
7435 :
7436 340 : BIND(&if_smi);
7437 : {
7438 : // Load the smi key, make sure it matches the smi we're looking for.
7439 : Node* smi_index = BitcastWordToTagged(
7440 340 : WordAnd(WordShl(BitcastTaggedToWord(smi_input.value()), one), mask));
7441 340 : Node* smi_key = UnsafeLoadFixedArrayElement(CAST(number_string_cache),
7442 340 : smi_index, 0, SMI_PARAMETERS);
7443 340 : GotoIf(WordNotEqual(smi_key, smi_input.value()), &runtime);
7444 :
7445 : // Smi match, return value from cache entry.
7446 680 : result = CAST(UnsafeLoadFixedArrayElement(
7447 340 : CAST(number_string_cache), smi_index, kTaggedSize, SMI_PARAMETERS));
7448 340 : Goto(&done);
7449 : }
7450 :
7451 340 : BIND(&runtime);
7452 : {
7453 : // No cache entry, go to the runtime.
7454 680 : result =
7455 1020 : CAST(CallRuntime(Runtime::kNumberToString, NoContextConstant(), input));
7456 340 : Goto(&done);
7457 : }
7458 340 : BIND(&done);
7459 680 : return result.value();
7460 : }
7461 :
          : // Implements ToNumber/ToNumeric for a non-Smi, non-HeapNumber input:
          : //   - String: via StringToNumber;
          : //   - BigInt: returned as-is for kToNumeric; for kToNumber either
          : //     converted (kConvertToNumber) or routed to the runtime to throw
          : //     (kThrow);
          : //   - Oddball: its precomputed to-number value is loaded directly;
          : //   - JSReceiver: converted with ToPrimitive(hint Number) and the result
          : //     re-dispatched through the loop;
          : //   - anything else: Runtime::kToNumber / kToNumeric (which throws the
          : //     appropriate TypeError).
7462 844 : Node* CodeStubAssembler::NonNumberToNumberOrNumeric(
7463 : Node* context, Node* input, Object::Conversion mode,
7464 : BigIntHandling bigint_handling) {
7465 : CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
7466 : CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
7467 :
7468 : // We might need to loop once here due to ToPrimitive conversions.
7469 844 : VARIABLE(var_input, MachineRepresentation::kTagged, input);
7470 1688 : VARIABLE(var_result, MachineRepresentation::kTagged);
7471 1688 : Label loop(this, &var_input);
7472 1688 : Label end(this);
7473 844 : Goto(&loop);
7474 844 : BIND(&loop);
7475 : {
7476 : // Load the current {input} value (known to be a HeapObject).
7477 844 : Node* input = var_input.value();
7478 :
7479 : // Dispatch on the {input} instance type.
7480 844 : Node* input_instance_type = LoadInstanceType(input);
7481 1688 : Label if_inputisstring(this), if_inputisoddball(this),
7482 1688 : if_inputisbigint(this), if_inputisreceiver(this, Label::kDeferred),
7483 1688 : if_inputisother(this, Label::kDeferred);
7484 844 : GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
7485 844 : GotoIf(IsBigIntInstanceType(input_instance_type), &if_inputisbigint);
7486 1688 : GotoIf(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE),
7487 844 : &if_inputisoddball);
7488 1688 : Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
7489 844 : &if_inputisother);
7490 :
7491 844 : BIND(&if_inputisstring);
7492 : {
7493 : // The {input} is a String, use the fast stub to convert it to a Number.
7494 844 : TNode<String> string_input = CAST(input);
7495 844 : var_result.Bind(StringToNumber(string_input));
7496 844 : Goto(&end);
7497 : }
7498 :
7499 844 : BIND(&if_inputisbigint);
7500 844 : if (mode == Object::Conversion::kToNumeric) {
          : // ToNumeric accepts BigInts unchanged.
7501 112 : var_result.Bind(input);
7502 112 : Goto(&end);
7503 : } else {
7504 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
7505 732 : if (bigint_handling == BigIntHandling::kThrow) {
          : // ToNumber on a BigInt throws; route through the runtime path so
          : // the proper TypeError is raised.
7506 620 : Goto(&if_inputisother);
7507 : } else {
7508 : DCHECK_EQ(bigint_handling, BigIntHandling::kConvertToNumber);
7509 112 : var_result.Bind(CallRuntime(Runtime::kBigIntToNumber, context, input));
7510 112 : Goto(&end);
7511 : }
7512 : }
7513 :
7514 844 : BIND(&if_inputisoddball);
7515 : {
7516 : // The {input} is an Oddball, we just need to load the Number value of it.
7517 844 : var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
7518 844 : Goto(&end);
7519 : }
7520 :
7521 844 : BIND(&if_inputisreceiver);
7522 : {
7523 : // The {input} is a JSReceiver, we need to convert it to a Primitive first
7524 : // using the ToPrimitive type conversion, preferably yielding a Number.
7525 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
7526 844 : isolate(), ToPrimitiveHint::kNumber);
7527 844 : Node* result = CallStub(callable, context, input);
7528 :
7529 : // Check if the {result} is already a Number/Numeric.
7530 1688 : Label if_done(this), if_notdone(this);
7531 2308 : Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
7532 956 : : IsNumeric(result),
7533 1800 : &if_done, &if_notdone);
7534 :
7535 844 : BIND(&if_done);
7536 : {
7537 : // The ToPrimitive conversion already gave us a Number/Numeric, so we're
7538 : // done.
7539 844 : var_result.Bind(result);
7540 844 : Goto(&end);
7541 : }
7542 :
7543 844 : BIND(&if_notdone);
7544 : {
7545 : // We now have a Primitive {result}, but it's not yet a Number/Numeric.
          : // Loop to dispatch on the primitive's type. ToPrimitive never
          : // returns a JSReceiver, so this loop takes at most one extra trip.
7546 844 : var_input.Bind(result);
7547 844 : Goto(&loop);
7548 844 : }
7549 : }
7550 :
7551 844 : BIND(&if_inputisother);
7552 : {
7553 : // The {input} is something else (e.g. Symbol), let the runtime figure
7554 : // out the correct exception.
7555 : // Note: We cannot tail call to the runtime here, as js-to-wasm
7556 : // trampolines also use this code currently, and they declare all
7557 : // outgoing parameters as untagged, while we would push a tagged
7558 : // object here.
7559 : auto function_id = mode == Object::Conversion::kToNumber
7560 : ? Runtime::kToNumber
7561 844 : : Runtime::kToNumeric;
7562 844 : var_result.Bind(CallRuntime(function_id, context, input));
7563 844 : Goto(&end);
7564 844 : }
7565 : }
7566 :
7567 844 : BIND(&end);
7568 : if (mode == Object::Conversion::kToNumeric) {
7569 : CSA_ASSERT(this, IsNumeric(var_result.value()));
7570 : } else {
7571 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
7572 : CSA_ASSERT(this, IsNumber(var_result.value()));
7573 : }
7574 1688 : return var_result.value();
7575 : }
7576 :
          : // ToNumber for a non-Smi, non-HeapNumber input; thin typed wrapper over
          : // NonNumberToNumberOrNumeric with mode kToNumber.
7577 732 : TNode<Number> CodeStubAssembler::NonNumberToNumber(
7578 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input,
7579 : BigIntHandling bigint_handling) {
7580 732 : return CAST(NonNumberToNumberOrNumeric(
7581 : context, input, Object::Conversion::kToNumber, bigint_handling));
7582 : }
7583 :
          : // ToNumeric for a non-Smi, non-HeapNumber input; thin typed wrapper over
          : // NonNumberToNumberOrNumeric with mode kToNumeric (BigInts pass through).
7584 112 : TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(
7585 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input) {
7586 : Node* result = NonNumberToNumberOrNumeric(context, input,
7587 112 : Object::Conversion::kToNumeric);
7588 : CSA_SLOW_ASSERT(this, IsNumeric(result));
7589 112 : return UncheckedCast<Numeric>(result);
7590 : }
7591 :
          : // Inline-friendly ToNumber: handles Smi and HeapNumber inputs here and
          : // delegates everything else to the NonNumberToNumber builtin, keeping
          : // the inlined code footprint small.
7592 616 : TNode<Number> CodeStubAssembler::ToNumber_Inline(SloppyTNode<Context> context,
7593 : SloppyTNode<Object> input) {
7594 616 : TVARIABLE(Number, var_result);
7595 1232 : Label end(this), not_smi(this, Label::kDeferred);
7596 :
7597 616 : GotoIfNot(TaggedIsSmi(input), &not_smi);
7598 616 : var_result = CAST(input);
7599 616 : Goto(&end);
7600 :
7601 616 : BIND(&not_smi);
7602 : {
7603 2464 : var_result =
7604 1848 : Select<Number>(IsHeapNumber(CAST(input)), [=] { return CAST(input); },
7605 616 : [=] {
7606 1232 : return CAST(CallBuiltin(Builtins::kNonNumberToNumber,
7607 : context, input));
7608 1848 : });
7609 616 : Goto(&end);
7610 : }
7611 :
7612 616 : BIND(&end);
7613 1232 : return var_result.value();
7614 : }
7615 :
          : // Full ToNumber: Smi and HeapNumber inputs are returned unchanged; all
          : // other inputs are converted through NonNumberToNumber, with BigInt
          : // behavior controlled by |bigint_handling|.
7616 676 : TNode<Number> CodeStubAssembler::ToNumber(SloppyTNode<Context> context,
7617 : SloppyTNode<Object> input,
7618 : BigIntHandling bigint_handling) {
7619 676 : TVARIABLE(Number, var_result);
7620 1352 : Label end(this);
7621 :
7622 1352 : Label not_smi(this, Label::kDeferred);
7623 676 : GotoIfNot(TaggedIsSmi(input), &not_smi);
7624 676 : TNode<Smi> input_smi = CAST(input);
7625 676 : var_result = input_smi;
7626 676 : Goto(&end);
7627 :
7628 676 : BIND(&not_smi);
7629 : {
7630 676 : Label not_heap_number(this, Label::kDeferred);
7631 676 : TNode<HeapObject> input_ho = CAST(input);
7632 676 : GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
7633 :
7634 676 : TNode<HeapNumber> input_hn = CAST(input_ho);
7635 676 : var_result = input_hn;
7636 676 : Goto(&end);
7637 :
7638 676 : BIND(&not_heap_number);
7639 : {
7640 676 : var_result = NonNumberToNumber(context, input_ho, bigint_handling);
7641 676 : Goto(&end);
7642 676 : }
7643 : }
7644 :
7645 676 : BIND(&end);
7646 1352 : return var_result.value();
7647 : }
7648 :
7649 1568 : TNode<BigInt> CodeStubAssembler::ToBigInt(SloppyTNode<Context> context,
7650 : SloppyTNode<Object> input) {
7651 1568 : TVARIABLE(BigInt, var_result);
7652 3136 : Label if_bigint(this), done(this), if_throw(this);
7653 :
7654 1568 : GotoIf(TaggedIsSmi(input), &if_throw);
7655 1568 : GotoIf(IsBigInt(CAST(input)), &if_bigint);
7656 1568 : var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input));
7657 1568 : Goto(&done);
7658 :
7659 1568 : BIND(&if_bigint);
7660 1568 : var_result = CAST(input);
7661 1568 : Goto(&done);
7662 :
7663 1568 : BIND(&if_throw);
7664 1568 : ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
7665 :
7666 1568 : BIND(&done);
7667 3136 : return var_result.value();
7668 : }
7669 :
7670 336 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7671 : Variable* var_numeric) {
7672 336 : TaggedToNumeric(context, value, done, var_numeric, nullptr);
7673 336 : }
7674 :
7675 1008 : void CodeStubAssembler::TaggedToNumericWithFeedback(Node* context, Node* value,
7676 : Label* done,
7677 : Variable* var_numeric,
7678 : Variable* var_feedback) {
7679 : DCHECK_NOT_NULL(var_feedback);
7680 1008 : TaggedToNumeric(context, value, done, var_numeric, var_feedback);
7681 1008 : }
7682 :
7683 1344 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7684 : Variable* var_numeric,
7685 : Variable* var_feedback) {
7686 1344 : var_numeric->Bind(value);
7687 2688 : Label if_smi(this), if_heapnumber(this), if_bigint(this), if_oddball(this);
7688 1344 : GotoIf(TaggedIsSmi(value), &if_smi);
7689 1344 : Node* map = LoadMap(value);
7690 1344 : GotoIf(IsHeapNumberMap(map), &if_heapnumber);
7691 1344 : Node* instance_type = LoadMapInstanceType(map);
7692 1344 : GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
7693 :
7694 : // {value} is not a Numeric yet.
7695 1344 : GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
7696 1344 : var_numeric->Bind(CallBuiltin(Builtins::kNonNumberToNumeric, context, value));
7697 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
7698 1344 : Goto(done);
7699 :
7700 1344 : BIND(&if_smi);
7701 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
7702 1344 : Goto(done);
7703 :
7704 1344 : BIND(&if_heapnumber);
7705 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumber);
7706 1344 : Goto(done);
7707 :
7708 1344 : BIND(&if_bigint);
7709 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
7710 1344 : Goto(done);
7711 :
7712 1344 : BIND(&if_oddball);
7713 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumberOrOddball);
7714 1344 : var_numeric->Bind(LoadObjectField(value, Oddball::kToNumberOffset));
7715 2688 : Goto(done);
7716 1344 : }
7717 :
7718 : // ES#sec-touint32
7719 60 : TNode<Number> CodeStubAssembler::ToUint32(SloppyTNode<Context> context,
7720 : SloppyTNode<Object> input) {
7721 60 : Node* const float_zero = Float64Constant(0.0);
7722 60 : Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
7723 :
7724 60 : Label out(this);
7725 :
7726 120 : VARIABLE(var_result, MachineRepresentation::kTagged, input);
7727 :
7728 : // Early exit for positive smis.
7729 : {
7730 : // TODO(jgruber): This branch and the recheck below can be removed once we
7731 : // have a ToNumber with multiple exits.
7732 60 : Label next(this, Label::kDeferred);
7733 60 : Branch(TaggedIsPositiveSmi(input), &out, &next);
7734 60 : BIND(&next);
7735 : }
7736 :
7737 60 : Node* const number = ToNumber(context, input);
7738 60 : var_result.Bind(number);
7739 :
7740 : // Perhaps we have a positive smi now.
7741 : {
7742 60 : Label next(this, Label::kDeferred);
7743 60 : Branch(TaggedIsPositiveSmi(number), &out, &next);
7744 60 : BIND(&next);
7745 : }
7746 :
7747 120 : Label if_isnegativesmi(this), if_isheapnumber(this);
7748 60 : Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
7749 :
7750 60 : BIND(&if_isnegativesmi);
7751 : {
7752 60 : Node* const uint32_value = SmiToInt32(number);
7753 60 : Node* float64_value = ChangeUint32ToFloat64(uint32_value);
7754 60 : var_result.Bind(AllocateHeapNumberWithValue(float64_value));
7755 60 : Goto(&out);
7756 : }
7757 :
7758 60 : BIND(&if_isheapnumber);
7759 : {
7760 60 : Label return_zero(this);
7761 60 : Node* const value = LoadHeapNumberValue(number);
7762 :
7763 : {
7764 : // +-0.
7765 60 : Label next(this);
7766 60 : Branch(Float64Equal(value, float_zero), &return_zero, &next);
7767 60 : BIND(&next);
7768 : }
7769 :
7770 : {
7771 : // NaN.
7772 60 : Label next(this);
7773 60 : Branch(Float64Equal(value, value), &next, &return_zero);
7774 60 : BIND(&next);
7775 : }
7776 :
7777 : {
7778 : // +Infinity.
7779 60 : Label next(this);
7780 : Node* const positive_infinity =
7781 60 : Float64Constant(std::numeric_limits<double>::infinity());
7782 60 : Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
7783 60 : BIND(&next);
7784 : }
7785 :
7786 : {
7787 : // -Infinity.
7788 60 : Label next(this);
7789 : Node* const negative_infinity =
7790 60 : Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
7791 60 : Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
7792 60 : BIND(&next);
7793 : }
7794 :
7795 : // * Let int be the mathematical value that is the same sign as number and
7796 : // whose magnitude is floor(abs(number)).
7797 : // * Let int32bit be int modulo 2^32.
7798 : // * Return int32bit.
7799 : {
7800 60 : Node* x = Float64Trunc(value);
7801 60 : x = Float64Mod(x, float_two_32);
7802 60 : x = Float64Add(x, float_two_32);
7803 60 : x = Float64Mod(x, float_two_32);
7804 :
7805 60 : Node* const result = ChangeFloat64ToTagged(x);
7806 60 : var_result.Bind(result);
7807 60 : Goto(&out);
7808 : }
7809 :
7810 60 : BIND(&return_zero);
7811 : {
7812 60 : var_result.Bind(SmiConstant(0));
7813 60 : Goto(&out);
7814 60 : }
7815 : }
7816 :
7817 60 : BIND(&out);
7818 120 : return CAST(var_result.value());
7819 : }
7820 :
7821 172 : TNode<String> CodeStubAssembler::ToString(SloppyTNode<Context> context,
7822 : SloppyTNode<Object> input) {
7823 172 : Label is_number(this);
7824 344 : Label runtime(this, Label::kDeferred), done(this);
7825 344 : VARIABLE(result, MachineRepresentation::kTagged);
7826 172 : GotoIf(TaggedIsSmi(input), &is_number);
7827 :
7828 172 : TNode<Map> input_map = LoadMap(CAST(input));
7829 172 : TNode<Int32T> input_instance_type = LoadMapInstanceType(input_map);
7830 :
7831 172 : result.Bind(input);
7832 172 : GotoIf(IsStringInstanceType(input_instance_type), &done);
7833 :
7834 344 : Label not_heap_number(this);
7835 172 : Branch(IsHeapNumberMap(input_map), &is_number, ¬_heap_number);
7836 :
7837 172 : BIND(&is_number);
7838 172 : TNode<Number> number_input = CAST(input);
7839 172 : result.Bind(NumberToString(number_input));
7840 172 : Goto(&done);
7841 :
7842 172 : BIND(¬_heap_number);
7843 : {
7844 172 : GotoIfNot(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE), &runtime);
7845 172 : result.Bind(LoadObjectField(CAST(input), Oddball::kToStringOffset));
7846 172 : Goto(&done);
7847 : }
7848 :
7849 172 : BIND(&runtime);
7850 : {
7851 172 : result.Bind(CallRuntime(Runtime::kToString, context, input));
7852 172 : Goto(&done);
7853 : }
7854 :
7855 172 : BIND(&done);
7856 344 : return CAST(result.value());
7857 : }
7858 :
7859 2968 : TNode<String> CodeStubAssembler::ToString_Inline(SloppyTNode<Context> context,
7860 : SloppyTNode<Object> input) {
7861 2968 : VARIABLE(var_result, MachineRepresentation::kTagged, input);
7862 5936 : Label stub_call(this, Label::kDeferred), out(this);
7863 :
7864 2968 : GotoIf(TaggedIsSmi(input), &stub_call);
7865 2968 : Branch(IsString(CAST(input)), &out, &stub_call);
7866 :
7867 2968 : BIND(&stub_call);
7868 2968 : var_result.Bind(CallBuiltin(Builtins::kToString, context, input));
7869 2968 : Goto(&out);
7870 :
7871 2968 : BIND(&out);
7872 5936 : return CAST(var_result.value());
7873 : }
7874 :
7875 112 : Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
7876 224 : Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
7877 224 : VARIABLE(result, MachineRepresentation::kTagged);
7878 224 : Label done(this, &result);
7879 :
7880 112 : BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7881 :
7882 112 : BIND(&if_isreceiver);
7883 : {
7884 : // Convert {input} to a primitive first passing Number hint.
7885 112 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
7886 112 : result.Bind(CallStub(callable, context, input));
7887 112 : Goto(&done);
7888 : }
7889 :
7890 112 : BIND(&if_isnotreceiver);
7891 : {
7892 112 : result.Bind(input);
7893 112 : Goto(&done);
7894 : }
7895 :
7896 112 : BIND(&done);
7897 224 : return result.value();
7898 : }
7899 :
7900 1848 : TNode<JSReceiver> CodeStubAssembler::ToObject(SloppyTNode<Context> context,
7901 : SloppyTNode<Object> input) {
7902 1848 : return CAST(CallBuiltin(Builtins::kToObject, context, input));
7903 : }
7904 :
7905 1624 : TNode<JSReceiver> CodeStubAssembler::ToObject_Inline(TNode<Context> context,
7906 : TNode<Object> input) {
7907 1624 : TVARIABLE(JSReceiver, result);
7908 3248 : Label if_isreceiver(this), if_isnotreceiver(this, Label::kDeferred);
7909 3248 : Label done(this);
7910 :
7911 1624 : BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7912 :
7913 1624 : BIND(&if_isreceiver);
7914 : {
7915 1624 : result = CAST(input);
7916 1624 : Goto(&done);
7917 : }
7918 :
7919 1624 : BIND(&if_isnotreceiver);
7920 : {
7921 1624 : result = ToObject(context, input);
7922 1624 : Goto(&done);
7923 : }
7924 :
7925 1624 : BIND(&done);
7926 3248 : return result.value();
7927 : }
7928 :
7929 616 : TNode<Smi> CodeStubAssembler::ToSmiIndex(TNode<Context> context,
7930 : TNode<Object> input,
7931 : Label* range_error) {
7932 616 : TVARIABLE(Smi, result);
7933 1232 : Label check_undefined(this), return_zero(this), defined(this),
7934 1232 : negative_check(this), done(this);
7935 :
7936 616 : GotoIfNot(TaggedIsSmi(input), &check_undefined);
7937 616 : result = CAST(input);
7938 616 : Goto(&negative_check);
7939 :
7940 616 : BIND(&check_undefined);
7941 616 : Branch(IsUndefined(input), &return_zero, &defined);
7942 :
7943 616 : BIND(&defined);
7944 : TNode<Number> integer_input =
7945 616 : CAST(CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
7946 616 : GotoIfNot(TaggedIsSmi(integer_input), range_error);
7947 616 : result = CAST(integer_input);
7948 616 : Goto(&negative_check);
7949 :
7950 616 : BIND(&negative_check);
7951 616 : Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);
7952 :
7953 616 : BIND(&return_zero);
7954 616 : result = SmiConstant(0);
7955 616 : Goto(&done);
7956 :
7957 616 : BIND(&done);
7958 1232 : return result.value();
7959 : }
7960 :
7961 168 : TNode<Smi> CodeStubAssembler::ToSmiLength(TNode<Context> context,
7962 : TNode<Object> input,
7963 : Label* range_error) {
7964 168 : TVARIABLE(Smi, result);
7965 336 : Label to_integer(this), negative_check(this),
7966 336 : heap_number_negative_check(this), return_zero(this), done(this);
7967 :
7968 168 : GotoIfNot(TaggedIsSmi(input), &to_integer);
7969 168 : result = CAST(input);
7970 168 : Goto(&negative_check);
7971 :
7972 168 : BIND(&to_integer);
7973 : {
7974 168 : TNode<Number> integer_input = CAST(
7975 : CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
7976 168 : GotoIfNot(TaggedIsSmi(integer_input), &heap_number_negative_check);
7977 168 : result = CAST(integer_input);
7978 168 : Goto(&negative_check);
7979 :
7980 : // integer_input can still be a negative HeapNumber here.
7981 168 : BIND(&heap_number_negative_check);
7982 168 : TNode<HeapNumber> heap_number_input = CAST(integer_input);
7983 : Branch(IsTrue(CallBuiltin(Builtins::kLessThan, context, heap_number_input,
7984 336 : SmiConstant(0))),
7985 168 : &return_zero, range_error);
7986 : }
7987 :
7988 168 : BIND(&negative_check);
7989 168 : Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
7990 :
7991 168 : BIND(&return_zero);
7992 168 : result = SmiConstant(0);
7993 168 : Goto(&done);
7994 :
7995 168 : BIND(&done);
7996 336 : return result.value();
7997 : }
7998 :
7999 1680 : TNode<Number> CodeStubAssembler::ToLength_Inline(SloppyTNode<Context> context,
8000 : SloppyTNode<Object> input) {
8001 1680 : TNode<Smi> smi_zero = SmiConstant(0);
8002 : return Select<Number>(
8003 1680 : TaggedIsSmi(input), [=] { return SmiMax(CAST(input), smi_zero); },
8004 3360 : [=] { return CAST(CallBuiltin(Builtins::kToLength, context, input)); });
8005 : }
8006 :
8007 3192 : TNode<Number> CodeStubAssembler::ToInteger_Inline(
8008 : SloppyTNode<Context> context, SloppyTNode<Object> input,
8009 : ToIntegerTruncationMode mode) {
8010 : Builtins::Name builtin = (mode == kNoTruncation)
8011 : ? Builtins::kToInteger
8012 3192 : : Builtins::kToInteger_TruncateMinusZero;
8013 : return Select<Number>(
8014 3192 : TaggedIsSmi(input), [=] { return CAST(input); },
8015 6384 : [=] { return CAST(CallBuiltin(builtin, context, input)); });
8016 : }
8017 :
8018 112 : TNode<Number> CodeStubAssembler::ToInteger(SloppyTNode<Context> context,
8019 : SloppyTNode<Object> input,
8020 : ToIntegerTruncationMode mode) {
8021 : // We might need to loop once for ToNumber conversion.
8022 112 : TVARIABLE(Object, var_arg, input);
8023 224 : Label loop(this, &var_arg), out(this);
8024 112 : Goto(&loop);
8025 112 : BIND(&loop);
8026 : {
8027 : // Shared entry points.
8028 112 : Label return_zero(this, Label::kDeferred);
8029 :
8030 : // Load the current {arg} value.
8031 112 : TNode<Object> arg = var_arg.value();
8032 :
8033 : // Check if {arg} is a Smi.
8034 112 : GotoIf(TaggedIsSmi(arg), &out);
8035 :
8036 : // Check if {arg} is a HeapNumber.
8037 224 : Label if_argisheapnumber(this),
8038 224 : if_argisnotheapnumber(this, Label::kDeferred);
8039 224 : Branch(IsHeapNumber(CAST(arg)), &if_argisheapnumber,
8040 112 : &if_argisnotheapnumber);
8041 :
8042 112 : BIND(&if_argisheapnumber);
8043 : {
8044 112 : TNode<HeapNumber> arg_hn = CAST(arg);
8045 : // Load the floating-point value of {arg}.
8046 112 : Node* arg_value = LoadHeapNumberValue(arg_hn);
8047 :
8048 : // Check if {arg} is NaN.
8049 112 : GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);
8050 :
8051 : // Truncate {arg} towards zero.
8052 112 : TNode<Float64T> value = Float64Trunc(arg_value);
8053 :
8054 112 : if (mode == kTruncateMinusZero) {
8055 : // Truncate -0.0 to 0.
8056 56 : GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
8057 : }
8058 :
8059 112 : var_arg = ChangeFloat64ToTagged(value);
8060 112 : Goto(&out);
8061 : }
8062 :
8063 112 : BIND(&if_argisnotheapnumber);
8064 : {
8065 : // Need to convert {arg} to a Number first.
8066 224 : var_arg = UncheckedCast<Object>(
8067 112 : CallBuiltin(Builtins::kNonNumberToNumber, context, arg));
8068 112 : Goto(&loop);
8069 : }
8070 :
8071 112 : BIND(&return_zero);
8072 112 : var_arg = SmiConstant(0);
8073 224 : Goto(&out);
8074 : }
8075 :
8076 112 : BIND(&out);
8077 : if (mode == kTruncateMinusZero) {
8078 : CSA_ASSERT(this, IsNumberNormalized(CAST(var_arg.value())));
8079 : }
8080 224 : return CAST(var_arg.value());
8081 : }
8082 :
8083 35548 : TNode<Uint32T> CodeStubAssembler::DecodeWord32(SloppyTNode<Word32T> word32,
8084 : uint32_t shift, uint32_t mask) {
8085 : return UncheckedCast<Uint32T>(Word32Shr(
8086 35548 : Word32And(word32, Int32Constant(mask)), static_cast<int>(shift)));
8087 : }
8088 :
8089 21128 : TNode<UintPtrT> CodeStubAssembler::DecodeWord(SloppyTNode<WordT> word,
8090 : uint32_t shift, uint32_t mask) {
8091 : return Unsigned(
8092 21128 : WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift)));
8093 : }
8094 :
8095 392 : TNode<WordT> CodeStubAssembler::UpdateWord(TNode<WordT> word,
8096 : TNode<WordT> value, uint32_t shift,
8097 : uint32_t mask) {
8098 392 : TNode<WordT> encoded_value = WordShl(value, static_cast<int>(shift));
8099 392 : TNode<IntPtrT> inverted_mask = IntPtrConstant(~static_cast<intptr_t>(mask));
8100 : // Ensure the {value} fits fully in the mask.
8101 : CSA_ASSERT(this, WordEqual(WordAnd(encoded_value, inverted_mask),
8102 : IntPtrConstant(0)));
8103 392 : return WordOr(WordAnd(word, inverted_mask), encoded_value);
8104 : }
8105 :
8106 0 : void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
8107 0 : if (FLAG_native_code_counters && counter->Enabled()) {
8108 : Node* counter_address =
8109 0 : ExternalConstant(ExternalReference::Create(counter));
8110 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
8111 0 : Int32Constant(value));
8112 : }
8113 0 : }
8114 :
8115 3368 : void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
8116 : DCHECK_GT(delta, 0);
8117 3368 : if (FLAG_native_code_counters && counter->Enabled()) {
8118 : Node* counter_address =
8119 0 : ExternalConstant(ExternalReference::Create(counter));
8120 0 : Node* value = Load(MachineType::Int32(), counter_address);
8121 0 : value = Int32Add(value, Int32Constant(delta));
8122 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8123 : }
8124 3368 : }
8125 :
8126 0 : void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
8127 : DCHECK_GT(delta, 0);
8128 0 : if (FLAG_native_code_counters && counter->Enabled()) {
8129 : Node* counter_address =
8130 0 : ExternalConstant(ExternalReference::Create(counter));
8131 0 : Node* value = Load(MachineType::Int32(), counter_address);
8132 0 : value = Int32Sub(value, Int32Constant(delta));
8133 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8134 : }
8135 0 : }
8136 :
8137 48716 : void CodeStubAssembler::Increment(Variable* variable, int value,
8138 : ParameterMode mode) {
8139 : DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
8140 : variable->rep() == MachineType::PointerRepresentation());
8141 : DCHECK_IMPLIES(mode == SMI_PARAMETERS,
8142 : variable->rep() == MachineRepresentation::kTagged ||
8143 : variable->rep() == MachineRepresentation::kTaggedSigned);
8144 : variable->Bind(IntPtrOrSmiAdd(variable->value(),
8145 48716 : IntPtrOrSmiConstant(value, mode), mode));
8146 48716 : }
8147 :
8148 56 : void CodeStubAssembler::Use(Label* label) {
8149 56 : GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
8150 56 : }
8151 :
8152 1516 : void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
8153 : Variable* var_index, Label* if_keyisunique,
8154 : Variable* var_unique, Label* if_bailout,
8155 : Label* if_notinternalized) {
8156 : DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
8157 : DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
8158 1516 : Comment("TryToName");
8159 :
8160 3032 : Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this),
8161 3032 : if_keyisother(this, Label::kDeferred);
8162 : // Handle Smi and HeapNumber keys.
8163 1516 : var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
8164 1516 : Goto(if_keyisindex);
8165 :
8166 1516 : BIND(&if_keyisnotindex);
8167 1516 : Node* key_map = LoadMap(key);
8168 1516 : var_unique->Bind(key);
8169 : // Symbols are unique.
8170 1516 : GotoIf(IsSymbolMap(key_map), if_keyisunique);
8171 1516 : Node* key_instance_type = LoadMapInstanceType(key_map);
8172 : // Miss if |key| is not a String.
8173 : STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
8174 1516 : GotoIfNot(IsStringInstanceType(key_instance_type), &if_keyisother);
8175 :
8176 : // |key| is a String. Check if it has a cached array index.
8177 1516 : Node* hash = LoadNameHashField(key);
8178 3032 : GotoIf(IsClearWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
8179 1516 : &if_hascachedindex);
8180 : // No cached array index. If the string knows that it contains an index,
8181 : // then it must be an uncacheable index. Handle this case in the runtime.
8182 1516 : GotoIf(IsClearWord32(hash, Name::kIsNotArrayIndexMask), if_bailout);
8183 : // Check if we have a ThinString.
8184 3032 : GotoIf(InstanceTypeEqual(key_instance_type, THIN_STRING_TYPE),
8185 1516 : &if_thinstring);
8186 3032 : GotoIf(InstanceTypeEqual(key_instance_type, THIN_ONE_BYTE_STRING_TYPE),
8187 1516 : &if_thinstring);
8188 : // Finally, check if |key| is internalized.
8189 : STATIC_ASSERT(kNotInternalizedTag != 0);
8190 3032 : GotoIf(IsSetWord32(key_instance_type, kIsNotInternalizedMask),
8191 3032 : if_notinternalized != nullptr ? if_notinternalized : if_bailout);
8192 1516 : Goto(if_keyisunique);
8193 :
8194 1516 : BIND(&if_thinstring);
8195 1516 : var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
8196 1516 : Goto(if_keyisunique);
8197 :
8198 1516 : BIND(&if_hascachedindex);
8199 1516 : var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
8200 1516 : Goto(if_keyisindex);
8201 :
8202 1516 : BIND(&if_keyisother);
8203 1516 : GotoIfNot(InstanceTypeEqual(key_instance_type, ODDBALL_TYPE), if_bailout);
8204 1516 : var_unique->Bind(LoadObjectField(key, Oddball::kToStringOffset));
8205 3032 : Goto(if_keyisunique);
8206 1516 : }
8207 :
8208 392 : void CodeStubAssembler::TryInternalizeString(
8209 : Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
8210 : Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
8211 : DCHECK(var_index->rep() == MachineType::PointerRepresentation());
8212 : DCHECK_EQ(var_internalized->rep(), MachineRepresentation::kTagged);
8213 : CSA_SLOW_ASSERT(this, IsString(string));
8214 : Node* function =
8215 392 : ExternalConstant(ExternalReference::try_internalize_string_function());
8216 : Node* const isolate_ptr =
8217 392 : ExternalConstant(ExternalReference::isolate_address(isolate()));
8218 : Node* result =
8219 : CallCFunction2(MachineType::AnyTagged(), MachineType::Pointer(),
8220 392 : MachineType::AnyTagged(), function, isolate_ptr, string);
8221 392 : Label internalized(this);
8222 392 : GotoIf(TaggedIsNotSmi(result), &internalized);
8223 392 : Node* word_result = SmiUntag(result);
8224 784 : GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
8225 392 : if_not_internalized);
8226 784 : GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
8227 392 : if_bailout);
8228 392 : var_index->Bind(word_result);
8229 392 : Goto(if_index);
8230 :
8231 392 : BIND(&internalized);
8232 392 : var_internalized->Bind(result);
8233 392 : Goto(if_internalized);
8234 392 : }
8235 :
8236 : template <typename Dictionary>
8237 32284 : TNode<IntPtrT> CodeStubAssembler::EntryToIndex(TNode<IntPtrT> entry,
8238 : int field_index) {
8239 : TNode<IntPtrT> entry_index =
8240 32284 : IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
8241 : return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
8242 32284 : field_index));
8243 : }
8244 :
8245 8180 : TNode<MaybeObject> CodeStubAssembler::LoadDescriptorArrayElement(
8246 : TNode<DescriptorArray> object, Node* index, int additional_offset) {
8247 : return LoadArrayElement(object, DescriptorArray::kHeaderSize, index,
8248 8180 : additional_offset);
8249 : }
8250 :
8251 392 : TNode<Name> CodeStubAssembler::LoadKeyByKeyIndex(
8252 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8253 392 : return CAST(LoadDescriptorArrayElement(container, key_index, 0));
8254 : }
8255 :
8256 2132 : TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
8257 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8258 : const int kKeyToDetails =
8259 2132 : DescriptorArray::ToDetailsIndex(0) - DescriptorArray::ToKeyIndex(0);
8260 : return Unsigned(
8261 : LoadAndUntagToWord32ArrayElement(container, DescriptorArray::kHeaderSize,
8262 2132 : key_index, kKeyToDetails * kTaggedSize));
8263 : }
8264 :
8265 2020 : TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
8266 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8267 : const int kKeyToValue =
8268 2020 : DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
8269 2020 : return CAST(LoadDescriptorArrayElement(container, key_index,
8270 : kKeyToValue * kTaggedSize));
8271 : }
8272 :
8273 728 : TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByKeyIndex(
8274 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8275 : const int kKeyToValue =
8276 728 : DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
8277 : return LoadDescriptorArrayElement(container, key_index,
8278 728 : kKeyToValue * kTaggedSize);
8279 : }
8280 :
8281 4928 : TNode<IntPtrT> CodeStubAssembler::DescriptorEntryToIndex(
8282 : TNode<IntPtrT> descriptor_entry) {
8283 : return IntPtrMul(descriptor_entry,
8284 4928 : IntPtrConstant(DescriptorArray::kEntrySize));
8285 : }
8286 :
8287 112 : TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8288 : TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8289 112 : return CAST(LoadDescriptorArrayElement(
8290 : container, DescriptorEntryToIndex(descriptor_entry),
8291 : DescriptorArray::ToKeyIndex(0) * kTaggedSize));
8292 : }
8293 :
8294 112 : TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8295 : TNode<DescriptorArray> container, int descriptor_entry) {
8296 112 : return CAST(LoadDescriptorArrayElement(
8297 : container, IntPtrConstant(0),
8298 : DescriptorArray::ToKeyIndex(descriptor_entry) * kTaggedSize));
8299 : }
8300 :
8301 112 : TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8302 : TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8303 : return Unsigned(LoadAndUntagToWord32ArrayElement(
8304 : container, DescriptorArray::kHeaderSize,
8305 : DescriptorEntryToIndex(descriptor_entry),
8306 112 : DescriptorArray::ToDetailsIndex(0) * kTaggedSize));
8307 : }
8308 :
8309 672 : TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8310 : TNode<DescriptorArray> container, int descriptor_entry) {
8311 : return Unsigned(LoadAndUntagToWord32ArrayElement(
8312 672 : container, DescriptorArray::kHeaderSize, IntPtrConstant(0),
8313 1344 : DescriptorArray::ToDetailsIndex(descriptor_entry) * kTaggedSize));
8314 : }
8315 :
8316 112 : TNode<Object> CodeStubAssembler::LoadValueByDescriptorEntry(
8317 : TNode<DescriptorArray> container, int descriptor_entry) {
8318 112 : return CAST(LoadDescriptorArrayElement(
8319 : container, IntPtrConstant(0),
8320 : DescriptorArray::ToValueIndex(descriptor_entry) * kTaggedSize));
8321 : }
8322 :
8323 4704 : TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByDescriptorEntry(
8324 : TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8325 : return LoadDescriptorArrayElement(
8326 : container, DescriptorEntryToIndex(descriptor_entry),
8327 4704 : DescriptorArray::ToValueIndex(0) * kTaggedSize);
8328 : }
8329 :
8330 : template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NameDictionary>(
8331 : TNode<IntPtrT>, int);
8332 : template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<GlobalDictionary>(
8333 : TNode<IntPtrT>, int);
8334 : template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NumberDictionary>(
8335 : TNode<IntPtrT>, int);
8336 :
8337 : // This must be kept in sync with HashTableBase::ComputeCapacity().
8338 956 : TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
8339 : TNode<IntPtrT> at_least_space_for) {
8340 : TNode<IntPtrT> capacity = IntPtrRoundUpToPowerOfTwo32(
8341 956 : IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
8342 956 : return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
8343 : }
8344 :
8345 1573 : TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
8346 : SloppyTNode<IntPtrT> right) {
8347 : intptr_t left_constant;
8348 : intptr_t right_constant;
8349 2190 : if (ToIntPtrConstant(left, left_constant) &&
8350 617 : ToIntPtrConstant(right, right_constant)) {
8351 617 : return IntPtrConstant(std::max(left_constant, right_constant));
8352 : }
8353 : return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
8354 956 : right);
8355 : }
8356 :
8357 1009 : TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
8358 : SloppyTNode<IntPtrT> right) {
8359 : intptr_t left_constant;
8360 : intptr_t right_constant;
8361 1010 : if (ToIntPtrConstant(left, left_constant) &&
8362 1 : ToIntPtrConstant(right, right_constant)) {
8363 1 : return IntPtrConstant(std::min(left_constant, right_constant));
8364 : }
8365 : return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
8366 1008 : right);
8367 : }
8368 :
8369 : template <>
8370 21060 : TNode<HeapObject> CodeStubAssembler::LoadName<NameDictionary>(
8371 : TNode<HeapObject> key) {
8372 : CSA_ASSERT(this, Word32Or(IsTheHole(key), IsName(key)));
8373 21060 : return key;
8374 : }
8375 :
8376 : template <>
8377 7620 : TNode<HeapObject> CodeStubAssembler::LoadName<GlobalDictionary>(
8378 : TNode<HeapObject> key) {
8379 7620 : TNode<PropertyCell> property_cell = CAST(key);
8380 7620 : return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset));
8381 : }
8382 :
8383 : template <typename Dictionary>
8384 6744 : void CodeStubAssembler::NameDictionaryLookup(
8385 : TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
8386 : TVariable<IntPtrT>* var_name_index, Label* if_not_found, int inlined_probes,
8387 : LookupMode mode) {
8388 : static_assert(std::is_same<Dictionary, NameDictionary>::value ||
8389 : std::is_same<Dictionary, GlobalDictionary>::value,
8390 : "Unexpected NameDictionary");
8391 : DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
8392 : DCHECK_IMPLIES(mode == kFindInsertionIndex,
8393 : inlined_probes == 0 && if_found == nullptr);
8394 6744 : Comment("NameDictionaryLookup");
8395 :
8396 6744 : TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
8397 6744 : TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
8398 6744 : TNode<WordT> hash = ChangeUint32ToWord(LoadNameHash(unique_name));
8399 :
8400 : // See Dictionary::FirstProbe().
8401 6744 : TNode<IntPtrT> count = IntPtrConstant(0);
8402 6744 : TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
8403 6744 : Node* undefined = UndefinedConstant();
8404 :
8405 29688 : for (int i = 0; i < inlined_probes; i++) {
8406 22944 : TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
8407 22944 : *var_name_index = index;
8408 :
8409 22944 : TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
8410 22944 : GotoIf(WordEqual(current, undefined), if_not_found);
8411 22944 : current = LoadName<Dictionary>(current);
8412 22944 : GotoIf(WordEqual(current, unique_name), if_found);
8413 :
8414 : // See Dictionary::NextProbe().
8415 22944 : count = IntPtrConstant(i + 1);
8416 22944 : entry = Signed(WordAnd(IntPtrAdd(entry, count), mask));
8417 : }
8418 6744 : if (mode == kFindInsertionIndex) {
8419 : // Appease the variable merging algorithm for "Goto(&loop)" below.
8420 1008 : *var_name_index = IntPtrConstant(0);
8421 : }
8422 :
8423 6744 : TVARIABLE(IntPtrT, var_count, count);
8424 13488 : TVARIABLE(IntPtrT, var_entry, entry);
8425 6744 : Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
8426 13488 : Label loop(this, 3, loop_vars);
8427 6744 : Goto(&loop);
8428 6744 : BIND(&loop);
8429 : {
8430 6744 : TNode<IntPtrT> entry = var_entry.value();
8431 :
8432 6744 : TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
8433 6744 : *var_name_index = index;
8434 :
8435 6744 : TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
8436 6744 : GotoIf(WordEqual(current, undefined), if_not_found);
8437 6744 : if (mode == kFindExisting) {
8438 5736 : current = LoadName<Dictionary>(current);
8439 5736 : GotoIf(WordEqual(current, unique_name), if_found);
8440 : } else {
8441 : DCHECK_EQ(kFindInsertionIndex, mode);
8442 1008 : GotoIf(WordEqual(current, TheHoleConstant()), if_not_found);
8443 : }
8444 :
8445 : // See Dictionary::NextProbe().
8446 6744 : Increment(&var_count);
8447 6744 : entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
8448 :
8449 6744 : var_entry = entry;
8450 6744 : Goto(&loop);
8451 6744 : }
8452 6744 : }
8453 :
// Instantiate template methods to workaround GCC compilation issue.
// (Only the two dictionary layouts accepted by the static_assert inside
// NameDictionaryLookup are instantiated.)
template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
    TNode<NameDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
    int, LookupMode);
template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
    TNode<GlobalDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
    int, LookupMode);
8461 :
8462 336 : Node* CodeStubAssembler::ComputeUnseededHash(Node* key) {
8463 : // See v8::internal::ComputeUnseededHash()
8464 336 : Node* hash = TruncateIntPtrToInt32(key);
8465 1344 : hash = Int32Add(Word32Xor(hash, Int32Constant(0xFFFFFFFF)),
8466 1680 : Word32Shl(hash, Int32Constant(15)));
8467 336 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
8468 336 : hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
8469 336 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
8470 336 : hash = Int32Mul(hash, Int32Constant(2057));
8471 336 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
8472 336 : return Word32And(hash, Int32Constant(0x3FFFFFFF));
8473 : }
8474 :
8475 1188 : Node* CodeStubAssembler::ComputeSeededHash(Node* key) {
8476 : Node* const function_addr =
8477 1188 : ExternalConstant(ExternalReference::compute_integer_hash());
8478 : Node* const isolate_ptr =
8479 1188 : ExternalConstant(ExternalReference::isolate_address(isolate()));
8480 :
8481 1188 : MachineType type_ptr = MachineType::Pointer();
8482 1188 : MachineType type_uint32 = MachineType::Uint32();
8483 :
8484 : Node* const result =
8485 : CallCFunction2(type_uint32, type_ptr, type_uint32, function_addr,
8486 1188 : isolate_ptr, TruncateIntPtrToInt32(key));
8487 1188 : return result;
8488 : }
8489 :
// Looks up |intptr_index| in |dictionary| using open addressing
// (Dictionary::FirstProbe/NextProbe). Jumps to |if_found| with *var_entry
// holding the entry number on success, or to |if_not_found| once an
// undefined (empty-slot) sentinel terminates the probe sequence.
void CodeStubAssembler::NumberDictionaryLookup(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
  CSA_ASSERT(this, IsNumberDictionary(dictionary));
  DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
  Comment("NumberDictionaryLookup");

  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NumberDictionary>(dictionary));
  // capacity - 1 is used as the probe mask (capacity is a power of two).
  TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));

  TNode<WordT> hash = ChangeUint32ToWord(ComputeSeededHash(intptr_index));
  // Keys may be stored as heap numbers; precompute the double for comparison.
  Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);

  // See Dictionary::FirstProbe().
  TNode<IntPtrT> count = IntPtrConstant(0);
  TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));

  Node* undefined = UndefinedConstant();  // Empty-slot sentinel.
  Node* the_hole = TheHoleConstant();     // Deleted-slot sentinel.

  TVARIABLE(IntPtrT, var_count, count);
  Variable* loop_vars[] = {&var_count, var_entry};
  Label loop(this, 2, loop_vars);
  *var_entry = entry;
  Goto(&loop);
  BIND(&loop);
  {
    TNode<IntPtrT> entry = var_entry->value();

    TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(entry);
    Node* current = LoadFixedArrayElement(dictionary, index);
    // Undefined terminates the probe sequence: the key is absent.
    GotoIf(WordEqual(current, undefined), if_not_found);
    Label next_probe(this);
    {
      Label if_currentissmi(this), if_currentisnotsmi(this);
      Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
      BIND(&if_currentissmi);
      {
        // Smi key: compare the untagged values directly.
        Node* current_value = SmiUntag(current);
        Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
      }
      BIND(&if_currentisnotsmi);
      {
        // The hole marks a deleted entry; keep probing past it.
        GotoIf(WordEqual(current, the_hole), &next_probe);
        // Current must be the Number.
        Node* current_value = LoadHeapNumberValue(current);
        Branch(Float64Equal(current_value, key_as_float64), if_found,
               &next_probe);
      }
    }

    BIND(&next_probe);
    // See Dictionary::NextProbe().
    Increment(&var_count);
    entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));

    *var_entry = entry;
    Goto(&loop);
  }
}
8550 :
// Loads the value stored for |intptr_index| in |dictionary|. Jumps to
// |if_hole| when the key is absent and to |not_data| when the property
// exists but is not a plain data property (e.g. an accessor).
TNode<Object> CodeStubAssembler::BasicLoadNumberDictionaryElement(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    Label* not_data, Label* if_hole) {
  TVARIABLE(IntPtrT, var_entry);
  Label if_found(this);
  NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
                         if_hole);
  BIND(&if_found);

  // Check that the value is a data property.
  TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
  TNode<Uint32T> details =
      LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
  TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
  // TODO(jkummerow): Support accessors without missing?
  GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
  // Finally, load the value.
  return LoadValueByKeyIndex<NumberDictionary>(dictionary, index);
}
8570 :
// Stores |value| for an existing key |intptr_index| in |dictionary|.
// Jumps to |if_hole| when the key is absent, |not_data| when the property
// is not a plain data property, and |read_only| when the property is not
// writable. Does not insert new entries.
void CodeStubAssembler::BasicStoreNumberDictionaryElement(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    TNode<Object> value, Label* not_data, Label* if_hole, Label* read_only) {
  TVARIABLE(IntPtrT, var_entry);
  Label if_found(this);
  NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
                         if_hole);
  BIND(&if_found);

  // Check that the value is a data property.
  TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
  TNode<Uint32T> details =
      LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
  TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
  // TODO(jkummerow): Support accessors without missing?
  GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);

  // Check that the property is writeable.
  GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
         read_only);

  // Finally, store the value.
  StoreValueByKeyIndex<NumberDictionary>(dictionary, index, value);
}
8595 :
// Generic version must not be reached; only the NameDictionary
// specialization below is implemented.
template <class Dictionary>
void CodeStubAssembler::FindInsertionEntry(TNode<Dictionary> dictionary,
                                           TNode<Name> key,
                                           TVariable<IntPtrT>* var_key_index) {
  UNREACHABLE();
}
8602 :
// Finds a slot where |key| can be inserted, leaving the key index in
// *var_key_index. Uses kFindInsertionIndex mode with zero inlined probes;
// the lookup exits through its "not found" path once a free slot is hit,
// so that path is simply |done|.
template <>
void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
    TNode<NameDictionary> dictionary, TNode<Name> key,
    TVariable<IntPtrT>* var_key_index) {
  Label done(this);
  NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
                                       &done, 0, kFindInsertionIndex);
  BIND(&done);
}
8612 :
// Generic version must not be reached.
template <class Dictionary>
void CodeStubAssembler::InsertEntry(TNode<Dictionary> dictionary,
                                    TNode<Name> key, TNode<Object> value,
                                    TNode<IntPtrT> index,
                                    TNode<Smi> enum_index) {
  UNREACHABLE();  // Use specializations instead.
}
8620 :
// Writes a new (name, value, details) triple into the slot at key index
// |index| (obtained from FindInsertionEntry). |enum_index| is the
// property's enumeration order, encoded into the details word.
template <>
void CodeStubAssembler::InsertEntry<NameDictionary>(
    TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
    TNode<IntPtrT> index, TNode<Smi> enum_index) {
  // Store name and value.
  StoreFixedArrayElement(dictionary, index, name);
  StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);

  // Prepare details of the new property.
  PropertyDetails d(kData, NONE, PropertyCellType::kNoCell);
  enum_index =
      SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
  // We OR over the actual index below, so we expect the initial value to be 0.
  DCHECK_EQ(0, d.dictionary_index());
  TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index));

  // Private names must be marked non-enumerable.
  Label not_private(this, &var_details);
  GotoIfNot(IsPrivateSymbol(name), &not_private);
  // OR the DONT_ENUM attribute bit into the details for private symbols.
  TNode<Smi> dont_enum =
      SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
  var_details = SmiOr(var_details.value(), dont_enum);
  Goto(&not_private);
  BIND(&not_private);

  // Finally, store the details.
  StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
                                         var_details.value());
}
8650 :
// GlobalDictionary insertion is not implemented in CSA.
template <>
void CodeStubAssembler::InsertEntry<GlobalDictionary>(
    TNode<GlobalDictionary> dictionary, TNode<Name> key, TNode<Object> value,
    TNode<IntPtrT> index, TNode<Smi> enum_index) {
  UNIMPLEMENTED();
}
8657 :
// Adds the property |key| -> |value| to |dictionary|. Jumps to |bailout|
// (before any side effect) when the dictionary would need growing or
// rehashing, or when the enumeration-index space is exhausted; the caller
// then falls back to the runtime.
template <class Dictionary>
void CodeStubAssembler::Add(TNode<Dictionary> dictionary, TNode<Name> key,
                            TNode<Object> value, Label* bailout) {
  CSA_ASSERT(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)));
  TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
  TNode<Smi> nof = GetNumberOfElements<Dictionary>(dictionary);
  TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
  // Require 33% to still be free after adding additional_elements.
  // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
  // But that's OK here because it's only used for a comparison.
  TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
  GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
  // Require rehashing if more than 50% of free elements are deleted elements.
  TNode<Smi> deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
  CSA_ASSERT(this, SmiAbove(capacity, new_nof));
  TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
  GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);

  TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
  TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
  TNode<Smi> max_enum_index =
      SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
  GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);

  // No more bailouts after this point.
  // Operations from here on can have side effects.

  SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
  SetNumberOfElements<Dictionary>(dictionary, new_nof);

  TVARIABLE(IntPtrT, var_key_index);
  FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
  // The new entry keeps the pre-increment enumeration index.
  InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
                          enum_index);
}
8693 :
// Explicit instantiation (GCC compilation workaround).
template void CodeStubAssembler::Add<NameDictionary>(TNode<NameDictionary>,
                                                     TNode<Name>, TNode<Object>,
                                                     Label*);
8697 :
// Linearly scans |array| for |unique_name| among the first
// |number_of_valid_entries| entries. On a match jumps to |if_found| with
// *var_name_index set to the key's array index; otherwise falls through
// to |if_not_found|.
template <typename Array>
void CodeStubAssembler::LookupLinear(TNode<Name> unique_name,
                                     TNode<Array> array,
                                     TNode<Uint32T> number_of_valid_entries,
                                     Label* if_found,
                                     TVariable<IntPtrT>* var_name_index,
                                     Label* if_not_found) {
  static_assert(std::is_base_of<FixedArray, Array>::value ||
                    std::is_base_of<WeakFixedArray, Array>::value ||
                    std::is_base_of<DescriptorArray, Array>::value,
                "T must be a descendant of FixedArray or a WeakFixedArray");
  Comment("LookupLinear");
  TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
  TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
  TNode<IntPtrT> last_exclusive = IntPtrAdd(
      first_inclusive,
      IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));

  // Note: iterates from the last entry towards the first (the loop starts
  // at |last_exclusive| and pre-decrements by one entry size).
  BuildFastLoop(last_exclusive, first_inclusive,
                [=](SloppyTNode<IntPtrT> name_index) {
                  TNode<MaybeObject> element =
                      LoadArrayElement(array, Array::kHeaderSize, name_index);
                  TNode<Name> candidate_name = CAST(element);
                  *var_name_index = name_index;
                  GotoIf(WordEqual(candidate_name, unique_name), if_found);
                },
                -Array::kEntrySize, INTPTR_PARAMETERS, IndexAdvanceMode::kPre);
  Goto(if_not_found);
}
8727 :
// For a DescriptorArray, the number of entries is its descriptor count.
template <>
TNode<Uint32T> CodeStubAssembler::NumberOfEntries<DescriptorArray>(
    TNode<DescriptorArray> descriptors) {
  return Unsigned(LoadNumberOfDescriptors(descriptors));
}
8733 :
// For a TransitionArray: arrays shorter than kFirstIndex hold no
// transitions; otherwise the count is stored at kTransitionLengthIndex.
template <>
TNode<Uint32T> CodeStubAssembler::NumberOfEntries<TransitionArray>(
    TNode<TransitionArray> transitions) {
  TNode<IntPtrT> length = LoadAndUntagWeakFixedArrayLength(transitions);
  return Select<Uint32T>(
      UintPtrLessThan(length, IntPtrConstant(TransitionArray::kFirstIndex)),
      [=] { return Unsigned(Int32Constant(0)); },
      [=] {
        return Unsigned(LoadAndUntagToWord32ArrayElement(
            transitions, WeakFixedArray::kHeaderSize,
            IntPtrConstant(TransitionArray::kTransitionLengthIndex)));
      });
}
8747 :
8748 : template <typename Array>
8749 13156 : TNode<IntPtrT> CodeStubAssembler::EntryIndexToIndex(
8750 : TNode<Uint32T> entry_index) {
8751 13156 : TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
8752 13156 : TNode<Word32T> index = Int32Mul(entry_index, entry_size);
8753 13156 : return ChangeInt32ToIntPtr(index);
8754 : }
8755 :
// Returns the array index of the key of entry |entry_index|:
// the key index of entry 0 plus the entry's scaled offset.
template <typename Array>
TNode<IntPtrT> CodeStubAssembler::ToKeyIndex(TNode<Uint32T> entry_index) {
  return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
                   EntryIndexToIndex<Array>(entry_index));
}
8761 :
// Explicit instantiations (GCC compilation workaround).
template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<DescriptorArray>(
    TNode<Uint32T>);
template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<TransitionArray>(
    TNode<Uint32T>);
8766 :
8767 : template <>
8768 4160 : TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<DescriptorArray>(
8769 : TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8770 : TNode<Uint32T> details =
8771 4160 : DescriptorArrayGetDetails(descriptors, descriptor_number);
8772 4160 : return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
8773 : }
8774 :
// Transition arrays are already stored in sorted order, so the sorted
// index is the entry number itself.
template <>
TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<TransitionArray>(
    TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
  return transition_number;
}
8780 :
// Loads the key (a Name) of entry |entry_index| from a DescriptorArray or
// TransitionArray.
template <typename Array>
TNode<Name> CodeStubAssembler::GetKey(TNode<Array> array,
                                      TNode<Uint32T> entry_index) {
  static_assert(std::is_base_of<TransitionArray, Array>::value ||
                    std::is_base_of<DescriptorArray, Array>::value,
                "T must be a descendant of DescriptorArray or TransitionArray");
  const int key_offset = Array::ToKeyIndex(0) * kTaggedSize;
  TNode<MaybeObject> element =
      LoadArrayElement(array, Array::kHeaderSize,
                       EntryIndexToIndex<Array>(entry_index), key_offset);
  return CAST(element);
}
8793 :
// Explicit instantiations (GCC compilation workaround).
template TNode<Name> CodeStubAssembler::GetKey<DescriptorArray>(
    TNode<DescriptorArray>, TNode<Uint32T>);
template TNode<Name> CodeStubAssembler::GetKey<TransitionArray>(
    TNode<TransitionArray>, TNode<Uint32T>);
8798 :
// Loads the PropertyDetails word of descriptor |descriptor_number| as an
// untagged uint32.
TNode<Uint32T> CodeStubAssembler::DescriptorArrayGetDetails(
    TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
  const int details_offset = DescriptorArray::ToDetailsIndex(0) * kTaggedSize;
  return Unsigned(LoadAndUntagToWord32ArrayElement(
      descriptors, DescriptorArray::kHeaderSize,
      EntryIndexToIndex<DescriptorArray>(descriptor_number), details_offset));
}
8806 :
// Binary-searches |array| (which is sorted by name hash) for |unique_name|.
// First narrows down to the lowest sorted position whose hash is >= the
// target hash, then linearly scans the run of equal-hash entries comparing
// actual names. Matches whose sorted index is >= |number_of_valid_entries|
// are treated as not found.
template <typename Array>
void CodeStubAssembler::LookupBinary(TNode<Name> unique_name,
                                     TNode<Array> array,
                                     TNode<Uint32T> number_of_valid_entries,
                                     Label* if_found,
                                     TVariable<IntPtrT>* var_name_index,
                                     Label* if_not_found) {
  Comment("LookupBinary");
  TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)));
  TNode<Uint32T> limit =
      Unsigned(Int32Sub(NumberOfEntries<Array>(array), Int32Constant(1)));
  TVARIABLE(Uint32T, var_high, limit);
  TNode<Uint32T> hash = LoadNameHashField(unique_name);
  CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));

  // Assume non-empty array.
  CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));

  // Phase 1: binary search for the first entry whose hash is >= |hash|.
  Label binary_loop(this, {&var_high, &var_low});
  Goto(&binary_loop);
  BIND(&binary_loop);
  {
    // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
    TNode<Uint32T> mid = Unsigned(
        Int32Add(var_low.value(),
                 Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1)));
    // mid_name = array->GetSortedKey(mid).
    TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
    TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);

    TNode<Uint32T> mid_hash = LoadNameHashField(mid_name);

    Label mid_greater(this), mid_less(this), merge(this);
    Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
    BIND(&mid_greater);
    {
      var_high = mid;
      Goto(&merge);
    }
    BIND(&mid_less);
    {
      var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
      Goto(&merge);
    }
    BIND(&merge);
    GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
  }

  // Phase 2: scan the (possibly empty) run of entries with matching hash,
  // comparing names for identity.
  Label scan_loop(this, &var_low);
  Goto(&scan_loop);
  BIND(&scan_loop);
  {
    GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);

    TNode<Uint32T> sort_index =
        GetSortedKeyIndex<Array>(array, var_low.value());
    TNode<Name> current_name = GetKey<Array>(array, sort_index);
    TNode<Uint32T> current_hash = LoadNameHashField(current_name);
    // Leaving the equal-hash run means the name cannot occur later.
    GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
    Label next(this);
    GotoIf(WordNotEqual(current_name, unique_name), &next);
    // The name matched, but entries beyond |number_of_valid_entries| do not
    // count as found.
    GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
           if_not_found);
    *var_name_index = ToKeyIndex<Array>(sort_index);
    Goto(if_found);

    BIND(&next);
    var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
    Goto(&scan_loop);
  }
}
8878 :
// Invokes |body| once per descriptor in [start_descriptor, end_descriptor),
// passing each entry's key index. |variable_list| carries the CSA variables
// that |body| mutates across iterations (needed for loop-variable merging).
void CodeStubAssembler::DescriptorArrayForEach(
    VariableList& variable_list, TNode<Uint32T> start_descriptor,
    TNode<Uint32T> end_descriptor, const ForEachDescriptorBodyFunction& body) {
  TNode<IntPtrT> start_index = ToKeyIndex<DescriptorArray>(start_descriptor);
  TNode<IntPtrT> end_index = ToKeyIndex<DescriptorArray>(end_descriptor);

  BuildFastLoop(variable_list, start_index, end_index,
                [=](Node* index) {
                  TNode<IntPtrT> descriptor_key_index =
                      TNode<IntPtrT>::UncheckedCast(index);
                  body(descriptor_key_index);
                },
                DescriptorArray::kEntrySize, INTPTR_PARAMETERS,
                IndexAdvanceMode::kPost);
}
8894 :
// Iterates the enumerable own properties of |object| and invokes |body|
// with each (key, value) pair. Requires a map that
// EnsureOnlyHasSimpleProperties accepts (otherwise jumps to |bailout|).
// With |mode| == kEnumerationOrder the descriptors are walked twice:
// string-named properties first, then symbol-named ones.
void CodeStubAssembler::ForEachEnumerableOwnProperty(
    TNode<Context> context, TNode<Map> map, TNode<JSObject> object,
    ForEachEnumerationMode mode, const ForEachKeyValueFunction& body,
    Label* bailout) {
  TNode<Int32T> type = LoadMapInstanceType(map);
  TNode<Uint32T> bit_field3 = EnsureOnlyHasSimpleProperties(map, type, bailout);

  TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
  TNode<Uint32T> nof_descriptors =
      DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3);

  // True while |object| still has |map|; once |body| changes the object's
  // shape, properties are re-looked-up instead of trusting |descriptors|.
  TVARIABLE(BoolT, var_stable, Int32TrueConstant());

  // Set when a symbol-named key is seen, to trigger the second pass.
  TVARIABLE(BoolT, var_has_symbol, Int32FalseConstant());
  // false - iterate only string properties, true - iterate only symbol
  // properties
  TVARIABLE(BoolT, var_name_filter, Int32FalseConstant());
  VariableList list({&var_stable, &var_has_symbol, &var_name_filter}, zone());
  Label descriptor_array_loop(this,
                              {&var_stable, &var_has_symbol, &var_name_filter});

  Goto(&descriptor_array_loop);
  BIND(&descriptor_array_loop);

  DescriptorArrayForEach(
      list, Unsigned(Int32Constant(0)), nof_descriptors,
      [=, &var_stable, &var_has_symbol,
       &var_name_filter](TNode<IntPtrT> descriptor_key_index) {
        TNode<Name> next_key =
            LoadKeyByKeyIndex(descriptors, descriptor_key_index);

        TVARIABLE(Object, var_value, SmiConstant(0));
        Label callback(this), next_iteration(this);

        if (mode == kEnumerationOrder) {
          // |next_key| is either a string or a symbol
          // Skip strings or symbols depending on var_name_filter value.
          Label if_string(this), if_symbol(this), if_name_ok(this);

          Branch(IsSymbol(next_key), &if_symbol, &if_string);
          BIND(&if_symbol);
          {
            // Remember that a second (symbol) pass will be needed.
            var_has_symbol = Int32TrueConstant();
            // Process symbol property when |var_name_filer| is true.
            Branch(var_name_filter.value(), &if_name_ok, &next_iteration);
          }
          BIND(&if_string);
          {
            CSA_ASSERT(this, IsString(next_key));
            // Process string property when |var_name_filer| is false.
            Branch(var_name_filter.value(), &next_iteration, &if_name_ok);
          }
          BIND(&if_name_ok);
        }
        {
          TVARIABLE(Map, var_map);
          TVARIABLE(HeapObject, var_meta_storage);
          TVARIABLE(IntPtrT, var_entry);
          TVARIABLE(Uint32T, var_details);
          Label if_found(this);

          Label if_found_fast(this), if_found_dict(this);

          Label if_stable(this), if_not_stable(this);
          Branch(var_stable.value(), &if_stable, &if_not_stable);
          BIND(&if_stable);
          {
            // Directly decode from the descriptor array if |object| did not
            // change shape.
            var_map = map;
            var_meta_storage = descriptors;
            var_entry = Signed(descriptor_key_index);
            Goto(&if_found_fast);
          }
          BIND(&if_not_stable);
          {
            // If the map did change, do a slower lookup. We are still
            // guaranteed that the object has a simple shape, and that the key
            // is a name.
            var_map = LoadMap(object);
            TryLookupPropertyInSimpleObject(
                object, var_map.value(), next_key, &if_found_fast,
                &if_found_dict, &var_meta_storage, &var_entry, &next_iteration);
          }

          BIND(&if_found_fast);
          {
            TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
            TNode<IntPtrT> name_index = var_entry.value();

            // Skip non-enumerable properties.
            var_details = LoadDetailsByKeyIndex(descriptors, name_index);
            GotoIf(IsSetWord32(var_details.value(),
                               PropertyDetails::kAttributesDontEnumMask),
                   &next_iteration);

            LoadPropertyFromFastObject(object, var_map.value(), descriptors,
                                       name_index, var_details.value(),
                                       &var_value);
            Goto(&if_found);
          }
          BIND(&if_found_dict);
          {
            TNode<NameDictionary> dictionary = CAST(var_meta_storage.value());
            TNode<IntPtrT> entry = var_entry.value();

            TNode<Uint32T> details =
                LoadDetailsByKeyIndex<NameDictionary>(dictionary, entry);
            // Skip non-enumerable properties.
            GotoIf(
                IsSetWord32(details, PropertyDetails::kAttributesDontEnumMask),
                &next_iteration);

            var_details = details;
            var_value = LoadValueByKeyIndex<NameDictionary>(dictionary, entry);
            Goto(&if_found);
          }

          // Here we have details and value which could be an accessor.
          BIND(&if_found);
          {
            Label slow_load(this, Label::kDeferred);

            var_value = CallGetterIfAccessor(var_value.value(),
                                             var_details.value(), context,
                                             object, &slow_load, kCallJSGetter);
            Goto(&callback);

            BIND(&slow_load);
            var_value =
                CallRuntime(Runtime::kGetProperty, context, object, next_key);
            Goto(&callback);

            BIND(&callback);
            body(next_key, var_value.value());

            // Check if |object| is still stable, i.e. we can proceed using
            // property details from preloaded |descriptors|.
            var_stable =
                Select<BoolT>(var_stable.value(),
                              [=] { return WordEqual(LoadMap(object), map); },
                              [=] { return Int32FalseConstant(); });

            Goto(&next_iteration);
          }
        }
        BIND(&next_iteration);
      });

  if (mode == kEnumerationOrder) {
    Label done(this);
    GotoIf(var_name_filter.value(), &done);
    GotoIfNot(var_has_symbol.value(), &done);
    // All string properties are processed, now process symbol properties.
    var_name_filter = Int32TrueConstant();
    Goto(&descriptor_array_loop);

    BIND(&done);
  }
}
9055 :
9056 2080 : void CodeStubAssembler::DescriptorLookup(
9057 : SloppyTNode<Name> unique_name, SloppyTNode<DescriptorArray> descriptors,
9058 : SloppyTNode<Uint32T> bitfield3, Label* if_found,
9059 : TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
9060 2080 : Comment("DescriptorArrayLookup");
9061 2080 : TNode<Uint32T> nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
9062 : Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
9063 2080 : var_name_index, if_not_found);
9064 2080 : }
9065 :
9066 284 : void CodeStubAssembler::TransitionLookup(
9067 : SloppyTNode<Name> unique_name, SloppyTNode<TransitionArray> transitions,
9068 : Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
9069 284 : Comment("TransitionArrayLookup");
9070 : TNode<Uint32T> number_of_valid_transitions =
9071 284 : NumberOfEntries<TransitionArray>(transitions);
9072 : Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
9073 284 : if_found, var_name_index, if_not_found);
9074 284 : }
9075 :
// Dispatches a name lookup on |array| to a linear scan for small arrays
// or a hash-ordered binary search for larger ones.
template <typename Array>
void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
                               TNode<Uint32T> number_of_valid_entries,
                               Label* if_found,
                               TVariable<IntPtrT>* var_name_index,
                               Label* if_not_found) {
  Comment("ArrayLookup");
  // A null |number_of_valid_entries| node means "use the array's own count".
  // (This tests whether the TNode is set, not its runtime value.)
  if (!number_of_valid_entries) {
    number_of_valid_entries = NumberOfEntries(array);
  }
  GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
  Label linear_search(this), binary_search(this);
  const int kMaxElementsForLinearSearch = 32;
  Branch(Uint32LessThanOrEqual(number_of_valid_entries,
                               Int32Constant(kMaxElementsForLinearSearch)),
         &linear_search, &binary_search);
  BIND(&linear_search);
  {
    LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
                        var_name_index, if_not_found);
  }
  BIND(&binary_search);
  {
    LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
                        var_name_index, if_not_found);
  }
}
9103 :
// Returns true when property access on objects with this map needs no
// special handling: not a special receiver type, no named interceptor,
// and no access checks.
TNode<BoolT> CodeStubAssembler::IsSimpleObjectMap(TNode<Map> map) {
  uint32_t mask =
      Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
  // !IsSpecialReceiverType && !IsNamedInterceptor && !IsAccessCheckNeeded
  return Select<BoolT>(
      IsSpecialReceiverInstanceType(LoadMapInstanceType(map)),
      [=] { return Int32FalseConstant(); },
      [=] { return IsClearWord32(LoadMapBitField(map), mask); });
}
9113 :
// Looks up |unique_name| on an object known to have only simple
// properties. Fast-mode maps search the descriptor array (-> if_found_fast);
// dictionary-mode maps search the NameDictionary (-> if_found_dict).
// *var_meta_storage receives the searched storage and *var_name_index the
// hit's index within it.
void CodeStubAssembler::TryLookupPropertyInSimpleObject(
    TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
    Label* if_found_fast, Label* if_found_dict,
    TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
    Label* if_not_found) {
  CSA_ASSERT(this, IsSimpleObjectMap(map));

  TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
  Label if_isfastmap(this), if_isslowmap(this);
  Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
         &if_isfastmap);
  BIND(&if_isfastmap);
  {
    TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
    *var_meta_storage = descriptors;

    DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
                     var_name_index, if_not_found);
  }
  BIND(&if_isslowmap);
  {
    TNode<NameDictionary> dictionary = CAST(LoadSlowProperties(object));
    *var_meta_storage = dictionary;

    NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
                                         var_name_index, if_not_found);
  }
}
9142 :
// Like TryLookupPropertyInSimpleObject, but additionally handles the
// global object by searching its GlobalDictionary (-> if_found_global).
// Other special receivers, interceptors and access-checked objects jump
// to |if_bailout| for runtime handling.
void CodeStubAssembler::TryLookupProperty(
    SloppyTNode<JSObject> object, SloppyTNode<Map> map,
    SloppyTNode<Int32T> instance_type, SloppyTNode<Name> unique_name,
    Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
    TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
    Label* if_not_found, Label* if_bailout) {
  Label if_objectisspecial(this);
  GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);

  TryLookupPropertyInSimpleObject(object, map, unique_name, if_found_fast,
                                  if_found_dict, var_meta_storage,
                                  var_name_index, if_not_found);

  BIND(&if_objectisspecial);
  {
    // Handle global object here and bailout for other special objects.
    GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
              if_bailout);

    // Handle interceptors and access checks in runtime.
    TNode<Int32T> bit_field = LoadMapBitField(map);
    int mask =
        Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
    GotoIf(IsSetWord32(bit_field, mask), if_bailout);

    TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(object));
    *var_meta_storage = dictionary;

    NameDictionaryLookup<GlobalDictionary>(
        dictionary, unique_name, if_found_global, var_name_index, if_not_found);
  }
}
9175 :
// Branches to |if_found| if |object| has an own property |unique_name|,
// without exposing its value. For global-object properties the backing
// PropertyCell is inspected so that deleted entries (the_hole value) are
// reported via |if_not_found|. Special receivers bail out to |if_bailout|
// (via TryLookupProperty).
void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
                                          Node* instance_type,
                                          Node* unique_name, Label* if_found,
                                          Label* if_not_found,
                                          Label* if_bailout) {
  Comment("TryHasOwnProperty");
  TVARIABLE(HeapObject, var_meta_storage);
  TVARIABLE(IntPtrT, var_name_index);

  Label if_found_global(this);
  // Fast-map and dictionary hits both directly mean "has own property";
  // only the global case needs the deleted-cell check below.
  TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
                    &if_found_global, &var_meta_storage, &var_name_index,
                    if_not_found, if_bailout);

  BIND(&if_found_global);
  {
    VARIABLE(var_value, MachineRepresentation::kTagged);
    VARIABLE(var_details, MachineRepresentation::kWord32);
    // Check if the property cell is not deleted.
    LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
                                     var_name_index.value(), &var_value,
                                     &var_details, if_not_found);
    Goto(if_found);
  }
}
9201 :
9202 392 : Node* CodeStubAssembler::GetMethod(Node* context, Node* object,
9203 : Handle<Name> name,
9204 : Label* if_null_or_undefined) {
9205 392 : Node* method = GetProperty(context, object, name);
9206 :
9207 392 : GotoIf(IsUndefined(method), if_null_or_undefined);
9208 392 : GotoIf(IsNull(method), if_null_or_undefined);
9209 :
9210 392 : return method;
9211 : }
9212 :
9213 56 : TNode<Object> CodeStubAssembler::GetIteratorMethod(
9214 : TNode<Context> context, TNode<HeapObject> heap_obj,
9215 : Label* if_iteratorundefined) {
9216 112 : return CAST(GetMethod(context, heap_obj,
9217 : isolate()->factory()->iterator_symbol(),
9218 : if_iteratorundefined));
9219 : }
9220 :
9221 1068 : void CodeStubAssembler::LoadPropertyFromFastObject(
9222 : Node* object, Node* map, TNode<DescriptorArray> descriptors,
9223 : Node* name_index, Variable* var_details, Variable* var_value) {
9224 : DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
9225 : DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
9226 :
9227 : Node* details =
9228 1068 : LoadDetailsByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index));
9229 1068 : var_details->Bind(details);
9230 :
9231 : LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
9232 1068 : var_value);
9233 1068 : }
9234 :
// Loads the value of the fast-mode property described by |details| at
// |name_index| in |descriptors| into |var_value|. Field-located properties
// are read from the object itself (in-object slot or out-of-object property
// backing store); double fields are reboxed into a freshly allocated
// HeapNumber. Descriptor-located (constant) properties are read straight
// from the descriptor array.
void CodeStubAssembler::LoadPropertyFromFastObject(
    Node* object, Node* map, TNode<DescriptorArray> descriptors,
    Node* name_index, Node* details, Variable* var_value) {
  Comment("[ LoadPropertyFromFastObject");

  Node* location = DecodeWord32<PropertyDetails::LocationField>(details);

  Label if_in_field(this), if_in_descriptor(this), done(this);
  Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
         &if_in_descriptor);
  BIND(&if_in_field);
  {
    Node* field_index =
        DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
    Node* representation =
        DecodeWord32<PropertyDetails::RepresentationField>(details);

    // Field indices are relative to the start of in-object properties;
    // indices at or beyond the instance size live in the property array.
    field_index =
        IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
    Node* instance_size_in_words = LoadMapInstanceSizeInWords(map);

    Label if_inobject(this), if_backing_store(this);
    VARIABLE(var_double_value, MachineRepresentation::kFloat64);
    Label rebox_double(this, &var_double_value);
    Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
           &if_backing_store);
    BIND(&if_inobject);
    {
      Comment("if_inobject");
      Node* field_offset = TimesTaggedSize(field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        var_value->Bind(LoadObjectField(object, field_offset));
        Goto(&done);
      }
      BIND(&if_double);
      {
        // With unboxed double fields the raw float64 lives directly in the
        // object; otherwise it is wrapped in a heap number.
        if (FLAG_unbox_double_fields) {
          var_double_value.Bind(
              LoadObjectField(object, field_offset, MachineType::Float64()));
        } else {
          Node* mutable_heap_number = LoadObjectField(object, field_offset);
          var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
        }
        Goto(&rebox_double);
      }
    }
    BIND(&if_backing_store);
    {
      Comment("if_backing_store");
      TNode<HeapObject> properties = LoadFastProperties(object);
      field_index = IntPtrSub(field_index, instance_size_in_words);
      Node* value = LoadPropertyArrayElement(CAST(properties), field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        var_value->Bind(value);
        Goto(&done);
      }
      BIND(&if_double);
      {
        var_double_value.Bind(LoadHeapNumberValue(value));
        Goto(&rebox_double);
      }
    }
    BIND(&rebox_double);
    {
      Comment("rebox_double");
      // Double fields are handed out as a freshly allocated HeapNumber so
      // callers never observe the field's internal representation.
      Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
      var_value->Bind(heap_number);
      Goto(&done);
    }
  }
  BIND(&if_in_descriptor);
  {
    var_value->Bind(
        LoadValueByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index)));
    Goto(&done);
  }
  BIND(&done);

  Comment("] LoadPropertyFromFastObject");
}
9327 :
9328 2412 : void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
9329 : Node* name_index,
9330 : Variable* var_details,
9331 : Variable* var_value) {
9332 2412 : Comment("LoadPropertyFromNameDictionary");
9333 : CSA_ASSERT(this, IsNameDictionary(dictionary));
9334 :
9335 : var_details->Bind(
9336 2412 : LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
9337 2412 : var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
9338 :
9339 2412 : Comment("] LoadPropertyFromNameDictionary");
9340 2412 : }
9341 :
9342 1296 : void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
9343 : Node* name_index,
9344 : Variable* var_details,
9345 : Variable* var_value,
9346 : Label* if_deleted) {
9347 1296 : Comment("[ LoadPropertyFromGlobalDictionary");
9348 : CSA_ASSERT(this, IsGlobalDictionary(dictionary));
9349 :
9350 1296 : Node* property_cell = LoadFixedArrayElement(CAST(dictionary), name_index);
9351 : CSA_ASSERT(this, IsPropertyCell(property_cell));
9352 :
9353 1296 : Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
9354 1296 : GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);
9355 :
9356 1296 : var_value->Bind(value);
9357 :
9358 : Node* details = LoadAndUntagToWord32ObjectField(property_cell,
9359 1296 : PropertyCell::kDetailsOffset);
9360 1296 : var_details->Bind(details);
9361 :
9362 1296 : Comment("] LoadPropertyFromGlobalDictionary");
9363 1296 : }
9364 :
// |value| is the property backing store's contents, which is either a value
// or an accessor pair, as specified by |details|.
// Returns either the original value, or the result of the getter call.
// - Data properties are returned unchanged.
// - AccessorPair getters are invoked via CallJS when |mode| is
//   kCallJSGetter; non-callable getters yield undefined, and
//   FunctionTemplateInfo getters bail out to |if_bailout|.
// - AccessorInfo (C++) accessors are handled inline only for the known
//   cases below (JSArray "length", JSFunction "prototype", String-wrapper
//   "length"); everything else bails out to |if_bailout|.
TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
    Node* value, Node* details, Node* context, Node* receiver,
    Label* if_bailout, GetOwnPropertyMode mode) {
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  Label done(this), if_accessor_info(this, Label::kDeferred);

  Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
  GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);

  // Accessor case.
  GotoIfNot(IsAccessorPair(value), &if_accessor_info);

  // AccessorPair case.
  {
    if (mode == kCallJSGetter) {
      Node* accessor_pair = value;
      Node* getter =
          LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
      Node* getter_map = LoadMap(getter);
      Node* instance_type = LoadMapInstanceType(getter_map);
      // FunctionTemplateInfo getters are not supported yet.
      GotoIf(InstanceTypeEqual(instance_type, FUNCTION_TEMPLATE_INFO_TYPE),
             if_bailout);

      // Return undefined if the {getter} is not callable.
      var_value.Bind(UndefinedConstant());
      GotoIfNot(IsCallableMap(getter_map), &done);

      // Call the accessor.
      Callable callable = CodeFactory::Call(isolate());
      Node* result = CallJS(callable, context, getter, receiver);
      var_value.Bind(result);
    }
    Goto(&done);
  }

  // AccessorInfo case.
  BIND(&if_accessor_info);
  {
    Node* accessor_info = value;
    CSA_ASSERT(this, IsAccessorInfo(value));
    CSA_ASSERT(this, TaggedIsNotSmi(receiver));
    Label if_array(this), if_function(this), if_value(this);

    // Dispatch based on {receiver} instance type.
    Node* receiver_map = LoadMap(receiver);
    Node* receiver_instance_type = LoadMapInstanceType(receiver_map);
    GotoIf(IsJSArrayInstanceType(receiver_instance_type), &if_array);
    GotoIf(IsJSFunctionInstanceType(receiver_instance_type), &if_function);
    Branch(IsJSValueInstanceType(receiver_instance_type), &if_value,
           if_bailout);

    // JSArray AccessorInfo case.
    BIND(&if_array);
    {
      // We only deal with the "length" accessor on JSArray.
      GotoIfNot(IsLengthString(
                    LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
                if_bailout);
      var_value.Bind(LoadJSArrayLength(receiver));
      Goto(&done);
    }

    // JSFunction AccessorInfo case.
    BIND(&if_function);
    {
      // We only deal with the "prototype" accessor on JSFunction here.
      GotoIfNot(IsPrototypeString(
                    LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
                if_bailout);

      GotoIfPrototypeRequiresRuntimeLookup(CAST(receiver), CAST(receiver_map),
                                           if_bailout);
      var_value.Bind(LoadJSFunctionPrototype(receiver, if_bailout));
      Goto(&done);
    }

    // JSValue AccessorInfo case.
    BIND(&if_value);
    {
      // We only deal with the "length" accessor on JSValue string wrappers.
      GotoIfNot(IsLengthString(
                    LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
                if_bailout);
      Node* receiver_value = LoadJSValueValue(receiver);
      GotoIfNot(TaggedIsNotSmi(receiver_value), if_bailout);
      GotoIfNot(IsString(receiver_value), if_bailout);
      var_value.Bind(LoadStringLengthAsSmi(receiver_value));
      Goto(&done);
    }
  }

  BIND(&done);
  return UncheckedCast<Object>(var_value.value());
}
9463 :
// Convenience overload of the full TryGetOwnProperty() below for callers
// that don't need the property details or the raw (pre-getter) value;
// JS getters are always invoked (kCallJSGetter).
void CodeStubAssembler::TryGetOwnProperty(
    Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
    Node* unique_name, Label* if_found_value, Variable* var_value,
    Label* if_not_found, Label* if_bailout) {
  TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
                    if_found_value, var_value, nullptr, nullptr, if_not_found,
                    if_bailout, kCallJSGetter);
}
9472 :
// Looks up |unique_name| as an own property of |object| and, when found,
// binds its value (after running accessors via CallGetterIfAccessor,
// controlled by |mode|) to |var_value| and jumps to |if_found_value|.
// When non-null, |var_details| receives the PropertyDetails word and
// |var_raw_value| the pre-getter backing-store value. Lookup failures go to
// |if_not_found|; cases requiring the runtime go to |if_bailout|.
void CodeStubAssembler::TryGetOwnProperty(
    Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
    Node* unique_name, Label* if_found_value, Variable* var_value,
    Variable* var_details, Variable* var_raw_value, Label* if_not_found,
    Label* if_bailout, GetOwnPropertyMode mode) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
  Comment("TryGetOwnProperty");

  TVARIABLE(HeapObject, var_meta_storage);
  TVARIABLE(IntPtrT, var_entry);

  Label if_found_fast(this), if_found_dict(this), if_found_global(this);

  // Details are produced even when the caller doesn't ask for them, since
  // CallGetterIfAccessor below needs the details word.
  VARIABLE(local_var_details, MachineRepresentation::kWord32);
  if (!var_details) {
    var_details = &local_var_details;
  }
  Label if_found(this);

  TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
                    &if_found_dict, &if_found_global, &var_meta_storage,
                    &var_entry, if_not_found, if_bailout);
  BIND(&if_found_fast);
  {
    TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
    Node* name_index = var_entry.value();

    LoadPropertyFromFastObject(object, map, descriptors, name_index,
                               var_details, var_value);
    Goto(&if_found);
  }
  BIND(&if_found_dict);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();
    LoadPropertyFromNameDictionary(dictionary, entry, var_details, var_value);
    Goto(&if_found);
  }
  BIND(&if_found_global);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();

    // Deleted global properties (the_hole in the PropertyCell) count as
    // not found.
    LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
                                     if_not_found);
    Goto(&if_found);
  }
  // Here we have details and value which could be an accessor.
  BIND(&if_found);
  {
    // TODO(ishell): Execute C++ accessor in case of accessor info
    if (var_raw_value) {
      var_raw_value->Bind(var_value->value());
    }
    Node* value = CallGetterIfAccessor(var_value->value(), var_details->value(),
                                       context, receiver, if_bailout, mode);
    var_value->Bind(value);
    Goto(if_found_value);
  }
}
9533 :
// Checks whether |object| has an element at |intptr_index|, dispatching on
// the map's elements kind. Jumps to |if_found| when the element exists,
// |if_absent| when it is definitely absent (detached or out-of-bounds typed
// array), |if_not_found| for holes / in-range misses, and |if_bailout| for
// cases that must go to the runtime (special receivers, negative indices,
// unsupported elements kinds).
void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
                                         SloppyTNode<Int32T> instance_type,
                                         SloppyTNode<IntPtrT> intptr_index,
                                         Label* if_found, Label* if_absent,
                                         Label* if_not_found,
                                         Label* if_bailout) {
  // Handle special objects in runtime.
  GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);

  Node* elements_kind = LoadMapElementsKind(map);

  // TODO(verwaest): Support other elements kinds as well.
  Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
      if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
      if_typedarray(this);
  // clang-format off
  int32_t values[] = {
      // Handled by {if_isobjectorsmi}.
      PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS,
      HOLEY_ELEMENTS,
      // Handled by {if_isdouble}.
      PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
      // Handled by {if_isdictionary}.
      DICTIONARY_ELEMENTS,
      // Handled by {if_isfaststringwrapper}.
      FAST_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_isslowstringwrapper}.
      SLOW_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_not_found}.
      NO_ELEMENTS,
      // Handled by {if_typed_array}.
      UINT8_ELEMENTS,
      INT8_ELEMENTS,
      UINT16_ELEMENTS,
      INT16_ELEMENTS,
      UINT32_ELEMENTS,
      INT32_ELEMENTS,
      FLOAT32_ELEMENTS,
      FLOAT64_ELEMENTS,
      UINT8_CLAMPED_ELEMENTS,
      BIGUINT64_ELEMENTS,
      BIGINT64_ELEMENTS,
  };
  Label* labels[] = {
      &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
      &if_isobjectorsmi,
      &if_isdouble, &if_isdouble,
      &if_isdictionary,
      &if_isfaststringwrapper,
      &if_isslowstringwrapper,
      if_not_found,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
  };
  // clang-format on
  STATIC_ASSERT(arraysize(values) == arraysize(labels));
  // Any elements kind not listed above (e.g. PACKED_SEALED_ELEMENTS) takes
  // the Switch default and bails out.
  Switch(elements_kind, if_bailout, values, labels, arraysize(values));

  BIND(&if_isobjectorsmi);
  {
    TNode<FixedArray> elements = CAST(LoadElements(object));
    TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    // In-range element: present unless it is the_hole (holey kinds).
    TNode<Object> element = LoadFixedArrayElement(elements, intptr_index);
    TNode<Oddball> the_hole = TheHoleConstant();
    Branch(WordEqual(element, the_hole), if_not_found, if_found);
  }
  BIND(&if_isdouble);
  {
    TNode<FixedArrayBase> elements = LoadElements(object);
    TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    // Check if the element is a double hole, but don't load it.
    LoadFixedDoubleArrayElement(CAST(elements), intptr_index,
                                MachineType::None(), 0, INTPTR_PARAMETERS,
                                if_not_found);
    Goto(if_found);
  }
  BIND(&if_isdictionary);
  {
    // Negative keys must be converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);

    TVARIABLE(IntPtrT, var_entry);
    TNode<NumberDictionary> elements = CAST(LoadElements(object));
    NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
                           if_not_found);
  }
  BIND(&if_isfaststringwrapper);
  {
    // String wrappers expose the characters of the wrapped string as
    // elements before consulting the wrapper's own backing store.
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsString(string));
    Node* length = LoadStringLengthAsWord(string);
    GotoIf(UintPtrLessThan(intptr_index, length), if_found);
    Goto(&if_isobjectorsmi);
  }
  BIND(&if_isslowstringwrapper);
  {
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsString(string));
    Node* length = LoadStringLengthAsWord(string);
    GotoIf(UintPtrLessThan(intptr_index, length), if_found);
    Goto(&if_isdictionary);
  }
  BIND(&if_typedarray);
  {
    // Typed array elements are absent (not "not found") when the buffer is
    // detached or the index is out of bounds.
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), if_absent);

    Node* length = SmiUntag(LoadJSTypedArrayLength(CAST(object)));
    Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
  }
  BIND(&if_oob);
  {
    // Positive OOB indices mean "not found", negative indices must be
    // converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
    Goto(if_not_found);
  }
}
9669 :
9670 840 : void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
9671 : Label* if_maybe_special_index,
9672 : Label* if_not_special_index) {
9673 : // TODO(cwhan.tunz): Implement fast cases more.
9674 :
9675 : // If a name is empty or too long, it's not a special index
9676 : // Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
9677 840 : const int kBufferSize = 24;
9678 840 : TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
9679 840 : GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
9680 840 : GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
9681 840 : if_not_special_index);
9682 :
9683 : // If the first character of name is not a digit or '-', or we can't match it
9684 : // to Infinity or NaN, then this is not a special index.
9685 840 : TNode<Int32T> first_char = StringCharCodeAt(name_string, IntPtrConstant(0));
9686 : // If the name starts with '-', it can be a negative index.
9687 840 : GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
9688 : // If the name starts with 'I', it can be "Infinity".
9689 840 : GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
9690 : // If the name starts with 'N', it can be "NaN".
9691 840 : GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
9692 : // Finally, if the first character is not a digit either, then we are sure
9693 : // that the name is not a special index.
9694 840 : GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
9695 840 : GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
9696 840 : Goto(if_maybe_special_index);
9697 840 : }
9698 :
// Generic prototype-chain walk. Converts |key| into either a unique name or
// an array index (TryToName) and then iterates the prototype chain of
// |receiver|, invoking |lookup_property_in_holder| (name case) or
// |lookup_element_in_holder| (index case) on each holder. Jumps to |if_end|
// when the chain terminates at null, to |if_proxy| (when non-null) if the
// receiver itself is a proxy, and to |if_bailout| for Smis, non-receivers,
// un-internalizable keys, and possible integer-indexed-exotic cases on
// typed arrays.
void CodeStubAssembler::TryPrototypeChainLookup(
    Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
    const LookupInHolder& lookup_element_in_holder, Label* if_end,
    Label* if_bailout, Label* if_proxy) {
  // Ensure receiver is JSReceiver, otherwise bailout.
  Label if_objectisnotsmi(this);
  Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
  BIND(&if_objectisnotsmi);

  Node* map = LoadMap(receiver);
  Node* instance_type = LoadMapInstanceType(map);
  {
    Label if_objectisreceiver(this);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
    Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
           if_bailout);
    BIND(&if_objectisreceiver);

    if (if_proxy) {
      GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
    }
  }

  VARIABLE(var_index, MachineType::PointerRepresentation());
  VARIABLE(var_unique, MachineRepresentation::kTagged);

  Label if_keyisindex(this), if_iskeyunique(this);
  TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
            if_bailout);

  // Unique-name walk: call the property lookup on each holder in turn.
  BIND(&if_iskeyunique);
  {
    VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
    VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
    VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
             instance_type);

    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    BIND(&loop);
    {
      Node* holder_map = var_holder_map.value();
      Node* holder_instance_type = var_holder_instance_type.value();

      Label next_proto(this), check_integer_indexed_exotic(this);
      lookup_property_in_holder(receiver, var_holder.value(), holder_map,
                                holder_instance_type, var_unique.value(),
                                &check_integer_indexed_exotic, if_bailout);

      BIND(&check_integer_indexed_exotic);
      {
        // Bailout if it can be an integer indexed exotic case.
        GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
                  &next_proto);
        GotoIfNot(IsString(var_unique.value()), &next_proto);
        BranchIfMaybeSpecialIndex(CAST(var_unique.value()), if_bailout,
                                  &next_proto);
      }

      BIND(&next_proto);

      // Advance to the holder's prototype; null terminates the walk.
      Node* proto = LoadMapPrototype(holder_map);

      GotoIf(IsNull(proto), if_end);

      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
  // Index walk: call the element lookup on each holder in turn.
  BIND(&if_keyisindex);
  {
    VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
    VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
    VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
             instance_type);

    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    BIND(&loop);
    {
      Label next_proto(this);
      lookup_element_in_holder(receiver, var_holder.value(),
                               var_holder_map.value(),
                               var_holder_instance_type.value(),
                               var_index.value(), &next_proto, if_bailout);
      BIND(&next_proto);

      Node* proto = LoadMapPrototype(var_holder_map.value());

      GotoIf(IsNull(proto), if_end);

      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
}
9810 :
// Returns true/false depending on whether |prototype| occurs anywhere on
// the prototype chain of |object|. Proxies and maps with named interceptors
// or access checks are delegated to the Runtime::kHasInPrototypeChain
// runtime function.
Node* CodeStubAssembler::HasInPrototypeChain(Node* context, Node* object,
                                             Node* prototype) {
  CSA_ASSERT(this, TaggedIsNotSmi(object));
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_false(this), return_true(this),
      return_runtime(this, Label::kDeferred), return_result(this);

  // Loop through the prototype chain looking for the {prototype}.
  VARIABLE(var_object_map, MachineRepresentation::kTagged, LoadMap(object));
  Label loop(this, &var_object_map);
  Goto(&loop);
  BIND(&loop);
  {
    // Check if we can determine the prototype directly from the {object_map}.
    Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
    Node* object_map = var_object_map.value();
    TNode<Int32T> object_instance_type = LoadMapInstanceType(object_map);
    Branch(IsSpecialReceiverInstanceType(object_instance_type),
           &if_objectisspecial, &if_objectisdirect);
    BIND(&if_objectisspecial);
    {
      // The {object_map} is a special receiver map or a primitive map, check
      // if we need to use the if_objectisspecial path in the runtime.
      GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
             &return_runtime);
      Node* object_bitfield = LoadMapBitField(object_map);
      int mask = Map::HasNamedInterceptorBit::kMask |
                 Map::IsAccessCheckNeededBit::kMask;
      Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
             &if_objectisdirect);
    }
    BIND(&if_objectisdirect);

    // Check the current {object} prototype.
    Node* object_prototype = LoadMapPrototype(object_map);
    GotoIf(IsNull(object_prototype), &return_false);
    GotoIf(WordEqual(object_prototype, prototype), &return_true);

    // Continue with the prototype.
    CSA_ASSERT(this, TaggedIsNotSmi(object_prototype));
    var_object_map.Bind(LoadMap(object_prototype));
    Goto(&loop);
  }

  BIND(&return_true);
  var_result.Bind(TrueConstant());
  Goto(&return_result);

  BIND(&return_false);
  var_result.Bind(FalseConstant());
  Goto(&return_result);

  BIND(&return_runtime);
  {
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
9874 :
// Fast path for the OrdinaryHasInstance operation: handles the case where
// {callable} is a plain JSFunction whose "prototype" can be read directly
// (resolving through the initial map if present); all other cases fall back
// to the Runtime::kOrdinaryHasInstance runtime function.
Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
                                             Node* object) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_runtime(this, Label::kDeferred), return_result(this);

  // Goto runtime if {object} is a Smi.
  GotoIf(TaggedIsSmi(object), &return_runtime);

  // Goto runtime if {callable} is a Smi.
  GotoIf(TaggedIsSmi(callable), &return_runtime);

  // Load map of {callable}.
  Node* callable_map = LoadMap(callable);

  // Goto runtime if {callable} is not a JSFunction.
  Node* callable_instance_type = LoadMapInstanceType(callable_map);
  GotoIfNot(InstanceTypeEqual(callable_instance_type, JS_FUNCTION_TYPE),
            &return_runtime);

  GotoIfPrototypeRequiresRuntimeLookup(CAST(callable), CAST(callable_map),
                                       &return_runtime);

  // Get the "prototype" (or initial map) of the {callable}.
  Node* callable_prototype =
      LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
  {
    Label callable_prototype_valid(this);
    VARIABLE(var_callable_prototype, MachineRepresentation::kTagged,
             callable_prototype);

    // Resolve the "prototype" if the {callable} has an initial map. Afterwards
    // the {callable_prototype} will be either the JSReceiver prototype object
    // or the hole value, which means that no instances of the {callable} were
    // created so far and hence we should return false.
    Node* callable_prototype_instance_type =
        LoadInstanceType(callable_prototype);
    GotoIfNot(InstanceTypeEqual(callable_prototype_instance_type, MAP_TYPE),
              &callable_prototype_valid);
    var_callable_prototype.Bind(
        LoadObjectField(callable_prototype, Map::kPrototypeOffset));
    Goto(&callable_prototype_valid);
    BIND(&callable_prototype_valid);
    callable_prototype = var_callable_prototype.value();
  }

  // Loop through the prototype chain looking for the {callable} prototype.
  var_result.Bind(HasInPrototypeChain(context, object, callable_prototype));
  Goto(&return_result);

  BIND(&return_runtime);
  {
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
9935 :
// Computes the byte offset of element |index_node| in a backing store with
// elements of kind |kind|, plus |base_size| (typically a header size,
// possibly tag-adjusted). Constant indices are folded to an IntPtr constant
// at graph-building time; otherwise a shift (left or right, depending on
// the net shift amount) and an add are emitted.
TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
                                                         ElementsKind kind,
                                                         ParameterMode mode,
                                                         int base_size) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, mode));
  int element_size_shift = ElementsKindToShiftSize(kind);
  int element_size = 1 << element_size_shift;
  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  intptr_t index = 0;
  bool constant_index = false;
  if (mode == SMI_PARAMETERS) {
    // A Smi index is already shifted left by kSmiShiftBits, so that shift
    // is folded into the element-size shift and the tagged value is
    // reinterpreted as a raw word.
    element_size_shift -= kSmiShiftBits;
    Smi smi_index;
    constant_index = ToSmiConstant(index_node, &smi_index);
    if (constant_index) index = smi_index->value();
    index_node = BitcastTaggedToWord(index_node);
  } else {
    DCHECK(mode == INTPTR_PARAMETERS);
    constant_index = ToIntPtrConstant(index_node, index);
  }
  if (constant_index) {
    return IntPtrConstant(base_size + element_size * index);
  }

  // Non-constant index: offset = base_size + (index << shift), where a
  // negative net shift becomes an arithmetic right shift (Smi-mode index
  // into a small-element array).
  TNode<WordT> shifted_index =
      (element_size_shift == 0)
          ? UncheckedCast<WordT>(index_node)
          : ((element_size_shift > 0)
                 ? WordShl(index_node, IntPtrConstant(element_size_shift))
                 : WordSar(index_node, IntPtrConstant(-element_size_shift)));
  return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
}
9968 :
// Returns true if |offset| addresses an element within an array of |length|
// elements of kind |kind| whose payload starts at |header_size|. Used for
// assertions on raw-offset stores.
TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
                                                 SloppyTNode<IntPtrT> length,
                                                 int header_size,
                                                 ElementsKind kind) {
  // Make sure we point to the last field.
  int element_size = 1 << ElementsKindToShiftSize(kind);
  // Back up by one element (and the heap-object tag) so that the computed
  // offset is that of the last valid element, making the <= check exact.
  int correction = header_size - kHeapObjectTag - element_size;
  TNode<IntPtrT> last_offset =
      ElementOffsetFromIndex(length, kind, INTPTR_PARAMETERS, correction);
  return IntPtrLessThanOrEqual(offset, last_offset);
}
9980 :
// Loads the FeedbackVector of |closure|. If |if_undefined| is provided,
// branches there when the feedback cell's value is undefined (i.e. no vector
// has been allocated yet); otherwise the CAST asserts the value is a vector.
TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVector(
    SloppyTNode<JSFunction> closure, Label* if_undefined) {
  TNode<Object> maybe_vector = LoadFeedbackVectorUnchecked(closure);
  if (if_undefined) {
    GotoIf(IsUndefined(maybe_vector), if_undefined);
  }
  return CAST(maybe_vector);
}
9989 :
9990 12668 : TNode<Object> CodeStubAssembler::LoadFeedbackVectorUnchecked(
9991 : SloppyTNode<JSFunction> closure) {
9992 : TNode<FeedbackCell> feedback_cell =
9993 12668 : CAST(LoadObjectField(closure, JSFunction::kFeedbackCellOffset));
9994 : TNode<Object> maybe_vector =
9995 12668 : LoadObjectField(feedback_cell, FeedbackCell::kValueOffset);
9996 12668 : return maybe_vector;
9997 : }
9998 :
// Loads the feedback vector of the JSFunction found in the parent frame's
// function slot. Intended for stubs that run on behalf of a JS frame.
TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorForStub() {
  TNode<JSFunction> function =
      CAST(LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset));
  return LoadFeedbackVector(function);
}
10004 :
// ORs the Smi |feedback| into slot |slot_id| of |maybe_vector|. A no-op when
// |maybe_vector| is undefined (no feedback vector allocated). The slot is
// only written — and the update reported — when the combined value actually
// differs from the stored one.
void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* maybe_vector,
                                       Node* slot_id) {
  Label end(this);
  // If feedback_vector is not valid, then nothing to do.
  GotoIf(IsUndefined(maybe_vector), &end);

  // This method is used for binary op and compare feedback. These
  // vector nodes are initialized with a smi 0, so we can simply OR
  // our new feedback in place.
  TNode<FeedbackVector> feedback_vector = CAST(maybe_vector);
  TNode<MaybeObject> feedback_element =
      LoadFeedbackVectorSlot(feedback_vector, slot_id);
  TNode<Smi> previous_feedback = CAST(feedback_element);
  TNode<Smi> combined_feedback = SmiOr(previous_feedback, CAST(feedback));

  // Skip the store (and the report) when the feedback is unchanged.
  GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
  {
    // Storing a Smi needs no write barrier.
    StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
                            SKIP_WRITE_BARRIER);
    ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
    Goto(&end);
  }

  BIND(&end);
}
10030 :
// Records that |feedback_vector| was updated at |slot_id|: resets the
// vector's profiler tick counter and, when V8_TRACE_FEEDBACK_UPDATES is
// defined, calls into the runtime to trace the update with |reason|.
void CodeStubAssembler::ReportFeedbackUpdate(
    SloppyTNode<FeedbackVector> feedback_vector, SloppyTNode<IntPtrT> slot_id,
    const char* reason) {
  // Reset profiler ticks.
  StoreObjectFieldNoWriteBarrier(
      feedback_vector, FeedbackVector::kProfilerTicksOffset, Int32Constant(0),
      MachineRepresentation::kWord32);

#ifdef V8_TRACE_FEEDBACK_UPDATES
  // Trace the update.
  CallRuntime(Runtime::kInterpreterTraceUpdateFeedback, NoContextConstant(),
              LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset),
              SmiTag(slot_id), StringConstant(reason));
#endif  // V8_TRACE_FEEDBACK_UPDATES
}
10046 :
10047 33320 : void CodeStubAssembler::OverwriteFeedback(Variable* existing_feedback,
10048 : int new_feedback) {
10049 66640 : if (existing_feedback == nullptr) return;
10050 23184 : existing_feedback->Bind(SmiConstant(new_feedback));
10051 : }
10052 :
10053 26320 : void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
10054 : int feedback) {
10055 52640 : if (existing_feedback == nullptr) return;
10056 : existing_feedback->Bind(
10057 19320 : SmiOr(CAST(existing_feedback->value()), SmiConstant(feedback)));
10058 : }
10059 :
10060 560 : void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
10061 : Node* feedback) {
10062 1120 : if (existing_feedback == nullptr) return;
10063 : existing_feedback->Bind(
10064 504 : SmiOr(CAST(existing_feedback->value()), CAST(feedback)));
10065 : }
10066 :
10067 896 : void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
10068 : Label* if_protector) {
10069 : // This list must be kept in sync with LookupIterator::UpdateProtector!
10070 : // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
10071 896 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kconstructor_string)),
10072 896 : if_protector);
10073 896 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kiterator_symbol)), if_protector);
10074 896 : GotoIf(WordEqual(name, LoadRoot(RootIndex::knext_string)), if_protector);
10075 896 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kspecies_symbol)), if_protector);
10076 896 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kis_concat_spreadable_symbol)),
10077 896 : if_protector);
10078 896 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kresolve_string)), if_protector);
10079 896 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kthen_string)), if_protector);
10080 : // Fall through if no case matched.
10081 896 : }
10082 :
// Loads the map of |receiver|. Smis carry no map, so Smi receivers are
// reported as having the HeapNumber map.
TNode<Map> CodeStubAssembler::LoadReceiverMap(SloppyTNode<Object> receiver) {
  return Select<Map>(
      TaggedIsSmi(receiver),
      [=] { return CAST(LoadRoot(RootIndex::kHeapNumberMap)); },
      [=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
}
10089 :
// Converts |key| to an intptr index. Smis are untagged directly; a
// HeapNumber is accepted only if it holds an exact int32 value. Anything
// else (or a non-integral HeapNumber) jumps to |miss|.
TNode<IntPtrT> CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
  TVARIABLE(IntPtrT, var_intptr_key);
  Label done(this, &var_intptr_key), key_is_smi(this);
  GotoIf(TaggedIsSmi(key), &key_is_smi);
  // Try to convert a heap number to a Smi.
  GotoIfNot(IsHeapNumber(key), miss);
  {
    TNode<Float64T> value = LoadHeapNumberValue(key);
    TNode<Int32T> int_value = RoundFloat64ToInt32(value);
    // Miss unless the double round-trips exactly through int32.
    GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
    var_intptr_key = ChangeInt32ToIntPtr(int_value);
    Goto(&done);
  }

  BIND(&key_is_smi);
  {
    var_intptr_key = SmiUntag(key);
    Goto(&done);
  }

  BIND(&done);
  return var_intptr_key.value();
}
10113 :
// Emits a keyed load (|value| == nullptr) or keyed store (|value| != nullptr)
// on a receiver with sloppy-arguments elements. Returns the loaded value for
// loads, or |value| for stores. Jumps to |bailout| for anything the fast
// path cannot handle: non-Smi or negative key, a backing store that is not a
// plain FixedArray, an out-of-bounds unmapped access, or a hole hit on load.
Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
                                                  Node* value, Label* bailout) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  // A null |value| selects the load path throughout.
  bool is_load = value == nullptr;

  GotoIfNot(TaggedIsSmi(key), bailout);
  key = SmiUntag(key);
  GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);

  TNode<FixedArray> elements = CAST(LoadElements(receiver));
  TNode<IntPtrT> elements_length = LoadAndUntagFixedArrayBaseLength(elements);

  VARIABLE(var_result, MachineRepresentation::kTagged);
  if (!is_load) {
    // Stores return the stored value.
    var_result.Bind(value);
  }
  Label if_mapped(this), if_unmapped(this), end(this, &var_result);
  Node* intptr_two = IntPtrConstant(2);
  // Number of mapped entries (elements.length - 2, see format above).
  Node* adjusted_length = IntPtrSub(elements_length, intptr_two);

  GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);

  TNode<Object> mapped_index =
      LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
  Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);

  BIND(&if_mapped);
  {
    // The mapped entry is a Smi index into the context at elements[0].
    TNode<IntPtrT> mapped_index_intptr = SmiUntag(CAST(mapped_index));
    TNode<Context> the_context = CAST(LoadFixedArrayElement(elements, 0));
    if (is_load) {
      Node* result = LoadContextElement(the_context, mapped_index_intptr);
      CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
      var_result.Bind(result);
    } else {
      StoreContextElement(the_context, mapped_index_intptr, value);
    }
    Goto(&end);
  }

  BIND(&if_unmapped);
  {
    TNode<HeapObject> backing_store_ho =
        CAST(LoadFixedArrayElement(elements, 1));
    // Bail out if the unmapped arguments store is not a plain FixedArray
    // (e.g. it has been normalized to a dictionary).
    GotoIf(WordNotEqual(LoadMap(backing_store_ho), FixedArrayMapConstant()),
           bailout);
    TNode<FixedArray> backing_store = CAST(backing_store_ho);

    TNode<IntPtrT> backing_store_length =
        LoadAndUntagFixedArrayBaseLength(backing_store);
    GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);

    // The key falls into unmapped range.
    if (is_load) {
      Node* result = LoadFixedArrayElement(backing_store, key);
      GotoIf(WordEqual(result, TheHoleConstant()), bailout);
      var_result.Bind(result);
    } else {
      StoreFixedArrayElement(backing_store, key, value);
    }
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
10205 :
10206 840 : TNode<Context> CodeStubAssembler::LoadScriptContext(
10207 : TNode<Context> context, TNode<IntPtrT> context_index) {
10208 840 : TNode<Context> native_context = LoadNativeContext(context);
10209 840 : TNode<ScriptContextTable> script_context_table = CAST(
10210 : LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX));
10211 :
10212 840 : TNode<Context> script_context = CAST(LoadFixedArrayElement(
10213 : script_context_table, context_index,
10214 : ScriptContextTable::kFirstContextSlotIndex * kTaggedSize));
10215 840 : return script_context;
10216 : }
10217 :
10218 : namespace {
10219 :
10220 : // Converts typed array elements kind to a machine representations.
10221 4032 : MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
10222 4032 : switch (kind) {
10223 : case UINT8_CLAMPED_ELEMENTS:
10224 : case UINT8_ELEMENTS:
10225 : case INT8_ELEMENTS:
10226 1344 : return MachineRepresentation::kWord8;
10227 : case UINT16_ELEMENTS:
10228 : case INT16_ELEMENTS:
10229 896 : return MachineRepresentation::kWord16;
10230 : case UINT32_ELEMENTS:
10231 : case INT32_ELEMENTS:
10232 896 : return MachineRepresentation::kWord32;
10233 : case FLOAT32_ELEMENTS:
10234 448 : return MachineRepresentation::kFloat32;
10235 : case FLOAT64_ELEMENTS:
10236 448 : return MachineRepresentation::kFloat64;
10237 : default:
10238 0 : UNREACHABLE();
10239 : }
10240 : }
10241 :
10242 : } // namespace
10243 :
// Stores |value| at |index| into |elements| of kind |kind|. Typed-array
// kinds write the raw machine value with no write barrier; double kinds
// silence NaNs before storing; Smi kinds skip the write barrier, all other
// fast kinds use a full write barrier.
void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
                                     Node* index, Node* value,
                                     ParameterMode mode) {
  if (IsFixedTypedArrayElementsKind(kind)) {
    if (kind == UINT8_CLAMPED_ELEMENTS) {
      // Caller must have clamped the value to [0, 255] already.
      CSA_ASSERT(this,
                 Word32Equal(value, Word32And(Int32Constant(0xFF), value)));
    }
    Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
    // TODO(cbruni): Add OOB check once typed.
    MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
    StoreNoWriteBarrier(rep, elements, offset, value);
    return;
  } else if (IsDoubleElementsKind(kind)) {
    // Make sure we do not store signalling NaNs into double arrays.
    TNode<Float64T> value_silenced = Float64SilenceNaN(value);
    StoreFixedDoubleArrayElement(CAST(elements), index, value_silenced, mode);
  } else {
    WriteBarrierMode barrier_mode =
        IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
    StoreFixedArrayElement(CAST(elements), index, value, barrier_mode, 0, mode);
  }
}
10267 :
10268 392 : Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
10269 392 : Label done(this);
10270 392 : Node* int32_zero = Int32Constant(0);
10271 392 : Node* int32_255 = Int32Constant(255);
10272 784 : VARIABLE(var_value, MachineRepresentation::kWord32, int32_value);
10273 392 : GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
10274 392 : var_value.Bind(int32_zero);
10275 392 : GotoIf(Int32LessThan(int32_value, int32_zero), &done);
10276 392 : var_value.Bind(int32_255);
10277 392 : Goto(&done);
10278 392 : BIND(&done);
10279 784 : return var_value.value();
10280 : }
10281 :
10282 392 : Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
10283 392 : Label done(this);
10284 784 : VARIABLE(var_value, MachineRepresentation::kWord32, Int32Constant(0));
10285 392 : GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
10286 392 : var_value.Bind(Int32Constant(255));
10287 392 : GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
10288 : {
10289 392 : Node* rounded_value = Float64RoundToEven(float64_value);
10290 392 : var_value.Bind(TruncateFloat64ToWord32(rounded_value));
10291 392 : Goto(&done);
10292 : }
10293 392 : BIND(&done);
10294 784 : return var_value.value();
10295 : }
10296 :
// Converts |input| to the machine value stored into a typed array of
// |elements_kind|: word32 for integer kinds (clamped for UINT8_CLAMPED),
// float32/float64 for the float kinds, or a tagged BigInt for the 64-bit
// BigInt kinds. Non-number inputs are converted via the NonNumberToNumber
// builtin, which may call back into JS (hence |context|).
Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
    TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
  DCHECK(IsFixedTypedArrayElementsKind(elements_kind));

  MachineRepresentation rep;
  switch (elements_kind) {
    case UINT8_ELEMENTS:
    case INT8_ELEMENTS:
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
      rep = MachineRepresentation::kWord32;
      break;
    case FLOAT32_ELEMENTS:
      rep = MachineRepresentation::kFloat32;
      break;
    case FLOAT64_ELEMENTS:
      rep = MachineRepresentation::kFloat64;
      break;
    case BIGINT64_ELEMENTS:
    case BIGUINT64_ELEMENTS:
      // BigInt kinds keep a tagged BigInt; no machine conversion here.
      return ToBigInt(context, input);
    default:
      UNREACHABLE();
  }

  VARIABLE(var_result, rep);
  VARIABLE(var_input, MachineRepresentation::kTagged, input);
  Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
      convert(this), loop(this, &var_input);
  Goto(&loop);
  BIND(&loop);
  GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
  // We can handle both HeapNumber and Oddball here, since Oddball has the
  // same layout as the HeapNumber for the HeapNumber::value field. This
  // way we can also properly optimize stores of oddballs to typed arrays.
  GotoIf(IsHeapNumber(var_input.value()), &if_heapnumber_or_oddball);
  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
  Branch(HasInstanceType(var_input.value(), ODDBALL_TYPE),
         &if_heapnumber_or_oddball, &convert);

  BIND(&if_heapnumber_or_oddball);
  {
    Node* value = UncheckedCast<Float64T>(LoadObjectField(
        var_input.value(), HeapNumber::kValueOffset, MachineType::Float64()));
    if (rep == MachineRepresentation::kWord32) {
      if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
        value = Float64ToUint8Clamped(value);
      } else {
        value = TruncateFloat64ToWord32(value);
      }
    } else if (rep == MachineRepresentation::kFloat32) {
      value = TruncateFloat64ToFloat32(value);
    } else {
      DCHECK_EQ(MachineRepresentation::kFloat64, rep);
    }
    var_result.Bind(value);
    Goto(&done);
  }

  BIND(&if_smi);
  {
    Node* value = SmiToInt32(var_input.value());
    if (rep == MachineRepresentation::kFloat32) {
      value = RoundInt32ToFloat32(value);
    } else if (rep == MachineRepresentation::kFloat64) {
      value = ChangeInt32ToFloat64(value);
    } else {
      DCHECK_EQ(MachineRepresentation::kWord32, rep);
      if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
        value = Int32ToUint8Clamped(value);
      }
    }
    var_result.Bind(value);
    Goto(&done);
  }

  BIND(&convert);
  {
    // NonNumberToNumber always yields a Number, so the loop converges after
    // at most one additional iteration.
    var_input.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, input));
    Goto(&loop);
  }

  BIND(&done);
  return var_result.value();
}
10385 :
// Stores |value| (converted to a BigInt) into |elements| of the typed array
// |object| at element index |intptr_key|. If |opt_if_detached| is non-null,
// branches there when the underlying buffer was detached — the detach check
// intentionally happens after ToBigInt, which can run user code.
void CodeStubAssembler::EmitBigTypedArrayElementStore(
    TNode<JSTypedArray> object, TNode<FixedTypedArrayBase> elements,
    TNode<IntPtrT> intptr_key, TNode<Object> value, TNode<Context> context,
    Label* opt_if_detached) {
  TNode<BigInt> bigint_value = ToBigInt(context, value);

  if (opt_if_detached != nullptr) {
    // Check if buffer has been detached. Must happen after {ToBigInt}!
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), opt_if_detached);
  }

  TNode<RawPtrT> backing_store = LoadFixedTypedArrayBackingStore(elements);
  TNode<IntPtrT> offset = ElementOffsetFromIndex(intptr_key, BIGINT64_ELEMENTS,
                                                 INTPTR_PARAMETERS, 0);
  EmitBigTypedArrayElementStore(elements, backing_store, offset, bigint_value);
}
10403 :
// Extracts the low digits of |bigint| as raw machine words: |var_low|
// receives digit 0 and, on 32-bit platforms only, |var_high| receives
// digit 1. Negative BigInts are converted to two's complement form. Only
// the lowest 64 bits are read; higher digits are ignored.
void CodeStubAssembler::BigIntToRawBytes(TNode<BigInt> bigint,
                                         TVariable<UintPtrT>* var_low,
                                         TVariable<UintPtrT>* var_high) {
  Label done(this);
  // Zero-length BigInts (the value 0) yield all-zero words.
  *var_low = Unsigned(IntPtrConstant(0));
  *var_high = Unsigned(IntPtrConstant(0));
  TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
  TNode<Uint32T> length = DecodeWord32<BigIntBase::LengthBits>(bitfield);
  TNode<Uint32T> sign = DecodeWord32<BigIntBase::SignBits>(bitfield);
  GotoIf(Word32Equal(length, Int32Constant(0)), &done);
  *var_low = LoadBigIntDigit(bigint, 0);
  if (!Is64()) {
    // 32-bit: a 64-bit value spans two digits; the second may be absent.
    Label load_done(this);
    GotoIf(Word32Equal(length, Int32Constant(1)), &load_done);
    *var_high = LoadBigIntDigit(bigint, 1);
    Goto(&load_done);
    BIND(&load_done);
  }
  GotoIf(Word32Equal(sign, Int32Constant(0)), &done);
  // Negative value. Simulate two's complement.
  if (!Is64()) {
    *var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high->value()));
    // Borrow from the high word unless the low word is zero.
    Label no_carry(this);
    GotoIf(WordEqual(var_low->value(), IntPtrConstant(0)), &no_carry);
    *var_high = Unsigned(IntPtrSub(var_high->value(), IntPtrConstant(1)));
    Goto(&no_carry);
    BIND(&no_carry);
  }
  *var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low->value()));
  Goto(&done);
  BIND(&done);
}
10436 :
// Writes |bigint_value| as a raw 64-bit element into |backing_store| at byte
// |offset|. On 32-bit platforms the value is written as two words in the
// platform's byte order; no write barrier is needed for off-heap raw data.
void CodeStubAssembler::EmitBigTypedArrayElementStore(
    TNode<FixedTypedArrayBase> elements, TNode<RawPtrT> backing_store,
    TNode<IntPtrT> offset, TNode<BigInt> bigint_value) {
  TVARIABLE(UintPtrT, var_low);
  // Only used on 32-bit platforms.
  TVARIABLE(UintPtrT, var_high);
  BigIntToRawBytes(bigint_value, &var_low, &var_high);

  // Assert that offset < elements.length. Given that it's an offset for a raw
  // pointer we correct it by the usual kHeapObjectTag offset.
  CSA_ASSERT(
      this, IsOffsetInBounds(offset, LoadAndUntagFixedArrayBaseLength(elements),
                             kHeapObjectTag, BIGINT64_ELEMENTS));

  MachineRepresentation rep = WordT::kMachineRepresentation;
#if defined(V8_TARGET_BIG_ENDIAN)
  if (!Is64()) {
    // Big-endian 32-bit: the high word comes first in memory.
    StoreNoWriteBarrier(rep, backing_store, offset, var_high.value());
    StoreNoWriteBarrier(rep, backing_store,
                        IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
                        var_low.value());
  } else {
    StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
  }
#else
  StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
  if (!Is64()) {
    // Little-endian 32-bit: the high word follows the low word.
    StoreNoWriteBarrier(rep, backing_store,
                        IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
                        var_high.value());
  }
#endif
}
10470 :
// Stores |value| into |object|'s element at |key| for |elements_kind| under
// |store_mode| semantics. Handles typed-array kinds (value conversion,
// detach check, bounds check, BigInt kinds) as well as fast Smi/Object/
// Double kinds including optional capacity growth and copy-on-write
// handling. Jumps to |bailout| whenever the runtime must take over.
void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
                                         ElementsKind elements_kind,
                                         KeyedAccessStoreMode store_mode,
                                         Label* bailout, Node* context) {
  CSA_ASSERT(this, Word32BinaryNot(IsJSProxy(object)));

  Node* elements = LoadElements(object);
  if (!IsSmiOrObjectElementsKind(elements_kind)) {
    // Only Smi/Object kinds can have COW backing stores.
    CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
  } else if (!IsCOWHandlingStoreMode(store_mode)) {
    GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
  }

  // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
  ParameterMode parameter_mode = INTPTR_PARAMETERS;
  TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);

  if (IsFixedTypedArrayElementsKind(elements_kind)) {
    Label done(this);

    // IntegerIndexedElementSet converts value to a Number/BigInt prior to the
    // bounds check.
    value = PrepareValueForWriteToTypedArray(CAST(value), elements_kind,
                                             CAST(context));

    // There must be no allocations between the buffer load and
    // and the actual store to backing store, because GC may decide that
    // the buffer is not alive or move the elements.
    // TODO(ishell): introduce DisallowHeapAllocationCode scope here.

    // Check if buffer has been detached.
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), bailout);

    // Bounds check.
    Node* length =
        TaggedToParameter(LoadJSTypedArrayLength(CAST(object)), parameter_mode);

    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Skip the store if we write beyond the length or
      // to a property with a negative integer index.
      GotoIfNot(UintPtrLessThan(intptr_key, length), &done);
    } else if (store_mode == STANDARD_STORE) {
      GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
    } else {
      // This case is produced due to the dispatched call in
      // ElementsTransitionAndStore and StoreFastElement.
      // TODO(jgruber): Avoid generating unsupported combinations to save code
      // size.
      DebugBreak();
    }

    if (elements_kind == BIGINT64_ELEMENTS ||
        elements_kind == BIGUINT64_ELEMENTS) {
      // |value| was produced by PrepareValueForWriteToTypedArray above and is
      // a BigInt for these kinds.
      TNode<BigInt> bigint_value = UncheckedCast<BigInt>(value);

      TNode<RawPtrT> backing_store =
          LoadFixedTypedArrayBackingStore(CAST(elements));
      TNode<IntPtrT> offset = ElementOffsetFromIndex(
          intptr_key, BIGINT64_ELEMENTS, INTPTR_PARAMETERS, 0);
      EmitBigTypedArrayElementStore(CAST(elements), backing_store, offset,
                                    bigint_value);
    } else {
      Node* backing_store = LoadFixedTypedArrayBackingStore(CAST(elements));
      StoreElement(backing_store, elements_kind, intptr_key, value,
                   parameter_mode);
    }
    Goto(&done);

    BIND(&done);
    return;
  }
  DCHECK(IsFastElementsKind(elements_kind));

  // JSArrays carry their own length; other objects use the backing store's.
  Node* length =
      SelectImpl(IsJSArray(object), [=]() { return LoadJSArrayLength(object); },
                 [=]() { return LoadFixedArrayBaseLength(elements); },
                 MachineRepresentation::kTagged);
  length = TaggedToParameter(length, parameter_mode);

  // In case value is stored into a fast smi array, assure that the value is
  // a smi before manipulating the backing store. Otherwise the backing store
  // may be left in an invalid state.
  if (IsSmiElementsKind(elements_kind)) {
    GotoIfNot(TaggedIsSmi(value), bailout);
  } else if (IsDoubleElementsKind(elements_kind)) {
    value = TryTaggedToFloat64(value, bailout);
  }

  if (IsGrowStoreMode(store_mode)) {
    elements = CheckForCapacityGrow(object, elements, elements_kind, length,
                                    intptr_key, parameter_mode, bailout);
  } else {
    GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
  }

  // If we didn't grow {elements}, it might still be COW, in which case we
  // copy it now.
  if (!IsSmiOrObjectElementsKind(elements_kind)) {
    CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
  } else if (IsCOWHandlingStoreMode(store_mode)) {
    elements = CopyElementsOnWrite(object, elements, elements_kind, length,
                                   parameter_mode, bailout);
  }

  CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
  StoreElement(elements, elements_kind, intptr_key, value, parameter_mode);
}
10579 :
// Returns a backing store for |object| guaranteed to have room for index
// |key|: either |elements| itself, a grown copy, or the result of
// Runtime::kGrowArrayElements. For holey kinds any key >= length may grow;
// packed kinds only allow appending at key == length. When growing a
// JSArray, its length field is bumped to key + 1. Jumps to |bailout| if the
// key is out of range in the no-grow case or the runtime refuses to grow
// (signalled by returning a Smi).
Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
                                              ElementsKind kind, Node* length,
                                              Node* key, ParameterMode mode,
                                              Label* bailout) {
  DCHECK(IsFastElementsKind(kind));
  VARIABLE(checked_elements, MachineRepresentation::kTagged);
  Label grow_case(this), no_grow_case(this), done(this),
      grow_bailout(this, Label::kDeferred);

  Node* condition;
  if (IsHoleyElementsKind(kind)) {
    condition = UintPtrGreaterThanOrEqual(key, length);
  } else {
    // We don't support growing here unless the value is being appended.
    condition = WordEqual(key, length);
  }
  Branch(condition, &grow_case, &no_grow_case);

  BIND(&grow_case);
  {
    Node* current_capacity =
        TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
    checked_elements.Bind(elements);
    Label fits_capacity(this);
    // If key is negative, we will notice in Runtime::kGrowArrayElements.
    GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);

    {
      Node* new_elements = TryGrowElementsCapacity(
          object, elements, kind, key, current_capacity, mode, &grow_bailout);
      checked_elements.Bind(new_elements);
      Goto(&fits_capacity);
    }

    BIND(&grow_bailout);
    {
      // Inline growing failed; fall back to the runtime, which needs a
      // tagged key.
      Node* tagged_key = mode == SMI_PARAMETERS
                             ? key
                             : ChangeInt32ToTagged(TruncateIntPtrToInt32(key));
      Node* maybe_elements = CallRuntime(
          Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
      // A Smi result signals that the runtime could not grow the store.
      GotoIf(TaggedIsSmi(maybe_elements), bailout);
      CSA_ASSERT(this, IsFixedArrayWithKind(maybe_elements, kind));
      checked_elements.Bind(maybe_elements);
      Goto(&fits_capacity);
    }

    BIND(&fits_capacity);
    GotoIfNot(IsJSArray(object), &done);

    // Update the JSArray length to include the newly stored element.
    Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
    StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
                                   ParameterToTagged(new_length, mode));
    Goto(&done);
  }

  BIND(&no_grow_case);
  {
    GotoIfNot(UintPtrLessThan(key, length), bailout);
    checked_elements.Bind(elements);
    Goto(&done);
  }

  BIND(&done);
  return checked_elements.value();
}
10646 :
// If |elements| is a copy-on-write FixedArray, replaces |object|'s backing
// store with a writable copy of the same capacity and returns it; otherwise
// returns |elements| unchanged. Jumps to |bailout| if the copy cannot be
// made (propagated from GrowElementsCapacity).
Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
                                             ElementsKind kind, Node* length,
                                             ParameterMode mode,
                                             Label* bailout) {
  VARIABLE(new_elements_var, MachineRepresentation::kTagged, elements);
  Label done(this);

  GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
  {
    // "Grow" to the current capacity, which copies the elements into a
    // fresh, writable backing store.
    Node* capacity =
        TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
    Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
                                              length, capacity, mode, bailout);
    new_elements_var.Bind(new_elements);
    Goto(&done);
  }

  BIND(&done);
  return new_elements_var.value();
}
10667 :
// Transitions |object| from |from_kind| to |to_kind| and installs |map|.
// When the transition is tracked by allocation sites, bails out via
// TrapAllocationMemento if a memento follows the object. Non-simple
// transitions (e.g. Smi/Object <-> Double) additionally re-allocate the
// backing store via GrowElementsCapacity; jumps to |bailout| on failure.
void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
                                               ElementsKind from_kind,
                                               ElementsKind to_kind,
                                               Label* bailout) {
  DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
  if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
    TrapAllocationMemento(object, bailout);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    Comment("Non-simple map transition");
    Node* elements = LoadElements(object);

    Label done(this);
    // Nothing to convert if the object has the canonical empty store.
    GotoIf(WordEqual(elements, EmptyFixedArrayConstant()), &done);

    // TODO(ishell): Use OptimalParameterMode().
    ParameterMode mode = INTPTR_PARAMETERS;
    Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
    // Use the JSArray length when available, the store length otherwise.
    Node* array_length = SelectImpl(
        IsJSArray(object),
        [=]() {
          CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(object)));
          return SmiUntag(LoadFastJSArrayLength(object));
        },
        [=]() { return elements_length; },
        MachineType::PointerRepresentation());

    CSA_ASSERT(this, WordNotEqual(elements_length, IntPtrConstant(0)));

    // Re-allocate the backing store in the target kind's representation.
    GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
                         elements_length, mode, bailout);
    Goto(&done);
    BIND(&done);
  }

  StoreMap(object, map);
}
10706 :
// Jumps to {memento_found} if a valid AllocationMemento directly follows
// {object}; falls through otherwise. Objects outside the young generation,
// or on large-object pages, never have a trailing memento. Care is taken
// never to touch memory past the new-space allocation top or across a page
// boundary, since that memory may be uninitialized or unmapped.
void CodeStubAssembler::TrapAllocationMemento(Node* object,
                                              Label* memento_found) {
  Comment("[ TrapAllocationMemento");
  Label no_memento_found(this);
  Label top_check(this), map_check(this);

  TNode<ExternalReference> new_space_top_address = ExternalConstant(
      ExternalReference::new_space_allocation_top_address(isolate()));
  // The candidate memento starts immediately after the JSArray header.
  const int kMementoMapOffset = JSArray::kSize;
  const int kMementoLastWordOffset =
      kMementoMapOffset + AllocationMemento::kSize - kTaggedSize;

  // Bail out if the object is not in new space.
  TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
  TNode<IntPtrT> object_page = PageFromAddress(object_word);
  {
    TNode<IntPtrT> page_flags =
        UncheckedCast<IntPtrT>(Load(MachineType::IntPtr(), object_page,
                                    IntPtrConstant(Page::kFlagsOffset)));
    GotoIf(WordEqual(
               WordAnd(page_flags,
                       IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
               IntPtrConstant(0)),
           &no_memento_found);
    // TODO(ulan): Support allocation memento for a large object by allocating
    // additional word for the memento after the large object.
    GotoIf(WordNotEqual(WordAnd(page_flags,
                                IntPtrConstant(MemoryChunk::kIsLargePageMask)),
                        IntPtrConstant(0)),
           &no_memento_found);
  }

  TNode<IntPtrT> memento_last_word = IntPtrAdd(
      object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
  TNode<IntPtrT> memento_last_word_page = PageFromAddress(memento_last_word);

  TNode<IntPtrT> new_space_top = UncheckedCast<IntPtrT>(
      Load(MachineType::Pointer(), new_space_top_address));
  TNode<IntPtrT> new_space_top_page = PageFromAddress(new_space_top);

  // If the object is in new space, we need to check whether respective
  // potential memento object is on the same page as the current top.
  GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);

  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  Branch(WordEqual(object_page, memento_last_word_page), &map_check,
         &no_memento_found);

  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  BIND(&top_check);
  {
    Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
           &no_memento_found, &map_check);
  }

  // Memento map check.
  BIND(&map_check);
  {
    TNode<Object> memento_map = LoadObjectField(object, kMementoMapOffset);
    Branch(WordEqual(memento_map, LoadRoot(RootIndex::kAllocationMementoMap)),
           memento_found, &no_memento_found);
  }
  BIND(&no_memento_found);
  Comment("] TrapAllocationMemento");
}
10775 :
10776 11852 : TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) {
10777 11852 : return WordAnd(address, IntPtrConstant(~kPageAlignmentMask));
10778 : }
10779 :
// Allocates a fresh, pretenured AllocationSite, initializes it (mirroring
// AllocationSite::Initialize), links it at the head of the isolate's
// allocation-site list, and stores it into {slot} of {feedback_vector}.
// Returns the new site.
TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
    SloppyTNode<FeedbackVector> feedback_vector, TNode<Smi> slot) {
  TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
  // Sites must survive GCs, so allocate in old space.
  Node* site = Allocate(size, CodeStubAssembler::kPretenured);
  StoreMapNoWriteBarrier(site, RootIndex::kAllocationSiteWithWeakNextMap);
  // Should match AllocationSite::Initialize.
  TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
      IntPtrConstant(0), IntPtrConstant(GetInitialFastElementsKind()));
  StoreObjectFieldNoWriteBarrier(
      site, AllocationSite::kTransitionInfoOrBoilerplateOffset,
      SmiTag(Signed(field)));

  // Unlike literals, constructed arrays don't have nested sites
  TNode<Smi> zero = SmiConstant(0);
  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);

  // Pretenuring calculation field.
  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
                                 Int32Constant(0),
                                 MachineRepresentation::kWord32);

  // Pretenuring memento creation count field.
  StoreObjectFieldNoWriteBarrier(
      site, AllocationSite::kPretenureCreateCountOffset, Int32Constant(0),
      MachineRepresentation::kWord32);

  // Store an empty fixed array for the code dependency.
  StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
                       RootIndex::kEmptyWeakFixedArray);

  // Link the object to the allocation site list
  TNode<ExternalReference> site_list = ExternalConstant(
      ExternalReference::allocation_sites_list_address(isolate()));
  TNode<Object> next_site = CAST(LoadBufferObject(site_list, 0));

  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
  StoreFullTaggedNoWriteBarrier(site_list, site);

  StoreFeedbackVectorSlot(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
                          SMI_PARAMETERS);
  return CAST(site);
}
10827 :
10828 2240 : TNode<MaybeObject> CodeStubAssembler::StoreWeakReferenceInFeedbackVector(
10829 : SloppyTNode<FeedbackVector> feedback_vector, Node* slot,
10830 : SloppyTNode<HeapObject> value, int additional_offset,
10831 : ParameterMode parameter_mode) {
10832 2240 : TNode<MaybeObject> weak_value = MakeWeak(value);
10833 : StoreFeedbackVectorSlot(feedback_vector, slot, weak_value,
10834 : UPDATE_WRITE_BARRIER, additional_offset,
10835 2240 : parameter_mode);
10836 2240 : return weak_value;
10837 : }
10838 :
10839 672 : TNode<BoolT> CodeStubAssembler::NotHasBoilerplate(
10840 : TNode<Object> maybe_literal_site) {
10841 672 : return TaggedIsSmi(maybe_literal_site);
10842 : }
10843 :
10844 336 : TNode<Smi> CodeStubAssembler::LoadTransitionInfo(
10845 : TNode<AllocationSite> allocation_site) {
10846 336 : TNode<Smi> transition_info = CAST(LoadObjectField(
10847 : allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10848 336 : return transition_info;
10849 : }
10850 :
10851 448 : TNode<JSObject> CodeStubAssembler::LoadBoilerplate(
10852 : TNode<AllocationSite> allocation_site) {
10853 448 : TNode<JSObject> boilerplate = CAST(LoadObjectField(
10854 : allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10855 448 : return boilerplate;
10856 : }
10857 :
10858 280 : TNode<Int32T> CodeStubAssembler::LoadElementsKind(
10859 : TNode<AllocationSite> allocation_site) {
10860 280 : TNode<Smi> transition_info = LoadTransitionInfo(allocation_site);
10861 : TNode<Int32T> elements_kind =
10862 : Signed(DecodeWord32<AllocationSite::ElementsKindBits>(
10863 280 : SmiToInt32(transition_info)));
10864 : CSA_ASSERT(this, IsFastElementsKind(elements_kind));
10865 280 : return elements_kind;
10866 : }
10867 :
// Emits a loop whose index runs from {start_index} towards {end_index}
// (exclusive, compared with inequality), advancing by {increment} each
// iteration and calling {body} with the current index. {advance_mode}
// selects whether the index is bumped before (kPre) or after (kPost) the
// body. {vars} are additional variables live across iterations. Returns the
// final index value.
Node* CodeStubAssembler::BuildFastLoop(
    const CodeStubAssembler::VariableList& vars, Node* start_index,
    Node* end_index, const FastLoopBody& body, int increment,
    ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
  // The index variable's representation must match the parameter encoding.
  MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
                                        ? MachineType::PointerRepresentation()
                                        : MachineRepresentation::kTaggedSigned;
  VARIABLE(var, index_rep, start_index);
  VariableList vars_copy(vars.begin(), vars.end(), zone());
  vars_copy.push_back(&var);
  Label loop(this, vars_copy);
  Label after_loop(this);
  // Introduce an explicit second check of the termination condition before the
  // loop that helps turbofan generate better code. If there's only a single
  // check, then the CodeStubAssembler forces it to be at the beginning of the
  // loop requiring a backwards branch at the end of the loop (it's not possible
  // to force the loop header check at the end of the loop and branch forward to
  // it from the pre-header). The extra branch is slower in the case that the
  // loop actually iterates.
  Node* first_check = WordEqual(var.value(), end_index);
  int32_t first_check_val;
  if (ToInt32Constant(first_check, first_check_val)) {
    // Termination condition is statically known: either skip the loop
    // entirely or enter it unconditionally.
    if (first_check_val) return var.value();
    Goto(&loop);
  } else {
    Branch(first_check, &after_loop, &loop);
  }

  BIND(&loop);
  {
    if (advance_mode == IndexAdvanceMode::kPre) {
      Increment(&var, increment, parameter_mode);
    }
    body(var.value());
    if (advance_mode == IndexAdvanceMode::kPost) {
      Increment(&var, increment, parameter_mode);
    }
    Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
  }
  BIND(&after_loop);
  return var.value();
}
10912 :
10913 17724 : void CodeStubAssembler::BuildFastFixedArrayForEach(
10914 : const CodeStubAssembler::VariableList& vars, Node* fixed_array,
10915 : ElementsKind kind, Node* first_element_inclusive,
10916 : Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
10917 : ParameterMode mode, ForEachDirection direction) {
10918 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
10919 : CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
10920 : CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
10921 : CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(fixed_array, kind),
10922 : IsPropertyArray(fixed_array)));
10923 : int32_t first_val;
10924 17724 : bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
10925 : int32_t last_val;
10926 17724 : bool constent_last = ToInt32Constant(last_element_exclusive, last_val);
10927 17724 : if (constant_first && constent_last) {
10928 696 : int delta = last_val - first_val;
10929 : DCHECK_GE(delta, 0);
10930 696 : if (delta <= kElementLoopUnrollThreshold) {
10931 640 : if (direction == ForEachDirection::kForward) {
10932 36 : for (int i = first_val; i < last_val; ++i) {
10933 24 : Node* index = IntPtrConstant(i);
10934 : Node* offset =
10935 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
10936 24 : FixedArray::kHeaderSize - kHeapObjectTag);
10937 24 : body(fixed_array, offset);
10938 : }
10939 : } else {
10940 2756 : for (int i = last_val - 1; i >= first_val; --i) {
10941 2128 : Node* index = IntPtrConstant(i);
10942 : Node* offset =
10943 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
10944 2128 : FixedArray::kHeaderSize - kHeapObjectTag);
10945 2128 : body(fixed_array, offset);
10946 : }
10947 : }
10948 1280 : return;
10949 : }
10950 : }
10951 :
10952 : Node* start =
10953 : ElementOffsetFromIndex(first_element_inclusive, kind, mode,
10954 17084 : FixedArray::kHeaderSize - kHeapObjectTag);
10955 : Node* limit =
10956 : ElementOffsetFromIndex(last_element_exclusive, kind, mode,
10957 17084 : FixedArray::kHeaderSize - kHeapObjectTag);
10958 17084 : if (direction == ForEachDirection::kReverse) std::swap(start, limit);
10959 :
10960 17084 : int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kTaggedSize;
10961 : BuildFastLoop(
10962 : vars, start, limit,
10963 17076 : [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
10964 : direction == ForEachDirection::kReverse ? -increment : increment,
10965 : INTPTR_PARAMETERS,
10966 : direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
10967 17084 : : IndexAdvanceMode::kPost);
10968 : }
10969 :
10970 2656 : void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
10971 : Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
10972 : GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size, mode),
10973 2656 : doesnt_fit);
10974 2656 : }
10975 :
10976 3536 : void CodeStubAssembler::InitializeFieldsWithRoot(Node* object,
10977 : Node* start_offset,
10978 : Node* end_offset,
10979 : RootIndex root_index) {
10980 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
10981 3536 : start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
10982 3536 : end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
10983 3536 : Node* root_value = LoadRoot(root_index);
10984 : BuildFastLoop(
10985 : end_offset, start_offset,
10986 3308 : [this, object, root_value](Node* current) {
10987 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object, current,
10988 3308 : root_value);
10989 3308 : },
10990 : -kTaggedSize, INTPTR_PARAMETERS,
10991 3536 : CodeStubAssembler::IndexAdvanceMode::kPre);
10992 3536 : }
10993 :
// Compares the Numbers {left} and {right} with {op} and branches to
// {if_true}/{if_false}. Smi/Smi pairs are compared directly; any pair
// involving a HeapNumber is routed through a Float64 comparison. Both
// inputs must already be Numbers (Smi or HeapNumber).
void CodeStubAssembler::BranchIfNumberRelationalComparison(
    Operation op, Node* left, Node* right, Label* if_true, Label* if_false) {
  CSA_SLOW_ASSERT(this, IsNumber(left));
  CSA_SLOW_ASSERT(this, IsNumber(right));

  Label do_float_comparison(this);
  TVARIABLE(Float64T, var_left_float);
  TVARIABLE(Float64T, var_right_float);

  Branch(TaggedIsSmi(left),
         [&] {
           TNode<Smi> smi_left = CAST(left);

           Branch(TaggedIsSmi(right),
                  [&] {
                    TNode<Smi> smi_right = CAST(right);

                    // Both {left} and {right} are Smi, so just perform a fast
                    // Smi comparison.
                    switch (op) {
                      case Operation::kEqual:
                        BranchIfSmiEqual(smi_left, smi_right, if_true,
                                         if_false);
                        break;
                      case Operation::kLessThan:
                        BranchIfSmiLessThan(smi_left, smi_right, if_true,
                                            if_false);
                        break;
                      case Operation::kLessThanOrEqual:
                        BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true,
                                                   if_false);
                        break;
                      case Operation::kGreaterThan:
                        // a > b  <=>  b < a, so reuse the less-than helper
                        // with swapped operands.
                        BranchIfSmiLessThan(smi_right, smi_left, if_true,
                                            if_false);
                        break;
                      case Operation::kGreaterThanOrEqual:
                        BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true,
                                                   if_false);
                        break;
                      default:
                        UNREACHABLE();
                    }
                  },
                  [&] {
                    // Smi vs. HeapNumber: promote both to Float64.
                    CSA_ASSERT(this, IsHeapNumber(right));
                    var_left_float = SmiToFloat64(smi_left);
                    var_right_float = LoadHeapNumberValue(right);
                    Goto(&do_float_comparison);
                  });
         },
         [&] {
           // {left} is a HeapNumber; {right} may be either.
           CSA_ASSERT(this, IsHeapNumber(left));
           var_left_float = LoadHeapNumberValue(left);

           Branch(TaggedIsSmi(right),
                  [&] {
                    var_right_float = SmiToFloat64(right);
                    Goto(&do_float_comparison);
                  },
                  [&] {
                    CSA_ASSERT(this, IsHeapNumber(right));
                    var_right_float = LoadHeapNumberValue(right);
                    Goto(&do_float_comparison);
                  });
         });

  // Slow path: compare the two Float64 values according to {op}.
  BIND(&do_float_comparison);
  {
    switch (op) {
      case Operation::kEqual:
        Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
               if_true, if_false);
        break;
      case Operation::kLessThan:
        Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
               if_true, if_false);
        break;
      case Operation::kLessThanOrEqual:
        Branch(Float64LessThanOrEqual(var_left_float.value(),
                                      var_right_float.value()),
               if_true, if_false);
        break;
      case Operation::kGreaterThan:
        Branch(
            Float64GreaterThan(var_left_float.value(), var_right_float.value()),
            if_true, if_false);
        break;
      case Operation::kGreaterThanOrEqual:
        Branch(Float64GreaterThanOrEqual(var_left_float.value(),
                                         var_right_float.value()),
               if_true, if_false);
        break;
      default:
        UNREACHABLE();
    }
  }
}
11092 :
11093 2984 : void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(Node* left, Node* right,
11094 : Label* if_true) {
11095 2984 : Label if_false(this);
11096 : BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
11097 2984 : right, if_true, &if_false);
11098 2984 : BIND(&if_false);
11099 2984 : }
11100 :
11101 : namespace {
11102 2688 : Operation Reverse(Operation op) {
11103 2688 : switch (op) {
11104 : case Operation::kLessThan:
11105 672 : return Operation::kGreaterThan;
11106 : case Operation::kLessThanOrEqual:
11107 672 : return Operation::kGreaterThanOrEqual;
11108 : case Operation::kGreaterThan:
11109 672 : return Operation::kLessThan;
11110 : case Operation::kGreaterThanOrEqual:
11111 672 : return Operation::kLessThanOrEqual;
11112 : default:
11113 0 : break;
11114 : }
11115 0 : UNREACHABLE();
11116 : }
11117 : } // anonymous namespace
11118 :
11119 896 : Node* CodeStubAssembler::RelationalComparison(Operation op, Node* left,
11120 : Node* right, Node* context,
11121 : Variable* var_type_feedback) {
11122 1792 : Label return_true(this), return_false(this), do_float_comparison(this),
11123 1792 : end(this);
11124 1792 : TVARIABLE(Oddball, var_result); // Actually only "true" or "false".
11125 1792 : TVARIABLE(Float64T, var_left_float);
11126 1792 : TVARIABLE(Float64T, var_right_float);
11127 :
11128 : // We might need to loop several times due to ToPrimitive and/or ToNumeric
11129 : // conversions.
11130 1792 : VARIABLE(var_left, MachineRepresentation::kTagged, left);
11131 1792 : VARIABLE(var_right, MachineRepresentation::kTagged, right);
11132 1792 : VariableList loop_variable_list({&var_left, &var_right}, zone());
11133 896 : if (var_type_feedback != nullptr) {
11134 : // Initialize the type feedback to None. The current feedback is combined
11135 : // with the previous feedback.
11136 672 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
11137 672 : loop_variable_list.push_back(var_type_feedback);
11138 : }
11139 1792 : Label loop(this, loop_variable_list);
11140 896 : Goto(&loop);
11141 896 : BIND(&loop);
11142 : {
11143 896 : left = var_left.value();
11144 896 : right = var_right.value();
11145 :
11146 1792 : Label if_left_smi(this), if_left_not_smi(this);
11147 896 : Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11148 :
11149 896 : BIND(&if_left_smi);
11150 : {
11151 896 : TNode<Smi> smi_left = CAST(left);
11152 1792 : Label if_right_smi(this), if_right_heapnumber(this),
11153 1792 : if_right_bigint(this, Label::kDeferred),
11154 1792 : if_right_not_numeric(this, Label::kDeferred);
11155 896 : GotoIf(TaggedIsSmi(right), &if_right_smi);
11156 896 : Node* right_map = LoadMap(right);
11157 896 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11158 896 : Node* right_instance_type = LoadMapInstanceType(right_map);
11159 1792 : Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11160 896 : &if_right_not_numeric);
11161 :
11162 896 : BIND(&if_right_smi);
11163 : {
11164 896 : TNode<Smi> smi_right = CAST(right);
11165 : CombineFeedback(var_type_feedback,
11166 896 : CompareOperationFeedback::kSignedSmall);
11167 896 : switch (op) {
11168 : case Operation::kLessThan:
11169 : BranchIfSmiLessThan(smi_left, smi_right, &return_true,
11170 224 : &return_false);
11171 224 : break;
11172 : case Operation::kLessThanOrEqual:
11173 : BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
11174 224 : &return_false);
11175 224 : break;
11176 : case Operation::kGreaterThan:
11177 : BranchIfSmiLessThan(smi_right, smi_left, &return_true,
11178 224 : &return_false);
11179 224 : break;
11180 : case Operation::kGreaterThanOrEqual:
11181 : BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
11182 224 : &return_false);
11183 224 : break;
11184 : default:
11185 0 : UNREACHABLE();
11186 : }
11187 : }
11188 :
11189 896 : BIND(&if_right_heapnumber);
11190 : {
11191 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11192 896 : var_left_float = SmiToFloat64(smi_left);
11193 896 : var_right_float = LoadHeapNumberValue(right);
11194 896 : Goto(&do_float_comparison);
11195 : }
11196 :
11197 896 : BIND(&if_right_bigint);
11198 : {
11199 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11200 1792 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11201 : NoContextConstant(),
11202 896 : SmiConstant(Reverse(op)), right, left));
11203 896 : Goto(&end);
11204 : }
11205 :
11206 896 : BIND(&if_right_not_numeric);
11207 : {
11208 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11209 : // Convert {right} to a Numeric; we don't need to perform the
11210 : // dedicated ToPrimitive(right, hint Number) operation, as the
11211 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11212 : // a Number hint.
11213 : var_right.Bind(
11214 896 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11215 896 : Goto(&loop);
11216 896 : }
11217 : }
11218 :
11219 896 : BIND(&if_left_not_smi);
11220 : {
11221 896 : Node* left_map = LoadMap(left);
11222 :
11223 1792 : Label if_right_smi(this), if_right_not_smi(this);
11224 896 : Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11225 :
11226 896 : BIND(&if_right_smi);
11227 : {
11228 1792 : Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11229 1792 : if_left_not_numeric(this, Label::kDeferred);
11230 896 : GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11231 896 : Node* left_instance_type = LoadMapInstanceType(left_map);
11232 1792 : Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
11233 896 : &if_left_not_numeric);
11234 :
11235 896 : BIND(&if_left_heapnumber);
11236 : {
11237 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11238 896 : var_left_float = LoadHeapNumberValue(left);
11239 896 : var_right_float = SmiToFloat64(right);
11240 896 : Goto(&do_float_comparison);
11241 : }
11242 :
11243 896 : BIND(&if_left_bigint);
11244 : {
11245 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11246 1792 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11247 : NoContextConstant(), SmiConstant(op),
11248 896 : left, right));
11249 896 : Goto(&end);
11250 : }
11251 :
11252 896 : BIND(&if_left_not_numeric);
11253 : {
11254 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11255 : // Convert {left} to a Numeric; we don't need to perform the
11256 : // dedicated ToPrimitive(left, hint Number) operation, as the
11257 : // ToNumeric(left) will by itself already invoke ToPrimitive with
11258 : // a Number hint.
11259 : var_left.Bind(
11260 896 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11261 896 : Goto(&loop);
11262 896 : }
11263 : }
11264 :
11265 896 : BIND(&if_right_not_smi);
11266 : {
11267 896 : Node* right_map = LoadMap(right);
11268 :
11269 1792 : Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11270 1792 : if_left_string(this), if_left_other(this, Label::kDeferred);
11271 896 : GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11272 896 : Node* left_instance_type = LoadMapInstanceType(left_map);
11273 896 : GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
11274 1792 : Branch(IsStringInstanceType(left_instance_type), &if_left_string,
11275 896 : &if_left_other);
11276 :
11277 896 : BIND(&if_left_heapnumber);
11278 : {
11279 896 : Label if_right_heapnumber(this),
11280 1792 : if_right_bigint(this, Label::kDeferred),
11281 1792 : if_right_not_numeric(this, Label::kDeferred);
11282 896 : GotoIf(WordEqual(right_map, left_map), &if_right_heapnumber);
11283 896 : Node* right_instance_type = LoadMapInstanceType(right_map);
11284 1792 : Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11285 896 : &if_right_not_numeric);
11286 :
11287 896 : BIND(&if_right_heapnumber);
11288 : {
11289 : CombineFeedback(var_type_feedback,
11290 896 : CompareOperationFeedback::kNumber);
11291 896 : var_left_float = LoadHeapNumberValue(left);
11292 896 : var_right_float = LoadHeapNumberValue(right);
11293 896 : Goto(&do_float_comparison);
11294 : }
11295 :
11296 896 : BIND(&if_right_bigint);
11297 : {
11298 : OverwriteFeedback(var_type_feedback,
11299 896 : CompareOperationFeedback::kAny);
11300 1792 : var_result = CAST(CallRuntime(
11301 : Runtime::kBigIntCompareToNumber, NoContextConstant(),
11302 896 : SmiConstant(Reverse(op)), right, left));
11303 896 : Goto(&end);
11304 : }
11305 :
11306 896 : BIND(&if_right_not_numeric);
11307 : {
11308 : OverwriteFeedback(var_type_feedback,
11309 896 : CompareOperationFeedback::kAny);
11310 : // Convert {right} to a Numeric; we don't need to perform
11311 : // dedicated ToPrimitive(right, hint Number) operation, as the
11312 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11313 : // a Number hint.
11314 : var_right.Bind(
11315 896 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11316 896 : Goto(&loop);
11317 896 : }
11318 : }
11319 :
11320 896 : BIND(&if_left_bigint);
11321 : {
11322 1792 : Label if_right_heapnumber(this), if_right_bigint(this),
11323 1792 : if_right_string(this), if_right_other(this);
11324 896 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11325 896 : Node* right_instance_type = LoadMapInstanceType(right_map);
11326 896 : GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11327 1792 : Branch(IsStringInstanceType(right_instance_type), &if_right_string,
11328 896 : &if_right_other);
11329 :
11330 896 : BIND(&if_right_heapnumber);
11331 : {
11332 : OverwriteFeedback(var_type_feedback,
11333 896 : CompareOperationFeedback::kAny);
11334 1792 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11335 : NoContextConstant(), SmiConstant(op),
11336 896 : left, right));
11337 896 : Goto(&end);
11338 : }
11339 :
11340 896 : BIND(&if_right_bigint);
11341 : {
11342 : CombineFeedback(var_type_feedback,
11343 896 : CompareOperationFeedback::kBigInt);
11344 1792 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToBigInt,
11345 : NoContextConstant(), SmiConstant(op),
11346 896 : left, right));
11347 896 : Goto(&end);
11348 : }
11349 :
11350 896 : BIND(&if_right_string);
11351 : {
11352 : OverwriteFeedback(var_type_feedback,
11353 896 : CompareOperationFeedback::kAny);
11354 1792 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,
11355 : NoContextConstant(), SmiConstant(op),
11356 896 : left, right));
11357 896 : Goto(&end);
11358 : }
11359 :
11360 : // {right} is not a Number, BigInt, or String.
11361 896 : BIND(&if_right_other);
11362 : {
11363 : OverwriteFeedback(var_type_feedback,
11364 896 : CompareOperationFeedback::kAny);
11365 : // Convert {right} to a Numeric; we don't need to perform
11366 : // dedicated ToPrimitive(right, hint Number) operation, as the
11367 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11368 : // a Number hint.
11369 : var_right.Bind(
11370 896 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11371 896 : Goto(&loop);
11372 896 : }
11373 : }
11374 :
11375 896 : BIND(&if_left_string);
11376 : {
11377 896 : Node* right_instance_type = LoadMapInstanceType(right_map);
11378 :
11379 896 : Label if_right_not_string(this, Label::kDeferred);
11380 1792 : GotoIfNot(IsStringInstanceType(right_instance_type),
11381 896 : &if_right_not_string);
11382 :
11383 : // Both {left} and {right} are strings.
11384 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
11385 : Builtins::Name builtin;
11386 896 : switch (op) {
11387 : case Operation::kLessThan:
11388 224 : builtin = Builtins::kStringLessThan;
11389 224 : break;
11390 : case Operation::kLessThanOrEqual:
11391 224 : builtin = Builtins::kStringLessThanOrEqual;
11392 224 : break;
11393 : case Operation::kGreaterThan:
11394 224 : builtin = Builtins::kStringGreaterThan;
11395 224 : break;
11396 : case Operation::kGreaterThanOrEqual:
11397 224 : builtin = Builtins::kStringGreaterThanOrEqual;
11398 224 : break;
11399 : default:
11400 0 : UNREACHABLE();
11401 : }
11402 896 : var_result = CAST(CallBuiltin(builtin, context, left, right));
11403 896 : Goto(&end);
11404 :
11405 896 : BIND(&if_right_not_string);
11406 : {
11407 : OverwriteFeedback(var_type_feedback,
11408 896 : CompareOperationFeedback::kAny);
11409 : // {left} is a String, while {right} isn't. Check if {right} is
11410 : // a BigInt, otherwise call ToPrimitive(right, hint Number) if
11411 : // {right} is a receiver, or ToNumeric(left) and then
11412 : // ToNumeric(right) in the other cases.
11413 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11414 896 : Label if_right_bigint(this),
11415 1792 : if_right_receiver(this, Label::kDeferred);
11416 896 : GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11417 1792 : GotoIf(IsJSReceiverInstanceType(right_instance_type),
11418 896 : &if_right_receiver);
11419 :
11420 : var_left.Bind(
11421 896 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11422 896 : var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11423 896 : Goto(&loop);
11424 :
11425 896 : BIND(&if_right_bigint);
11426 : {
11427 1792 : var_result = CAST(CallRuntime(
11428 : Runtime::kBigIntCompareToString, NoContextConstant(),
11429 896 : SmiConstant(Reverse(op)), right, left));
11430 896 : Goto(&end);
11431 : }
11432 :
11433 896 : BIND(&if_right_receiver);
11434 : {
11435 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11436 896 : isolate(), ToPrimitiveHint::kNumber);
11437 896 : var_right.Bind(CallStub(callable, context, right));
11438 896 : Goto(&loop);
11439 896 : }
11440 896 : }
11441 : }
11442 :
11443 896 : BIND(&if_left_other);
11444 : {
11445 : // {left} is neither a Numeric nor a String, and {right} is not a Smi.
11446 896 : if (var_type_feedback != nullptr) {
11447 : // Collect NumberOrOddball feedback if {left} is an Oddball
11448 : // and {right} is either a HeapNumber or Oddball. Otherwise collect
11449 : // Any feedback.
11450 1344 : Label collect_any_feedback(this), collect_oddball_feedback(this),
11451 1344 : collect_feedback_done(this);
11452 1344 : GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
11453 672 : &collect_any_feedback);
11454 :
11455 672 : GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
11456 672 : Node* right_instance_type = LoadMapInstanceType(right_map);
11457 1344 : Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
11458 672 : &collect_oddball_feedback, &collect_any_feedback);
11459 :
11460 672 : BIND(&collect_oddball_feedback);
11461 : {
11462 : CombineFeedback(var_type_feedback,
11463 672 : CompareOperationFeedback::kNumberOrOddball);
11464 672 : Goto(&collect_feedback_done);
11465 : }
11466 :
11467 672 : BIND(&collect_any_feedback);
11468 : {
11469 : OverwriteFeedback(var_type_feedback,
11470 672 : CompareOperationFeedback::kAny);
11471 672 : Goto(&collect_feedback_done);
11472 : }
11473 :
11474 1344 : BIND(&collect_feedback_done);
11475 : }
11476 :
11477 : // If {left} is a receiver, call ToPrimitive(left, hint Number).
11478 : // Otherwise call ToNumeric(right) and then ToNumeric(left), the
11479 : // order here is important as it's observable by user code.
11480 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11481 896 : Label if_left_receiver(this, Label::kDeferred);
11482 1792 : GotoIf(IsJSReceiverInstanceType(left_instance_type),
11483 896 : &if_left_receiver);
11484 :
11485 896 : var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11486 : var_left.Bind(
11487 896 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11488 896 : Goto(&loop);
11489 :
11490 896 : BIND(&if_left_receiver);
11491 : {
11492 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11493 896 : isolate(), ToPrimitiveHint::kNumber);
11494 896 : var_left.Bind(CallStub(callable, context, left));
11495 896 : Goto(&loop);
11496 896 : }
11497 896 : }
11498 896 : }
11499 896 : }
11500 : }
11501 :
11502 896 : BIND(&do_float_comparison);
11503 : {
11504 896 : switch (op) {
11505 : case Operation::kLessThan:
11506 448 : Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
11507 224 : &return_true, &return_false);
11508 224 : break;
11509 : case Operation::kLessThanOrEqual:
11510 : Branch(Float64LessThanOrEqual(var_left_float.value(),
11511 448 : var_right_float.value()),
11512 224 : &return_true, &return_false);
11513 224 : break;
11514 : case Operation::kGreaterThan:
11515 : Branch(
11516 448 : Float64GreaterThan(var_left_float.value(), var_right_float.value()),
11517 224 : &return_true, &return_false);
11518 224 : break;
11519 : case Operation::kGreaterThanOrEqual:
11520 : Branch(Float64GreaterThanOrEqual(var_left_float.value(),
11521 448 : var_right_float.value()),
11522 224 : &return_true, &return_false);
11523 224 : break;
11524 : default:
11525 0 : UNREACHABLE();
11526 : }
11527 : }
11528 :
11529 896 : BIND(&return_true);
11530 : {
11531 896 : var_result = TrueConstant();
11532 896 : Goto(&end);
11533 : }
11534 :
11535 896 : BIND(&return_false);
11536 : {
11537 896 : var_result = FalseConstant();
11538 896 : Goto(&end);
11539 : }
11540 :
11541 896 : BIND(&end);
11542 1792 : return var_result.value();
11543 : }
11544 :
11545 1120 : TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
11546 : SloppyTNode<Int32T> instance_type) {
11547 : TNode<Smi> feedback = SelectSmiConstant(
11548 : Word32Equal(
11549 2240 : Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
11550 4480 : Int32Constant(kInternalizedTag)),
11551 : CompareOperationFeedback::kInternalizedString,
11552 1120 : CompareOperationFeedback::kString);
11553 1120 : return feedback;
11554 : }
11555 :
// Handles the case where both operands of an (abstract or strict) equality
// comparison reference the exact same object {value}: jumps to {if_equal} or
// {if_notequal}. A dedicated path is needed because a HeapNumber holding NaN
// is not equal to itself. When {var_type_feedback} is non-null, feedback for
// the observed type of {value} is combined/recorded as a side effect.
void CodeStubAssembler::GenerateEqual_Same(Node* value, Label* if_equal,
                                           Label* if_notequal,
                                           Variable* var_type_feedback) {
  // In case of abstract or strict equality checks, we need additional checks
  // for NaN values because they are not considered equal, even if both the
  // left and the right hand side reference exactly the same value.

  Label if_smi(this), if_heapnumber(this);
  GotoIf(TaggedIsSmi(value), &if_smi);

  Node* value_map = LoadMap(value);
  GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);

  // For non-HeapNumbers, all we do is collect type feedback.
  if (var_type_feedback != nullptr) {
    Node* instance_type = LoadMapInstanceType(value_map);

    Label if_string(this), if_receiver(this), if_oddball(this), if_symbol(this),
        if_bigint(this);
    GotoIf(IsStringInstanceType(instance_type), &if_string);
    GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
    GotoIf(IsOddballInstanceType(instance_type), &if_oddball);
    // Anything left is either a BigInt or a Symbol.
    Branch(IsBigIntInstanceType(instance_type), &if_bigint, &if_symbol);

    BIND(&if_string);
    {
      CSA_ASSERT(this, IsString(value));
      // String feedback distinguishes internalized strings from generic ones.
      CombineFeedback(var_type_feedback,
                      CollectFeedbackForString(instance_type));
      Goto(if_equal);
    }

    BIND(&if_symbol);
    {
      CSA_ASSERT(this, IsSymbol(value));
      CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
      Goto(if_equal);
    }

    BIND(&if_receiver);
    {
      CSA_ASSERT(this, IsJSReceiver(value));
      CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
      Goto(if_equal);
    }

    BIND(&if_bigint);
    {
      CSA_ASSERT(this, IsBigInt(value));
      CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
      Goto(if_equal);
    }

    BIND(&if_oddball);
    {
      CSA_ASSERT(this, IsOddball(value));
      Label if_boolean(this), if_not_boolean(this);
      Branch(IsBooleanMap(value_map), &if_boolean, &if_not_boolean);

      BIND(&if_boolean);
      {
        // Identical booleans fall back to generic kAny feedback.
        CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
        Goto(if_equal);
      }

      BIND(&if_not_boolean);
      {
        // Non-boolean oddballs here are Null or Undefined.
        CSA_ASSERT(this, IsNullOrUndefined(value));
        CombineFeedback(var_type_feedback,
                        CompareOperationFeedback::kReceiverOrNullOrUndefined);
        Goto(if_equal);
      }
    }
  } else {
    Goto(if_equal);
  }

  BIND(&if_heapnumber);
  {
    CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
    // A HeapNumber is equal to itself unless its value is NaN.
    Node* number_value = LoadHeapNumberValue(value);
    BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
  }

  BIND(&if_smi);
  {
    // A Smi is trivially equal to itself.
    CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
    Goto(if_equal);
  }
}
11646 :
// ES6 section 7.2.12 Abstract Equality Comparison
//
// Emits code for the JavaScript `left == right` comparison and returns the
// resulting Boolean as a Node. {context} is needed for the ToPrimitive and
// string-to-number conversions performed via builtins/stubs. When
// {var_type_feedback} is non-null, the observed CompareOperationFeedback is
// accumulated into it as a side effect.
Node* CodeStubAssembler::Equal(Node* left, Node* right, Node* context,
                               Variable* var_type_feedback) {
  // This is a slightly optimized version of Object::Equals. Whenever you
  // change something functionality wise in here, remember to update the
  // Object::Equals method as well.

  Label if_equal(this), if_notequal(this), do_float_comparison(this),
      do_right_stringtonumber(this, Label::kDeferred), end(this);
  VARIABLE(result, MachineRepresentation::kTagged);
  TVARIABLE(Float64T, var_left_float);
  TVARIABLE(Float64T, var_right_float);

  // We can avoid code duplication by exploiting the fact that abstract equality
  // is symmetric.
  Label use_symmetry(this);

  // We might need to loop several times due to ToPrimitive and/or ToNumber
  // conversions.
  VARIABLE(var_left, MachineRepresentation::kTagged, left);
  VARIABLE(var_right, MachineRepresentation::kTagged, right);
  VariableList loop_variable_list({&var_left, &var_right}, zone());
  if (var_type_feedback != nullptr) {
    // Initialize the type feedback to None. The current feedback will be
    // combined with the previous feedback.
    OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
    loop_variable_list.push_back(var_type_feedback);
  }
  Label loop(this, loop_variable_list);
  Goto(&loop);
  BIND(&loop);
  {
    left = var_left.value();
    right = var_right.value();

    Label if_notsame(this);
    GotoIf(WordNotEqual(left, right), &if_notsame);
    {
      // {left} and {right} reference the exact same value, yet we need special
      // treatment for HeapNumber, as NaN is not equal to NaN.
      GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
    }

    BIND(&if_notsame);
    Label if_left_smi(this), if_left_not_smi(this);
    Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);

    BIND(&if_left_smi);
    {
      Label if_right_smi(this), if_right_not_smi(this);
      Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);

      BIND(&if_right_smi);
      {
        // We have already checked for {left} and {right} being the same value,
        // so when we get here they must be different Smis.
        CombineFeedback(var_type_feedback,
                        CompareOperationFeedback::kSignedSmall);
        Goto(&if_notequal);
      }

      BIND(&if_right_not_smi);
      Node* right_map = LoadMap(right);
      Label if_right_heapnumber(this), if_right_boolean(this),
          if_right_bigint(this, Label::kDeferred),
          if_right_receiver(this, Label::kDeferred);
      GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
      // {left} is Smi and {right} is not HeapNumber or Smi.
      if (var_type_feedback != nullptr) {
        var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
      }
      GotoIf(IsBooleanMap(right_map), &if_right_boolean);
      Node* right_type = LoadMapInstanceType(right_map);
      GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
      GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
      Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
             &if_notequal);

      BIND(&if_right_heapnumber);
      {
        // Smi vs HeapNumber: compare as floating point values.
        var_left_float = SmiToFloat64(left);
        var_right_float = LoadHeapNumberValue(right);
        CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
        Goto(&do_float_comparison);
      }

      BIND(&if_right_boolean);
      {
        // Replace the Boolean {right} with its cached number value and retry.
        var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
        Goto(&loop);
      }

      BIND(&if_right_bigint);
      {
        result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
                                NoContextConstant(), right, left));
        Goto(&end);
      }

      BIND(&if_right_receiver);
      {
        // ToPrimitive({right}) and retry the comparison.
        Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
        var_right.Bind(CallStub(callable, context, right));
        Goto(&loop);
      }
    }

    BIND(&if_left_not_smi);
    {
      GotoIf(TaggedIsSmi(right), &use_symmetry);

      Label if_left_symbol(this), if_left_number(this), if_left_string(this),
          if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
          if_left_receiver(this);

      Node* left_map = LoadMap(left);
      Node* right_map = LoadMap(right);
      Node* left_type = LoadMapInstanceType(left_map);
      Node* right_type = LoadMapInstanceType(right_map);

      GotoIf(IsStringInstanceType(left_type), &if_left_string);
      GotoIf(IsSymbolInstanceType(left_type), &if_left_symbol);
      GotoIf(IsHeapNumberInstanceType(left_type), &if_left_number);
      GotoIf(IsOddballInstanceType(left_type), &if_left_oddball);
      Branch(IsBigIntInstanceType(left_type), &if_left_bigint,
             &if_left_receiver);

      BIND(&if_left_string);
      {
        GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
        result.Bind(CallBuiltin(Builtins::kStringEqual, context, left, right));
        CombineFeedback(var_type_feedback,
                        SmiOr(CollectFeedbackForString(left_type),
                              CollectFeedbackForString(right_type)));
        Goto(&end);
      }

      BIND(&if_left_number);
      {
        Label if_right_not_number(this);
        // Same instance type as the HeapNumber {left} means {right} is a
        // HeapNumber too.
        GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);

        var_left_float = LoadHeapNumberValue(left);
        var_right_float = LoadHeapNumberValue(right);
        CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
        Goto(&do_float_comparison);

        BIND(&if_right_not_number);
        {
          Label if_right_boolean(this);
          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kAny));
          }
          GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
          GotoIf(IsBooleanMap(right_map), &if_right_boolean);
          GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
          Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
                 &if_notequal);

          BIND(&if_right_boolean);
          {
            var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
            Goto(&loop);
          }
        }
      }

      BIND(&if_left_bigint);
      {
        Label if_right_heapnumber(this), if_right_bigint(this),
            if_right_string(this), if_right_boolean(this);
        GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
        GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
        GotoIf(IsStringInstanceType(right_type), &if_right_string);
        GotoIf(IsBooleanMap(right_map), &if_right_boolean);
        Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
               &if_notequal);

        BIND(&if_right_heapnumber);
        {
          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kAny));
          }
          result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
                                  NoContextConstant(), left, right));
          Goto(&end);
        }

        BIND(&if_right_bigint);
        {
          CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
          result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
                                  NoContextConstant(), left, right));
          Goto(&end);
        }

        BIND(&if_right_string);
        {
          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kAny));
          }
          result.Bind(CallRuntime(Runtime::kBigIntEqualToString,
                                  NoContextConstant(), left, right));
          Goto(&end);
        }

        BIND(&if_right_boolean);
        {
          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kAny));
          }
          var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
          Goto(&loop);
        }
      }

      BIND(&if_left_oddball);
      {
        Label if_left_boolean(this), if_left_not_boolean(this);
        Branch(IsBooleanMap(left_map), &if_left_boolean, &if_left_not_boolean);

        BIND(&if_left_not_boolean);
        {
          // {left} is either Null or Undefined. Check if {right} is
          // undetectable (which includes Null and Undefined).
          Label if_right_undetectable(this), if_right_not_undetectable(this);
          Branch(IsUndetectableMap(right_map), &if_right_undetectable,
                 &if_right_not_undetectable);

          BIND(&if_right_undetectable);
          {
            if (var_type_feedback != nullptr) {
              // If {right} is undetectable, it must be either also
              // Null or Undefined, or a Receiver (aka document.all).
              var_type_feedback->Bind(SmiConstant(
                  CompareOperationFeedback::kReceiverOrNullOrUndefined));
            }
            Goto(&if_equal);
          }

          BIND(&if_right_not_undetectable);
          {
            if (var_type_feedback != nullptr) {
              // Track whether {right} is Null, Undefined or Receiver.
              var_type_feedback->Bind(SmiConstant(
                  CompareOperationFeedback::kReceiverOrNullOrUndefined));
              GotoIf(IsJSReceiverInstanceType(right_type), &if_notequal);
              GotoIfNot(IsBooleanMap(right_map), &if_notequal);
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kAny));
            }
            Goto(&if_notequal);
          }
        }

        BIND(&if_left_boolean);
        {
          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kAny));
          }

          // If {right} is a Boolean too, it must be a different Boolean.
          GotoIf(WordEqual(right_map, left_map), &if_notequal);

          // Otherwise, convert {left} to number and try again.
          var_left.Bind(LoadObjectField(left, Oddball::kToNumberOffset));
          Goto(&loop);
        }
      }

      BIND(&if_left_symbol);
      {
        Label if_right_receiver(this);
        GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
        // {right} is not a JSReceiver and also not the same Symbol as {left},
        // so the result is "not equal".
        if (var_type_feedback != nullptr) {
          Label if_right_symbol(this);
          GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
          var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
          Goto(&if_notequal);

          BIND(&if_right_symbol);
          {
            CombineFeedback(var_type_feedback,
                            CompareOperationFeedback::kSymbol);
            Goto(&if_notequal);
          }
        } else {
          Goto(&if_notequal);
        }

        BIND(&if_right_receiver);
        {
          // {left} is a Primitive and {right} is a JSReceiver, so swapping
          // the order is not observable.
          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kAny));
          }
          Goto(&use_symmetry);
        }
      }

      BIND(&if_left_receiver);
      {
        CSA_ASSERT(this, IsJSReceiverInstanceType(left_type));
        Label if_right_receiver(this), if_right_not_receiver(this);
        Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
               &if_right_not_receiver);

        BIND(&if_right_receiver);
        {
          // {left} and {right} are different JSReceiver references.
          CombineFeedback(var_type_feedback,
                          CompareOperationFeedback::kReceiver);
          Goto(&if_notequal);
        }

        BIND(&if_right_not_receiver);
        {
          // Check if {right} is undetectable, which means it must be Null
          // or Undefined, since we already ruled out Receiver for {right}.
          Label if_right_undetectable(this),
              if_right_not_undetectable(this, Label::kDeferred);
          Branch(IsUndetectableMap(right_map), &if_right_undetectable,
                 &if_right_not_undetectable);

          BIND(&if_right_undetectable);
          {
            // When we get here, {right} must be either Null or Undefined.
            CSA_ASSERT(this, IsNullOrUndefined(right));
            if (var_type_feedback != nullptr) {
              var_type_feedback->Bind(SmiConstant(
                  CompareOperationFeedback::kReceiverOrNullOrUndefined));
            }
            // An undetectable receiver (e.g. document.all) equals
            // Null/Undefined; an ordinary receiver does not.
            Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
          }

          BIND(&if_right_not_undetectable);
          {
            // {right} is a Primitive, and neither Null or Undefined;
            // convert {left} to Primitive too.
            if (var_type_feedback != nullptr) {
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kAny));
            }
            Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
            var_left.Bind(CallStub(callable, context, left));
            Goto(&loop);
          }
        }
      }
    }

    BIND(&do_right_stringtonumber);
    {
      // Convert the String {right} to a Number and restart the loop.
      var_right.Bind(CallBuiltin(Builtins::kStringToNumber, context, right));
      Goto(&loop);
    }

    BIND(&use_symmetry);
    {
      // Swap the operands and take another pass through the loop; the cases
      // routed here are symmetric, so the swap is not observable.
      var_left.Bind(right);
      var_right.Bind(left);
      Goto(&loop);
    }
  }

  BIND(&do_float_comparison);
  {
    // Both sides have been loaded into Float64 variables; compare directly.
    Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
           &if_equal, &if_notequal);
  }

  BIND(&if_equal);
  {
    result.Bind(TrueConstant());
    Goto(&end);
  }

  BIND(&if_notequal);
  {
    result.Bind(FalseConstant());
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}
12042 :
// Emits code for the JavaScript strict equality comparison `lhs === rhs` and
// returns the resulting Boolean as a Node. No conversions are performed
// (hence no context parameter). When {var_type_feedback} is non-null, the
// observed CompareOperationFeedback is stored into it as a side effect.
Node* CodeStubAssembler::StrictEqual(Node* lhs, Node* rhs,
                                     Variable* var_type_feedback) {
  // Pseudo-code for the algorithm below:
  //
  // if (lhs == rhs) {
  //   if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
  //   return true;
  // }
  // if (!lhs->IsSmi()) {
  //   if (lhs->IsHeapNumber()) {
  //     if (rhs->IsSmi()) {
  //       return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
  //     } else if (rhs->IsHeapNumber()) {
  //       return HeapNumber::cast(rhs)->value() ==
  //       HeapNumber::cast(lhs)->value();
  //     } else {
  //       return false;
  //     }
  //   } else {
  //     if (rhs->IsSmi()) {
  //       return false;
  //     } else {
  //       if (lhs->IsString()) {
  //         if (rhs->IsString()) {
  //           return %StringEqual(lhs, rhs);
  //         } else {
  //           return false;
  //         }
  //       } else if (lhs->IsBigInt()) {
  //         if (rhs->IsBigInt()) {
  //           return %BigIntEqualToBigInt(lhs, rhs);
  //         } else {
  //           return false;
  //         }
  //       } else {
  //         return false;
  //       }
  //     }
  //   }
  // } else {
  //   if (rhs->IsSmi()) {
  //     return false;
  //   } else {
  //     if (rhs->IsHeapNumber()) {
  //       return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
  //     } else {
  //       return false;
  //     }
  //   }
  // }

  Label if_equal(this), if_notequal(this), end(this);
  VARIABLE(result, MachineRepresentation::kTagged);

  // Check if {lhs} and {rhs} refer to the same object.
  Label if_same(this), if_notsame(this);
  Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);

  BIND(&if_same);
  {
    // The {lhs} and {rhs} reference the exact same value, yet we need special
    // treatment for HeapNumber, as NaN is not equal to NaN.
    if (var_type_feedback != nullptr) {
      var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
    }
    GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
  }

  BIND(&if_notsame);
  {
    // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
    // BigInt and String they can still be considered equal.

    if (var_type_feedback != nullptr) {
      var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
    }

    // Check if {lhs} is a Smi or a HeapObject.
    Label if_lhsissmi(this), if_lhsisnotsmi(this);
    Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);

    BIND(&if_lhsisnotsmi);
    {
      // Load the map of {lhs}.
      Node* lhs_map = LoadMap(lhs);

      // Check if {lhs} is a HeapNumber.
      Label if_lhsisnumber(this), if_lhsisnotnumber(this);
      Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);

      BIND(&if_lhsisnumber);
      {
        // Check if {rhs} is a Smi or a HeapObject.
        Label if_rhsissmi(this), if_rhsisnotsmi(this);
        Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

        BIND(&if_rhsissmi);
        {
          // Convert {lhs} and {rhs} to floating point values.
          Node* lhs_value = LoadHeapNumberValue(lhs);
          Node* rhs_value = SmiToFloat64(rhs);

          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kNumber));
          }

          // Perform a floating point comparison of {lhs} and {rhs}.
          Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
        }

        BIND(&if_rhsisnotsmi);
        {
          // Load the map of {rhs}.
          Node* rhs_map = LoadMap(rhs);

          // Check if {rhs} is also a HeapNumber.
          Label if_rhsisnumber(this), if_rhsisnotnumber(this);
          Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);

          BIND(&if_rhsisnumber);
          {
            // Convert {lhs} and {rhs} to floating point values.
            Node* lhs_value = LoadHeapNumberValue(lhs);
            Node* rhs_value = LoadHeapNumberValue(rhs);

            if (var_type_feedback != nullptr) {
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kNumber));
            }

            // Perform a floating point comparison of {lhs} and {rhs}.
            Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
          }

          BIND(&if_rhsisnotnumber);
          Goto(&if_notequal);
        }
      }

      BIND(&if_lhsisnotnumber);
      {
        // Check if {rhs} is a Smi or a HeapObject.
        Label if_rhsissmi(this), if_rhsisnotsmi(this);
        Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

        BIND(&if_rhsissmi);
        Goto(&if_notequal);

        BIND(&if_rhsisnotsmi);
        {
          // Load the instance type of {lhs}.
          Node* lhs_instance_type = LoadMapInstanceType(lhs_map);

          // Check if {lhs} is a String.
          Label if_lhsisstring(this), if_lhsisnotstring(this);
          Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
                 &if_lhsisnotstring);

          BIND(&if_lhsisstring);
          {
            // Load the instance type of {rhs}.
            Node* rhs_instance_type = LoadInstanceType(rhs);

            // Check if {rhs} is also a String.
            Label if_rhsisstring(this, Label::kDeferred),
                if_rhsisnotstring(this);
            Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
                   &if_rhsisnotstring);

            BIND(&if_rhsisstring);
            {
              if (var_type_feedback != nullptr) {
                TNode<Smi> lhs_feedback =
                    CollectFeedbackForString(lhs_instance_type);
                TNode<Smi> rhs_feedback =
                    CollectFeedbackForString(rhs_instance_type);
                var_type_feedback->Bind(SmiOr(lhs_feedback, rhs_feedback));
              }
              // Defer to the StringEqual builtin for a content comparison.
              result.Bind(CallBuiltin(Builtins::kStringEqual,
                                      NoContextConstant(), lhs, rhs));
              Goto(&end);
            }

            BIND(&if_rhsisnotstring);
            Goto(&if_notequal);
          }

          BIND(&if_lhsisnotstring);

          // Check if {lhs} is a BigInt.
          Label if_lhsisbigint(this), if_lhsisnotbigint(this);
          Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
                 &if_lhsisnotbigint);

          BIND(&if_lhsisbigint);
          {
            // Load the instance type of {rhs}.
            Node* rhs_instance_type = LoadInstanceType(rhs);

            // Check if {rhs} is also a BigInt.
            Label if_rhsisbigint(this, Label::kDeferred),
                if_rhsisnotbigint(this);
            Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
                   &if_rhsisnotbigint);

            BIND(&if_rhsisbigint);
            {
              if (var_type_feedback != nullptr) {
                var_type_feedback->Bind(
                    SmiConstant(CompareOperationFeedback::kBigInt));
              }
              result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
                                      NoContextConstant(), lhs, rhs));
              Goto(&end);
            }

            BIND(&if_rhsisnotbigint);
            Goto(&if_notequal);
          }

          BIND(&if_lhsisnotbigint);
          // From here on the result is always "not equal"; the remaining code
          // only refines the collected type feedback.
          if (var_type_feedback != nullptr) {
            // Load the instance type of {rhs}.
            Node* rhs_map = LoadMap(rhs);
            Node* rhs_instance_type = LoadMapInstanceType(rhs_map);

            Label if_lhsissymbol(this), if_lhsisreceiver(this),
                if_lhsisoddball(this);
            GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
                   &if_lhsisreceiver);
            GotoIf(IsBooleanMap(lhs_map), &if_notequal);
            GotoIf(IsOddballInstanceType(lhs_instance_type), &if_lhsisoddball);
            Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
                   &if_notequal);

            BIND(&if_lhsisreceiver);
            {
              // Narrow the feedback step by step based on what {rhs} is.
              GotoIf(IsBooleanMap(rhs_map), &if_notequal);
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kReceiver));
              GotoIf(IsJSReceiverInstanceType(rhs_instance_type), &if_notequal);
              var_type_feedback->Bind(SmiConstant(
                  CompareOperationFeedback::kReceiverOrNullOrUndefined));
              GotoIf(IsOddballInstanceType(rhs_instance_type), &if_notequal);
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kAny));
              Goto(&if_notequal);
            }

            BIND(&if_lhsisoddball);
            {
              STATIC_ASSERT(LAST_PRIMITIVE_TYPE == ODDBALL_TYPE);
              GotoIf(IsBooleanMap(rhs_map), &if_notequal);
              GotoIf(
                  Int32LessThan(rhs_instance_type, Int32Constant(ODDBALL_TYPE)),
                  &if_notequal);
              var_type_feedback->Bind(SmiConstant(
                  CompareOperationFeedback::kReceiverOrNullOrUndefined));
              Goto(&if_notequal);
            }

            BIND(&if_lhsissymbol);
            {
              GotoIfNot(IsSymbolInstanceType(rhs_instance_type), &if_notequal);
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kSymbol));
              Goto(&if_notequal);
            }
          } else {
            Goto(&if_notequal);
          }
        }
      }
    }

    BIND(&if_lhsissmi);
    {
      // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
      // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
      // HeapNumber with an equal floating point value.

      // Check if {rhs} is a Smi or a HeapObject.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsissmi);
      if (var_type_feedback != nullptr) {
        var_type_feedback->Bind(
            SmiConstant(CompareOperationFeedback::kSignedSmall));
      }
      Goto(&if_notequal);

      BIND(&if_rhsisnotsmi);
      {
        // Load the map of the {rhs}.
        Node* rhs_map = LoadMap(rhs);

        // The {rhs} could be a HeapNumber with the same value as {lhs}.
        Label if_rhsisnumber(this), if_rhsisnotnumber(this);
        Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);

        BIND(&if_rhsisnumber);
        {
          // Convert {lhs} and {rhs} to floating point values.
          Node* lhs_value = SmiToFloat64(lhs);
          Node* rhs_value = LoadHeapNumberValue(rhs);

          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kNumber));
          }

          // Perform a floating point comparison of {lhs} and {rhs}.
          Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
        }

        BIND(&if_rhsisnotnumber);
        Goto(&if_notequal);
      }
    }
  }

  BIND(&if_equal);
  {
    result.Bind(TrueConstant());
    Goto(&end);
  }

  BIND(&if_notequal);
  {
    result.Bind(FalseConstant());
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}
12381 :
12382 : // ECMA#sec-samevalue
12383 : // This algorithm differs from the Strict Equality Comparison Algorithm in its
12384 : // treatment of signed zeroes and NaNs.
// Emits a branch implementing the SameValue algorithm: jumps to {if_true}
// when {lhs} and {rhs} are the same value, to {if_false} otherwise.
// Unlike strict equality, SameValue treats two NaNs as equal and
// distinguishes -0.0 from +0.0.
void CodeStubAssembler::BranchIfSameValue(Node* lhs, Node* rhs, Label* if_true,
                                          Label* if_false) {
  // Holds the float64 operands for the shared floating-point comparison
  // performed at {do_fcmp}; every numeric path funnels into it.
  VARIABLE(var_lhs_value, MachineRepresentation::kFloat64);
  VARIABLE(var_rhs_value, MachineRepresentation::kFloat64);
  Label do_fcmp(this);

  // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
  // StrictEqual - SameValue considers two NaNs to be equal.
  GotoIf(WordEqual(lhs, rhs), if_true);

  // Check if the {lhs} is a Smi.
  Label if_lhsissmi(this), if_lhsisheapobject(this);
  Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);

  BIND(&if_lhsissmi);
  {
    // Since {lhs} is a Smi, the comparison can only yield true
    // iff the {rhs} is a HeapNumber with the same float64 value.
    // (Two equal Smis would already have been caught by the WordEqual
    // fast path above.)
    Branch(TaggedIsSmi(rhs), if_false, [&] {
      GotoIfNot(IsHeapNumber(rhs), if_false);
      var_lhs_value.Bind(SmiToFloat64(lhs));
      var_rhs_value.Bind(LoadHeapNumberValue(rhs));
      Goto(&do_fcmp);
    });
  }

  BIND(&if_lhsisheapobject);
  {
    // Check if the {rhs} is a Smi.
    Branch(TaggedIsSmi(rhs),
           [&] {
             // Since {rhs} is a Smi, the comparison can only yield true
             // iff the {lhs} is a HeapNumber with the same float64 value.
             GotoIfNot(IsHeapNumber(lhs), if_false);
             var_lhs_value.Bind(LoadHeapNumberValue(lhs));
             var_rhs_value.Bind(SmiToFloat64(rhs));
             Goto(&do_fcmp);
           },
           [&] {
             // Now this can only yield true if either both {lhs} and {rhs} are
             // HeapNumbers with the same value, or both are Strings with the
             // same character sequence, or both are BigInts with the same
             // value.
             Label if_lhsisheapnumber(this), if_lhsisstring(this),
                 if_lhsisbigint(this);
             Node* const lhs_map = LoadMap(lhs);
             GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
             Node* const lhs_instance_type = LoadMapInstanceType(lhs_map);
             GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
             Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
                    if_false);

             BIND(&if_lhsisheapnumber);
             {
               GotoIfNot(IsHeapNumber(rhs), if_false);
               var_lhs_value.Bind(LoadHeapNumberValue(lhs));
               var_rhs_value.Bind(LoadHeapNumberValue(rhs));
               Goto(&do_fcmp);
             }

             BIND(&if_lhsisstring);
             {
               // Now we can only yield true if {rhs} is also a String
               // with the same sequence of characters.
               GotoIfNot(IsString(rhs), if_false);
               Node* const result = CallBuiltin(Builtins::kStringEqual,
                                                NoContextConstant(), lhs, rhs);
               Branch(IsTrue(result), if_true, if_false);
             }

             BIND(&if_lhsisbigint);
             {
               // BigInt equality requires a runtime call; there is no
               // builtin fast path here.
               GotoIfNot(IsBigInt(rhs), if_false);
               Node* const result = CallRuntime(Runtime::kBigIntEqualToBigInt,
                                                NoContextConstant(), lhs, rhs);
               Branch(IsTrue(result), if_true, if_false);
             }
           });
  }

  BIND(&do_fcmp);
  {
    Node* const lhs_value = var_lhs_value.value();
    Node* const rhs_value = var_rhs_value.value();

    Label if_equal(this), if_notequal(this);
    Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);

    BIND(&if_equal);
    {
      // We still need to handle the case when {lhs} and {rhs} are -0.0 and
      // 0.0 (or vice versa). Compare the high word to
      // distinguish between the two.
      Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_value);
      Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_value);

      // If x is +0 and y is -0, return false.
      // If x is -0 and y is +0, return false.
      Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
    }

    BIND(&if_notequal);
    {
      // Return true iff both {rhs} and {lhs} are NaN.
      // (x != x is the standard NaN test.)
      GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
      Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
    }
  }
}
12494 :
// Implements the HasProperty lookup (used by `in`, for-in, etc.): walks the
// prototype chain of {object} looking for {key}, returning the True or False
// oddball. {mode} selects how proxies and bailouts are handled:
// kHasProperty uses the ProxyHasProperty builtin / kHasProperty runtime
// fallback, while kForInHasProperty always defers to the for-in runtime.
TNode<Oddball> CodeStubAssembler::HasProperty(SloppyTNode<Context> context,
                                              SloppyTNode<Object> object,
                                              SloppyTNode<Object> key,
                                              HasPropertyLookupMode mode) {
  Label call_runtime(this, Label::kDeferred), return_true(this),
      return_false(this), end(this), if_proxy(this, Label::kDeferred);

  // Named-property probe for a single holder on the prototype chain;
  // jumps to {return_true} on a hit, {next_holder} to keep walking, or
  // {if_bailout} for slow cases.
  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
      [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
                           Node* holder_instance_type, Node* unique_name,
                           Label* next_holder, Label* if_bailout) {
        TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
                          &return_true, next_holder, if_bailout);
      };

  // Indexed-element probe for a single holder; unlike the named probe it can
  // also conclusively answer "absent" ({return_false}).
  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
      [this, &return_true, &return_false](
          Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* index, Label* next_holder,
          Label* if_bailout) {
        TryLookupElement(holder, holder_map, holder_instance_type, index,
                         &return_true, &return_false, next_holder, if_bailout);
      };

  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &return_false,
                          &call_runtime, &if_proxy);

  TVARIABLE(Oddball, result);

  BIND(&if_proxy);
  {
    TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
    switch (mode) {
      case kHasProperty:
        // Private symbols are never observable through a proxy.
        GotoIf(IsPrivateSymbol(name), &return_false);

        result = CAST(
            CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
        Goto(&end);
        break;
      case kForInHasProperty:
        Goto(&call_runtime);
        break;
    }
  }

  BIND(&return_true);
  {
    result = TrueConstant();
    Goto(&end);
  }

  BIND(&return_false);
  {
    result = FalseConstant();
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    // Slow path: pick the runtime function matching {mode}.
    Runtime::FunctionId fallback_runtime_function_id;
    switch (mode) {
      case kHasProperty:
        fallback_runtime_function_id = Runtime::kHasProperty;
        break;
      case kForInHasProperty:
        fallback_runtime_function_id = Runtime::kForInHasProperty;
        break;
    }

    result =
        CAST(CallRuntime(fallback_runtime_function_id, context, object, key));
    Goto(&end);
  }

  BIND(&end);
  CSA_ASSERT(this, IsBoolean(result.value()));
  return result.value();
}
12575 :
// Emits code computing the result of the `typeof` operator for {value},
// returning one of the canonical interned strings ("number", "function",
// "undefined", "object", "string", "bigint", "symbol") or, for oddballs,
// the string stored in the oddball's type_of field.
Node* CodeStubAssembler::Typeof(Node* value) {
  VARIABLE(result_var, MachineRepresentation::kTagged);

  Label return_number(this, Label::kDeferred), if_oddball(this),
      return_function(this), return_undefined(this), return_object(this),
      return_string(this), return_bigint(this), return_result(this);

  // Smis are numbers; check this before any map load.
  GotoIf(TaggedIsSmi(value), &return_number);

  Node* map = LoadMap(value);

  GotoIf(IsHeapNumberMap(map), &return_number);

  Node* instance_type = LoadMapInstanceType(map);

  // Oddballs (undefined, null, booleans, ...) carry their own typeof string.
  GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);

  // Callable and undetectable bits decide between "function", "undefined"
  // (document.all-style undetectables) and plain objects.
  Node* callable_or_undetectable_mask = Word32And(
      LoadMapBitField(map),
      Int32Constant(Map::IsCallableBit::kMask | Map::IsUndetectableBit::kMask));

  // Callable and NOT undetectable => "function".
  GotoIf(Word32Equal(callable_or_undetectable_mask,
                     Int32Constant(Map::IsCallableBit::kMask)),
         &return_function);

  // Any remaining set bit means undetectable => "undefined".
  GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
            &return_undefined);

  GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);

  GotoIf(IsStringInstanceType(instance_type), &return_string);

  GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);

  // By elimination, only symbols remain.
  CSA_ASSERT(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE));
  result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
  Goto(&return_result);

  BIND(&return_number);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
    Goto(&return_result);
  }

  BIND(&if_oddball);
  {
    Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
    result_var.Bind(type);
    Goto(&return_result);
  }

  BIND(&return_function);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
    Goto(&return_result);
  }

  BIND(&return_undefined);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
    Goto(&return_result);
  }

  BIND(&return_object);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
    Goto(&return_result);
  }

  BIND(&return_string);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
    Goto(&return_result);
  }

  BIND(&return_bigint);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->bigint_string()));
    Goto(&return_result);
  }

  BIND(&return_result);
  return result_var.value();
}
12660 :
// Returns the [[Prototype]] of {active_function} for use as the super
// constructor, throwing TypeError (via the runtime) when that prototype
// is not a constructor.
TNode<Object> CodeStubAssembler::GetSuperConstructor(
    SloppyTNode<Context> context, SloppyTNode<JSFunction> active_function) {
  Label is_not_constructor(this, Label::kDeferred), out(this);
  TVARIABLE(Object, result);

  TNode<Map> map = LoadMap(active_function);
  TNode<Object> prototype = LoadMapPrototype(map);
  TNode<Map> prototype_map = LoadMap(CAST(prototype));
  // The constructor bit lives on the prototype's map.
  GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);

  result = prototype;
  Goto(&out);

  BIND(&is_not_constructor);
  {
    // The runtime call throws; this block never falls through.
    CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
                active_function);
    Unreachable();
  }

  BIND(&out);
  return result.value();
}
12684 :
// ES#sec-speciesconstructor: determines the constructor to use for creating
// derived objects from {object}, falling back to {default_constructor} when
// no species is specified. Throws TypeError for malformed constructors or a
// non-constructor species.
TNode<JSReceiver> CodeStubAssembler::SpeciesConstructor(
    SloppyTNode<Context> context, SloppyTNode<Object> object,
    SloppyTNode<JSReceiver> default_constructor) {
  Isolate* isolate = this->isolate();
  // Steps 1/3/6 default: the result starts out as the default constructor.
  TVARIABLE(JSReceiver, var_result, default_constructor);

  // 2. Let C be ? Get(O, "constructor").
  TNode<Object> constructor =
      GetProperty(context, object, isolate->factory()->constructor_string());

  // 3. If C is undefined, return defaultConstructor.
  Label out(this);
  GotoIf(IsUndefined(constructor), &out);

  // 4. If Type(C) is not Object, throw a TypeError exception.
  ThrowIfNotJSReceiver(context, constructor,
                       MessageTemplate::kConstructorNotReceiver);

  // 5. Let S be ? Get(C, @@species).
  TNode<Object> species =
      GetProperty(context, constructor, isolate->factory()->species_symbol());

  // 6. If S is either undefined or null, return defaultConstructor.
  GotoIf(IsNullOrUndefined(species), &out);

  // 7. If IsConstructor(S) is true, return S.
  Label throw_error(this);
  GotoIf(TaggedIsSmi(species), &throw_error);
  GotoIfNot(IsConstructorMap(LoadMap(CAST(species))), &throw_error);
  var_result = CAST(species);
  Goto(&out);

  // 8. Throw a TypeError exception.
  BIND(&throw_error);
  ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);

  BIND(&out);
  return var_result.value();
}
12724 :
// ES#sec-instanceofoperator: computes `object instanceof callable` and
// returns the True/False oddball. Throws TypeError when {callable} is not
// a receiver or not callable.
Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
                                    Node* context) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label if_notcallable(this, Label::kDeferred),
      if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
      if_nohandler(this, Label::kDeferred), return_true(this),
      return_false(this), return_result(this, &var_result);

  // Ensure that the {callable} is actually a JSReceiver.
  GotoIf(TaggedIsSmi(callable), &if_notreceiver);
  GotoIfNot(IsJSReceiver(callable), &if_notreceiver);

  // Load the @@hasInstance property from {callable}.
  Node* inst_of_handler =
      GetProperty(context, callable, HasInstanceSymbolConstant());

  // Optimize for the likely case where {inst_of_handler} is the builtin
  // Function.prototype[@@hasInstance] method, and emit a direct call in
  // that case without any additional checking.
  Node* native_context = LoadNativeContext(context);
  Node* function_has_instance =
      LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
  GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
            &if_otherhandler);
  {
    // Call to Function.prototype[@@hasInstance] directly.
    // The builtin already returns a Boolean, so no conversion is needed.
    Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance),
                     CallTrampolineDescriptor{});
    Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_otherhandler);
  {
    // Check if there's actually an {inst_of_handler}.
    GotoIf(IsNull(inst_of_handler), &if_nohandler);
    GotoIf(IsUndefined(inst_of_handler), &if_nohandler);

    // Call the {inst_of_handler} for {callable} and {object}.
    Node* result = CallJS(
        CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
        context, inst_of_handler, callable, object);

    // Convert the {result} to a Boolean.
    BranchIfToBooleanIsTrue(result, &return_true, &return_false);
  }

  BIND(&if_nohandler);
  {
    // Ensure that the {callable} is actually Callable.
    GotoIfNot(IsCallable(callable), &if_notcallable);

    // Use the OrdinaryHasInstance algorithm.
    Node* result =
        CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_notcallable);
  { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }

  BIND(&if_notreceiver);
  { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }

  BIND(&return_true);
  var_result.Bind(TrueConstant());
  Goto(&return_result);

  BIND(&return_false);
  var_result.Bind(FalseConstant());
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
12802 :
// Returns {value} + 1 as a Number. Tries overflow-checked Smi addition
// first; Smi overflow and HeapNumber inputs fall back to a float64 add
// that allocates a fresh HeapNumber.
TNode<Number> CodeStubAssembler::NumberInc(SloppyTNode<Number> value) {
  TVARIABLE(Number, var_result);
  TVARIABLE(Float64T, var_finc_value);
  Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);

  BIND(&if_issmi);
  {
    Label if_overflow(this);
    TNode<Smi> smi_value = CAST(value);
    TNode<Smi> one = SmiConstant(1);
    var_result = TrySmiAdd(smi_value, one, &if_overflow);
    Goto(&end);

    BIND(&if_overflow);
    {
      // Smi + 1 overflowed; redo the increment in float64.
      var_finc_value = SmiToFloat64(smi_value);
      Goto(&do_finc);
    }
  }

  BIND(&if_isnotsmi);
  {
    // A Number that is not a Smi must be a HeapNumber.
    TNode<HeapNumber> heap_number_value = CAST(value);

    // Load the HeapNumber value.
    var_finc_value = LoadHeapNumberValue(heap_number_value);
    Goto(&do_finc);
  }

  BIND(&do_finc);
  {
    TNode<Float64T> finc_value = var_finc_value.value();
    TNode<Float64T> one = Float64Constant(1.0);
    TNode<Float64T> finc_result = Float64Add(finc_value, one);
    var_result = AllocateHeapNumberWithValue(finc_result);
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
12845 :
// Returns {value} - 1 as a Number. Mirrors NumberInc: overflow-checked
// Smi subtraction first, float64 fallback (implemented as adding -1.0)
// for Smi overflow and HeapNumber inputs.
TNode<Number> CodeStubAssembler::NumberDec(SloppyTNode<Number> value) {
  TVARIABLE(Number, var_result);
  TVARIABLE(Float64T, var_fdec_value);
  Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);

  BIND(&if_issmi);
  {
    TNode<Smi> smi_value = CAST(value);
    TNode<Smi> one = SmiConstant(1);
    Label if_overflow(this);
    var_result = TrySmiSub(smi_value, one, &if_overflow);
    Goto(&end);

    BIND(&if_overflow);
    {
      // Smi - 1 overflowed; redo the decrement in float64.
      var_fdec_value = SmiToFloat64(smi_value);
      Goto(&do_fdec);
    }
  }

  BIND(&if_isnotsmi);
  {
    // A Number that is not a Smi must be a HeapNumber.
    TNode<HeapNumber> heap_number_value = CAST(value);

    // Load the HeapNumber value.
    var_fdec_value = LoadHeapNumberValue(heap_number_value);
    Goto(&do_fdec);
  }

  BIND(&do_fdec);
  {
    TNode<Float64T> fdec_value = var_fdec_value.value();
    TNode<Float64T> minus_one = Float64Constant(-1.0);
    TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
    var_result = AllocateHeapNumberWithValue(fdec_result);
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
12888 :
// Returns {a} + {b} for two Numbers: overflow-checked Smi addition on the
// fast path, float64 addition (with tagging/allocation as needed) when
// either operand is a HeapNumber or the Smi add overflows.
TNode<Number> CodeStubAssembler::NumberAdd(SloppyTNode<Number> a,
                                           SloppyTNode<Number> b) {
  TVARIABLE(Number, var_result);
  Label float_add(this, Label::kDeferred), end(this);
  GotoIf(TaggedIsNotSmi(a), &float_add);
  GotoIf(TaggedIsNotSmi(b), &float_add);

  // Try fast Smi addition first.
  var_result = TrySmiAdd(CAST(a), CAST(b), &float_add);
  Goto(&end);

  BIND(&float_add);
  {
    var_result = ChangeFloat64ToTagged(
        Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
12910 :
// Returns {a} - {b} for two Numbers; structure mirrors NumberAdd with a
// Smi fast path and a deferred float64 slow path.
TNode<Number> CodeStubAssembler::NumberSub(SloppyTNode<Number> a,
                                           SloppyTNode<Number> b) {
  TVARIABLE(Number, var_result);
  Label float_sub(this, Label::kDeferred), end(this);
  GotoIf(TaggedIsNotSmi(a), &float_sub);
  GotoIf(TaggedIsNotSmi(b), &float_sub);

  // Try fast Smi subtraction first.
  var_result = TrySmiSub(CAST(a), CAST(b), &float_sub);
  Goto(&end);

  BIND(&float_sub);
  {
    var_result = ChangeFloat64ToTagged(
        Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
12932 :
// Jumps to {is_not_number} unless {input} is a Number (Smi or HeapNumber);
// falls through otherwise.
void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
  Label is_number(this);
  // The Smi check must come first: IsHeapNumber loads a map and is only
  // safe on heap objects.
  GotoIf(TaggedIsSmi(input), &is_number);
  Branch(IsHeapNumber(input), &is_number, is_not_number);
  BIND(&is_number);
}
12939 :
// Jumps to {is_number} when {input} is a Number (Smi or HeapNumber);
// falls through otherwise. Smi check precedes the map-based check.
void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
  GotoIf(TaggedIsSmi(input), is_number);
  GotoIf(IsHeapNumber(input), is_number);
}
12944 :
12945 2352 : TNode<Number> CodeStubAssembler::BitwiseOp(Node* left32, Node* right32,
12946 : Operation bitwise_op) {
12947 2352 : switch (bitwise_op) {
12948 : case Operation::kBitwiseAnd:
12949 392 : return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
12950 : case Operation::kBitwiseOr:
12951 392 : return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
12952 : case Operation::kBitwiseXor:
12953 392 : return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
12954 : case Operation::kShiftLeft:
12955 392 : if (!Word32ShiftIsSafe()) {
12956 0 : right32 = Word32And(right32, Int32Constant(0x1F));
12957 : }
12958 392 : return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
12959 : case Operation::kShiftRight:
12960 392 : if (!Word32ShiftIsSafe()) {
12961 0 : right32 = Word32And(right32, Int32Constant(0x1F));
12962 : }
12963 392 : return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
12964 : case Operation::kShiftRightLogical:
12965 392 : if (!Word32ShiftIsSafe()) {
12966 0 : right32 = Word32And(right32, Int32Constant(0x1F));
12967 : }
12968 392 : return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
12969 : default:
12970 0 : break;
12971 : }
12972 0 : UNREACHABLE();
12973 : }
12974 :
12975 : // ES #sec-createarrayiterator
// ES#sec-createarrayiterator: allocates and initializes a JSArrayIterator
// over {object} with the given iteration {kind} (keys/values/entries),
// starting at index 0.
TNode<JSArrayIterator> CodeStubAssembler::CreateArrayIterator(
    TNode<Context> context, TNode<Object> object, IterationKind kind) {
  TNode<Context> native_context = LoadNativeContext(context);
  TNode<Map> iterator_map = CAST(LoadContextElement(
      native_context, Context::INITIAL_ARRAY_ITERATOR_MAP_INDEX));
  Node* iterator = Allocate(JSArrayIterator::kSize);
  StoreMapNoWriteBarrier(iterator, iterator_map);
  // Freshly allocated object: write barriers can be skipped for all
  // initializing stores below.
  StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kIteratedObjectOffset, object);
  StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
                                 SmiConstant(0));
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kKindOffset,
      SmiConstant(Smi::FromInt(static_cast<int>(kind))));
  return CAST(iterator);
}
12996 :
// Allocates a JSIteratorResult ({value, done}) object. {done} must be the
// True or False oddball (asserted below).
Node* CodeStubAssembler::AllocateJSIteratorResult(Node* context, Node* value,
                                                  Node* done) {
  CSA_ASSERT(this, IsBoolean(done));
  Node* native_context = LoadNativeContext(context);
  Node* map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  Node* result = Allocate(JSIteratorResult::kSize);
  StoreMapNoWriteBarrier(result, map);
  // Freshly allocated object: initializing stores need no write barrier.
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
  return result;
}
13013 :
// Allocates a JSIteratorResult whose value is the two-element array
// [key, value] and whose done flag is false (the shape produced by
// entries() iterators). The FixedArray backing store, the JSArray and the
// JSIteratorResult are folded into one contiguous allocation, then carved
// up with InnerAllocate.
Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
                                                          Node* key,
                                                          Node* value) {
  Node* native_context = LoadNativeContext(context);
  Node* length = SmiConstant(2);
  int const elements_size = FixedArray::SizeFor(2);
  // One allocation covering elements + array + iterator result.
  TNode<FixedArray> elements = UncheckedCast<FixedArray>(
      Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize));
  // Initialize the FixedArray [key, value] at the start of the block.
  StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
                       RootIndex::kFixedArrayMap);
  StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
  StoreFixedArrayElement(elements, 0, key);
  StoreFixedArrayElement(elements, 1, value);
  // The JSArray follows immediately after the elements.
  Node* array_map = LoadContextElement(
      native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX);
  TNode<HeapObject> array = InnerAllocate(elements, elements_size);
  StoreMapNoWriteBarrier(array, array_map);
  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
  // Finally the JSIteratorResult wrapping the array, with done == false.
  Node* iterator_map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  TNode<HeapObject> result = InnerAllocate(array, JSArray::kSize);
  StoreMapNoWriteBarrier(result, iterator_map);
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
  StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
                       RootIndex::kFalseValue);
  return result;
}
13048 :
13049 224 : TNode<JSReceiver> CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
13050 : TNode<Object> o,
13051 : TNode<Number> len) {
13052 : TNode<JSReceiver> constructor =
13053 224 : CAST(CallRuntime(Runtime::kArraySpeciesConstructor, context, o));
13054 224 : return Construct(context, constructor, len);
13055 : }
13056 :
13057 13612 : Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
13058 : CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
13059 13612 : TNode<Uint32T> buffer_bit_field = LoadJSArrayBufferBitField(CAST(buffer));
13060 13612 : return IsSetWord32<JSArrayBuffer::WasDetachedBit>(buffer_bit_field);
13061 : }
13062 :
// Throws a TypeError (kDetachedOperation, mentioning {method_name}) when
// {array_buffer} has been detached; otherwise falls through.
void CodeStubAssembler::ThrowIfArrayBufferIsDetached(
    SloppyTNode<Context> context, TNode<JSArrayBuffer> array_buffer,
    const char* method_name) {
  Label if_detached(this, Label::kDeferred), if_not_detached(this);
  Branch(IsDetachedBuffer(array_buffer), &if_detached, &if_not_detached);
  BIND(&if_detached);
  ThrowTypeError(context, MessageTemplate::kDetachedOperation, method_name);
  BIND(&if_not_detached);
}
13072 :
13073 952 : void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
13074 : SloppyTNode<Context> context, TNode<JSArrayBufferView> array_buffer_view,
13075 : const char* method_name) {
13076 952 : TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
13077 952 : ThrowIfArrayBufferIsDetached(context, buffer, method_name);
13078 952 : }
13079 :
13080 14116 : TNode<Uint32T> CodeStubAssembler::LoadJSArrayBufferBitField(
13081 : TNode<JSArrayBuffer> array_buffer) {
13082 14116 : return LoadObjectField<Uint32T>(array_buffer, JSArrayBuffer::kBitFieldOffset);
13083 : }
13084 :
13085 504 : TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStore(
13086 : TNode<JSArrayBuffer> array_buffer) {
13087 : return LoadObjectField<RawPtrT>(array_buffer,
13088 504 : JSArrayBuffer::kBackingStoreOffset);
13089 : }
13090 :
13091 2184 : TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(
13092 : TNode<JSArrayBufferView> array_buffer_view) {
13093 : return LoadObjectField<JSArrayBuffer>(array_buffer_view,
13094 2184 : JSArrayBufferView::kBufferOffset);
13095 : }
13096 :
13097 56 : TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteLength(
13098 : TNode<JSArrayBufferView> array_buffer_view) {
13099 : return LoadObjectField<UintPtrT>(array_buffer_view,
13100 56 : JSArrayBufferView::kByteLengthOffset);
13101 : }
13102 :
13103 616 : TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteOffset(
13104 : TNode<JSArrayBufferView> array_buffer_view) {
13105 : return LoadObjectField<UintPtrT>(array_buffer_view,
13106 616 : JSArrayBufferView::kByteOffsetOffset);
13107 : }
13108 :
13109 6052 : TNode<Smi> CodeStubAssembler::LoadJSTypedArrayLength(
13110 : TNode<JSTypedArray> typed_array) {
13111 6052 : return LoadObjectField<Smi>(typed_array, JSTypedArray::kLengthOffset);
13112 : }
13113 :
// Wraps access to the stack-passed arguments of a stub frame. {argc} is the
// argument count in {param_mode} representation; {fp} is the frame pointer
// (loaded from the assembler when null). Computes {arguments_}, the base
// address from which arguments are addressed downward.
CodeStubArguments::CodeStubArguments(
    CodeStubAssembler* assembler, Node* argc, Node* fp,
    CodeStubAssembler::ParameterMode param_mode, ReceiverMode receiver_mode)
    : assembler_(assembler),
      argc_mode_(param_mode),
      receiver_mode_(receiver_mode),
      argc_(argc),
      arguments_(),
      fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
  // Offset of the last argument relative to the frame pointer: argc slots
  // plus the fixed frame slots above fp (minus one, see the frame layout
  // constants).
  Node* offset = assembler_->ElementOffsetFromIndex(
      argc_, SYSTEM_POINTER_ELEMENTS, param_mode,
      (StandardFrameConstants::kFixedSlotCountAboveFp - 1) *
          kSystemPointerSize);
  arguments_ =
      assembler_->UncheckedCast<WordT>(assembler_->IntPtrAdd(fp_, offset));
}
13130 :
13131 5780 : TNode<Object> CodeStubArguments::GetReceiver() const {
13132 : DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
13133 : return assembler_->UncheckedCast<Object>(assembler_->LoadFullTagged(
13134 5780 : arguments_, assembler_->IntPtrConstant(kSystemPointerSize)));
13135 : }
13136 :
13137 224 : void CodeStubArguments::SetReceiver(TNode<Object> object) const {
13138 : DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
13139 : assembler_->StoreFullTaggedNoWriteBarrier(
13140 224 : arguments_, assembler_->IntPtrConstant(kSystemPointerSize), object);
13141 224 : }
13142 :
13143 11536 : TNode<WordT> CodeStubArguments::AtIndexPtr(
13144 : Node* index, CodeStubAssembler::ParameterMode mode) const {
13145 : typedef compiler::Node Node;
13146 : Node* negated_index = assembler_->IntPtrOrSmiSub(
13147 11536 : assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
13148 : Node* offset = assembler_->ElementOffsetFromIndex(
13149 11536 : negated_index, SYSTEM_POINTER_ELEMENTS, mode, 0);
13150 11536 : return assembler_->IntPtrAdd(assembler_->UncheckedCast<IntPtrT>(arguments_),
13151 23072 : offset);
13152 : }
13153 :
// Loads the tagged value of argument |index| (in |mode| representation).
// Asserts in debug builds that |index| is below the argument count.
TNode<Object> CodeStubArguments::AtIndex(
    Node* index, CodeStubAssembler::ParameterMode mode) const {
  DCHECK_EQ(argc_mode_, mode);
  CSA_ASSERT(assembler_,
             assembler_->UintPtrOrSmiLessThan(index, GetLength(mode), mode));
  return assembler_->UncheckedCast<Object>(
      assembler_->LoadFullTagged(AtIndexPtr(index, mode)));
}
13162 :
13163 5488 : TNode<Object> CodeStubArguments::AtIndex(int index) const {
13164 5488 : return AtIndex(assembler_->IntPtrConstant(index));
13165 : }
13166 :
// Returns argument |index| if it was passed, otherwise |default_value|.
// Emits a branch on index >= argc; both arms join at argument_done with the
// chosen value held in |result|.
TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
    int index, TNode<Object> default_value) {
  CodeStubAssembler::TVariable<Object> result(assembler_);
  CodeStubAssembler::Label argument_missing(assembler_),
      argument_done(assembler_, &result);

  // index >= argc means the caller did not pass this argument.
  assembler_->GotoIf(assembler_->UintPtrOrSmiGreaterThanOrEqual(
                         assembler_->IntPtrOrSmiConstant(index, argc_mode_),
                         argc_, argc_mode_),
                     &argument_missing);
  result = AtIndex(index);
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_missing);
  result = default_value;
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_done);
  return result.value();
}
13187 :
// Dynamic-index variant: returns argument |index| if present, otherwise
// |default_value|. The IntPtr index is converted to the arguments' parameter
// mode before the bounds comparison.
TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
    TNode<IntPtrT> index, TNode<Object> default_value) {
  CodeStubAssembler::TVariable<Object> result(assembler_);
  CodeStubAssembler::Label argument_missing(assembler_),
      argument_done(assembler_, &result);

  // index >= argc means the caller did not pass this argument.
  assembler_->GotoIf(
      assembler_->UintPtrOrSmiGreaterThanOrEqual(
          assembler_->IntPtrToParameter(index, argc_mode_), argc_, argc_mode_),
      &argument_missing);
  result = AtIndex(index);
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_missing);
  result = default_value;
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_done);
  return result.value();
}
13208 :
// Invokes |body| on each argument in [first, last). Defaults: first = 0,
// last = argc. Because arguments live at decreasing addresses below
// arguments_, both bounds are translated to addresses by SUBTRACTING the
// scaled index, and the loop advances by -kSystemPointerSize per iteration.
void CodeStubArguments::ForEach(
    const CodeStubAssembler::VariableList& vars,
    const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
    CodeStubAssembler::ParameterMode mode) {
  assembler_->Comment("CodeStubArguments::ForEach");
  if (first == nullptr) {
    first = assembler_->IntPtrOrSmiConstant(0, mode);
  }
  if (last == nullptr) {
    DCHECK_EQ(mode, argc_mode_);
    last = argc_;
  }
  // start/end are machine addresses of the first and one-past-last slots.
  Node* start = assembler_->IntPtrSub(
      assembler_->UncheckedCast<IntPtrT>(arguments_),
      assembler_->ElementOffsetFromIndex(first, SYSTEM_POINTER_ELEMENTS, mode));
  Node* end = assembler_->IntPtrSub(
      assembler_->UncheckedCast<IntPtrT>(arguments_),
      assembler_->ElementOffsetFromIndex(last, SYSTEM_POINTER_ELEMENTS, mode));
  assembler_->BuildFastLoop(
      vars, start, end,
      [this, &body](Node* current) {
        Node* arg = assembler_->Load(MachineType::AnyTagged(), current);
        body(arg);
      },
      -kSystemPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
      CodeStubAssembler::IndexAdvanceMode::kPost);
}
13236 :
// Returns |value| from the stub, popping all arguments off the stack. When a
// receiver is present it is popped too, hence the +1 on the pop count.
void CodeStubArguments::PopAndReturn(Node* value) {
  Node* pop_count;
  if (receiver_mode_ == ReceiverMode::kHasReceiver) {
    // Pop the receiver in addition to the argc arguments.
    pop_count = assembler_->IntPtrOrSmiAdd(
        argc_, assembler_->IntPtrOrSmiConstant(1, argc_mode_), argc_mode_);
  } else {
    pop_count = argc_;
  }

  assembler_->PopAndReturn(assembler_->ParameterToIntPtr(pop_count, argc_mode_),
                           value);
}
13249 :
13250 4088 : Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
13251 : STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
13252 : return Uint32LessThanOrEqual(elements_kind,
13253 4088 : Int32Constant(LAST_FAST_ELEMENTS_KIND));
13254 : }
13255 :
// True for PACKED_DOUBLE_ELEMENTS and HOLEY_DOUBLE_ELEMENTS. Since the two
// double kinds are adjacent with the packed one even (see the asserts),
// shifting out the low bit maps both to the same value, allowing a single
// comparison.
TNode<BoolT> CodeStubAssembler::IsDoubleElementsKind(
    TNode<Int32T> elements_kind) {
  STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
  STATIC_ASSERT((PACKED_DOUBLE_ELEMENTS & 1) == 0);
  STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS);
  return Word32Equal(Word32Shr(elements_kind, Int32Constant(1)),
                     Int32Constant(PACKED_DOUBLE_ELEMENTS / 2));
}
13264 :
13265 336 : Node* CodeStubAssembler::IsFastSmiOrTaggedElementsKind(Node* elements_kind) {
13266 : STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
13267 : STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
13268 : STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
13269 : return Uint32LessThanOrEqual(elements_kind,
13270 336 : Int32Constant(TERMINAL_FAST_ELEMENTS_KIND));
13271 : }
13272 :
13273 448 : Node* CodeStubAssembler::IsFastSmiElementsKind(Node* elements_kind) {
13274 : return Uint32LessThanOrEqual(elements_kind,
13275 448 : Int32Constant(HOLEY_SMI_ELEMENTS));
13276 : }
13277 :
// For fast kinds, the holey variant is always the packed variant with the
// low bit set (see asserts), so holey-ness is just a bit test.
Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
  CSA_ASSERT(this, IsFastElementsKind(elements_kind));

  STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
  STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
  return IsSetWord32(elements_kind, 1);
}
13286 :
13287 784 : Node* CodeStubAssembler::IsElementsKindGreaterThan(
13288 : Node* target_kind, ElementsKind reference_kind) {
13289 784 : return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
13290 : }
13291 :
13292 560 : TNode<BoolT> CodeStubAssembler::IsElementsKindLessThanOrEqual(
13293 : TNode<Int32T> target_kind, ElementsKind reference_kind) {
13294 560 : return Int32LessThanOrEqual(target_kind, Int32Constant(reference_kind));
13295 : }
13296 :
13297 396 : Node* CodeStubAssembler::IsDebugActive() {
13298 : Node* is_debug_active = Load(
13299 : MachineType::Uint8(),
13300 396 : ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
13301 396 : return Word32NotEqual(is_debug_active, Int32Constant(0));
13302 : }
13303 :
13304 2576 : TNode<BoolT> CodeStubAssembler::IsRuntimeCallStatsEnabled() {
13305 : TNode<Word32T> flag_value = UncheckedCast<Word32T>(Load(
13306 : MachineType::Int32(),
13307 2576 : ExternalConstant(ExternalReference::address_of_runtime_stats_flag())));
13308 2576 : return Word32NotEqual(flag_value, Int32Constant(0));
13309 : }
13310 :
13311 56 : Node* CodeStubAssembler::IsPromiseHookEnabled() {
13312 : Node* const promise_hook = Load(
13313 : MachineType::Pointer(),
13314 56 : ExternalConstant(ExternalReference::promise_hook_address(isolate())));
13315 56 : return WordNotEqual(promise_hook, IntPtrConstant(0));
13316 : }
13317 :
13318 224 : Node* CodeStubAssembler::HasAsyncEventDelegate() {
13319 : Node* const async_event_delegate =
13320 : Load(MachineType::Pointer(),
13321 : ExternalConstant(
13322 224 : ExternalReference::async_event_delegate_address(isolate())));
13323 224 : return WordNotEqual(async_event_delegate, IntPtrConstant(0));
13324 : }
13325 :
13326 924 : Node* CodeStubAssembler::IsPromiseHookEnabledOrHasAsyncEventDelegate() {
13327 : Node* const promise_hook_or_async_event_delegate =
13328 : Load(MachineType::Uint8(),
13329 : ExternalConstant(
13330 : ExternalReference::promise_hook_or_async_event_delegate_address(
13331 924 : isolate())));
13332 924 : return Word32NotEqual(promise_hook_or_async_event_delegate, Int32Constant(0));
13333 : }
13334 :
13335 1176 : Node* CodeStubAssembler::
13336 : IsPromiseHookEnabledOrDebugIsActiveOrHasAsyncEventDelegate() {
13337 : Node* const promise_hook_or_debug_is_active_or_async_event_delegate = Load(
13338 : MachineType::Uint8(),
13339 : ExternalConstant(
13340 : ExternalReference::
13341 : promise_hook_or_debug_is_active_or_async_event_delegate_address(
13342 1176 : isolate())));
13343 : return Word32NotEqual(promise_hook_or_debug_is_active_or_async_event_delegate,
13344 1176 : Int32Constant(0));
13345 : }
13346 :
// Loads the Code object for builtin |builtin_id| from the isolate's builtins
// table. The Smi id is converted to a table byte offset in one shift: the id
// must be untagged (shift right by kSmiShiftBits) and then scaled by the
// pointer size (shift left by kSystemPointerSizeLog2); the two shifts are
// folded, and the direction depends on which is larger.
TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
  CSA_ASSERT(this, SmiGreaterThanOrEqual(builtin_id, SmiConstant(0)));
  CSA_ASSERT(this,
             SmiLessThan(builtin_id, SmiConstant(Builtins::builtin_count)));

  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  int index_shift = kSystemPointerSizeLog2 - kSmiShiftBits;
  TNode<WordT> table_index =
      index_shift >= 0 ? WordShl(BitcastTaggedToWord(builtin_id), index_shift)
                       : WordSar(BitcastTaggedToWord(builtin_id), -index_shift);

  return CAST(
      Load(MachineType::TaggedPointer(),
           ExternalConstant(ExternalReference::builtins_address(isolate())),
           table_index));
}
13363 :
// Determines the Code object to run for |shared_info| by dispatching on the
// SFI's function-data field: a Smi means a builtin id; otherwise the data's
// instance type selects bytecode interpretation, a wasm wrapper, asm.js
// instantiation, lazy compilation, an API call, or interpreter data. If
// |if_compile_lazy| is provided, lazy-compile cases jump there instead of
// returning the CompileLazy builtin.
TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
    SloppyTNode<SharedFunctionInfo> shared_info, Label* if_compile_lazy) {
  TNode<Object> sfi_data =
      LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);

  TVARIABLE(Code, sfi_code);

  Label done(this);
  Label check_instance_type(this);

  // IsSmi: Is builtin
  GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
  if (if_compile_lazy) {
    GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtins::kCompileLazy)),
           if_compile_lazy);
  }
  sfi_code = LoadBuiltin(CAST(sfi_data));
  Goto(&done);

  // Switch on data's instance type.
  BIND(&check_instance_type);
  TNode<Int32T> data_type = LoadInstanceType(CAST(sfi_data));

  // case_values[i] routes to case_labels[i]; anything else falls through to
  // the interpreter-data default.
  int32_t case_values[] = {BYTECODE_ARRAY_TYPE,
                           WASM_EXPORTED_FUNCTION_DATA_TYPE,
                           ASM_WASM_DATA_TYPE,
                           UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE,
                           UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE,
                           FUNCTION_TEMPLATE_INFO_TYPE};
  Label check_is_bytecode_array(this);
  Label check_is_exported_function_data(this);
  Label check_is_asm_wasm_data(this);
  Label check_is_uncompiled_data_without_preparse_data(this);
  Label check_is_uncompiled_data_with_preparse_data(this);
  Label check_is_function_template_info(this);
  Label check_is_interpreter_data(this);
  Label* case_labels[] = {&check_is_bytecode_array,
                          &check_is_exported_function_data,
                          &check_is_asm_wasm_data,
                          &check_is_uncompiled_data_without_preparse_data,
                          &check_is_uncompiled_data_with_preparse_data,
                          &check_is_function_template_info};
  STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels));
  Switch(data_type, &check_is_interpreter_data, case_values, case_labels,
         arraysize(case_labels));

  // IsBytecodeArray: Interpret bytecode
  BIND(&check_is_bytecode_array);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
  Goto(&done);

  // IsWasmExportedFunctionData: Use the wrapper code
  BIND(&check_is_exported_function_data);
  sfi_code = CAST(LoadObjectField(
      CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset));
  Goto(&done);

  // IsAsmWasmData: Instantiate using AsmWasmData
  BIND(&check_is_asm_wasm_data);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs));
  Goto(&done);

  // IsUncompiledDataWithPreparseData | IsUncompiledDataWithoutPreparseData:
  // Compile lazy
  BIND(&check_is_uncompiled_data_with_preparse_data);
  Goto(&check_is_uncompiled_data_without_preparse_data);
  BIND(&check_is_uncompiled_data_without_preparse_data);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  Goto(if_compile_lazy ? if_compile_lazy : &done);

  // IsFunctionTemplateInfo: API call
  BIND(&check_is_function_template_info);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall));
  Goto(&done);

  // IsInterpreterData: Interpret bytecode
  BIND(&check_is_interpreter_data);
  // This is the default branch, so assert that we have the expected data type.
  CSA_ASSERT(this,
             Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE)));
  sfi_code = CAST(LoadObjectField(
      CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset));
  Goto(&done);

  BIND(&done);
  return sfi_code.value();
}
13451 :
// Allocates and fully initializes a JSFunction (without a prototype slot)
// for |shared_info| in |context|, using |map| as its map. The code field is
// derived from the SharedFunctionInfo via GetSharedFunctionInfoCode.
Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
                                                           Node* shared_info,
                                                           Node* context) {
  CSA_SLOW_ASSERT(this, IsMap(map));

  Node* const code = GetSharedFunctionInfoCode(shared_info);

  // TODO(ishell): All the callers of this function pass map loaded from
  // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove
  // map parameter.
  CSA_ASSERT(this, Word32BinaryNot(IsConstructorMap(map)));
  CSA_ASSERT(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)));
  Node* const fun = Allocate(JSFunction::kSizeWithoutPrototype);
  // The stores below cover all 7 tagged fields of the allocation.
  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
  StoreMapNoWriteBarrier(fun, map);
  StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
                       RootIndex::kManyClosuresCell);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
  return fun;
}
13479 :
13480 0 : Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
13481 : StackFrame::Type frame_type) {
13482 : return WordEqual(marker_or_function,
13483 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13484 : }
13485 :
13486 0 : Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
13487 : StackFrame::Type frame_type) {
13488 : return WordNotEqual(marker_or_function,
13489 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13490 : }
13491 :
// Walks the prototype chain starting at |receiver|, jumping to |if_fast|
// only if every object on the chain has no elements and (for all objects
// after the receiver) an enum length of zero; otherwise jumps to |if_slow|.
void CodeStubAssembler::CheckPrototypeEnumCache(Node* receiver,
                                                Node* receiver_map,
                                                Label* if_fast,
                                                Label* if_slow) {
  VARIABLE(var_object, MachineRepresentation::kTagged, receiver);
  VARIABLE(var_object_map, MachineRepresentation::kTagged, receiver_map);

  Label loop(this, {&var_object, &var_object_map}), done_loop(this);
  Goto(&loop);
  BIND(&loop);
  {
    // Check that there are no elements on the current {object}.
    Label if_no_elements(this);
    Node* object = var_object.value();
    Node* object_map = var_object_map.value();

    // The following relies on the elements only aliasing with JSProxy::target,
    // which is a Javascript value and hence cannot be confused with an elements
    // backing store.
    STATIC_ASSERT(static_cast<int>(JSObject::kElementsOffset) ==
                  static_cast<int>(JSProxy::kTargetOffset));
    Node* object_elements = LoadObjectField(object, JSObject::kElementsOffset);
    GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
    GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);

    // It might still be an empty JSArray.
    GotoIfNot(IsJSArrayMap(object_map), if_slow);
    Node* object_length = LoadJSArrayLength(object);
    Branch(WordEqual(object_length, SmiConstant(0)), &if_no_elements, if_slow);

    // Continue with the {object}s prototype.
    BIND(&if_no_elements);
    object = LoadMapPrototype(object_map);
    // Reaching a null prototype means the whole chain was empty.
    GotoIf(IsNull(object), if_fast);

    // For all {object}s but the {receiver}, check that the cache is empty.
    var_object.Bind(object);
    object_map = LoadMap(object);
    var_object_map.Bind(object_map);
    Node* object_enum_length = LoadMapEnumLength(object_map);
    Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
  }
}
13535 :
// Checks whether for-in iteration over |receiver| can use the enum cache.
// Returns the receiver's map on the fast path; jumps to |if_empty| when the
// receiver (dictionary-mode) has no own properties/elements, and to
// |if_runtime| whenever the runtime must be consulted.
Node* CodeStubAssembler::CheckEnumCache(Node* receiver, Label* if_empty,
                                        Label* if_runtime) {
  Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
  Node* receiver_map = LoadMap(receiver);

  // Check if the enum length field of the {receiver} is properly initialized,
  // indicating that there is an enum cache.
  Node* receiver_enum_length = LoadMapEnumLength(receiver_map);
  Branch(WordEqual(receiver_enum_length,
                   IntPtrConstant(kInvalidEnumCacheSentinel)),
         &if_no_cache, &if_cache);

  BIND(&if_no_cache);
  {
    // Avoid runtime-call for empty dictionary receivers.
    GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
    TNode<NameDictionary> properties = CAST(LoadSlowProperties(receiver));
    TNode<Smi> length = GetNumberOfElements(properties);
    GotoIfNot(WordEqual(length, SmiConstant(0)), if_runtime);
    // Check that there are no elements on the {receiver} and its prototype
    // chain. Given that we do not create an EnumCache for dict-mode objects,
    // directly jump to {if_empty} if there are no elements and no properties
    // on the {receiver}.
    CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
  }

  // Check that there are no elements on the fast {receiver} and its
  // prototype chain.
  BIND(&if_cache);
  CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);

  BIND(&if_fast);
  return receiver_map;
}
13570 :
// Thin trampoline exposing CodeStubArguments::GetLength to generated code.
TNode<IntPtrT> CodeStubAssembler::GetArgumentsLength(CodeStubArguments* args) {
  return args->GetLength();
}
13574 :
// Thin trampoline: returns argument |index|, or a default value when the
// index is out of range (see CodeStubArguments::GetOptionalArgumentValue).
TNode<Object> CodeStubAssembler::GetArgumentValue(CodeStubArguments* args,
                                                  TNode<IntPtrT> index) {
  return args->GetOptionalArgumentValue(index);
}
13579 :
13580 0 : void CodeStubAssembler::Print(const char* s) {
13581 0 : std::string formatted(s);
13582 0 : formatted += "\n";
13583 : CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13584 0 : StringConstant(formatted.c_str()));
13585 0 : }
13586 :
// Debug helper: prints "<prefix>: " (when a prefix is given) followed by a
// runtime dump of |tagged_value|. The prefix string is materialized as a
// TENURED heap string at compile time of the stub.
void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
  if (prefix != nullptr) {
    std::string formatted(prefix);
    formatted += ": ";
    Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
        formatted.c_str(), TENURED);
    CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
                HeapConstant(string));
  }
  CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
}
13598 :
// Emits a stack-overflow check: compares the stack pointer against the
// isolate's stack limit and calls Runtime::kStackGuard on the (deferred)
// slow path. Do NOT reorder these instructions — see the comment below.
void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
  Label ok(this), stack_check_interrupt(this, Label::kDeferred);

  // The instruction sequence below is carefully crafted to hit our pattern
  // matcher for stack checks within instruction selection.
  // See StackCheckMatcher::Matched and JSGenericLowering::LowerJSStackCheck.

  TNode<UintPtrT> sp = UncheckedCast<UintPtrT>(LoadStackPointer());
  TNode<UintPtrT> stack_limit = UncheckedCast<UintPtrT>(Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate()))));
  TNode<BoolT> sp_within_limit = UintPtrLessThan(stack_limit, sp);

  Branch(sp_within_limit, &ok, &stack_check_interrupt);

  BIND(&stack_check_interrupt);
  CallRuntime(Runtime::kStackGuard, context);
  Goto(&ok);

  BIND(&ok);
}
13620 :
// Initializes a freshly allocated function Context of |slots| slots: sets
// map and length, copies the native context's scope info, clears previous/
// extension, and links back to |native_context|. All stores are
// barrier-free, which relies on the context being newly allocated.
void CodeStubAssembler::InitializeFunctionContext(Node* native_context,
                                                  Node* context, int slots) {
  DCHECK_GE(slots, Context::MIN_CONTEXT_SLOTS);
  StoreMapNoWriteBarrier(context, RootIndex::kFunctionContextMap);
  StoreObjectFieldNoWriteBarrier(context, FixedArray::kLengthOffset,
                                 SmiConstant(slots));

  Node* const empty_scope_info =
      LoadContextElement(native_context, Context::SCOPE_INFO_INDEX);
  StoreContextElementNoWriteBarrier(context, Context::SCOPE_INFO_INDEX,
                                    empty_scope_info);
  StoreContextElementNoWriteBarrier(context, Context::PREVIOUS_INDEX,
                                    UndefinedConstant());
  StoreContextElementNoWriteBarrier(context, Context::EXTENSION_INDEX,
                                    TheHoleConstant());
  StoreContextElementNoWriteBarrier(context, Context::NATIVE_CONTEXT_INDEX,
                                    native_context);
}
13639 :
// Implements ArrayCreate(length): allocates a packed-Smi JSArray of the
// given length inline when length is a Smi below the fast-element limit,
// and falls back to Runtime::kNewArray (deferred) otherwise.
TNode<JSArray> CodeStubAssembler::ArrayCreate(TNode<Context> context,
                                              TNode<Number> length) {
  TVARIABLE(JSArray, array);
  Label allocate_js_array(this);

  Label done(this), next(this), runtime(this, Label::kDeferred);
  TNode<Smi> limit = SmiConstant(JSArray::kInitialMaxFastElementArray);
  // Debug-only: length must be non-negative.
  CSA_ASSERT_BRANCH(this, [=](Label* ok, Label* not_ok) {
    BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
                                       SmiConstant(0), ok, not_ok);
  });
  // This check also transitively covers the case where length is too big
  // to be representable by a SMI and so is not usable with
  // AllocateJSArray.
  BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
                                     limit, &runtime, &next);

  BIND(&runtime);
  {
    TNode<Context> native_context = LoadNativeContext(context);
    TNode<JSFunction> array_function =
        CAST(LoadContextElement(native_context, Context::ARRAY_FUNCTION_INDEX));
    array = CAST(CallRuntime(Runtime::kNewArray, context, array_function,
                             length, array_function, UndefinedConstant()));
    Goto(&done);
  }

  BIND(&next);
  CSA_ASSERT(this, TaggedIsSmi(length));

  TNode<Map> array_map = CAST(LoadContextElement(
      context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX));

  // TODO(delphick): Consider using
  // AllocateUninitializedJSArrayWithElements to avoid initializing an
  // array and then writing over it.
  array =
      AllocateJSArray(PACKED_SMI_ELEMENTS, array_map, length, SmiConstant(0),
                      nullptr, ParameterMode::SMI_PARAMETERS);
  Goto(&done);

  BIND(&done);
  return array.value();
}
13684 :
// Sets array.length = length, skipping the store entirely when |array| is a
// fast JSArray whose length already equals |length| (the set would be
// unobservable); otherwise performs a strict SetProperty via the runtime.
void CodeStubAssembler::SetPropertyLength(TNode<Context> context,
                                          TNode<Object> array,
                                          TNode<Number> length) {
  Label fast(this), runtime(this), done(this);
  // There's no need to set the length, if
  // 1) the array is a fast JS array and
  // 2) the new length is equal to the old length.
  // as the set is not observable. Otherwise fall back to the run-time.

  // 1) Check that the array has fast elements.
  // TODO(delphick): Consider changing this since it does an an unnecessary
  // check for SMIs.
  // TODO(delphick): Also we could hoist this to after the array construction
  // and copy the args into array in the same way as the Array constructor.
  BranchIfFastJSArray(array, context, &fast, &runtime);

  BIND(&fast);
  {
    TNode<JSArray> fast_array = CAST(array);

    TNode<Smi> length_smi = CAST(length);
    TNode<Smi> old_length = LoadFastJSArrayLength(fast_array);
    CSA_ASSERT(this, TaggedIsPositiveSmi(old_length));

    // 2) If the created array's length matches the required length, then
    //    there's nothing else to do. Otherwise use the runtime to set the
    //    property as that will insert holes into excess elements or shrink
    //    the backing store as appropriate.
    Branch(SmiNotEqual(length_smi, old_length), &runtime, &done);
  }

  BIND(&runtime);
  {
    SetPropertyStrict(context, array, CodeStubAssembler::LengthStringConstant(),
                      length);
    Goto(&done);
  }

  BIND(&done);
}
13725 :
13726 224 : void CodeStubAssembler::GotoIfInitialPrototypePropertyModified(
13727 : TNode<Map> object_map, TNode<Map> initial_prototype_map, int descriptor,
13728 : RootIndex field_name_root_index, Label* if_modified) {
13729 224 : DescriptorIndexAndName index_name{descriptor, field_name_root_index};
13730 : GotoIfInitialPrototypePropertiesModified(
13731 : object_map, initial_prototype_map,
13732 224 : Vector<DescriptorIndexAndName>(&index_name, 1), if_modified);
13733 224 : }
13734 :
// Jumps to |if_modified| unless the object's prototype is still the
// unmodified initial prototype map AND (with constant field tracking) each
// listed property is still marked PropertyConstness::kConst in the
// prototype's descriptor array.
void CodeStubAssembler::GotoIfInitialPrototypePropertiesModified(
    TNode<Map> object_map, TNode<Map> initial_prototype_map,
    Vector<DescriptorIndexAndName> properties, Label* if_modified) {
  TNode<Map> prototype_map = LoadMap(LoadMapPrototype(object_map));
  GotoIfNot(WordEqual(prototype_map, initial_prototype_map), if_modified);

  if (FLAG_track_constant_fields) {
    // With constant field tracking, we need to make sure that important
    // properties in the prototype has not been tampered with. We do this by
    // checking that their slots in the prototype's descriptor array are still
    // marked as const.
    TNode<DescriptorArray> descriptors = LoadMapDescriptors(prototype_map);

    // AND together all property details so one comparison checks them all:
    // every constness field must still decode to kConst.
    TNode<Uint32T> combined_details;
    for (int i = 0; i < properties.length(); i++) {
      // Assert the descriptor index is in-bounds.
      int descriptor = properties[i].descriptor_index;
      CSA_ASSERT(this, Int32LessThan(Int32Constant(descriptor),
                                     LoadNumberOfDescriptors(descriptors)));
      // Assert that the name is correct. This essentially checks that
      // the descriptor index corresponds to the insertion order in
      // the bootstrapper.
      CSA_ASSERT(this,
                 WordEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
                           LoadRoot(properties[i].name_root_index)));

      TNode<Uint32T> details =
          DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
      if (i == 0) {
        combined_details = details;
      } else {
        combined_details = Unsigned(Word32And(combined_details, details));
      }
    }

    TNode<Uint32T> constness =
        DecodeWord32<PropertyDetails::ConstnessField>(combined_details);

    GotoIfNot(
        Word32Equal(constness,
                    Int32Constant(static_cast<int>(PropertyConstness::kConst))),
        if_modified);
  }
}
13779 :
// Casts |value| to a String that can be accessed directly (flat, with
// addressable character data), jumping to |fail| when it is not a string or
// its data pointer cannot be obtained. TryToDirect must precede
// PointerToData.
TNode<String> CodeStubAssembler::TaggedToDirectString(TNode<Object> value,
                                                      Label* fail) {
  ToDirectStringAssembler to_direct(state(), value);
  to_direct.TryToDirect(fail);
  to_direct.PointerToData(fail);
  return CAST(value);
}
13787 :
13788 : } // namespace internal
13789 86739 : } // namespace v8
|