Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/code-stub-assembler.h"
6 :
7 : #include "src/code-factory.h"
8 : #include "src/counters.h"
9 : #include "src/frames-inl.h"
10 : #include "src/frames.h"
11 : #include "src/function-kind.h"
12 : #include "src/heap/heap-inl.h" // For Page/MemoryChunk. TODO(jkummerow): Drop.
13 : #include "src/objects/api-callbacks.h"
14 : #include "src/objects/cell.h"
15 : #include "src/objects/descriptor-array.h"
16 : #include "src/objects/heap-number.h"
17 : #include "src/objects/oddball.h"
18 : #include "src/objects/ordered-hash-table-inl.h"
19 : #include "src/objects/property-cell.h"
20 : #include "src/wasm/wasm-objects.h"
21 :
22 : namespace v8 {
23 : namespace internal {
24 :
25 : using compiler::Node;
26 : template <class T>
27 : using TNode = compiler::TNode<T>;
28 : template <class T>
29 : using SloppyTNode = compiler::SloppyTNode<T>;
30 :
31 534485 : CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
32 : : compiler::CodeAssembler(state), BaseBuiltinsFromDSLAssembler(state) {
33 : if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
34 : HandleBreakOnNode();
35 : }
36 534485 : }
37 :
38 0 : void CodeStubAssembler::HandleBreakOnNode() {
39 : // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
40 : // string specifying the name of a stub and NODE is number specifying node id.
41 : const char* name = state()->name();
42 0 : size_t name_length = strlen(name);
43 0 : if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
44 : // Different name.
45 0 : return;
46 : }
47 0 : size_t option_length = strlen(FLAG_csa_trap_on_node);
48 0 : if (option_length < name_length + 2 ||
49 0 : FLAG_csa_trap_on_node[name_length] != ',') {
50 : // Option is too short.
51 : return;
52 : }
53 0 : const char* start = &FLAG_csa_trap_on_node[name_length + 1];
54 : char* end;
55 0 : int node_id = static_cast<int>(strtol(start, &end, 10));
56 0 : if (start == end) {
57 : // Bad node id.
58 : return;
59 : }
60 0 : BreakOnNode(node_id);
61 : }
62 :
63 0 : void CodeStubAssembler::Assert(const BranchGenerator& branch,
64 : const char* message, const char* file, int line,
65 : Node* extra_node1, const char* extra_node1_name,
66 : Node* extra_node2, const char* extra_node2_name,
67 : Node* extra_node3, const char* extra_node3_name,
68 : Node* extra_node4, const char* extra_node4_name,
69 : Node* extra_node5,
70 : const char* extra_node5_name) {
71 : #if defined(DEBUG)
72 : if (FLAG_debug_code) {
73 : Check(branch, message, file, line, extra_node1, extra_node1_name,
74 : extra_node2, extra_node2_name, extra_node3, extra_node3_name,
75 : extra_node4, extra_node4_name, extra_node5, extra_node5_name);
76 : }
77 : #endif
78 0 : }
79 :
80 0 : void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
81 : const char* message, const char* file, int line,
82 : Node* extra_node1, const char* extra_node1_name,
83 : Node* extra_node2, const char* extra_node2_name,
84 : Node* extra_node3, const char* extra_node3_name,
85 : Node* extra_node4, const char* extra_node4_name,
86 : Node* extra_node5,
87 : const char* extra_node5_name) {
88 : #if defined(DEBUG)
89 : if (FLAG_debug_code) {
90 : Check(condition_body, message, file, line, extra_node1, extra_node1_name,
91 : extra_node2, extra_node2_name, extra_node3, extra_node3_name,
92 : extra_node4, extra_node4_name, extra_node5, extra_node5_name);
93 : }
94 : #endif
95 0 : }
96 :
#ifdef DEBUG
namespace {
// Debug helper: when |node| is non-null, emits a runtime call that prints
// the node's value labelled with |node_name| at generated-code runtime.
void MaybePrintNodeWithName(CodeStubAssembler* csa, Node* node,
                            const char* node_name) {
  if (node == nullptr) return;
  csa->CallRuntime(Runtime::kPrintWithNameForAssert, csa->SmiConstant(0),
                   csa->StringConstant(node_name), node);
}
}  // namespace
#endif
108 :
109 0 : void CodeStubAssembler::Check(const BranchGenerator& branch,
110 : const char* message, const char* file, int line,
111 : Node* extra_node1, const char* extra_node1_name,
112 : Node* extra_node2, const char* extra_node2_name,
113 : Node* extra_node3, const char* extra_node3_name,
114 : Node* extra_node4, const char* extra_node4_name,
115 : Node* extra_node5, const char* extra_node5_name) {
116 0 : Label ok(this);
117 0 : Label not_ok(this, Label::kDeferred);
118 0 : if (message != nullptr && FLAG_code_comments) {
119 0 : Comment("[ Assert: ", message);
120 : } else {
121 0 : Comment("[ Assert");
122 : }
123 : branch(&ok, ¬_ok);
124 :
125 : BIND(¬_ok);
126 : FailAssert(message, file, line, extra_node1, extra_node1_name, extra_node2,
127 : extra_node2_name, extra_node3, extra_node3_name, extra_node4,
128 0 : extra_node4_name, extra_node5, extra_node5_name);
129 :
130 : BIND(&ok);
131 0 : Comment("] Assert");
132 0 : }
133 :
134 0 : void CodeStubAssembler::Check(const NodeGenerator& condition_body,
135 : const char* message, const char* file, int line,
136 : Node* extra_node1, const char* extra_node1_name,
137 : Node* extra_node2, const char* extra_node2_name,
138 : Node* extra_node3, const char* extra_node3_name,
139 : Node* extra_node4, const char* extra_node4_name,
140 : Node* extra_node5, const char* extra_node5_name) {
141 0 : BranchGenerator branch = [=](Label* ok, Label* not_ok) {
142 : Node* condition = condition_body();
143 : DCHECK_NOT_NULL(condition);
144 0 : Branch(condition, ok, not_ok);
145 0 : };
146 :
147 : Check(branch, message, file, line, extra_node1, extra_node1_name, extra_node2,
148 : extra_node2_name, extra_node3, extra_node3_name, extra_node4,
149 0 : extra_node4_name, extra_node5, extra_node5_name);
150 0 : }
151 :
152 68676 : void CodeStubAssembler::FastCheck(TNode<BoolT> condition) {
153 137352 : Label ok(this), not_ok(this, Label::kDeferred);
154 68676 : Branch(condition, &ok, ¬_ok);
155 : BIND(¬_ok);
156 : {
157 68676 : DebugBreak();
158 68676 : Goto(&ok);
159 : }
160 : BIND(&ok);
161 68676 : }
162 :
163 440 : void CodeStubAssembler::FailAssert(
164 : const char* message, const char* file, int line, Node* extra_node1,
165 : const char* extra_node1_name, Node* extra_node2,
166 : const char* extra_node2_name, Node* extra_node3,
167 : const char* extra_node3_name, Node* extra_node4,
168 : const char* extra_node4_name, Node* extra_node5,
169 : const char* extra_node5_name) {
170 : DCHECK_NOT_NULL(message);
171 : char chars[1024];
172 : Vector<char> buffer(chars);
173 440 : if (file != nullptr) {
174 440 : SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
175 : } else {
176 0 : SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
177 : }
178 880 : Node* message_node = StringConstant(&(buffer[0]));
179 :
180 : #ifdef DEBUG
181 : // Only print the extra nodes in debug builds.
182 : MaybePrintNodeWithName(this, extra_node1, extra_node1_name);
183 : MaybePrintNodeWithName(this, extra_node2, extra_node2_name);
184 : MaybePrintNodeWithName(this, extra_node3, extra_node3_name);
185 : MaybePrintNodeWithName(this, extra_node4, extra_node4_name);
186 : MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
187 : #endif
188 :
189 440 : DebugAbort(message_node);
190 440 : Unreachable();
191 440 : }
192 :
193 47596 : Node* CodeStubAssembler::SelectImpl(TNode<BoolT> condition,
194 : const NodeGenerator& true_body,
195 : const NodeGenerator& false_body,
196 : MachineRepresentation rep) {
197 95192 : VARIABLE(value, rep);
198 47596 : Label vtrue(this), vfalse(this), end(this);
199 47596 : Branch(condition, &vtrue, &vfalse);
200 :
201 : BIND(&vtrue);
202 : {
203 47596 : value.Bind(true_body());
204 47596 : Goto(&end);
205 : }
206 : BIND(&vfalse);
207 : {
208 47596 : value.Bind(false_body());
209 47596 : Goto(&end);
210 : }
211 :
212 : BIND(&end);
213 95192 : return value.value();
214 : }
215 :
216 224 : TNode<Int32T> CodeStubAssembler::SelectInt32Constant(
217 : SloppyTNode<BoolT> condition, int true_value, int false_value) {
218 : return SelectConstant<Int32T>(condition, Int32Constant(true_value),
219 448 : Int32Constant(false_value));
220 : }
221 :
222 0 : TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(
223 : SloppyTNode<BoolT> condition, int true_value, int false_value) {
224 : return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
225 0 : IntPtrConstant(false_value));
226 : }
227 :
228 3084 : TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
229 : SloppyTNode<BoolT> condition) {
230 3084 : return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
231 : }
232 :
233 4144 : TNode<Smi> CodeStubAssembler::SelectSmiConstant(SloppyTNode<BoolT> condition,
234 : Smi true_value,
235 : Smi false_value) {
236 : return SelectConstant<Smi>(condition, SmiConstant(true_value),
237 8288 : SmiConstant(false_value));
238 : }
239 :
240 2464 : TNode<Object> CodeStubAssembler::NoContextConstant() {
241 111992 : return SmiConstant(Context::kNoContext);
242 : }
243 :
244 : #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
245 : compiler::TNode<std::remove_pointer<std::remove_reference<decltype( \
246 : std::declval<Heap>().rootAccessorName())>::type>::type> \
247 : CodeStubAssembler::name##Constant() { \
248 : return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
249 : std::declval<Heap>().rootAccessorName())>::type>::type>( \
250 : LoadRoot(RootIndex::k##rootIndexName)); \
251 : }
252 0 : HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
253 : #undef HEAP_CONSTANT_ACCESSOR
254 :
255 : #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
256 : compiler::TNode<std::remove_pointer<std::remove_reference<decltype( \
257 : std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type> \
258 : CodeStubAssembler::name##Constant() { \
259 : return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
260 : std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>( \
261 : LoadRoot(RootIndex::k##rootIndexName)); \
262 : }
263 305988 : HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
264 : #undef HEAP_CONSTANT_ACCESSOR
265 :
266 : #define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name) \
267 : compiler::TNode<BoolT> CodeStubAssembler::Is##name( \
268 : SloppyTNode<Object> value) { \
269 : return WordEqual(value, name##Constant()); \
270 : } \
271 : compiler::TNode<BoolT> CodeStubAssembler::IsNot##name( \
272 : SloppyTNode<Object> value) { \
273 : return WordNotEqual(value, name##Constant()); \
274 : }
275 324008 : HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST)
276 : #undef HEAP_CONSTANT_TEST
277 :
278 112632 : Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
279 112632 : if (mode == SMI_PARAMETERS) {
280 6928 : return SmiConstant(value);
281 : } else {
282 : DCHECK_EQ(INTPTR_PARAMETERS, mode);
283 218336 : return IntPtrConstant(value);
284 : }
285 : }
286 :
287 2968 : bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
288 : ParameterMode mode) {
289 : int32_t constant_test;
290 2968 : Smi smi_test;
291 2968 : if (mode == INTPTR_PARAMETERS) {
292 1736 : if (ToInt32Constant(test, constant_test) && constant_test == 0) {
293 : return true;
294 : }
295 : } else {
296 : DCHECK_EQ(mode, SMI_PARAMETERS);
297 1736 : if (ToSmiConstant(test, &smi_test) && smi_test->value() == 0) {
298 : return true;
299 : }
300 : }
301 : return false;
302 : }
303 :
304 0 : bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
305 : int* value,
306 : ParameterMode mode) {
307 : int32_t int32_constant;
308 0 : if (mode == INTPTR_PARAMETERS) {
309 0 : if (ToInt32Constant(maybe_constant, int32_constant)) {
310 0 : *value = int32_constant;
311 0 : return true;
312 : }
313 : } else {
314 : DCHECK_EQ(mode, SMI_PARAMETERS);
315 0 : Smi smi_constant;
316 0 : if (ToSmiConstant(maybe_constant, &smi_constant)) {
317 0 : *value = Smi::ToInt(smi_constant);
318 0 : return true;
319 : }
320 : }
321 : return false;
322 : }
323 :
324 956 : TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
325 : TNode<IntPtrT> value) {
326 956 : Comment("IntPtrRoundUpToPowerOfTwo32");
327 : CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
328 956 : value = Signed(IntPtrSub(value, IntPtrConstant(1)));
329 10516 : for (int i = 1; i <= 16; i *= 2) {
330 9560 : value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
331 : }
332 1912 : return Signed(IntPtrAdd(value, IntPtrConstant(1)));
333 : }
334 :
335 0 : Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
336 0 : if (mode == SMI_PARAMETERS) {
337 0 : return TaggedIsSmi(value);
338 : } else {
339 0 : return Int32Constant(1);
340 : }
341 : }
342 :
343 0 : TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
344 : // value && !(value & (value - 1))
345 : return WordEqual(
346 0 : Select<IntPtrT>(
347 0 : WordEqual(value, IntPtrConstant(0)),
348 0 : [=] { return IntPtrConstant(1); },
349 0 : [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
350 0 : IntPtrConstant(0));
351 : }
352 :
353 56 : TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
354 112 : Node* one = Float64Constant(1.0);
355 112 : Node* one_half = Float64Constant(0.5);
356 :
357 56 : Label return_x(this);
358 :
359 : // Round up {x} towards Infinity.
360 168 : VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));
361 :
362 224 : GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
363 56 : &return_x);
364 168 : var_x.Bind(Float64Sub(var_x.value(), one));
365 56 : Goto(&return_x);
366 :
367 : BIND(&return_x);
368 112 : return TNode<Float64T>::UncheckedCast(var_x.value());
369 : }
370 :
371 112 : TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
372 112 : if (IsFloat64RoundUpSupported()) {
373 110 : return Float64RoundUp(x);
374 : }
375 :
376 4 : Node* one = Float64Constant(1.0);
377 4 : Node* zero = Float64Constant(0.0);
378 4 : Node* two_52 = Float64Constant(4503599627370496.0E0);
379 4 : Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
380 :
381 4 : VARIABLE(var_x, MachineRepresentation::kFloat64, x);
382 2 : Label return_x(this), return_minus_x(this);
383 :
384 : // Check if {x} is greater than zero.
385 2 : Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
386 4 : Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
387 2 : &if_xnotgreaterthanzero);
388 :
389 : BIND(&if_xgreaterthanzero);
390 : {
391 : // Just return {x} unless it's in the range ]0,2^52[.
392 4 : GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
393 :
394 : // Round positive {x} towards Infinity.
395 6 : var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
396 6 : GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
397 6 : var_x.Bind(Float64Add(var_x.value(), one));
398 2 : Goto(&return_x);
399 : }
400 :
401 : BIND(&if_xnotgreaterthanzero);
402 : {
403 : // Just return {x} unless it's in the range ]-2^52,0[
404 4 : GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
405 4 : GotoIfNot(Float64LessThan(x, zero), &return_x);
406 :
407 : // Round negated {x} towards Infinity and return the result negated.
408 4 : Node* minus_x = Float64Neg(x);
409 6 : var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
410 6 : GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
411 6 : var_x.Bind(Float64Sub(var_x.value(), one));
412 2 : Goto(&return_minus_x);
413 : }
414 :
415 : BIND(&return_minus_x);
416 6 : var_x.Bind(Float64Neg(var_x.value()));
417 2 : Goto(&return_x);
418 :
419 : BIND(&return_x);
420 2 : return TNode<Float64T>::UncheckedCast(var_x.value());
421 : }
422 :
423 119 : TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
424 119 : if (IsFloat64RoundDownSupported()) {
425 110 : return Float64RoundDown(x);
426 : }
427 :
428 18 : Node* one = Float64Constant(1.0);
429 18 : Node* zero = Float64Constant(0.0);
430 18 : Node* two_52 = Float64Constant(4503599627370496.0E0);
431 18 : Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
432 :
433 18 : VARIABLE(var_x, MachineRepresentation::kFloat64, x);
434 9 : Label return_x(this), return_minus_x(this);
435 :
436 : // Check if {x} is greater than zero.
437 9 : Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
438 18 : Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
439 9 : &if_xnotgreaterthanzero);
440 :
441 : BIND(&if_xgreaterthanzero);
442 : {
443 : // Just return {x} unless it's in the range ]0,2^52[.
444 18 : GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
445 :
446 : // Round positive {x} towards -Infinity.
447 27 : var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
448 27 : GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
449 27 : var_x.Bind(Float64Sub(var_x.value(), one));
450 9 : Goto(&return_x);
451 : }
452 :
453 : BIND(&if_xnotgreaterthanzero);
454 : {
455 : // Just return {x} unless it's in the range ]-2^52,0[
456 18 : GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
457 18 : GotoIfNot(Float64LessThan(x, zero), &return_x);
458 :
459 : // Round negated {x} towards -Infinity and return the result negated.
460 18 : Node* minus_x = Float64Neg(x);
461 27 : var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
462 27 : GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
463 27 : var_x.Bind(Float64Add(var_x.value(), one));
464 9 : Goto(&return_minus_x);
465 : }
466 :
467 : BIND(&return_minus_x);
468 27 : var_x.Bind(Float64Neg(var_x.value()));
469 9 : Goto(&return_x);
470 :
471 : BIND(&return_x);
472 9 : return TNode<Float64T>::UncheckedCast(var_x.value());
473 : }
474 :
475 392 : TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
476 392 : if (IsFloat64RoundTiesEvenSupported()) {
477 385 : return Float64RoundTiesEven(x);
478 : }
479 : // See ES#sec-touint8clamp for details.
480 14 : Node* f = Float64Floor(x);
481 21 : Node* f_and_half = Float64Add(f, Float64Constant(0.5));
482 :
483 14 : VARIABLE(var_result, MachineRepresentation::kFloat64);
484 7 : Label return_f(this), return_f_plus_one(this), done(this);
485 :
486 14 : GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
487 14 : GotoIf(Float64LessThan(x, f_and_half), &return_f);
488 : {
489 21 : Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
490 21 : Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
491 7 : &return_f_plus_one);
492 : }
493 :
494 : BIND(&return_f);
495 7 : var_result.Bind(f);
496 7 : Goto(&done);
497 :
498 : BIND(&return_f_plus_one);
499 21 : var_result.Bind(Float64Add(f, Float64Constant(1.0)));
500 7 : Goto(&done);
501 :
502 : BIND(&done);
503 7 : return TNode<Float64T>::UncheckedCast(var_result.value());
504 : }
505 :
506 340 : TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
507 340 : if (IsFloat64RoundTruncateSupported()) {
508 334 : return Float64RoundTruncate(x);
509 : }
510 :
511 12 : Node* one = Float64Constant(1.0);
512 12 : Node* zero = Float64Constant(0.0);
513 12 : Node* two_52 = Float64Constant(4503599627370496.0E0);
514 12 : Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
515 :
516 12 : VARIABLE(var_x, MachineRepresentation::kFloat64, x);
517 6 : Label return_x(this), return_minus_x(this);
518 :
519 : // Check if {x} is greater than 0.
520 6 : Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
521 12 : Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
522 6 : &if_xnotgreaterthanzero);
523 :
524 : BIND(&if_xgreaterthanzero);
525 : {
526 6 : if (IsFloat64RoundDownSupported()) {
527 0 : var_x.Bind(Float64RoundDown(x));
528 : } else {
529 : // Just return {x} unless it's in the range ]0,2^52[.
530 12 : GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
531 :
532 : // Round positive {x} towards -Infinity.
533 18 : var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
534 18 : GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
535 18 : var_x.Bind(Float64Sub(var_x.value(), one));
536 : }
537 6 : Goto(&return_x);
538 : }
539 :
540 : BIND(&if_xnotgreaterthanzero);
541 : {
542 6 : if (IsFloat64RoundUpSupported()) {
543 0 : var_x.Bind(Float64RoundUp(x));
544 0 : Goto(&return_x);
545 : } else {
546 : // Just return {x} unless its in the range ]-2^52,0[.
547 12 : GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
548 12 : GotoIfNot(Float64LessThan(x, zero), &return_x);
549 :
550 : // Round negated {x} towards -Infinity and return result negated.
551 12 : Node* minus_x = Float64Neg(x);
552 18 : var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
553 18 : GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
554 18 : var_x.Bind(Float64Sub(var_x.value(), one));
555 6 : Goto(&return_minus_x);
556 : }
557 : }
558 :
559 : BIND(&return_minus_x);
560 18 : var_x.Bind(Float64Neg(var_x.value()));
561 6 : Goto(&return_x);
562 :
563 : BIND(&return_x);
564 6 : return TNode<Float64T>::UncheckedCast(var_x.value());
565 : }
566 :
567 0 : TNode<BoolT> CodeStubAssembler::IsValidSmi(TNode<Smi> smi) {
568 : if (SmiValuesAre31Bits() && kSystemPointerSize == kInt64Size) {
569 : // Check that the Smi value is properly sign-extended.
570 : TNode<IntPtrT> value = Signed(BitcastTaggedToWord(smi));
571 : return WordEqual(value, ChangeInt32ToIntPtr(TruncateIntPtrToInt32(value)));
572 : }
573 0 : return Int32TrueConstant();
574 : }
575 :
576 0 : Node* CodeStubAssembler::SmiShiftBitsConstant() {
577 428848 : return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
578 : }
579 :
580 10768 : TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
581 10768 : TNode<IntPtrT> value_intptr = ChangeInt32ToIntPtr(value);
582 : TNode<Smi> smi =
583 21536 : BitcastWordToTaggedSigned(WordShl(value_intptr, SmiShiftBitsConstant()));
584 10768 : return smi;
585 : }
586 :
587 7332 : TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
588 : intptr_t constant_value;
589 7332 : if (ToIntPtrConstant(value, constant_value)) {
590 44 : return (static_cast<uintptr_t>(constant_value) <=
591 : static_cast<uintptr_t>(Smi::kMaxValue))
592 : ? Int32TrueConstant()
593 44 : : Int32FalseConstant();
594 : }
595 :
596 14576 : return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
597 : }
598 :
599 76441 : TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
600 : int32_t constant_value;
601 76441 : if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
602 6925 : return SmiConstant(constant_value);
603 : }
604 : TNode<Smi> smi =
605 139032 : BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
606 69516 : return smi;
607 : }
608 :
609 134217 : TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
610 : intptr_t constant_value;
611 134217 : if (ToIntPtrConstant(value, constant_value)) {
612 417 : return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
613 : }
614 267600 : return Signed(WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()));
615 : }
616 :
617 67508 : TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
618 67508 : TNode<IntPtrT> result = SmiUntag(value);
619 67508 : return TruncateIntPtrToInt32(result);
620 : }
621 :
622 48756 : TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
623 97512 : return ChangeInt32ToFloat64(SmiToInt32(value));
624 : }
625 :
626 2016 : TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
627 4032 : return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
628 : }
629 :
630 224 : TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
631 448 : return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
632 : }
633 :
634 4820 : TNode<IntPtrT> CodeStubAssembler::TryIntPtrAdd(TNode<IntPtrT> a,
635 : TNode<IntPtrT> b,
636 : Label* if_overflow) {
637 4820 : TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(a, b);
638 : TNode<BoolT> overflow = Projection<1>(pair);
639 4820 : GotoIf(overflow, if_overflow);
640 4820 : return Projection<0>(pair);
641 : }
642 :
643 4148 : TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
644 : Label* if_overflow) {
645 : if (SmiValuesAre32Bits()) {
646 8296 : return BitcastWordToTaggedSigned(TryIntPtrAdd(
647 16592 : BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs), if_overflow));
648 : } else {
649 : DCHECK(SmiValuesAre31Bits());
650 : TNode<PairT<Int32T, BoolT>> pair =
651 : Int32AddWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
652 : TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
653 : TNode<BoolT> overflow = Projection<1>(pair);
654 : GotoIf(overflow, if_overflow);
655 : TNode<Int32T> result = Projection<0>(pair);
656 : return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
657 : }
658 : }
659 :
660 2916 : TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
661 : Label* if_overflow) {
662 : if (SmiValuesAre32Bits()) {
663 : TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(
664 8748 : BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
665 : TNode<BoolT> overflow = Projection<1>(pair);
666 2916 : GotoIf(overflow, if_overflow);
667 : TNode<IntPtrT> result = Projection<0>(pair);
668 2916 : return BitcastWordToTaggedSigned(result);
669 : } else {
670 : DCHECK(SmiValuesAre31Bits());
671 : TNode<PairT<Int32T, BoolT>> pair =
672 : Int32SubWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
673 : TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
674 : TNode<BoolT> overflow = Projection<1>(pair);
675 : GotoIf(overflow, if_overflow);
676 : TNode<Int32T> result = Projection<0>(pair);
677 : return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
678 : }
679 : }
680 :
681 564 : TNode<Number> CodeStubAssembler::NumberMax(SloppyTNode<Number> a,
682 : SloppyTNode<Number> b) {
683 : // TODO(danno): This could be optimized by specifically handling smi cases.
684 564 : TVARIABLE(Number, result);
685 564 : Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
686 564 : GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
687 564 : GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
688 : result = NanConstant();
689 564 : Goto(&done);
690 : BIND(&greater_than_equal_a);
691 : result = a;
692 564 : Goto(&done);
693 : BIND(&greater_than_equal_b);
694 : result = b;
695 564 : Goto(&done);
696 : BIND(&done);
697 564 : return result.value();
698 : }
699 :
700 620 : TNode<Number> CodeStubAssembler::NumberMin(SloppyTNode<Number> a,
701 : SloppyTNode<Number> b) {
702 : // TODO(danno): This could be optimized by specifically handling smi cases.
703 620 : TVARIABLE(Number, result);
704 620 : Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
705 620 : GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
706 620 : GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
707 : result = NanConstant();
708 620 : Goto(&done);
709 : BIND(&greater_than_equal_a);
710 : result = b;
711 620 : Goto(&done);
712 : BIND(&greater_than_equal_b);
713 : result = a;
714 620 : Goto(&done);
715 : BIND(&done);
716 620 : return result.value();
717 : }
718 :
719 392 : TNode<IntPtrT> CodeStubAssembler::ConvertToRelativeIndex(
720 : TNode<Context> context, TNode<Object> index, TNode<IntPtrT> length) {
721 392 : TVARIABLE(IntPtrT, result);
722 :
723 : TNode<Number> const index_int =
724 392 : ToInteger_Inline(context, index, CodeStubAssembler::kTruncateMinusZero);
725 392 : TNode<IntPtrT> zero = IntPtrConstant(0);
726 :
727 392 : Label done(this);
728 392 : Label if_issmi(this), if_isheapnumber(this, Label::kDeferred);
729 784 : Branch(TaggedIsSmi(index_int), &if_issmi, &if_isheapnumber);
730 :
731 : BIND(&if_issmi);
732 : {
733 : TNode<Smi> const index_smi = CAST(index_int);
734 1568 : result = Select<IntPtrT>(
735 1176 : IntPtrLessThan(SmiUntag(index_smi), zero),
736 1176 : [=] { return IntPtrMax(IntPtrAdd(length, SmiUntag(index_smi)), zero); },
737 1176 : [=] { return IntPtrMin(SmiUntag(index_smi), length); });
738 392 : Goto(&done);
739 : }
740 :
741 : BIND(&if_isheapnumber);
742 : {
743 : // If {index} is a heap number, it is definitely out of bounds. If it is
744 : // negative, {index} = max({length} + {index}),0) = 0'. If it is positive,
745 : // set {index} to {length}.
746 : TNode<HeapNumber> const index_hn = CAST(index_int);
747 392 : TNode<Float64T> const float_zero = Float64Constant(0.);
748 : TNode<Float64T> const index_float = LoadHeapNumberValue(index_hn);
749 784 : result = SelectConstant<IntPtrT>(Float64LessThan(index_float, float_zero),
750 : zero, length);
751 392 : Goto(&done);
752 : }
753 : BIND(&done);
754 392 : return result.value();
755 : }
756 :
757 392 : TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
758 392 : TVARIABLE(Number, var_result);
759 392 : Label return_result(this, &var_result),
760 392 : return_minuszero(this, Label::kDeferred),
761 392 : return_nan(this, Label::kDeferred);
762 :
763 : // Untag {a} and {b}.
764 392 : TNode<Int32T> int_a = SmiToInt32(a);
765 392 : TNode<Int32T> int_b = SmiToInt32(b);
766 :
767 : // Return NaN if {b} is zero.
768 1176 : GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);
769 :
770 : // Check if {a} is non-negative.
771 392 : Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
772 1176 : Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
773 392 : &if_aisnegative);
774 :
775 : BIND(&if_aisnotnegative);
776 : {
777 : // Fast case, don't need to check any other edge cases.
778 392 : TNode<Int32T> r = Int32Mod(int_a, int_b);
779 784 : var_result = SmiFromInt32(r);
780 392 : Goto(&return_result);
781 : }
782 :
783 : BIND(&if_aisnegative);
784 : {
785 : if (SmiValuesAre32Bits()) {
786 : // Check if {a} is kMinInt and {b} is -1 (only relevant if the
787 : // kMinInt is actually representable as a Smi).
788 392 : Label join(this);
789 1176 : GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
790 1176 : GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
791 392 : Goto(&join);
792 : BIND(&join);
793 : }
794 :
795 : // Perform the integer modulus operation.
796 392 : TNode<Int32T> r = Int32Mod(int_a, int_b);
797 :
798 : // Check if {r} is zero, and if so return -0, because we have to
799 : // take the sign of the left hand side {a}, which is negative.
800 1176 : GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);
801 :
802 : // The remainder {r} can be outside the valid Smi range on 32bit
803 : // architectures, so we cannot just say SmiFromInt32(r) here.
804 784 : var_result = ChangeInt32ToTagged(r);
805 392 : Goto(&return_result);
806 : }
807 :
808 : BIND(&return_minuszero);
809 : var_result = MinusZeroConstant();
810 392 : Goto(&return_result);
811 :
812 : BIND(&return_nan);
813 : var_result = NanConstant();
814 392 : Goto(&return_result);
815 :
816 : BIND(&return_result);
817 392 : return var_result.value();
818 : }
819 :
// Multiplies two Smis, producing a Smi when the product fits and a boxed
// HeapNumber otherwise — including the -0.0 case, which is not
// representable as a Smi.
TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
  TVARIABLE(Number, var_result);
  VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
  VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
  Label return_result(this, &var_result);

  // Both {a} and {b} are Smis. Convert them to integers and multiply.
  Node* lhs32 = SmiToInt32(a);
  Node* rhs32 = SmiToInt32(b);
  Node* pair = Int32MulWithOverflow(lhs32, rhs32);

  Node* overflow = Projection(1, pair);

  // Check if the multiplication overflowed.
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_notoverflow);
  {
    // If the answer is zero, we may need to return -0.0, depending on the
    // input.
    Label answer_zero(this), answer_not_zero(this);
    Node* answer = Projection(0, pair);
    Node* zero = Int32Constant(0);
    Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
    BIND(&answer_not_zero);
    {
      var_result = ChangeInt32ToTagged(answer);
      Goto(&return_result);
    }
    BIND(&answer_zero);
    {
      // A zero product is -0.0 iff exactly one operand was negative, which
      // is the case iff the sign bit of (lhs | rhs) is set.
      Node* or_result = Word32Or(lhs32, rhs32);
      Label if_should_be_negative_zero(this), if_should_be_zero(this);
      Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
             &if_should_be_zero);
      BIND(&if_should_be_negative_zero);
      {
        var_result = MinusZeroConstant();
        Goto(&return_result);
      }
      BIND(&if_should_be_zero);
      {
        var_result = SmiConstant(0);
        Goto(&return_result);
      }
    }
  }
  BIND(&if_overflow);
  {
    // Overflow: redo the multiplication in float64 and box the result.
    var_lhs_float64.Bind(SmiToFloat64(a));
    var_rhs_float64.Bind(SmiToFloat64(b));
    Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
    var_result = AllocateHeapNumberWithValue(value);
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
879 :
880 336 : TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
881 : Label* bailout) {
882 : // Both {a} and {b} are Smis. Bailout to floating point division if {divisor}
883 : // is zero.
884 672 : GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);
885 :
886 : // Do floating point division if {dividend} is zero and {divisor} is
887 : // negative.
888 336 : Label dividend_is_zero(this), dividend_is_not_zero(this);
889 672 : Branch(WordEqual(dividend, SmiConstant(0)), ÷nd_is_zero,
890 336 : ÷nd_is_not_zero);
891 :
892 : BIND(÷nd_is_zero);
893 : {
894 672 : GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
895 336 : Goto(÷nd_is_not_zero);
896 : }
897 : BIND(÷nd_is_not_zero);
898 :
899 336 : TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
900 336 : TNode<Int32T> untagged_dividend = SmiToInt32(dividend);
901 :
902 : // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
903 : // if the Smi size is 31) and {divisor} is -1.
904 336 : Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
905 1008 : Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
906 336 : &divisor_is_minus_one, &divisor_is_not_minus_one);
907 :
908 : BIND(&divisor_is_minus_one);
909 : {
910 672 : GotoIf(Word32Equal(
911 : untagged_dividend,
912 672 : Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
913 336 : bailout);
914 336 : Goto(&divisor_is_not_minus_one);
915 : }
916 : BIND(&divisor_is_not_minus_one);
917 :
918 336 : TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
919 336 : TNode<Int32T> truncated = Signed(Int32Mul(untagged_result, untagged_divisor));
920 :
921 : // Do floating point division if the remainder is not 0.
922 672 : GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);
923 :
924 672 : return SmiFromInt32(untagged_result);
925 : }
926 :
927 56 : TNode<Smi> CodeStubAssembler::SmiLexicographicCompare(TNode<Smi> x,
928 : TNode<Smi> y) {
929 : TNode<ExternalReference> smi_lexicographic_compare =
930 56 : ExternalConstant(ExternalReference::smi_lexicographic_compare_function());
931 : TNode<ExternalReference> isolate_ptr =
932 56 : ExternalConstant(ExternalReference::isolate_address(isolate()));
933 56 : return CAST(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged(),
934 : std::make_pair(MachineType::Pointer(), isolate_ptr),
935 : std::make_pair(MachineType::AnyTagged(), x),
936 : std::make_pair(MachineType::AnyTagged(), y)));
937 : }
938 :
939 91924 : TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
940 : SloppyTNode<IntPtrT> value) {
941 91924 : if (Is64()) {
942 91924 : return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
943 : }
944 : return ReinterpretCast<Int32T>(value);
945 : }
946 :
947 167552 : TNode<BoolT> CodeStubAssembler::TaggedIsSmi(SloppyTNode<Object> a) {
948 167552 : return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
949 335104 : IntPtrConstant(0));
950 : }
951 :
952 1680 : TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
953 : return WordEqual(
954 5040 : WordAnd(BitcastMaybeObjectToWord(a), IntPtrConstant(kSmiTagMask)),
955 5040 : IntPtrConstant(0));
956 : }
957 :
958 24684 : TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(SloppyTNode<Object> a) {
959 : return WordNotEqual(
960 24684 : WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
961 49368 : IntPtrConstant(0));
962 : }
963 :
964 1972 : TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
965 : return WordEqual(WordAnd(BitcastTaggedToWord(a),
966 1972 : IntPtrConstant(kSmiTagMask | kSmiSignMask)),
967 3944 : IntPtrConstant(0));
968 : }
969 :
970 0 : TNode<BoolT> CodeStubAssembler::WordIsAligned(SloppyTNode<WordT> word,
971 : size_t alignment) {
972 : DCHECK(base::bits::IsPowerOfTwo(alignment));
973 0 : return WordEqual(IntPtrConstant(0),
974 0 : WordAnd(word, IntPtrConstant(alignment - 1)));
975 : }
976 :
#if DEBUG
// Debug-only Bind overload that additionally records debug info for the
// label in the generated graph.
void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
  CodeAssembler::Bind(label, debug_info);
}
#endif  // DEBUG
982 :
// Plain bind; forwards directly to the base CodeAssembler implementation.
void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
984 :
// Loads the float64 element at Smi {index}, jumping to {if_hole} when the
// slot holds the hole sentinel bit pattern.
TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
    TNode<FixedDoubleArray> array, TNode<Smi> index, Label* if_hole) {
  return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
                                     SMI_PARAMETERS, if_hole);
}
990 :
// Same as above, but takes an untagged IntPtrT {index}.
TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
    TNode<FixedDoubleArray> array, TNode<IntPtrT> index, Label* if_hole) {
  return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
                                     INTPTR_PARAMETERS, if_hole);
}
996 :
// Walks {receiver_map}'s prototype chain and branches to
// {definitely_no_elements} when every prototype up to null is a regular
// receiver whose elements backing store is one of the canonical empty
// stores; branches to {possibly_elements} otherwise.
void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
    Node* receiver_map, Label* definitely_no_elements,
    Label* possibly_elements) {
  CSA_SLOW_ASSERT(this, IsMap(receiver_map));
  VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
  Label loop_body(this, &var_map);
  Node* empty_fixed_array = LoadRoot(RootIndex::kEmptyFixedArray);
  Node* empty_slow_element_dictionary =
      LoadRoot(RootIndex::kEmptySlowElementDictionary);
  Goto(&loop_body);

  BIND(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    // Reaching null means the whole chain was element-free.
    GotoIf(IsNull(prototype), definitely_no_elements);
    Node* prototype_map = LoadMap(prototype);
    TNode<Int32T> prototype_instance_type = LoadMapInstanceType(prototype_map);

    // Pessimistically assume elements if a Proxy, Special API Object,
    // or JSValue wrapper is found on the prototype chain. After this
    // instance type check, it's not necessary to check for interceptors or
    // access checks.
    Label if_custom(this, Label::kDeferred), if_notcustom(this);
    Branch(IsCustomElementsReceiverInstanceType(prototype_instance_type),
           &if_custom, &if_notcustom);

    BIND(&if_custom);
    {
      // For string JSValue wrappers we still support the checks as long
      // as they wrap the empty string.
      GotoIfNot(InstanceTypeEqual(prototype_instance_type, JS_VALUE_TYPE),
                possibly_elements);
      Node* prototype_value = LoadJSValueValue(prototype);
      Branch(IsEmptyString(prototype_value), &if_notcustom, possibly_elements);
    }

    BIND(&if_notcustom);
    {
      Node* prototype_elements = LoadElements(prototype);
      var_map.Bind(prototype_map);
      // Continue walking only while the backing store is known-empty.
      GotoIf(WordEqual(prototype_elements, empty_fixed_array), &loop_body);
      Branch(WordEqual(prototype_elements, empty_slow_element_dictionary),
             &loop_body, possibly_elements);
    }
  }
}
1044 :
// Branches to {if_true} iff {object} is a JSReceiver (object or proxy);
// Smis and primitive heap objects go to {if_false}.
void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
                                           Label* if_false) {
  // Smis are never receivers.
  GotoIf(TaggedIsSmi(object), if_false);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  Branch(IsJSReceiver(object), if_true, if_false);
}
1051 :
// Jumps to {if_true} when the --force-slow-path runtime flag is set.
// Compiles to nothing unless V8_ENABLE_FORCE_SLOW_PATH is defined.
void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
#ifdef V8_ENABLE_FORCE_SLOW_PATH
  Node* const force_slow_path_addr =
      ExternalConstant(ExternalReference::force_slow_path(isolate()));
  Node* const force_slow = Load(MachineType::Uint8(), force_slow_path_addr);

  GotoIf(force_slow, if_true);
#endif
}
1061 :
// Jumps to {if_true} when the debugger is in side-effect-checking
// execution mode (DebugInfo::kSideEffects).
void CodeStubAssembler::GotoIfDebugExecutionModeChecksSideEffects(
    Label* if_true) {
  // The mode is read below as a 32-bit load, so the enum must be at least
  // that wide.
  STATIC_ASSERT(sizeof(DebugInfo::ExecutionMode) >= sizeof(int32_t));

  TNode<ExternalReference> execution_mode_address = ExternalConstant(
      ExternalReference::debug_execution_mode_address(isolate()));
  TNode<Int32T> execution_mode =
      UncheckedCast<Int32T>(Load(MachineType::Int32(), execution_mode_address));

  GotoIf(Word32Equal(execution_mode, Int32Constant(DebugInfo::kSideEffects)),
         if_true);
}
1074 :
// Bump-pointer allocation against the given top/limit addresses, falling
// back to the runtime when the linear allocation area is exhausted or the
// object is large. Supports pretenuring, large-object and double-aligned
// allocation via {flags}.
TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
                                                 AllocationFlags flags,
                                                 TNode<RawPtrT> top_address,
                                                 TNode<RawPtrT> limit_address) {
  Label if_out_of_memory(this, Label::kDeferred);

  // TODO(jgruber,jkummerow): Extract the slow paths (= probably everything
  // but bump pointer allocation) into a builtin to save code space. The
  // size_in_bytes check may be moved there as well since a non-smi
  // size_in_bytes probably doesn't fit into the bump pointer region
  // (double-check that).

  intptr_t size_in_bytes_constant;
  bool size_in_bytes_is_constant = false;
  if (ToIntPtrConstant(size_in_bytes, size_in_bytes_constant)) {
    // Constant sizes are validated at stub-generation time.
    size_in_bytes_is_constant = true;
    CHECK(Internals::IsValidSmi(size_in_bytes_constant));
    CHECK_GT(size_in_bytes_constant, 0);
  } else {
    // Dynamic sizes must fit in a Smi (they are passed to the runtime
    // tagged below).
    GotoIfNot(IsValidPositiveSmi(size_in_bytes), &if_out_of_memory);
  }

  TNode<RawPtrT> top =
      UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), top_address));
  TNode<RawPtrT> limit =
      UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), limit_address));

  // If there's not enough space, call the runtime.
  TVARIABLE(Object, result);
  Label runtime_call(this, Label::kDeferred), no_runtime_call(this), out(this);

  bool needs_double_alignment = flags & kDoubleAlignment;

  if (flags & kAllowLargeObjectAllocation) {
    Label next(this);
    GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);

    // Large objects always go through the runtime.
    if (FLAG_young_generation_large_objects) {
      result = CallRuntime(Runtime::kAllocateInYoungGeneration,
                           NoContextConstant(), SmiTag(size_in_bytes));
    } else {
      TNode<Smi> alignment_flag = SmiConstant(Smi::FromInt(
          AllocateDoubleAlignFlag::encode(needs_double_alignment)));
      result =
          CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                      SmiTag(size_in_bytes), alignment_flag);
    }
    Goto(&out);

    BIND(&next);
  }

  TVARIABLE(IntPtrT, adjusted_size, size_in_bytes);

  if (needs_double_alignment) {
    // If the current top is misaligned, reserve 4 extra bytes so the
    // payload can be shifted onto an aligned address below.
    Label next(this);
    GotoIfNot(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &next);

    adjusted_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
    Goto(&next);

    BIND(&next);
  }

  TNode<IntPtrT> new_top =
      IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value());

  Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
         &no_runtime_call);

  BIND(&runtime_call);
  {
    if (flags & kPretenured) {
      TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
          AllocateDoubleAlignFlag::encode(needs_double_alignment)));
      result =
          CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                      SmiTag(size_in_bytes), runtime_flags);
    } else {
      result = CallRuntime(Runtime::kAllocateInYoungGeneration,
                           NoContextConstant(), SmiTag(size_in_bytes));
    }
    Goto(&out);
  }

  // When there is enough space, return `top' and bump it up.
  BIND(&no_runtime_call);
  {
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                        new_top);

    TVARIABLE(IntPtrT, address, UncheckedCast<IntPtrT>(top));

    if (needs_double_alignment) {
      Label next(this);
      GotoIf(IntPtrEqual(adjusted_size.value(), size_in_bytes), &next);

      // Store a filler and increase the address by 4.
      StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
                          LoadRoot(RootIndex::kOnePointerFillerMap));
      address = IntPtrAdd(UncheckedCast<IntPtrT>(top), IntPtrConstant(4));
      Goto(&next);

      BIND(&next);
    }

    // Tag the raw address to form a HeapObject pointer.
    result = BitcastWordToTagged(
        IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
    Goto(&out);
  }

  if (!size_in_bytes_is_constant) {
    BIND(&if_out_of_memory);
    CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
                NoContextConstant());
    Unreachable();
  }

  BIND(&out);
  return UncheckedCast<HeapObject>(result.value());
}
1196 :
// Bump-pointer allocation with no alignment adjustment; {flags} must not
// request double alignment.
TNode<HeapObject> CodeStubAssembler::AllocateRawUnaligned(
    TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
    TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
  DCHECK_EQ(flags & kDoubleAlignment, 0);
  return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
}
1203 :
// Double-aligned allocation. Only 32-bit hosts need an explicit alignment
// fixup; 64-bit allocation is naturally double aligned, so the flag is
// stripped there.
TNode<HeapObject> CodeStubAssembler::AllocateRawDoubleAligned(
    TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
    TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
#if defined(V8_HOST_ARCH_32_BIT)
  return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
                     limit_address);
#elif defined(V8_HOST_ARCH_64_BIT)
#ifdef V8_COMPRESS_POINTERS
  // TODO(ishell, v8:8875): Consider using aligned allocations once the
  // allocation alignment inconsistency is fixed. For now we keep using
  // unaligned access since both x64 and arm64 architectures (where pointer
  // compression is supported) allow unaligned access to doubles and full words.
#endif  // V8_COMPRESS_POINTERS
  // Allocation on 64 bit machine is naturally double aligned
  return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
                     limit_address);
#else
#error Architecture not supported
#endif
}
1224 :
// Allocates a regular-sized (non-large) object in new space.
TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(
    TNode<IntPtrT> size_in_bytes, AllocationFlags flags) {
  DCHECK(flags == kNone || flags == kDoubleAlignment);
  CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
  return Allocate(size_in_bytes, flags);
}
1231 :
// Central allocation entry point: uses the compiler's optimized inline
// allocation for the common case and falls back to explicit bump-pointer
// allocation against the chosen space's top/limit for double-aligned or
// potentially-large allocations.
TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
                                              AllocationFlags flags) {
  Comment("Allocate");
  bool const new_space = !(flags & kPretenured);
  if (!(flags & kAllowLargeObjectAllocation)) {
    intptr_t size_constant;
    if (ToIntPtrConstant(size_in_bytes, size_constant)) {
      CHECK_LE(size_constant, kMaxRegularHeapObjectSize);
    }
  }
  if (!(flags & kDoubleAlignment) && !(flags & kAllowLargeObjectAllocation)) {
    return OptimizedAllocate(size_in_bytes, new_space ? AllocationType::kYoung
                                                      : AllocationType::kOld);
  }
  TNode<ExternalReference> top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  // The limit address is derived as top_address + kSystemPointerSize; the
  // asserts below pin down that layout assumption for both spaces.
  DCHECK_EQ(kSystemPointerSize,
            ExternalReference::new_space_allocation_limit_address(isolate())
                    .address() -
                ExternalReference::new_space_allocation_top_address(isolate())
                    .address());
  DCHECK_EQ(kSystemPointerSize,
            ExternalReference::old_space_allocation_limit_address(isolate())
                    .address() -
                ExternalReference::old_space_allocation_top_address(isolate())
                    .address());
  TNode<IntPtrT> limit_address =
      IntPtrAdd(ReinterpretCast<IntPtrT>(top_address),
                IntPtrConstant(kSystemPointerSize));

  if (flags & kDoubleAlignment) {
    return AllocateRawDoubleAligned(size_in_bytes, flags,
                                    ReinterpretCast<RawPtrT>(top_address),
                                    ReinterpretCast<RawPtrT>(limit_address));
  } else {
    return AllocateRawUnaligned(size_in_bytes, flags,
                                ReinterpretCast<RawPtrT>(top_address),
                                ReinterpretCast<RawPtrT>(limit_address));
  }
}
1274 :
// Constant-size convenience overload for new-space allocation.
TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
                                                        AllocationFlags flags) {
  CHECK(flags == kNone || flags == kDoubleAlignment);
  DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
}
1281 :
// Constant-size convenience overload for Allocate.
TNode<HeapObject> CodeStubAssembler::Allocate(int size_in_bytes,
                                              AllocationFlags flags) {
  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
}
1286 :
1287 5432 : TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1288 : TNode<IntPtrT> offset) {
1289 : return UncheckedCast<HeapObject>(
1290 10864 : BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset)));
1291 : }
1292 :
1293 2240 : TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1294 : int offset) {
1295 4312 : return InnerAllocate(previous, IntPtrConstant(offset));
1296 : }
1297 :
1298 5904 : TNode<BoolT> CodeStubAssembler::IsRegularHeapObjectSize(TNode<IntPtrT> size) {
1299 : return UintPtrLessThanOrEqual(size,
1300 11808 : IntPtrConstant(kMaxRegularHeapObjectSize));
1301 : }
1302 :
// Implements JavaScript ToBoolean as a branch: false, Smi 0, the empty
// string, undetectable objects (null/undefined/document.all), 0.0/-0.0/NaN
// HeapNumbers, and BigInt zero go to {if_false}; everything else to
// {if_true}.
void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
                                                Label* if_false) {
  Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
      if_bigint(this, Label::kDeferred);
  // Rule out false {value}.
  GotoIf(WordEqual(value, FalseConstant()), if_false);

  // Check if {value} is a Smi or a HeapObject.
  Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);

  BIND(&if_smi);
  {
    // The {value} is a Smi, only need to check against zero.
    BranchIfSmiEqual(CAST(value), SmiConstant(0), if_false, if_true);
  }

  BIND(&if_notsmi);
  {
    // Check if {value} is the empty string.
    GotoIf(IsEmptyString(value), if_false);

    // The {value} is a HeapObject, load its map.
    Node* value_map = LoadMap(value);

    // Only null, undefined and document.all have the undetectable bit set,
    // so we can return false immediately when that bit is set.
    GotoIf(IsUndetectableMap(value_map), if_false);

    // We still need to handle numbers specially, but all other {value}s
    // that make it here yield true.
    GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
    Branch(IsBigInt(value), &if_bigint, if_true);

    BIND(&if_heapnumber);
    {
      // Load the floating point value of {value}.
      Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
                                          MachineType::Float64());

      // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
      Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
             if_true, if_false);
    }

    BIND(&if_bigint);
    {
      // BigInt truthiness requires inspecting the digits; defer to runtime.
      Node* result =
          CallRuntime(Runtime::kBigIntToBoolean, NoContextConstant(), value);
      CSA_ASSERT(this, IsBoolean(result));
      Branch(WordEqual(result, TrueConstant()), if_true, if_false);
    }
  }
}
1356 :
1357 2184 : Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
1358 2184 : Node* frame_pointer = LoadParentFramePointer();
1359 4368 : return Load(rep, frame_pointer, IntPtrConstant(offset));
1360 : }
1361 :
1362 3204 : Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
1363 : MachineType rep) {
1364 6408 : return Load(rep, buffer, IntPtrConstant(offset));
1365 : }
1366 :
1367 910600 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1368 : int offset, MachineType rep) {
1369 : CSA_ASSERT(this, IsStrong(object));
1370 1821200 : return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
1371 : }
1372 :
1373 66768 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1374 : SloppyTNode<IntPtrT> offset,
1375 : MachineType rep) {
1376 : CSA_ASSERT(this, IsStrong(object));
1377 133536 : return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
1378 : }
1379 :
// Loads a Smi field and returns it untagged as an IntPtrT. With 32-bit
// Smis stored in a 64-bit word, only the payload half-word is read.
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
    SloppyTNode<HeapObject> object, int offset) {
  if (SmiValuesAre32Bits()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the 32-bit payload lives in the upper half-word.
    offset += 4;
#endif
    return ChangeInt32ToIntPtr(
        LoadObjectField(object, offset, MachineType::Int32()));
  } else {
    return SmiToIntPtr(
        LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}
1393 :
// Loads a Smi field and returns it untagged as a 32-bit integer. With
// 32-bit Smis, the payload half-word is read directly.
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
                                                                 int offset) {
  if (SmiValuesAre32Bits()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the 32-bit payload lives in the upper half-word.
    offset += 4;
#endif
    return UncheckedCast<Int32T>(
        LoadObjectField(object, offset, MachineType::Int32()));
  } else {
    return SmiToInt32(
        LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}
1407 :
// Loads a Smi from an untagged {base} + {index} address and returns it as
// an untagged IntPtrT (payload-half-word load for 32-bit Smis).
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
  if (SmiValuesAre32Bits()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the 32-bit payload lives in the upper half-word.
    index += 4;
#endif
    return ChangeInt32ToIntPtr(
        Load(MachineType::Int32(), base, IntPtrConstant(index)));
  } else {
    return SmiToIntPtr(
        Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
  }
}
1420 :
// Stores the untagged word {value} into a Smi slot at {offset}. With
// 32-bit Smis the tag/zero half and the payload half are written as two
// 32-bit stores (order chosen by endianness), so no tagging shift is
// needed; otherwise the value is SmiTagged and stored whole.
void CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
  if (SmiValuesAre32Bits()) {
    int zero_offset = offset + 4;
    int payload_offset = offset;
#if V8_TARGET_LITTLE_ENDIAN
    std::swap(zero_offset, payload_offset);
#endif
    StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                        IntPtrConstant(zero_offset), Int32Constant(0));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                        IntPtrConstant(payload_offset),
                        TruncateInt64ToInt32(value));
  } else {
    StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
                        IntPtrConstant(offset), SmiTag(value));
  }
}
1438 :
1439 13588 : TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1440 : SloppyTNode<HeapNumber> object) {
1441 : return TNode<Float64T>::UncheckedCast(LoadObjectField(
1442 88156 : object, HeapNumber::kValueOffset, MachineType::Float64()));
1443 : }
1444 :
1445 27344 : TNode<Map> CodeStubAssembler::LoadMap(SloppyTNode<HeapObject> object) {
1446 : return UncheckedCast<Map>(LoadObjectField(object, HeapObject::kMapOffset,
1447 221548 : MachineType::TaggedPointer()));
1448 : }
1449 :
1450 66952 : TNode<Int32T> CodeStubAssembler::LoadInstanceType(
1451 : SloppyTNode<HeapObject> object) {
1452 66952 : return LoadMapInstanceType(LoadMap(object));
1453 : }
1454 :
1455 10584 : TNode<BoolT> CodeStubAssembler::HasInstanceType(SloppyTNode<HeapObject> object,
1456 : InstanceType instance_type) {
1457 21168 : return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1458 : }
1459 :
1460 504 : TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
1461 : SloppyTNode<HeapObject> object, InstanceType instance_type) {
1462 1512 : return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1463 : }
1464 :
// True when {any_tagged} is a Smi OR a heap object whose instance type
// differs from {type}; Smis short-circuit the instance-type load.
TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
    SloppyTNode<HeapObject> any_tagged, InstanceType type) {
  /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
  TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
  return Select<BoolT>(
      tagged_is_smi, [=]() { return tagged_is_smi; },
      [=]() { return DoesntHaveInstanceType(any_tagged, type); });
}
1473 :
// Returns the property backing store of a fast-mode (non-dictionary)
// object. A Smi in the properties-or-hash slot means "no properties yet"
// and is mapped to the canonical empty FixedArray.
TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
    SloppyTNode<JSObject> object) {
  CSA_SLOW_ASSERT(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))));
  TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
  return Select<HeapObject>(TaggedIsSmi(properties),
                            [=] { return EmptyFixedArrayConstant(); },
                            [=] { return CAST(properties); });
}
1482 :
// Returns the property dictionary of a dictionary-mode object. A Smi in
// the properties-or-hash slot means "no properties yet" and is mapped to
// the canonical empty property dictionary.
TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
    SloppyTNode<JSObject> object) {
  CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
  TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
  return Select<HeapObject>(TaggedIsSmi(properties),
                            [=] { return EmptyPropertyDictionaryConstant(); },
                            [=] { return CAST(properties); });
}
1491 :
1492 392 : TNode<Number> CodeStubAssembler::LoadJSArrayLength(SloppyTNode<JSArray> array) {
1493 : CSA_ASSERT(this, IsJSArray(array));
1494 392 : return CAST(LoadObjectField(array, JSArray::kLengthOffset));
1495 : }
1496 :
1497 0 : TNode<Object> CodeStubAssembler::LoadJSArgumentsObjectWithLength(
1498 : SloppyTNode<JSArgumentsObjectWithLength> array) {
1499 0 : return LoadObjectField(array, JSArgumentsObjectWithLength::kLengthOffset);
1500 : }
1501 :
// Loads the length of a fast JSArray as a Smi; debug asserts check the
// elements kind is fast (or packed sealed/frozen) and the length positive.
TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(
    SloppyTNode<JSArray> array) {
  TNode<Object> length = LoadJSArrayLength(array);
  CSA_ASSERT(this, Word32Or(IsFastElementsKind(LoadElementsKind(array)),
                            IsElementsKindInRange(LoadElementsKind(array),
                                                  PACKED_SEALED_ELEMENTS,
                                                  PACKED_FROZEN_ELEMENTS)));
  // JSArray length is always a positive Smi for fast arrays.
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
  return UncheckedCast<Smi>(length);
}
1513 :
1514 2520 : TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
1515 : SloppyTNode<FixedArrayBase> array) {
1516 : CSA_SLOW_ASSERT(this, IsNotWeakFixedArraySubclass(array));
1517 2520 : return CAST(LoadObjectField(array, FixedArrayBase::kLengthOffset));
1518 : }
1519 :
1520 1680 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
1521 : SloppyTNode<FixedArrayBase> array) {
1522 61788 : return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1523 : }
1524 :
1525 0 : TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
1526 : TNode<FeedbackVector> vector) {
1527 : return ChangeInt32ToIntPtr(
1528 0 : LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
1529 : }
1530 :
1531 0 : TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
1532 : TNode<WeakFixedArray> array) {
1533 0 : return CAST(LoadObjectField(array, WeakFixedArray::kLengthOffset));
1534 : }
1535 :
1536 728 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
1537 : SloppyTNode<WeakFixedArray> array) {
1538 1296 : return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
1539 : }
1540 :
1541 56 : TNode<Int32T> CodeStubAssembler::LoadNumberOfDescriptors(
1542 : TNode<DescriptorArray> array) {
1543 : return UncheckedCast<Int32T>(
1544 : LoadObjectField(array, DescriptorArray::kNumberOfDescriptorsOffset,
1545 2024 : MachineType::Int16()));
1546 : }
1547 :
1548 336 : TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
1549 : CSA_SLOW_ASSERT(this, IsMap(map));
1550 : return UncheckedCast<Int32T>(
1551 27068 : LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8()));
1552 : }
1553 :
1554 504 : TNode<Int32T> CodeStubAssembler::LoadMapBitField2(SloppyTNode<Map> map) {
1555 : CSA_SLOW_ASSERT(this, IsMap(map));
1556 : return UncheckedCast<Int32T>(
1557 9916 : LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()));
1558 : }
1559 :
1560 1680 : TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(SloppyTNode<Map> map) {
1561 : CSA_SLOW_ASSERT(this, IsMap(map));
1562 : return UncheckedCast<Uint32T>(
1563 6952 : LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()));
1564 : }
1565 :
1566 6060 : TNode<Int32T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
1567 : return UncheckedCast<Int32T>(
1568 123932 : LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint16()));
1569 : }
1570 :
1571 9020 : TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
1572 : CSA_SLOW_ASSERT(this, IsMap(map));
1573 : Node* bit_field2 = LoadMapBitField2(map);
1574 9020 : return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
1575 : }
1576 :
1577 2576 : TNode<Int32T> CodeStubAssembler::LoadElementsKind(
1578 : SloppyTNode<HeapObject> object) {
1579 2576 : return LoadMapElementsKind(LoadMap(object));
1580 : }
1581 :
1582 6384 : TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
1583 : SloppyTNode<Map> map) {
1584 : CSA_SLOW_ASSERT(this, IsMap(map));
1585 6384 : return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
1586 : }
1587 :
1588 5432 : TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {
1589 : CSA_SLOW_ASSERT(this, IsMap(map));
1590 5432 : return CAST(LoadObjectField(map, Map::kPrototypeOffset));
1591 : }
1592 :
// Returns the map's PrototypeInfo, or jumps to {if_no_proto_info} when the
// transitions-or-prototype-info slot holds anything else (a Smi, a weak or
// cleared reference, or a strong object that is not a PrototypeInfo).
TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
    SloppyTNode<Map> map, Label* if_no_proto_info) {
  Label if_strong_heap_object(this);
  CSA_ASSERT(this, IsMap(map));
  TNode<MaybeObject> maybe_prototype_info =
      LoadMaybeWeakObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
  TVARIABLE(Object, prototype_info);
  DispatchMaybeObject(maybe_prototype_info, if_no_proto_info, if_no_proto_info,
                      if_no_proto_info, &if_strong_heap_object,
                      &prototype_info);

  BIND(&if_strong_heap_object);
  // The slot is shared with TransitionArrays; only accept an actual
  // PrototypeInfo by checking its map against the canonical root map.
  GotoIfNot(WordEqual(LoadMap(CAST(prototype_info.value())),
                      LoadRoot(RootIndex::kPrototypeInfoMap)),
            if_no_proto_info);
  return CAST(prototype_info.value());
}
1610 :
             : // Returns |map|'s instance size in words (stored as a byte-sized field,
             : // zero-extended to word width).
1611 4720 : TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(
1612 : SloppyTNode<Map> map) {
1613 : CSA_SLOW_ASSERT(this, IsMap(map));
1614 : return ChangeInt32ToIntPtr(LoadObjectField(
1615 9440 : map, Map::kInstanceSizeInWordsOffset, MachineType::Uint8()));
1616 : }
1617 :
             : // Returns the word index at which in-object properties start. Only valid
             : // for JSObject maps (asserted below); the byte field is shared with the
             : // constructor-function index used by primitive maps.
1618 2132 : TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
1619 : SloppyTNode<Map> map) {
1620 : CSA_SLOW_ASSERT(this, IsMap(map));
1621 : // See Map::GetInObjectPropertiesStartInWords() for details.
1622 : CSA_ASSERT(this, IsJSObjectMap(map));
1623 : return ChangeInt32ToIntPtr(LoadObjectField(
1624 : map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1625 4264 : MachineType::Uint8()));
1626 : }
1627 :
             : // Returns the constructor-function index of a primitive map (asserted
             : // below); reads the same shared byte field as the getter above.
1628 56 : TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1629 : SloppyTNode<Map> map) {
1630 : CSA_SLOW_ASSERT(this, IsMap(map));
1631 : // See Map::GetConstructorFunctionIndex() for details.
1632 : CSA_ASSERT(this, IsPrimitiveInstanceType(LoadMapInstanceType(map)));
1633 : return ChangeInt32ToIntPtr(LoadObjectField(
1634 : map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1635 112 : MachineType::Uint8()));
1636 : }
1637 :
             : // Returns |map|'s constructor. The kConstructorOrBackPointer slot may hold
             : // a back pointer (another Map) instead, so follow the chain of maps until
             : // a non-map value (Smi or non-map heap object) is reached.
1638 0 : TNode<Object> CodeStubAssembler::LoadMapConstructor(SloppyTNode<Map> map) {
1639 : CSA_SLOW_ASSERT(this, IsMap(map));
1640 0 : TVARIABLE(Object, result,
1641 : LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1642 :
1643 0 : Label done(this), loop(this, &result);
1644 0 : Goto(&loop);
1645 : BIND(&loop);
1646 : {
             : // A Smi can never be a map, so we are done.
1647 0 : GotoIf(TaggedIsSmi(result.value()), &done);
1648 : Node* is_map_type =
1649 0 : InstanceTypeEqual(LoadInstanceType(CAST(result.value())), MAP_TYPE);
1650 0 : GotoIfNot(is_map_type, &done);
             : // Still a back pointer: keep walking up the transition chain.
1651 : result = LoadObjectField(CAST(result.value()),
1652 : Map::kConstructorOrBackPointerOffset);
1653 0 : Goto(&loop);
1654 : }
1655 : BIND(&done);
1656 0 : return result.value();
1657 : }
1658 :
             : // Returns the enum-cache length encoded in |map|'s bit field 3.
1659 840 : Node* CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
1660 : CSA_SLOW_ASSERT(this, IsMap(map));
1661 : Node* bit_field3 = LoadMapBitField3(map);
1662 1680 : return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
1663 : }
1664 :
             : // Returns |map|'s back pointer if the shared constructor-or-back-pointer
             : // slot holds a Map, and undefined otherwise (i.e. when it holds the
             : // constructor).
1665 0 : TNode<Object> CodeStubAssembler::LoadMapBackPointer(SloppyTNode<Map> map) {
1666 : TNode<HeapObject> object =
1667 : CAST(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1668 0 : return Select<Object>(IsMap(object), [=] { return object; },
1669 0 : [=] { return UndefinedConstant(); });
1670 : }
1671 :
             : // Jumps to |bailout| unless the receiver plausibly has only simple, fast
             : // properties: bails out for custom-elements-receiver instance types, for
             : // dictionary-mode maps and for maps with a hidden prototype. Returns the
             : // already-loaded bit field 3 so the caller can reuse it.
1672 112 : TNode<Uint32T> CodeStubAssembler::EnsureOnlyHasSimpleProperties(
1673 : TNode<Map> map, TNode<Int32T> instance_type, Label* bailout) {
1674 : // This check can have false positives, since it applies to any JSValueType.
1675 224 : GotoIf(IsCustomElementsReceiverInstanceType(instance_type), bailout);
1676 :
1677 : TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
             : // A single masked test covers both dictionary-mode and hidden-prototype.
1678 224 : GotoIf(IsSetWord32(bit_field3, Map::IsDictionaryMapBit::kMask |
1679 112 : Map::HasHiddenPrototypeBit::kMask),
1680 112 : bailout);
1681 :
1682 112 : return bit_field3;
1683 : }
1684 :
             : // Returns |receiver|'s identity hash, decoding it from whatever currently
             : // lives in the properties-or-hash slot: a raw Smi hash, a PropertyArray
             : // (hash packed into the length-and-hash word), a NameDictionary (hash in a
             : // dedicated element), or a plain FixedArray (no hash stored). If
             : // |if_no_hash| is non-null, jumps there when the hash is the
             : // kNoHashSentinel.
1685 672 : TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
1686 : SloppyTNode<Object> receiver, Label* if_no_hash) {
1687 672 : TVARIABLE(IntPtrT, var_hash);
1688 672 : Label done(this), if_smi(this), if_property_array(this),
1689 672 : if_property_dictionary(this), if_fixed_array(this);
1690 :
1691 : TNode<Object> properties_or_hash =
1692 : LoadObjectField(TNode<HeapObject>::UncheckedCast(receiver),
1693 : JSReceiver::kPropertiesOrHashOffset);
             : // A Smi in the slot is the hash itself.
1694 1344 : GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);
1695 :
1696 : TNode<HeapObject> properties =
1697 : TNode<HeapObject>::UncheckedCast(properties_or_hash);
1698 672 : TNode<Int32T> properties_instance_type = LoadInstanceType(properties);
1699 :
1700 1344 : GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
1701 672 : &if_property_array);
1702 1344 : Branch(InstanceTypeEqual(properties_instance_type, NAME_DICTIONARY_TYPE),
1703 672 : &if_property_dictionary, &if_fixed_array);
1704 :
1705 : BIND(&if_fixed_array);
1706 : {
             : // Plain FixedArray backing store: no hash has been stored yet.
1707 672 : var_hash = IntPtrConstant(PropertyArray::kNoHashSentinel);
1708 672 : Goto(&done);
1709 : }
1710 :
1711 : BIND(&if_smi);
1712 : {
1713 1344 : var_hash = SmiUntag(TNode<Smi>::UncheckedCast(properties_or_hash));
1714 672 : Goto(&done);
1715 : }
1716 :
1717 : BIND(&if_property_array);
1718 : {
             : // Hash shares a word with the array length; decode just the hash bits.
1719 : TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
1720 672 : properties, PropertyArray::kLengthAndHashOffset);
1721 : var_hash = TNode<IntPtrT>::UncheckedCast(
1722 : DecodeWord<PropertyArray::HashField>(length_and_hash));
1723 672 : Goto(&done);
1724 : }
1725 :
1726 : BIND(&if_property_dictionary);
1727 : {
             : // Dictionary-mode: the hash lives at a fixed index in the dictionary.
1728 672 : var_hash = SmiUntag(CAST(LoadFixedArrayElement(
1729 : CAST(properties), NameDictionary::kObjectHashIndex)));
1730 672 : Goto(&done);
1731 : }
1732 :
1733 : BIND(&done);
1734 672 : if (if_no_hash != nullptr) {
1735 672 : GotoIf(IntPtrEqual(var_hash.value(),
1736 672 : IntPtrConstant(PropertyArray::kNoHashSentinel)),
1737 336 : if_no_hash);
1738 : }
1739 672 : return var_hash.value();
1740 : }
1741 :
             : // Returns the raw hash field of |name| (hash value plus flag bits).
1742 404 : TNode<Uint32T> CodeStubAssembler::LoadNameHashField(SloppyTNode<Name> name) {
1743 : CSA_ASSERT(this, IsName(name));
1744 404 : return LoadObjectField<Uint32T>(name, Name::kHashFieldOffset);
1745 : }
1746 :
             : // Returns the hash value of |name|, shifted out of the hash field. If
             : // |if_hash_not_computed| is non-null, jumps there when the hash has not
             : // been computed yet; otherwise the caller must know the hash is valid.
1747 7080 : TNode<Uint32T> CodeStubAssembler::LoadNameHash(SloppyTNode<Name> name,
1748 : Label* if_hash_not_computed) {
1749 : TNode<Uint32T> hash_field = LoadNameHashField(name);
1750 7080 : if (if_hash_not_computed != nullptr) {
1751 1008 : GotoIf(IsSetWord32(hash_field, Name::kHashNotComputedMask),
1752 336 : if_hash_not_computed);
1753 : }
1754 14160 : return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
1755 : }
1756 :
             : // Returns |string|'s length as a Smi (built on the word-width getter).
1757 6108 : TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(
1758 : SloppyTNode<String> string) {
1759 12216 : return SmiFromIntPtr(LoadStringLengthAsWord(string));
1760 : }
1761 :
             : // Returns |string|'s length zero-extended to word width.
1762 11716 : TNode<IntPtrT> CodeStubAssembler::LoadStringLengthAsWord(
1763 : SloppyTNode<String> string) {
1764 23432 : return Signed(ChangeUint32ToWord(LoadStringLengthAsWord32(string)));
1765 : }
1766 :
             : // Returns |string|'s length as stored: a raw uint32 field.
1767 224 : TNode<Uint32T> CodeStubAssembler::LoadStringLengthAsWord32(
1768 : SloppyTNode<String> string) {
1769 : CSA_ASSERT(this, IsString(string));
1770 224 : return LoadObjectField<Uint32T>(string, String::kLengthOffset);
1771 : }
1772 :
             : // Returns an untagged pointer to the first character of a sequential
             : // string. One- and two-byte sequential strings share the same header size
             : // (static-asserted below), so a single offset works for both.
1773 56 : Node* CodeStubAssembler::PointerToSeqStringData(Node* seq_string) {
1774 : CSA_ASSERT(this, IsString(seq_string));
1775 : CSA_ASSERT(this,
1776 : IsSequentialStringInstanceType(LoadInstanceType(seq_string)));
1777 : STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
1778 : return IntPtrAdd(
1779 : BitcastTaggedToWord(seq_string),
1780 168 : IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag));
1781 : }
1782 :
             : // Returns the wrapped primitive value of a JSValue wrapper object.
1783 112 : Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
1784 : CSA_ASSERT(this, IsJSValue(object));
1785 112 : return LoadObjectField(object, JSValue::kValueOffset);
1786 : }
1787 :
             : // Four-way dispatch on the tag of |maybe_object|:
             : //   Smi               -> |if_smi|     (*extracted = the Smi)
             : //   cleared weak ref  -> |if_cleared| (*extracted untouched)
             : //   weak reference    -> |if_weak|    (*extracted = unwrapped target)
             : //   strong reference  -> |if_strong|  (*extracted = the object)
1788 448 : void CodeStubAssembler::DispatchMaybeObject(TNode<MaybeObject> maybe_object,
1789 : Label* if_smi, Label* if_cleared,
1790 : Label* if_weak, Label* if_strong,
1791 : TVariable<Object>* extracted) {
1792 896 : Label inner_if_smi(this), inner_if_strong(this);
1793 :
1794 896 : GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);
1795 :
1796 896 : GotoIf(IsCleared(maybe_object), if_cleared);
1797 :
             : // Strong references carry kHeapObjectTag in the low tag bits.
1798 2688 : GotoIf(Word32Equal(Word32And(TruncateIntPtrToInt32(
1799 896 : BitcastMaybeObjectToWord(maybe_object)),
1800 1344 : Int32Constant(kHeapObjectTagMask)),
1801 1344 : Int32Constant(kHeapObjectTag)),
1802 448 : &inner_if_strong);
1803 :
             : // Weak case: strip the weak bit to recover the target object.
1804 896 : *extracted =
1805 : BitcastWordToTagged(WordAnd(BitcastMaybeObjectToWord(maybe_object),
1806 1344 : IntPtrConstant(~kWeakHeapObjectMask)));
1807 448 : Goto(if_weak);
1808 :
1809 : BIND(&inner_if_smi);
1810 : *extracted = CAST(maybe_object);
1811 448 : Goto(if_smi);
1812 :
1813 : BIND(&inner_if_strong);
1814 : *extracted = CAST(maybe_object);
1815 448 : Goto(if_strong);
1816 448 : }
1817 :
             : // True iff |value| is a strong reference (low tag bits == kHeapObjectTag).
1818 560 : TNode<BoolT> CodeStubAssembler::IsStrong(TNode<MaybeObject> value) {
1819 : return WordEqual(WordAnd(BitcastMaybeObjectToWord(value),
1820 1680 : IntPtrConstant(kHeapObjectTagMask)),
1821 1680 : IntPtrConstant(kHeapObjectTag));
1822 : }
1823 :
             : // Returns |value| as a HeapObject, jumping to |if_not_strong| when it is
             : // not a strong reference.
1824 560 : TNode<HeapObject> CodeStubAssembler::GetHeapObjectIfStrong(
1825 : TNode<MaybeObject> value, Label* if_not_strong) {
1826 1120 : GotoIfNot(IsStrong(value), if_not_strong);
1827 560 : return CAST(value);
1828 : }
1829 :
             : // True iff |value| carries the weak tag (covers live and cleared weak
             : // references alike).
1830 504 : TNode<BoolT> CodeStubAssembler::IsWeakOrCleared(TNode<MaybeObject> value) {
1831 : return Word32Equal(
1832 2520 : Word32And(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1833 1512 : Int32Constant(kHeapObjectTagMask)),
1834 1512 : Int32Constant(kWeakHeapObjectTag));
1835 : }
1836 :
             : // True iff |value| is the cleared weak reference sentinel. Comparing the
             : // low 32 bits is sufficient to identify the sentinel pattern.
1837 7280 : TNode<BoolT> CodeStubAssembler::IsCleared(TNode<MaybeObject> value) {
1838 21840 : return Word32Equal(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1839 21840 : Int32Constant(kClearedWeakHeapObjectLower32));
1840 : }
1841 :
             : // Negation of IsCleared().
1842 1288 : TNode<BoolT> CodeStubAssembler::IsNotCleared(TNode<MaybeObject> value) {
1843 3864 : return Word32NotEqual(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1844 3864 : Int32Constant(kClearedWeakHeapObjectLower32));
1845 : }
1846 :
             : // Unwraps a weak reference that is asserted to be live, by masking off the
             : // weak tag bit.
1847 6216 : TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
1848 : TNode<MaybeObject> value) {
1849 : CSA_ASSERT(this, IsWeakOrCleared(value));
1850 : CSA_ASSERT(this, IsNotCleared(value));
1851 : return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
1852 18648 : BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
1853 : }
1854 :
             : // As above, but branches to |if_cleared| when the reference was cleared.
1855 4480 : TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
1856 : TNode<MaybeObject> value, Label* if_cleared) {
1857 8960 : GotoIf(IsCleared(value), if_cleared);
1858 4480 : return GetHeapObjectAssumeWeak(value);
1859 : }
1860 :
             : // True iff |object|, ignoring its weak bit, points at |value|.
1861 2184 : TNode<BoolT> CodeStubAssembler::IsWeakReferenceTo(TNode<MaybeObject> object,
1862 : TNode<Object> value) {
1863 : return WordEqual(WordAnd(BitcastMaybeObjectToWord(object),
1864 6552 : IntPtrConstant(~kWeakHeapObjectMask)),
1865 6552 : BitcastTaggedToWord(value));
1866 : }
1867 :
             : // True iff |object| is bit-identical to |value| (strong reference match).
1868 1512 : TNode<BoolT> CodeStubAssembler::IsStrongReferenceTo(TNode<MaybeObject> object,
1869 : TNode<Object> value) {
1870 3024 : return WordEqual(BitcastMaybeObjectToWord(object),
1871 4536 : BitcastTaggedToWord(value));
1872 : }
1873 :
             : // Negation of IsWeakReferenceTo().
1874 1288 : TNode<BoolT> CodeStubAssembler::IsNotWeakReferenceTo(TNode<MaybeObject> object,
1875 : TNode<Object> value) {
1876 : return WordNotEqual(WordAnd(BitcastMaybeObjectToWord(object),
1877 3864 : IntPtrConstant(~kWeakHeapObjectMask)),
1878 3864 : BitcastTaggedToWord(value));
1879 : }
1880 :
             : // Produces a weak reference to |value| by setting the weak tag bit.
1881 2240 : TNode<MaybeObject> CodeStubAssembler::MakeWeak(TNode<HeapObject> value) {
1882 4480 : return ReinterpretCast<MaybeObject>(BitcastWordToTagged(
1883 8960 : WordOr(BitcastTaggedToWord(value), IntPtrConstant(kWeakHeapObjectTag))));
1884 : }
1885 :
             : // LoadArrayLength specializations: per-array-type length in elements, used
             : // by LoadArrayElement's bounds assertions below. DescriptorArray lengths
             : // are measured in kEntrySize-slot entries, hence the multiplication.
1886 : template <>
1887 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<FixedArray> array) {
1888 0 : return LoadAndUntagFixedArrayBaseLength(array);
1889 : }
1890 :
1891 : template <>
1892 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<WeakFixedArray> array) {
1893 0 : return LoadAndUntagWeakFixedArrayLength(array);
1894 : }
1895 :
1896 : template <>
1897 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<PropertyArray> array) {
1898 0 : return LoadPropertyArrayLength(array);
1899 : }
1900 :
1901 : template <>
1902 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
1903 : TNode<DescriptorArray> array) {
1904 : return IntPtrMul(ChangeInt32ToIntPtr(LoadNumberOfDescriptors(array)),
1905 0 : IntPtrConstant(DescriptorArray::kEntrySize));
1906 : }
1907 :
1908 : template <>
1909 0 : TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
1910 : TNode<TransitionArray> array) {
1911 0 : return LoadAndUntagWeakFixedArrayLength(array);
1912 : }
1913 :
             : // Generic tagged-element load from any supported array type. |index_node|
             : // is interpreted per |parameter_mode|; |additional_offset| is a byte
             : // offset (kTaggedSize-aligned) added on top of the computed element
             : // offset. |needs_poisoning| selects speculation-poisoned loads.
1914 : template <typename Array>
1915 90424 : TNode<MaybeObject> CodeStubAssembler::LoadArrayElement(
1916 : TNode<Array> array, int array_header_size, Node* index_node,
1917 : int additional_offset, ParameterMode parameter_mode,
1918 : LoadSensitivity needs_poisoning) {
1919 : CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
1920 : ParameterToIntPtr(index_node, parameter_mode),
1921 : IntPtrConstant(0)));
1922 : DCHECK(IsAligned(additional_offset, kTaggedSize));
             : // Fold header size, extra offset and the tag into one constant.
1923 90424 : int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
1924 : TNode<IntPtrT> offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
1925 90424 : parameter_mode, header_size);
1926 : CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(array),
1927 : array_header_size));
1928 : return UncheckedCast<MaybeObject>(
1929 90424 : Load(MachineType::AnyTagged(), array, offset, needs_poisoning));
1930 : }
1931 :
             : // Explicit instantiations for the array types loaded from other files.
1932 : template TNode<MaybeObject>
1933 : CodeStubAssembler::LoadArrayElement<TransitionArray>(TNode<TransitionArray>,
1934 : int, Node*, int,
1935 : ParameterMode,
1936 : LoadSensitivity);
1937 :
1938 : template TNode<MaybeObject>
1939 : CodeStubAssembler::LoadArrayElement<DescriptorArray>(TNode<DescriptorArray>,
1940 : int, Node*, int,
1941 : ParameterMode,
1942 : LoadSensitivity);
1943 :
             : // Emits a CSA_CHECK that |index| + |additional_offset| (in elements) is
             : // below the array's length. No-op when FLAG_fixed_array_bounds_checks is
             : // disabled. Handles both Smi- and IntPtr-mode indices.
1944 66660 : void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
1945 : Node* index,
1946 : int additional_offset,
1947 : ParameterMode parameter_mode) {
1948 : if (!FLAG_fixed_array_bounds_checks) return;
1949 : DCHECK(IsAligned(additional_offset, kTaggedSize));
1950 66660 : if (parameter_mode == ParameterMode::SMI_PARAMETERS) {
1951 : TNode<Smi> effective_index;
1952 8688 : Smi constant_index;
1953 8688 : bool index_is_constant = ToSmiConstant(index, &constant_index);
             : // Fold constant indices at graph-build time; otherwise emit a SmiAdd
             : // only when there is a non-zero extra offset.
1954 8688 : if (index_is_constant) {
1955 4 : effective_index = SmiConstant(Smi::ToInt(constant_index) +
1956 4 : additional_offset / kTaggedSize);
1957 8684 : } else if (additional_offset != 0) {
1958 0 : effective_index =
1959 : SmiAdd(CAST(index), SmiConstant(additional_offset / kTaggedSize));
1960 : } else {
1961 : effective_index = CAST(index);
1962 : }
1963 8688 : CSA_CHECK(this, SmiBelow(effective_index, LoadFixedArrayBaseLength(array)));
1964 : } else {
1965 : // IntPtrAdd does constant-folding automatically.
1966 : TNode<IntPtrT> effective_index =
1967 : IntPtrAdd(UncheckedCast<IntPtrT>(index),
1968 57972 : IntPtrConstant(additional_offset / kTaggedSize));
1969 115944 : CSA_CHECK(this, UintPtrLessThan(effective_index,
1970 57972 : LoadAndUntagFixedArrayBaseLength(array)));
1971 : }
1972 : }
1973 :
             : // Loads a (strong) tagged element from a FixedArray, with an optional
             : // explicit bounds check controlled by |check_bounds|; the result is known
             : // to be an Object (not a weak reference), hence the CAST.
1974 71900 : TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
1975 : TNode<FixedArray> object, Node* index_node, int additional_offset,
1976 : ParameterMode parameter_mode, LoadSensitivity needs_poisoning,
1977 : CheckBounds check_bounds) {
1978 : CSA_ASSERT(this, IsFixedArraySubclass(object));
1979 : CSA_ASSERT(this, IsNotWeakFixedArraySubclass(object));
1980 71900 : if (NeedsBoundsCheck(check_bounds)) {
1981 30800 : FixedArrayBoundsCheck(object, index_node, additional_offset,
1982 30800 : parameter_mode);
1983 : }
1984 : TNode<MaybeObject> element =
1985 : LoadArrayElement(object, FixedArray::kHeaderSize, index_node,
1986 71900 : additional_offset, parameter_mode, needs_poisoning);
1987 71900 : return CAST(element);
1988 : }
1989 :
             : // Loads a tagged element from a PropertyArray at an IntPtr |index|.
1990 560 : TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
1991 : TNode<PropertyArray> object, SloppyTNode<IntPtrT> index) {
1992 : int additional_offset = 0;
1993 : ParameterMode parameter_mode = INTPTR_PARAMETERS;
1994 : LoadSensitivity needs_poisoning = LoadSensitivity::kSafe;
1995 1852 : return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
1996 : additional_offset, parameter_mode,
1997 : needs_poisoning));
1998 : }
1999 :
             : // Returns a PropertyArray's length in elements, decoded from the word it
             : // shares with the hash.
2000 56 : TNode<IntPtrT> CodeStubAssembler::LoadPropertyArrayLength(
2001 : TNode<PropertyArray> object) {
2002 : TNode<IntPtrT> value =
2003 56 : LoadAndUntagObjectField(object, PropertyArray::kLengthAndHashOffset);
2004 56 : return Signed(DecodeWord<PropertyArray::LengthField>(value));
2005 : }
2006 :
             : // Returns the raw backing-store pointer of a typed array's elements. For
             : // on-heap arrays base_pointer is the elements object itself and
             : // external_pointer is a relative offset; for off-heap arrays base_pointer
             : // is Smi zero, so the sum is correct in both cases.
2007 6608 : TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayBackingStore(
2008 : TNode<FixedTypedArrayBase> typed_array) {
2009 : // Backing store = external_pointer + base_pointer.
2010 : Node* external_pointer =
2011 6608 : LoadObjectField(typed_array, FixedTypedArrayBase::kExternalPointerOffset,
2012 6608 : MachineType::Pointer());
2013 : Node* base_pointer =
2014 : LoadObjectField(typed_array, FixedTypedArrayBase::kBasePointerOffset);
2015 : return UncheckedCast<RawPtrT>(
2016 13216 : IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer)));
2017 : }
2018 :
             : // Fast path for ON-HEAP typed arrays only: the backing store is at a fixed
             : // offset from the elements object, so it can be computed without loading
             : // the external/base pointer fields. Debug builds verify the shortcut
             : // against the generic computation.
2019 56 : TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayOnHeapBackingStore(
2020 : TNode<FixedTypedArrayBase> typed_array) {
2021 : // This is specialized method of retrieving the backing store pointer for on
2022 : // heap allocated typed array buffer. On heap allocated buffer's backing
2023 : // stores are a fixed offset from the pointer to a typed array's elements. See
2024 : // TypedArrayBuiltinsAssembler::AllocateOnHeapElements().
2025 : TNode<WordT> backing_store =
2026 : IntPtrAdd(BitcastTaggedToWord(typed_array),
2027 : IntPtrConstant(
2028 168 : FixedTypedArrayBase::ExternalPointerValueForOnHeapArray()));
2029 :
2030 : #ifdef DEBUG
2031 : // Verify that this is an on heap backing store.
2032 : TNode<RawPtrT> expected_backing_store_pointer =
2033 : LoadFixedTypedArrayBackingStore(typed_array);
2034 : CSA_ASSERT(this, WordEqual(backing_store, expected_backing_store_pointer));
2035 : #endif
2036 :
2037 56 : return UncheckedCast<RawPtrT>(backing_store);
2038 : }
2039 :
             : // Loads a signed 64-bit element at |offset| from |data_pointer| and boxes
             : // it as a BigInt. On 64-bit targets one word load suffices; on 32-bit
             : // targets the value is loaded as two words in platform endianness order.
2040 336 : Node* CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
2041 : Node* data_pointer, Node* offset) {
2042 336 : if (Is64()) {
2043 : TNode<IntPtrT> value = UncheckedCast<IntPtrT>(
2044 336 : Load(MachineType::IntPtr(), data_pointer, offset));
2045 672 : return BigIntFromInt64(value);
2046 : } else {
2047 : DCHECK(!Is64());
2048 : #if defined(V8_TARGET_BIG_ENDIAN)
2049 : TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
2050 : Load(MachineType::UintPtr(), data_pointer, offset));
2051 : TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
2052 : Load(MachineType::UintPtr(), data_pointer,
2053 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2054 : #else
2055 : TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
2056 0 : Load(MachineType::UintPtr(), data_pointer, offset));
2057 : TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
2058 : Load(MachineType::UintPtr(), data_pointer,
2059 0 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2060 : #endif
2061 0 : return BigIntFromInt32Pair(low, high);
2062 : }
2063 : }
2064 :
             : // Builds a BigInt from a signed 64-bit value given as two machine words
             : // (32-bit targets only). Produces the canonical representation: zero
             : // digits for 0, one digit when the high word is redundant, two digits
             : // otherwise, with the sign carried in the BigInt bitfield.
2065 0 : TNode<BigInt> CodeStubAssembler::BigIntFromInt32Pair(TNode<IntPtrT> low,
2066 : TNode<IntPtrT> high) {
2067 : DCHECK(!Is64());
2068 0 : TVARIABLE(BigInt, var_result);
2069 0 : TVARIABLE(Word32T, var_sign, Int32Constant(BigInt::SignBits::encode(false)));
2070 : TVARIABLE(IntPtrT, var_high, high);
2071 : TVARIABLE(IntPtrT, var_low, low);
2072 0 : Label high_zero(this), negative(this), allocate_one_digit(this),
2073 0 : allocate_two_digits(this), if_zero(this), done(this);
2074 :
2075 0 : GotoIf(WordEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
2076 0 : Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
2077 0 : &allocate_two_digits);
2078 :
2079 : BIND(&high_zero);
2080 0 : Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
2081 0 : &allocate_one_digit);
2082 :
2083 : BIND(&negative);
2084 : {
             : // BigInt digits store the magnitude; record the sign separately.
2085 0 : var_sign = Int32Constant(BigInt::SignBits::encode(true));
2086 : // We must negate the value by computing "0 - (high|low)", performing
2087 : // both parts of the subtraction separately and manually taking care
2088 : // of the carry bit (which is 1 iff low != 0).
2089 0 : var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
2090 0 : Label carry(this), no_carry(this);
2091 0 : Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
2092 : BIND(&carry);
2093 0 : var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
2094 0 : Goto(&no_carry);
2095 : BIND(&no_carry);
2096 0 : var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
2097 : // var_high was non-zero going into this block, but subtracting the
2098 : // carry bit from it could bring us back onto the "one digit" path.
2099 0 : Branch(WordEqual(var_high.value(), IntPtrConstant(0)), &allocate_one_digit,
2100 0 : &allocate_two_digits);
2101 : }
2102 :
2103 : BIND(&allocate_one_digit);
2104 : {
2105 0 : var_result = AllocateRawBigInt(IntPtrConstant(1));
2106 0 : StoreBigIntBitfield(var_result.value(),
2107 : Word32Or(var_sign.value(),
2108 0 : Int32Constant(BigInt::LengthBits::encode(1))));
2109 : StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2110 0 : Goto(&done);
2111 : }
2112 :
2113 : BIND(&allocate_two_digits);
2114 : {
2115 0 : var_result = AllocateRawBigInt(IntPtrConstant(2));
2116 0 : StoreBigIntBitfield(var_result.value(),
2117 : Word32Or(var_sign.value(),
2118 0 : Int32Constant(BigInt::LengthBits::encode(2))));
2119 : StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2120 : StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
2121 0 : Goto(&done);
2122 : }
2123 :
2124 : BIND(&if_zero);
2125 0 : var_result = AllocateBigInt(IntPtrConstant(0));
2126 0 : Goto(&done);
2127 :
2128 : BIND(&done);
2129 0 : return var_result.value();
2130 : }
2131 :
             : // Builds a BigInt from a signed 64-bit word (64-bit targets only):
             : // zero-digit BigInt for 0, otherwise a one-digit BigInt holding the
             : // magnitude with the sign encoded in the bitfield.
2132 840 : TNode<BigInt> CodeStubAssembler::BigIntFromInt64(TNode<IntPtrT> value) {
2133 : DCHECK(Is64());
2134 840 : TVARIABLE(BigInt, var_result);
2135 840 : Label done(this), if_positive(this), if_negative(this), if_zero(this);
2136 2520 : GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
2137 840 : var_result = AllocateRawBigInt(IntPtrConstant(1));
2138 2520 : Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
2139 840 : &if_negative);
2140 :
2141 : BIND(&if_positive);
2142 : {
2143 : StoreBigIntBitfield(var_result.value(),
2144 1680 : Int32Constant(BigInt::SignBits::encode(false) |
2145 : BigInt::LengthBits::encode(1)));
2146 : StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
2147 840 : Goto(&done);
2148 : }
2149 :
2150 : BIND(&if_negative);
2151 : {
2152 : StoreBigIntBitfield(var_result.value(),
2153 1680 : Int32Constant(BigInt::SignBits::encode(true) |
2154 : BigInt::LengthBits::encode(1)));
             : // Store the magnitude: 0 - value.
2155 : StoreBigIntDigit(var_result.value(), 0,
2156 840 : Unsigned(IntPtrSub(IntPtrConstant(0), value)));
2157 840 : Goto(&done);
2158 : }
2159 :
2160 : BIND(&if_zero);
2161 : {
2162 840 : var_result = AllocateBigInt(IntPtrConstant(0));
2163 840 : Goto(&done);
2164 : }
2165 :
2166 : BIND(&done);
2167 840 : return var_result.value();
2168 : }
2169 :
             : // Loads an unsigned 64-bit element at |offset| from |data_pointer| and
             : // boxes it as a BigInt; see the signed variant above for the 64- vs
             : // 32-bit split.
2170 336 : Node* CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
2171 : Node* data_pointer, Node* offset) {
             : // NOTE(review): these labels appear unused — both branches return before
             : // ever jumping to them; confirm and consider removing.
2172 672 : Label if_zero(this), done(this);
2173 336 : if (Is64()) {
2174 : TNode<UintPtrT> value = UncheckedCast<UintPtrT>(
2175 336 : Load(MachineType::UintPtr(), data_pointer, offset));
2176 672 : return BigIntFromUint64(value);
2177 : } else {
2178 : DCHECK(!Is64());
2179 : #if defined(V8_TARGET_BIG_ENDIAN)
2180 : TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2181 : Load(MachineType::UintPtr(), data_pointer, offset));
2182 : TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2183 : Load(MachineType::UintPtr(), data_pointer,
2184 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2185 : #else
2186 : TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2187 0 : Load(MachineType::UintPtr(), data_pointer, offset));
2188 : TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2189 : Load(MachineType::UintPtr(), data_pointer,
2190 0 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2191 : #endif
2192 0 : return BigIntFromUint32Pair(low, high);
2193 : }
2194 : }
2195 :
             : // Builds a BigInt from an unsigned 64-bit value given as two machine
             : // words (32-bit targets only): two digits in the general case, one digit
             : // when the high word is zero, zero digits for 0.
2196 0 : TNode<BigInt> CodeStubAssembler::BigIntFromUint32Pair(TNode<UintPtrT> low,
2197 : TNode<UintPtrT> high) {
2198 : DCHECK(!Is64());
2199 0 : TVARIABLE(BigInt, var_result);
2200 0 : Label high_zero(this), if_zero(this), done(this);
2201 :
2202 0 : GotoIf(WordEqual(high, IntPtrConstant(0)), &high_zero);
2203 0 : var_result = AllocateBigInt(IntPtrConstant(2));
2204 : StoreBigIntDigit(var_result.value(), 0, low);
2205 : StoreBigIntDigit(var_result.value(), 1, high);
2206 0 : Goto(&done);
2207 :
2208 : BIND(&high_zero);
2209 0 : GotoIf(WordEqual(low, IntPtrConstant(0)), &if_zero);
2210 0 : var_result = AllocateBigInt(IntPtrConstant(1));
2211 : StoreBigIntDigit(var_result.value(), 0, low);
2212 0 : Goto(&done);
2213 :
2214 : BIND(&if_zero);
2215 0 : var_result = AllocateBigInt(IntPtrConstant(0));
2216 0 : Goto(&done);
2217 :
2218 : BIND(&done);
2219 0 : return var_result.value();
2220 : }
2221 :
             : // Builds a BigInt from an unsigned 64-bit word (64-bit targets only):
             : // one digit for non-zero values, zero digits for 0.
2222 784 : TNode<BigInt> CodeStubAssembler::BigIntFromUint64(TNode<UintPtrT> value) {
2223 : DCHECK(Is64());
2224 784 : TVARIABLE(BigInt, var_result);
2225 784 : Label done(this), if_zero(this);
2226 2352 : GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
2227 784 : var_result = AllocateBigInt(IntPtrConstant(1));
2228 : StoreBigIntDigit(var_result.value(), 0, value);
2229 784 : Goto(&done);
2230 :
2231 : BIND(&if_zero);
2232 784 : var_result = AllocateBigInt(IntPtrConstant(0));
2233 784 : Goto(&done);
2234 : BIND(&done);
2235 784 : return var_result.value();
2236 : }
2237 :
             : // Loads one element of a typed array (kind known at graph-build time) and
             : // boxes it as the appropriate tagged value: Smi for 8/16-bit kinds,
             : // Smi-or-HeapNumber for 32-bit integers, HeapNumber for floats, BigInt
             : // for 64-bit integer kinds.
2238 2688 : Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2239 : Node* data_pointer, Node* index_node, ElementsKind elements_kind,
2240 : ParameterMode parameter_mode) {
2241 : Node* offset =
2242 5376 : ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2243 2688 : switch (elements_kind) {
2244 : case UINT8_ELEMENTS: /* fall through */
2245 : case UINT8_CLAMPED_ELEMENTS:
2246 1344 : return SmiFromInt32(Load(MachineType::Uint8(), data_pointer, offset));
2247 : case INT8_ELEMENTS:
2248 672 : return SmiFromInt32(Load(MachineType::Int8(), data_pointer, offset));
2249 : case UINT16_ELEMENTS:
2250 672 : return SmiFromInt32(Load(MachineType::Uint16(), data_pointer, offset));
2251 : case INT16_ELEMENTS:
2252 672 : return SmiFromInt32(Load(MachineType::Int16(), data_pointer, offset));
2253 : case UINT32_ELEMENTS:
2254 448 : return ChangeUint32ToTagged(
2255 448 : Load(MachineType::Uint32(), data_pointer, offset));
2256 : case INT32_ELEMENTS:
2257 448 : return ChangeInt32ToTagged(
2258 448 : Load(MachineType::Int32(), data_pointer, offset));
2259 : case FLOAT32_ELEMENTS:
2260 896 : return AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(
2261 672 : Load(MachineType::Float32(), data_pointer, offset)));
2262 : case FLOAT64_ELEMENTS:
2263 448 : return AllocateHeapNumberWithValue(
2264 448 : Load(MachineType::Float64(), data_pointer, offset));
2265 : case BIGINT64_ELEMENTS:
2266 336 : return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
2267 : case BIGUINT64_ELEMENTS:
2268 336 : return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
2269 : default:
2270 0 : UNREACHABLE();
2271 : }
2272 : }
2273 :
             : // Runtime-dispatch variant: |elements_kind| is only known at run time, so
             : // emit a Switch over all typed-array kinds, each case delegating to the
             : // statically-typed overload above.
2274 56 : TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2275 : TNode<WordT> data_pointer, TNode<Smi> index, TNode<Int32T> elements_kind) {
2276 56 : TVARIABLE(Numeric, var_result);
2277 56 : Label done(this), if_unknown_type(this, Label::kDeferred);
2278 : int32_t elements_kinds[] = {
2279 : #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) TYPE##_ELEMENTS,
2280 : TYPED_ARRAYS(TYPED_ARRAY_CASE)
2281 : #undef TYPED_ARRAY_CASE
2282 56 : };
2283 :
             : // One label per typed-array kind, generated from the same macro list so
             : // the kinds and labels arrays stay in sync (static-asserted below).
2284 : #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) Label if_##type##array(this);
2285 56 : TYPED_ARRAYS(TYPED_ARRAY_CASE)
2286 : #undef TYPED_ARRAY_CASE
2287 :
2288 : Label* elements_kind_labels[] = {
2289 : #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) &if_##type##array,
2290 : TYPED_ARRAYS(TYPED_ARRAY_CASE)
2291 : #undef TYPED_ARRAY_CASE
2292 56 : };
2293 : STATIC_ASSERT(arraysize(elements_kinds) == arraysize(elements_kind_labels));
2294 :
2295 : Switch(elements_kind, &if_unknown_type, elements_kinds, elements_kind_labels,
2296 56 : arraysize(elements_kinds));
2297 :
2298 : BIND(&if_unknown_type);
2299 56 : Unreachable();
2300 :
2301 : #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
2302 : BIND(&if_##type##array); \
2303 : { \
2304 : var_result = CAST(LoadFixedTypedArrayElementAsTagged( \
2305 : data_pointer, index, TYPE##_ELEMENTS, SMI_PARAMETERS)); \
2306 : Goto(&done); \
2307 : }
2308 1232 : TYPED_ARRAYS(TYPED_ARRAY_CASE)
2309 : #undef TYPED_ARRAY_CASE
2310 :
2311 : BIND(&done);
2312 56 : return var_result.value();
2313 : }
2314 :
             : // Stores a tagged |value| into a typed array element, converting it to the
             : // element representation first: Smi -> int32 for small integer kinds,
             : // number -> truncated int32 for 32-bit kinds, HeapNumber -> float32/64,
             : // BigInt via the dedicated big-element store. |context| is only needed for
             : // the ToNumber-style truncation of 32-bit integer stores.
2315 616 : void CodeStubAssembler::StoreFixedTypedArrayElementFromTagged(
2316 : TNode<Context> context, TNode<FixedTypedArrayBase> elements,
2317 : TNode<Object> index_node, TNode<Object> value, ElementsKind elements_kind,
2318 : ParameterMode parameter_mode) {
2319 616 : TNode<RawPtrT> data_pointer = LoadFixedTypedArrayBackingStore(elements);
2320 616 : switch (elements_kind) {
2321 : case UINT8_ELEMENTS:
2322 : case UINT8_CLAMPED_ELEMENTS:
2323 : case INT8_ELEMENTS:
2324 : case UINT16_ELEMENTS:
2325 : case INT16_ELEMENTS:
2326 280 : StoreElement(data_pointer, elements_kind, index_node,
2327 560 : SmiToInt32(CAST(value)), parameter_mode);
2328 280 : break;
2329 : case UINT32_ELEMENTS:
2330 : case INT32_ELEMENTS:
2331 112 : StoreElement(data_pointer, elements_kind, index_node,
2332 112 : TruncateTaggedToWord32(context, value), parameter_mode);
2333 112 : break;
2334 : case FLOAT32_ELEMENTS:
2335 56 : StoreElement(data_pointer, elements_kind, index_node,
2336 112 : TruncateFloat64ToFloat32(LoadHeapNumberValue(CAST(value))),
2337 56 : parameter_mode);
2338 56 : break;
2339 : case FLOAT64_ELEMENTS:
2340 56 : StoreElement(data_pointer, elements_kind, index_node,
2341 56 : LoadHeapNumberValue(CAST(value)), parameter_mode);
2342 56 : break;
2343 : case BIGUINT64_ELEMENTS:
2344 : case BIGINT64_ELEMENTS: {
2345 : TNode<IntPtrT> offset =
2346 112 : ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2347 : EmitBigTypedArrayElementStore(elements, data_pointer, offset,
2348 112 : CAST(value));
2349 : break;
2350 : }
2351 : default:
2352 0 : UNREACHABLE();
2353 : }
2354 616 : }
2355 :
             : // Loads the feedback slot at |slot_index_node| (plus a kTaggedSize-aligned
             : // |additional_offset|) from a FeedbackVector, returned as a MaybeObject
             : // because feedback slots may hold weak references.
2356 15904 : TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2357 : Node* object, Node* slot_index_node, int additional_offset,
2358 : ParameterMode parameter_mode) {
2359 : CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2360 : CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2361 : int32_t header_size =
2362 15904 : FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2363 31808 : Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2364 : parameter_mode, header_size);
2365 : CSA_SLOW_ASSERT(
2366 : this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2367 : FeedbackVector::kHeaderSize));
2368 : return UncheckedCast<MaybeObject>(
2369 15904 : Load(MachineType::AnyTagged(), object, offset));
2370 : }
2371 :
2372 : template <typename Array>
2373 15032 : TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
2374 : TNode<Array> object, int array_header_size, Node* index_node,
2375 : int additional_offset, ParameterMode parameter_mode) {
2376 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2377 : DCHECK(IsAligned(additional_offset, kTaggedSize));
2378 : int endian_correction = 0;
2379 : #if V8_TARGET_LITTLE_ENDIAN
2380 : if (SmiValuesAre32Bits()) endian_correction = 4;
2381 : #endif
2382 : int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
2383 15032 : endian_correction;
2384 : Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2385 30064 : parameter_mode, header_size);
2386 : CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(object),
2387 : array_header_size + endian_correction));
2388 : if (SmiValuesAre32Bits()) {
2389 15032 : return UncheckedCast<Int32T>(Load(MachineType::Int32(), object, offset));
2390 : } else {
2391 : return SmiToInt32(Load(MachineType::AnyTagged(), object, offset));
2392 : }
2393 : }
2394 :
       : // FixedArray convenience wrapper around LoadAndUntagToWord32ArrayElement.
2395 3128 : TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
2396 : TNode<FixedArray> object, Node* index_node, int additional_offset,
2397 : ParameterMode parameter_mode) {
2398 : CSA_SLOW_ASSERT(this, IsFixedArraySubclass(object));
2399 : return LoadAndUntagToWord32ArrayElement(object, FixedArray::kHeaderSize,
2400 : index_node, additional_offset,
2401 6044 : parameter_mode);
2402 : }
2403 :
       : // Loads an element from a WeakFixedArray; the result may be a cleared or
       : // weak reference, hence the MaybeObject return type.
2404 1456 : TNode<MaybeObject> CodeStubAssembler::LoadWeakFixedArrayElement(
2405 : TNode<WeakFixedArray> object, Node* index, int additional_offset,
2406 : ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
2407 : return LoadArrayElement(object, WeakFixedArray::kHeaderSize, index,
2408 1456 : additional_offset, parameter_mode, needs_poisoning);
2409 : }
2410 :
       : // Loads a double element; if |if_hole| is non-null, branches there when
       : // the slot holds the hole NaN (see LoadDoubleWithHoleCheck).
2411 3364 : TNode<Float64T> CodeStubAssembler::LoadFixedDoubleArrayElement(
2412 : SloppyTNode<FixedDoubleArray> object, Node* index_node,
2413 : MachineType machine_type, int additional_offset,
2414 : ParameterMode parameter_mode, Label* if_hole) {
2415 : CSA_ASSERT(this, IsFixedDoubleArray(object));
2416 : DCHECK(IsAligned(additional_offset, kTaggedSize));
2417 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2418 : int32_t header_size =
2419 3364 : FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
2420 : TNode<IntPtrT> offset = ElementOffsetFromIndex(
2421 3364 : index_node, HOLEY_DOUBLE_ELEMENTS, parameter_mode, header_size);
2422 : CSA_ASSERT(this, IsOffsetInBounds(
2423 : offset, LoadAndUntagFixedArrayBaseLength(object),
2424 : FixedDoubleArray::kHeaderSize, HOLEY_DOUBLE_ELEMENTS));
2425 3364 : return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
2426 : }
2427 :
       : // Loads element |index| from |elements| as a tagged value, dispatching on
       : // the runtime |elements_kind|. Doubles are boxed into fresh HeapNumbers.
       : // Branches to |if_hole| on holes and to |if_accessor| when a dictionary
       : // entry is an accessor; unsupported kinds fall into the dictionary path.
2428 56 : TNode<Object> CodeStubAssembler::LoadFixedArrayBaseElementAsTagged(
2429 : TNode<FixedArrayBase> elements, TNode<IntPtrT> index,
2430 : TNode<Int32T> elements_kind, Label* if_accessor, Label* if_hole) {
2431 56 : TVARIABLE(Object, var_result);
2432 56 : Label done(this), if_packed(this), if_holey(this), if_packed_double(this),
2433 56 : if_holey_double(this), if_dictionary(this, Label::kDeferred);
2434 :
2435 : int32_t kinds[] = {// Handled by if_packed.
2436 : PACKED_SMI_ELEMENTS, PACKED_ELEMENTS,
2437 : PACKED_SEALED_ELEMENTS, PACKED_FROZEN_ELEMENTS,
2438 : // Handled by if_holey.
2439 : HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS,
2440 : // Handled by if_packed_double.
2441 : PACKED_DOUBLE_ELEMENTS,
2442 : // Handled by if_holey_double.
2443 56 : HOLEY_DOUBLE_ELEMENTS};
2444 : Label* labels[] = {// PACKED_{SMI,}_ELEMENTS
2445 : &if_packed, &if_packed, &if_packed, &if_packed,
2446 : // HOLEY_{SMI,}_ELEMENTS
2447 : &if_holey, &if_holey,
2448 : // PACKED_DOUBLE_ELEMENTS
2449 : &if_packed_double,
2450 : // HOLEY_DOUBLE_ELEMENTS
2451 56 : &if_holey_double};
2452 56 : Switch(elements_kind, &if_dictionary, kinds, labels, arraysize(kinds));
2453 :
2454 : BIND(&if_packed);
2455 : {
2456 : var_result = LoadFixedArrayElement(CAST(elements), index, 0);
2457 56 : Goto(&done);
2458 : }
2459 :
2460 : BIND(&if_holey);
2461 : {
2462 : var_result = LoadFixedArrayElement(CAST(elements), index);
2463 56 : Branch(WordEqual(var_result.value(), TheHoleConstant()), if_hole, &done);
2464 : }
2465 :
2466 : BIND(&if_packed_double);
2467 : {
2468 224 : var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
2469 56 : CAST(elements), index, MachineType::Float64()));
2470 56 : Goto(&done);
2471 : }
2472 :
2473 : BIND(&if_holey_double);
2474 : {
2475 224 : var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
2476 : CAST(elements), index, MachineType::Float64(), 0, INTPTR_PARAMETERS,
2477 56 : if_hole));
2478 56 : Goto(&done);
2479 : }
2480 :
2481 : BIND(&if_dictionary);
2482 : {
2483 : CSA_ASSERT(this, IsDictionaryElementsKind(elements_kind));
2484 56 : var_result = BasicLoadNumberDictionaryElement(CAST(elements), index,
2485 : if_accessor, if_hole);
2486 56 : Goto(&done);
2487 : }
2488 :
2489 : BIND(&done);
2490 56 : return var_result.value();
2491 : }
2492 :
       : // Loads a double from |base|+|offset|. If |if_hole| is non-null, first
       : // checks for the hole NaN bit pattern (full 64-bit compare on 64-bit
       : // targets, upper-word compare on 32-bit targets) and branches there.
       : // A MachineType::None() |machine_type| skips the value load entirely and
       : // returns an empty TNode (caller only wanted the hole check).
2493 5616 : TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
2494 : SloppyTNode<Object> base, SloppyTNode<IntPtrT> offset, Label* if_hole,
2495 : MachineType machine_type) {
2496 5616 : if (if_hole) {
2497 : // TODO(ishell): Compare only the upper part for the hole once the
2498 : // compiler is able to fold addition of already complex |offset| with
2499 : // |kIeeeDoubleExponentWordOffset| into one addressing mode.
2500 5112 : if (Is64()) {
2501 5112 : Node* element = Load(MachineType::Uint64(), base, offset);
2502 15336 : GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
2503 : } else {
2504 : Node* element_upper = Load(
2505 : MachineType::Uint32(), base,
2506 0 : IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
2507 0 : GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
2508 0 : if_hole);
2509 : }
2510 : }
2511 5616 : if (machine_type.IsNone()) {
2512 : // This means the actual value is not needed.
2513 956 : return TNode<Float64T>();
2514 : }
2515 4660 : return UncheckedCast<Float64T>(Load(machine_type, base, offset));
2516 : }
2517 :
       : // Loads a context slot at a compile-time-constant index.
2518 78908 : TNode<Object> CodeStubAssembler::LoadContextElement(
2519 : SloppyTNode<Context> context, int slot_index) {
2520 : int offset = Context::SlotOffset(slot_index);
2521 : return UncheckedCast<Object>(
2522 157816 : Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)));
2523 : }
2524 :
       : // Loads a context slot at a dynamic IntPtr index.
2525 5832 : TNode<Object> CodeStubAssembler::LoadContextElement(
2526 : SloppyTNode<Context> context, SloppyTNode<IntPtrT> slot_index) {
2527 11664 : Node* offset = ElementOffsetFromIndex(
2528 : slot_index, PACKED_ELEMENTS, INTPTR_PARAMETERS, Context::SlotOffset(0));
2529 5832 : return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2530 : }
2531 :
       : // Loads a context slot at a dynamic Smi index.
2532 56 : TNode<Object> CodeStubAssembler::LoadContextElement(TNode<Context> context,
2533 : TNode<Smi> slot_index) {
2534 112 : Node* offset = ElementOffsetFromIndex(slot_index, PACKED_ELEMENTS,
2535 : SMI_PARAMETERS, Context::SlotOffset(0));
2536 56 : return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2537 : }
2538 :
       : // Stores |value| into a constant-index context slot (with write barrier).
2539 224 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2540 : int slot_index,
2541 : SloppyTNode<Object> value) {
2542 : int offset = Context::SlotOffset(slot_index);
2543 448 : Store(context, IntPtrConstant(offset), value);
2544 224 : }
2545 :
       : // Stores |value| into a dynamic-index context slot (with write barrier).
2546 1008 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2547 : SloppyTNode<IntPtrT> slot_index,
2548 : SloppyTNode<Object> value) {
2549 : Node* offset = IntPtrAdd(TimesTaggedSize(slot_index),
2550 1008 : IntPtrConstant(Context::SlotOffset(0)));
2551 1008 : Store(context, offset, value);
2552 1008 : }
2553 :
       : // Barrier-free context slot store; only valid when the GC cannot need a
       : // record for this write (e.g. storing into a new-space object or a Smi).
2554 7384 : void CodeStubAssembler::StoreContextElementNoWriteBarrier(
2555 : SloppyTNode<Context> context, int slot_index, SloppyTNode<Object> value) {
2556 : int offset = Context::SlotOffset(slot_index);
2557 : StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
2558 14768 : IntPtrConstant(offset), value);
2559 7384 : }
2560 :
       : // Fetches the native context reachable from any context via its
       : // NATIVE_CONTEXT_INDEX slot.
2561 22620 : TNode<Context> CodeStubAssembler::LoadNativeContext(
2562 : SloppyTNode<Context> context) {
2563 : return UncheckedCast<Context>(
2564 32588 : LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX));
2565 : }
2566 :
       : // Walks the context chain via PREVIOUS_INDEX until a context whose map is
       : // the module-context map is found. The CSA_ASSERT documents the invariant
       : // that the walk must hit a module context before the native context.
2567 168 : TNode<Context> CodeStubAssembler::LoadModuleContext(
2568 : SloppyTNode<Context> context) {
2569 336 : Node* module_map = LoadRoot(RootIndex::kModuleContextMap);
2570 336 : Variable cur_context(this, MachineRepresentation::kTaggedPointer);
2571 168 : cur_context.Bind(context);
2572 :
2573 168 : Label context_found(this);
2574 :
2575 168 : Variable* context_search_loop_variables[1] = {&cur_context};
2576 336 : Label context_search(this, 1, context_search_loop_variables);
2577 :
2578 : // Loop until cur_context->map() is module_map.
2579 168 : Goto(&context_search);
2580 : BIND(&context_search);
2581 : {
2582 : CSA_ASSERT(this, Word32BinaryNot(IsNativeContext(cur_context.value())));
2583 336 : GotoIf(WordEqual(LoadMap(cur_context.value()), module_map), &context_found);
2584 :
2585 : cur_context.Bind(
2586 504 : LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
2587 168 : Goto(&context_search);
2588 : }
2589 :
2590 : BIND(&context_found);
2591 336 : return UncheckedCast<Context>(cur_context.value());
2592 : }
2593 :
       : // Looks up the JSArray map for a dynamic fast-elements |kind| by indexing
       : // the native context's per-kind array-map slots.
2594 788 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2595 : SloppyTNode<Int32T> kind, SloppyTNode<Context> native_context) {
2596 : CSA_ASSERT(this, IsFastElementsKind(kind));
2597 : CSA_ASSERT(this, IsNativeContext(native_context));
2598 : Node* offset = IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
2599 1576 : ChangeInt32ToIntPtr(kind));
2600 788 : return UncheckedCast<Map>(LoadContextElement(native_context, offset));
2601 : }
2602 :
       : // Compile-time-kind overload: resolves the slot index statically.
2603 4760 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2604 : ElementsKind kind, SloppyTNode<Context> native_context) {
2605 : CSA_ASSERT(this, IsNativeContext(native_context));
2606 : return UncheckedCast<Map>(
2607 4760 : LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
2608 : }
2609 :
       : // Returns true iff |function|'s SharedFunctionInfo kind is one of the four
       : // generator kinds: (async) generator function or (async) concise
       : // generator method.
2610 3980 : TNode<BoolT> CodeStubAssembler::IsGeneratorFunction(
2611 : TNode<JSFunction> function) {
2612 : TNode<SharedFunctionInfo> const shared_function_info =
2613 : CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
2614 :
2615 : TNode<Uint32T> const function_kind =
2616 : DecodeWord32<SharedFunctionInfo::FunctionKindBits>(LoadObjectField(
2617 : shared_function_info, SharedFunctionInfo::kFlagsOffset,
2618 7960 : MachineType::Uint32()));
2619 :
2620 7960 : return TNode<BoolT>::UncheckedCast(Word32Or(
2621 7960 : Word32Or(
2622 7960 : Word32Or(
2623 7960 : Word32Equal(function_kind,
2624 7960 : Int32Constant(FunctionKind::kAsyncGeneratorFunction)),
2625 7960 : Word32Equal(
2626 : function_kind,
2627 15920 : Int32Constant(FunctionKind::kAsyncConciseGeneratorMethod))),
2628 7960 : Word32Equal(function_kind,
2629 15920 : Int32Constant(FunctionKind::kGeneratorFunction))),
2630 7960 : Word32Equal(function_kind,
2631 15920 : Int32Constant(FunctionKind::kConciseGeneratorMethod))));
2632 : }
2633 :
       : // Mirrors JSFunction::has_prototype_property(): true when the map has a
       : // prototype slot and is a constructor, or the function is a generator.
2634 3980 : TNode<BoolT> CodeStubAssembler::HasPrototypeProperty(TNode<JSFunction> function,
2635 : TNode<Map> map) {
2636 : // (has_prototype_slot() && IsConstructor()) ||
2637 : // IsGeneratorFunction(shared()->kind())
2638 : uint32_t mask =
2639 : Map::HasPrototypeSlotBit::kMask | Map::IsConstructorBit::kMask;
2640 : return TNode<BoolT>::UncheckedCast(
2641 15920 : Word32Or(IsAllSetWord32(LoadMapBitField(map), mask),
2642 15920 : IsGeneratorFunction(function)));
2643 : }
2644 :
       : // Bails out to |runtime| when the function's "prototype" cannot be read
       : // directly: no prototype property, or a non-instance prototype.
2645 3980 : void CodeStubAssembler::GotoIfPrototypeRequiresRuntimeLookup(
2646 : TNode<JSFunction> function, TNode<Map> map, Label* runtime) {
2647 : // !has_prototype_property() || has_non_instance_prototype()
2648 7960 : GotoIfNot(HasPrototypeProperty(function, map), runtime);
2649 3980 : GotoIf(IsSetWord32<Map::HasNonInstancePrototypeBit>(LoadMapBitField(map)),
2650 3980 : runtime);
2651 3980 : }
2652 :
       : // Reads |function|'s prototype from its prototype-or-initial-map slot.
       : // Bails out to |if_bailout| on the hole (prototype not yet materialized);
       : // if the slot holds the initial map, returns that map's prototype instead.
2653 3924 : Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function,
2654 : Label* if_bailout) {
2655 : CSA_ASSERT(this, TaggedIsNotSmi(function));
2656 : CSA_ASSERT(this, IsJSFunction(function));
2657 : CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
2658 : CSA_ASSERT(this, IsClearWord32<Map::HasNonInstancePrototypeBit>(
2659 : LoadMapBitField(LoadMap(function))));
2660 : Node* proto_or_map =
2661 : LoadObjectField(function, JSFunction::kPrototypeOrInitialMapOffset);
2662 7848 : GotoIf(IsTheHole(proto_or_map), if_bailout);
2663 :
2664 7848 : VARIABLE(var_result, MachineRepresentation::kTagged, proto_or_map);
2665 3924 : Label done(this, &var_result);
2666 7848 : GotoIfNot(IsMap(proto_or_map), &done);
2667 :
2668 3924 : var_result.Bind(LoadMapPrototype(proto_or_map));
2669 3924 : Goto(&done);
2670 :
2671 : BIND(&done);
2672 7848 : return var_result.value();
2673 : }
2674 :
       : // Returns the BytecodeArray from |shared|'s function-data slot. If the
       : // slot holds an InterpreterData wrapper (debugging/interpreted-to-baseline
       : // trampoline case), unwraps it to the inner bytecode array.
2675 112 : TNode<BytecodeArray> CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray(
2676 : SloppyTNode<SharedFunctionInfo> shared) {
2677 : Node* function_data =
2678 : LoadObjectField(shared, SharedFunctionInfo::kFunctionDataOffset);
2679 :
2680 224 : VARIABLE(var_result, MachineRepresentation::kTagged, function_data);
2681 112 : Label done(this, &var_result);
2682 :
2683 224 : GotoIfNot(HasInstanceType(function_data, INTERPRETER_DATA_TYPE), &done);
2684 : Node* bytecode_array =
2685 : LoadObjectField(function_data, InterpreterData::kBytecodeArrayOffset);
2686 112 : var_result.Bind(bytecode_array);
2687 112 : Goto(&done);
2688 :
2689 : BIND(&done);
2690 224 : return CAST(var_result.value());
2691 : }
2692 :
       : // Stores a single byte into |object| at field |offset| (untagging the
       : // heap-object pointer); bytes never need a write barrier.
2693 24 : void CodeStubAssembler::StoreObjectByteNoWriteBarrier(TNode<HeapObject> object,
2694 : int offset,
2695 : TNode<Word32T> value) {
2696 : StoreNoWriteBarrier(MachineRepresentation::kWord8, object,
2697 48 : IntPtrConstant(offset - kHeapObjectTag), value);
2698 24 : }
2699 :
       : // Writes the float64 payload of a HeapNumber; no barrier needed for raw
       : // float data.
2700 784 : void CodeStubAssembler::StoreHeapNumberValue(SloppyTNode<HeapNumber> object,
2701 : SloppyTNode<Float64T> value) {
2702 : StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
2703 : MachineRepresentation::kFloat64);
2704 784 : }
2705 :
       : // Same as StoreHeapNumberValue, but for MutableHeapNumber boxes.
2706 0 : void CodeStubAssembler::StoreMutableHeapNumberValue(
2707 : SloppyTNode<MutableHeapNumber> object, SloppyTNode<Float64T> value) {
2708 : StoreObjectFieldNoWriteBarrier(object, MutableHeapNumber::kValueOffset, value,
2709 : MachineRepresentation::kFloat64);
2710 0 : }
2711 :
       : // Tagged field store with a full write barrier. Map writes must go through
       : // StoreMap (enforced by the DCHECK).
2712 8176 : void CodeStubAssembler::StoreObjectField(Node* object, int offset,
2713 : Node* value) {
2714 : DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead.
2715 :
2716 8176 : OptimizedStoreField(MachineRepresentation::kTagged,
2717 : UncheckedCast<HeapObject>(object), offset, value,
2718 16684 : WriteBarrierKind::kFullWriteBarrier);
2719 8176 : }
2720 :
       : // Dynamic-offset overload: folds to the constant-offset path when |offset|
       : // is a compile-time constant, else untags and stores with a barrier.
2721 4160 : void CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
2722 : Node* value) {
2723 : int const_offset;
2724 4160 : if (ToInt32Constant(offset, const_offset)) {
2725 2760 : StoreObjectField(object, const_offset, value);
2726 : } else {
2727 4200 : Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
2728 : }
2729 4160 : }
2730 :
       : // Constant-offset field store with the write barrier elided; caller must
       : // guarantee the GC cannot need a record for this write.
2731 35596 : void CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2732 : Node* object, int offset, Node* value, MachineRepresentation rep) {
2733 62464 : OptimizedStoreField(rep, UncheckedCast<HeapObject>(object), offset, value,
2734 162048 : WriteBarrierKind::kNoWriteBarrier);
2735 35596 : }
2736 :
       : // Dynamic-offset barrier-free store; folds to the constant path when
       : // possible, else untags the pointer and stores raw.
2737 4636 : void CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2738 : Node* object, Node* offset, Node* value, MachineRepresentation rep) {
2739 : int const_offset;
2740 4636 : if (ToInt32Constant(offset, const_offset)) {
2741 5336 : return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
2742 : }
2743 : StoreNoWriteBarrier(rep, object,
2744 5904 : IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
2745 : }
2746 :
       : // Stores |map| into |object|'s map slot (with appropriate barrier
       : // handling inside OptimizedStoreMap).
2747 4544 : void CodeStubAssembler::StoreMap(Node* object, Node* map) {
2748 10828 : OptimizedStoreMap(UncheckedCast<HeapObject>(object), CAST(map));
2749 4544 : }
2750 :
       : // Root-map convenience overload; root maps are immortal so no barrier.
2751 52900 : void CodeStubAssembler::StoreMapNoWriteBarrier(Node* object,
2752 : RootIndex map_root_index) {
2753 105800 : StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
2754 52900 : }
2755 :
       : // Barrier-free map store; used when |object| is freshly allocated or |map|
       : // cannot require a remembered-set entry.
2756 4840 : void CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
2757 : CSA_SLOW_ASSERT(this, IsMap(map));
2758 10384 : OptimizedStoreField(MachineRepresentation::kTaggedPointer,
2759 : UncheckedCast<HeapObject>(object), HeapObject::kMapOffset,
2760 68188 : map, WriteBarrierKind::kNoWriteBarrier);
2761 4840 : }
2762 :
       : // Stores a root-table value into a field, skipping the write barrier for
       : // immortal-immovable roots (they can never be moved or collected).
2763 22764 : void CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
2764 : RootIndex root_index) {
2765 22764 : if (RootsTable::IsImmortalImmovable(root_index)) {
2766 45528 : return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
2767 : } else {
2768 0 : return StoreObjectField(object, offset, LoadRoot(root_index));
2769 : }
2770 : }
2771 :
       : // Writes a Smi length; Smis never need a write barrier.
2772 0 : void CodeStubAssembler::StoreJSArrayLength(TNode<JSArray> array,
2773 : TNode<Smi> length) {
2774 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2775 0 : }
2776 :
       : // Installs a new elements backing store on |object| (with write barrier).
2777 0 : void CodeStubAssembler::StoreElements(TNode<Object> object,
2778 : TNode<FixedArrayBase> elements) {
2779 : StoreObjectField(object, JSObject::kElementsOffset, elements);
2780 0 : }
2781 :
       : // Stores |value| at |index_node| in a FixedArray or PropertyArray (both
       : // share the same header layout, as the STATIC_ASSERTs verify). The
       : // barrier mode selects a plain store, a skipped barrier, or the special
       : // ephemeron-key barrier used by weak collections.
2782 38032 : void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
2783 : Node* object, Node* index_node, Node* value, WriteBarrierMode barrier_mode,
2784 : int additional_offset, ParameterMode parameter_mode) {
2785 : CSA_SLOW_ASSERT(
2786 : this, Word32Or(IsFixedArraySubclass(object), IsPropertyArray(object)));
2787 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2788 : DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2789 : barrier_mode == UPDATE_WRITE_BARRIER ||
2790 : barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER);
2791 : DCHECK(IsAligned(additional_offset, kTaggedSize));
2792 : STATIC_ASSERT(static_cast<int>(FixedArray::kHeaderSize) ==
2793 : static_cast<int>(PropertyArray::kHeaderSize));
2794 : int header_size =
2795 38032 : FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
2796 76064 : Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2797 : parameter_mode, header_size);
2798 : STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
2799 : static_cast<int>(WeakFixedArray::kLengthOffset));
2800 : STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
2801 : static_cast<int>(PropertyArray::kLengthAndHashOffset));
2802 : // Check that index_node + additional_offset <= object.length.
2803 : // TODO(cbruni): Use proper LoadXXLength helpers
2804 : CSA_ASSERT(
2805 : this,
2806 : IsOffsetInBounds(
2807 : offset,
2808 : Select<IntPtrT>(
2809 : IsPropertyArray(object),
2810 : [=] {
2811 : TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
2812 : object, PropertyArray::kLengthAndHashOffset);
2813 : return TNode<IntPtrT>::UncheckedCast(
2814 : DecodeWord<PropertyArray::LengthField>(length_and_hash));
2815 : },
2816 : [=] {
2817 : return LoadAndUntagObjectField(object,
2818 : FixedArrayBase::kLengthOffset);
2819 : }),
2820 : FixedArray::kHeaderSize));
2821 38032 : if (barrier_mode == SKIP_WRITE_BARRIER) {
2822 20188 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
2823 17844 : } else if (barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER) {
2824 56 : StoreEphemeronKey(object, offset, value);
2825 : } else {
2826 17788 : Store(object, offset, value);
2827 : }
2828 : }
2829 :
       : // Stores a float64 element, optionally bounds-checked. NaNs are silenced
       : // so a signalling NaN can never masquerade as the hole bit pattern.
2830 1916 : void CodeStubAssembler::StoreFixedDoubleArrayElement(
2831 : TNode<FixedDoubleArray> object, Node* index_node, TNode<Float64T> value,
2832 : ParameterMode parameter_mode, CheckBounds check_bounds) {
2833 : CSA_ASSERT(this, IsFixedDoubleArray(object));
2834 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2835 1916 : if (NeedsBoundsCheck(check_bounds)) {
2836 1916 : FixedArrayBoundsCheck(object, index_node, 0, parameter_mode);
2837 : }
2838 : Node* offset =
2839 3832 : ElementOffsetFromIndex(index_node, PACKED_DOUBLE_ELEMENTS, parameter_mode,
2840 : FixedArray::kHeaderSize - kHeapObjectTag);
2841 : MachineRepresentation rep = MachineRepresentation::kFloat64;
2842 : // Make sure we do not store signalling NaNs into double arrays.
2843 1916 : TNode<Float64T> value_silenced = Float64SilenceNaN(value);
2844 1916 : StoreNoWriteBarrier(rep, object, offset, value_silenced);
2845 1916 : }
2846 :
       : // Stores |value| into a FeedbackVector slot, with or without a write
       : // barrier per |barrier_mode|; asserts the slot offset is in bounds.
2847 15960 : void CodeStubAssembler::StoreFeedbackVectorSlot(Node* object,
2848 : Node* slot_index_node,
2849 : Node* value,
2850 : WriteBarrierMode barrier_mode,
2851 : int additional_offset,
2852 : ParameterMode parameter_mode) {
2853 : CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2854 : CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2855 : DCHECK(IsAligned(additional_offset, kTaggedSize));
2856 : DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2857 : barrier_mode == UPDATE_WRITE_BARRIER);
2858 : int header_size =
2859 15960 : FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2860 31920 : Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2861 : parameter_mode, header_size);
2862 : // Check that slot_index_node <= object.length.
2863 : CSA_ASSERT(this,
2864 : IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2865 : FeedbackVector::kHeaderSize));
2866 15960 : if (barrier_mode == SKIP_WRITE_BARRIER) {
2867 13160 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
2868 : } else {
2869 2800 : Store(object, offset, value);
2870 : }
2871 15960 : }
2872 :
       : // Branches to |bailout| unless the array map's "length" property is
       : // writable. Relies on "length" being the first descriptor (it is
       : // nonconfigurable), which the DEBUG block double-checks by name.
2873 336 : void CodeStubAssembler::EnsureArrayLengthWritable(TNode<Map> map,
2874 : Label* bailout) {
2875 : // Don't support arrays in dictionary named property mode.
2876 672 : GotoIf(IsDictionaryMap(map), bailout);
2877 :
2878 : // Check whether the length property is writable. The length property is the
2879 : // only default named property on arrays. It's nonconfigurable, hence is
2880 : // guaranteed to stay the first property.
2881 336 : TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
2882 :
2883 : int length_index = JSArray::kLengthDescriptorIndex;
2884 : #ifdef DEBUG
2885 : TNode<Name> maybe_length =
2886 : LoadKeyByDescriptorEntry(descriptors, length_index);
2887 : CSA_ASSERT(this,
2888 : WordEqual(maybe_length, LoadRoot(RootIndex::klength_string)));
2889 : #endif
2890 :
2891 : TNode<Uint32T> details =
2892 336 : LoadDetailsByDescriptorEntry(descriptors, length_index);
2893 672 : GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
2894 336 : bailout);
2895 336 : }
2896 :
       : // Verifies that pushing onto an array with this map is allowed (not a
       : // prototype map, extensible, writable length), then returns the map's
       : // elements kind for the caller's fast path.
2897 224 : TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Map> map,
2898 : Label* bailout) {
2899 : // Disallow pushing onto prototypes. It might be the JSArray prototype.
2900 : // Disallow pushing onto non-extensible objects.
2901 224 : Comment("Disallow pushing onto prototypes");
2902 : Node* bit_field2 = LoadMapBitField2(map);
2903 : int mask = Map::IsPrototypeMapBit::kMask | Map::IsExtensibleBit::kMask;
2904 672 : Node* test = Word32And(bit_field2, Int32Constant(mask));
2905 672 : GotoIf(Word32NotEqual(test, Int32Constant(Map::IsExtensibleBit::kMask)),
2906 : bailout);
2907 :
2908 224 : EnsureArrayLengthWritable(map, bailout);
2909 :
2910 : TNode<Uint32T> kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
2911 224 : return Signed(kind);
2912 : }
2913 :
       : // Ensures the backing store in |var_elements| can hold length+growth
       : // elements, growing (and rebinding the variable) only when needed; jumps
       : // to |bailout| when growing fails.
2914 540 : void CodeStubAssembler::PossiblyGrowElementsCapacity(
2915 : ParameterMode mode, ElementsKind kind, Node* array, Node* length,
2916 : Variable* var_elements, Node* growth, Label* bailout) {
2917 1080 : Label fits(this, var_elements);
2918 : Node* capacity =
2919 540 : TaggedToParameter(LoadFixedArrayBaseLength(var_elements->value()), mode);
2920 : // length and growth nodes are already in a ParameterMode appropriate
2921 : // representation.
2922 540 : Node* new_length = IntPtrOrSmiAdd(growth, length, mode);
2923 1080 : GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
2924 540 : Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
2925 540 : var_elements->Bind(GrowElementsCapacity(array, var_elements->value(), kind,
2926 : kind, capacity, new_capacity, mode,
2927 540 : bailout));
2928 540 : Goto(&fits);
2929 : BIND(&fits);
2930 540 : }
2931 :
       : // Appends the remaining stub arguments (from |arg_index| onward) to
       : // |array|, growing capacity first. On a per-element failure (e.g. wrong
       : // element type), the length written so far is committed, |arg_index| is
       : // advanced past the consumed arguments, and control goes to |bailout| so
       : // the caller can finish the append in the runtime. Returns the new length.
2932 204 : TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
2933 : SloppyTNode<JSArray> array,
2934 : CodeStubArguments* args,
2935 : TVariable<IntPtrT>* arg_index,
2936 : Label* bailout) {
2937 : CSA_SLOW_ASSERT(this, IsJSArray(array));
2938 204 : Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
2939 204 : Label pre_bailout(this);
2940 204 : Label success(this);
2941 : TVARIABLE(Smi, var_tagged_length);
2942 : ParameterMode mode = OptimalParameterMode();
2943 408 : VARIABLE(var_length, OptimalParameterRepresentation(),
2944 : TaggedToParameter(LoadFastJSArrayLength(array), mode));
2945 408 : VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
2946 :
2947 : // Resize the capacity of the fixed array if it doesn't fit.
2948 : TNode<IntPtrT> first = arg_index->value();
2949 : Node* growth = IntPtrToParameter(
2950 : IntPtrSub(UncheckedCast<IntPtrT>(args->GetLength(INTPTR_PARAMETERS)),
2951 : first),
2952 : mode);
2953 204 : PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
2954 204 : &var_elements, growth, &pre_bailout);
2955 :
2956 : // Push each argument onto the end of the array now that there is enough
2957 : // capacity.
2958 408 : CodeStubAssembler::VariableList push_vars({&var_length}, zone());
2959 204 : Node* elements = var_elements.value();
2960 204 : args->ForEach(
2961 : push_vars,
2962 816 : [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
2963 408 : TryStoreArrayElement(kind, mode, &pre_bailout, elements,
2964 204 : var_length.value(), arg);
2965 204 : Increment(&var_length, 1, mode);
2966 204 : },
2967 204 : first, nullptr);
2968 : {
2969 204 : TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2970 : var_tagged_length = length;
2971 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2972 204 : Goto(&success);
2973 : }
2974 :
2975 : BIND(&pre_bailout);
2976 : {
2977 204 : TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2978 : var_tagged_length = length;
2979 408 : Node* diff = SmiSub(length, LoadFastJSArrayLength(array));
2980 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2981 408 : *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
2982 204 : Goto(bailout);
2983 : }
2984 :
2985 : BIND(&success);
2986 204 : return var_tagged_length.value();
2987 : }
2988 :
       : // Stores |value| into |elements| at |index| for the given kind, bailing
       : // out when the value does not fit the kind (non-Smi into Smi elements,
       : // non-number into double elements). Numbers going into double elements
       : // are converted to raw float64 first.
2989 540 : void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
2990 : ParameterMode mode, Label* bailout,
2991 : Node* elements, Node* index,
2992 : Node* value) {
2993 540 : if (IsSmiElementsKind(kind)) {
2994 368 : GotoIf(TaggedIsNotSmi(value), bailout);
2995 356 : } else if (IsDoubleElementsKind(kind)) {
2996 180 : GotoIfNotNumber(value, bailout);
2997 : }
2998 540 : if (IsDoubleElementsKind(kind)) {
2999 360 : value = ChangeNumberToFloat64(value);
3000 : }
3001 540 : StoreElement(elements, kind, index, value, mode);
3002 540 : }
3003 :
       : // Single-value variant of BuildAppendJSArray: grows by one, stores
       : // |value|, and bumps the array length; any failure goes to |bailout|
       : // before the length is updated.
3004 336 : void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
3005 : Node* value, Label* bailout) {
3006 : CSA_SLOW_ASSERT(this, IsJSArray(array));
3007 336 : Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
3008 : ParameterMode mode = OptimalParameterMode();
3009 672 : VARIABLE(var_length, OptimalParameterRepresentation(),
3010 : TaggedToParameter(LoadFastJSArrayLength(array), mode));
3011 672 : VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
3012 :
3013 : // Resize the capacity of the fixed array if it doesn't fit.
3014 336 : Node* growth = IntPtrOrSmiConstant(1, mode);
3015 336 : PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
3016 336 : &var_elements, growth, bailout);
3017 :
3018 : // Push each argument onto the end of the array now that there is enough
3019 : // capacity.
3020 336 : TryStoreArrayElement(kind, mode, bailout, var_elements.value(),
3021 336 : var_length.value(), value);
3022 336 : Increment(&var_length, 1, mode);
3023 :
3024 336 : Node* length = ParameterToTagged(var_length.value(), mode);
3025 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3026 336 : }
3027 :
       : // Allocates a fresh Cell holding |value|; |mode| controls the barrier for
       : // the initial value store.
3028 0 : Node* CodeStubAssembler::AllocateCellWithValue(Node* value,
3029 : WriteBarrierMode mode) {
3030 : Node* result = Allocate(Cell::kSize, kNone);
3031 0 : StoreMapNoWriteBarrier(result, RootIndex::kCellMap);
3032 0 : StoreCellValue(result, value, mode);
3033 0 : return result;
3034 : }
3035 :
       : // Reads a Cell's value field.
3036 1288 : Node* CodeStubAssembler::LoadCellValue(Node* cell) {
3037 : CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
3038 1288 : return LoadObjectField(cell, Cell::kValueOffset);
3039 : }
3040 :
       : // Writes a Cell's value field, honoring the requested barrier mode.
3041 0 : void CodeStubAssembler::StoreCellValue(Node* cell, Node* value,
3042 : WriteBarrierMode mode) {
3043 : CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
3044 : DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER);
3045 :
3046 0 : if (mode == UPDATE_WRITE_BARRIER) {
3047 : StoreObjectField(cell, Cell::kValueOffset, value);
3048 : } else {
3049 : StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
3050 : }
3051 0 : }
3052 :
       : // Allocates an uninitialized HeapNumber (value field not written).
3053 26780 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber() {
3054 : Node* result = Allocate(HeapNumber::kSize, kNone);
3055 : RootIndex heap_map_index = RootIndex::kHeapNumberMap;
3056 26780 : StoreMapNoWriteBarrier(result, heap_map_index);
3057 26780 : return UncheckedCast<HeapNumber>(result);
3058 : }
3059 :
       : // Allocates a HeapNumber initialized to |value|.
3060 23556 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
3061 : SloppyTNode<Float64T> value) {
3062 23556 : TNode<HeapNumber> result = AllocateHeapNumber();
3063 : StoreHeapNumberValue(result, value);
3064 23556 : return result;
3065 : }
3066 :
       : // Allocates an uninitialized MutableHeapNumber box.
3067 504 : TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumber() {
3068 : Node* result = Allocate(MutableHeapNumber::kSize, kNone);
3069 : RootIndex heap_map_index = RootIndex::kMutableHeapNumberMap;
3070 504 : StoreMapNoWriteBarrier(result, heap_map_index);
3071 504 : return UncheckedCast<MutableHeapNumber>(result);
3072 : }
3073 :
       : // Returns |object| unchanged unless it is a MutableHeapNumber, in which
       : // case a fresh mutable box with the same float64 value is returned (so the
       : // caller never shares a mutable box).
3074 56 : TNode<Object> CodeStubAssembler::CloneIfMutablePrimitive(TNode<Object> object) {
3075 56 : TVARIABLE(Object, result, object);
3076 56 : Label done(this);
3077 :
3078 112 : GotoIf(TaggedIsSmi(object), &done);
3079 112 : GotoIfNot(IsMutableHeapNumber(UncheckedCast<HeapObject>(object)), &done);
3080 : {
3081 : // Mutable heap number found --- allocate a clone.
3082 : TNode<Float64T> value =
3083 : LoadHeapNumberValue(UncheckedCast<HeapNumber>(object));
3084 112 : result = AllocateMutableHeapNumberWithValue(value);
3085 56 : Goto(&done);
3086 : }
3087 :
3088 : BIND(&done);
3089 56 : return result.value();
3090 : }
3091 :
       : // Allocates a MutableHeapNumber initialized to |value|.
3092 504 : TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumberWithValue(
3093 : SloppyTNode<Float64T> value) {
3094 504 : TNode<MutableHeapNumber> result = AllocateMutableHeapNumber();
3095 : StoreMutableHeapNumberValue(result, value);
3096 504 : return result;
3097 : }
3098 :
       : // Allocates a BigInt of |length| digits and initializes its bitfield with
       : // the length (sign and other bits zero); digits remain uninitialized.
3099 2632 : TNode<BigInt> CodeStubAssembler::AllocateBigInt(TNode<IntPtrT> length) {
3100 2632 : TNode<BigInt> result = AllocateRawBigInt(length);
3101 7896 : StoreBigIntBitfield(result,
3102 5264 : Word32Shl(TruncateIntPtrToInt32(length),
3103 5264 : Int32Constant(BigInt::LengthBits::kShift)));
3104 2632 : return result;
3105 : }
3106 :
       : // Allocates BigInt storage (header + |length| pointer-sized digits)
       : // without initializing bitfield or digits; zeroes the optional padding
       : // word when the layout has one. Asserts length < 3 — only small BigInts
       : // are supported here (no large-object handling).
3107 3472 : TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
3108 : // This is currently used only for 64-bit wide BigInts. If more general
3109 : // applicability is required, a large-object check must be added.
3110 : CSA_ASSERT(this, UintPtrLessThan(length, IntPtrConstant(3)));
3111 :
3112 : TNode<IntPtrT> size =
3113 : IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
3114 6944 : Signed(WordShl(length, kSystemPointerSizeLog2)));
3115 6944 : Node* raw_result = Allocate(size, kNone);
3116 3472 : StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
3117 : if (FIELD_SIZE(BigInt::kOptionalPaddingOffset) != 0) {
3118 : DCHECK_EQ(4, FIELD_SIZE(BigInt::kOptionalPaddingOffset));
3119 : StoreObjectFieldNoWriteBarrier(raw_result, BigInt::kOptionalPaddingOffset,
3120 6944 : Int32Constant(0),
3121 : MachineRepresentation::kWord32);
3122 : }
3123 3472 : return UncheckedCast<BigInt>(raw_result);
3124 : }
3125 :
       : // Writes the BigInt's raw 32-bit sign/length bitfield.
3126 224 : void CodeStubAssembler::StoreBigIntBitfield(TNode<BigInt> bigint,
3127 : TNode<Word32T> bitfield) {
3128 : StoreObjectFieldNoWriteBarrier(bigint, BigInt::kBitfieldOffset, bitfield,
3129 : MachineRepresentation::kWord32);
3130 224 : }
3131 :
// Stores {digit} into the digit slot at compile-time-constant {digit_index}.
// No bounds check: the caller must guarantee the BigInt has at least
// digit_index + 1 digits. Raw word payload, so no write barrier is needed.
void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint, int digit_index,
                                         TNode<UintPtrT> digit) {
  StoreObjectFieldNoWriteBarrier(
      bigint, BigInt::kDigitsOffset + digit_index * kSystemPointerSize, digit,
      UintPtrT::kMachineRepresentation);
}
3138 :
// Loads the BigInt's raw bitfield word (encoded sign/length); decode with
// the BigInt::*Bits bitfield classes.
TNode<Word32T> CodeStubAssembler::LoadBigIntBitfield(TNode<BigInt> bigint) {
  return UncheckedCast<Word32T>(
      LoadObjectField(bigint, BigInt::kBitfieldOffset, MachineType::Uint32()));
}
3143 :
// Loads the digit at compile-time-constant {digit_index}. No bounds check:
// the caller must guarantee the index is within the BigInt's length.
TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
                                                   int digit_index) {
  return UncheckedCast<UintPtrT>(LoadObjectField(
      bigint, BigInt::kDigitsOffset + digit_index * kSystemPointerSize,
      MachineType::UintPtr()));
}
3150 :
// Allocates a SeqOneByteString of compile-time-constant {length}. Returns
// the canonical empty string for length 0. The hash field is initialized to
// kEmptyHashField ("not yet computed"); the character payload is left
// uninitialized and must be filled by the caller.
TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
    uint32_t length, AllocationFlags flags) {
  Comment("AllocateSeqOneByteString");
  if (length == 0) {
    return CAST(LoadRoot(RootIndex::kempty_string));
  }
  Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
  StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                 Uint32Constant(length),
                                 MachineRepresentation::kWord32);
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
                                 Int32Constant(String::kEmptyHashField),
                                 MachineRepresentation::kWord32);
  return CAST(result);
}
3168 :
// Returns true if {object} is Smi zero or a Context. Used by the dynamic
// string allocators to assert that their {context} argument is either a real
// context or the Smi-0 "no context available" sentinel.
TNode<BoolT> CodeStubAssembler::IsZeroOrContext(SloppyTNode<Object> object) {
  return Select<BoolT>(WordEqual(object, SmiConstant(0)),
                       [=] { return Int32TrueConstant(); },
                       [=] { return IsContext(CAST(object)); });
}
3174 :
// Allocates a SeqOneByteString of dynamic {length}. Zero length yields the
// canonical empty string; sizes up to kMaxRegularHeapObjectSize are
// allocated inline in new space; anything larger falls back to the runtime
// (deferred path), which can use large-object space. {context} may be Smi 0
// when no context is available (checked by IsZeroOrContext). The character
// payload is left uninitialized.
TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
    Node* context, TNode<Uint32T> length, AllocationFlags flags) {
  Comment("AllocateSeqOneByteString");
  CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
  VARIABLE(var_result, MachineRepresentation::kTagged);

  // Compute the SeqOneByteString size and check if it fits into new space.
  Label if_lengthiszero(this), if_sizeissmall(this),
      if_notsizeissmall(this, Label::kDeferred), if_join(this);
  GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);

  // Round the header+payload size up to the object alignment boundary.
  Node* raw_size = GetArrayAllocationSize(
      Signed(ChangeUint32ToWord(length)), UINT8_ELEMENTS, INTPTR_PARAMETERS,
      SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
  TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
         &if_sizeissmall, &if_notsizeissmall);

  BIND(&if_sizeissmall);
  {
    // Just allocate the SeqOneByteString in new space.
    TNode<Object> result =
        AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
    DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
    StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                   length, MachineRepresentation::kWord32);
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
                                   Int32Constant(String::kEmptyHashField),
                                   MachineRepresentation::kWord32);
    var_result.Bind(result);
    Goto(&if_join);
  }

  BIND(&if_notsizeissmall);
  {
    // We might need to allocate in large object space, go to the runtime.
    Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
                               ChangeUint32ToTagged(length));
    var_result.Bind(result);
    Goto(&if_join);
  }

  BIND(&if_lengthiszero);
  {
    var_result.Bind(LoadRoot(RootIndex::kempty_string));
    Goto(&if_join);
  }

  BIND(&if_join);
  return CAST(var_result.value());
}
3227 :
// Allocates a SeqTwoByteString of compile-time-constant {length}. Returns
// the canonical empty string for length 0. The hash field is initialized to
// kEmptyHashField; the character payload is left uninitialized and must be
// filled by the caller.
TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
    uint32_t length, AllocationFlags flags) {
  Comment("AllocateSeqTwoByteString");
  if (length == 0) {
    return CAST(LoadRoot(RootIndex::kempty_string));
  }
  Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
  StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
                                 Uint32Constant(length),
                                 MachineRepresentation::kWord32);
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
                                 Int32Constant(String::kEmptyHashField),
                                 MachineRepresentation::kWord32);
  return CAST(result);
}
3245 :
3246 1568 : TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3247 : Node* context, TNode<Uint32T> length, AllocationFlags flags) {
3248 : CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3249 1568 : Comment("AllocateSeqTwoByteString");
3250 3136 : VARIABLE(var_result, MachineRepresentation::kTagged);
3251 :
3252 : // Compute the SeqTwoByteString size and check if it fits into new space.
3253 1568 : Label if_lengthiszero(this), if_sizeissmall(this),
3254 1568 : if_notsizeissmall(this, Label::kDeferred), if_join(this);
3255 3136 : GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);
3256 :
3257 : Node* raw_size = GetArrayAllocationSize(
3258 3136 : Signed(ChangeUint32ToWord(length)), UINT16_ELEMENTS, INTPTR_PARAMETERS,
3259 : SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
3260 3136 : TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3261 4704 : Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3262 1568 : &if_sizeissmall, &if_notsizeissmall);
3263 :
3264 : BIND(&if_sizeissmall);
3265 : {
3266 : // Just allocate the SeqTwoByteString in new space.
3267 : TNode<Object> result =
3268 : AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3269 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3270 1568 : StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3271 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3272 : length, MachineRepresentation::kWord32);
3273 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3274 3136 : Int32Constant(String::kEmptyHashField),
3275 : MachineRepresentation::kWord32);
3276 1568 : var_result.Bind(result);
3277 1568 : Goto(&if_join);
3278 : }
3279 :
3280 : BIND(&if_notsizeissmall);
3281 : {
3282 : // We might need to allocate in large object space, go to the runtime.
3283 : Node* result = CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
3284 3136 : ChangeUint32ToTagged(length));
3285 1568 : var_result.Bind(result);
3286 1568 : Goto(&if_join);
3287 : }
3288 :
3289 : BIND(&if_lengthiszero);
3290 : {
3291 3136 : var_result.Bind(LoadRoot(RootIndex::kempty_string));
3292 1568 : Goto(&if_join);
3293 : }
3294 :
3295 : BIND(&if_join);
3296 3136 : return CAST(var_result.value());
3297 : }
3298 :
// Allocates a SlicedString viewing {length} characters of {parent} starting
// at {offset}, using the (one- or two-byte) sliced-string map selected by
// {map_root_index}. All fields are stored without write barriers.
// NOTE(review): nothing here validates {parent}; callers presumably pass a
// string suitable for slicing (e.g. already flattened) — confirm at call
// sites.
TNode<String> CodeStubAssembler::AllocateSlicedString(RootIndex map_root_index,
                                                      TNode<Uint32T> length,
                                                      TNode<String> parent,
                                                      TNode<Smi> offset) {
  DCHECK(map_root_index == RootIndex::kSlicedOneByteStringMap ||
         map_root_index == RootIndex::kSlicedStringMap);
  Node* result = Allocate(SlicedString::kSize);
  DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
  StoreMapNoWriteBarrier(result, map_root_index);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
                                 Int32Constant(String::kEmptyHashField),
                                 MachineRepresentation::kWord32);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
                                 MachineRepresentation::kWord32);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
                                 MachineRepresentation::kTagged);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
                                 MachineRepresentation::kTagged);
  return CAST(result);
}
3319 :
// Convenience wrapper: allocates a one-byte SlicedString over {parent}.
TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
    TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
  return AllocateSlicedString(RootIndex::kSlicedOneByteStringMap, length,
                              parent, offset);
}
3325 :
// Convenience wrapper: allocates a two-byte SlicedString over {parent}.
TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
    TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
  return AllocateSlicedString(RootIndex::kSlicedStringMap, length, parent,
                              offset);
}
3331 :
// Allocates a ConsString concatenating {left} and {right} with total
// {length}. The result is one-byte only if BOTH inputs are one-byte
// (instance types are And-ed and the encoding bit tested). If {var_feedback}
// is non-null it receives the matching BinaryOperationFeedback constant
// (kConsOneByteString / kConsTwoByteString) as a Smi.
TNode<String> CodeStubAssembler::AllocateConsString(TNode<Uint32T> length,
                                                    TNode<String> left,
                                                    TNode<String> right,
                                                    Variable* var_feedback) {
  // Added string can be a cons string.
  Comment("Allocating ConsString");
  Node* left_instance_type = LoadInstanceType(left);
  Node* right_instance_type = LoadInstanceType(right);

  // Determine the resulting ConsString map to use depending on whether
  // any of {left} or {right} has two byte encoding.
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // Because the two-byte tag is 0, And-ing the types yields a set encoding
  // bit only when both strings are one-byte.
  Node* combined_instance_type =
      Word32And(left_instance_type, right_instance_type);
  TNode<Map> result_map = CAST(Select<Object>(
      IsSetWord32(combined_instance_type, kStringEncodingMask),
      [=] {
        if (var_feedback != nullptr) {
          var_feedback->Bind(
              SmiConstant(BinaryOperationFeedback::kConsOneByteString));
        }
        return LoadRoot(RootIndex::kConsOneByteStringMap);
      },
      [=] {
        if (var_feedback != nullptr) {
          var_feedback->Bind(
              SmiConstant(BinaryOperationFeedback::kConsTwoByteString));
        }
        return LoadRoot(RootIndex::kConsStringMap);
      }));
  Node* result = AllocateInNewSpace(ConsString::kSize);
  StoreMapNoWriteBarrier(result, result_map);
  StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
                                 MachineRepresentation::kWord32);
  StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
                                 Int32Constant(String::kEmptyHashField),
                                 MachineRepresentation::kWord32);
  StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, left);
  StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, right);
  return CAST(result);
}
3374 :
// Convenience overload taking a compile-time-constant element count.
TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
    int at_least_space_for) {
  return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
}
3379 :
// Allocates a NameDictionary big enough to hold {at_least_space_for}
// elements: the element count is converted to a hash-table capacity first
// (HashTableComputeCapacity accounts for the load factor).
TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
    TNode<IntPtrT> at_least_space_for) {
  CSA_ASSERT(this, UintPtrLessThanOrEqual(
                       at_least_space_for,
                       IntPtrConstant(NameDictionary::kMaxCapacity)));
  TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
  return AllocateNameDictionaryWithCapacity(capacity);
}
3388 :
// Allocates and fully initializes an empty NameDictionary with the given
// {capacity} (must be a nonzero power of two). Header fields are set to
// their empty-dictionary values and every element slot is filled with
// undefined. Allocation is in new space, so all stores skip write barriers.
TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
    TNode<IntPtrT> capacity) {
  CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
  CSA_ASSERT(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
  // Total size = header + one tagged slot per backing-array index.
  TNode<IntPtrT> length = EntryToIndex<NameDictionary>(capacity);
  TNode<IntPtrT> store_size = IntPtrAdd(
      TimesTaggedSize(length), IntPtrConstant(NameDictionary::kHeaderSize));

  TNode<NameDictionary> result =
      UncheckedCast<NameDictionary>(AllocateInNewSpace(store_size));
  Comment("Initialize NameDictionary");
  // Initialize FixedArray fields.
  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kNameDictionaryMap));
  StoreMapNoWriteBarrier(result, RootIndex::kNameDictionaryMap);
  StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
                                 SmiFromIntPtr(length));
  // Initialized HashTable fields.
  TNode<Smi> zero = SmiConstant(0);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
                         zero, SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
                         SmiTag(capacity), SKIP_WRITE_BARRIER);
  // Initialize Dictionary fields.
  TNode<HeapObject> filler = UndefinedConstant();
  StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
                         SmiConstant(PropertyDetails::kInitialIndex),
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
                         SmiConstant(PropertyArray::kNoHashSentinel),
                         SKIP_WRITE_BARRIER);

  // Initialize NameDictionary elements.
  // Bulk-fill everything from the first element slot to the end of the
  // object with undefined, using raw (untagged) addresses.
  TNode<WordT> result_word = BitcastTaggedToWord(result);
  TNode<WordT> start_address = IntPtrAdd(
      result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
                                      NameDictionary::kElementsStartIndex) -
                                  kHeapObjectTag));
  TNode<WordT> end_address = IntPtrAdd(
      result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
  return result;
}
3433 :
// Makes a same-capacity copy of {dictionary} (e.g. for cloning a
// boilerplate's property dictionary). Jumps to {large_object_fallback} when
// the capacity exceeds kMaxRegularCapacity, since the fresh table is
// allocated in regular new space.
TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
    TNode<NameDictionary> dictionary, Label* large_object_fallback) {
  Comment("Copy boilerplate property dict");
  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)));
  GotoIf(UintPtrGreaterThan(
             capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
         large_object_fallback);
  TNode<NameDictionary> properties =
      AllocateNameDictionaryWithCapacity(capacity);
  // Copy all slots verbatim; the copy is in new space so barriers are
  // skipped.
  TNode<IntPtrT> length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
  CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
                         SKIP_WRITE_BARRIER, INTPTR_PARAMETERS);
  return properties;
}
3449 :
// Allocates an empty OrderedHashMap/OrderedHashSet at the minimum capacity.
// All sizes are compile-time constants, so the header fields, the buckets
// (filled with kNotFound) and the data table (filled with undefined) are
// initialized with unrolled constant-index stores.
template <typename CollectionType>
Node* CodeStubAssembler::AllocateOrderedHashTable() {
  static const int kCapacity = CollectionType::kMinCapacity;
  static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
  static const int kDataTableLength = kCapacity * CollectionType::kEntrySize;
  static const int kFixedArrayLength =
      CollectionType::HashTableStartIndex() + kBucketCount + kDataTableLength;
  static const int kDataTableStartIndex =
      CollectionType::HashTableStartIndex() + kBucketCount;

  STATIC_ASSERT(base::bits::IsPowerOfTwo(kCapacity));
  STATIC_ASSERT(kCapacity <= CollectionType::MaxCapacity());

  // Allocate the table and add the proper map.
  const ElementsKind elements_kind = HOLEY_ELEMENTS;
  TNode<IntPtrT> length_intptr = IntPtrConstant(kFixedArrayLength);
  TNode<Map> fixed_array_map =
      CAST(LoadRoot(CollectionType::GetMapRootIndex()));
  TNode<FixedArray> table =
      CAST(AllocateFixedArray(elements_kind, length_intptr,
                              kAllowLargeObjectAllocation, fixed_array_map));

  // Initialize the OrderedHashTable fields.
  const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
  StoreFixedArrayElement(table, CollectionType::NumberOfElementsIndex(),
                         SmiConstant(0), barrier_mode);
  StoreFixedArrayElement(table, CollectionType::NumberOfDeletedElementsIndex(),
                         SmiConstant(0), barrier_mode);
  StoreFixedArrayElement(table, CollectionType::NumberOfBucketsIndex(),
                         SmiConstant(kBucketCount), barrier_mode);

  // Fill the buckets with kNotFound.
  TNode<Smi> not_found = SmiConstant(CollectionType::kNotFound);
  STATIC_ASSERT(CollectionType::HashTableStartIndex() ==
                CollectionType::NumberOfBucketsIndex() + 1);
  STATIC_ASSERT((CollectionType::HashTableStartIndex() + kBucketCount) ==
                kDataTableStartIndex);
  for (int i = 0; i < kBucketCount; i++) {
    StoreFixedArrayElement(table, CollectionType::HashTableStartIndex() + i,
                           not_found, barrier_mode);
  }

  // Fill the data table with undefined.
  STATIC_ASSERT(kDataTableStartIndex + kDataTableLength == kFixedArrayLength);
  for (int i = 0; i < kDataTableLength; i++) {
    StoreFixedArrayElement(table, kDataTableStartIndex + i, UndefinedConstant(),
                           barrier_mode);
  }

  return table;
}

template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashMap>();
template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashSet>();
3504 :
// Allocates an empty SmallOrderedHashMap/SmallOrderedHashSet for the given
// {capacity} (a power of two below the type's kMaxCapacity). Layout, in
// order: byte-sized header counters, data table (capacity * kEntrySize
// tagged slots), then the hash table and chain table (one byte per bucket /
// entry). The data table is filled with the-hole; the hash/chain bytes are
// memset to 0xFF.
// NOTE(review): 0xFF presumably encodes the per-byte "not found" sentinel of
// SmallOrderedHashTable — confirm against its class definition.
template <typename CollectionType>
TNode<CollectionType> CodeStubAssembler::AllocateSmallOrderedHashTable(
    TNode<IntPtrT> capacity) {
  CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
  CSA_ASSERT(this, IntPtrLessThan(
                       capacity, IntPtrConstant(CollectionType::kMaxCapacity)));

  TNode<IntPtrT> data_table_start_offset =
      IntPtrConstant(CollectionType::DataTableStartOffset());

  TNode<IntPtrT> data_table_size = IntPtrMul(
      capacity, IntPtrConstant(CollectionType::kEntrySize * kTaggedSize));

  // Bucket count = capacity / load factor; one byte per bucket.
  TNode<Int32T> hash_table_size =
      Int32Div(TruncateIntPtrToInt32(capacity),
               Int32Constant(CollectionType::kLoadFactor));

  TNode<IntPtrT> hash_table_start_offset =
      IntPtrAdd(data_table_start_offset, data_table_size);

  TNode<IntPtrT> hash_table_and_chain_table_size =
      IntPtrAdd(ChangeInt32ToIntPtr(hash_table_size), capacity);

  TNode<IntPtrT> total_size =
      IntPtrAdd(hash_table_start_offset, hash_table_and_chain_table_size);

  // Round the total size up to the next tagged-size boundary.
  TNode<IntPtrT> total_size_word_aligned =
      IntPtrAdd(total_size, IntPtrConstant(kTaggedSize - 1));
  total_size_word_aligned = ChangeInt32ToIntPtr(
      Int32Div(TruncateIntPtrToInt32(total_size_word_aligned),
               Int32Constant(kTaggedSize)));
  total_size_word_aligned =
      UncheckedCast<IntPtrT>(TimesTaggedSize(total_size_word_aligned));

  // Allocate the table and add the proper map.
  TNode<Map> small_ordered_hash_map =
      CAST(LoadRoot(CollectionType::GetMapRootIndex()));
  TNode<Object> table_obj = AllocateInNewSpace(total_size_word_aligned);
  StoreMapNoWriteBarrier(table_obj, small_ordered_hash_map);
  TNode<CollectionType> table = UncheckedCast<CollectionType>(table_obj);

  // Initialize the SmallOrderedHashTable fields.
  StoreObjectByteNoWriteBarrier(
      table, CollectionType::NumberOfBucketsOffset(),
      Word32And(Int32Constant(0xFF), hash_table_size));
  StoreObjectByteNoWriteBarrier(table, CollectionType::NumberOfElementsOffset(),
                                Int32Constant(0));
  StoreObjectByteNoWriteBarrier(
      table, CollectionType::NumberOfDeletedElementsOffset(), Int32Constant(0));

  // Untagged base address for raw memory initialization below.
  TNode<IntPtrT> table_address =
      IntPtrSub(BitcastTaggedToWord(table), IntPtrConstant(kHeapObjectTag));
  TNode<IntPtrT> hash_table_start_address =
      IntPtrAdd(table_address, hash_table_start_offset);

  // Initialize the HashTable part.
  Node* memset = ExternalConstant(ExternalReference::libc_memset_function());
  CallCFunction(
      memset, MachineType::AnyTagged(),
      std::make_pair(MachineType::Pointer(), hash_table_start_address),
      std::make_pair(MachineType::IntPtr(), IntPtrConstant(0xFF)),
      std::make_pair(MachineType::UintPtr(), hash_table_and_chain_table_size));

  // Initialize the DataTable part.
  TNode<HeapObject> filler = TheHoleConstant();
  TNode<WordT> data_table_start_address =
      IntPtrAdd(table_address, data_table_start_offset);
  TNode<WordT> data_table_end_address =
      IntPtrAdd(data_table_start_address, data_table_size);
  StoreFieldsNoWriteBarrier(data_table_start_address, data_table_end_address,
                            filler);

  return table;
}

template V8_EXPORT_PRIVATE TNode<SmallOrderedHashMap>
CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
    TNode<IntPtrT> capacity);
template V8_EXPORT_PRIVATE TNode<SmallOrderedHashSet>
CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
    TNode<IntPtrT> capacity);
3586 :
// Looks up an entry in an OrderedHashMap/OrderedHashSet. {hash} selects a
// bucket; the bucket's chain is walked, calling {key_compare} on each
// candidate key. On a match, {entry_start_position} is bound to the entry's
// index relative to HashTableStartIndex and control jumps to {entry_found};
// if the chain ends (kNotFound sentinel), control jumps to {not_found}.
// {key_compare} receives (candidate_key, if_same, if_not_same) and must
// branch to exactly one of the two labels.
template <typename CollectionType>
void CodeStubAssembler::FindOrderedHashTableEntry(
    Node* table, Node* hash,
    const std::function<void(Node*, Label*, Label*)>& key_compare,
    Variable* entry_start_position, Label* entry_found, Label* not_found) {
  // Get the index of the bucket.
  Node* const number_of_buckets = SmiUntag(CAST(UnsafeLoadFixedArrayElement(
      CAST(table), CollectionType::NumberOfBucketsIndex())));
  // Bucket count is a power of two, so masking implements hash % buckets.
  Node* const bucket =
      WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
  Node* const first_entry = SmiUntag(CAST(UnsafeLoadFixedArrayElement(
      CAST(table), bucket,
      CollectionType::HashTableStartIndex() * kTaggedSize)));

  // Walk the bucket chain.
  Node* entry_start;
  Label if_key_found(this);
  {
    VARIABLE(var_entry, MachineType::PointerRepresentation(), first_entry);
    Label loop(this, {&var_entry, entry_start_position}),
        continue_next_entry(this);
    Goto(&loop);
    BIND(&loop);

    // If the entry index is the not-found sentinel, we are done.
    GotoIf(
        WordEqual(var_entry.value(), IntPtrConstant(CollectionType::kNotFound)),
        not_found);

    // Make sure the entry index is within range.
    CSA_ASSERT(
        this,
        UintPtrLessThan(
            var_entry.value(),
            SmiUntag(SmiAdd(
                CAST(UnsafeLoadFixedArrayElement(
                    CAST(table), CollectionType::NumberOfElementsIndex())),
                CAST(UnsafeLoadFixedArrayElement(
                    CAST(table),
                    CollectionType::NumberOfDeletedElementsIndex()))))));

    // Compute the index of the entry relative to kHashTableStartIndex.
    entry_start =
        IntPtrAdd(IntPtrMul(var_entry.value(),
                            IntPtrConstant(CollectionType::kEntrySize)),
                  number_of_buckets);

    // Load the key from the entry.
    Node* const candidate_key = UnsafeLoadFixedArrayElement(
        CAST(table), entry_start,
        CollectionType::HashTableStartIndex() * kTaggedSize);

    key_compare(candidate_key, &if_key_found, &continue_next_entry);

    BIND(&continue_next_entry);
    // Load the index of the next entry in the bucket chain.
    var_entry.Bind(SmiUntag(CAST(UnsafeLoadFixedArrayElement(
        CAST(table), entry_start,
        (CollectionType::HashTableStartIndex() + CollectionType::kChainOffset) *
            kTaggedSize))));

    Goto(&loop);
  }

  BIND(&if_key_found);
  entry_start_position->Bind(entry_start);
  Goto(entry_found);
}

template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashMap>(
    Node* table, Node* hash,
    const std::function<void(Node*, Label*, Label*)>& key_compare,
    Variable* entry_start_position, Label* entry_found, Label* not_found);
template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashSet>(
    Node* table, Node* hash,
    const std::function<void(Node*, Label*, Label*)>& key_compare,
    Variable* entry_start_position, Label* entry_found, Label* not_found);
3664 :
// Allocates a Struct-derived object of the size recorded in {map} and fills
// every field after the header with undefined.
Node* CodeStubAssembler::AllocateStruct(Node* map, AllocationFlags flags) {
  Comment("AllocateStruct");
  CSA_ASSERT(this, IsMap(map));
  TNode<IntPtrT> size = TimesTaggedSize(LoadMapInstanceSizeInWords(map));
  TNode<Object> object = Allocate(size, flags);
  StoreMapNoWriteBarrier(object, map);
  InitializeStructBody(object, map, size, Struct::kHeaderSize);
  return object;
}
3674 :
// Fills the field range [start_offset, size) of a freshly allocated struct
// with undefined, using raw (untagged) addresses and no write barriers.
// {map} is only consulted by the slow assert.
void CodeStubAssembler::InitializeStructBody(Node* object, Node* map,
                                             Node* size, int start_offset) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Comment("InitializeStructBody");
  Node* filler = UndefinedConstant();
  // Calculate the untagged field addresses.
  object = BitcastTaggedToWord(object);
  Node* start_address =
      IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
  Node* end_address =
      IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
}
3688 :
// Allocates a JSObject with the given {map} in new space and initializes
// its properties/elements pointers and in-object fields. {properties} and
// {elements} may be null, in which case the empty fixed array is used.
// Not valid for JSFunction or JSGlobalObject maps (asserted).
Node* CodeStubAssembler::AllocateJSObjectFromMap(
    Node* map, Node* properties, Node* elements, AllocationFlags flags,
    SlackTrackingMode slack_tracking_mode) {
  CSA_ASSERT(this, IsMap(map));
  CSA_ASSERT(this, Word32BinaryNot(IsJSFunctionMap(map)));
  CSA_ASSERT(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
                                                     JS_GLOBAL_OBJECT_TYPE)));
  TNode<IntPtrT> instance_size =
      TimesTaggedSize(LoadMapInstanceSizeInWords(map));
  TNode<Object> object = AllocateInNewSpace(instance_size, flags);
  StoreMapNoWriteBarrier(object, map);
  InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
                            slack_tracking_mode);
  return object;
}
3704 :
// Initializes the properties/elements pointers and the in-object fields of
// a freshly allocated JSObject. Null {properties}/{elements} select the
// empty fixed array root. Field initialization is delegated per
// {slack_tracking_mode}.
void CodeStubAssembler::InitializeJSObjectFromMap(
    Node* object, Node* map, Node* instance_size, Node* properties,
    Node* elements, SlackTrackingMode slack_tracking_mode) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  // This helper assumes that the object is in new-space, as guarded by the
  // check in AllocateJSObjectFromMap.
  if (properties == nullptr) {
    CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
    StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
                         RootIndex::kEmptyFixedArray);
  } else {
    CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(properties),
                                       IsNameDictionary(properties)),
                              IsEmptyFixedArray(properties)));
    StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
                                   properties);
  }
  if (elements == nullptr) {
    StoreObjectFieldRoot(object, JSObject::kElementsOffset,
                         RootIndex::kEmptyFixedArray);
  } else {
    CSA_ASSERT(this, IsFixedArray(elements));
    StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
  }
  if (slack_tracking_mode == kNoSlackTracking) {
    InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
  } else {
    DCHECK_EQ(slack_tracking_mode, kWithSlackTracking);
    InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
  }
}
3736 :
// Fills all in-object fields in [start_offset, instance_size) with
// undefined. Asserts the map is not in the middle of slack tracking
// (construction counter clear).
void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
    Node* object, Node* map, Node* instance_size, int start_offset) {
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  CSA_ASSERT(
      this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
  InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
                           RootIndex::kUndefinedValue);
}
3745 :
3746 504 : void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
3747 : Node* object, Node* map, Node* instance_size) {
3748 : CSA_SLOW_ASSERT(this, IsMap(map));
3749 504 : Comment("InitializeJSObjectBodyNoSlackTracking");
3750 :
3751 : // Perform in-object slack tracking if requested.
3752 : int start_offset = JSObject::kHeaderSize;
3753 : Node* bit_field3 = LoadMapBitField3(map);
3754 504 : Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
3755 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3756 504 : GotoIf(IsSetWord32<Map::ConstructionCounterBits>(bit_field3),
3757 504 : &slack_tracking);
3758 504 : Comment("No slack tracking");
3759 504 : InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3760 504 : Goto(&end);
3761 :
3762 : BIND(&slack_tracking);
3763 : {
3764 504 : Comment("Decrease construction counter");
3765 : // Slack tracking is only done on initial maps.
3766 : CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
3767 : STATIC_ASSERT(Map::ConstructionCounterBits::kNext == 32);
3768 1008 : Node* new_bit_field3 = Int32Sub(
3769 1008 : bit_field3, Int32Constant(1 << Map::ConstructionCounterBits::kShift));
3770 : StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3,
3771 : MachineRepresentation::kWord32);
3772 : STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3773 :
3774 : // The object still has in-object slack therefore the |unsed_or_unused|
3775 : // field contain the "used" value.
3776 : Node* used_size = TimesTaggedSize(ChangeUint32ToWord(
3777 : LoadObjectField(map, Map::kUsedOrUnusedInstanceSizeInWordsOffset,
3778 1512 : MachineType::Uint8())));
3779 :
3780 504 : Comment("iInitialize filler fields");
3781 : InitializeFieldsWithRoot(object, used_size, instance_size,
3782 504 : RootIndex::kOnePointerFillerMap);
3783 :
3784 504 : Comment("Initialize undefined fields");
3785 1008 : InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
3786 504 : RootIndex::kUndefinedValue);
3787 :
3788 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3789 504 : GotoIf(IsClearWord32<Map::ConstructionCounterBits>(new_bit_field3),
3790 504 : &complete);
3791 504 : Goto(&end);
3792 : }
3793 :
3794 : // Finalize the instance size.
3795 : BIND(&complete);
3796 : {
3797 : // ComplextInobjectSlackTracking doesn't allocate and thus doesn't need a
3798 : // context.
3799 : CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
3800 : NoContextConstant(), map);
3801 504 : Goto(&end);
3802 : }
3803 :
3804 : BIND(&end);
3805 504 : }
3806 :
// Stores {value} into every tagged-size slot in [start_address,
// end_address), skipping write barriers. Both addresses must be
// tagged-size-aligned (asserted); callers use this only for freshly
// allocated (new-space) objects.
void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
                                                  Node* end_address,
                                                  Node* value) {
  Comment("StoreFieldsNoWriteBarrier");
  CSA_ASSERT(this, WordIsAligned(start_address, kTaggedSize));
  CSA_ASSERT(this, WordIsAligned(end_address, kTaggedSize));
  BuildFastLoop(
      start_address, end_address,
      [this, value](Node* current) {
        StoreNoWriteBarrier(MachineRepresentation::kTagged, current, value);
      },
      kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
}
3820 :
// Returns true if {capacity} (in the given parameter mode) does not exceed
// JSArray::kMaxFastArrayLength, i.e. the array can keep fast elements.
TNode<BoolT> CodeStubAssembler::IsValidFastJSArrayCapacity(
    Node* capacity, ParameterMode capacity_mode) {
  return UncheckedCast<BoolT>(
      UintPtrLessThanOrEqual(ParameterToIntPtr(capacity, capacity_mode),
                             IntPtrConstant(JSArray::kMaxFastArrayLength)));
}
3827 :
// Allocates a JSArray header pointing at an existing {elements} backing
// store. When {allocation_site} is non-null, extra space for an
// AllocationMemento is reserved alongside the array.
TNode<JSArray> CodeStubAssembler::AllocateJSArray(
    TNode<Map> array_map, TNode<FixedArrayBase> elements, TNode<Smi> length,
    Node* allocation_site) {
  Comment("begin allocation of JSArray passing in elements");
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));

  int base_size = JSArray::kSize;
  if (allocation_site != nullptr) {
    base_size += AllocationMemento::kSize;
  }

  TNode<IntPtrT> size = IntPtrConstant(base_size);
  TNode<JSArray> result =
      AllocateUninitializedJSArray(array_map, length, allocation_site, size);
  StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, elements);
  return result;
}
3845 :
// Allocates a JSArray together with its elements backing store, without
// initializing the element slots, and returns both objects.
//
// If |capacity| is statically zero, or dynamically equal to zero, the
// canonical empty FixedArray serves as the backing store. For non-zero
// capacities the JSArray and its elements are normally folded into a single
// new-space allocation; when kAllowLargeObjectAllocation is set and the
// request exceeds the regular heap-object size limit, the elements are
// allocated (and fully initialized) in large-object space first, followed by
// the JSArray in new space.
std::pair<TNode<JSArray>, TNode<FixedArrayBase>>
CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
    ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
    Node* allocation_site, Node* capacity, ParameterMode capacity_mode,
    AllocationFlags allocation_flags) {
  Comment("begin allocation of JSArray with elements");
  // Only the large-object-allocation flag is supported by this helper.
  CHECK_EQ(allocation_flags & ~kAllowLargeObjectAllocation, 0);
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));

  TVARIABLE(JSArray, array);
  TVARIABLE(FixedArrayBase, elements);

  // Fast path: capacity is a compile-time zero, so no elements store needs
  // to be allocated at all.
  if (IsIntPtrOrSmiConstantZero(capacity, capacity_mode)) {
    TNode<FixedArrayBase> empty_array = EmptyFixedArrayConstant();
    array = AllocateJSArray(array_map, empty_array, length, allocation_site);
    return {array.value(), empty_array};
  }

  Label out(this), empty(this), nonempty(this);

  // Dynamic zero-capacity check.
  Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
         &empty, &nonempty);

  BIND(&empty);
  {
    TNode<FixedArrayBase> empty_array = EmptyFixedArrayConstant();
    array = AllocateJSArray(array_map, empty_array, length, allocation_site);
    elements = empty_array;
    Goto(&out);
  }

  BIND(&nonempty);
  {
    int base_size = JSArray::kSize;
    if (allocation_site != nullptr) base_size += AllocationMemento::kSize;

    // The elements store begins directly after the JSArray (and memento).
    const int elements_offset = base_size;

    // Compute space for elements
    base_size += FixedArray::kHeaderSize;
    TNode<IntPtrT> size =
        ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);

    // For very large arrays in which the requested allocation exceeds the
    // maximal size of a regular heap object, we cannot use the allocation
    // folding trick. Instead, we first allocate the elements in large object
    // space, and then allocate the JSArray (and possibly the allocation
    // memento) in new space.
    if (allocation_flags & kAllowLargeObjectAllocation) {
      Label next(this);
      GotoIf(IsRegularHeapObjectSize(size), &next);

      CSA_CHECK(this, IsValidFastJSArrayCapacity(capacity, capacity_mode));

      // Allocate and initialize the elements first. Full initialization is
      // needed because the upcoming JSArray allocation could trigger GC.
      elements =
          AllocateFixedArray(kind, capacity, capacity_mode, allocation_flags);

      if (IsDoubleElementsKind(kind)) {
        FillFixedDoubleArrayWithZero(
            CAST(elements.value()),
            ParameterToIntPtr(capacity, capacity_mode));
      } else {
        FillFixedArrayWithSmiZero(CAST(elements.value()),
                                  ParameterToIntPtr(capacity, capacity_mode));
      }

      // The JSArray and possibly allocation memento next. Note that
      // allocation_flags are *not* passed on here and the resulting JSArray
      // will always be in new space.
      array =
          AllocateJSArray(array_map, elements.value(), length, allocation_site);

      Goto(&out);

      BIND(&next);
    }

    // Fold all objects into a single new space allocation.
    array =
        AllocateUninitializedJSArray(array_map, length, allocation_site, size);
    elements = UncheckedCast<FixedArrayBase>(
        InnerAllocate(array.value(), elements_offset));

    StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
                                   elements.value());

    // Setup elements object.
    STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kTaggedSize);
    RootIndex elements_map_index = IsDoubleElementsKind(kind)
                                       ? RootIndex::kFixedDoubleArrayMap
                                       : RootIndex::kFixedArrayMap;
    DCHECK(RootsTable::IsImmortalImmovable(elements_map_index));
    StoreMapNoWriteBarrier(elements.value(), elements_map_index);

    TNode<Smi> capacity_smi = ParameterToTagged(capacity, capacity_mode);
    CSA_ASSERT(this, SmiGreaterThan(capacity_smi, SmiConstant(0)));
    StoreObjectFieldNoWriteBarrier(elements.value(), FixedArray::kLengthOffset,
                                   capacity_smi);
    Goto(&out);
  }

  BIND(&out);
  return {array.value(), elements.value()};
}
3951 :
// Allocates |size_in_bytes| in new space and initializes the JSArray header
// fields (map, length, empty properties) plus an optional AllocationMemento.
// The elements field is NOT written here; callers must install a valid
// elements store themselves (see the AllocateJSArray overloads above).
TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArray(
    TNode<Map> array_map, TNode<Smi> length, Node* allocation_site,
    TNode<IntPtrT> size_in_bytes) {
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));

  // Allocate space for the JSArray and the elements FixedArray in one go.
  TNode<Object> array = AllocateInNewSpace(size_in_bytes);

  StoreMapNoWriteBarrier(array, array_map);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);

  // Place the AllocationMemento directly behind the JSArray body so that
  // allocation-site tracking can locate it.
  if (allocation_site != nullptr) {
    InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
                                allocation_site);
  }

  return CAST(array);
}
3972 :
// Allocates a JSArray of |kind| with a backing store of |capacity| slots and
// fills the backing store with the-hole. A zero capacity (checked
// dynamically) results in the canonical empty FixedArray backing store and
// skips the fill.
TNode<JSArray> CodeStubAssembler::AllocateJSArray(
    ElementsKind kind, TNode<Map> array_map, Node* capacity, TNode<Smi> length,
    Node* allocation_site, ParameterMode capacity_mode,
    AllocationFlags allocation_flags) {
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));

  TNode<JSArray> array;
  TNode<FixedArrayBase> elements;

  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
      kind, array_map, length, allocation_site, capacity, capacity_mode,
      allocation_flags);

  Label out(this), nonempty(this);

  Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
         &out, &nonempty);

  BIND(&nonempty);
  {
    // The elements store is uninitialized; hole-fill every slot before the
    // array can be observed.
    FillFixedArrayWithValue(kind, elements,
                            IntPtrOrSmiConstant(0, capacity_mode), capacity,
                            RootIndex::kTheHoleValue, capacity_mode);
    Goto(&out);
  }

  BIND(&out);
  return array;
}
4003 :
// Creates a new JSArray holding |count| elements of |array| starting at index
// |begin|. The result uses the canonical JSArray map for the source's
// elements kind in the current native context, and a freshly extracted copy
// of the relevant slice of the source's backing store.
Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
                                            Node* begin, Node* count,
                                            ParameterMode mode, Node* capacity,
                                            Node* allocation_site) {
  Node* original_array_map = LoadMap(array);
  Node* elements_kind = LoadMapElementsKind(original_array_map);

  // Use the canonical map for the Array's ElementsKind.
  Node* native_context = LoadNativeContext(context);
  TNode<Map> array_map = LoadJSArrayElementsMap(elements_kind, native_context);

  // Copy the requested slice of the backing store.
  TNode<FixedArrayBase> new_elements = ExtractFixedArray(
      LoadElements(array), begin, count, capacity,
      ExtractFixedArrayFlag::kAllFixedArrays, mode, nullptr, elements_kind);

  TNode<Object> result = AllocateJSArray(
      array_map, new_elements, ParameterToTagged(count, mode), allocation_site);
  return result;
}
4023 :
// Clones |array|, copying its backing store. When |convert_holes| is
// kConvertToUndefined and the source is holey, any holes in the copy are
// replaced with undefined; if that actually happened, the clone's elements
// kind is switched to PACKED_ELEMENTS. Otherwise the source's elements kind
// (including holes) is preserved.
Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
                                          ParameterMode mode,
                                          Node* allocation_site,
                                          HoleConversionMode convert_holes) {
  // TODO(dhai): we should be able to assert IsFastJSArray(array) here, but this
  // function is also used to copy boilerplates even when the no-elements
  // protector is invalid. This function should be renamed to reflect its uses.
  CSA_ASSERT(this, IsJSArray(array));

  Node* length = LoadJSArrayLength(array);
  Node* new_elements = nullptr;
  VARIABLE(var_new_elements, MachineRepresentation::kTagged);
  TVARIABLE(Int32T, var_elements_kind, LoadMapElementsKind(LoadMap(array)));

  Label allocate_jsarray(this), holey_extract(this);

  bool need_conversion =
      convert_holes == HoleConversionMode::kConvertToUndefined;
  if (need_conversion) {
    // We need to take care of holes, if the array is of holey elements kind.
    GotoIf(IsHoleyFastElementsKind(var_elements_kind.value()), &holey_extract);
  }

  // Simple extraction that preserves holes.
  new_elements =
      ExtractFixedArray(LoadElements(array), IntPtrOrSmiConstant(0, mode),
                        TaggedToParameter(length, mode), nullptr,
                        ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, mode,
                        nullptr, var_elements_kind.value());
  var_new_elements.Bind(new_elements);
  Goto(&allocate_jsarray);

  if (need_conversion) {
    BIND(&holey_extract);
    // Convert holes to undefined.
    TVARIABLE(BoolT, var_holes_converted, Int32FalseConstant());
    // Copy |array|'s elements store. The copy will be compatible with the
    // original elements kind unless there are holes in the source. Any holes
    // get converted to undefined, hence in that case the copy is compatible
    // only with PACKED_ELEMENTS and HOLEY_ELEMENTS, and we will choose
    // PACKED_ELEMENTS. Also, if we want to replace holes, we must not use
    // ExtractFixedArrayFlag::kDontCopyCOW.
    new_elements = ExtractFixedArray(
        LoadElements(array), IntPtrOrSmiConstant(0, mode),
        TaggedToParameter(length, mode), nullptr,
        ExtractFixedArrayFlag::kAllFixedArrays, mode, &var_holes_converted);
    var_new_elements.Bind(new_elements);
    // If the array type didn't change, use the original elements kind.
    GotoIfNot(var_holes_converted.value(), &allocate_jsarray);
    // Otherwise use PACKED_ELEMENTS for the target's elements kind.
    var_elements_kind = Int32Constant(PACKED_ELEMENTS);
    Goto(&allocate_jsarray);
  }

  BIND(&allocate_jsarray);
  // Use the canonical map for the chosen elements kind.
  Node* native_context = LoadNativeContext(context);
  TNode<Map> array_map =
      LoadJSArrayElementsMap(var_elements_kind.value(), native_context);

  TNode<Object> result = AllocateJSArray(
      array_map, CAST(var_new_elements.value()), CAST(length), allocation_site);
  return result;
}
4088 :
// Allocates a FixedArray or FixedDoubleArray (depending on |kind|) with the
// given |capacity| and initializes its map and length fields; the element
// slots are left uninitialized. |capacity| must be greater than zero. The
// maximum-length limit for |kind| is enforced statically for constant
// capacities, and dynamically otherwise — a dynamic violation aborts through
// a fatal out-of-memory runtime call. An explicit |fixed_array_map| overrides
// the default map chosen from |kind|.
TNode<FixedArrayBase> CodeStubAssembler::AllocateFixedArray(
    ElementsKind kind, Node* capacity, ParameterMode mode,
    AllocationFlags flags, SloppyTNode<Map> fixed_array_map) {
  Comment("AllocateFixedArray");
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity,
                                          IntPtrOrSmiConstant(0, mode), mode));

  const intptr_t kMaxLength = IsDoubleElementsKind(kind)
                                  ? FixedDoubleArray::kMaxLength
                                  : FixedArray::kMaxLength;
  intptr_t capacity_constant;
  if (ToParameterConstant(capacity, &capacity_constant, mode)) {
    // Constant capacity: verify the limit at stub-compile time.
    CHECK_LE(capacity_constant, kMaxLength);
  } else {
    // Dynamic capacity: emit a runtime limit check.
    Label if_out_of_memory(this, Label::kDeferred), next(this);
    Branch(IntPtrOrSmiGreaterThan(
               capacity,
               IntPtrOrSmiConstant(static_cast<int>(kMaxLength), mode), mode),
           &if_out_of_memory, &next);

    BIND(&if_out_of_memory);
    CallRuntime(Runtime::kFatalProcessOutOfMemoryInvalidArrayLength,
                NoContextConstant());
    Unreachable();

    BIND(&next);
  }

  TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind, mode);

  if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
  // Allocate both array and elements object, and initialize the JSArray.
  Node* array = Allocate(total_size, flags);
  if (fixed_array_map != nullptr) {
    // Conservatively only skip the write barrier if there are no allocation
    // flags, this ensures that the object hasn't ended up in LOS. Note that the
    // fixed array map is currently always immortal and technically wouldn't
    // need the write barrier even in LOS, but it's better to not take chances
    // in case this invariant changes later, since it's difficult to enforce
    // locally here.
    if (flags == CodeStubAssembler::kNone) {
      StoreMapNoWriteBarrier(array, fixed_array_map);
    } else {
      StoreMap(array, fixed_array_map);
    }
  } else {
    RootIndex map_index = IsDoubleElementsKind(kind)
                              ? RootIndex::kFixedDoubleArrayMap
                              : RootIndex::kFixedArrayMap;
    DCHECK(RootsTable::IsImmortalImmovable(map_index));
    StoreMapNoWriteBarrier(array, map_index);
  }
  StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
                                 ParameterToTagged(capacity, mode));
  return UncheckedCast<FixedArray>(array);
}
4146 :
4147 2836 : TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
4148 : Node* source, Node* first, Node* count, Node* capacity, Node* source_map,
4149 : ElementsKind from_kind, AllocationFlags allocation_flags,
4150 : ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
4151 : HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted,
4152 : Node* source_elements_kind) {
4153 : DCHECK_NE(first, nullptr);
4154 : DCHECK_NE(count, nullptr);
4155 : DCHECK_NE(capacity, nullptr);
4156 : DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
4157 : CSA_ASSERT(this,
4158 : WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity));
4159 : CSA_ASSERT(this, WordEqual(source_map, LoadMap(source)));
4160 :
4161 5672 : VARIABLE(var_result, MachineRepresentation::kTagged);
4162 5672 : VARIABLE(var_target_map, MachineRepresentation::kTagged, source_map);
4163 :
4164 8508 : Label done(this, {&var_result}), is_cow(this),
4165 8508 : new_space_check(this, {&var_target_map});
4166 :
4167 : // If source_map is either FixedDoubleArrayMap, or FixedCOWArrayMap but
4168 : // we can't just use COW, use FixedArrayMap as the target map. Otherwise, use
4169 : // source_map as the target map.
4170 2836 : if (IsDoubleElementsKind(from_kind)) {
4171 : CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
4172 112 : var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
4173 56 : Goto(&new_space_check);
4174 : } else {
4175 : CSA_ASSERT(this, Word32BinaryNot(IsFixedDoubleArrayMap(source_map)));
4176 2780 : Branch(WordEqual(var_target_map.value(),
4177 2780 : LoadRoot(RootIndex::kFixedCOWArrayMap)),
4178 2780 : &is_cow, &new_space_check);
4179 :
4180 : BIND(&is_cow);
4181 : {
4182 : // |source| is a COW array, so we don't actually need to allocate a new
4183 : // array unless:
4184 : // 1) |extract_flags| forces us to, or
4185 : // 2) we're asked to extract only part of the |source| (|first| != 0).
4186 2780 : if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
4187 2560 : Branch(WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), first),
4188 632 : &new_space_check, [&] {
4189 632 : var_result.Bind(source);
4190 632 : Goto(&done);
4191 1272 : });
4192 : } else {
4193 4280 : var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
4194 2140 : Goto(&new_space_check);
4195 : }
4196 : }
4197 : }
4198 :
4199 : BIND(&new_space_check);
4200 : {
4201 2836 : bool handle_old_space = !FLAG_young_generation_large_objects;
4202 2836 : if (handle_old_space) {
4203 0 : if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
4204 : handle_old_space = false;
4205 : CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
4206 : count, FixedArray::kHeaderSize, parameter_mode)));
4207 : } else {
4208 : int constant_count;
4209 : handle_old_space =
4210 0 : !TryGetIntPtrOrSmiConstantValue(count, &constant_count,
4211 0 : parameter_mode) ||
4212 0 : (constant_count >
4213 0 : FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
4214 : }
4215 : }
4216 :
4217 2836 : Label old_space(this, Label::kDeferred);
4218 2836 : if (handle_old_space) {
4219 : GotoIfFixedArraySizeDoesntFitInNewSpace(
4220 0 : capacity, &old_space, FixedArray::kHeaderSize, parameter_mode);
4221 : }
4222 :
4223 2836 : Comment("Copy FixedArray in young generation");
4224 : // We use PACKED_ELEMENTS to tell AllocateFixedArray and
4225 : // CopyFixedArrayElements that we want a FixedArray.
4226 : const ElementsKind to_kind = PACKED_ELEMENTS;
4227 : TNode<FixedArrayBase> to_elements =
4228 : AllocateFixedArray(to_kind, capacity, parameter_mode, allocation_flags,
4229 5672 : var_target_map.value());
4230 2836 : var_result.Bind(to_elements);
4231 :
4232 : #ifdef DEBUG
4233 : TNode<IntPtrT> object_word = BitcastTaggedToWord(to_elements);
4234 : TNode<IntPtrT> object_page = PageFromAddress(object_word);
4235 : TNode<IntPtrT> page_flags =
4236 : UncheckedCast<IntPtrT>(Load(MachineType::IntPtr(), object_page,
4237 : IntPtrConstant(Page::kFlagsOffset)));
4238 : CSA_ASSERT(
4239 : this,
4240 : WordNotEqual(
4241 : WordAnd(page_flags,
4242 : IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
4243 : IntPtrConstant(0)));
4244 : #endif
4245 :
4246 2836 : if (convert_holes == HoleConversionMode::kDontConvert &&
4247 : !IsDoubleElementsKind(from_kind)) {
4248 : // We can use CopyElements (memcpy) because we don't need to replace or
4249 : // convert any values. Since {to_elements} is in new-space, CopyElements
4250 : // will efficiently use memcpy.
4251 : FillFixedArrayWithValue(to_kind, to_elements, count, capacity,
4252 2724 : RootIndex::kTheHoleValue, parameter_mode);
4253 2724 : CopyElements(to_kind, to_elements, IntPtrConstant(0), CAST(source),
4254 : ParameterToIntPtr(first, parameter_mode),
4255 : ParameterToIntPtr(count, parameter_mode),
4256 2724 : SKIP_WRITE_BARRIER);
4257 : } else {
4258 112 : CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
4259 : count, capacity, SKIP_WRITE_BARRIER,
4260 : parameter_mode, convert_holes,
4261 224 : var_holes_converted);
4262 : }
4263 2836 : Goto(&done);
4264 :
4265 2836 : if (handle_old_space) {
4266 : BIND(&old_space);
4267 : {
4268 0 : Comment("Copy FixedArray in old generation");
4269 0 : Label copy_one_by_one(this);
4270 :
4271 : // Try to use memcpy if we don't need to convert holes to undefined.
4272 0 : if (convert_holes == HoleConversionMode::kDontConvert &&
4273 0 : source_elements_kind != nullptr) {
4274 : // Only try memcpy if we're not copying object pointers.
4275 0 : GotoIfNot(IsFastSmiElementsKind(source_elements_kind),
4276 0 : ©_one_by_one);
4277 :
4278 : const ElementsKind to_smi_kind = PACKED_SMI_ELEMENTS;
4279 0 : to_elements =
4280 : AllocateFixedArray(to_smi_kind, capacity, parameter_mode,
4281 : allocation_flags, var_target_map.value());
4282 0 : var_result.Bind(to_elements);
4283 :
4284 : FillFixedArrayWithValue(to_smi_kind, to_elements, count, capacity,
4285 0 : RootIndex::kTheHoleValue, parameter_mode);
4286 : // CopyElements will try to use memcpy if it's not conflicting with
4287 : // GC. Otherwise it will copy elements by elements, but skip write
4288 : // barriers (since we're copying smis to smis).
4289 0 : CopyElements(to_smi_kind, to_elements, IntPtrConstant(0),
4290 0 : CAST(source), ParameterToIntPtr(first, parameter_mode),
4291 : ParameterToIntPtr(count, parameter_mode),
4292 0 : SKIP_WRITE_BARRIER);
4293 0 : Goto(&done);
4294 : } else {
4295 0 : Goto(©_one_by_one);
4296 : }
4297 :
4298 : BIND(©_one_by_one);
4299 : {
4300 0 : to_elements =
4301 : AllocateFixedArray(to_kind, capacity, parameter_mode,
4302 : allocation_flags, var_target_map.value());
4303 0 : var_result.Bind(to_elements);
4304 0 : CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
4305 : count, capacity, UPDATE_WRITE_BARRIER,
4306 : parameter_mode, convert_holes,
4307 0 : var_holes_converted);
4308 0 : Goto(&done);
4309 : }
4310 : }
4311 : }
4312 : }
4313 :
4314 : BIND(&done);
4315 5672 : return UncheckedCast<FixedArray>(var_result.value());
4316 : }
4317 :
// Copies [first, first + count) of the FixedDoubleArray |from_array| into a
// new FixedDoubleArray of |capacity|. If a hole is encountered while copying,
// the work is abandoned and the copy restarts via ExtractToFixedArray with
// hole-to-undefined conversion (yielding a regular FixedArray), and
// |var_holes_converted| is set to true; otherwise it is left false.
TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedDoubleArrayFillingHoles(
    Node* from_array, Node* first, Node* count, Node* capacity,
    Node* fixed_array_map, TVariable<BoolT>* var_holes_converted,
    AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags,
    ParameterMode mode) {
  DCHECK_NE(first, nullptr);
  DCHECK_NE(count, nullptr);
  DCHECK_NE(capacity, nullptr);
  DCHECK_NE(var_holes_converted, nullptr);
  CSA_ASSERT(this, IsFixedDoubleArrayMap(fixed_array_map));

  VARIABLE(var_result, MachineRepresentation::kTagged);
  const ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
  Node* to_elements = AllocateFixedArray(kind, capacity, mode, allocation_flags,
                                         fixed_array_map);
  var_result.Bind(to_elements);
  // We first try to copy the FixedDoubleArray to a new FixedDoubleArray.
  // |var_holes_converted| is set to False preliminarily.
  *var_holes_converted = Int32FalseConstant();

  // The construction of the loop and the offsets for double elements is
  // extracted from CopyFixedArrayElements.
  CSA_SLOW_ASSERT(this, MatchesParameterMode(count, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, kind));
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);

  Comment("[ ExtractFixedDoubleArrayFillingHoles");

  // This copy can trigger GC, so we pre-initialize the array with holes.
  FillFixedArrayWithValue(kind, to_elements, IntPtrOrSmiConstant(0, mode),
                          capacity, RootIndex::kTheHoleValue, mode);

  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
  Node* first_from_element_offset =
      ElementOffsetFromIndex(first, kind, mode, 0);
  Node* limit_offset = IntPtrAdd(first_from_element_offset,
                                 IntPtrConstant(first_element_offset));
  // Start past the last element and walk the offsets backwards.
  VARIABLE(var_from_offset, MachineType::PointerRepresentation(),
           ElementOffsetFromIndex(IntPtrOrSmiAdd(first, count, mode), kind,
                                  mode, first_element_offset));

  Label decrement(this, {&var_from_offset}), done(this);
  // Bias the destination base so the same offset addresses both arrays.
  Node* to_array_adjusted =
      IntPtrSub(BitcastTaggedToWord(to_elements), first_from_element_offset);

  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);

  BIND(&decrement);
  {
    Node* from_offset =
        IntPtrSub(var_from_offset.value(), IntPtrConstant(kDoubleSize));
    var_from_offset.Bind(from_offset);

    Node* to_offset = from_offset;

    Label if_hole(this);

    // Jumps to |if_hole| when the loaded double is the hole NaN.
    Node* value = LoadElementAndPrepareForStore(
        from_array, var_from_offset.value(), kind, kind, &if_hole);

    StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
                        to_offset, value);

    Node* compare = WordNotEqual(from_offset, limit_offset);
    Branch(compare, &decrement, &done);

    BIND(&if_hole);
    // We are unlucky: there are holes! We need to restart the copy, this time
    // we will copy the FixedDoubleArray to a new FixedArray with undefined
    // replacing holes. We signal this to the caller through
    // |var_holes_converted|.
    *var_holes_converted = Int32TrueConstant();
    to_elements =
        ExtractToFixedArray(from_array, first, count, capacity, fixed_array_map,
                            kind, allocation_flags, extract_flags, mode,
                            HoleConversionMode::kConvertToUndefined);
    var_result.Bind(to_elements);
    Goto(&done);
  }

  BIND(&done);
  Comment("] ExtractFixedDoubleArrayFillingHoles");
  return UncheckedCast<FixedArrayBase>(var_result.value());
}
4403 :
// General entry point for copying (a slice of) a FixedArray or
// FixedDoubleArray. Dispatches on the source's map: FixedArray sources go
// through ExtractToFixedArray, FixedDoubleArray sources either through
// ExtractFixedDoubleArrayFillingHoles (when hole conversion was requested via
// a non-null |var_holes_converted|) or a plain hole-preserving double copy.
// |first| defaults to 0, |count| to (length - first), and |capacity| to
// |count| when the corresponding argument is nullptr. A zero capacity yields
// the canonical empty FixedArray.
TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
    Node* source, Node* first, Node* count, Node* capacity,
    ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
    TVariable<BoolT>* var_holes_converted, Node* source_runtime_kind) {
  DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays ||
         extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays);
  // If we want to replace holes, ExtractFixedArrayFlag::kDontCopyCOW should not
  // be used, because that disables the iteration which detects holes.
  DCHECK_IMPLIES(var_holes_converted != nullptr,
                 !(extract_flags & ExtractFixedArrayFlag::kDontCopyCOW));
  HoleConversionMode convert_holes =
      var_holes_converted != nullptr ? HoleConversionMode::kConvertToUndefined
                                     : HoleConversionMode::kDontConvert;
  VARIABLE(var_result, MachineRepresentation::kTagged);
  const AllocationFlags allocation_flags =
      (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
          ? CodeStubAssembler::kNone
          : CodeStubAssembler::kAllowLargeObjectAllocation;
  if (first == nullptr) {
    first = IntPtrOrSmiConstant(0, parameter_mode);
  }
  if (count == nullptr) {
    // Default: copy everything from |first| to the end of the source.
    count = IntPtrOrSmiSub(
        TaggedToParameter(LoadFixedArrayBaseLength(source), parameter_mode),
        first, parameter_mode);

    CSA_ASSERT(
        this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant(0, parameter_mode),
                                         count, parameter_mode));
  }
  if (capacity == nullptr) {
    capacity = count;
  } else {
    CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
                         IntPtrOrSmiAdd(first, count, parameter_mode), capacity,
                         parameter_mode)));
  }

  Label if_fixed_double_array(this), empty(this), done(this, {&var_result});
  Node* source_map = LoadMap(source);
  GotoIf(WordEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity), &empty);

  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
    if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
      GotoIf(IsFixedDoubleArrayMap(source_map), &if_fixed_double_array);
    } else {
      CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
    }
  }

  if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
    // Here we can only get |source| as FixedArray, never FixedDoubleArray.
    // PACKED_ELEMENTS is used to signify that the source is a FixedArray.
    Node* to_elements = ExtractToFixedArray(
        source, first, count, capacity, source_map, PACKED_ELEMENTS,
        allocation_flags, extract_flags, parameter_mode, convert_holes,
        var_holes_converted, source_runtime_kind);
    var_result.Bind(to_elements);
    Goto(&done);
  }

  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
    BIND(&if_fixed_double_array);
    Comment("Copy FixedDoubleArray");

    if (convert_holes == HoleConversionMode::kConvertToUndefined) {
      Node* to_elements = ExtractFixedDoubleArrayFillingHoles(
          source, first, count, capacity, source_map, var_holes_converted,
          allocation_flags, extract_flags, parameter_mode);
      var_result.Bind(to_elements);
    } else {
      // We use PACKED_DOUBLE_ELEMENTS to signify that both the source and
      // the target are FixedDoubleArray. That it is PACKED or HOLEY does not
      // matter.
      ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
      TNode<FixedArrayBase> to_elements = AllocateFixedArray(
          kind, capacity, parameter_mode, allocation_flags, source_map);
      FillFixedArrayWithValue(kind, to_elements, count, capacity,
                              RootIndex::kTheHoleValue, parameter_mode);
      CopyElements(kind, to_elements, IntPtrConstant(0), CAST(source),
                   ParameterToIntPtr(first, parameter_mode),
                   ParameterToIntPtr(count, parameter_mode));
      var_result.Bind(to_elements);
    }

    Goto(&done);
  }

  BIND(&empty);
  {
    Comment("Copy empty array");

    var_result.Bind(EmptyFixedArrayConstant());
    Goto(&done);
  }

  BIND(&done);
  return UncheckedCast<FixedArray>(var_result.value());
}
4503 :
// Initializes the length portion of |property_array|'s combined
// length-and-hash field with |length|, stored as a Smi without a write
// barrier. |length| must be in (0, PropertyArray::LengthField::kMax].
void CodeStubAssembler::InitializePropertyArrayLength(Node* property_array,
                                                      Node* length,
                                                      ParameterMode mode) {
  CSA_SLOW_ASSERT(this, IsPropertyArray(property_array));
  CSA_ASSERT(
      this, IntPtrOrSmiGreaterThan(length, IntPtrOrSmiConstant(0, mode), mode));
  CSA_ASSERT(
      this,
      IntPtrOrSmiLessThanOrEqual(
          length, IntPtrOrSmiConstant(PropertyArray::LengthField::kMax, mode),
          mode));
  StoreObjectFieldNoWriteBarrier(
      property_array, PropertyArray::kLengthAndHashOffset,
      ParameterToTagged(length, mode), MachineRepresentation::kTaggedSigned);
}
4519 :
4520 504 : Node* CodeStubAssembler::AllocatePropertyArray(Node* capacity_node,
4521 : ParameterMode mode,
4522 : AllocationFlags flags) {
4523 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
4524 : CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
4525 : IntPtrOrSmiConstant(0, mode), mode));
4526 : TNode<IntPtrT> total_size =
4527 504 : GetPropertyArrayAllocationSize(capacity_node, mode);
4528 :
4529 1008 : TNode<Object> array = Allocate(total_size, flags);
4530 : RootIndex map_index = RootIndex::kPropertyArrayMap;
4531 : DCHECK(RootsTable::IsImmortalImmovable(map_index));
4532 504 : StoreMapNoWriteBarrier(array, map_index);
4533 504 : InitializePropertyArrayLength(array, capacity_node, mode);
4534 504 : return array;
4535 : }
4536 :
4537 504 : void CodeStubAssembler::FillPropertyArrayWithUndefined(Node* array,
4538 : Node* from_node,
4539 : Node* to_node,
4540 : ParameterMode mode) {
4541 : CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
4542 : CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
4543 : CSA_SLOW_ASSERT(this, IsPropertyArray(array));
4544 : ElementsKind kind = PACKED_ELEMENTS;
4545 : Node* value = UndefinedConstant();
4546 504 : BuildFastFixedArrayForEach(array, kind, from_node, to_node,
4547 504 : [this, value](Node* array, Node* offset) {
4548 504 : StoreNoWriteBarrier(
4549 : MachineRepresentation::kTagged, array,
4550 504 : offset, value);
4551 : },
4552 504 : mode);
4553 504 : }
4554 :
// Fills slots [from_node, to_node) of |array| with the root value named by
// |value_root_index| (must be the-hole or undefined). For double elements
// kinds, the numeric payload of the root HeapNumber is stored as a raw
// float64 rather than a tagged pointer.
void CodeStubAssembler::FillFixedArrayWithValue(ElementsKind kind, Node* array,
                                                Node* from_node, Node* to_node,
                                                RootIndex value_root_index,
                                                ParameterMode mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
  DCHECK(value_root_index == RootIndex::kTheHoleValue ||
         value_root_index == RootIndex::kUndefinedValue);

  // Determine the value to initialize the {array} based
  // on the {value_root_index} and the elements {kind}.
  Node* value = LoadRoot(value_root_index);
  if (IsDoubleElementsKind(kind)) {
    value = LoadHeapNumberValue(value);
  }

  BuildFastFixedArrayForEach(
      array, kind, from_node, to_node,
      [this, value, kind](Node* array, Node* offset) {
        if (IsDoubleElementsKind(kind)) {
          // Raw float64 store for double arrays; no write barrier needed.
          StoreNoWriteBarrier(MachineRepresentation::kFloat64, array, offset,
                              value);
        } else {
          StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
                              value);
        }
      },
      mode);
}
4585 :
4586 112 : void CodeStubAssembler::StoreFixedDoubleArrayHole(
4587 : TNode<FixedDoubleArray> array, Node* index, ParameterMode parameter_mode) {
4588 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index, parameter_mode));
4589 : Node* offset =
4590 224 : ElementOffsetFromIndex(index, PACKED_DOUBLE_ELEMENTS, parameter_mode,
4591 : FixedArray::kHeaderSize - kHeapObjectTag);
4592 : CSA_ASSERT(this, IsOffsetInBounds(
4593 : offset, LoadAndUntagFixedArrayBaseLength(array),
4594 : FixedDoubleArray::kHeaderSize, PACKED_DOUBLE_ELEMENTS));
4595 : Node* double_hole =
4596 224 : Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
4597 112 : : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
4598 : // TODO(danno): When we have a Float32/Float64 wrapper class that
4599 : // preserves double bits during manipulation, remove this code/change
4600 : // this to an indexed Float64 store.
4601 112 : if (Is64()) {
4602 : StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
4603 112 : double_hole);
4604 : } else {
4605 : StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
4606 0 : double_hole);
4607 : StoreNoWriteBarrier(MachineRepresentation::kWord32, array,
4608 0 : IntPtrAdd(offset, IntPtrConstant(kInt32Size)),
4609 0 : double_hole);
4610 : }
4611 112 : }
4612 :
// Zero-fills the entire FixedArray |array| (whose length must equal |length|)
// with Smi zero. Smi zero is the all-zero-bits pattern, so a single memset of
// the payload produces a valid tagged array with no write barriers.
void CodeStubAssembler::FillFixedArrayWithSmiZero(TNode<FixedArray> array,
                                                  TNode<IntPtrT> length) {
  CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));

  TNode<IntPtrT> byte_length = TimesTaggedSize(length);
  // Guards against multiplication overflow (note this also implies a
  // non-zero length).
  CSA_ASSERT(this, UintPtrLessThan(length, byte_length));

  static const int32_t fa_base_data_offset =
      FixedArray::kHeaderSize - kHeapObjectTag;
  // Untagged pointer to the first element.
  TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
                                           IntPtrConstant(fa_base_data_offset));

  // Call out to memset to perform initialization.
  TNode<ExternalReference> memset =
      ExternalConstant(ExternalReference::libc_memset_function());
  STATIC_ASSERT(kSizetSize == kIntptrSize);
  CallCFunction(memset, MachineType::Pointer(),
                std::make_pair(MachineType::Pointer(), backing_store),
                std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
                std::make_pair(MachineType::UintPtr(), byte_length));
}
4634 :
// Zero-fills the entire FixedDoubleArray |array| (whose length must equal
// |length|) via a single memset. 0.0 is the all-zero-bits float64, so memset
// yields a correctly initialized double array.
void CodeStubAssembler::FillFixedDoubleArrayWithZero(
    TNode<FixedDoubleArray> array, TNode<IntPtrT> length) {
  CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));

  TNode<IntPtrT> byte_length = TimesDoubleSize(length);
  // Guards against multiplication overflow (note this also implies a
  // non-zero length).
  CSA_ASSERT(this, UintPtrLessThan(length, byte_length));

  static const int32_t fa_base_data_offset =
      FixedDoubleArray::kHeaderSize - kHeapObjectTag;
  // Untagged pointer to the first element.
  TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
                                           IntPtrConstant(fa_base_data_offset));

  // Call out to memset to perform initialization.
  TNode<ExternalReference> memset =
      ExternalConstant(ExternalReference::libc_memset_function());
  STATIC_ASSERT(kSizetSize == kIntptrSize);
  CallCFunction(memset, MachineType::Pointer(),
                std::make_pair(MachineType::Pointer(), backing_store),
                std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
                std::make_pair(MachineType::UintPtr(), byte_length));
}
4656 :
// Jumps to |interesting| if the page containing |object| has the
// kPointersFromHereAreInterestingMask flag set (i.e. stores into |object| may
// require a write barrier); falls through otherwise.
void CodeStubAssembler::JumpIfPointersFromHereAreInteresting(
    TNode<Object> object, Label* interesting) {
  Label finished(this);
  // Page headers are aligned, so the page address is derivable from the
  // object address; read the flag word from the page header.
  TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
  TNode<IntPtrT> object_page = PageFromAddress(object_word);
  TNode<IntPtrT> page_flags = UncheckedCast<IntPtrT>(Load(
      MachineType::IntPtr(), object_page, IntPtrConstant(Page::kFlagsOffset)));
  Branch(
      WordEqual(WordAnd(page_flags,
                        IntPtrConstant(
                            MemoryChunk::kPointersFromHereAreInterestingMask)),
                IntPtrConstant(0)),
      &finished, interesting);
  BIND(&finished);
}
4672 :
// Moves |length| elements within |elements| from |src_index| to |dst_index|;
// the two ranges may overlap. Fast path: a libc memmove of the raw payload
// when no write barrier can be needed. Slow path (old-space tagged arrays):
// an element-wise copy with full write barriers, walking forward or backward
// depending on the sign of the displacement so overlap is handled correctly.
void CodeStubAssembler::MoveElements(ElementsKind kind,
                                     TNode<FixedArrayBase> elements,
                                     TNode<IntPtrT> dst_index,
                                     TNode<IntPtrT> src_index,
                                     TNode<IntPtrT> length) {
  Label finished(this);
  Label needs_barrier(this);
  // Double arrays hold raw float64s, never pointers, so no barrier check.
  const bool needs_barrier_check = !IsDoubleElementsKind(kind);

  DCHECK(IsFastElementsKind(kind));
  CSA_ASSERT(this, IsFixedArrayWithKind(elements, kind));
  CSA_ASSERT(this,
             IntPtrLessThanOrEqual(IntPtrAdd(dst_index, length),
                                   LoadAndUntagFixedArrayBaseLength(elements)));
  CSA_ASSERT(this,
             IntPtrLessThanOrEqual(IntPtrAdd(src_index, length),
                                   LoadAndUntagFixedArrayBaseLength(elements)));

  // The write barrier can be ignored if {dst_elements} is in new space, or if
  // the elements pointer is FixedDoubleArray.
  if (needs_barrier_check) {
    JumpIfPointersFromHereAreInteresting(elements, &needs_barrier);
  }

  const TNode<IntPtrT> source_byte_length =
      IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
  static const int32_t fa_base_data_offset =
      FixedArrayBase::kHeaderSize - kHeapObjectTag;
  TNode<IntPtrT> elements_intptr = BitcastTaggedToWord(elements);
  TNode<IntPtrT> target_data_ptr =
      IntPtrAdd(elements_intptr,
                ElementOffsetFromIndex(dst_index, kind, INTPTR_PARAMETERS,
                                       fa_base_data_offset));
  TNode<IntPtrT> source_data_ptr =
      IntPtrAdd(elements_intptr,
                ElementOffsetFromIndex(src_index, kind, INTPTR_PARAMETERS,
                                       fa_base_data_offset));
  // memmove, not memcpy: the regions may overlap.
  TNode<ExternalReference> memmove =
      ExternalConstant(ExternalReference::libc_memmove_function());
  CallCFunction(memmove, MachineType::Pointer(),
                std::make_pair(MachineType::Pointer(), target_data_ptr),
                std::make_pair(MachineType::Pointer(), source_data_ptr),
                std::make_pair(MachineType::UintPtr(), source_byte_length));

  if (needs_barrier_check) {
    Goto(&finished);

    BIND(&needs_barrier);
    {
      const TNode<IntPtrT> begin = src_index;
      const TNode<IntPtrT> end = IntPtrAdd(begin, length);

      // If dst_index is less than src_index, then walk forward.
      const TNode<IntPtrT> delta =
          IntPtrMul(IntPtrSub(dst_index, begin),
                    IntPtrConstant(ElementsKindToByteSize(kind)));
      // Copies one element from source offset to source offset + delta.
      auto loop_body = [&](Node* array, Node* offset) {
        Node* const element = Load(MachineType::AnyTagged(), array, offset);
        Node* const delta_offset = IntPtrAdd(offset, delta);
        Store(array, delta_offset, element);
      };

      Label iterate_forward(this);
      Label iterate_backward(this);
      Branch(IntPtrLessThan(delta, IntPtrConstant(0)), &iterate_forward,
             &iterate_backward);
      BIND(&iterate_forward);
      {
        // Make a loop for the stores.
        BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
                                   INTPTR_PARAMETERS,
                                   ForEachDirection::kForward);
        Goto(&finished);
      }

      BIND(&iterate_backward);
      {
        BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
                                   INTPTR_PARAMETERS,
                                   ForEachDirection::kReverse);
        Goto(&finished);
      }
    }
    BIND(&finished);
  }
}
4759 :
// Copies |length| elements from |src_elements|[src_index..] to
// |dst_elements|[dst_index..]. The arrays must be distinct (or length zero),
// so the fast path can use memcpy of the raw payload. When the destination
// may require write barriers, an element-wise forward copy is emitted
// instead, honoring |write_barrier|.
void CodeStubAssembler::CopyElements(ElementsKind kind,
                                     TNode<FixedArrayBase> dst_elements,
                                     TNode<IntPtrT> dst_index,
                                     TNode<FixedArrayBase> src_elements,
                                     TNode<IntPtrT> src_index,
                                     TNode<IntPtrT> length,
                                     WriteBarrierMode write_barrier) {
  Label finished(this);
  Label needs_barrier(this);
  // Double arrays hold raw float64s, never pointers, so no barrier check.
  const bool needs_barrier_check = !IsDoubleElementsKind(kind);

  DCHECK(IsFastElementsKind(kind));
  CSA_ASSERT(this, IsFixedArrayWithKind(dst_elements, kind));
  CSA_ASSERT(this, IsFixedArrayWithKind(src_elements, kind));
  CSA_ASSERT(this, IntPtrLessThanOrEqual(
                       IntPtrAdd(dst_index, length),
                       LoadAndUntagFixedArrayBaseLength(dst_elements)));
  CSA_ASSERT(this, IntPtrLessThanOrEqual(
                       IntPtrAdd(src_index, length),
                       LoadAndUntagFixedArrayBaseLength(src_elements)));
  // memcpy requires non-overlapping buffers: distinct arrays or empty copy.
  CSA_ASSERT(this, Word32Or(WordNotEqual(dst_elements, src_elements),
                            WordEqual(length, IntPtrConstant(0))));

  // The write barrier can be ignored if {dst_elements} is in new space, or if
  // the elements pointer is FixedDoubleArray.
  if (needs_barrier_check) {
    JumpIfPointersFromHereAreInteresting(dst_elements, &needs_barrier);
  }

  TNode<IntPtrT> source_byte_length =
      IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
  static const int32_t fa_base_data_offset =
      FixedArrayBase::kHeaderSize - kHeapObjectTag;
  TNode<IntPtrT> src_offset_start = ElementOffsetFromIndex(
      src_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
  TNode<IntPtrT> dst_offset_start = ElementOffsetFromIndex(
      dst_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
  TNode<IntPtrT> src_elements_intptr = BitcastTaggedToWord(src_elements);
  TNode<IntPtrT> source_data_ptr =
      IntPtrAdd(src_elements_intptr, src_offset_start);
  TNode<IntPtrT> dst_elements_intptr = BitcastTaggedToWord(dst_elements);
  TNode<IntPtrT> dst_data_ptr =
      IntPtrAdd(dst_elements_intptr, dst_offset_start);
  TNode<ExternalReference> memcpy =
      ExternalConstant(ExternalReference::libc_memcpy_function());
  CallCFunction(memcpy, MachineType::Pointer(),
                std::make_pair(MachineType::Pointer(), dst_data_ptr),
                std::make_pair(MachineType::Pointer(), source_data_ptr),
                std::make_pair(MachineType::UintPtr(), source_byte_length));

  if (needs_barrier_check) {
    Goto(&finished);

    BIND(&needs_barrier);
    {
      const TNode<IntPtrT> begin = src_index;
      const TNode<IntPtrT> end = IntPtrAdd(begin, length);
      // Byte displacement between a source slot and its destination slot.
      const TNode<IntPtrT> delta =
          IntPtrMul(IntPtrSub(dst_index, src_index),
                    IntPtrConstant(ElementsKindToByteSize(kind)));
      BuildFastFixedArrayForEach(
          src_elements, kind, begin, end,
          [&](Node* array, Node* offset) {
            Node* const element = Load(MachineType::AnyTagged(), array, offset);
            Node* const delta_offset = IntPtrAdd(offset, delta);
            if (write_barrier == SKIP_WRITE_BARRIER) {
              StoreNoWriteBarrier(MachineRepresentation::kTagged, dst_elements,
                                  delta_offset, element);
            } else {
              Store(dst_elements, delta_offset, element);
            }
          },
          INTPTR_PARAMETERS, ForEachDirection::kForward);
      Goto(&finished);
    }
    BIND(&finished);
  }
}
4838 :
// Copies |element_count| elements starting at |first_element| from
// |from_array| (of |from_kind|) into |to_array| (of |to_kind|, with
// |capacity| slots), converting between tagged and double representations as
// needed. Holes are either preserved, converted to undefined
// (|convert_holes|, with |var_holes_converted| signalling that at least one
// hole was seen), or stored as the hole NaN for double targets. The copy
// walks backwards from the end of the range toward |first_element|.
void CodeStubAssembler::CopyFixedArrayElements(
    ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
    Node* to_array, Node* first_element, Node* element_count, Node* capacity,
    WriteBarrierMode barrier_mode, ParameterMode mode,
    HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted) {
  DCHECK_IMPLIES(var_holes_converted != nullptr,
                 convert_holes == HoleConversionMode::kConvertToUndefined);
  CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
  Comment("[ CopyFixedArrayElements");

  // Typed array elements are not supported.
  DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
  DCHECK(!IsFixedTypedArrayElementsKind(to_kind));

  Label done(this);
  bool from_double_elements = IsDoubleElementsKind(from_kind);
  bool to_double_elements = IsDoubleElementsKind(to_kind);
  bool doubles_to_objects_conversion =
      IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
  // Double->object conversion allocates HeapNumbers, so stores into the
  // target always need a barrier in that case.
  bool needs_write_barrier =
      doubles_to_objects_conversion ||
      (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
  // When source and target slots have the same byte size, a single offset
  // variable can address both arrays.
  bool element_offset_matches =
      !needs_write_barrier &&
      (kTaggedSize == kDoubleSize ||
       IsDoubleElementsKind(from_kind) == IsDoubleElementsKind(to_kind));
  Node* double_hole =
      Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
             : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));

  // If copying might trigger a GC, we pre-initialize the FixedArray such that
  // it's always in a consistent state.
  if (convert_holes == HoleConversionMode::kConvertToUndefined) {
    DCHECK(IsObjectElementsKind(to_kind));
    // Use undefined for the part that we copy and holes for the rest.
    // Later if we run into a hole in the source we can just skip the writing
    // to the target and are still guaranteed that we get an undefined.
    FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
                            element_count, RootIndex::kUndefinedValue, mode);
    FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
                            RootIndex::kTheHoleValue, mode);
  } else if (doubles_to_objects_conversion) {
    // Pre-initialized the target with holes so later if we run into a hole in
    // the source we can just skip the writing to the target.
    FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
                            capacity, RootIndex::kTheHoleValue, mode);
  } else if (element_count != capacity) {
    FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
                            RootIndex::kTheHoleValue, mode);
  }

  // The loop runs from the end of the copied range down to |limit_offset|
  // (the offset of |first_element|), decrementing each iteration.
  Node* first_from_element_offset =
      ElementOffsetFromIndex(first_element, from_kind, mode, 0);
  Node* limit_offset = IntPtrAdd(first_from_element_offset,
                                 IntPtrConstant(first_element_offset));
  VARIABLE(
      var_from_offset, MachineType::PointerRepresentation(),
      ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count, mode),
                             from_kind, mode, first_element_offset));
  // This second variable is used only when the element sizes of source and
  // destination arrays do not match.
  VARIABLE(var_to_offset, MachineType::PointerRepresentation());
  if (element_offset_matches) {
    var_to_offset.Bind(var_from_offset.value());
  } else {
    var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
                                              first_element_offset));
  }

  Variable* vars[] = {&var_from_offset, &var_to_offset, var_holes_converted};
  int num_vars =
      var_holes_converted != nullptr ? arraysize(vars) : arraysize(vars) - 1;
  Label decrement(this, num_vars, vars);

  // When offsets match, bias the target pointer so the shared offset
  // (relative to |first_element| in the source) addresses slot 0 of the
  // target correctly.
  Node* to_array_adjusted =
      element_offset_matches
          ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
          : to_array;

  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);

  BIND(&decrement);
  {
    Node* from_offset = IntPtrSub(
        var_from_offset.value(),
        IntPtrConstant(from_double_elements ? kDoubleSize : kTaggedSize));
    var_from_offset.Bind(from_offset);

    Node* to_offset;
    if (element_offset_matches) {
      to_offset = from_offset;
    } else {
      to_offset = IntPtrSub(
          var_to_offset.value(),
          IntPtrConstant(to_double_elements ? kDoubleSize : kTaggedSize));
      var_to_offset.Bind(to_offset);
    }

    Label next_iter(this), store_double_hole(this), signal_hole(this);
    Label* if_hole;
    if (convert_holes == HoleConversionMode::kConvertToUndefined) {
      // The target elements array is already preinitialized with undefined
      // so we only need to signal that a hole was found and continue the loop.
      if_hole = &signal_hole;
    } else if (doubles_to_objects_conversion) {
      // The target elements array is already preinitialized with holes, so we
      // can just proceed with the next iteration.
      if_hole = &next_iter;
    } else if (IsDoubleElementsKind(to_kind)) {
      if_hole = &store_double_hole;
    } else {
      // In all the other cases don't check for holes and copy the data as is.
      if_hole = nullptr;
    }

    Node* value = LoadElementAndPrepareForStore(
        from_array, var_from_offset.value(), from_kind, to_kind, if_hole);

    if (needs_write_barrier) {
      CHECK_EQ(to_array, to_array_adjusted);
      Store(to_array_adjusted, to_offset, value);
    } else if (to_double_elements) {
      StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
                          to_offset, value);
    } else {
      StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array_adjusted,
                          to_offset, value);
    }
    Goto(&next_iter);

    if (if_hole == &store_double_hole) {
      BIND(&store_double_hole);
      // Don't use doubles to store the hole double, since manipulating the
      // signaling NaN used for the hole in C++, e.g. with bit_cast, will
      // change its value on ia32 (the x87 stack is used to return values
      // and stores to the stack silently clear the signalling bit).
      //
      // TODO(danno): When we have a Float32/Float64 wrapper class that
      // preserves double bits during manipulation, remove this code/change
      // this to an indexed Float64 store.
      if (Is64()) {
        StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
                            to_offset, double_hole);
      } else {
        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
                            to_offset, double_hole);
        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
                            IntPtrAdd(to_offset, IntPtrConstant(kInt32Size)),
                            double_hole);
      }
      Goto(&next_iter);
    } else if (if_hole == &signal_hole) {
      // This case happens only when IsObjectElementsKind(to_kind).
      BIND(&signal_hole);
      if (var_holes_converted != nullptr) {
        *var_holes_converted = Int32TrueConstant();
      }
      Goto(&next_iter);
    }

    BIND(&next_iter);
    Node* compare = WordNotEqual(from_offset, limit_offset);
    Branch(compare, &decrement, &done);
  }

  BIND(&done);
  Comment("] CopyFixedArrayElements");
}
5012 :
// Casts |base| to a FixedArray, accepting both the plain FixedArray map and
// the copy-on-write FixedArray map; jumps to |cast_fail| for anything else.
TNode<FixedArray> CodeStubAssembler::HeapObjectToFixedArray(
    TNode<HeapObject> base, Label* cast_fail) {
  Label fixed_array(this);
  TNode<Map> map = LoadMap(base);
  GotoIf(WordEqual(map, LoadRoot(RootIndex::kFixedArrayMap)), &fixed_array);
  GotoIf(WordNotEqual(map, LoadRoot(RootIndex::kFixedCOWArrayMap)), cast_fail);
  Goto(&fixed_array);
  BIND(&fixed_array);
  return UncheckedCast<FixedArray>(base);
}
5023 :
// Copies |property_count| values from |from_array| (a PropertyArray or the
// empty fixed array) into |to_array|. When |destroy_source| is kNo the
// source stays usable, so MutableHeapNumbers are cloned (forcing write
// barriers); when kYes the source is consumed and (in debug builds) zapped
// with undefined afterwards.
void CodeStubAssembler::CopyPropertyArrayValues(Node* from_array,
                                                Node* to_array,
                                                Node* property_count,
                                                WriteBarrierMode barrier_mode,
                                                ParameterMode mode,
                                                DestroySource destroy_source) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(property_count, mode));
  CSA_SLOW_ASSERT(this, Word32Or(IsPropertyArray(from_array),
                                 IsEmptyFixedArray(from_array)));
  CSA_SLOW_ASSERT(this, IsPropertyArray(to_array));
  Comment("[ CopyPropertyArrayValues");

  bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;

  if (destroy_source == DestroySource::kNo) {
    // PropertyArray may contain MutableHeapNumbers, which will be cloned on the
    // heap, requiring a write barrier.
    needs_write_barrier = true;
  }

  Node* start = IntPtrOrSmiConstant(0, mode);
  ElementsKind kind = PACKED_ELEMENTS;
  BuildFastFixedArrayForEach(
      from_array, kind, start, property_count,
      [this, to_array, needs_write_barrier, destroy_source](Node* array,
                                                            Node* offset) {
        Node* value = Load(MachineType::AnyTagged(), array, offset);

        if (destroy_source == DestroySource::kNo) {
          value = CloneIfMutablePrimitive(CAST(value));
        }

        if (needs_write_barrier) {
          Store(to_array, offset, value);
        } else {
          StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
                              value);
        }
      },
      mode);

#ifdef DEBUG
  // Zap {from_array} if the copying above has made it invalid.
  if (destroy_source == DestroySource::kYes) {
    Label did_zap(this);
    GotoIf(IsEmptyFixedArray(from_array), &did_zap);
    FillPropertyArrayWithUndefined(from_array, start, property_count, mode);

    Goto(&did_zap);
    BIND(&did_zap);
  }
#endif
  Comment("] CopyPropertyArrayValues");
}
5078 :
// Copies |character_count| characters from |from_string| starting at
// |from_index| into |to_string| starting at |to_index|, converting between
// one-byte and two-byte encodings as dictated by the encoding parameters
// (narrowing two-byte -> one-byte is disallowed). When source and target
// offsets provably advance in lockstep, a single offset variable drives both.
void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
                                             TNode<IntPtrT> from_index,
                                             TNode<IntPtrT> to_index,
                                             TNode<IntPtrT> character_count,
                                             String::Encoding from_encoding,
                                             String::Encoding to_encoding) {
  // Cannot assert IsString(from_string) and IsString(to_string) here because
  // CSA::SubString can pass in faked sequential strings when handling external
  // subject strings.
  bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
  bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
  DCHECK_IMPLIES(to_one_byte, from_one_byte);
  Comment("CopyStringCharacters ",
          from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", " -> ",
          to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");

  // Model the character payloads as uint8/uint16 element arrays.
  ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
  ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
  Node* from_offset = ElementOffsetFromIndex(from_index, from_kind,
                                             INTPTR_PARAMETERS, header_size);
  Node* to_offset =
      ElementOffsetFromIndex(to_index, to_kind, INTPTR_PARAMETERS, header_size);
  Node* byte_count =
      ElementOffsetFromIndex(character_count, from_kind, INTPTR_PARAMETERS);
  Node* limit_offset = IntPtrAdd(from_offset, byte_count);

  // Prepare the fast loop
  MachineType type =
      from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
  MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
                                          : MachineRepresentation::kWord16;
  int from_increment = 1 << ElementsKindToShiftSize(from_kind);
  int to_increment = 1 << ElementsKindToShiftSize(to_kind);

  VARIABLE(current_to_offset, MachineType::PointerRepresentation(), to_offset);
  VariableList vars({&current_to_offset}, zone());
  int to_index_constant = 0, from_index_constant = 0;
  // Same encoding and (provably) equal start index: one offset suffices and
  // the separate to-offset variable need not be incremented.
  bool index_same = (from_encoding == to_encoding) &&
                    (from_index == to_index ||
                     (ToInt32Constant(from_index, from_index_constant) &&
                      ToInt32Constant(to_index, to_index_constant) &&
                      from_index_constant == to_index_constant));
  BuildFastLoop(vars, from_offset, limit_offset,
                [this, from_string, to_string, &current_to_offset, to_increment,
                 type, rep, index_same](Node* offset) {
                  Node* value = Load(type, from_string, offset);
                  StoreNoWriteBarrier(
                      rep, to_string,
                      index_same ? offset : current_to_offset.value(), value);
                  if (!index_same) {
                    Increment(&current_to_offset, to_increment);
                  }
                },
                from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
}
5136 :
// Loads the element at |offset| from |array| (of |from_kind|) and converts it
// into the representation required by |to_kind|: doubles are boxed into
// HeapNumbers for tagged targets, Smis/HeapNumbers are unboxed to float64 for
// double targets. Jumps to |if_hole| (when non-null) if the element is the
// hole.
Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
                                                       Node* offset,
                                                       ElementsKind from_kind,
                                                       ElementsKind to_kind,
                                                       Label* if_hole) {
  CSA_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
  if (IsDoubleElementsKind(from_kind)) {
    // The hole check is done on the raw NaN bits during the load.
    Node* value =
        LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
    if (!IsDoubleElementsKind(to_kind)) {
      // Box the raw double for a tagged target; may allocate.
      value = AllocateHeapNumberWithValue(value);
    }
    return value;

  } else {
    Node* value = Load(MachineType::AnyTagged(), array, offset);
    if (if_hole) {
      GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
    }
    if (IsDoubleElementsKind(to_kind)) {
      if (IsSmiElementsKind(from_kind)) {
        value = SmiToFloat64(value);
      } else {
        value = LoadHeapNumberValue(value);
      }
    }
    return value;
  }
}
5166 :
5167 2676 : Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
5168 : ParameterMode mode) {
5169 : CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
5170 2676 : Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
5171 2676 : Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
5172 : Node* padding =
5173 2676 : IntPtrOrSmiConstant(JSObject::kMinAddedElementsCapacity, mode);
5174 2676 : return IntPtrOrSmiAdd(new_capacity, padding, mode);
5175 : }
5176 :
// Convenience overload: derives the current capacity from |elements|' length
// field, normalizes |key| and the capacity to the optimal parameter mode, and
// delegates to the full TryGrowElementsCapacity below. Returns the new
// backing store, or jumps to |bailout|.
Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
                                                 ElementsKind kind, Node* key,
                                                 Label* bailout) {
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
  CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
  Node* capacity = LoadFixedArrayBaseLength(elements);

  ParameterMode mode = OptimalParameterMode();
  capacity = TaggedToParameter(capacity, mode);
  key = TaggedToParameter(key, mode);

  return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
                                 bailout);
}
5192 :
// Grows |object|'s backing store so that index |key| fits, returning the new
// elements array. Bails out to |bailout| when the gap between |key| and the
// current |capacity| exceeds JSObject::kMaxGap (runtime must decide between
// growing and dictionary mode in that case).
Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
                                                 ElementsKind kind, Node* key,
                                                 Node* capacity,
                                                 ParameterMode mode,
                                                 Label* bailout) {
  Comment("TryGrowElementsCapacity");
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));

  // If the gap growth is too big, fall back to the runtime.
  Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
  Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);

  // Calculate the capacity of the new backing store.
  // Grow from key + 1 so the new capacity covers the index being written.
  Node* new_capacity = CalculateNewElementsCapacity(
      IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
  return GrowElementsCapacity(object, elements, kind, kind, capacity,
                              new_capacity, mode, bailout);
}
5215 :
// Allocates a new elements backing store of |new_capacity| (of |to_kind|),
// copies the existing |capacity| elements over (converting from |from_kind|
// if needed), installs it on |object|, and returns it. Bails out to |bailout|
// when the allocation would not fit in a new-space page.
Node* CodeStubAssembler::GrowElementsCapacity(
    Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
    Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
  Comment("[ GrowElementsCapacity");
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));

  // If size of the allocation for the new capacity doesn't fit in a page
  // that we can bump-pointer allocate from, fall back to the runtime.
  int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(
             new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
         bailout);

  // Allocate the new backing store.
  Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);

  // Copy the elements from the old elements store to the new.
  // The size-check above guarantees that the |new_elements| is allocated
  // in new space so we can skip the write barrier.
  CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
                         new_capacity, SKIP_WRITE_BARRIER, mode);

  StoreObjectField(object, JSObject::kElementsOffset, new_elements);
  Comment("] GrowElementsCapacity");
  return new_elements;
}
5245 :
// Writes an AllocationMemento directly behind |base| (at
// |base_allocation_size| bytes from its start) pointing at |allocation_site|,
// and bumps the site's pretenuring creation count when pretenuring is
// enabled.
void CodeStubAssembler::InitializeAllocationMemento(Node* base,
                                                    Node* base_allocation_size,
                                                    Node* allocation_site) {
  Comment("[Initialize AllocationMemento");
  // The memento must be allocated contiguously with |base|, hence
  // InnerAllocate rather than a fresh allocation.
  TNode<Object> memento =
      InnerAllocate(CAST(base), UncheckedCast<IntPtrT>(base_allocation_size));
  StoreMapNoWriteBarrier(memento, RootIndex::kAllocationMementoMap);
  StoreObjectFieldNoWriteBarrier(
      memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    TNode<Int32T> count = UncheckedCast<Int32T>(LoadObjectField(
        allocation_site, AllocationSite::kPretenureCreateCountOffset,
        MachineType::Int32()));

    TNode<Int32T> incremented_count = Int32Add(count, Int32Constant(1));
    StoreObjectFieldNoWriteBarrier(
        allocation_site, AllocationSite::kPretenureCreateCountOffset,
        incremented_count, MachineRepresentation::kWord32);
  }
  Comment("]");
}
5267 :
// Converts a tagged |value| to a float64 if it is a Smi or HeapNumber;
// otherwise jumps to |if_valueisnotnumber| without converting.
Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
                                            Label* if_valueisnotnumber) {
  Label out(this);
  VARIABLE(var_result, MachineRepresentation::kFloat64);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this), if_valueisnotsmi(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  BIND(&if_valueissmi);
  {
    // Convert the Smi {value}.
    var_result.Bind(SmiToFloat64(value));
    Goto(&out);
  }

  BIND(&if_valueisnotsmi);
  {
    // Check if {value} is a HeapNumber.
    Label if_valueisheapnumber(this);
    Branch(IsHeapNumber(value), &if_valueisheapnumber, if_valueisnotnumber);

    BIND(&if_valueisheapnumber);
    {
      // Load the floating point value.
      var_result.Bind(LoadHeapNumberValue(value));
      Goto(&out);
    }
  }
  BIND(&out);
  return var_result.value();
}
5300 :
// Converts {value} to a Float64. Non-Number inputs are first converted via
// the NonNumberToNumber builtin (which may run arbitrary JS, e.g. valueOf),
// and the loop re-dispatches on the conversion result.
Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged);
  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  BIND(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Convert {value} to Float64 if it is a number and convert it to a number
    // otherwise.
    Label if_valueisnotnumber(this, Label::kDeferred);
    Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
    var_result.Bind(result);
    Goto(&done_loop);

    BIND(&if_valueisnotnumber);
    {
      // Convert the {value} to a Number first.
      var_value.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, value));
      Goto(&loop);
    }
  }
  BIND(&done_loop);
  return var_result.value();
}
5331 :
5332 1400 : Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
5333 2800 : VARIABLE(var_result, MachineRepresentation::kWord32);
5334 1400 : Label done(this);
5335 : TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumber>(context, value,
5336 1400 : &done, &var_result);
5337 : BIND(&done);
5338 2800 : return var_result.value();
5339 : }
5340 :
// Truncate {value} to word32 and jump to {if_number} if it is a Number,
// or find that it is a BigInt and jump to {if_bigint}.
void CodeStubAssembler::TaggedToWord32OrBigInt(Node* context, Node* value,
                                               Label* if_number,
                                               Variable* var_word32,
                                               Label* if_bigint,
                                               Variable* var_bigint) {
  // Thin wrapper: kToNumeric dispatch with no type-feedback collection.
  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
      context, value, if_number, var_word32, if_bigint, var_bigint);
}

// Truncate {value} to word32 and jump to {if_number} if it is a Number,
// or find that it is a BigInt and jump to {if_bigint}. In either case,
// store the type feedback in {var_feedback}.
void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
    Node* context, Node* value, Label* if_number, Variable* var_word32,
    Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
      context, value, if_number, var_word32, if_bigint, var_bigint,
      var_feedback);
}
5362 :
// Shared implementation for the TaggedToWord32* helpers above. Truncates
// {value} to word32 and exits through {if_number}, or (in kToNumeric mode)
// detects a BigInt and exits through {if_bigint}. Loops after converting a
// non-Numeric input so the conversion result is re-dispatched. When
// {var_feedback} is non-null, binary-operation type feedback is accumulated
// along the way.
template <Object::Conversion conversion>
void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
    Node* context, Node* value, Label* if_number, Variable* var_word32,
    Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
  DCHECK(var_word32->rep() == MachineRepresentation::kWord32);
  DCHECK(var_bigint == nullptr ||
         var_bigint->rep() == MachineRepresentation::kTagged);
  DCHECK(var_feedback == nullptr ||
         var_feedback->rep() == MachineRepresentation::kTaggedSigned);

  // We might need to loop after conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNone);
  // {var_feedback} only participates in the loop when it exists.
  Variable* loop_vars[] = {&var_value, var_feedback};
  int num_vars =
      var_feedback != nullptr ? arraysize(loop_vars) : arraysize(loop_vars) - 1;
  Label loop(this, num_vars, loop_vars);
  Goto(&loop);
  BIND(&loop);
  {
    value = var_value.value();
    Label not_smi(this), is_heap_number(this), is_oddball(this),
        is_bigint(this);
    GotoIf(TaggedIsNotSmi(value), &not_smi);

    // {value} is a Smi.
    var_word32->Bind(SmiToInt32(value));
    CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
    Goto(if_number);

    BIND(&not_smi);
    Node* map = LoadMap(value);
    GotoIf(IsHeapNumberMap(map), &is_heap_number);
    Node* instance_type = LoadMapInstanceType(map);
    if (conversion == Object::Conversion::kToNumeric) {
      GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
    }

    // Not HeapNumber (or BigInt if conversion == kToNumeric).
    {
      if (var_feedback != nullptr) {
        // We do not require an Or with earlier feedback here because once we
        // convert the value to a Numeric, we cannot reach this path. We can
        // only reach this path on the first pass when the feedback is kNone.
        CSA_ASSERT(this, SmiEqual(CAST(var_feedback->value()),
                                  SmiConstant(BinaryOperationFeedback::kNone)));
      }
      GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
      // Not an oddball either -> convert.
      auto builtin = conversion == Object::Conversion::kToNumeric
                         ? Builtins::kNonNumberToNumeric
                         : Builtins::kNonNumberToNumber;
      var_value.Bind(CallBuiltin(builtin, context, value));
      OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
      Goto(&loop);

      BIND(&is_oddball);
      // Oddballs cache their ToNumber result; use it directly.
      var_value.Bind(LoadObjectField(value, Oddball::kToNumberOffset));
      OverwriteFeedback(var_feedback,
                        BinaryOperationFeedback::kNumberOrOddball);
      Goto(&loop);
    }

    BIND(&is_heap_number);
    var_word32->Bind(TruncateHeapNumberValueToWord32(value));
    CombineFeedback(var_feedback, BinaryOperationFeedback::kNumber);
    Goto(if_number);

    if (conversion == Object::Conversion::kToNumeric) {
      BIND(&is_bigint);
      var_bigint->Bind(value);
      CombineFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
      Goto(if_bigint);
    }
  }
}
5439 :
5440 5320 : Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
5441 : Node* value = LoadHeapNumberValue(object);
5442 10640 : return TruncateFloat64ToWord32(value);
5443 : }
5444 :
5445 340 : void CodeStubAssembler::TryHeapNumberToSmi(TNode<HeapNumber> number,
5446 : TVariable<Smi>& var_result_smi,
5447 : Label* if_smi) {
5448 340 : TNode<Float64T> value = LoadHeapNumberValue(number);
5449 340 : TryFloat64ToSmi(value, var_result_smi, if_smi);
5450 340 : }
5451 :
5452 5672 : void CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value,
5453 : TVariable<Smi>& var_result_smi,
5454 : Label* if_smi) {
5455 5672 : TNode<Int32T> value32 = RoundFloat64ToInt32(value);
5456 5672 : TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
5457 :
5458 5672 : Label if_int32(this), if_heap_number(this, Label::kDeferred);
5459 :
5460 11344 : GotoIfNot(Float64Equal(value, value64), &if_heap_number);
5461 17016 : GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
5462 22688 : Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
5463 17016 : Int32Constant(0)),
5464 5672 : &if_heap_number, &if_int32);
5465 :
5466 : TVARIABLE(Number, var_result);
5467 : BIND(&if_int32);
5468 : {
5469 : if (SmiValuesAre32Bits()) {
5470 17016 : var_result_smi = SmiTag(ChangeInt32ToIntPtr(value32));
5471 : } else {
5472 : DCHECK(SmiValuesAre31Bits());
5473 : TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
5474 : TNode<BoolT> overflow = Projection<1>(pair);
5475 : GotoIf(overflow, &if_heap_number);
5476 : var_result_smi =
5477 : BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
5478 : }
5479 5672 : Goto(if_smi);
5480 : }
5481 : BIND(&if_heap_number);
5482 5672 : }
5483 :
// Boxes the Float64 {value} as a Number: a Smi when exactly representable
// as one, otherwise a freshly allocated HeapNumber.
TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
    SloppyTNode<Float64T> value) {
  Label if_smi(this), done(this);
  TVARIABLE(Smi, var_smi_result);
  TVARIABLE(Number, var_result);
  TryFloat64ToSmi(value, var_smi_result, &if_smi);

  // TryFloat64ToSmi falls through to here when {value} is not
  // Smi-representable; allocate a HeapNumber instead.
  var_result = AllocateHeapNumberWithValue(value);
  Goto(&done);

  BIND(&if_smi);
  {
    var_result = var_smi_result.value();
    Goto(&done);
  }
  BIND(&done);
  return var_result.value();
}
5502 :
// Boxes the Int32 {value} as a Number. With 32-bit Smis every int32 fits;
// with 31-bit Smis out-of-range values get a HeapNumber.
TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(
    SloppyTNode<Int32T> value) {
  if (SmiValuesAre32Bits()) {
    return SmiTag(ChangeInt32ToIntPtr(value));
  }
  DCHECK(SmiValuesAre31Bits());
  TVARIABLE(Number, var_result);
  // Smi-tag by doubling; the overflow flag detects values outside the
  // 31-bit Smi range.
  TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
  TNode<BoolT> overflow = Projection<1>(pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_overflow);
  {
    TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
    TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
    var_result = result;
    Goto(&if_join);
  }
  BIND(&if_notoverflow);
  {
    // The doubled value is already the tagged representation.
    TNode<IntPtrT> almost_tagged_value =
        ChangeInt32ToIntPtr(Projection<0>(pair));
    TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
    var_result = result;
    Goto(&if_join);
  }
  BIND(&if_join);
  return var_result.value();
}
5533 :
// Boxes the Uint32 {value} as a Number: a Smi when it does not exceed
// Smi::kMaxValue, otherwise a HeapNumber.
TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(
    SloppyTNode<Uint32T> value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  TVARIABLE(Number, var_result);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(Uint32LessThan(Uint32Constant(Smi::kMaxValue), value), &if_overflow,
         &if_not_overflow);

  BIND(&if_not_overflow);
  {
    // The {value} is definitely in valid Smi range.
    var_result = SmiTag(Signed(ChangeUint32ToWord(value)));
  }
  Goto(&if_join);

  BIND(&if_overflow);
  {
    TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
    var_result = AllocateHeapNumberWithValue(float64_value);
  }
  Goto(&if_join);

  BIND(&if_join);
  return var_result.value();
}
5560 :
// Boxes the UintPtr {value} as a Number: a Smi when it does not exceed
// Smi::kMaxValue, otherwise a HeapNumber. Parallel to ChangeUint32ToTagged.
TNode<Number> CodeStubAssembler::ChangeUintPtrToTagged(TNode<UintPtrT> value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  TVARIABLE(Number, var_result);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(UintPtrLessThan(UintPtrConstant(Smi::kMaxValue), value), &if_overflow,
         &if_not_overflow);

  BIND(&if_not_overflow);
  {
    // The {value} is definitely in valid Smi range.
    var_result = SmiTag(Signed(value));
  }
  Goto(&if_join);

  BIND(&if_overflow);
  {
    TNode<Float64T> float64_value = ChangeUintPtrToFloat64(value);
    var_result = AllocateHeapNumberWithValue(float64_value);
  }
  Goto(&if_join);

  BIND(&if_join);
  return var_result.value();
}
5586 :
// Coerces the receiver {value} to a String: Smis go through NumberToString,
// non-string HeapObjects through ToString, and null/undefined throw a
// TypeError mentioning {method_name}.
TNode<String> CodeStubAssembler::ToThisString(TNode<Context> context,
                                              TNode<Object> value,
                                              TNode<String> method_name) {
  VARIABLE(var_value, MachineRepresentation::kTagged, value);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
      if_valueisstring(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
  BIND(&if_valueisnotsmi);
  {
    // Load the instance type of the {value}.
    Node* value_instance_type = LoadInstanceType(CAST(value));

    // Check if the {value} is already String.
    Label if_valueisnotstring(this, Label::kDeferred);
    Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
           &if_valueisnotstring);
    BIND(&if_valueisnotstring);
    {
      // Check if the {value} is null.
      Label if_valueisnullorundefined(this, Label::kDeferred);
      GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
      // Convert the {value} to a String.
      var_value.Bind(CallBuiltin(Builtins::kToString, context, value));
      Goto(&if_valueisstring);

      BIND(&if_valueisnullorundefined);
      {
        // The {value} is either null or undefined.
        ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
                       method_name);
      }
    }
  }
  BIND(&if_valueissmi);
  {
    // The {value} is a Smi, convert it to a String.
    var_value.Bind(CallBuiltin(Builtins::kNumberToString, context, value));
    Goto(&if_valueisstring);
  }
  BIND(&if_valueisstring);
  return CAST(var_value.value());
}
5631 :
5632 112 : TNode<Uint32T> CodeStubAssembler::ChangeNumberToUint32(TNode<Number> value) {
5633 112 : TVARIABLE(Uint32T, var_result);
5634 112 : Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this);
5635 224 : Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber);
5636 : BIND(&if_smi);
5637 : {
5638 224 : var_result = Unsigned(SmiToInt32(CAST(value)));
5639 112 : Goto(&done);
5640 : }
5641 : BIND(&if_heapnumber);
5642 : {
5643 224 : var_result = ChangeFloat64ToUint32(LoadHeapNumberValue(CAST(value)));
5644 112 : Goto(&done);
5645 : }
5646 : BIND(&done);
5647 112 : return var_result.value();
5648 : }
5649 :
// Loads the Float64 value of the Number {value} (Smi or HeapNumber).
TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(
    SloppyTNode<Number> value) {
  // TODO(tebbi): Remove assert once argument is TNode instead of SloppyTNode.
  CSA_SLOW_ASSERT(this, IsNumber(value));
  TVARIABLE(Float64T, result);
  Label smi(this);
  Label done(this, &result);
  GotoIf(TaggedIsSmi(value), &smi);
  // Not a Smi, so it must be a HeapNumber.
  result = LoadHeapNumberValue(CAST(value));
  Goto(&done);

  BIND(&smi);
  {
    result = SmiToFloat64(CAST(value));
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}
5670 :
// Converts the Number {value} to an unsigned word-sized integer. When
// {if_negative} is non-null, negative inputs jump there; when it is null,
// non-negative input is assumed (checked only by a debug-mode assert on the
// Smi path).
TNode<UintPtrT> CodeStubAssembler::TryNumberToUintPtr(TNode<Number> value,
                                                      Label* if_negative) {
  TVARIABLE(UintPtrT, result);
  Label done(this, &result);
  Branch(TaggedIsSmi(value),
         [&] {
           TNode<Smi> value_smi = CAST(value);
           if (if_negative == nullptr) {
             CSA_SLOW_ASSERT(this, SmiLessThan(SmiConstant(-1), value_smi));
           } else {
             GotoIfNot(TaggedIsPositiveSmi(value), if_negative);
           }
           result = UncheckedCast<UintPtrT>(SmiToIntPtr(value_smi));
           Goto(&done);
         },
         [&] {
           // HeapNumber path: reject negatives (if requested), then convert
           // the float64 payload.
           TNode<HeapNumber> value_hn = CAST(value);
           TNode<Float64T> value = LoadHeapNumberValue(value_hn);
           if (if_negative != nullptr) {
             GotoIf(Float64LessThan(value, Float64Constant(0.0)), if_negative);
           }
           result = ChangeFloat64ToUintPtr(value);
           Goto(&done);
         });

  BIND(&done);
  return result.value();
}
5699 :
// Multiplies {value} by the system pointer size (via shift).
TNode<WordT> CodeStubAssembler::TimesSystemPointerSize(
    SloppyTNode<WordT> value) {
  return WordShl(value, kSystemPointerSizeLog2);
}

// Multiplies {value} by the tagged value size (via shift).
TNode<WordT> CodeStubAssembler::TimesTaggedSize(SloppyTNode<WordT> value) {
  return WordShl(value, kTaggedSizeLog2);
}

// Multiplies {value} by the size of a double (via shift).
TNode<WordT> CodeStubAssembler::TimesDoubleSize(SloppyTNode<WordT> value) {
  return WordShl(value, kDoubleSizeLog2);
}
5712 :
// Coerces the receiver {value} for a primitive-wrapper method: unwraps
// JSValue wrappers in a loop until the underlying primitive of
// {primitive_type} is found, and throws a TypeError naming {method_name}
// for any other input.
Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
                                     PrimitiveType primitive_type,
                                     char const* method_name) {
  // We might need to loop once due to JSValue unboxing.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  Label loop(this, &var_value), done_loop(this),
      done_throw(this, Label::kDeferred);
  Goto(&loop);
  BIND(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    // Only kNumber accepts a Smi receiver directly.
    GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
                                   ? &done_loop
                                   : &done_throw);

    // Load the map of the {value}.
    Node* value_map = LoadMap(value);

    // Load the instance type of the {value}.
    Node* value_instance_type = LoadMapInstanceType(value_map);

    // Check if {value} is a JSValue.
    Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
    Branch(InstanceTypeEqual(value_instance_type, JS_VALUE_TYPE),
           &if_valueisvalue, &if_valueisnotvalue);

    BIND(&if_valueisvalue);
    {
      // Load the actual value from the {value} wrapper and re-check it.
      var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
      Goto(&loop);
    }

    BIND(&if_valueisnotvalue);
    {
      // Accept only the primitive matching {primitive_type}.
      switch (primitive_type) {
        case PrimitiveType::kBoolean:
          GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
          break;
        case PrimitiveType::kNumber:
          GotoIf(WordEqual(value_map, HeapNumberMapConstant()), &done_loop);
          break;
        case PrimitiveType::kString:
          GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
          break;
        case PrimitiveType::kSymbol:
          GotoIf(WordEqual(value_map, SymbolMapConstant()), &done_loop);
          break;
      }
      Goto(&done_throw);
    }
  }

  BIND(&done_throw);
  {
    const char* primitive_name = nullptr;
    switch (primitive_type) {
      case PrimitiveType::kBoolean:
        primitive_name = "Boolean";
        break;
      case PrimitiveType::kNumber:
        primitive_name = "Number";
        break;
      case PrimitiveType::kString:
        primitive_name = "String";
        break;
      case PrimitiveType::kSymbol:
        primitive_name = "Symbol";
        break;
    }
    CHECK_NOT_NULL(primitive_name);

    // The {value} is not a compatible receiver for this method.
    ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
                   primitive_name);
  }

  BIND(&done_loop);
  return var_value.value();
}
5796 :
// Returns {value}'s map after verifying that {value} is a HeapObject of
// exactly {instance_type}; otherwise throws a TypeError naming
// {method_name}.
Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
                                                InstanceType instance_type,
                                                char const* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  VARIABLE(var_value_map, MachineRepresentation::kTagged);

  // Smis have no map and can never match.
  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
         &throw_exception);

  // The {value} is not a compatible receiver for this method.
  BIND(&throw_exception);
  ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
                 StringConstant(method_name), value);

  BIND(&out);
  return var_value_map.value();
}

// Returns {value}'s map after verifying that {value} is a JSReceiver;
// otherwise throws a TypeError built from {msg_template} and {method_name}.
Node* CodeStubAssembler::ThrowIfNotJSReceiver(Node* context, Node* value,
                                              MessageTemplate msg_template,
                                              const char* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  VARIABLE(var_value_map, MachineRepresentation::kTagged);

  // Smis are not receivers.
  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(IsJSReceiverInstanceType(value_instance_type), &out, &throw_exception);

  // The {value} is not a compatible receiver for this method.
  BIND(&throw_exception);
  ThrowTypeError(context, msg_template, method_name);

  BIND(&out);
  return var_value_map.value();
}
5842 :
// Throws a RangeError for {message} with up to three optional arguments;
// control never returns (marked Unreachable).
void CodeStubAssembler::ThrowRangeError(Node* context, MessageTemplate message,
                                        Node* arg0, Node* arg1, Node* arg2) {
  Node* template_index = SmiConstant(static_cast<int>(message));
  // Dispatch on the number of supplied arguments (trailing nullptrs).
  if (arg0 == nullptr) {
    CallRuntime(Runtime::kThrowRangeError, context, template_index);
  } else if (arg1 == nullptr) {
    CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0);
  } else if (arg2 == nullptr) {
    CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1);
  } else {
    CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1,
                arg2);
  }
  Unreachable();
}

// Convenience overload: converts the optional C-string arguments into
// String constants, then delegates to the Node*-based overload below.
void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
                                       char const* arg0, char const* arg1) {
  Node* arg0_node = nullptr;
  if (arg0) arg0_node = StringConstant(arg0);
  Node* arg1_node = nullptr;
  if (arg1) arg1_node = StringConstant(arg1);
  ThrowTypeError(context, message, arg0_node, arg1_node);
}

// Throws a TypeError for {message} with up to three optional arguments;
// control never returns (marked Unreachable).
void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
                                       Node* arg0, Node* arg1, Node* arg2) {
  Node* template_index = SmiConstant(static_cast<int>(message));
  // Dispatch on the number of supplied arguments (trailing nullptrs).
  if (arg0 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index);
  } else if (arg1 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0);
  } else if (arg2 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1);
  } else {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1,
                arg2);
  }
  Unreachable();
}
5883 :
// Compares {instance_type} against the compile-time constant {type}.
TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(
    SloppyTNode<Int32T> instance_type, int type) {
  return Word32Equal(instance_type, Int32Constant(type));
}

// True when {map}'s IsDictionaryMap bit (bit field 3) is set.
TNode<BoolT> CodeStubAssembler::IsDictionaryMap(SloppyTNode<Map> map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Node* bit_field3 = LoadMapBitField3(map);
  return IsSetWord32<Map::IsDictionaryMapBit>(bit_field3);
}

// True when {map}'s IsExtensible bit (bit field 2) is set.
TNode<BoolT> CodeStubAssembler::IsExtensibleMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsExtensibleBit>(LoadMapBitField2(map));
}

// True when {map}'s elements kind is within [PACKED_SEALED_ELEMENTS,
// PACKED_FROZEN_ELEMENTS].
TNode<BoolT> CodeStubAssembler::IsPackedFrozenOrSealedElementsKindMap(
    SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsElementsKindInRange(LoadMapElementsKind(map), PACKED_SEALED_ELEMENTS,
                               PACKED_FROZEN_ELEMENTS);
}

// True when {map} is extensible AND not a prototype map, checked with a
// single masked compare on bit field 2.
TNode<BoolT> CodeStubAssembler::IsExtensibleNonPrototypeMap(TNode<Map> map) {
  int kMask = Map::IsExtensibleBit::kMask | Map::IsPrototypeMapBit::kMask;
  int kExpected = Map::IsExtensibleBit::kMask;
  return Word32Equal(Word32And(LoadMapBitField2(map), Int32Constant(kMask)),
                     Int32Constant(kExpected));
}

// True when {map}'s IsCallable bit (bit field) is set.
TNode<BoolT> CodeStubAssembler::IsCallableMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsCallableBit>(LoadMapBitField(map));
}

// True when {map}'s IsDeprecated bit (bit field 3) is set.
TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsDeprecatedBit>(LoadMapBitField3(map));
}

// True when {map}'s IsUndetectable bit (bit field) is set.
TNode<BoolT> CodeStubAssembler::IsUndetectableMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsUndetectableBit>(LoadMapBitField(map));
}
5928 :
// The predicates below each load a protector cell from the root list and
// report whether its value equals Isolate::kProtectorInvalid, i.e. whether
// the corresponding fast-path assumption has been invalidated.

// True when the "no elements on Array/Object prototypes" protector has
// been invalidated.
TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kNoElementsProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// True when the array-iterator protector has been invalidated.
TNode<BoolT> CodeStubAssembler::IsArrayIteratorProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kArrayIteratorProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// True when the Promise.resolve protector has been invalidated. Note this
// protector is stored in a plain Cell, not a PropertyCell.
TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kPromiseResolveProtector);
  Node* cell_value = LoadObjectField(cell, Cell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// True when the Promise.prototype.then protector has been invalidated.
TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kPromiseThenProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// True when the Array species protector has been invalidated.
TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kArraySpeciesProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// True when the TypedArray species protector has been invalidated.
TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kTypedArraySpeciesProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// True when the RegExp species protector has been invalidated.
TNode<BoolT> CodeStubAssembler::IsRegExpSpeciesProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kRegExpSpeciesProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// True when the Promise species protector has been invalidated.
TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kPromiseSpeciesProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}
5984 :
// True when {map}'s prototype is the current native context's initial
// Array.prototype.
TNode<BoolT> CodeStubAssembler::IsPrototypeInitialArrayPrototype(
    SloppyTNode<Context> context, SloppyTNode<Map> map) {
  Node* const native_context = LoadNativeContext(context);
  Node* const initial_array_prototype = LoadContextElement(
      native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
  Node* proto = LoadMapPrototype(map);
  return WordEqual(proto, initial_array_prototype);
}

// True when the prototype of {map}'s prototype is the native context's
// %TypedArray%.prototype.
TNode<BoolT> CodeStubAssembler::IsPrototypeTypedArrayPrototype(
    SloppyTNode<Context> context, SloppyTNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const typed_array_prototype =
      LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
  TNode<HeapObject> proto = LoadMapPrototype(map);
  // A non-JSObject prototype has no further prototype to inspect; use null
  // so the final comparison fails.
  TNode<HeapObject> proto_of_proto = Select<HeapObject>(
      IsJSObject(proto), [=] { return LoadMapPrototype(LoadMap(proto)); },
      [=] { return NullConstant(); });
  return WordEqual(proto_of_proto, typed_array_prototype);
}
6005 :
// The four predicates below compare {map} against one of the arguments-
// object maps stored on the current native context.

// True when {map} is the fast aliased (mapped) arguments map.
TNode<BoolT> CodeStubAssembler::IsFastAliasedArgumentsMap(
    TNode<Context> context, TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map = LoadContextElement(
      native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}

// True when {map} is the slow aliased (mapped) arguments map.
TNode<BoolT> CodeStubAssembler::IsSlowAliasedArgumentsMap(
    TNode<Context> context, TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map = LoadContextElement(
      native_context, Context::SLOW_ALIASED_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}

// True when {map} is the sloppy-mode arguments map.
TNode<BoolT> CodeStubAssembler::IsSloppyArgumentsMap(TNode<Context> context,
                                                     TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map =
      LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}

// True when {map} is the strict-mode arguments map.
TNode<BoolT> CodeStubAssembler::IsStrictArgumentsMap(TNode<Context> context,
                                                     TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map =
      LoadContextElement(native_context, Context::STRICT_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}
6037 :
// True when the tagged {object} is callable; Smis are never callable.
TNode<BoolT> CodeStubAssembler::TaggedIsCallable(TNode<Object> object) {
  return Select<BoolT>(
      TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
      [=] {
        return IsCallableMap(LoadMap(UncheckedCast<HeapObject>(object)));
      });
}

// True when the HeapObject {object}'s map has the IsCallable bit set.
TNode<BoolT> CodeStubAssembler::IsCallable(SloppyTNode<HeapObject> object) {
  return IsCallableMap(LoadMap(object));
}

// True when {object}'s map is the Cell map.
TNode<BoolT> CodeStubAssembler::IsCell(SloppyTNode<HeapObject> object) {
  return WordEqual(LoadMap(object), LoadRoot(RootIndex::kCellMap));
}

// True when {object} has instance type CODE_TYPE.
TNode<BoolT> CodeStubAssembler::IsCode(SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, CODE_TYPE);
}

// True when {map}'s IsConstructor bit is set.
TNode<BoolT> CodeStubAssembler::IsConstructorMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsConstructorBit>(LoadMapBitField(map));
}

// True when the HeapObject {object}'s map has the IsConstructor bit set.
TNode<BoolT> CodeStubAssembler::IsConstructor(SloppyTNode<HeapObject> object) {
  return IsConstructorMap(LoadMap(object));
}

// True when {map}'s HasPrototypeSlot bit is set.
TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(
    SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::HasPrototypeSlotBit>(LoadMapBitField(map));
}
6072 :
6073 2868 : TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
6074 : TNode<Int32T> instance_type) {
6075 : STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
6076 : return Int32LessThanOrEqual(instance_type,
6077 5736 : Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
6078 : }
6079 :
6080 1624 : TNode<BoolT> CodeStubAssembler::IsCustomElementsReceiverInstanceType(
6081 : TNode<Int32T> instance_type) {
6082 : return Int32LessThanOrEqual(instance_type,
6083 3248 : Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER));
6084 : }
6085 :
6086 20232 : TNode<BoolT> CodeStubAssembler::IsStringInstanceType(
6087 : SloppyTNode<Int32T> instance_type) {
6088 : STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
6089 40464 : return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
6090 : }
6091 :
6092 5488 : TNode<BoolT> CodeStubAssembler::IsOneByteStringInstanceType(
6093 : SloppyTNode<Int32T> instance_type) {
6094 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6095 : return Word32Equal(
6096 16464 : Word32And(instance_type, Int32Constant(kStringEncodingMask)),
6097 16464 : Int32Constant(kOneByteStringTag));
6098 : }
6099 :
6100 4704 : TNode<BoolT> CodeStubAssembler::IsSequentialStringInstanceType(
6101 : SloppyTNode<Int32T> instance_type) {
6102 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6103 : return Word32Equal(
6104 14112 : Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6105 14112 : Int32Constant(kSeqStringTag));
6106 : }
6107 :
6108 56 : TNode<BoolT> CodeStubAssembler::IsConsStringInstanceType(
6109 : SloppyTNode<Int32T> instance_type) {
6110 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6111 : return Word32Equal(
6112 168 : Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6113 168 : Int32Constant(kConsStringTag));
6114 : }
6115 :
6116 0 : TNode<BoolT> CodeStubAssembler::IsIndirectStringInstanceType(
6117 : SloppyTNode<Int32T> instance_type) {
6118 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6119 : STATIC_ASSERT(kIsIndirectStringMask == 0x1);
6120 : STATIC_ASSERT(kIsIndirectStringTag == 0x1);
6121 : return UncheckedCast<BoolT>(
6122 0 : Word32And(instance_type, Int32Constant(kIsIndirectStringMask)));
6123 : }
6124 :
6125 0 : TNode<BoolT> CodeStubAssembler::IsExternalStringInstanceType(
6126 : SloppyTNode<Int32T> instance_type) {
6127 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6128 : return Word32Equal(
6129 0 : Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6130 0 : Int32Constant(kExternalStringTag));
6131 : }
6132 :
6133 0 : TNode<BoolT> CodeStubAssembler::IsUncachedExternalStringInstanceType(
6134 : SloppyTNode<Int32T> instance_type) {
6135 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6136 : STATIC_ASSERT(kUncachedExternalStringTag != 0);
6137 4704 : return IsSetWord32(instance_type, kUncachedExternalStringMask);
6138 : }
6139 :
6140 15744 : TNode<BoolT> CodeStubAssembler::IsJSReceiverInstanceType(
6141 : SloppyTNode<Int32T> instance_type) {
6142 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
6143 : return Int32GreaterThanOrEqual(instance_type,
6144 31488 : Int32Constant(FIRST_JS_RECEIVER_TYPE));
6145 : }
6146 :
6147 7784 : TNode<BoolT> CodeStubAssembler::IsJSReceiverMap(SloppyTNode<Map> map) {
6148 7784 : return IsJSReceiverInstanceType(LoadMapInstanceType(map));
6149 : }
6150 :
6151 6832 : TNode<BoolT> CodeStubAssembler::IsJSReceiver(SloppyTNode<HeapObject> object) {
6152 6832 : return IsJSReceiverMap(LoadMap(object));
6153 : }
6154 :
6155 0 : TNode<BoolT> CodeStubAssembler::IsNullOrJSReceiver(
6156 : SloppyTNode<HeapObject> object) {
6157 0 : return UncheckedCast<BoolT>(Word32Or(IsJSReceiver(object), IsNull(object)));
6158 : }
6159 :
6160 3584 : TNode<BoolT> CodeStubAssembler::IsNullOrUndefined(SloppyTNode<Object> value) {
6161 10752 : return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
6162 : }
6163 :
6164 0 : TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyInstanceType(
6165 : SloppyTNode<Int32T> instance_type) {
6166 0 : return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
6167 : }
6168 :
6169 448 : TNode<BoolT> CodeStubAssembler::IsJSObjectInstanceType(
6170 : SloppyTNode<Int32T> instance_type) {
6171 : STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
6172 : return Int32GreaterThanOrEqual(instance_type,
6173 896 : Int32Constant(FIRST_JS_OBJECT_TYPE));
6174 : }
6175 :
6176 336 : TNode<BoolT> CodeStubAssembler::IsJSObjectMap(SloppyTNode<Map> map) {
6177 : CSA_ASSERT(this, IsMap(map));
6178 336 : return IsJSObjectInstanceType(LoadMapInstanceType(map));
6179 : }
6180 :
6181 224 : TNode<BoolT> CodeStubAssembler::IsJSObject(SloppyTNode<HeapObject> object) {
6182 224 : return IsJSObjectMap(LoadMap(object));
6183 : }
6184 :
6185 896 : TNode<BoolT> CodeStubAssembler::IsJSPromiseMap(SloppyTNode<Map> map) {
6186 : CSA_ASSERT(this, IsMap(map));
6187 896 : return InstanceTypeEqual(LoadMapInstanceType(map), JS_PROMISE_TYPE);
6188 : }
6189 :
6190 0 : TNode<BoolT> CodeStubAssembler::IsJSPromise(SloppyTNode<HeapObject> object) {
6191 0 : return IsJSPromiseMap(LoadMap(object));
6192 : }
6193 :
6194 336 : TNode<BoolT> CodeStubAssembler::IsJSProxy(SloppyTNode<HeapObject> object) {
6195 336 : return HasInstanceType(object, JS_PROXY_TYPE);
6196 : }
6197 :
6198 728 : TNode<BoolT> CodeStubAssembler::IsJSGlobalProxy(
6199 : SloppyTNode<HeapObject> object) {
6200 728 : return HasInstanceType(object, JS_GLOBAL_PROXY_TYPE);
6201 : }
6202 :
6203 4596 : TNode<BoolT> CodeStubAssembler::IsMap(SloppyTNode<HeapObject> map) {
6204 4596 : return IsMetaMap(LoadMap(map));
6205 : }
6206 :
6207 0 : TNode<BoolT> CodeStubAssembler::IsJSValueInstanceType(
6208 : SloppyTNode<Int32T> instance_type) {
6209 3812 : return InstanceTypeEqual(instance_type, JS_VALUE_TYPE);
6210 : }
6211 :
6212 0 : TNode<BoolT> CodeStubAssembler::IsJSValue(SloppyTNode<HeapObject> object) {
6213 0 : return IsJSValueMap(LoadMap(object));
6214 : }
6215 :
6216 0 : TNode<BoolT> CodeStubAssembler::IsJSValueMap(SloppyTNode<Map> map) {
6217 0 : return IsJSValueInstanceType(LoadMapInstanceType(map));
6218 : }
6219 :
6220 0 : TNode<BoolT> CodeStubAssembler::IsJSArrayInstanceType(
6221 : SloppyTNode<Int32T> instance_type) {
6222 14736 : return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
6223 : }
6224 :
6225 8012 : TNode<BoolT> CodeStubAssembler::IsJSArray(SloppyTNode<HeapObject> object) {
6226 8012 : return IsJSArrayMap(LoadMap(object));
6227 : }
6228 :
6229 10924 : TNode<BoolT> CodeStubAssembler::IsJSArrayMap(SloppyTNode<Map> map) {
6230 10924 : return IsJSArrayInstanceType(LoadMapInstanceType(map));
6231 : }
6232 :
6233 0 : TNode<BoolT> CodeStubAssembler::IsJSArrayIterator(
6234 : SloppyTNode<HeapObject> object) {
6235 0 : return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
6236 : }
6237 :
6238 0 : TNode<BoolT> CodeStubAssembler::IsJSAsyncGeneratorObject(
6239 : SloppyTNode<HeapObject> object) {
6240 0 : return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
6241 : }
6242 :
6243 4 : TNode<BoolT> CodeStubAssembler::IsContext(SloppyTNode<HeapObject> object) {
6244 8 : Node* instance_type = LoadInstanceType(object);
6245 : return UncheckedCast<BoolT>(Word32And(
6246 12 : Int32GreaterThanOrEqual(instance_type, Int32Constant(FIRST_CONTEXT_TYPE)),
6247 16 : Int32LessThanOrEqual(instance_type, Int32Constant(LAST_CONTEXT_TYPE))));
6248 : }
6249 :
6250 0 : TNode<BoolT> CodeStubAssembler::IsFixedArray(SloppyTNode<HeapObject> object) {
6251 0 : return HasInstanceType(object, FIXED_ARRAY_TYPE);
6252 : }
6253 :
6254 0 : TNode<BoolT> CodeStubAssembler::IsFixedArraySubclass(
6255 : SloppyTNode<HeapObject> object) {
6256 0 : Node* instance_type = LoadInstanceType(object);
6257 : return UncheckedCast<BoolT>(
6258 0 : Word32And(Int32GreaterThanOrEqual(instance_type,
6259 0 : Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
6260 0 : Int32LessThanOrEqual(instance_type,
6261 0 : Int32Constant(LAST_FIXED_ARRAY_TYPE))));
6262 : }
6263 :
6264 0 : TNode<BoolT> CodeStubAssembler::IsNotWeakFixedArraySubclass(
6265 : SloppyTNode<HeapObject> object) {
6266 0 : Node* instance_type = LoadInstanceType(object);
6267 : return UncheckedCast<BoolT>(Word32Or(
6268 0 : Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
6269 0 : Int32GreaterThan(instance_type,
6270 0 : Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
6271 : }
6272 :
6273 392 : TNode<BoolT> CodeStubAssembler::IsPromiseCapability(
6274 : SloppyTNode<HeapObject> object) {
6275 392 : return HasInstanceType(object, PROMISE_CAPABILITY_TYPE);
6276 : }
6277 :
6278 0 : TNode<BoolT> CodeStubAssembler::IsPropertyArray(
6279 : SloppyTNode<HeapObject> object) {
6280 0 : return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
6281 : }
6282 :
6283 : // This complicated check is due to elements oddities. If a smi array is empty
6284 : // after Array.p.shift, it is replaced by the empty array constant. If it is
6285 : // later filled with a double element, we try to grow it but pass in a double
6286 : // elements kind. Usually this would cause a size mismatch (since the source
6287 : // fixed array has HOLEY_ELEMENTS and destination has
6288 : // HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
6289 : // source array is empty.
6290 : // TODO(jgruber): It might we worth creating an empty_double_array constant to
6291 : // simplify this case.
6292 0 : TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKindOrEmpty(
6293 : SloppyTNode<HeapObject> object, ElementsKind kind) {
6294 0 : Label out(this);
6295 : TVARIABLE(BoolT, var_result, Int32TrueConstant());
6296 :
6297 0 : GotoIf(IsFixedArrayWithKind(object, kind), &out);
6298 :
6299 0 : TNode<Smi> const length = LoadFixedArrayBaseLength(CAST(object));
6300 0 : GotoIf(SmiEqual(length, SmiConstant(0)), &out);
6301 :
6302 : var_result = Int32FalseConstant();
6303 0 : Goto(&out);
6304 :
6305 : BIND(&out);
6306 0 : return var_result.value();
6307 : }
6308 :
6309 0 : TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKind(
6310 : SloppyTNode<HeapObject> object, ElementsKind kind) {
6311 0 : if (IsDoubleElementsKind(kind)) {
6312 0 : return IsFixedDoubleArray(object);
6313 : } else {
6314 : DCHECK(IsSmiOrObjectElementsKind(kind));
6315 0 : return IsFixedArraySubclass(object);
6316 : }
6317 : }
6318 :
6319 168 : TNode<BoolT> CodeStubAssembler::IsBoolean(SloppyTNode<HeapObject> object) {
6320 168 : return IsBooleanMap(LoadMap(object));
6321 : }
6322 :
6323 0 : TNode<BoolT> CodeStubAssembler::IsPropertyCell(SloppyTNode<HeapObject> object) {
6324 0 : return IsPropertyCellMap(LoadMap(object));
6325 : }
6326 :
6327 336 : TNode<BoolT> CodeStubAssembler::IsAccessorInfo(SloppyTNode<HeapObject> object) {
6328 336 : return IsAccessorInfoMap(LoadMap(object));
6329 : }
6330 :
6331 3980 : TNode<BoolT> CodeStubAssembler::IsAccessorPair(SloppyTNode<HeapObject> object) {
6332 3980 : return IsAccessorPairMap(LoadMap(object));
6333 : }
6334 :
6335 168 : TNode<BoolT> CodeStubAssembler::IsAllocationSite(
6336 : SloppyTNode<HeapObject> object) {
6337 336 : return IsAllocationSiteInstanceType(LoadInstanceType(object));
6338 : }
6339 :
6340 0 : TNode<BoolT> CodeStubAssembler::IsAnyHeapNumber(
6341 : SloppyTNode<HeapObject> object) {
6342 : return UncheckedCast<BoolT>(
6343 0 : Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object)));
6344 : }
6345 :
6346 33460 : TNode<BoolT> CodeStubAssembler::IsHeapNumber(SloppyTNode<HeapObject> object) {
6347 33460 : return IsHeapNumberMap(LoadMap(object));
6348 : }
6349 :
6350 56 : TNode<BoolT> CodeStubAssembler::IsHeapNumberInstanceType(
6351 : SloppyTNode<Int32T> instance_type) {
6352 452 : return InstanceTypeEqual(instance_type, HEAP_NUMBER_TYPE);
6353 : }
6354 :
6355 0 : TNode<BoolT> CodeStubAssembler::IsOddball(SloppyTNode<HeapObject> object) {
6356 0 : return IsOddballInstanceType(LoadInstanceType(object));
6357 : }
6358 :
6359 0 : TNode<BoolT> CodeStubAssembler::IsOddballInstanceType(
6360 : SloppyTNode<Int32T> instance_type) {
6361 1068 : return InstanceTypeEqual(instance_type, ODDBALL_TYPE);
6362 : }
6363 :
6364 56 : TNode<BoolT> CodeStubAssembler::IsMutableHeapNumber(
6365 : SloppyTNode<HeapObject> object) {
6366 56 : return IsMutableHeapNumberMap(LoadMap(object));
6367 : }
6368 :
6369 0 : TNode<BoolT> CodeStubAssembler::IsFeedbackCell(SloppyTNode<HeapObject> object) {
6370 0 : return HasInstanceType(object, FEEDBACK_CELL_TYPE);
6371 : }
6372 :
6373 12612 : TNode<BoolT> CodeStubAssembler::IsFeedbackVector(
6374 : SloppyTNode<HeapObject> object) {
6375 12612 : return IsFeedbackVectorMap(LoadMap(object));
6376 : }
6377 :
6378 56 : TNode<BoolT> CodeStubAssembler::IsName(SloppyTNode<HeapObject> object) {
6379 112 : return IsNameInstanceType(LoadInstanceType(object));
6380 : }
6381 :
6382 112 : TNode<BoolT> CodeStubAssembler::IsNameInstanceType(
6383 : SloppyTNode<Int32T> instance_type) {
6384 224 : return Int32LessThanOrEqual(instance_type, Int32Constant(LAST_NAME_TYPE));
6385 : }
6386 :
6387 9300 : TNode<BoolT> CodeStubAssembler::IsString(SloppyTNode<HeapObject> object) {
6388 18600 : return IsStringInstanceType(LoadInstanceType(object));
6389 : }
6390 :
6391 0 : TNode<BoolT> CodeStubAssembler::IsSymbolInstanceType(
6392 : SloppyTNode<Int32T> instance_type) {
6393 1068 : return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
6394 : }
6395 :
6396 2920 : TNode<BoolT> CodeStubAssembler::IsSymbol(SloppyTNode<HeapObject> object) {
6397 2920 : return IsSymbolMap(LoadMap(object));
6398 : }
6399 :
6400 168 : TNode<BoolT> CodeStubAssembler::IsInternalizedStringInstanceType(
6401 : TNode<Int32T> instance_type) {
6402 : STATIC_ASSERT(kNotInternalizedTag != 0);
6403 : return Word32Equal(
6404 336 : Word32And(instance_type,
6405 336 : Int32Constant(kIsNotStringMask | kIsNotInternalizedMask)),
6406 504 : Int32Constant(kStringTag | kInternalizedTag));
6407 : }
6408 :
6409 0 : TNode<BoolT> CodeStubAssembler::IsUniqueName(TNode<HeapObject> object) {
6410 0 : TNode<Int32T> instance_type = LoadInstanceType(object);
6411 : return Select<BoolT>(
6412 0 : IsInternalizedStringInstanceType(instance_type),
6413 0 : [=] { return Int32TrueConstant(); },
6414 0 : [=] { return IsSymbolInstanceType(instance_type); });
6415 : }
6416 :
6417 168 : TNode<BoolT> CodeStubAssembler::IsUniqueNameNoIndex(TNode<HeapObject> object) {
6418 168 : TNode<Int32T> instance_type = LoadInstanceType(object);
6419 : return Select<BoolT>(
6420 336 : IsInternalizedStringInstanceType(instance_type),
6421 168 : [=] {
6422 336 : return IsSetWord32(LoadNameHashField(CAST(object)),
6423 168 : Name::kIsNotArrayIndexMask);
6424 336 : },
6425 672 : [=] { return IsSymbolInstanceType(instance_type); });
6426 : }
6427 :
6428 4760 : TNode<BoolT> CodeStubAssembler::IsBigIntInstanceType(
6429 : SloppyTNode<Int32T> instance_type) {
6430 29516 : return InstanceTypeEqual(instance_type, BIGINT_TYPE);
6431 : }
6432 :
6433 10584 : TNode<BoolT> CodeStubAssembler::IsBigInt(SloppyTNode<HeapObject> object) {
6434 21168 : return IsBigIntInstanceType(LoadInstanceType(object));
6435 : }
6436 :
6437 448 : TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType(
6438 : SloppyTNode<Int32T> instance_type) {
6439 : return Int32LessThanOrEqual(instance_type,
6440 896 : Int32Constant(LAST_PRIMITIVE_TYPE));
6441 : }
6442 :
6443 2636 : TNode<BoolT> CodeStubAssembler::IsPrivateSymbol(
6444 : SloppyTNode<HeapObject> object) {
6445 5272 : return Select<BoolT>(IsSymbol(object),
6446 2636 : [=] {
6447 5272 : TNode<Symbol> symbol = CAST(object);
6448 : TNode<Uint32T> flags = LoadObjectField<Uint32T>(
6449 : symbol, Symbol::kFlagsOffset);
6450 2636 : return IsSetWord32<Symbol::IsPrivateBit>(flags);
6451 : },
6452 10544 : [=] { return Int32FalseConstant(); });
6453 : }
6454 :
6455 168 : TNode<BoolT> CodeStubAssembler::IsNativeContext(
6456 : SloppyTNode<HeapObject> object) {
6457 336 : return WordEqual(LoadMap(object), LoadRoot(RootIndex::kNativeContextMap));
6458 : }
6459 :
6460 112 : TNode<BoolT> CodeStubAssembler::IsFixedDoubleArray(
6461 : SloppyTNode<HeapObject> object) {
6462 112 : return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
6463 : }
6464 :
6465 0 : TNode<BoolT> CodeStubAssembler::IsHashTable(SloppyTNode<HeapObject> object) {
6466 0 : Node* instance_type = LoadInstanceType(object);
6467 : return UncheckedCast<BoolT>(
6468 0 : Word32And(Int32GreaterThanOrEqual(instance_type,
6469 0 : Int32Constant(FIRST_HASH_TABLE_TYPE)),
6470 0 : Int32LessThanOrEqual(instance_type,
6471 0 : Int32Constant(LAST_HASH_TABLE_TYPE))));
6472 : }
6473 :
6474 0 : TNode<BoolT> CodeStubAssembler::IsEphemeronHashTable(
6475 : SloppyTNode<HeapObject> object) {
6476 0 : return HasInstanceType(object, EPHEMERON_HASH_TABLE_TYPE);
6477 : }
6478 :
6479 0 : TNode<BoolT> CodeStubAssembler::IsNameDictionary(
6480 : SloppyTNode<HeapObject> object) {
6481 0 : return HasInstanceType(object, NAME_DICTIONARY_TYPE);
6482 : }
6483 :
6484 0 : TNode<BoolT> CodeStubAssembler::IsGlobalDictionary(
6485 : SloppyTNode<HeapObject> object) {
6486 0 : return HasInstanceType(object, GLOBAL_DICTIONARY_TYPE);
6487 : }
6488 :
6489 0 : TNode<BoolT> CodeStubAssembler::IsNumberDictionary(
6490 : SloppyTNode<HeapObject> object) {
6491 0 : return HasInstanceType(object, NUMBER_DICTIONARY_TYPE);
6492 : }
6493 :
6494 0 : TNode<BoolT> CodeStubAssembler::IsJSGeneratorObject(
6495 : SloppyTNode<HeapObject> object) {
6496 0 : return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
6497 : }
6498 :
6499 0 : TNode<BoolT> CodeStubAssembler::IsJSFunctionInstanceType(
6500 : SloppyTNode<Int32T> instance_type) {
6501 3868 : return InstanceTypeEqual(instance_type, JS_FUNCTION_TYPE);
6502 : }
6503 :
6504 0 : TNode<BoolT> CodeStubAssembler::IsAllocationSiteInstanceType(
6505 : SloppyTNode<Int32T> instance_type) {
6506 168 : return InstanceTypeEqual(instance_type, ALLOCATION_SITE_TYPE);
6507 : }
6508 :
6509 56 : TNode<BoolT> CodeStubAssembler::IsJSFunction(SloppyTNode<HeapObject> object) {
6510 56 : return IsJSFunctionMap(LoadMap(object));
6511 : }
6512 :
6513 56 : TNode<BoolT> CodeStubAssembler::IsJSFunctionMap(SloppyTNode<Map> map) {
6514 56 : return IsJSFunctionInstanceType(LoadMapInstanceType(map));
6515 : }
6516 :
6517 1064 : TNode<BoolT> CodeStubAssembler::IsJSTypedArray(SloppyTNode<HeapObject> object) {
6518 1064 : return HasInstanceType(object, JS_TYPED_ARRAY_TYPE);
6519 : }
6520 :
6521 168 : TNode<BoolT> CodeStubAssembler::IsJSArrayBuffer(
6522 : SloppyTNode<HeapObject> object) {
6523 168 : return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
6524 : }
6525 :
6526 1288 : TNode<BoolT> CodeStubAssembler::IsJSDataView(TNode<HeapObject> object) {
6527 1288 : return HasInstanceType(object, JS_DATA_VIEW_TYPE);
6528 : }
6529 :
6530 0 : TNode<BoolT> CodeStubAssembler::IsFixedTypedArray(
6531 : SloppyTNode<HeapObject> object) {
6532 0 : TNode<Int32T> instance_type = LoadInstanceType(object);
6533 : return UncheckedCast<BoolT>(Word32And(
6534 0 : Int32GreaterThanOrEqual(instance_type,
6535 0 : Int32Constant(FIRST_FIXED_TYPED_ARRAY_TYPE)),
6536 0 : Int32LessThanOrEqual(instance_type,
6537 0 : Int32Constant(LAST_FIXED_TYPED_ARRAY_TYPE))));
6538 : }
6539 :
6540 1008 : TNode<BoolT> CodeStubAssembler::IsJSRegExp(SloppyTNode<HeapObject> object) {
6541 1008 : return HasInstanceType(object, JS_REGEXP_TYPE);
6542 : }
6543 :
6544 3644 : TNode<BoolT> CodeStubAssembler::IsNumber(SloppyTNode<Object> object) {
6545 10932 : return Select<BoolT>(TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6546 14576 : [=] { return IsHeapNumber(CAST(object)); });
6547 : }
6548 :
6549 112 : TNode<BoolT> CodeStubAssembler::IsNumeric(SloppyTNode<Object> object) {
6550 : return Select<BoolT>(
6551 336 : TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6552 112 : [=] {
6553 : return UncheckedCast<BoolT>(
6554 336 : Word32Or(IsHeapNumber(CAST(object)), IsBigInt(CAST(object))));
6555 448 : });
6556 : }
6557 :
6558 0 : TNode<BoolT> CodeStubAssembler::IsNumberNormalized(SloppyTNode<Number> number) {
6559 0 : TVARIABLE(BoolT, var_result, Int32TrueConstant());
6560 0 : Label out(this);
6561 :
6562 0 : GotoIf(TaggedIsSmi(number), &out);
6563 :
6564 : TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
6565 : TNode<Float64T> smi_min =
6566 0 : Float64Constant(static_cast<double>(Smi::kMinValue));
6567 : TNode<Float64T> smi_max =
6568 0 : Float64Constant(static_cast<double>(Smi::kMaxValue));
6569 :
6570 0 : GotoIf(Float64LessThan(value, smi_min), &out);
6571 0 : GotoIf(Float64GreaterThan(value, smi_max), &out);
6572 0 : GotoIfNot(Float64Equal(value, value), &out); // NaN.
6573 :
6574 : var_result = Int32FalseConstant();
6575 0 : Goto(&out);
6576 :
6577 : BIND(&out);
6578 0 : return var_result.value();
6579 : }
6580 :
6581 0 : TNode<BoolT> CodeStubAssembler::IsNumberPositive(SloppyTNode<Number> number) {
6582 0 : return Select<BoolT>(TaggedIsSmi(number),
6583 0 : [=] { return TaggedIsPositiveSmi(number); },
6584 0 : [=] { return IsHeapNumberPositive(CAST(number)); });
6585 : }
6586 :
6587 : // TODO(cbruni): Use TNode<HeapNumber> instead of custom name.
6588 4 : TNode<BoolT> CodeStubAssembler::IsHeapNumberPositive(TNode<HeapNumber> number) {
6589 : TNode<Float64T> value = LoadHeapNumberValue(number);
6590 4 : TNode<Float64T> float_zero = Float64Constant(0.);
6591 4 : return Float64GreaterThanOrEqual(value, float_zero);
6592 : }
6593 :
6594 0 : TNode<BoolT> CodeStubAssembler::IsNumberNonNegativeSafeInteger(
6595 : TNode<Number> number) {
6596 : return Select<BoolT>(
6597 : // TODO(cbruni): Introduce TaggedIsNonNegateSmi to avoid confusion.
6598 0 : TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
6599 0 : [=] {
6600 0 : TNode<HeapNumber> heap_number = CAST(number);
6601 0 : return Select<BoolT>(IsInteger(heap_number),
6602 0 : [=] { return IsHeapNumberPositive(heap_number); },
6603 0 : [=] { return Int32FalseConstant(); });
6604 0 : });
6605 : }
6606 :
6607 56 : TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<Object> number) {
6608 : return Select<BoolT>(
6609 168 : TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6610 56 : [=] {
6611 : return Select<BoolT>(
6612 224 : IsHeapNumber(CAST(number)),
6613 56 : [=] { return IsSafeInteger(UncheckedCast<HeapNumber>(number)); },
6614 224 : [=] { return Int32FalseConstant(); });
6615 280 : });
6616 : }
6617 :
6618 56 : TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<HeapNumber> number) {
6619 : // Load the actual value of {number}.
6620 : TNode<Float64T> number_value = LoadHeapNumberValue(number);
6621 : // Truncate the value of {number} to an integer (or an infinity).
6622 56 : TNode<Float64T> integer = Float64Trunc(number_value);
6623 :
6624 : return Select<BoolT>(
6625 : // Check if {number}s value matches the integer (ruling out the
6626 : // infinities).
6627 224 : Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0)),
6628 56 : [=] {
6629 : // Check if the {integer} value is in safe integer range.
6630 224 : return Float64LessThanOrEqual(Float64Abs(integer),
6631 224 : Float64Constant(kMaxSafeInteger));
6632 112 : },
6633 224 : [=] { return Int32FalseConstant(); });
6634 : }
6635 :
6636 56 : TNode<BoolT> CodeStubAssembler::IsInteger(TNode<Object> number) {
6637 : return Select<BoolT>(
6638 168 : TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6639 56 : [=] {
6640 : return Select<BoolT>(
6641 224 : IsHeapNumber(CAST(number)),
6642 56 : [=] { return IsInteger(UncheckedCast<HeapNumber>(number)); },
6643 224 : [=] { return Int32FalseConstant(); });
6644 280 : });
6645 : }
6646 :
6647 56 : TNode<BoolT> CodeStubAssembler::IsInteger(TNode<HeapNumber> number) {
6648 : TNode<Float64T> number_value = LoadHeapNumberValue(number);
6649 : // Truncate the value of {number} to an integer (or an infinity).
6650 56 : TNode<Float64T> integer = Float64Trunc(number_value);
6651 : // Check if {number}s value matches the integer (ruling out the infinities).
6652 168 : return Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0));
6653 : }
6654 :
6655 4 : TNode<BoolT> CodeStubAssembler::IsHeapNumberUint32(TNode<HeapNumber> number) {
6656 : // Check that the HeapNumber is a valid uint32
6657 : return Select<BoolT>(
6658 8 : IsHeapNumberPositive(number),
6659 4 : [=] {
6660 16 : TNode<Float64T> value = LoadHeapNumberValue(number);
6661 8 : TNode<Uint32T> int_value = Unsigned(TruncateFloat64ToWord32(value));
6662 8 : return Float64Equal(value, ChangeUint32ToFloat64(int_value));
6663 : },
6664 16 : [=] { return Int32FalseConstant(); });
6665 : }
6666 :
6667 4 : TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(TNode<Number> number) {
6668 8 : return Select<BoolT>(TaggedIsSmi(number),
6669 4 : [=] { return TaggedIsPositiveSmi(number); },
6670 16 : [=] { return IsHeapNumberUint32(CAST(number)); });
6671 : }
6672 :
6673 224 : Node* CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(Node* element_count,
6674 : int base_size,
6675 : ParameterMode mode) {
6676 : int max_newspace_elements =
6677 224 : (kMaxRegularHeapObjectSize - base_size) / kTaggedSize;
6678 224 : return IntPtrOrSmiGreaterThan(
6679 224 : element_count, IntPtrOrSmiConstant(max_newspace_elements, mode), mode);
6680 : }
6681 :
6682 2856 : TNode<Int32T> CodeStubAssembler::StringCharCodeAt(SloppyTNode<String> string,
6683 : SloppyTNode<IntPtrT> index) {
6684 : CSA_ASSERT(this, IsString(string));
6685 :
6686 : CSA_ASSERT(this, IntPtrGreaterThanOrEqual(index, IntPtrConstant(0)));
6687 : CSA_ASSERT(this, IntPtrLessThan(index, LoadStringLengthAsWord(string)));
6688 :
6689 2856 : TVARIABLE(Int32T, var_result);
6690 :
6691 2856 : Label return_result(this), if_runtime(this, Label::kDeferred),
6692 2856 : if_stringistwobyte(this), if_stringisonebyte(this);
6693 :
6694 5712 : ToDirectStringAssembler to_direct(state(), string);
6695 2856 : to_direct.TryToDirect(&if_runtime);
6696 : Node* const offset = IntPtrAdd(index, to_direct.offset());
6697 : Node* const instance_type = to_direct.instance_type();
6698 :
6699 : Node* const string_data = to_direct.PointerToData(&if_runtime);
6700 :
6701 : // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
6702 5712 : Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
6703 2856 : &if_stringistwobyte);
6704 :
6705 : BIND(&if_stringisonebyte);
6706 : {
6707 2856 : var_result =
6708 : UncheckedCast<Int32T>(Load(MachineType::Uint8(), string_data, offset));
6709 2856 : Goto(&return_result);
6710 : }
6711 :
6712 : BIND(&if_stringistwobyte);
6713 : {
6714 2856 : var_result =
6715 : UncheckedCast<Int32T>(Load(MachineType::Uint16(), string_data,
6716 8568 : WordShl(offset, IntPtrConstant(1))));
6717 2856 : Goto(&return_result);
6718 : }
6719 :
6720 : BIND(&if_runtime);
6721 : {
6722 : Node* result = CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(),
6723 2856 : string, SmiTag(index));
6724 5712 : var_result = SmiToInt32(result);
6725 2856 : Goto(&return_result);
6726 : }
6727 :
6728 : BIND(&return_result);
6729 2856 : return var_result.value();
6730 : }
6731 :
6732 784 : TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
6733 1568 : VARIABLE(var_result, MachineRepresentation::kTagged);
6734 :
6735 : // Check if the {code} is a one-byte char code.
6736 784 : Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
6737 784 : if_done(this);
6738 2352 : Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
6739 784 : &if_codeisonebyte, &if_codeistwobyte);
6740 : BIND(&if_codeisonebyte);
6741 : {
6742 : // Load the isolate wide single character string cache.
6743 : TNode<FixedArray> cache =
6744 784 : CAST(LoadRoot(RootIndex::kSingleCharacterStringCache));
6745 1568 : TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));
6746 :
6747 : // Check if we have an entry for the {code} in the single character string
6748 : // cache already.
6749 784 : Label if_entryisundefined(this, Label::kDeferred),
6750 784 : if_entryisnotundefined(this);
6751 : Node* entry = UnsafeLoadFixedArrayElement(cache, code_index);
6752 1568 : Branch(IsUndefined(entry), &if_entryisundefined, &if_entryisnotundefined);
6753 :
6754 : BIND(&if_entryisundefined);
6755 : {
6756 : // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
6757 784 : TNode<String> result = AllocateSeqOneByteString(1);
6758 : StoreNoWriteBarrier(
6759 : MachineRepresentation::kWord8, result,
6760 1568 : IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
6761 784 : StoreFixedArrayElement(cache, code_index, result);
6762 784 : var_result.Bind(result);
6763 784 : Goto(&if_done);
6764 : }
6765 :
6766 : BIND(&if_entryisnotundefined);
6767 : {
6768 : // Return the entry from the {cache}.
6769 784 : var_result.Bind(entry);
6770 784 : Goto(&if_done);
6771 : }
6772 : }
6773 :
6774 : BIND(&if_codeistwobyte);
6775 : {
6776 : // Allocate a new SeqTwoByteString for {code}.
6777 1568 : Node* result = AllocateSeqTwoByteString(1);
6778 : StoreNoWriteBarrier(
6779 : MachineRepresentation::kWord16, result,
6780 1568 : IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
6781 784 : var_result.Bind(result);
6782 784 : Goto(&if_done);
6783 : }
6784 :
6785 : BIND(&if_done);
6786 : CSA_ASSERT(this, IsString(var_result.value()));
6787 1568 : return CAST(var_result.value());
6788 : }
6789 :
6790 : // A wrapper around CopyStringCharacters which determines the correct string
6791 : // encoding, allocates a corresponding sequential string, and then copies the
6792 : // given character range using CopyStringCharacters.
6793 : // |from_string| must be a sequential string.
6794 : // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
6795 896 : TNode<String> CodeStubAssembler::AllocAndCopyStringCharacters(
6796 : Node* from, Node* from_instance_type, TNode<IntPtrT> from_index,
6797 : TNode<IntPtrT> character_count) {
6798 1792 : Label end(this), one_byte_sequential(this), two_byte_sequential(this);
6799 : TVARIABLE(String, var_result);
6800 :
6801 1792 : Branch(IsOneByteStringInstanceType(from_instance_type), &one_byte_sequential,
6802 896 : &two_byte_sequential);
6803 :
6804 : // The subject string is a sequential one-byte string.
6805 : BIND(&one_byte_sequential);
6806 : {
6807 : TNode<String> result = AllocateSeqOneByteString(
6808 1792 : NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
6809 896 : CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6810 : character_count, String::ONE_BYTE_ENCODING,
6811 896 : String::ONE_BYTE_ENCODING);
6812 : var_result = result;
6813 896 : Goto(&end);
6814 : }
6815 :
6816 : // The subject string is a sequential two-byte string.
6817 : BIND(&two_byte_sequential);
6818 : {
6819 : TNode<String> result = AllocateSeqTwoByteString(
6820 1792 : NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
6821 896 : CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6822 : character_count, String::TWO_BYTE_ENCODING,
6823 896 : String::TWO_BYTE_ENCODING);
6824 : var_result = result;
6825 896 : Goto(&end);
6826 : }
6827 :
6828 : BIND(&end);
6829 896 : return var_result.value();
6830 : }
6831 :
6832 448 : TNode<String> CodeStubAssembler::SubString(TNode<String> string,
6833 : TNode<IntPtrT> from,
6834 : TNode<IntPtrT> to) {
 : // Returns string[from, to). Fast paths, in dispatch order:
 : // full-string (from==0, to==length) returns the input unchanged;
 : // empty and single-character results; a SlicedString wrapper for
 : // substrings of at least SlicedString::kMinLength; otherwise a
 : // fresh sequential copy. Anything else (non-flat cons, invalid
 : // bounds, uncached external) falls back to Runtime::kStringSubstring.
6835 448 : TVARIABLE(String, var_result);
6836 896 : ToDirectStringAssembler to_direct(state(), string);
6837 448 : Label end(this), runtime(this);
6838 :
6839 : TNode<IntPtrT> const substr_length = IntPtrSub(to, from);
6840 448 : TNode<IntPtrT> const string_length = LoadStringLengthAsWord(string);
6841 :
6842 : // Begin dispatching based on substring length.
6843 :
6844 448 : Label original_string_or_invalid_length(this);
 : // Unsigned compare: also catches from > to (negative length wraps).
6845 896 : GotoIf(UintPtrGreaterThanOrEqual(substr_length, string_length),
6846 448 : &original_string_or_invalid_length);
6847 :
6848 : // A real substring (substr_length < string_length).
6849 448 : Label empty(this);
6850 1344 : GotoIf(IntPtrEqual(substr_length, IntPtrConstant(0)), &empty);
6851 :
6852 448 : Label single_char(this);
6853 1344 : GotoIf(IntPtrEqual(substr_length, IntPtrConstant(1)), &single_char);
6854 :
6855 : // Deal with different string types: update the index if necessary
6856 : // and extract the underlying string.
6857 :
6858 448 : TNode<String> direct_string = to_direct.TryToDirect(&runtime);
6859 : TNode<IntPtrT> offset = IntPtrAdd(from, to_direct.offset());
6860 : Node* const instance_type = to_direct.instance_type();
6861 :
6862 : // The subject string can only be external or sequential string of either
6863 : // encoding at this point.
6864 448 : Label external_string(this);
6865 : {
6866 : if (FLAG_string_slices) {
6867 448 : Label next(this);
6868 :
6869 : // Short slice. Copy instead of slicing.
6870 896 : GotoIf(IntPtrLessThan(substr_length,
6871 896 : IntPtrConstant(SlicedString::kMinLength)),
6872 448 : &next);
6873 :
6874 : // Allocate new sliced string.
6875 :
6876 448 : Counters* counters = isolate()->counters();
6877 448 : IncrementCounter(counters->sub_string_native(), 1);
6878 :
6879 448 : Label one_byte_slice(this), two_byte_slice(this);
6880 896 : Branch(IsOneByteStringInstanceType(to_direct.instance_type()),
6881 448 : &one_byte_slice, &two_byte_slice);
6882 :
6883 : BIND(&one_byte_slice);
6884 : {
6885 896 : var_result = AllocateSlicedOneByteString(
6886 896 : Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6887 : SmiTag(offset));
6888 448 : Goto(&end);
6889 : }
6890 :
6891 : BIND(&two_byte_slice);
6892 : {
6893 896 : var_result = AllocateSlicedTwoByteString(
6894 896 : Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6895 : SmiTag(offset));
6896 448 : Goto(&end);
6897 : }
6898 :
6899 : BIND(&next);
6900 : }
6901 :
6902 : // The subject string can only be external or sequential string of either
6903 : // encoding at this point.
6904 448 : GotoIf(to_direct.is_external(), &external_string);
6905 :
6906 448 : var_result = AllocAndCopyStringCharacters(direct_string, instance_type,
6907 : offset, substr_length);
6908 :
6909 448 : Counters* counters = isolate()->counters();
6910 448 : IncrementCounter(counters->sub_string_native(), 1);
6911 :
6912 448 : Goto(&end);
6913 : }
6914 :
6915 : // Handle external string.
6916 : BIND(&external_string);
6917 : {
 : // PointerToString yields an untagged pointer positioned so that the
 : // external data can be copied with the sequential-string copy path.
6918 : Node* const fake_sequential_string = to_direct.PointerToString(&runtime);
6919 :
6920 448 : var_result = AllocAndCopyStringCharacters(
6921 : fake_sequential_string, instance_type, offset, substr_length);
6922 :
6923 448 : Counters* counters = isolate()->counters();
6924 448 : IncrementCounter(counters->sub_string_native(), 1);
6925 :
6926 448 : Goto(&end);
6927 : }
6928 :
6929 : BIND(&empty);
6930 : {
6931 : var_result = EmptyStringConstant();
6932 448 : Goto(&end);
6933 : }
6934 :
6935 : // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
6936 : BIND(&single_char);
6937 : {
6938 448 : TNode<Int32T> char_code = StringCharCodeAt(string, from);
6939 448 : var_result = StringFromSingleCharCode(char_code);
6940 448 : Goto(&end);
6941 : }
6942 :
6943 : BIND(&original_string_or_invalid_length);
6944 : {
6945 : CSA_ASSERT(this, IntPtrEqual(substr_length, string_length));
6946 :
6947 : // Equal length - check if {from, to} == {0, str.length}.
6948 1344 : GotoIf(UintPtrGreaterThan(from, IntPtrConstant(0)), &runtime);
6949 :
6950 : // Return the original string (substr_length == string_length).
6951 :
6952 448 : Counters* counters = isolate()->counters();
6953 448 : IncrementCounter(counters->sub_string_native(), 1);
6954 :
6955 : var_result = string;
6956 448 : Goto(&end);
6957 : }
6958 :
6959 : // Fall back to a runtime call.
6960 : BIND(&runtime);
6961 : {
6962 : var_result =
6963 1344 : CAST(CallRuntime(Runtime::kStringSubstring, NoContextConstant(), string,
6964 : SmiTag(from), SmiTag(to)));
6965 448 : Goto(&end);
6966 : }
6967 :
6968 : BIND(&end);
6969 448 : return var_result.value();
6970 : }
6971 :
6972 4704 : ToDirectStringAssembler::ToDirectStringAssembler(
6973 : compiler::CodeAssemblerState* state, Node* string, Flags flags)
 : // Seeds the flattening state for TryToDirect/TryToSequential:
 : // start at {string} with offset 0 and the not-external flag set;
 : // the instance type is cached so later dispatches avoid reloading it.
6974 : : CodeStubAssembler(state),
6975 : var_string_(this, MachineRepresentation::kTagged, string),
6976 : var_instance_type_(this, MachineRepresentation::kWord32),
6977 : var_offset_(this, MachineType::PointerRepresentation()),
6978 : var_is_external_(this, MachineRepresentation::kWord32),
6979 4704 : flags_(flags) {
6980 : CSA_ASSERT(this, TaggedIsNotSmi(string));
6981 : CSA_ASSERT(this, IsString(string));
6982 :
6983 4704 : var_string_.Bind(string);
6984 9408 : var_offset_.Bind(IntPtrConstant(0));
6985 9408 : var_instance_type_.Bind(LoadInstanceType(string));
6986 9408 : var_is_external_.Bind(Int32Constant(0));
6987 4704 : }
6988 :
6989 4704 : TNode<String> ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
 : // Iteratively unwraps indirect string representations (flat cons,
 : // sliced, thin) until a sequential or external string is reached,
 : // accumulating the character offset for sliced strings. Jumps to
 : // {if_bailout} for non-flat cons strings, for sliced strings when
 : // kDontUnpackSlicedStrings is set, and for unknown representations.
6990 9408 : VariableList vars({&var_string_, &var_offset_, &var_instance_type_}, zone());
6991 4704 : Label dispatch(this, vars);
6992 4704 : Label if_iscons(this);
6993 4704 : Label if_isexternal(this);
6994 4704 : Label if_issliced(this);
6995 4704 : Label if_isthin(this);
6996 4704 : Label out(this);
6997 :
6998 14112 : Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
6999 4704 : &dispatch);
7000 :
7001 : // Dispatch based on string representation.
7002 : BIND(&dispatch);
7003 : {
7004 : int32_t values[] = {
7005 : kSeqStringTag, kConsStringTag, kExternalStringTag,
7006 : kSlicedStringTag, kThinStringTag,
7007 4704 : };
7008 : Label* labels[] = {
7009 : &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
7010 4704 : };
7011 : STATIC_ASSERT(arraysize(values) == arraysize(labels));
7012 :
7013 9408 : Node* const representation = Word32And(
7014 14112 : var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
7015 4704 : Switch(representation, if_bailout, values, labels, arraysize(values));
7016 : }
7017 :
7018 : // Cons string. Check whether it is flat, then fetch first part.
7019 : // Flat cons strings have an empty second part.
7020 : BIND(&if_iscons);
7021 : {
7022 4704 : Node* const string = var_string_.value();
7023 9408 : GotoIfNot(IsEmptyString(LoadObjectField(string, ConsString::kSecondOffset)),
7024 4704 : if_bailout);
7025 :
7026 : Node* const lhs = LoadObjectField(string, ConsString::kFirstOffset);
7027 4704 : var_string_.Bind(lhs);
7028 9408 : var_instance_type_.Bind(LoadInstanceType(lhs));
7029 :
7030 4704 : Goto(&dispatch);
7031 : }
7032 :
7033 : // Sliced string. Fetch parent and correct start index by offset.
7034 : BIND(&if_issliced);
7035 : {
7036 4704 : if (!FLAG_string_slices || (flags_ & kDontUnpackSlicedStrings)) {
7037 56 : Goto(if_bailout);
7038 : } else {
7039 4648 : Node* const string = var_string_.value();
7040 : Node* const sliced_offset =
7041 9296 : LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
7042 13944 : var_offset_.Bind(IntPtrAdd(var_offset_.value(), sliced_offset));
7043 :
7044 : Node* const parent = LoadObjectField(string, SlicedString::kParentOffset);
7045 4648 : var_string_.Bind(parent);
7046 9296 : var_instance_type_.Bind(LoadInstanceType(parent));
7047 :
7048 4648 : Goto(&dispatch);
7049 : }
7050 : }
7051 :
7052 : // Thin string. Fetch the actual string.
7053 : BIND(&if_isthin);
7054 : {
7055 4704 : Node* const string = var_string_.value();
7056 : Node* const actual_string =
7057 : LoadObjectField(string, ThinString::kActualOffset);
7058 9408 : Node* const actual_instance_type = LoadInstanceType(actual_string);
7059 :
7060 4704 : var_string_.Bind(actual_string);
7061 4704 : var_instance_type_.Bind(actual_instance_type);
7062 :
7063 4704 : Goto(&dispatch);
7064 : }
7065 :
7066 : // External string.
7067 : BIND(&if_isexternal);
7068 9408 : var_is_external_.Bind(Int32Constant(1));
7069 4704 : Goto(&out);
7070 :
7071 : BIND(&out);
7072 9408 : return CAST(var_string_.value());
7073 : }
7074 :
7075 4704 : TNode<RawPtrT> ToDirectStringAssembler::TryToSequential(
7076 : StringPointerKind ptr_kind, Label* if_bailout) {
 : // Returns a raw pointer into the (already flattened) string:
 : // PTR_TO_DATA points at the first character, PTR_TO_STRING at a
 : // header-sized distance before the data so it can be treated like a
 : // sequential string. Bails out for uncached external strings, whose
 : // data is not reachable without a runtime call.
7077 4704 : CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
7078 :
7079 4704 : TVARIABLE(RawPtrT, var_result);
7080 4704 : Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
7081 4704 : Branch(is_external(), &if_isexternal, &if_issequential);
7082 :
7083 : BIND(&if_issequential);
7084 : {
 : // One- and two-byte sequential headers coincide, so one constant works
 : // for both encodings.
7085 : STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
7086 : SeqTwoByteString::kHeaderSize);
7087 9408 : TNode<IntPtrT> result = BitcastTaggedToWord(var_string_.value());
7088 4704 : if (ptr_kind == PTR_TO_DATA) {
7089 4256 : result = IntPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7090 : kHeapObjectTag));
7091 : }
7092 : var_result = ReinterpretCast<RawPtrT>(result);
7093 4704 : Goto(&out);
7094 : }
7095 :
7096 : BIND(&if_isexternal);
7097 : {
7098 9408 : GotoIf(IsUncachedExternalStringInstanceType(var_instance_type_.value()),
7099 : if_bailout);
7100 :
7101 4704 : TNode<String> string = CAST(var_string_.value());
7102 : TNode<IntPtrT> result =
7103 : LoadObjectField<IntPtrT>(string, ExternalString::kResourceDataOffset);
7104 4704 : if (ptr_kind == PTR_TO_STRING) {
7105 448 : result = IntPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7106 : kHeapObjectTag));
7107 : }
7108 : var_result = ReinterpretCast<RawPtrT>(result);
7109 4704 : Goto(&out);
7110 : }
7111 :
7112 : BIND(&out);
7113 4704 : return var_result.value();
7114 : }
7115 :
7116 2016 : void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string,
7117 : Node* instance_type,
7118 : Label* can_deref,
7119 : Label* cannot_deref) {
 : // Branches to {can_deref} when a single field load can replace the
 : // string: thin strings always, cons strings only when flat (second
 : // part is the empty string). Everything else goes to {cannot_deref}.
7120 : CSA_ASSERT(this, IsString(string));
7121 : Node* representation =
7122 6048 : Word32And(instance_type, Int32Constant(kStringRepresentationMask));
7123 6048 : GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref);
7124 6048 : GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)),
7125 2016 : cannot_deref);
7126 : // Cons string.
7127 : Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
7128 4032 : GotoIf(IsEmptyString(rhs), can_deref);
7129 2016 : Goto(cannot_deref);
7130 2016 : }
7131 :
7132 0 : Node* CodeStubAssembler::DerefIndirectString(TNode<String> string,
7133 : TNode<Int32T> instance_type,
7134 : Label* cannot_deref) {
 : // Returns the string one indirection level below {string} (a thin
 : // string's actual string or a flat cons string's first part), or
 : // jumps to {cannot_deref} when no such dereference is possible.
7135 0 : Label deref(this);
7136 0 : BranchIfCanDerefIndirectString(string, instance_type, &deref, cannot_deref);
7137 : BIND(&deref);
 : // Both indirect kinds store the target at the same field offset, so one
 : // load suffices regardless of which kind this is.
7138 : STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7139 : static_cast<int>(ConsString::kFirstOffset));
7140 0 : return LoadObjectField(string, ThinString::kActualOffset);
7141 : }
7142 :
7143 2016 : void CodeStubAssembler::DerefIndirectString(Variable* var_string,
7144 : Node* instance_type) {
 : // Unconditionally replaces {var_string} with the string it points to.
 : // Callers must already know the dereference is legal; debug builds
 : // verify that and trap otherwise.
7145 : #ifdef DEBUG
7146 : Label can_deref(this), cannot_deref(this);
7147 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &can_deref,
7148 : &cannot_deref);
7149 : BIND(&cannot_deref);
7150 : DebugBreak(); // Should be able to dereference string.
7151 : Goto(&can_deref);
7152 : BIND(&can_deref);
7153 : #endif // DEBUG
7154 :
7155 : STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7156 : static_cast<int>(ConsString::kFirstOffset));
7157 : var_string->Bind(
7158 4032 : LoadObjectField(var_string->value(), ThinString::kActualOffset));
7159 2016 : }
7160 :
7161 2016 : void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
7162 : Node* instance_type,
7163 : Label* did_deref,
7164 : Label* cannot_deref) {
 : // Dereferences {var_string} in place when it is a thin or flat cons
 : // string, then jumps to {did_deref}; otherwise jumps to {cannot_deref}
 : // with the variable untouched.
7165 4032 : Label deref(this);
7166 2016 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &deref,
7167 2016 : cannot_deref);
7168 :
7169 : BIND(&deref);
7170 : {
7171 2016 : DerefIndirectString(var_string, instance_type);
7172 2016 : Goto(did_deref);
7173 : }
7174 2016 : }
7175 :
7176 672 : void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
7177 : Node* left_instance_type,
7178 : Variable* var_right,
7179 : Node* right_instance_type,
7180 : Label* did_something) {
 : // Attempts to dereference both operands. Jumps to {did_something} if
 : // at least one was unwrapped (the caller typically retries its fast
 : // path); falls through when neither operand was an indirect string.
7181 1344 : Label did_nothing_left(this), did_something_left(this),
7182 672 : didnt_do_anything(this);
7183 : MaybeDerefIndirectString(var_left, left_instance_type, &did_something_left,
7184 672 : &did_nothing_left);
7185 :
7186 : BIND(&did_something_left);
7187 : {
 : // Left already changed, so the outcome for the right side is
 : // irrelevant: both continuations lead to {did_something}.
7188 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7189 672 : did_something);
7190 : }
7191 :
7192 : BIND(&did_nothing_left);
7193 : {
7194 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7195 672 : &didnt_do_anything);
7196 : }
7197 :
7198 : BIND(&didnt_do_anything);
7199 : // Fall through if neither string was an indirect string.
7200 672 : }
7201 :
7202 392 : TNode<String> CodeStubAssembler::StringAdd(Node* context, TNode<String> left,
7203 : TNode<String> right,
7204 : Variable* var_feedback) {
 : // Concatenates {left} and {right}. Fast paths: either side empty
 : // returns the other side; results of at least ConsString::kMinLength
 : // get a cons string; shorter results of matching encoding are copied
 : // into a fresh sequential string. Indirect operands are unwrapped and
 : // retried; everything else (including overflow past String::kMaxLength,
 : // which must invalidate the length protector) goes to Runtime::kStringAdd.
 : // {var_feedback}, when non-null, receives binary-op feedback.
7205 392 : TVARIABLE(String, result);
7206 392 : Label check_right(this), runtime(this, Label::kDeferred), cons(this),
7207 392 : done(this, &result), done_native(this, &result);
7208 392 : Counters* counters = isolate()->counters();
7209 :
7210 : // Default to "String" feedback if we don't learn anything else below.
7211 392 : if (var_feedback != nullptr) {
7212 336 : var_feedback->Bind(SmiConstant(BinaryOperationFeedback::kString));
7213 : }
7214 :
7215 : TNode<Uint32T> left_length = LoadStringLengthAsWord32(left);
7216 784 : GotoIfNot(Word32Equal(left_length, Uint32Constant(0)), &check_right);
7217 : result = right;
7218 392 : Goto(&done_native);
7219 :
7220 : BIND(&check_right);
7221 : TNode<Uint32T> right_length = LoadStringLengthAsWord32(right);
7222 784 : GotoIfNot(Word32Equal(right_length, Uint32Constant(0)), &cons);
7223 : result = left;
7224 392 : Goto(&done_native);
7225 :
7226 : BIND(&cons);
7227 : {
7228 : TNode<Uint32T> new_length = Uint32Add(left_length, right_length);
7229 :
7230 : // If new length is greater than String::kMaxLength, goto runtime to
7231 : // throw. Note: we also need to invalidate the string length protector, so
7232 : // can't just throw here directly.
7233 784 : GotoIf(Uint32GreaterThan(new_length, Uint32Constant(String::kMaxLength)),
7234 392 : &runtime);
7235 :
7236 : TVARIABLE(String, var_left, left);
7237 : TVARIABLE(String, var_right, right);
7238 392 : Variable* input_vars[2] = {&var_left, &var_right};
7239 784 : Label non_cons(this, 2, input_vars);
7240 392 : Label slow(this, Label::kDeferred);
7241 784 : GotoIf(Uint32LessThan(new_length, Uint32Constant(ConsString::kMinLength)),
7242 392 : &non_cons);
7243 :
7244 392 : result = AllocateConsString(new_length, var_left.value(), var_right.value(),
7245 : var_feedback);
7246 392 : Goto(&done_native);
7247 :
7248 : BIND(&non_cons);
7249 :
7250 392 : Comment("Full string concatenate");
7251 784 : Node* left_instance_type = LoadInstanceType(var_left.value());
7252 784 : Node* right_instance_type = LoadInstanceType(var_right.value());
7253 : // Compute intersection and difference of instance types.
7254 :
7255 : Node* ored_instance_types =
7256 784 : Word32Or(left_instance_type, right_instance_type);
7257 : Node* xored_instance_types =
7258 784 : Word32Xor(left_instance_type, right_instance_type);
7259 :
7260 : // Check if both strings have the same encoding and both are sequential.
7261 784 : GotoIf(IsSetWord32(xored_instance_types, kStringEncodingMask), &runtime);
7262 784 : GotoIf(IsSetWord32(ored_instance_types, kStringRepresentationMask), &slow);
7263 :
7264 784 : TNode<IntPtrT> word_left_length = Signed(ChangeUint32ToWord(left_length));
7265 784 : TNode<IntPtrT> word_right_length = Signed(ChangeUint32ToWord(right_length));
7266 :
7267 392 : Label two_byte(this);
7268 1568 : GotoIf(Word32Equal(Word32And(ored_instance_types,
7269 784 : Int32Constant(kStringEncodingMask)),
7270 1176 : Int32Constant(kTwoByteStringTag)),
7271 392 : &two_byte);
7272 : // One-byte sequential string case
7273 784 : result = AllocateSeqOneByteString(context, new_length);
7274 392 : CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7275 : IntPtrConstant(0), word_left_length,
7276 392 : String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7277 392 : CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7278 : word_left_length, word_right_length,
7279 392 : String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7280 392 : Goto(&done_native);
7281 :
7282 : BIND(&two_byte);
7283 : {
7284 : // Two-byte sequential string case
7285 784 : result = AllocateSeqTwoByteString(context, new_length);
7286 392 : CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7287 : IntPtrConstant(0), word_left_length,
7288 : String::TWO_BYTE_ENCODING,
7289 392 : String::TWO_BYTE_ENCODING);
7290 392 : CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7291 : word_left_length, word_right_length,
7292 : String::TWO_BYTE_ENCODING,
7293 392 : String::TWO_BYTE_ENCODING);
7294 392 : Goto(&done_native);
7295 : }
7296 :
7297 : BIND(&slow);
7298 : {
7299 : // Try to unwrap indirect strings, restart the above attempt on success.
7300 : MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
7301 392 : right_instance_type, &non_cons);
7302 392 : Goto(&runtime);
7303 : }
7304 : }
7305 : BIND(&runtime);
7306 : {
7307 : result = CAST(CallRuntime(Runtime::kStringAdd, context, left, right));
7308 392 : Goto(&done);
7309 : }
7310 :
7311 : BIND(&done_native);
7312 : {
7313 392 : IncrementCounter(counters->string_add_native(), 1);
7314 392 : Goto(&done);
7315 : }
7316 :
7317 : BIND(&done);
7318 392 : return result.value();
7319 : }
7320 :
7321 112 : TNode<String> CodeStubAssembler::StringFromSingleCodePoint(
7322 : TNode<Int32T> codepoint, UnicodeEncoding encoding) {
 : // Builds a one- or two-code-unit string from {codepoint}. BMP values
 : // (< 0x10000) go through the single-char-code path; larger values are
 : // stored as a surrogate pair in a fresh two-byte string of length 2.
 : // For UTF32 input the pair is computed here; for UTF16 the caller is
 : // expected to supply the already-combined 32-bit value.
7323 224 : VARIABLE(var_result, MachineRepresentation::kTagged, EmptyStringConstant());
7324 :
7325 112 : Label if_isword16(this), if_isword32(this), return_result(this);
7326 :
7327 336 : Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
7328 112 : &if_isword32);
7329 :
7330 : BIND(&if_isword16);
7331 : {
7332 224 : var_result.Bind(StringFromSingleCharCode(codepoint));
7333 112 : Goto(&return_result);
7334 : }
7335 :
7336 : BIND(&if_isword32);
7337 : {
7338 112 : switch (encoding) {
7339 : case UnicodeEncoding::UTF16:
7340 : break;
7341 : case UnicodeEncoding::UTF32: {
7342 : // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
7343 0 : Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
7344 :
7345 : // lead = (codepoint >> 10) + LEAD_OFFSET
7346 : Node* lead =
7347 0 : Int32Add(Word32Shr(codepoint, Int32Constant(10)), lead_offset);
7348 :
7349 : // trail = (codepoint & 0x3FF) + 0xDC00;
7350 0 : Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
7351 0 : Int32Constant(0xDC00));
7352 :
7353 : // codpoint = (trail << 16) | lead;
7354 0 : codepoint = Signed(Word32Or(Word32Shl(trail, Int32Constant(16)), lead));
7355 0 : break;
7356 : }
7357 : }
7358 :
 : // Write both UTF-16 code units with a single 32-bit store into the
 : // freshly allocated (hence unobserved) string.
7359 224 : Node* value = AllocateSeqTwoByteString(2);
7360 : StoreNoWriteBarrier(
7361 : MachineRepresentation::kWord32, value,
7362 224 : IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
7363 112 : codepoint);
7364 112 : var_result.Bind(value);
7365 112 : Goto(&return_result);
7366 : }
7367 :
7368 : BIND(&return_result);
7369 224 : return CAST(var_result.value());
7370 : }
7371 :
7372 900 : TNode<Number> CodeStubAssembler::StringToNumber(TNode<String> input) {
 : // Converts a string to a Number. Fast path: strings whose hash field
 : // caches an array index decode that index straight into a Smi;
 : // everything else is handled by Runtime::kStringToNumber.
7373 1800 : Label runtime(this, Label::kDeferred);
7374 900 : Label end(this);
7375 :
7376 : TVARIABLE(Number, var_result);
7377 :
7378 : // Check if string has a cached array index.
7379 : TNode<Uint32T> hash = LoadNameHashField(input);
7380 1800 : GotoIf(IsSetWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
7381 900 : &runtime);
7382 :
7383 : var_result =
7384 2700 : SmiTag(Signed(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
7385 900 : Goto(&end);
7386 :
7387 : BIND(&runtime);
7388 : {
7389 : var_result =
7390 : CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input));
7391 900 : Goto(&end);
7392 : }
7393 :
7394 : BIND(&end);
7395 900 : return var_result.value();
7396 : }
7397 :
7398 340 : TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
 : // Converts a Number to a String via the isolate's number-string cache
 : // (alternating key/value entries in a FixedArray). Smis are looked up
 : // by their tagged value; heap numbers by a hash of the double's two
 : // 32-bit halves, after first trying to canonicalize to a Smi. Cache
 : // misses fall back to Runtime::kNumberToString.
7399 340 : TVARIABLE(String, result);
7400 : TVARIABLE(Smi, smi_input);
7401 340 : Label runtime(this, Label::kDeferred), if_smi(this), if_heap_number(this),
7402 340 : done(this, &result);
7403 :
7404 : // Load the number string cache.
7405 680 : Node* number_string_cache = LoadRoot(RootIndex::kNumberStringCache);
7406 :
7407 : // Make the hash mask from the length of the number string cache. It
7408 : // contains two elements (number and string) for each cache entry.
7409 : // TODO(ishell): cleanup mask handling.
7410 : Node* mask =
7411 680 : BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
7412 340 : TNode<IntPtrT> one = IntPtrConstant(1);
7413 680 : mask = IntPtrSub(mask, one);
7414 :
7415 680 : GotoIfNot(TaggedIsSmi(input), &if_heap_number);
7416 : smi_input = CAST(input);
7417 340 : Goto(&if_smi);
7418 :
7419 : BIND(&if_heap_number);
7420 : {
7421 : TNode<HeapNumber> heap_number_input = CAST(input);
7422 : // Try normalizing the HeapNumber.
7423 340 : TryHeapNumberToSmi(heap_number_input, smi_input, &if_smi);
7424 :
7425 : // Make a hash from the two 32-bit values of the double.
7426 : TNode<Int32T> low =
7427 : LoadObjectField<Int32T>(heap_number_input, HeapNumber::kValueOffset);
7428 : TNode<Int32T> high = LoadObjectField<Int32T>(
7429 : heap_number_input, HeapNumber::kValueOffset + kIntSize);
7430 340 : TNode<Word32T> hash = Word32Xor(low, high);
7431 680 : TNode<WordT> word_hash = WordShl(ChangeInt32ToIntPtr(hash), one);
7432 : TNode<WordT> index =
7433 680 : WordAnd(word_hash, WordSar(mask, SmiShiftBitsConstant()));
7434 :
7435 : // Cache entry's key must be a heap number
7436 : Node* number_key =
7437 : UnsafeLoadFixedArrayElement(CAST(number_string_cache), index);
7438 680 : GotoIf(TaggedIsSmi(number_key), &runtime);
7439 680 : GotoIfNot(IsHeapNumber(number_key), &runtime);
7440 :
7441 : // Cache entry's key must match the heap number value we're looking for.
7442 340 : Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
7443 340 : MachineType::Int32());
7444 340 : Node* high_compare = LoadObjectField(
7445 340 : number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
7446 680 : GotoIfNot(Word32Equal(low, low_compare), &runtime);
7447 680 : GotoIfNot(Word32Equal(high, high_compare), &runtime);
7448 :
7449 : // Heap number match, return value from cache entry.
7450 : result = CAST(UnsafeLoadFixedArrayElement(CAST(number_string_cache), index,
7451 : kTaggedSize));
7452 340 : Goto(&done);
7453 : }
7454 :
7455 : BIND(&if_smi);
7456 : {
7457 : // Load the smi key, make sure it matches the smi we're looking for.
7458 680 : Node* smi_index = BitcastWordToTagged(
7459 1360 : WordAnd(WordShl(BitcastTaggedToWord(smi_input.value()), one), mask));
7460 : Node* smi_key = UnsafeLoadFixedArrayElement(CAST(number_string_cache),
7461 : smi_index, 0, SMI_PARAMETERS);
7462 340 : GotoIf(WordNotEqual(smi_key, smi_input.value()), &runtime);
7463 :
7464 : // Smi match, return value from cache entry.
7465 : result = CAST(UnsafeLoadFixedArrayElement(
7466 : CAST(number_string_cache), smi_index, kTaggedSize, SMI_PARAMETERS));
7467 340 : Goto(&done);
7468 : }
7469 :
7470 : BIND(&runtime);
7471 : {
7472 : // No cache entry, go to the runtime.
7473 : result =
7474 : CAST(CallRuntime(Runtime::kNumberToString, NoContextConstant(), input));
7475 340 : Goto(&done);
7476 : }
7477 : BIND(&done);
7478 340 : return result.value();
7479 : }
7480 :
7481 844 : Node* CodeStubAssembler::NonNumberToNumberOrNumeric(
7482 : Node* context, Node* input, Object::Conversion mode,
7483 : BigIntHandling bigint_handling) {
 : // Converts a non-Smi, non-HeapNumber {input} per the spec's
 : // ToNumber/ToNumeric: strings via StringToNumber, oddballs via their
 : // cached to-number value, receivers via ToPrimitive (looping on the
 : // primitive result), everything else via the runtime. BigInts are
 : // returned as-is in kToNumeric mode; in kToNumber mode they either
 : // throw (kThrow, via the runtime's error path) or are converted
 : // (kConvertToNumber), as selected by {bigint_handling}.
7484 : CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
7485 : CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
7486 :
7487 : // We might need to loop once here due to ToPrimitive conversions.
7488 1688 : VARIABLE(var_input, MachineRepresentation::kTagged, input);
7489 1688 : VARIABLE(var_result, MachineRepresentation::kTagged);
7490 844 : Label loop(this, &var_input);
7491 844 : Label end(this);
7492 844 : Goto(&loop);
7493 : BIND(&loop);
7494 : {
7495 : // Load the current {input} value (known to be a HeapObject).
7496 844 : Node* input = var_input.value();
7497 :
7498 : // Dispatch on the {input} instance type.
7499 1688 : Node* input_instance_type = LoadInstanceType(input);
7500 844 : Label if_inputisstring(this), if_inputisoddball(this),
7501 844 : if_inputisbigint(this), if_inputisreceiver(this, Label::kDeferred),
7502 844 : if_inputisother(this, Label::kDeferred);
7503 1688 : GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
7504 844 : GotoIf(IsBigIntInstanceType(input_instance_type), &if_inputisbigint);
7505 1688 : GotoIf(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE),
7506 844 : &if_inputisoddball);
7507 1688 : Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
7508 844 : &if_inputisother);
7509 :
7510 : BIND(&if_inputisstring);
7511 : {
7512 : // The {input} is a String, use the fast stub to convert it to a Number.
7513 844 : TNode<String> string_input = CAST(input);
7514 1688 : var_result.Bind(StringToNumber(string_input));
7515 844 : Goto(&end);
7516 : }
7517 :
7518 : BIND(&if_inputisbigint);
7519 844 : if (mode == Object::Conversion::kToNumeric) {
7520 112 : var_result.Bind(input);
7521 112 : Goto(&end);
7522 : } else {
7523 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
7524 732 : if (bigint_handling == BigIntHandling::kThrow) {
7525 620 : Goto(&if_inputisother);
7526 : } else {
7527 : DCHECK_EQ(bigint_handling, BigIntHandling::kConvertToNumber);
7528 112 : var_result.Bind(CallRuntime(Runtime::kBigIntToNumber, context, input));
7529 112 : Goto(&end);
7530 : }
7531 : }
7532 :
7533 : BIND(&if_inputisoddball);
7534 : {
7535 : // The {input} is an Oddball, we just need to load the Number value of it.
7536 844 : var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
7537 844 : Goto(&end);
7538 : }
7539 :
7540 : BIND(&if_inputisreceiver);
7541 : {
7542 : // The {input} is a JSReceiver, we need to convert it to a Primitive first
7543 : // using the ToPrimitive type conversion, preferably yielding a Number.
7544 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
7545 844 : isolate(), ToPrimitiveHint::kNumber);
7546 1688 : Node* result = CallStub(callable, context, input);
7547 :
7548 : // Check if the {result} is already a Number/Numeric.
7549 844 : Label if_done(this), if_notdone(this);
7550 2532 : Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
7551 : : IsNumeric(result),
7552 844 : &if_done, &if_notdone);
7553 :
7554 : BIND(&if_done);
7555 : {
7556 : // The ToPrimitive conversion already gave us a Number/Numeric, so we're
7557 : // done.
7558 844 : var_result.Bind(result);
7559 844 : Goto(&end);
7560 : }
7561 :
7562 : BIND(&if_notdone);
7563 : {
7564 : // We now have a Primitive {result}, but it's not yet a Number/Numeric.
7565 844 : var_input.Bind(result);
7566 844 : Goto(&loop);
7567 : }
7568 : }
7569 :
7570 : BIND(&if_inputisother);
7571 : {
7572 : // The {input} is something else (e.g. Symbol), let the runtime figure
7573 : // out the correct exception.
7574 : // Note: We cannot tail call to the runtime here, as js-to-wasm
7575 : // trampolines also use this code currently, and they declare all
7576 : // outgoing parameters as untagged, while we would push a tagged
7577 : // object here.
7578 : auto function_id = mode == Object::Conversion::kToNumber
7579 : ? Runtime::kToNumber
7580 844 : : Runtime::kToNumeric;
7581 844 : var_result.Bind(CallRuntime(function_id, context, input));
7582 844 : Goto(&end);
7583 : }
7584 : }
7585 :
7586 : BIND(&end);
7587 : if (mode == Object::Conversion::kToNumeric) {
7588 : CSA_ASSERT(this, IsNumeric(var_result.value()));
7589 : } else {
7590 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
7591 : CSA_ASSERT(this, IsNumber(var_result.value()));
7592 : }
7593 1688 : return var_result.value();
7594 : }
7595 :
7596 56 : TNode<Number> CodeStubAssembler::NonNumberToNumber(
7597 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input,
7598 : BigIntHandling bigint_handling) {
 : // Typed wrapper over NonNumberToNumberOrNumeric in kToNumber mode;
 : // the CAST is safe because that mode asserts the result IsNumber.
7599 732 : return CAST(NonNumberToNumberOrNumeric(
7600 : context, input, Object::Conversion::kToNumber, bigint_handling));
7601 : }
7602 :
7603 112 : TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(
7604 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input) {
 : // Typed wrapper over NonNumberToNumberOrNumeric in kToNumeric mode;
 : // the slow assert (not a CAST) checks the Numeric invariant.
7605 : Node* result = NonNumberToNumberOrNumeric(context, input,
7606 112 : Object::Conversion::kToNumeric);
7607 : CSA_SLOW_ASSERT(this, IsNumeric(result));
7608 112 : return UncheckedCast<Numeric>(result);
7609 : }
7610 :
7611 616 : TNode<Number> CodeStubAssembler::ToNumber_Inline(SloppyTNode<Context> context,
7612 : SloppyTNode<Object> input) {
 : // Inline-friendly ToNumber: Smis and HeapNumbers pass through without
 : // a call; everything else is delegated to the NonNumberToNumber
 : // builtin so the inlined code stays small.
7613 616 : TVARIABLE(Number, var_result);
7614 616 : Label end(this), not_smi(this, Label::kDeferred);
7615 :
7616 1232 : GotoIfNot(TaggedIsSmi(input), &not_smi);
7617 : var_result = CAST(input);
7618 616 : Goto(&end);
7619 :
7620 : BIND(&not_smi);
7621 : {
7622 2464 : var_result =
7623 1232 : Select<Number>(IsHeapNumber(CAST(input)), [=] { return CAST(input); },
7624 : [=] {
7625 616 : return CAST(CallBuiltin(Builtins::kNonNumberToNumber,
7626 : context, input));
7627 : });
7628 616 : Goto(&end);
7629 : }
7630 :
7631 : BIND(&end);
7632 616 : return var_result.value();
7633 : }
7634 :
7635 676 : TNode<Number> CodeStubAssembler::ToNumber(SloppyTNode<Context> context,
7636 : SloppyTNode<Object> input,
7637 : BigIntHandling bigint_handling) {
 : // Full ToNumber: Smi and HeapNumber fast paths inline, all other
 : // heap objects via NonNumberToNumber (which honors
 : // {bigint_handling} for BigInt inputs).
7638 676 : TVARIABLE(Number, var_result);
7639 676 : Label end(this);
7640 :
7641 676 : Label not_smi(this, Label::kDeferred);
7642 1352 : GotoIfNot(TaggedIsSmi(input), &not_smi);
7643 : TNode<Smi> input_smi = CAST(input);
7644 : var_result = input_smi;
7645 676 : Goto(&end);
7646 :
7647 : BIND(&not_smi);
7648 : {
7649 676 : Label not_heap_number(this, Label::kDeferred);
7650 : TNode<HeapObject> input_ho = CAST(input);
7651 1352 : GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
7652 :
7653 : TNode<HeapNumber> input_hn = CAST(input_ho);
7654 : var_result = input_hn;
7655 676 : Goto(&end);
7656 :
7657 : BIND(&not_heap_number);
7658 : {
7659 : var_result = NonNumberToNumber(context, input_ho, bigint_handling);
7660 676 : Goto(&end);
7661 : }
7662 : }
7663 :
7664 : BIND(&end);
7665 676 : return var_result.value();
7666 : }
7667 :
7668 1568 : TNode<BigInt> CodeStubAssembler::ToBigInt(SloppyTNode<Context> context,
7669 : SloppyTNode<Object> input) {
 : // ToBigInt: BigInts pass through; Smis throw a TypeError immediately
 : // (Numbers are not implicitly convertible to BigInt); all other
 : // values are handled by Runtime::kToBigInt, which converts or throws.
7670 1568 : TVARIABLE(BigInt, var_result);
7671 1568 : Label if_bigint(this), done(this), if_throw(this);
7672 :
7673 3136 : GotoIf(TaggedIsSmi(input), &if_throw);
7674 3136 : GotoIf(IsBigInt(CAST(input)), &if_bigint);
7675 : var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input));
7676 1568 : Goto(&done);
7677 :
7678 : BIND(&if_bigint);
7679 : var_result = CAST(input);
7680 1568 : Goto(&done);
7681 :
7682 : BIND(&if_throw);
7683 1568 : ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
7684 :
7685 : BIND(&done);
7686 1568 : return var_result.value();
7687 : }
7688 :
7689 336 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7690 : Variable* var_numeric) {
 : // Feedback-free variant: forwards to the full overload with a null
 : // feedback variable so no feedback is recorded.
7691 336 : TaggedToNumeric(context, value, done, var_numeric, nullptr);
7692 336 : }
7693 :
7694 1008 : void CodeStubAssembler::TaggedToNumericWithFeedback(Node* context, Node* value,
7695 : Label* done,
7696 : Variable* var_numeric,
7697 : Variable* var_feedback) {
 : // Feedback-collecting variant: {var_feedback} is mandatory here and
 : // receives BinaryOperationFeedback from the shared implementation.
7698 : DCHECK_NOT_NULL(var_feedback);
7699 1008 : TaggedToNumeric(context, value, done, var_numeric, var_feedback);
7700 1008 : }
7701 :
7702 1344 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7703 : Variable* var_numeric,
7704 : Variable* var_feedback) {
 : // Converts {value} to a Numeric into {var_numeric} and jumps to
 : // {done}. Smi/HeapNumber/BigInt inputs pass through unchanged;
 : // oddballs use their cached to-number value; everything else calls
 : // the NonNumberToNumeric builtin. When {var_feedback} is non-null it
 : // is overwritten with the matching BinaryOperationFeedback kind.
7705 1344 : var_numeric->Bind(value);
7706 2688 : Label if_smi(this), if_heapnumber(this), if_bigint(this), if_oddball(this);
7707 2688 : GotoIf(TaggedIsSmi(value), &if_smi);
7708 : Node* map = LoadMap(value);
7709 2688 : GotoIf(IsHeapNumberMap(map), &if_heapnumber);
7710 : Node* instance_type = LoadMapInstanceType(map);
7711 1344 : GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
7712 :
7713 : // {value} is not a Numeric yet.
7714 4032 : GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
7715 2688 : var_numeric->Bind(CallBuiltin(Builtins::kNonNumberToNumeric, context, value));
7716 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
7717 1344 : Goto(done);
7718 :
7719 : BIND(&if_smi);
7720 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
7721 1344 : Goto(done);
7722 :
7723 : BIND(&if_heapnumber);
7724 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumber);
7725 1344 : Goto(done);
7726 :
7727 : BIND(&if_bigint);
7728 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
7729 1344 : Goto(done);
7730 :
7731 : BIND(&if_oddball);
7732 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumberOrOddball);
7733 1344 : var_numeric->Bind(LoadObjectField(value, Oddball::kToNumberOffset));
7734 1344 : Goto(done);
7735 1344 : }
7736 :
7737 : // ES#sec-touint32
           : // Converts {input} to an integer in [0, 2^32) represented as a Number.
           : // Positive Smis are returned unchanged; negative Smis are reinterpreted
           : // as their unsigned 32-bit value; HeapNumbers are truncated and reduced
           : // modulo 2^32, with +-0, NaN and +-Infinity all mapping to 0.
7738 60 : TNode<Number> CodeStubAssembler::ToUint32(SloppyTNode<Context> context,
7739 : SloppyTNode<Object> input) {
7740 120 : Node* const float_zero = Float64Constant(0.0);
7741 120 : Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
7742 :
7743 60 : Label out(this);
7744 :
7745 120 : VARIABLE(var_result, MachineRepresentation::kTagged, input);
7746 :
7747 : // Early exit for positive smis.
7748 : {
7749 : // TODO(jgruber): This branch and the recheck below can be removed once we
7750 : // have a ToNumber with multiple exits.
7751 60 : Label next(this, Label::kDeferred);
7752 120 : Branch(TaggedIsPositiveSmi(input), &out, &next);
7753 : BIND(&next);
7754 : }
7755 :
7756 120 : Node* const number = ToNumber(context, input);
7757 60 : var_result.Bind(number);
7758 :
7759 : // Perhaps we have a positive smi now.
7760 : {
7761 60 : Label next(this, Label::kDeferred);
7762 120 : Branch(TaggedIsPositiveSmi(number), &out, &next);
7763 : BIND(&next);
7764 : }
7765 :
7766 60 : Label if_isnegativesmi(this), if_isheapnumber(this);
7767 120 : Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
7768 :
7769 : BIND(&if_isnegativesmi);
7770 : {
           : // A negative Smi becomes its unsigned 32-bit reinterpretation,
           : // which no longer fits in a Smi, so box it as a HeapNumber.
7771 120 : Node* const uint32_value = SmiToInt32(number);
7772 120 : Node* float64_value = ChangeUint32ToFloat64(uint32_value);
7773 120 : var_result.Bind(AllocateHeapNumberWithValue(float64_value));
7774 60 : Goto(&out);
7775 : }
7776 :
7777 : BIND(&if_isheapnumber);
7778 : {
7779 60 : Label return_zero(this);
7780 : Node* const value = LoadHeapNumberValue(number);
7781 :
7782 : {
7783 : // +-0.
7784 60 : Label next(this);
7785 120 : Branch(Float64Equal(value, float_zero), &return_zero, &next);
7786 : BIND(&next);
7787 : }
7788 :
7789 : {
7790 : // NaN.
           : // NaN is the only value not equal to itself.
7791 60 : Label next(this);
7792 120 : Branch(Float64Equal(value, value), &next, &return_zero);
7793 : BIND(&next);
7794 : }
7795 :
7796 : {
7797 : // +Infinity.
7798 60 : Label next(this);
7799 : Node* const positive_infinity =
7800 120 : Float64Constant(std::numeric_limits<double>::infinity());
7801 120 : Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
7802 : BIND(&next);
7803 : }
7804 :
7805 : {
7806 : // -Infinity.
7807 60 : Label next(this);
7808 : Node* const negative_infinity =
7809 120 : Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
7810 120 : Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
7811 : BIND(&next);
7812 : }
7813 :
7814 : // * Let int be the mathematical value that is the same sign as number and
7815 : // whose magnitude is floor(abs(number)).
7816 : // * Let int32bit be int modulo 2^32.
7817 : // * Return int32bit.
7818 : {
7819 120 : Node* x = Float64Trunc(value);
           : // mod / add / mod maps negative remainders into [0, 2^32).
7820 120 : x = Float64Mod(x, float_two_32);
7821 120 : x = Float64Add(x, float_two_32);
7822 120 : x = Float64Mod(x, float_two_32);
7823 :
7824 120 : Node* const result = ChangeFloat64ToTagged(x);
7825 60 : var_result.Bind(result);
7826 60 : Goto(&out);
7827 : }
7828 :
7829 : BIND(&return_zero);
7830 : {
7831 120 : var_result.Bind(SmiConstant(0));
7832 60 : Goto(&out);
7833 : }
7834 : }
7835 :
7836 : BIND(&out);
7837 120 : return CAST(var_result.value());
7838 : }
7839 :
           : // ES#sec-tostring (mostly). Converts {input} to a String, looping once if
           : // {input} is a JSReceiver that first has to be converted to a primitive.
           : // Throws a TypeError for Symbols; falls back to the runtime for anything
           : // not handled inline.
7840 172 : TNode<String> CodeStubAssembler::ToString(SloppyTNode<Context> context,
7841 : SloppyTNode<Object> input) {
7842 172 : TVARIABLE(Object, result, input);
7843 172 : Label loop(this, &result), done(this);
7844 172 : Goto(&loop);
7845 : BIND(&loop);
7846 : {
7847 : // Load the current {input} value.
7848 : TNode<Object> input = result.value();
7849 :
7850 : // Dispatch based on the type of the {input.}
7851 172 : Label if_inputisnumber(this), if_inputisoddball(this),
7852 172 : if_inputissymbol(this), if_inputisreceiver(this, Label::kDeferred),
7853 172 : runtime(this, Label::kDeferred);
7854 344 : GotoIf(TaggedIsSmi(input), &if_inputisnumber);
7855 172 : TNode<Int32T> input_instance_type = LoadInstanceType(CAST(input));
           : // Strings need no conversion: {result} already holds the answer.
7856 344 : GotoIf(IsStringInstanceType(input_instance_type), &done);
7857 344 : GotoIf(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver);
7858 172 : GotoIf(IsHeapNumberInstanceType(input_instance_type), &if_inputisnumber);
7859 172 : GotoIf(IsOddballInstanceType(input_instance_type), &if_inputisoddball);
7860 172 : Branch(IsSymbolInstanceType(input_instance_type), &if_inputissymbol,
7861 172 : &runtime);
7862 :
7863 : BIND(&if_inputisnumber);
7864 : {
7865 : // Convert the Number {input} to a String.
7866 172 : TNode<Number> number_input = CAST(input);
7867 344 : result = NumberToString(number_input);
7868 172 : Goto(&done);
7869 : }
7870 :
7871 : BIND(&if_inputisoddball);
7872 : {
7873 : // Just return the {input}'s string representation.
           : // Oddballs cache it in the kToStringOffset field.
7874 : result = LoadObjectField(CAST(input), Oddball::kToStringOffset);
7875 172 : Goto(&done);
7876 : }
7877 :
7878 : BIND(&if_inputissymbol);
7879 : {
7880 : // Throw a type error when {input} is a Symbol.
7881 : ThrowTypeError(context, MessageTemplate::kSymbolToString);
7882 : }
7883 :
7884 : BIND(&if_inputisreceiver);
7885 : {
7886 : // Convert the JSReceiver {input} to a primitive first,
7887 : // and then run the loop again with the new {input},
7888 : // which is then a primitive value.
7889 344 : result = CallBuiltin(Builtins::kNonPrimitiveToPrimitive_String, context,
7890 : input);
7891 172 : Goto(&loop);
7892 : }
7893 :
7894 : BIND(&runtime);
7895 : {
7896 : result = CallRuntime(Runtime::kToString, context, input);
7897 172 : Goto(&done);
7898 : }
7899 : }
7900 :
7901 : BIND(&done);
7902 172 : return CAST(result.value());
7903 : }
7904 :
           : // Inline fast path for ToString: if {input} is already a String it is
           : // returned directly; otherwise the (deferred) ToString builtin is called.
7905 2800 : TNode<String> CodeStubAssembler::ToString_Inline(SloppyTNode<Context> context,
7906 : SloppyTNode<Object> input) {
7907 5600 : VARIABLE(var_result, MachineRepresentation::kTagged, input);
7908 2800 : Label stub_call(this, Label::kDeferred), out(this);
7909 :
7910 5600 : GotoIf(TaggedIsSmi(input), &stub_call);
7911 5600 : Branch(IsString(CAST(input)), &out, &stub_call);
7912 :
7913 : BIND(&stub_call);
7914 5600 : var_result.Bind(CallBuiltin(Builtins::kToString, context, input));
7915 2800 : Goto(&out);
7916 :
7917 : BIND(&out);
7918 5600 : return CAST(var_result.value());
7919 : }
7920 :
           : // If {input} is a JSReceiver, converts it to a primitive via the
           : // NonPrimitiveToPrimitive stub (default hint); primitives pass through
           : // unchanged.
7921 112 : Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
7922 224 : Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
7923 224 : VARIABLE(result, MachineRepresentation::kTagged);
7924 112 : Label done(this, &result);
7925 :
7926 112 : BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7927 :
7928 : BIND(&if_isreceiver);
7929 : {
7930 : // Convert {input} to a primitive first passing Number hint.
7931 112 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
7932 224 : result.Bind(CallStub(callable, context, input));
7933 112 : Goto(&done);
7934 : }
7935 :
7936 : BIND(&if_isnotreceiver);
7937 : {
7938 112 : result.Bind(input);
7939 112 : Goto(&done);
7940 : }
7941 :
7942 : BIND(&done);
7943 224 : return result.value();
7944 : }
7945 :
           : // ES#sec-toobject: delegates entirely to the ToObject builtin
           : // (which throws for undefined/null and wraps other primitives).
7946 224 : TNode<JSReceiver> CodeStubAssembler::ToObject(SloppyTNode<Context> context,
7947 : SloppyTNode<Object> input) {
7948 1904 : return CAST(CallBuiltin(Builtins::kToObject, context, input));
7949 : }
7950 :
           : // Inline fast path for ToObject: JSReceivers are returned unchanged;
           : // only non-receivers take the (deferred) builtin call.
7951 1680 : TNode<JSReceiver> CodeStubAssembler::ToObject_Inline(TNode<Context> context,
7952 : TNode<Object> input) {
7953 1680 : TVARIABLE(JSReceiver, result);
7954 1680 : Label if_isreceiver(this), if_isnotreceiver(this, Label::kDeferred);
7955 1680 : Label done(this);
7956 :
7957 1680 : BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7958 :
7959 : BIND(&if_isreceiver);
7960 : {
7961 : result = CAST(input);
7962 1680 : Goto(&done);
7963 : }
7964 :
7965 : BIND(&if_isnotreceiver);
7966 : {
7967 : result = ToObject(context, input);
7968 1680 : Goto(&done);
7969 : }
7970 :
7971 : BIND(&done);
7972 1680 : return result.value();
7973 : }
7974 :
           : // Converts {input} to a non-negative Smi index. undefined maps to 0;
           : // other values go through ToInteger (truncating -0). Jumps to
           : // {range_error} if the result is negative or doesn't fit in a Smi.
7975 560 : TNode<Smi> CodeStubAssembler::ToSmiIndex(TNode<Context> context,
7976 : TNode<Object> input,
7977 : Label* range_error) {
7978 560 : TVARIABLE(Smi, result);
7979 560 : Label check_undefined(this), return_zero(this), defined(this),
7980 560 : negative_check(this), done(this);
7981 :
7982 1120 : GotoIfNot(TaggedIsSmi(input), &check_undefined);
7983 : result = CAST(input);
7984 560 : Goto(&negative_check);
7985 :
7986 : BIND(&check_undefined);
7987 1120 : Branch(IsUndefined(input), &return_zero, &defined);
7988 :
7989 : BIND(&defined);
7990 : TNode<Number> integer_input =
7991 560 : CAST(CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
           : // A HeapNumber here means the integer is outside Smi range.
7992 1120 : GotoIfNot(TaggedIsSmi(integer_input), range_error);
7993 : result = CAST(integer_input);
7994 560 : Goto(&negative_check);
7995 :
7996 : BIND(&negative_check);
7997 1680 : Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);
7998 :
7999 : BIND(&return_zero);
8000 560 : result = SmiConstant(0);
8001 560 : Goto(&done);
8002 :
8003 : BIND(&done);
8004 560 : return result.value();
8005 : }
8006 :
           : // Converts {input} to a Smi length. Unlike ToSmiIndex, negative values
           : // clamp to 0 rather than erroring; only positive out-of-Smi-range
           : // HeapNumbers jump to {range_error}.
8007 168 : TNode<Smi> CodeStubAssembler::ToSmiLength(TNode<Context> context,
8008 : TNode<Object> input,
8009 : Label* range_error) {
8010 168 : TVARIABLE(Smi, result);
8011 168 : Label to_integer(this), negative_check(this),
8012 168 : heap_number_negative_check(this), return_zero(this), done(this);
8013 :
8014 336 : GotoIfNot(TaggedIsSmi(input), &to_integer);
8015 : result = CAST(input);
8016 168 : Goto(&negative_check);
8017 :
8018 : BIND(&to_integer);
8019 : {
8020 168 : TNode<Number> integer_input = CAST(
8021 : CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
8022 336 : GotoIfNot(TaggedIsSmi(integer_input), &heap_number_negative_check);
8023 : result = CAST(integer_input);
8024 168 : Goto(&negative_check);
8025 :
8026 : // integer_input can still be a negative HeapNumber here.
           : // Negative -> clamp to zero; non-negative HeapNumber -> too large.
8027 : BIND(&heap_number_negative_check);
8028 168 : TNode<HeapNumber> heap_number_input = CAST(integer_input);
8029 672 : Branch(IsTrue(CallBuiltin(Builtins::kLessThan, context, heap_number_input,
8030 504 : SmiConstant(0))),
8031 168 : &return_zero, range_error);
8032 : }
8033 :
8034 : BIND(&negative_check);
8035 504 : Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
8036 :
8037 : BIND(&return_zero);
8038 168 : result = SmiConstant(0);
8039 168 : Goto(&done);
8040 :
8041 : BIND(&done);
8042 168 : return result.value();
8043 : }
8044 :
           : // ES#sec-tolength, inline fast path: Smis are clamped to >= 0 directly;
           : // everything else calls the ToLength builtin.
8045 1736 : TNode<Number> CodeStubAssembler::ToLength_Inline(SloppyTNode<Context> context,
8046 : SloppyTNode<Object> input) {
8047 1736 : TNode<Smi> smi_zero = SmiConstant(0);
8048 : return Select<Number>(
8049 5208 : TaggedIsSmi(input), [=] { return SmiMax(CAST(input), smi_zero); },
8050 6944 : [=] { return CAST(CallBuiltin(Builtins::kToLength, context, input)); });
8051 : }
8052 :
           : // Inline fast path for ToInteger: a Smi is already an integer and is
           : // returned as-is; otherwise the appropriate ToInteger builtin is chosen
           : // based on whether -0 should be truncated to +0.
8053 3192 : TNode<Number> CodeStubAssembler::ToInteger_Inline(
8054 : SloppyTNode<Context> context, SloppyTNode<Object> input,
8055 : ToIntegerTruncationMode mode) {
8056 : Builtins::Name builtin = (mode == kNoTruncation)
8057 : ? Builtins::kToInteger
8058 3192 : : Builtins::kToInteger_TruncateMinusZero;
8059 : return Select<Number>(
8060 6384 : TaggedIsSmi(input), [=] { return CAST(input); },
8061 15960 : [=] { return CAST(CallBuiltin(builtin, context, input)); });
8062 : }
8063 :
           : // ES#sec-tointeger. Converts {input} to an integral Number. NaN maps to
           : // 0, HeapNumbers are truncated towards zero (optionally folding -0 to
           : // +0), and non-Numbers are first sent through NonNumberToNumber, looping
           : // back for another pass.
8064 112 : TNode<Number> CodeStubAssembler::ToInteger(SloppyTNode<Context> context,
8065 : SloppyTNode<Object> input,
8066 : ToIntegerTruncationMode mode) {
8067 : // We might need to loop once for ToNumber conversion.
8068 112 : TVARIABLE(Object, var_arg, input);
8069 112 : Label loop(this, &var_arg), out(this);
8070 112 : Goto(&loop);
8071 : BIND(&loop);
8072 : {
8073 : // Shared entry points.
8074 112 : Label return_zero(this, Label::kDeferred);
8075 :
8076 : // Load the current {arg} value.
8077 : TNode<Object> arg = var_arg.value();
8078 :
8079 : // Check if {arg} is a Smi.
           : // Smis are already integral -- nothing to do.
8080 224 : GotoIf(TaggedIsSmi(arg), &out);
8081 :
8082 : // Check if {arg} is a HeapNumber.
8083 112 : Label if_argisheapnumber(this),
8084 112 : if_argisnotheapnumber(this, Label::kDeferred);
8085 224 : Branch(IsHeapNumber(CAST(arg)), &if_argisheapnumber,
8086 112 : &if_argisnotheapnumber);
8087 :
8088 : BIND(&if_argisheapnumber);
8089 : {
8090 : TNode<HeapNumber> arg_hn = CAST(arg);
8091 : // Load the floating-point value of {arg}.
8092 : Node* arg_value = LoadHeapNumberValue(arg_hn);
8093 :
8094 : // Check if {arg} is NaN.
8095 224 : GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);
8096 :
8097 : // Truncate {arg} towards zero.
8098 112 : TNode<Float64T> value = Float64Trunc(arg_value);
8099 :
8100 112 : if (mode == kTruncateMinusZero) {
8101 : // Truncate -0.0 to 0.
           : // Note: -0.0 == 0.0 under Float64Equal, so this catches -0.
8102 168 : GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
8103 : }
8104 :
8105 224 : var_arg = ChangeFloat64ToTagged(value);
8106 112 : Goto(&out);
8107 : }
8108 :
8109 : BIND(&if_argisnotheapnumber);
8110 : {
8111 : // Need to convert {arg} to a Number first.
8112 224 : var_arg = UncheckedCast<Object>(
8113 : CallBuiltin(Builtins::kNonNumberToNumber, context, arg));
8114 112 : Goto(&loop);
8115 : }
8116 :
8117 : BIND(&return_zero);
8118 224 : var_arg = SmiConstant(0);
8119 112 : Goto(&out);
8120 : }
8121 :
8122 : BIND(&out);
8123 : if (mode == kTruncateMinusZero) {
8124 : CSA_ASSERT(this, IsNumberNormalized(CAST(var_arg.value())));
8125 : }
8126 112 : return CAST(var_arg.value());
8127 : }
8128 :
           : // Extracts the bitfield (word32 & mask) >> shift as an unsigned 32-bit
           : // value; counterpart of the BitField encoding helpers.
8129 35212 : TNode<Uint32T> CodeStubAssembler::DecodeWord32(SloppyTNode<Word32T> word32,
8130 : uint32_t shift, uint32_t mask) {
8131 : return UncheckedCast<Uint32T>(Word32Shr(
8132 105636 : Word32And(word32, Int32Constant(mask)), static_cast<int>(shift)));
8133 : }
8134 :
           : // Word-sized variant of DecodeWord32: extracts (word & mask) >> shift
           : // as an unsigned pointer-sized value.
8135 21352 : TNode<UintPtrT> CodeStubAssembler::DecodeWord(SloppyTNode<WordT> word,
8136 : uint32_t shift, uint32_t mask) {
8137 : return Unsigned(
8138 64056 : WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift)));
8139 : }
8140 :
           : // Replaces the bitfield selected by {shift}/{mask} in {word} with
           : // {value}: clears the field with the inverted mask, then ORs in the
           : // shifted value. Asserts that {value} fits entirely within the field.
8141 392 : TNode<WordT> CodeStubAssembler::UpdateWord(TNode<WordT> word,
8142 : TNode<WordT> value, uint32_t shift,
8143 : uint32_t mask) {
8144 784 : TNode<WordT> encoded_value = WordShl(value, static_cast<int>(shift));
8145 392 : TNode<IntPtrT> inverted_mask = IntPtrConstant(~static_cast<intptr_t>(mask));
8146 : // Ensure the {value} fits fully in the mask.
8147 : CSA_ASSERT(this, WordEqual(WordAnd(encoded_value, inverted_mask),
8148 : IntPtrConstant(0)));
8149 784 : return WordOr(WordAnd(word, inverted_mask), encoded_value);
8150 : }
8151 :
           : // Emits a 32-bit store that sets the native StatsCounter to {value};
           : // no code is generated unless native counters are enabled.
8152 0 : void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
8153 0 : if (FLAG_native_code_counters && counter->Enabled()) {
8154 : Node* counter_address =
8155 0 : ExternalConstant(ExternalReference::Create(counter));
8156 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
8157 0 : Int32Constant(value));
8158 : }
8159 0 : }
8160 :
           : // Emits load/add/store code incrementing the native StatsCounter by
           : // {delta} (> 0); a no-op unless native counters are enabled.
8161 3816 : void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
8162 : DCHECK_GT(delta, 0);
8163 3816 : if (FLAG_native_code_counters && counter->Enabled()) {
8164 : Node* counter_address =
8165 0 : ExternalConstant(ExternalReference::Create(counter));
8166 : // This operation has to be exactly 32-bit wide in case the external
8167 : // reference table redirects the counter to a uint32_t dummy_stats_counter_
8168 : // field.
8169 0 : Node* value = Load(MachineType::Int32(), counter_address);
8170 0 : value = Int32Add(value, Int32Constant(delta));
8171 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8172 : }
8173 3816 : }
8174 :
           : // Mirror of IncrementCounter: emits code decrementing the native
           : // StatsCounter by {delta} (> 0) when native counters are enabled.
8175 0 : void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
8176 : DCHECK_GT(delta, 0);
8177 0 : if (FLAG_native_code_counters && counter->Enabled()) {
8178 : Node* counter_address =
8179 0 : ExternalConstant(ExternalReference::Create(counter));
8180 : // This operation has to be exactly 32-bit wide in case the external
8181 : // reference table redirects the counter to a uint32_t dummy_stats_counter_
8182 : // field.
8183 0 : Node* value = Load(MachineType::Int32(), counter_address);
8184 0 : value = Int32Sub(value, Int32Constant(delta));
8185 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8186 : }
8187 0 : }
8188 :
           : // Adds {value} to a CSA Variable holding either an IntPtr or a Smi,
           : // as selected by {mode}; the DCHECKs verify the variable's machine
           : // representation matches the requested parameter mode.
8189 44520 : void CodeStubAssembler::Increment(Variable* variable, int value,
8190 : ParameterMode mode) {
8191 : DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
8192 : variable->rep() == MachineType::PointerRepresentation());
8193 : DCHECK_IMPLIES(mode == SMI_PARAMETERS,
8194 : variable->rep() == MachineRepresentation::kTagged ||
8195 : variable->rep() == MachineRepresentation::kTaggedSigned);
8196 44520 : variable->Bind(IntPtrOrSmiAdd(variable->value(),
8197 44520 : IntPtrOrSmiConstant(value, mode), mode));
8198 44520 : }
8199 :
           : // Keeps {label} alive by emitting a branch to it that can never be
           : // taken (0 == 1), preventing it from being treated as unused.
8200 56 : void CodeStubAssembler::Use(Label* label) {
8201 224 : GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
8202 56 : }
8203 :
           : // Classifies a property {key} as either an array index (-> {if_keyisindex}
           : // with the index in {var_index}) or a unique name (-> {if_keyisunique}
           : // with the name in {var_unique}). Handles Smi/HeapNumber indices,
           : // Symbols, internalized Strings, ThinStrings (forwarded to their actual
           : // string) and Oddballs (via their cached string). Bails out to
           : // {if_bailout} (or {if_notinternalized}, when given, for
           : // not-yet-internalized strings) otherwise.
8204 1460 : void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
8205 : Variable* var_index, Label* if_keyisunique,
8206 : Variable* var_unique, Label* if_bailout,
8207 : Label* if_notinternalized) {
8208 : DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
8209 : DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
8210 1460 : Comment("TryToName");
8211 :
8212 1460 : Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this),
8213 1460 : if_keyisother(this, Label::kDeferred);
8214 : // Handle Smi and HeapNumber keys.
8215 2920 : var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
8216 1460 : Goto(if_keyisindex);
8217 :
8218 : BIND(&if_keyisnotindex);
8219 : Node* key_map = LoadMap(key);
8220 1460 : var_unique->Bind(key);
8221 : // Symbols are unique.
8222 2920 : GotoIf(IsSymbolMap(key_map), if_keyisunique);
8223 : Node* key_instance_type = LoadMapInstanceType(key_map);
8224 : // Miss if |key| is not a String.
8225 : STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
8226 2920 : GotoIfNot(IsStringInstanceType(key_instance_type), &if_keyisother);
8227 :
8228 : // |key| is a String. Check if it has a cached array index.
8229 : Node* hash = LoadNameHashField(key);
8230 2920 : GotoIf(IsClearWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
8231 1460 : &if_hascachedindex);
8232 : // No cached array index. If the string knows that it contains an index,
8233 : // then it must be an uncacheable index. Handle this case in the runtime.
8234 2920 : GotoIf(IsClearWord32(hash, Name::kIsNotArrayIndexMask), if_bailout);
8235 : // Check if we have a ThinString.
8236 2920 : GotoIf(InstanceTypeEqual(key_instance_type, THIN_STRING_TYPE),
8237 1460 : &if_thinstring);
8238 2920 : GotoIf(InstanceTypeEqual(key_instance_type, THIN_ONE_BYTE_STRING_TYPE),
8239 1460 : &if_thinstring);
8240 : // Finally, check if |key| is internalized.
8241 : STATIC_ASSERT(kNotInternalizedTag != 0);
8242 4380 : GotoIf(IsSetWord32(key_instance_type, kIsNotInternalizedMask),
8243 1460 : if_notinternalized != nullptr ? if_notinternalized : if_bailout);
8244 1460 : Goto(if_keyisunique);
8245 :
8246 : BIND(&if_thinstring);
           : // ThinStrings forward to the internalized string they point at.
8247 1460 : var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
8248 1460 : Goto(if_keyisunique);
8249 :
8250 : BIND(&if_hascachedindex);
8251 2920 : var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
8252 1460 : Goto(if_keyisindex);
8253 :
8254 : BIND(&if_keyisother);
           : // Only Oddballs remain: use their cached string representation.
8255 2920 : GotoIfNot(InstanceTypeEqual(key_instance_type, ODDBALL_TYPE), if_bailout);
8256 1460 : var_unique->Bind(LoadObjectField(key, Oddball::kToStringOffset));
8257 1460 : Goto(if_keyisunique);
8258 1460 : }
8259 :
           : // Calls the C++ try_internalize_string function. The Smi-encoded result
           : // is either an array index (-> {if_index}), a kNotFound / kUnsupported
           : // sentinel (-> {if_not_internalized} / {if_bailout}), or -- if non-Smi --
           : // the internalized string itself (-> {if_internalized}).
8260 392 : void CodeStubAssembler::TryInternalizeString(
8261 : Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
8262 : Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
8263 : DCHECK(var_index->rep() == MachineType::PointerRepresentation());
8264 : DCHECK_EQ(var_internalized->rep(), MachineRepresentation::kTagged);
8265 : CSA_SLOW_ASSERT(this, IsString(string));
8266 : Node* function =
8267 784 : ExternalConstant(ExternalReference::try_internalize_string_function());
8268 : Node* const isolate_ptr =
8269 784 : ExternalConstant(ExternalReference::isolate_address(isolate()));
8270 : Node* result =
8271 : CallCFunction(function, MachineType::AnyTagged(),
8272 : std::make_pair(MachineType::Pointer(), isolate_ptr),
8273 392 : std::make_pair(MachineType::AnyTagged(), string));
8274 392 : Label internalized(this);
8275 784 : GotoIf(TaggedIsNotSmi(result), &internalized);
8276 784 : Node* word_result = SmiUntag(result);
8277 1176 : GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
8278 392 : if_not_internalized);
8279 1176 : GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
8280 392 : if_bailout);
8281 392 : var_index->Bind(word_result);
8282 392 : Goto(if_index);
8283 :
8284 : BIND(&internalized);
8285 392 : var_internalized->Bind(result);
8286 392 : Goto(if_internalized);
8287 392 : }
8288 :
           : // Maps a dictionary entry number to the FixedArray index of the entry's
           : // {field_index}-th field: entries start at kElementsStartIndex and each
           : // occupies kEntrySize slots.
8289 : template <typename Dictionary>
8290 9340 : TNode<IntPtrT> CodeStubAssembler::EntryToIndex(TNode<IntPtrT> entry,
8291 : int field_index) {
8292 : TNode<IntPtrT> entry_index =
8293 9340 : IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
8294 : return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
8295 18680 : field_index));
8296 : }
8297 :
           : // Loads a (possibly weak) element from a DescriptorArray at {index},
           : // offset by kHeaderSize plus {additional_offset} bytes.
8298 0 : TNode<MaybeObject> CodeStubAssembler::LoadDescriptorArrayElement(
8299 : TNode<DescriptorArray> object, Node* index, int additional_offset) {
8300 : return LoadArrayElement(object, DescriptorArray::kHeaderSize, index,
8301 8180 : additional_offset);
8302 : }
8303 :
           : // Loads the Name stored at {key_index} in a DescriptorArray.
8304 392 : TNode<Name> CodeStubAssembler::LoadKeyByKeyIndex(
8305 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8306 392 : return CAST(LoadDescriptorArrayElement(container, key_index, 0));
8307 : }
8308 :
           : // Loads the PropertyDetails for the descriptor whose key lives at
           : // {key_index}; the details slot is a fixed offset from the key slot.
8309 952 : TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
8310 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8311 : const int kKeyToDetails =
8312 : DescriptorArray::ToDetailsIndex(0) - DescriptorArray::ToKeyIndex(0);
8313 : return Unsigned(
8314 4264 : LoadAndUntagToWord32ArrayElement(container, DescriptorArray::kHeaderSize,
8315 952 : key_index, kKeyToDetails * kTaggedSize));
8316 : }
8317 :
           : // Loads the value for the descriptor whose key lives at {key_index};
           : // the value slot is a fixed offset from the key slot.
8318 2020 : TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
8319 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8320 : const int kKeyToValue =
8321 : DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
8322 2020 : return CAST(LoadDescriptorArrayElement(container, key_index,
8323 : kKeyToValue * kTaggedSize));
8324 : }
8325 :
           : // Like LoadValueByKeyIndex, but keeps the MaybeObject type since field
           : // types may be stored as weak references.
8326 728 : TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByKeyIndex(
8327 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8328 : const int kKeyToValue =
8329 : DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
8330 : return LoadDescriptorArrayElement(container, key_index,
8331 728 : kKeyToValue * kTaggedSize);
8332 : }
8333 :
           : // Converts a descriptor entry number into its flat array index
           : // (entries are kEntrySize slots each).
8334 4928 : TNode<IntPtrT> CodeStubAssembler::DescriptorEntryToIndex(
8335 : TNode<IntPtrT> descriptor_entry) {
8336 : return IntPtrMul(descriptor_entry,
8337 9856 : IntPtrConstant(DescriptorArray::kEntrySize));
8338 : }
8339 :
           : // Loads the key Name of the given (dynamic) descriptor entry.
8340 112 : TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8341 : TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8342 224 : return CAST(LoadDescriptorArrayElement(
8343 : container, DescriptorEntryToIndex(descriptor_entry),
8344 : DescriptorArray::ToKeyIndex(0) * kTaggedSize));
8345 : }
8346 :
           : // Compile-time-constant variant: loads the key Name of descriptor
           : // entry {descriptor_entry}, folding the offset into the constant.
8347 112 : TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8348 : TNode<DescriptorArray> container, int descriptor_entry) {
8349 224 : return CAST(LoadDescriptorArrayElement(
8350 : container, IntPtrConstant(0),
8351 : DescriptorArray::ToKeyIndex(descriptor_entry) * kTaggedSize));
8352 : }
8353 :
           : // Loads the PropertyDetails word of the given (dynamic) descriptor
           : // entry, untagging it to a 32-bit value.
8354 112 : TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8355 : TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8356 224 : return Unsigned(LoadAndUntagToWord32ArrayElement(
8357 : container, DescriptorArray::kHeaderSize,
8358 224 : DescriptorEntryToIndex(descriptor_entry),
8359 112 : DescriptorArray::ToDetailsIndex(0) * kTaggedSize));
8360 : }
8361 :
           : // Compile-time-constant variant of LoadDetailsByDescriptorEntry.
8362 672 : TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8363 : TNode<DescriptorArray> container, int descriptor_entry) {
8364 1344 : return Unsigned(LoadAndUntagToWord32ArrayElement(
8365 1344 : container, DescriptorArray::kHeaderSize, IntPtrConstant(0),
8366 1344 : DescriptorArray::ToDetailsIndex(descriptor_entry) * kTaggedSize));
8367 : }
8368 :
           : // Loads the value slot of the compile-time-constant descriptor entry.
8369 112 : TNode<Object> CodeStubAssembler::LoadValueByDescriptorEntry(
8370 : TNode<DescriptorArray> container, int descriptor_entry) {
8371 224 : return CAST(LoadDescriptorArrayElement(
8372 : container, IntPtrConstant(0),
8373 : DescriptorArray::ToValueIndex(descriptor_entry) * kTaggedSize));
8374 : }
8375 :
           : // Loads the field type of the given (dynamic) descriptor entry as a
           : // MaybeObject, since field types may be weak references.
8376 4704 : TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByDescriptorEntry(
8377 : TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8378 : return LoadDescriptorArrayElement(
8379 9408 : container, DescriptorEntryToIndex(descriptor_entry),
8380 4704 : DescriptorArray::ToValueIndex(0) * kTaggedSize);
8381 : }
8382 :
8383 : template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NameDictionary>(
8384 : TNode<IntPtrT>, int);
8385 : template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<GlobalDictionary>(
8386 : TNode<IntPtrT>, int);
8387 : template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NumberDictionary>(
8388 : TNode<IntPtrT>, int);
8389 :
8390 : // This must be kept in sync with HashTableBase::ComputeCapacity().
           : // Capacity = next power of two >= 1.5 * at_least_space_for, clamped to
           : // the minimum hash-table capacity.
8391 956 : TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
8392 : TNode<IntPtrT> at_least_space_for) {
8393 : TNode<IntPtrT> capacity = IntPtrRoundUpToPowerOfTwo32(
8394 1912 : IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
8395 1912 : return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
8396 : }
           : // max(left, right): folded to a constant when both operands are
           : // compile-time constants, otherwise emitted as a select.
8398 1685 : TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
8399 : SloppyTNode<IntPtrT> right) {
8400 : intptr_t left_constant;
8401 : intptr_t right_constant;
8402 2302 : if (ToIntPtrConstant(left, left_constant) &&
8403 617 : ToIntPtrConstant(right, right_constant)) {
8404 617 : return IntPtrConstant(std::max(left_constant, right_constant));
8405 : }
8406 : return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
8407 2136 : right);
8408 : }
8409 :
           : // min(left, right): folded to a constant when both operands are
           : // compile-time constants, otherwise emitted as a select.
8410 1121 : TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
8411 : SloppyTNode<IntPtrT> right) {
8412 : intptr_t left_constant;
8413 : intptr_t right_constant;
8414 1122 : if (ToIntPtrConstant(left, left_constant) &&
8415 1 : ToIntPtrConstant(right, right_constant)) {
8416 1 : return IntPtrConstant(std::min(left_constant, right_constant));
8417 : }
8418 : return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
8419 2240 : right);
8420 : }
8421 :
           : // NameDictionary stores Names (or the-hole) directly as keys, so the
           : // key itself is the name.
8422 : template <>
8423 0 : TNode<HeapObject> CodeStubAssembler::LoadName<NameDictionary>(
8424 : TNode<HeapObject> key) {
8425 : CSA_ASSERT(this, Word32Or(IsTheHole(key), IsName(key)));
8426 0 : return key;
8427 : }
8428 :
           : // GlobalDictionary stores PropertyCells as keys; the name lives in the
           : // cell's kNameOffset field.
8429 : template <>
8430 0 : TNode<HeapObject> CodeStubAssembler::LoadName<GlobalDictionary>(
8431 : TNode<HeapObject> key) {
8432 : TNode<PropertyCell> property_cell = CAST(key);
8433 0 : return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset));
8434 : }
8435 :
           : // Open-addressing lookup of {unique_name} in a Name/GlobalDictionary
           : // using quadratic-style probing (probe i advances by i, mirroring
           : // Dictionary::NextProbe). In kFindExisting mode, jumps to {if_found}
           : // with the entry's array index in {var_name_index}, or {if_not_found}
           : // on an undefined slot. In kFindInsertionIndex mode, jumps to
           : // {if_not_found} at the first free (undefined or the-hole) slot.
8436 : template <typename Dictionary>
8437 6744 : void CodeStubAssembler::NameDictionaryLookup(
8438 : TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
8439 : TVariable<IntPtrT>* var_name_index, Label* if_not_found, LookupMode mode) {
8440 : static_assert(std::is_same<Dictionary, NameDictionary>::value ||
8441 : std::is_same<Dictionary, GlobalDictionary>::value,
8442 : "Unexpected NameDictionary");
8443 : DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
8444 : DCHECK_IMPLIES(mode == kFindInsertionIndex, if_found == nullptr);
8445 6744 : Comment("NameDictionaryLookup");
8446 : CSA_ASSERT(this, IsUniqueName(unique_name));
8447 :
           : // Capacity is a power of two, so (hash & mask) is the first probe.
8448 6744 : TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
8449 6744 : TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
8450 20232 : TNode<WordT> hash = ChangeUint32ToWord(LoadNameHash(unique_name));
8451 :
8452 : // See Dictionary::FirstProbe().
8453 6744 : TNode<IntPtrT> count = IntPtrConstant(0);
8454 6744 : TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
8455 : Node* undefined = UndefinedConstant();
8456 :
8457 : // Appease the variable merging algorithm for "Goto(&loop)" below.
8458 6744 : *var_name_index = IntPtrConstant(0);
8459 :
8460 : TVARIABLE(IntPtrT, var_count, count);
8461 : TVARIABLE(IntPtrT, var_entry, entry);
8462 6744 : Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
8463 13488 : Label loop(this, arraysize(loop_vars), loop_vars);
8464 6744 : Goto(&loop);
8465 : BIND(&loop);
8466 : {
8467 : TNode<IntPtrT> entry = var_entry.value();
8468 :
8469 : TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
8470 : *var_name_index = index;
8471 :
8472 : TNode<HeapObject> current =
8473 : CAST(UnsafeLoadFixedArrayElement(dictionary, index));
           : // undefined terminates the probe sequence in both modes.
8474 6744 : GotoIf(WordEqual(current, undefined), if_not_found);
8475 6744 : if (mode == kFindExisting) {
8476 : current = LoadName<Dictionary>(current);
8477 5736 : GotoIf(WordEqual(current, unique_name), if_found);
8478 : } else {
8479 : DCHECK_EQ(kFindInsertionIndex, mode);
           : // A deleted (the-hole) slot is a valid insertion point.
8480 1008 : GotoIf(WordEqual(current, TheHoleConstant()), if_not_found);
8481 : }
8482 :
8483 : // See Dictionary::NextProbe().
8484 6744 : Increment(&var_count);
8485 6744 : entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
8486 :
8487 : var_entry = entry;
8488 6744 : Goto(&loop);
8489 : }
8490 6744 : }
8491 :
8492 : // Instantiate template methods to workaround GCC compilation issue.
8493 : template V8_EXPORT_PRIVATE void
8494 : CodeStubAssembler::NameDictionaryLookup<NameDictionary>(TNode<NameDictionary>,
8495 : TNode<Name>, Label*,
8496 : TVariable<IntPtrT>*,
8497 : Label*, LookupMode);
8498 : template V8_EXPORT_PRIVATE void CodeStubAssembler::NameDictionaryLookup<
8499 : GlobalDictionary>(TNode<GlobalDictionary>, TNode<Name>, Label*,
8500 : TVariable<IntPtrT>*, Label*, LookupMode);
8501 :
           : // CSA re-implementation of v8::internal::ComputeUnseededHash: a fixed
           : // 32-bit integer mix (shift/xor/multiply), masked to 30 bits so the
           : // result always fits in a Smi.
8502 336 : Node* CodeStubAssembler::ComputeUnseededHash(Node* key) {
8503 : // See v8::internal::ComputeUnseededHash()
8504 672 : Node* hash = TruncateIntPtrToInt32(key);
8505 1680 : hash = Int32Add(Word32Xor(hash, Int32Constant(0xFFFFFFFF)),
8506 1344 : Word32Shl(hash, Int32Constant(15)));
8507 1344 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
8508 1344 : hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
8509 1344 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
8510 1008 : hash = Int32Mul(hash, Int32Constant(2057));
8511 1344 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
8512 1008 : return Word32And(hash, Int32Constant(0x3FFFFFFF));
8513 : }
8514 :
// Computes the isolate-seeded integer hash for |key| by calling out to the
// C++ runtime helper behind ExternalReference::compute_integer_hash().
// Returns the hash as an untagged uint32 node. |key| is truncated to 32 bits
// before the call.
Node* CodeStubAssembler::ComputeSeededHash(Node* key) {
  Node* const function_addr =
      ExternalConstant(ExternalReference::compute_integer_hash());
  Node* const isolate_ptr =
      ExternalConstant(ExternalReference::isolate_address(isolate()));

  MachineType type_ptr = MachineType::Pointer();
  MachineType type_uint32 = MachineType::Uint32();

  // C signature (from the machine types used here): uint32_t fn(Isolate*,
  // uint32_t).
  Node* const result = CallCFunction(
      function_addr, type_uint32, std::make_pair(type_ptr, isolate_ptr),
      std::make_pair(type_uint32, TruncateIntPtrToInt32(key)));
  return result;
}
8529 :
// Looks up |intptr_index| in a NumberDictionary using open addressing with
// the probe sequence from Dictionary::FirstProbe()/NextProbe().
// On success jumps to |if_found| with *var_entry holding the entry number;
// jumps to |if_not_found| when an undefined sentinel is hit. Keys stored as
// Smis are compared as untagged words; keys stored as HeapNumbers are
// compared against the float64 form of |intptr_index|. The-hole entries
// (deleted slots) are skipped.
void CodeStubAssembler::NumberDictionaryLookup(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
  CSA_ASSERT(this, IsNumberDictionary(dictionary));
  DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
  Comment("NumberDictionaryLookup");

  // Capacity is a power of two, so "hash & (capacity - 1)" is the bucket.
  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NumberDictionary>(dictionary));
  TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));

  TNode<WordT> hash = ChangeUint32ToWord(ComputeSeededHash(intptr_index));
  // Precompute the float64 form once for HeapNumber key comparisons below.
  Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);

  // See Dictionary::FirstProbe().
  TNode<IntPtrT> count = IntPtrConstant(0);
  TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));

  Node* undefined = UndefinedConstant();
  Node* the_hole = TheHoleConstant();

  TVARIABLE(IntPtrT, var_count, count);
  Variable* loop_vars[] = {&var_count, var_entry};
  Label loop(this, 2, loop_vars);
  *var_entry = entry;
  Goto(&loop);
  BIND(&loop);
  {
    TNode<IntPtrT> entry = var_entry->value();

    TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(entry);
    Node* current = UnsafeLoadFixedArrayElement(dictionary, index);
    // Undefined marks a never-used slot: the key is absent.
    GotoIf(WordEqual(current, undefined), if_not_found);
    Label next_probe(this);
    {
      Label if_currentissmi(this), if_currentisnotsmi(this);
      Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
      BIND(&if_currentissmi);
      {
        Node* current_value = SmiUntag(current);
        Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
      }
      BIND(&if_currentisnotsmi);
      {
        // The-hole marks a deleted slot; keep probing past it.
        GotoIf(WordEqual(current, the_hole), &next_probe);
        // Current must be the Number.
        Node* current_value = LoadHeapNumberValue(current);
        Branch(Float64Equal(current_value, key_as_float64), if_found,
               &next_probe);
      }
    }

    BIND(&next_probe);
    // See Dictionary::NextProbe().
    Increment(&var_count);
    entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));

    *var_entry = entry;
    Goto(&loop);
  }
}
8590 :
// Loads the value stored for |intptr_index| in a NumberDictionary.
// Jumps to |if_hole| when the key is absent and to |not_data| when the entry
// exists but is not a plain data property (e.g. an accessor). Otherwise
// returns the stored value.
TNode<Object> CodeStubAssembler::BasicLoadNumberDictionaryElement(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    Label* not_data, Label* if_hole) {
  TVARIABLE(IntPtrT, var_entry);
  Label if_found(this);
  NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
                         if_hole);
  BIND(&if_found);

  // Check that the value is a data property.
  TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
  TNode<Uint32T> details =
      LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
  TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
  // TODO(jkummerow): Support accessors without missing?
  GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
  // Finally, load the value.
  return LoadValueByKeyIndex<NumberDictionary>(dictionary, index);
}
8610 :
// Stores |value| for |intptr_index| in a NumberDictionary.
// Jumps to |if_hole| when the key is absent, to |not_data| when the entry is
// not a plain data property, and to |read_only| when the property details
// have the read-only attribute set. Does not grow or rehash the dictionary;
// it only overwrites an existing entry's value.
void CodeStubAssembler::BasicStoreNumberDictionaryElement(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    TNode<Object> value, Label* not_data, Label* if_hole, Label* read_only) {
  TVARIABLE(IntPtrT, var_entry);
  Label if_found(this);
  NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
                         if_hole);
  BIND(&if_found);

  // Check that the value is a data property.
  TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
  TNode<Uint32T> details =
      LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
  TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
  // TODO(jkummerow): Support accessors without missing?
  GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);

  // Check that the property is writable.
  GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
         read_only);

  // Finally, store the value.
  StoreValueByKeyIndex<NumberDictionary>(dictionary, index, value);
}
8635 :
// Generic template is intentionally not implemented; only the
// NameDictionary specialization below is supported.
template <class Dictionary>
void CodeStubAssembler::FindInsertionEntry(TNode<Dictionary> dictionary,
                                           TNode<Name> key,
                                           TVariable<IntPtrT>* var_key_index) {
  UNREACHABLE();
}
8642 :
// Finds a free slot for |key| by running the dictionary lookup in
// kFindInsertionIndex mode: the probe sequence stops at the first
// undefined/the-hole slot and leaves its key index in *var_key_index.
// The |if_found| label is nullptr because the caller guarantees the key is
// not already present.
template <>
void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
    TNode<NameDictionary> dictionary, TNode<Name> key,
    TVariable<IntPtrT>* var_key_index) {
  Label done(this);
  NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
                                       &done, kFindInsertionIndex);
  BIND(&done);
}
8652 :
// Generic template is intentionally not implemented.
template <class Dictionary>
void CodeStubAssembler::InsertEntry(TNode<Dictionary> dictionary,
                                    TNode<Name> key, TNode<Object> value,
                                    TNode<IntPtrT> index,
                                    TNode<Smi> enum_index) {
  UNREACHABLE();  // Use specializations instead.
}
8660 :
// Writes a new entry (name, value, details) into a NameDictionary at the
// already-computed key |index|. |enum_index| is packed into the details
// word's DictionaryStorageField. Private symbols are additionally marked
// DONT_ENUM. Bookkeeping (element count, next enumeration index) is the
// caller's responsibility — see Add() below.
template <>
void CodeStubAssembler::InsertEntry<NameDictionary>(
    TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
    TNode<IntPtrT> index, TNode<Smi> enum_index) {
  // Store name and value.
  StoreFixedArrayElement(dictionary, index, name);
  StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);

  // Prepare details of the new property.
  PropertyDetails d(kData, NONE, PropertyCellType::kNoCell);
  enum_index =
      SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
  // We OR over the actual index below, so we expect the initial value to be 0.
  DCHECK_EQ(0, d.dictionary_index());
  TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index));

  // Private names must be marked non-enumerable.
  Label not_private(this, &var_details);
  GotoIfNot(IsPrivateSymbol(name), &not_private);
  TNode<Smi> dont_enum =
      SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
  var_details = SmiOr(var_details.value(), dont_enum);
  Goto(&not_private);
  BIND(&not_private);

  // Finally, store the details.
  StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
                                         var_details.value());
}
8690 :
// Inserting into a GlobalDictionary (property cells) is not supported from
// CSA code; adding global properties goes through the runtime instead.
template <>
void CodeStubAssembler::InsertEntry<GlobalDictionary>(
    TNode<GlobalDictionary> dictionary, TNode<Name> key, TNode<Object> value,
    TNode<IntPtrT> index, TNode<Smi> enum_index) {
  UNIMPLEMENTED();
}
8697 :
// Adds a (key, value) data property to |dictionary| without growing it.
// Jumps to |bailout| (leaving the dictionary untouched) when:
//  - less than 33% of the capacity would remain free after the insertion,
//  - more than 50% of the free slots are deleted entries (rehash needed), or
//  - the next enumeration index would overflow its storage field.
// Past those checks the insertion is unconditional: enumeration index and
// element count are bumped, then the entry is written via InsertEntry().
template <class Dictionary>
void CodeStubAssembler::Add(TNode<Dictionary> dictionary, TNode<Name> key,
                            TNode<Object> value, Label* bailout) {
  CSA_ASSERT(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)));
  TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
  TNode<Smi> nof = GetNumberOfElements<Dictionary>(dictionary);
  TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
  // Require 33% to still be free after adding additional_elements.
  // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
  // But that's OK here because it's only used for a comparison.
  TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
  GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
  // Require rehashing if more than 50% of free elements are deleted elements.
  TNode<Smi> deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
  CSA_ASSERT(this, SmiAbove(capacity, new_nof));
  TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
  GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);

  TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
  TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
  TNode<Smi> max_enum_index =
      SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
  GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);

  // No more bailouts after this point.
  // Operations from here on can have side effects.

  SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
  SetNumberOfElements<Dictionary>(dictionary, new_nof);

  TVARIABLE(IntPtrT, var_key_index);
  FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
  InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
                          enum_index);
}
8733 :
// Explicit instantiation: Add() is only used with NameDictionary.
template void CodeStubAssembler::Add<NameDictionary>(TNode<NameDictionary>,
                                                     TNode<Name>, TNode<Object>,
                                                     Label*);
8737 :
// Linear key scan over the first |number_of_valid_entries| entries of a
// DescriptorArray/TransitionArray-like |array|. Iterates from the last entry
// backwards; on a pointer-equal match jumps to |if_found| with the key's
// element index in *var_name_index, otherwise falls through to
// |if_not_found|. Pointer equality suffices because |unique_name| is a
// unique name (asserted below).
template <typename Array>
void CodeStubAssembler::LookupLinear(TNode<Name> unique_name,
                                     TNode<Array> array,
                                     TNode<Uint32T> number_of_valid_entries,
                                     Label* if_found,
                                     TVariable<IntPtrT>* var_name_index,
                                     Label* if_not_found) {
  static_assert(std::is_base_of<FixedArray, Array>::value ||
                    std::is_base_of<WeakFixedArray, Array>::value ||
                    std::is_base_of<DescriptorArray, Array>::value,
                "T must be a descendant of FixedArray or a WeakFixedArray");
  Comment("LookupLinear");
  CSA_ASSERT(this, IsUniqueName(unique_name));
  TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
  TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
  TNode<IntPtrT> last_exclusive = IntPtrAdd(
      first_inclusive,
      IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));

  // Walk backwards: start past the last valid entry and pre-decrement by one
  // entry size each iteration until reaching the first entry.
  BuildFastLoop(last_exclusive, first_inclusive,
                [=](SloppyTNode<IntPtrT> name_index) {
                  TNode<MaybeObject> element =
                      LoadArrayElement(array, Array::kHeaderSize, name_index);
                  TNode<Name> candidate_name = CAST(element);
                  *var_name_index = name_index;
                  GotoIf(WordEqual(candidate_name, unique_name), if_found);
                },
                -Array::kEntrySize, INTPTR_PARAMETERS, IndexAdvanceMode::kPre);
  Goto(if_not_found);
}
8768 :
// Number of valid entries in a DescriptorArray is its descriptor count.
template <>
TNode<Uint32T> CodeStubAssembler::NumberOfEntries<DescriptorArray>(
    TNode<DescriptorArray> descriptors) {
  return Unsigned(LoadNumberOfDescriptors(descriptors));
}
8774 :
// Number of valid entries in a TransitionArray: 0 if the array is too short
// to even contain the header slots, otherwise the transition count stored at
// kTransitionLengthIndex.
template <>
TNode<Uint32T> CodeStubAssembler::NumberOfEntries<TransitionArray>(
    TNode<TransitionArray> transitions) {
  TNode<IntPtrT> length = LoadAndUntagWeakFixedArrayLength(transitions);
  return Select<Uint32T>(
      UintPtrLessThan(length, IntPtrConstant(TransitionArray::kFirstIndex)),
      [=] { return Unsigned(Int32Constant(0)); },
      [=] {
        return Unsigned(LoadAndUntagToWord32ArrayElement(
            transitions, WeakFixedArray::kHeaderSize,
            IntPtrConstant(TransitionArray::kTransitionLengthIndex)));
      });
}
8788 :
8789 : template <typename Array>
8790 12596 : TNode<IntPtrT> CodeStubAssembler::EntryIndexToIndex(
8791 : TNode<Uint32T> entry_index) {
8792 12596 : TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
8793 12596 : TNode<Word32T> index = Int32Mul(entry_index, entry_size);
8794 12596 : return ChangeInt32ToIntPtr(index);
8795 : }
8796 :
8797 : template <typename Array>
8798 2588 : TNode<IntPtrT> CodeStubAssembler::ToKeyIndex(TNode<Uint32T> entry_index) {
8799 : return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
8800 5176 : EntryIndexToIndex<Array>(entry_index));
8801 : }
8802 :
// Explicit instantiations for the two array kinds ToKeyIndex is used with.
template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<DescriptorArray>(
    TNode<Uint32T>);
template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<TransitionArray>(
    TNode<Uint32T>);
8807 :
// DescriptorArrays are not physically sorted; each entry's details word
// carries a pointer (DescriptorPointer field) to its position in
// hash-sorted order. Decode and return it.
template <>
TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<DescriptorArray>(
    TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
  TNode<Uint32T> details =
      DescriptorArrayGetDetails(descriptors, descriptor_number);
  return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
}
8815 :
// TransitionArrays are already stored in sorted order, so the sorted index
// is the entry number itself.
template <>
TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<TransitionArray>(
    TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
  return transition_number;
}
8821 :
// Loads the key (a Name) of entry |entry_index| from a DescriptorArray or
// TransitionArray. The constant byte offset of entry 0's key is folded into
// the access; the entry index only contributes the scaled element offset.
template <typename Array>
TNode<Name> CodeStubAssembler::GetKey(TNode<Array> array,
                                      TNode<Uint32T> entry_index) {
  static_assert(std::is_base_of<TransitionArray, Array>::value ||
                    std::is_base_of<DescriptorArray, Array>::value,
                "T must be a descendant of DescriptorArray or TransitionArray");
  const int key_offset = Array::ToKeyIndex(0) * kTaggedSize;
  TNode<MaybeObject> element =
      LoadArrayElement(array, Array::kHeaderSize,
                       EntryIndexToIndex<Array>(entry_index), key_offset);
  return CAST(element);
}
8834 :
// Explicit instantiations for the two array kinds GetKey is used with.
template TNode<Name> CodeStubAssembler::GetKey<DescriptorArray>(
    TNode<DescriptorArray>, TNode<Uint32T>);
template TNode<Name> CodeStubAssembler::GetKey<TransitionArray>(
    TNode<TransitionArray>, TNode<Uint32T>);
8839 :
// Loads the PropertyDetails word of descriptor |descriptor_number| as an
// untagged uint32 (the stored Smi is untagged on load).
TNode<Uint32T> CodeStubAssembler::DescriptorArrayGetDetails(
    TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
  const int details_offset = DescriptorArray::ToDetailsIndex(0) * kTaggedSize;
  return Unsigned(LoadAndUntagToWord32ArrayElement(
      descriptors, DescriptorArray::kHeaderSize,
      EntryIndexToIndex<DescriptorArray>(descriptor_number), details_offset));
}
8847 :
// Binary search for |unique_name| in an array whose entries are sorted by
// name hash (via GetSortedKeyIndex). Phase 1 narrows [low, high] to the
// first entry whose hash is >= the target hash; phase 2 linearly scans
// forward over the run of equal hashes until a pointer-equal name is found
// (goto |if_found| with the key's element index in *var_name_index) or the
// hash run ends (goto |if_not_found|). Entries at sorted positions >=
// |number_of_valid_entries| are treated as not found.
template <typename Array>
void CodeStubAssembler::LookupBinary(TNode<Name> unique_name,
                                     TNode<Array> array,
                                     TNode<Uint32T> number_of_valid_entries,
                                     Label* if_found,
                                     TVariable<IntPtrT>* var_name_index,
                                     Label* if_not_found) {
  Comment("LookupBinary");
  TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)));
  // Note: the search range is bounded by the total entry count, not by
  // |number_of_valid_entries|; validity is checked in the scan phase.
  TNode<Uint32T> limit =
      Unsigned(Int32Sub(NumberOfEntries<Array>(array), Int32Constant(1)));
  TVARIABLE(Uint32T, var_high, limit);
  TNode<Uint32T> hash = LoadNameHashField(unique_name);
  CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));

  // Assume non-empty array.
  CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));

  Label binary_loop(this, {&var_high, &var_low});
  Goto(&binary_loop);
  BIND(&binary_loop);
  {
    // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
    TNode<Uint32T> mid = Unsigned(
        Int32Add(var_low.value(),
                 Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1)));
    // mid_name = array->GetSortedKey(mid).
    TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
    TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);

    TNode<Uint32T> mid_hash = LoadNameHashField(mid_name);

    Label mid_greater(this), mid_less(this), merge(this);
    Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
    BIND(&mid_greater);
    {
      var_high = mid;
      Goto(&merge);
    }
    BIND(&mid_less);
    {
      var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
      Goto(&merge);
    }
    BIND(&merge);
    GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
  }

  // Scan forward over the run of entries that share the target hash.
  Label scan_loop(this, &var_low);
  Goto(&scan_loop);
  BIND(&scan_loop);
  {
    GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);

    TNode<Uint32T> sort_index =
        GetSortedKeyIndex<Array>(array, var_low.value());
    TNode<Name> current_name = GetKey<Array>(array, sort_index);
    TNode<Uint32T> current_hash = LoadNameHashField(current_name);
    GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
    Label next(this);
    GotoIf(WordNotEqual(current_name, unique_name), &next);
    // The name matched, but entries past the valid range do not count.
    GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
           if_not_found);
    *var_name_index = ToKeyIndex<Array>(sort_index);
    Goto(if_found);

    BIND(&next);
    var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
    Goto(&scan_loop);
  }
}
8919 :
// Iterates the enumerable own properties of |object| (which must have only
// simple properties — enforced via EnsureOnlyHasSimpleProperties, which
// otherwise jumps to |bailout|), invoking |body| with each (key, value).
// In kEnumerationOrder mode all string-named properties are visited first,
// then the loop is re-run over the symbol-named range. While the object's map
// stays unchanged (|var_stable|), property data is decoded directly from the
// preloaded descriptor array; once |body| mutates the object's shape, each
// subsequent property is re-looked-up the slow way. Accessor properties are
// evaluated via CallGetterIfAccessor, falling back to %GetProperty.
void CodeStubAssembler::ForEachEnumerableOwnProperty(
    TNode<Context> context, TNode<Map> map, TNode<JSObject> object,
    ForEachEnumerationMode mode, const ForEachKeyValueFunction& body,
    Label* bailout) {
  TNode<Int32T> type = LoadMapInstanceType(map);
  TNode<Uint32T> bit_field3 = EnsureOnlyHasSimpleProperties(map, type, bailout);

  TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
  TNode<Uint32T> nof_descriptors =
      DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3);

  // True while |object| still has |map|, i.e. |descriptors| is trustworthy.
  TVARIABLE(BoolT, var_stable, Int32TrueConstant());

  TVARIABLE(BoolT, var_has_symbol, Int32FalseConstant());
  // false - iterate only string properties, true - iterate only symbol
  // properties
  TVARIABLE(BoolT, var_is_symbol_processing_loop, Int32FalseConstant());
  TVARIABLE(IntPtrT, var_start_key_index,
            ToKeyIndex<DescriptorArray>(Unsigned(Int32Constant(0))));
  // Note: var_end_key_index is exclusive for the loop
  TVARIABLE(IntPtrT, var_end_key_index,
            ToKeyIndex<DescriptorArray>(nof_descriptors));
  VariableList list(
      {&var_stable, &var_has_symbol, &var_is_symbol_processing_loop,
       &var_start_key_index, &var_end_key_index},
      zone());
  Label descriptor_array_loop(
      this, {&var_stable, &var_has_symbol, &var_is_symbol_processing_loop,
             &var_start_key_index, &var_end_key_index});

  Goto(&descriptor_array_loop);
  BIND(&descriptor_array_loop);

  BuildFastLoop(
      list, var_start_key_index.value(), var_end_key_index.value(),
      [=, &var_stable, &var_has_symbol, &var_is_symbol_processing_loop,
       &var_start_key_index, &var_end_key_index](Node* index) {
        TNode<IntPtrT> descriptor_key_index =
            TNode<IntPtrT>::UncheckedCast(index);
        TNode<Name> next_key =
            LoadKeyByKeyIndex(descriptors, descriptor_key_index);

        TVARIABLE(Object, var_value, SmiConstant(0));
        Label callback(this), next_iteration(this);

        if (mode == kEnumerationOrder) {
          // |next_key| is either a string or a symbol
          // Skip strings or symbols depending on
          // |var_is_symbol_processing_loop|.
          Label if_string(this), if_symbol(this), if_name_ok(this);
          Branch(IsSymbol(next_key), &if_symbol, &if_string);
          BIND(&if_symbol);
          {
            // Process symbol property when |var_is_symbol_processing_loop| is
            // true.
            GotoIf(var_is_symbol_processing_loop.value(), &if_name_ok);
            // The first iteration needs to calculate the smaller range for
            // processing symbols.
            Label if_first_symbol(this);
            // var_end_key_index is still inclusive at this point.
            var_end_key_index = descriptor_key_index;
            Branch(var_has_symbol.value(), &next_iteration, &if_first_symbol);
            BIND(&if_first_symbol);
            {
              var_start_key_index = descriptor_key_index;
              var_has_symbol = Int32TrueConstant();
              Goto(&next_iteration);
            }
          }
          BIND(&if_string);
          {
            CSA_ASSERT(this, IsString(next_key));
            // Process string property when |var_is_symbol_processing_loop| is
            // false.
            Branch(var_is_symbol_processing_loop.value(), &next_iteration,
                   &if_name_ok);
          }
          BIND(&if_name_ok);
        }
        {
          TVARIABLE(Map, var_map);
          TVARIABLE(HeapObject, var_meta_storage);
          TVARIABLE(IntPtrT, var_entry);
          TVARIABLE(Uint32T, var_details);
          Label if_found(this);

          Label if_found_fast(this), if_found_dict(this);

          Label if_stable(this), if_not_stable(this);
          Branch(var_stable.value(), &if_stable, &if_not_stable);
          BIND(&if_stable);
          {
            // Directly decode from the descriptor array if |object| did not
            // change shape.
            var_map = map;
            var_meta_storage = descriptors;
            var_entry = Signed(descriptor_key_index);
            Goto(&if_found_fast);
          }
          BIND(&if_not_stable);
          {
            // If the map did change, do a slower lookup. We are still
            // guaranteed that the object has a simple shape, and that the key
            // is a name.
            var_map = LoadMap(object);
            TryLookupPropertyInSimpleObject(
                object, var_map.value(), next_key, &if_found_fast,
                &if_found_dict, &var_meta_storage, &var_entry, &next_iteration);
          }

          BIND(&if_found_fast);
          {
            TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
            TNode<IntPtrT> name_index = var_entry.value();

            // Skip non-enumerable properties.
            var_details = LoadDetailsByKeyIndex(descriptors, name_index);
            GotoIf(IsSetWord32(var_details.value(),
                               PropertyDetails::kAttributesDontEnumMask),
                   &next_iteration);

            LoadPropertyFromFastObject(object, var_map.value(), descriptors,
                                       name_index, var_details.value(),
                                       &var_value);
            Goto(&if_found);
          }
          BIND(&if_found_dict);
          {
            TNode<NameDictionary> dictionary = CAST(var_meta_storage.value());
            TNode<IntPtrT> entry = var_entry.value();

            TNode<Uint32T> details =
                LoadDetailsByKeyIndex<NameDictionary>(dictionary, entry);
            // Skip non-enumerable properties.
            GotoIf(
                IsSetWord32(details, PropertyDetails::kAttributesDontEnumMask),
                &next_iteration);

            var_details = details;
            var_value = LoadValueByKeyIndex<NameDictionary>(dictionary, entry);
            Goto(&if_found);
          }

          // Here we have details and value which could be an accessor.
          BIND(&if_found);
          {
            Label slow_load(this, Label::kDeferred);

            var_value = CallGetterIfAccessor(var_value.value(),
                                             var_details.value(), context,
                                             object, &slow_load, kCallJSGetter);
            Goto(&callback);

            BIND(&slow_load);
            var_value =
                CallRuntime(Runtime::kGetProperty, context, object, next_key);
            Goto(&callback);

            BIND(&callback);
            body(next_key, var_value.value());

            // Check if |object| is still stable, i.e. we can proceed using
            // property details from preloaded |descriptors|.
            var_stable =
                Select<BoolT>(var_stable.value(),
                              [=] { return WordEqual(LoadMap(object), map); },
                              [=] { return Int32FalseConstant(); });

            Goto(&next_iteration);
          }
        }
        BIND(&next_iteration);
      },
      DescriptorArray::kEntrySize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);

  if (mode == kEnumerationOrder) {
    Label done(this);
    GotoIf(var_is_symbol_processing_loop.value(), &done);
    GotoIfNot(var_has_symbol.value(), &done);
    // All string properties are processed, now process symbol properties.
    var_is_symbol_processing_loop = Int32TrueConstant();
    // Add DescriptorArray::kEntrySize to make the var_end_key_index exclusive
    // as BuildFastLoop() expects.
    Increment(&var_end_key_index, DescriptorArray::kEntrySize,
              INTPTR_PARAMETERS);
    Goto(&descriptor_array_loop);

    BIND(&done);
  }
}
9110 :
// Looks up |unique_name| among the map's own descriptors. The valid entry
// count is decoded from |bitfield3|; the actual search is delegated to
// Lookup<DescriptorArray>() (linear or binary depending on size).
void CodeStubAssembler::DescriptorLookup(
    SloppyTNode<Name> unique_name, SloppyTNode<DescriptorArray> descriptors,
    SloppyTNode<Uint32T> bitfield3, Label* if_found,
    TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
  Comment("DescriptorArrayLookup");
  TNode<Uint32T> nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
  Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
                          var_name_index, if_not_found);
}
9120 :
// Looks up |unique_name| in a TransitionArray, mirroring DescriptorLookup()
// but reading the valid entry count from the array itself.
void CodeStubAssembler::TransitionLookup(
    SloppyTNode<Name> unique_name, SloppyTNode<TransitionArray> transitions,
    Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
  Comment("TransitionArrayLookup");
  TNode<Uint32T> number_of_valid_transitions =
      NumberOfEntries<TransitionArray>(transitions);
  Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
                          if_found, var_name_index, if_not_found);
}
9130 :
// Dispatches a name lookup to LookupLinear or LookupBinary based on the
// number of valid entries (linear scan up to 32 entries, binary search
// beyond that). An empty array goes straight to |if_not_found|. If
// |number_of_valid_entries| is a null node, it is computed from the array.
template <typename Array>
void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
                               TNode<Uint32T> number_of_valid_entries,
                               Label* if_found,
                               TVariable<IntPtrT>* var_name_index,
                               Label* if_not_found) {
  Comment("ArrayLookup");
  if (!number_of_valid_entries) {
    number_of_valid_entries = NumberOfEntries(array);
  }
  GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
  Label linear_search(this), binary_search(this);
  const int kMaxElementsForLinearSearch = 32;
  Branch(Uint32LessThanOrEqual(number_of_valid_entries,
                               Int32Constant(kMaxElementsForLinearSearch)),
         &linear_search, &binary_search);
  BIND(&linear_search);
  {
    LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
                        var_name_index, if_not_found);
  }
  BIND(&binary_search);
  {
    LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
                        var_name_index, if_not_found);
  }
}
9158 :
// Returns true when |map| describes a "simple" object: not a special
// receiver type, no named interceptor, and no access checks required.
TNode<BoolT> CodeStubAssembler::IsSimpleObjectMap(TNode<Map> map) {
  uint32_t mask =
      Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
  // !IsSpecialReceiverType && !IsNamedInterceptor && !IsAccessCheckNeeded
  return Select<BoolT>(
      IsSpecialReceiverInstanceType(LoadMapInstanceType(map)),
      [=] { return Int32FalseConstant(); },
      [=] { return IsClearWord32(LoadMapBitField(map), mask); });
}
9168 :
// Looks up |unique_name| on an object known to be "simple" (see
// IsSimpleObjectMap). Fast-mode objects are searched via their map's
// descriptor array (goto |if_found_fast| with *var_meta_storage = the
// descriptor array and *var_name_index = the key's element index);
// dictionary-mode objects via their NameDictionary (goto |if_found_dict|
// with *var_meta_storage = the dictionary and *var_name_index = the entry's
// key index). Misses go to |if_not_found|.
void CodeStubAssembler::TryLookupPropertyInSimpleObject(
    TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
    Label* if_found_fast, Label* if_found_dict,
    TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
    Label* if_not_found) {
  CSA_ASSERT(this, IsSimpleObjectMap(map));
  CSA_ASSERT(this, IsUniqueNameNoIndex(unique_name));

  TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
  Label if_isfastmap(this), if_isslowmap(this);
  Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
         &if_isfastmap);
  BIND(&if_isfastmap);
  {
    TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
    *var_meta_storage = descriptors;

    DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
                     var_name_index, if_not_found);
  }
  BIND(&if_isslowmap);
  {
    TNode<NameDictionary> dictionary = CAST(LoadSlowProperties(object));
    *var_meta_storage = dictionary;

    NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
                                         var_name_index, if_not_found);
  }
}
9198 :
// General own-property lookup. Simple objects are delegated to
// TryLookupPropertyInSimpleObject. Special receivers are handled as follows:
// global objects without interceptors/access checks are searched in their
// GlobalDictionary (goto |if_found_global|); every other special receiver —
// and any global object with an interceptor or access check — jumps to
// |if_bailout| so the runtime can deal with it.
void CodeStubAssembler::TryLookupProperty(
    SloppyTNode<JSObject> object, SloppyTNode<Map> map,
    SloppyTNode<Int32T> instance_type, SloppyTNode<Name> unique_name,
    Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
    TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
    Label* if_not_found, Label* if_bailout) {
  Label if_objectisspecial(this);
  GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);

  TryLookupPropertyInSimpleObject(object, map, unique_name, if_found_fast,
                                  if_found_dict, var_meta_storage,
                                  var_name_index, if_not_found);

  BIND(&if_objectisspecial);
  {
    // Handle global object here and bailout for other special objects.
    GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
              if_bailout);

    // Handle interceptors and access checks in runtime.
    TNode<Int32T> bit_field = LoadMapBitField(map);
    int mask =
        Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
    GotoIf(IsSetWord32(bit_field, mask), if_bailout);

    TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(object));
    *var_meta_storage = dictionary;

    NameDictionaryLookup<GlobalDictionary>(
        dictionary, unique_name, if_found_global, var_name_index, if_not_found);
  }
}
9231 :
// HasOwnProperty-style check built on TryLookupProperty: both fast and
// dictionary hits go straight to |if_found|; a global-dictionary hit is
// additionally checked for a deleted property cell (in which case control
// goes to |if_not_found| instead). |if_bailout| receives the cases
// TryLookupProperty cannot handle inline.
void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
                                          Node* instance_type,
                                          Node* unique_name, Label* if_found,
                                          Label* if_not_found,
                                          Label* if_bailout) {
  Comment("TryHasOwnProperty");
  CSA_ASSERT(this, IsUniqueNameNoIndex(CAST(unique_name)));
  TVARIABLE(HeapObject, var_meta_storage);
  TVARIABLE(IntPtrT, var_name_index);

  Label if_found_global(this);
  TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
                    &if_found_global, &var_meta_storage, &var_name_index,
                    if_not_found, if_bailout);

  BIND(&if_found_global);
  {
    VARIABLE(var_value, MachineRepresentation::kTagged);
    VARIABLE(var_details, MachineRepresentation::kWord32);
    // Check if the property cell is not deleted.
    LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
                                     var_name_index.value(), &var_value,
                                     &var_details, if_not_found);
    Goto(if_found);
  }
}
9258 :
// Loads the property {name} from {object}. Jumps to {if_null_or_undefined}
// when the loaded value is undefined or null; otherwise returns the value
// (no further validation, e.g. callability, is performed here).
Node* CodeStubAssembler::GetMethod(Node* context, Node* object,
                                   Handle<Name> name,
                                   Label* if_null_or_undefined) {
  Node* method = GetProperty(context, object, name);

  GotoIf(IsUndefined(method), if_null_or_undefined);
  GotoIf(IsNull(method), if_null_or_undefined);

  return method;
}
9269 :
9270 56 : TNode<Object> CodeStubAssembler::GetIteratorMethod(
9271 : TNode<Context> context, TNode<HeapObject> heap_obj,
9272 : Label* if_iteratorundefined) {
9273 112 : return CAST(GetMethod(context, heap_obj,
9274 : isolate()->factory()->iterator_symbol(),
9275 : if_iteratorundefined));
9276 : }
9277 :
// Convenience overload: loads the PropertyDetails for {name_index} from
// {descriptors} into {var_details}, then delegates to the main overload to
// load the property value into {var_value}.
void CodeStubAssembler::LoadPropertyFromFastObject(
    Node* object, Node* map, TNode<DescriptorArray> descriptors,
    Node* name_index, Variable* var_details, Variable* var_value) {
  DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());

  Node* details =
      LoadDetailsByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index));
  var_details->Bind(details);

  LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
                             var_value);
}
9291 :
// Loads the value of a fast-mode property described by {details} (taken from
// {descriptors} at {name_index}) into {var_value}. The value either lives in
// a field — in-object or in the properties backing store, possibly as a
// double that must be re-boxed — or directly in the descriptor array.
void CodeStubAssembler::LoadPropertyFromFastObject(
    Node* object, Node* map, TNode<DescriptorArray> descriptors,
    Node* name_index, Node* details, Variable* var_value) {
  Comment("[ LoadPropertyFromFastObject");

  // Property location: kField (stored on the object) vs. kDescriptor
  // (stored in the descriptor array itself).
  Node* location = DecodeWord32<PropertyDetails::LocationField>(details);

  Label if_in_field(this), if_in_descriptor(this), done(this);
  Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
         &if_in_descriptor);
  BIND(&if_in_field);
  {
    Node* field_index =
        DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
    Node* representation =
        DecodeWord32<PropertyDetails::RepresentationField>(details);

    // Rebase the field index to be relative to the object start; indices
    // below the instance size are in-object, the rest live in the
    // out-of-object properties backing store.
    field_index =
        IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
    Node* instance_size_in_words = LoadMapInstanceSizeInWords(map);

    Label if_inobject(this), if_backing_store(this);
    VARIABLE(var_double_value, MachineRepresentation::kFloat64);
    Label rebox_double(this, &var_double_value);
    Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
           &if_backing_store);
    BIND(&if_inobject);
    {
      Comment("if_inobject");
      Node* field_offset = TimesTaggedSize(field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        var_value->Bind(LoadObjectField(object, field_offset));
        Goto(&done);
      }
      BIND(&if_double);
      {
        // Doubles are either unboxed directly in the field or stored via a
        // HeapNumber, depending on the build configuration.
        if (FLAG_unbox_double_fields) {
          var_double_value.Bind(
              LoadObjectField(object, field_offset, MachineType::Float64()));
        } else {
          Node* mutable_heap_number = LoadObjectField(object, field_offset);
          var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
        }
        Goto(&rebox_double);
      }
    }
    BIND(&if_backing_store);
    {
      Comment("if_backing_store");
      TNode<HeapObject> properties = LoadFastProperties(object);
      field_index = IntPtrSub(field_index, instance_size_in_words);
      Node* value = LoadPropertyArrayElement(CAST(properties), field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        var_value->Bind(value);
        Goto(&done);
      }
      BIND(&if_double);
      {
        var_double_value.Bind(LoadHeapNumberValue(value));
        Goto(&rebox_double);
      }
    }
    BIND(&rebox_double);
    {
      // Allocate a fresh HeapNumber for the raw double so callers always
      // receive a tagged value.
      Comment("rebox_double");
      Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
      var_value->Bind(heap_number);
      Goto(&done);
    }
  }
  BIND(&if_in_descriptor);
  {
    var_value->Bind(
        LoadValueByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index)));
    Goto(&done);
  }
  BIND(&done);

  Comment("] LoadPropertyFromFastObject");
}
9384 :
9385 2412 : void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
9386 : Node* name_index,
9387 : Variable* var_details,
9388 : Variable* var_value) {
9389 2412 : Comment("LoadPropertyFromNameDictionary");
9390 : CSA_ASSERT(this, IsNameDictionary(dictionary));
9391 :
9392 : var_details->Bind(
9393 2412 : LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
9394 2412 : var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
9395 :
9396 2412 : Comment("] LoadPropertyFromNameDictionary");
9397 2412 : }
9398 :
// Loads the value and PropertyDetails for the entry at {name_index} of the
// GlobalDictionary {dictionary}. Global properties are stored indirectly via
// PropertyCells; jumps to {if_deleted} when the cell's value is the hole,
// i.e. the property has been deleted.
void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
                                                         Node* name_index,
                                                         Variable* var_details,
                                                         Variable* var_value,
                                                         Label* if_deleted) {
  Comment("[ LoadPropertyFromGlobalDictionary");
  CSA_ASSERT(this, IsGlobalDictionary(dictionary));

  Node* property_cell = LoadFixedArrayElement(CAST(dictionary), name_index);
  CSA_ASSERT(this, IsPropertyCell(property_cell));

  // A hole value marks a deleted property cell.
  Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
  GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);

  var_value->Bind(value);

  Node* details = LoadAndUntagToWord32ObjectField(
      property_cell, PropertyCell::kPropertyDetailsRawOffset);
  var_details->Bind(details);

  Comment("] LoadPropertyFromGlobalDictionary");
}
9421 :
// |value| is the property backing store's contents, which is either a value
// or an accessor pair, as specified by |details|. |context| and |receiver|
// are used when a JS getter must be invoked.
// Returns either the original value, or the result of the getter call.
// Jumps to {if_bailout} for cases that must be handled in the runtime
// (FunctionTemplateInfo getters, AccessorInfo cases not special-cased below).
TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
    Node* value, Node* details, Node* context, Node* receiver,
    Label* if_bailout, GetOwnPropertyMode mode) {
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  Label done(this), if_accessor_info(this, Label::kDeferred);

  // Data properties are returned unchanged.
  Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
  GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);

  // Accessor case.
  GotoIfNot(IsAccessorPair(value), &if_accessor_info);

  // AccessorPair case.
  {
    // In kReturnAccessorPair mode the pair itself is returned; the getter is
    // only invoked in kCallJSGetter mode.
    if (mode == kCallJSGetter) {
      Node* accessor_pair = value;
      Node* getter =
          LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
      Node* getter_map = LoadMap(getter);
      Node* instance_type = LoadMapInstanceType(getter_map);
      // FunctionTemplateInfo getters are not supported yet.
      GotoIf(InstanceTypeEqual(instance_type, FUNCTION_TEMPLATE_INFO_TYPE),
             if_bailout);

      // Return undefined if the {getter} is not callable.
      var_value.Bind(UndefinedConstant());
      GotoIfNot(IsCallableMap(getter_map), &done);

      // Call the accessor.
      Callable callable = CodeFactory::Call(isolate());
      Node* result = CallJS(callable, context, getter, receiver);
      var_value.Bind(result);
    }
    Goto(&done);
  }

  // AccessorInfo case. Only a handful of well-known C++ accessors are
  // handled inline below; everything else bails out to the runtime.
  BIND(&if_accessor_info);
  {
    Node* accessor_info = value;
    CSA_ASSERT(this, IsAccessorInfo(value));
    CSA_ASSERT(this, TaggedIsNotSmi(receiver));
    Label if_array(this), if_function(this), if_value(this);

    // Dispatch based on {receiver} instance type.
    Node* receiver_map = LoadMap(receiver);
    Node* receiver_instance_type = LoadMapInstanceType(receiver_map);
    GotoIf(IsJSArrayInstanceType(receiver_instance_type), &if_array);
    GotoIf(IsJSFunctionInstanceType(receiver_instance_type), &if_function);
    Branch(IsJSValueInstanceType(receiver_instance_type), &if_value,
           if_bailout);

    // JSArray AccessorInfo case.
    BIND(&if_array);
    {
      // We only deal with the "length" accessor on JSArray.
      GotoIfNot(IsLengthString(
                    LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
                if_bailout);
      var_value.Bind(LoadJSArrayLength(receiver));
      Goto(&done);
    }

    // JSFunction AccessorInfo case.
    BIND(&if_function);
    {
      // We only deal with the "prototype" accessor on JSFunction here.
      GotoIfNot(IsPrototypeString(
                    LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
                if_bailout);

      GotoIfPrototypeRequiresRuntimeLookup(CAST(receiver), CAST(receiver_map),
                                           if_bailout);
      var_value.Bind(LoadJSFunctionPrototype(receiver, if_bailout));
      Goto(&done);
    }

    // JSValue AccessorInfo case.
    BIND(&if_value);
    {
      // We only deal with the "length" accessor on JSValue string wrappers.
      GotoIfNot(IsLengthString(
                    LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
                if_bailout);
      Node* receiver_value = LoadJSValueValue(receiver);
      GotoIfNot(TaggedIsNotSmi(receiver_value), if_bailout);
      GotoIfNot(IsString(receiver_value), if_bailout);
      var_value.Bind(LoadStringLengthAsSmi(receiver_value));
      Goto(&done);
    }
  }

  BIND(&done);
  return UncheckedCast<Object>(var_value.value());
}
9520 :
// Convenience overload: discards the property details and the raw
// (pre-getter) value, and always invokes JS getters (kCallJSGetter).
void CodeStubAssembler::TryGetOwnProperty(
    Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
    Node* unique_name, Label* if_found_value, Variable* var_value,
    Label* if_not_found, Label* if_bailout) {
  TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
                    if_found_value, var_value, nullptr, nullptr, if_not_found,
                    if_bailout, kCallJSGetter);
}
9529 :
// Looks up {unique_name} as an own property of {object} (with {map} and
// {instance_type}). On a hit, stores the (possibly getter-resolved) value in
// {var_value} and jumps to {if_found_value}. Optional out-variables:
// {var_details} receives the PropertyDetails and {var_raw_value} the value
// before any getter invocation; either may be nullptr. Jumps to
// {if_not_found} when the property does not exist and to {if_bailout} for
// cases that must be handled in the runtime.
void CodeStubAssembler::TryGetOwnProperty(
    Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
    Node* unique_name, Label* if_found_value, Variable* var_value,
    Variable* var_details, Variable* var_raw_value, Label* if_not_found,
    Label* if_bailout, GetOwnPropertyMode mode) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
  Comment("TryGetOwnProperty");
  CSA_ASSERT(this, IsUniqueNameNoIndex(CAST(unique_name)));

  TVARIABLE(HeapObject, var_meta_storage);
  TVARIABLE(IntPtrT, var_entry);

  Label if_found_fast(this), if_found_dict(this), if_found_global(this);

  // Fall back to a local details variable when the caller is not interested
  // in the PropertyDetails (they are still needed for the accessor check).
  VARIABLE(local_var_details, MachineRepresentation::kWord32);
  if (!var_details) {
    var_details = &local_var_details;
  }
  Label if_found(this);

  TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
                    &if_found_dict, &if_found_global, &var_meta_storage,
                    &var_entry, if_not_found, if_bailout);
  BIND(&if_found_fast);
  {
    TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
    Node* name_index = var_entry.value();

    LoadPropertyFromFastObject(object, map, descriptors, name_index,
                               var_details, var_value);
    Goto(&if_found);
  }
  BIND(&if_found_dict);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();
    LoadPropertyFromNameDictionary(dictionary, entry, var_details, var_value);
    Goto(&if_found);
  }
  BIND(&if_found_global);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();

    // Deleted global property cells are treated as "not found".
    LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
                                     if_not_found);
    Goto(&if_found);
  }
  // Here we have details and value which could be an accessor.
  BIND(&if_found);
  {
    // TODO(ishell): Execute C++ accessor in case of accessor info
    if (var_raw_value) {
      var_raw_value->Bind(var_value->value());
    }
    Node* value = CallGetterIfAccessor(var_value->value(), var_details->value(),
                                       context, receiver, if_bailout, mode);
    var_value->Bind(value);
    Goto(if_found_value);
  }
}
9591 :
// Looks up the element at {intptr_index} in {object} (with {map} and
// {instance_type}). Jumps to {if_found} when the element exists, to
// {if_absent} when a typed-array lookup yields no element (detached buffer
// or out-of-bounds index), to {if_not_found} when it does not exist, and to
// {if_bailout} for cases that must be handled in the runtime (special
// receivers, unsupported elements kinds, and negative indices, which must be
// converted to property names first).
void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
                                         SloppyTNode<Int32T> instance_type,
                                         SloppyTNode<IntPtrT> intptr_index,
                                         Label* if_found, Label* if_absent,
                                         Label* if_not_found,
                                         Label* if_bailout) {
  // Handle special objects in runtime.
  GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);

  Node* elements_kind = LoadMapElementsKind(map);

  // TODO(verwaest): Support other elements kinds as well.
  Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
      if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
      if_typedarray(this);
  // clang-format off
  int32_t values[] = {
      // Handled by {if_isobjectorsmi}.
      PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS,
      HOLEY_ELEMENTS,
      // Handled by {if_isdouble}.
      PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
      // Handled by {if_isdictionary}.
      DICTIONARY_ELEMENTS,
      // Handled by {if_isfaststringwrapper}.
      FAST_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_isslowstringwrapper}.
      SLOW_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_not_found}.
      NO_ELEMENTS,
      // Handled by {if_typed_array}.
      UINT8_ELEMENTS,
      INT8_ELEMENTS,
      UINT16_ELEMENTS,
      INT16_ELEMENTS,
      UINT32_ELEMENTS,
      INT32_ELEMENTS,
      FLOAT32_ELEMENTS,
      FLOAT64_ELEMENTS,
      UINT8_CLAMPED_ELEMENTS,
      BIGUINT64_ELEMENTS,
      BIGINT64_ELEMENTS,
  };
  Label* labels[] = {
      &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
      &if_isobjectorsmi,
      &if_isdouble, &if_isdouble,
      &if_isdictionary,
      &if_isfaststringwrapper,
      &if_isslowstringwrapper,
      if_not_found,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
  };
  // clang-format on
  STATIC_ASSERT(arraysize(values) == arraysize(labels));
  Switch(elements_kind, if_bailout, values, labels, arraysize(values));

  BIND(&if_isobjectorsmi);
  {
    TNode<FixedArray> elements = CAST(LoadElements(object));
    TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    // A hole in the backing store means the element is not present.
    TNode<Object> element = UnsafeLoadFixedArrayElement(elements, intptr_index);
    TNode<Oddball> the_hole = TheHoleConstant();
    Branch(WordEqual(element, the_hole), if_not_found, if_found);
  }
  BIND(&if_isdouble);
  {
    TNode<FixedArrayBase> elements = LoadElements(object);
    TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    // Check if the element is a double hole, but don't load it.
    LoadFixedDoubleArrayElement(CAST(elements), intptr_index,
                                MachineType::None(), 0, INTPTR_PARAMETERS,
                                if_not_found);
    Goto(if_found);
  }
  BIND(&if_isdictionary);
  {
    // Negative keys must be converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);

    TVARIABLE(IntPtrT, var_entry);
    TNode<NumberDictionary> elements = CAST(LoadElements(object));
    NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
                           if_not_found);
  }
  BIND(&if_isfaststringwrapper);
  {
    // Indices inside the wrapped string's length resolve to characters;
    // others fall through to the regular fast-elements lookup.
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsString(string));
    Node* length = LoadStringLengthAsWord(string);
    GotoIf(UintPtrLessThan(intptr_index, length), if_found);
    Goto(&if_isobjectorsmi);
  }
  BIND(&if_isslowstringwrapper);
  {
    // Same as above, but fall through to the dictionary-elements lookup.
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsString(string));
    Node* length = LoadStringLengthAsWord(string);
    GotoIf(UintPtrLessThan(intptr_index, length), if_found);
    Goto(&if_isdictionary);
  }
  BIND(&if_typedarray);
  {
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), if_absent);

    Node* length = SmiUntag(LoadJSTypedArrayLength(CAST(object)));
    Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
  }
  BIND(&if_oob);
  {
    // Positive OOB indices mean "not found", negative indices must be
    // converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
    Goto(if_not_found);
  }
}
9727 :
// Branches to {if_maybe_special_index} when {name_string} could be a
// "special index" (a canonical numeric string, "Infinity" or "NaN"), and to
// {if_not_special_index} when it definitely is not. This is a conservative
// filter: it only inspects the string's length and first character, so false
// positives (maybe-special) are possible, false negatives are not.
void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
                                                  Label* if_maybe_special_index,
                                                  Label* if_not_special_index) {
  // TODO(cwhan.tunz): Implement fast cases more.

  // If a name is empty or too long, it's not a special index
  // Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
  const int kBufferSize = 24;
  TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
  GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
  GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
         if_not_special_index);

  // If the first character of name is not a digit or '-', or we can't match it
  // to Infinity or NaN, then this is not a special index.
  TNode<Int32T> first_char = StringCharCodeAt(name_string, IntPtrConstant(0));
  // If the name starts with '-', it can be a negative index.
  GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
  // If the name starts with 'I', it can be "Infinity".
  GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
  // If the name starts with 'N', it can be "NaN".
  GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
  // Finally, if the first character is not a digit either, then we are sure
  // that the name is not a special index.
  GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
  GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
  Goto(if_maybe_special_index);
}
9756 :
// Walks the prototype chain of {receiver} looking up {key} (a unique name or
// an array index) in each holder via the given callbacks. The callbacks may
// jump away on a hit; jumping to the "next" label they receive continues the
// walk with the holder's prototype. Jumps to {if_end} when the chain ends at
// null, to {if_proxy} (if non-null) when the receiver is a JSProxy, and to
// {if_bailout} for cases that must be handled in the runtime.
void CodeStubAssembler::TryPrototypeChainLookup(
    Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
    const LookupInHolder& lookup_element_in_holder, Label* if_end,
    Label* if_bailout, Label* if_proxy) {
  // Ensure receiver is JSReceiver, otherwise bailout.
  Label if_objectisnotsmi(this);
  Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
  BIND(&if_objectisnotsmi);

  Node* map = LoadMap(receiver);
  Node* instance_type = LoadMapInstanceType(map);
  {
    Label if_objectisreceiver(this);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
    Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
           if_bailout);
    BIND(&if_objectisreceiver);

    if (if_proxy) {
      GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
    }
  }

  VARIABLE(var_index, MachineType::PointerRepresentation());
  VARIABLE(var_unique, MachineRepresentation::kTagged);

  // Dispatch on whether {key} is an array index or a unique name.
  Label if_keyisindex(this), if_iskeyunique(this);
  TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
            if_bailout);

  BIND(&if_iskeyunique);
  {
    // Named-property walk: holder, its map and instance type are the
    // loop-carried variables.
    VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
    VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
    VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
             instance_type);

    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    BIND(&loop);
    {
      Node* holder_map = var_holder_map.value();
      Node* holder_instance_type = var_holder_instance_type.value();

      Label next_proto(this), check_integer_indexed_exotic(this);
      lookup_property_in_holder(receiver, var_holder.value(), holder_map,
                                holder_instance_type, var_unique.value(),
                                &check_integer_indexed_exotic, if_bailout);

      BIND(&check_integer_indexed_exotic);
      {
        // Bailout if it can be an integer indexed exotic case.
        GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
                  &next_proto);
        GotoIfNot(IsString(var_unique.value()), &next_proto);
        BranchIfMaybeSpecialIndex(CAST(var_unique.value()), if_bailout,
                                  &next_proto);
      }

      BIND(&next_proto);

      Node* proto = LoadMapPrototype(holder_map);

      GotoIf(IsNull(proto), if_end);

      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      // Continue the walk with the prototype as the new holder.
      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
  BIND(&if_keyisindex);
  {
    // Element walk: same loop structure, but using the element callback.
    VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
    VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
    VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
             instance_type);

    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    BIND(&loop);
    {
      Label next_proto(this);
      lookup_element_in_holder(receiver, var_holder.value(),
                               var_holder_map.value(),
                               var_holder_instance_type.value(),
                               var_index.value(), &next_proto, if_bailout);
      BIND(&next_proto);

      Node* proto = LoadMapPrototype(var_holder_map.value());

      GotoIf(IsNull(proto), if_end);

      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
}
9868 :
// Returns a boolean indicating whether {prototype} occurs anywhere in the
// prototype chain of {object}. Falls back to Runtime::kHasInPrototypeChain
// for proxies and for maps with named interceptors or access checks.
Node* CodeStubAssembler::HasInPrototypeChain(Node* context, Node* object,
                                             Node* prototype) {
  CSA_ASSERT(this, TaggedIsNotSmi(object));
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_false(this), return_true(this),
      return_runtime(this, Label::kDeferred), return_result(this);

  // Loop through the prototype chain looking for the {prototype}.
  VARIABLE(var_object_map, MachineRepresentation::kTagged, LoadMap(object));
  Label loop(this, &var_object_map);
  Goto(&loop);
  BIND(&loop);
  {
    // Check if we can determine the prototype directly from the {object_map}.
    Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
    Node* object_map = var_object_map.value();
    TNode<Int32T> object_instance_type = LoadMapInstanceType(object_map);
    Branch(IsSpecialReceiverInstanceType(object_instance_type),
           &if_objectisspecial, &if_objectisdirect);
    BIND(&if_objectisspecial);
    {
      // The {object_map} is a special receiver map or a primitive map, check
      // if we need to use the if_objectisspecial path in the runtime.
      GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
             &return_runtime);
      Node* object_bitfield = LoadMapBitField(object_map);
      int mask = Map::HasNamedInterceptorBit::kMask |
                 Map::IsAccessCheckNeededBit::kMask;
      Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
             &if_objectisdirect);
    }
    BIND(&if_objectisdirect);

    // Check the current {object} prototype.
    Node* object_prototype = LoadMapPrototype(object_map);
    GotoIf(IsNull(object_prototype), &return_false);
    GotoIf(WordEqual(object_prototype, prototype), &return_true);

    // Continue with the prototype.
    CSA_ASSERT(this, TaggedIsNotSmi(object_prototype));
    var_object_map.Bind(LoadMap(object_prototype));
    Goto(&loop);
  }

  BIND(&return_true);
  var_result.Bind(TrueConstant());
  Goto(&return_result);

  BIND(&return_false);
  var_result.Bind(FalseConstant());
  Goto(&return_result);

  BIND(&return_runtime);
  {
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
9932 :
// Implements the default `instanceof` behavior: checks whether {object} has
// the "prototype" of the JSFunction {callable} in its prototype chain.
// Falls back to Runtime::kOrdinaryHasInstance for Smis, non-JSFunction
// callables, functions whose "prototype" requires a runtime lookup, and when
// the force-slow-path flag is set.
Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
                                             Node* object) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_runtime(this, Label::kDeferred), return_result(this);

  GotoIfForceSlowPath(&return_runtime);

  // Goto runtime if {object} is a Smi.
  GotoIf(TaggedIsSmi(object), &return_runtime);

  // Goto runtime if {callable} is a Smi.
  GotoIf(TaggedIsSmi(callable), &return_runtime);

  // Load map of {callable}.
  Node* callable_map = LoadMap(callable);

  // Goto runtime if {callable} is not a JSFunction.
  Node* callable_instance_type = LoadMapInstanceType(callable_map);
  GotoIfNot(InstanceTypeEqual(callable_instance_type, JS_FUNCTION_TYPE),
            &return_runtime);

  GotoIfPrototypeRequiresRuntimeLookup(CAST(callable), CAST(callable_map),
                                       &return_runtime);

  // Get the "prototype" (or initial map) of the {callable}.
  Node* callable_prototype =
      LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
  {
    Label no_initial_map(this), walk_prototype_chain(this);
    VARIABLE(var_callable_prototype, MachineRepresentation::kTagged,
             callable_prototype);

    // Resolve the "prototype" if the {callable} has an initial map.
    GotoIfNot(IsMap(callable_prototype), &no_initial_map);
    var_callable_prototype.Bind(
        LoadObjectField(callable_prototype, Map::kPrototypeOffset));
    Goto(&walk_prototype_chain);

    BIND(&no_initial_map);
    // {callable_prototype} is the hole if the "prototype" property hasn't been
    // requested so far.
    Branch(WordEqual(callable_prototype, TheHoleConstant()), &return_runtime,
           &walk_prototype_chain);

    BIND(&walk_prototype_chain);
    callable_prototype = var_callable_prototype.value();
  }

  // Loop through the prototype chain looking for the {callable} prototype.
  CSA_ASSERT(this, IsJSReceiver(callable_prototype));
  var_result.Bind(HasInPrototypeChain(context, object, callable_prototype));
  Goto(&return_result);

  BIND(&return_runtime);
  {
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
9997 :
9998 310324 : TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
9999 : ElementsKind kind,
10000 : ParameterMode mode,
10001 : int base_size) {
10002 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, mode));
10003 310324 : int element_size_shift = ElementsKindToShiftSize(kind);
10004 310324 : int element_size = 1 << element_size_shift;
10005 : int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
10006 310324 : intptr_t index = 0;
10007 : bool constant_index = false;
10008 310324 : if (mode == SMI_PARAMETERS) {
10009 22944 : element_size_shift -= kSmiShiftBits;
10010 22944 : Smi smi_index;
10011 22944 : constant_index = ToSmiConstant(index_node, &smi_index);
10012 24828 : if (constant_index) index = smi_index->value();
10013 45888 : index_node = BitcastTaggedToWord(index_node);
10014 : } else {
10015 : DCHECK(mode == INTPTR_PARAMETERS);
10016 287380 : constant_index = ToIntPtrConstant(index_node, index);
10017 : }
10018 310324 : if (constant_index) {
10019 88924 : return IntPtrConstant(base_size + element_size * index);
10020 : }
10021 :
10022 : TNode<WordT> shifted_index =
10023 : (element_size_shift == 0)
10024 : ? UncheckedCast<WordT>(index_node)
10025 : : ((element_size_shift > 0)
10026 192500 : ? WordShl(index_node, IntPtrConstant(element_size_shift))
10027 434960 : : WordSar(index_node, IntPtrConstant(-element_size_shift)));
10028 221400 : return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
10029 : }
10030 :
10031 0 : TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
10032 : SloppyTNode<IntPtrT> length,
10033 : int header_size,
10034 : ElementsKind kind) {
10035 : // Make sure we point to the last field.
10036 0 : int element_size = 1 << ElementsKindToShiftSize(kind);
10037 0 : int correction = header_size - kHeapObjectTag - element_size;
10038 : TNode<IntPtrT> last_offset =
10039 0 : ElementOffsetFromIndex(length, kind, INTPTR_PARAMETERS, correction);
10040 0 : return IntPtrLessThanOrEqual(offset, last_offset);
10041 : }
10042 :
10043 12836 : TNode<HeapObject> CodeStubAssembler::LoadFeedbackCellValue(
10044 : SloppyTNode<JSFunction> closure) {
10045 : TNode<FeedbackCell> feedback_cell =
10046 : CAST(LoadObjectField(closure, JSFunction::kFeedbackCellOffset));
10047 12836 : return CAST(LoadObjectField(feedback_cell, FeedbackCell::kValueOffset));
10048 : }
10049 :
// Returns {closure}'s FeedbackVector if one has been allocated, otherwise
// Undefined. Callers must handle the Undefined case.
TNode<HeapObject> CodeStubAssembler::LoadFeedbackVector(
    SloppyTNode<JSFunction> closure) {
  TVARIABLE(HeapObject, maybe_vector, LoadFeedbackCellValue(closure));
  Label done(this);

  // If the closure doesn't have a feedback vector allocated yet, return
  // undefined. FeedbackCell can contain Undefined / FixedArray (for lazy
  // allocations) / FeedbackVector.
  GotoIf(IsFeedbackVector(maybe_vector.value()), &done);

  // In all other cases return Undefined.
  maybe_vector = UndefinedConstant();
  Goto(&done);

  BIND(&done);
  return maybe_vector.value();
}
10067 :
// Returns the ClosureFeedbackCellArray for {closure}. Before the feedback
// vector is allocated, the FeedbackCell holds the array directly; afterwards
// the array is reachable through the FeedbackVector.
TNode<ClosureFeedbackCellArray> CodeStubAssembler::LoadClosureFeedbackArray(
    SloppyTNode<JSFunction> closure) {
  TVARIABLE(HeapObject, feedback_cell_array, LoadFeedbackCellValue(closure));
  Label end(this);

  // When feedback vectors are not yet allocated, the feedback cell contains
  // an array of feedback cells used by create closures.
  GotoIf(HasInstanceType(feedback_cell_array.value(),
                         CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
         &end);

  // Load FeedbackCellArray from feedback vector.
  TNode<FeedbackVector> vector = CAST(feedback_cell_array.value());
  feedback_cell_array = CAST(
      LoadObjectField(vector, FeedbackVector::kClosureFeedbackCellArrayOffset));
  Goto(&end);

  BIND(&end);
  return CAST(feedback_cell_array.value());
}
10088 :
10089 504 : TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorForStub() {
10090 : TNode<JSFunction> function =
10091 504 : CAST(LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset));
10092 504 : return CAST(LoadFeedbackVector(function));
10093 : }
10094 :
// ORs {feedback} into the Smi-encoded feedback at {slot_id} of
// {maybe_vector}. No-op when {maybe_vector} is Undefined, and the store (and
// the profiler-tick reset in ReportFeedbackUpdate) is skipped when the
// combined feedback equals the existing value.
void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* maybe_vector,
                                       Node* slot_id) {
  Label end(this);
  // If feedback_vector is not valid, then nothing to do.
  GotoIf(IsUndefined(maybe_vector), &end);

  // This method is used for binary op and compare feedback. These
  // vector nodes are initialized with a smi 0, so we can simply OR
  // our new feedback in place.
  TNode<FeedbackVector> feedback_vector = CAST(maybe_vector);
  TNode<MaybeObject> feedback_element =
      LoadFeedbackVectorSlot(feedback_vector, slot_id);
  TNode<Smi> previous_feedback = CAST(feedback_element);
  TNode<Smi> combined_feedback = SmiOr(previous_feedback, CAST(feedback));

  // Skip the store when nothing changed.
  GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
  {
    // Smis never need a write barrier.
    StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
                            SKIP_WRITE_BARRIER);
    ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
    Goto(&end);
  }

  BIND(&end);
}
10120 :
// Resets the vector's profiler tick counter after a feedback change and,
// when V8_TRACE_FEEDBACK_UPDATES is defined, emits a trace entry tagged with
// {reason}.
void CodeStubAssembler::ReportFeedbackUpdate(
    SloppyTNode<FeedbackVector> feedback_vector, SloppyTNode<IntPtrT> slot_id,
    const char* reason) {
  // Reset profiler ticks.
  StoreObjectFieldNoWriteBarrier(
      feedback_vector, FeedbackVector::kProfilerTicksOffset, Int32Constant(0),
      MachineRepresentation::kWord32);

#ifdef V8_TRACE_FEEDBACK_UPDATES
  // Trace the update.
  CallRuntime(Runtime::kInterpreterTraceUpdateFeedback, NoContextConstant(),
              LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset),
              SmiTag(slot_id), StringConstant(reason));
#endif  // V8_TRACE_FEEDBACK_UPDATES
}
10136 :
10137 33320 : void CodeStubAssembler::OverwriteFeedback(Variable* existing_feedback,
10138 : int new_feedback) {
10139 33320 : if (existing_feedback == nullptr) return;
10140 46368 : existing_feedback->Bind(SmiConstant(new_feedback));
10141 : }
10142 :
10143 26320 : void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
10144 : int feedback) {
10145 26320 : if (existing_feedback == nullptr) return;
10146 : existing_feedback->Bind(
10147 38640 : SmiOr(CAST(existing_feedback->value()), SmiConstant(feedback)));
10148 : }
10149 :
10150 560 : void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
10151 : Node* feedback) {
10152 560 : if (existing_feedback == nullptr) return;
10153 : existing_feedback->Bind(
10154 1008 : SmiOr(CAST(existing_feedback->value()), CAST(feedback)));
10155 : }
10156 :
// Jumps to {if_protector} when {name} is one of the property names guarded by
// a protector cell; falls through otherwise.
void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
                                                    Label* if_protector) {
  // This list must be kept in sync with LookupIterator::UpdateProtector!
  // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
  // Pointer comparison is sufficient: these names are canonical root objects.
  GotoIf(WordEqual(name, LoadRoot(RootIndex::kconstructor_string)),
         if_protector);
  GotoIf(WordEqual(name, LoadRoot(RootIndex::kiterator_symbol)), if_protector);
  GotoIf(WordEqual(name, LoadRoot(RootIndex::knext_string)), if_protector);
  GotoIf(WordEqual(name, LoadRoot(RootIndex::kspecies_symbol)), if_protector);
  GotoIf(WordEqual(name, LoadRoot(RootIndex::kis_concat_spreadable_symbol)),
         if_protector);
  GotoIf(WordEqual(name, LoadRoot(RootIndex::kresolve_string)), if_protector);
  GotoIf(WordEqual(name, LoadRoot(RootIndex::kthen_string)), if_protector);
  // Fall through if no case matched.
}
10172 :
// Returns the map of {receiver}; Smis report the HeapNumber map, so callers
// can dispatch on a single map for all numeric receivers.
TNode<Map> CodeStubAssembler::LoadReceiverMap(SloppyTNode<Object> receiver) {
  return Select<Map>(
      TaggedIsSmi(receiver),
      [=] { return CAST(LoadRoot(RootIndex::kHeapNumberMap)); },
      [=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
}
10179 :
// Converts {key} (a Smi or an integer-valued HeapNumber) to an IntPtrT,
// jumping to {miss} for any other input or for non-integral doubles.
TNode<IntPtrT> CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
  TVARIABLE(IntPtrT, var_intptr_key);
  Label done(this, &var_intptr_key), key_is_smi(this);
  GotoIf(TaggedIsSmi(key), &key_is_smi);
  // Try to convert a heap number to a Smi.
  GotoIfNot(IsHeapNumber(key), miss);
  {
    TNode<Float64T> value = LoadHeapNumberValue(key);
    TNode<Int32T> int_value = RoundFloat64ToInt32(value);
    // Round-tripping through int32 must be lossless, otherwise the double
    // is not an int32-representable integer and we miss.
    GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
    var_intptr_key = ChangeInt32ToIntPtr(int_value);
    Goto(&done);
  }

  BIND(&key_is_smi);
  {
    var_intptr_key = SmiUntag(key);
    Goto(&done);
  }

  BIND(&done);
  return var_intptr_key.value();
}
10203 :
// Loads, stores, or tests (per {access_mode}) element {key} of a sloppy
// arguments object {receiver}, jumping to {bailout} for cases that need the
// runtime (non-Smi/negative keys, non-FixedArray backing store, holes on
// load/store out of the mapped range).
Node* CodeStubAssembler::EmitKeyedSloppyArguments(
    Node* receiver, Node* key, Node* value, Label* bailout,
    ArgumentsAccessMode access_mode) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  // Only Smi keys in [0, ...) are handled inline.
  GotoIfNot(TaggedIsSmi(key), bailout);
  key = SmiUntag(key);
  GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);

  TNode<FixedArray> elements = CAST(LoadElements(receiver));
  TNode<IntPtrT> elements_length = LoadAndUntagFixedArrayBaseLength(elements);

  VARIABLE(var_result, MachineRepresentation::kTagged);
  if (access_mode == ArgumentsAccessMode::kStore) {
    // Stores return the stored value.
    var_result.Bind(value);
  } else {
    DCHECK(access_mode == ArgumentsAccessMode::kLoad ||
           access_mode == ArgumentsAccessMode::kHas);
  }
  Label if_mapped(this), if_unmapped(this), end(this, &var_result);
  Node* intptr_two = IntPtrConstant(2);
  // Number of mapped entries = elements.length - 2 header slots.
  Node* adjusted_length = IntPtrSub(elements_length, intptr_two);

  GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);

  TNode<Object> mapped_index =
      LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
  Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);

  BIND(&if_mapped);
  {
    // The mapped entry is a Smi index into the context at elements[0].
    TNode<IntPtrT> mapped_index_intptr = SmiUntag(CAST(mapped_index));
    TNode<Context> the_context = CAST(LoadFixedArrayElement(elements, 0));
    if (access_mode == ArgumentsAccessMode::kLoad) {
      Node* result = LoadContextElement(the_context, mapped_index_intptr);
      CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
      var_result.Bind(result);
    } else if (access_mode == ArgumentsAccessMode::kHas) {
      CSA_ASSERT(this, Word32BinaryNot(IsTheHole(LoadContextElement(
                           the_context, mapped_index_intptr))));
      var_result.Bind(TrueConstant());
    } else {
      StoreContextElement(the_context, mapped_index_intptr, value);
    }
    Goto(&end);
  }

  BIND(&if_unmapped);
  {
    // Unmapped values live in the regular FixedArray at elements[1].
    TNode<HeapObject> backing_store_ho =
        CAST(LoadFixedArrayElement(elements, 1));
    GotoIf(WordNotEqual(LoadMap(backing_store_ho), FixedArrayMapConstant()),
           bailout);
    TNode<FixedArray> backing_store = CAST(backing_store_ho);

    TNode<IntPtrT> backing_store_length =
        LoadAndUntagFixedArrayBaseLength(backing_store);
    if (access_mode == ArgumentsAccessMode::kHas) {
      Label out_of_bounds(this);
      GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length),
             &out_of_bounds);
      Node* result = LoadFixedArrayElement(backing_store, key);
      var_result.Bind(
          SelectBooleanConstant(WordNotEqual(result, TheHoleConstant())));
      Goto(&end);

      BIND(&out_of_bounds);
      var_result.Bind(FalseConstant());
      Goto(&end);
    } else {
      GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);

      // The key falls into unmapped range.
      if (access_mode == ArgumentsAccessMode::kLoad) {
        Node* result = LoadFixedArrayElement(backing_store, key);
        GotoIf(WordEqual(result, TheHoleConstant()), bailout);
        var_result.Bind(result);
      } else {
        StoreFixedArrayElement(backing_store, key, value);
      }
      Goto(&end);
    }
  }

  BIND(&end);
  return var_result.value();
}
10315 :
10316 840 : TNode<Context> CodeStubAssembler::LoadScriptContext(
10317 : TNode<Context> context, TNode<IntPtrT> context_index) {
10318 : TNode<Context> native_context = LoadNativeContext(context);
10319 840 : TNode<ScriptContextTable> script_context_table = CAST(
10320 : LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX));
10321 :
10322 : TNode<Context> script_context = CAST(LoadFixedArrayElement(
10323 : script_context_table, context_index,
10324 : ScriptContextTable::kFirstContextSlotIndex * kTaggedSize));
10325 840 : return script_context;
10326 : }
10327 :
10328 : namespace {
10329 :
10330 : // Converts typed array elements kind to a machine representations.
10331 4032 : MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
10332 4032 : switch (kind) {
10333 : case UINT8_CLAMPED_ELEMENTS:
10334 : case UINT8_ELEMENTS:
10335 : case INT8_ELEMENTS:
10336 : return MachineRepresentation::kWord8;
10337 : case UINT16_ELEMENTS:
10338 : case INT16_ELEMENTS:
10339 896 : return MachineRepresentation::kWord16;
10340 : case UINT32_ELEMENTS:
10341 : case INT32_ELEMENTS:
10342 896 : return MachineRepresentation::kWord32;
10343 : case FLOAT32_ELEMENTS:
10344 448 : return MachineRepresentation::kFloat32;
10345 : case FLOAT64_ELEMENTS:
10346 448 : return MachineRepresentation::kFloat64;
10347 : default:
10348 0 : UNREACHABLE();
10349 : }
10350 : }
10351 :
10352 : } // namespace
10353 :
// Stores {value} at {index} of {elements} according to {kind}: raw stores for
// typed-array backing stores, unboxed doubles for double kinds, and tagged
// stores (with a write barrier only for non-Smi kinds) otherwise. No bounds
// checks are performed here.
void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
                                     Node* index, Node* value,
                                     ParameterMode mode) {
  if (IsFixedTypedArrayElementsKind(kind)) {
    if (kind == UINT8_CLAMPED_ELEMENTS) {
      // Caller must have clamped the value to [0, 255] already.
      CSA_ASSERT(this,
                 Word32Equal(value, Word32And(Int32Constant(0xFF), value)));
    }
    Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
    // TODO(cbruni): Add OOB check once typed.
    MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
    StoreNoWriteBarrier(rep, elements, offset, value);
    return;
  } else if (IsDoubleElementsKind(kind)) {
    TNode<Float64T> value_float64 = UncheckedCast<Float64T>(value);
    StoreFixedDoubleArrayElement(CAST(elements), index, value_float64, mode);
  } else {
    // Smi stores never need a write barrier; tagged object stores do.
    WriteBarrierMode barrier_mode =
        IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
    StoreFixedArrayElement(CAST(elements), index, value, barrier_mode, 0, mode);
  }
}
10376 :
// Clamps an int32 to the uint8 range [0, 255]: negative values become 0,
// values above 255 become 255, in-range values pass through.
Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
  Label done(this);
  Node* int32_zero = Int32Constant(0);
  Node* int32_255 = Int32Constant(255);
  VARIABLE(var_value, MachineRepresentation::kWord32, int32_value);
  // Unsigned compare: values in [0, 255] take the fast path unchanged
  // (negative values look huge when reinterpreted as unsigned).
  GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
  var_value.Bind(int32_zero);
  // Signed compare distinguishes negative (clamp to 0) from > 255 (clamp
  // to 255).
  GotoIf(Int32LessThan(int32_value, int32_zero), &done);
  var_value.Bind(int32_255);
  Goto(&done);
  BIND(&done);
  return var_value.value();
}
10390 :
// Clamps a float64 to the uint8 range [0, 255], rounding half to even for
// in-range values; values <= 0 (and NaN, which fails both comparisons'
// complements only via the initial 0 default) yield 0, values >= 255 yield
// 255.
Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
  Label done(this);
  // Default result is 0 (also covers the <= 0.0 branch).
  VARIABLE(var_value, MachineRepresentation::kWord32, Int32Constant(0));
  GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
  var_value.Bind(Int32Constant(255));
  GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
  {
    // In-range: round to nearest, ties to even, then truncate to word32.
    Node* rounded_value = Float64RoundToEven(float64_value);
    var_value.Bind(TruncateFloat64ToWord32(rounded_value));
    Goto(&done);
  }
  BIND(&done);
  return var_value.value();
}
10405 :
// Converts {input} to the raw machine value required for a store into a typed
// array of {elements_kind}: a BigInt for 64-bit int kinds, otherwise a
// word32/float32/float64. Non-numbers are converted via the
// NonNumberToNumber builtin (which may call back into JS through {context}).
Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
    TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
  DCHECK(IsFixedTypedArrayElementsKind(elements_kind));

  MachineRepresentation rep;
  switch (elements_kind) {
    case UINT8_ELEMENTS:
    case INT8_ELEMENTS:
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
      rep = MachineRepresentation::kWord32;
      break;
    case FLOAT32_ELEMENTS:
      rep = MachineRepresentation::kFloat32;
      break;
    case FLOAT64_ELEMENTS:
      rep = MachineRepresentation::kFloat64;
      break;
    case BIGINT64_ELEMENTS:
    case BIGUINT64_ELEMENTS:
      // BigInt kinds are converted directly; no representation loop needed.
      return ToBigInt(context, input);
    default:
      UNREACHABLE();
  }

  VARIABLE(var_result, rep);
  VARIABLE(var_input, MachineRepresentation::kTagged, input);
  Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
      convert(this), loop(this, &var_input);
  Goto(&loop);
  BIND(&loop);
  GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
  // We can handle both HeapNumber and Oddball here, since Oddball has the
  // same layout as the HeapNumber for the HeapNumber::value field. This
  // way we can also properly optimize stores of oddballs to typed arrays.
  GotoIf(IsHeapNumber(var_input.value()), &if_heapnumber_or_oddball);
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                    Oddball::kToNumberRawOffset);
  Branch(HasInstanceType(var_input.value(), ODDBALL_TYPE),
         &if_heapnumber_or_oddball, &convert);

  BIND(&if_heapnumber_or_oddball);
  {
    Node* value = UncheckedCast<Float64T>(LoadObjectField(
        var_input.value(), HeapNumber::kValueOffset, MachineType::Float64()));
    if (rep == MachineRepresentation::kWord32) {
      if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
        value = Float64ToUint8Clamped(value);
      } else {
        value = TruncateFloat64ToWord32(value);
      }
    } else if (rep == MachineRepresentation::kFloat32) {
      value = TruncateFloat64ToFloat32(value);
    } else {
      DCHECK_EQ(MachineRepresentation::kFloat64, rep);
    }
    var_result.Bind(value);
    Goto(&done);
  }

  BIND(&if_smi);
  {
    Node* value = SmiToInt32(var_input.value());
    if (rep == MachineRepresentation::kFloat32) {
      value = RoundInt32ToFloat32(value);
    } else if (rep == MachineRepresentation::kFloat64) {
      value = ChangeInt32ToFloat64(value);
    } else {
      DCHECK_EQ(MachineRepresentation::kWord32, rep);
      if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
        value = Int32ToUint8Clamped(value);
      }
    }
    var_result.Bind(value);
    Goto(&done);
  }

  BIND(&convert);
  {
    // Convert non-numbers, then re-dispatch; the result is a Smi or
    // HeapNumber, so the loop terminates on the next iteration.
    var_input.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, input));
    Goto(&loop);
  }

  BIND(&done);
  return var_result.value();
}
10495 :
// Converts {value} to a BigInt and stores it at {intptr_key} of a
// BigInt64/BigUint64 typed array. If {opt_if_detached} is non-null, jumps
// there when the underlying buffer was detached (checked after ToBigInt,
// which can run arbitrary JS).
void CodeStubAssembler::EmitBigTypedArrayElementStore(
    TNode<JSTypedArray> object, TNode<FixedTypedArrayBase> elements,
    TNode<IntPtrT> intptr_key, TNode<Object> value, TNode<Context> context,
    Label* opt_if_detached) {
  TNode<BigInt> bigint_value = ToBigInt(context, value);

  if (opt_if_detached != nullptr) {
    // Check if buffer has been detached. Must happen after {ToBigInt}!
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), opt_if_detached);
  }

  TNode<RawPtrT> backing_store = LoadFixedTypedArrayBackingStore(elements);
  TNode<IntPtrT> offset = ElementOffsetFromIndex(intptr_key, BIGINT64_ELEMENTS,
                                                 INTPTR_PARAMETERS, 0);
  EmitBigTypedArrayElementStore(elements, backing_store, offset, bigint_value);
}
10513 :
// Extracts the low (and, on 32-bit targets, high) digit of {bigint} as raw
// two's-complement machine words into {var_low}/{var_high}. Digits beyond the
// first one (two on 32-bit) are ignored; {var_high} stays 0 on 64-bit.
void CodeStubAssembler::BigIntToRawBytes(TNode<BigInt> bigint,
                                         TVariable<UintPtrT>* var_low,
                                         TVariable<UintPtrT>* var_high) {
  Label done(this);
  // Zero-length BigInt (i.e. value 0) leaves both outputs at 0.
  *var_low = Unsigned(IntPtrConstant(0));
  *var_high = Unsigned(IntPtrConstant(0));
  TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
  TNode<Uint32T> length = DecodeWord32<BigIntBase::LengthBits>(bitfield);
  TNode<Uint32T> sign = DecodeWord32<BigIntBase::SignBits>(bitfield);
  GotoIf(Word32Equal(length, Int32Constant(0)), &done);
  *var_low = LoadBigIntDigit(bigint, 0);
  if (!Is64()) {
    // 32-bit: a second digit (if present) supplies the high word.
    Label load_done(this);
    GotoIf(Word32Equal(length, Int32Constant(1)), &load_done);
    *var_high = LoadBigIntDigit(bigint, 1);
    Goto(&load_done);
    BIND(&load_done);
  }
  GotoIf(Word32Equal(sign, Int32Constant(0)), &done);
  // Negative value. Simulate two's complement.
  if (!Is64()) {
    *var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high->value()));
    Label no_carry(this);
    // Negating the low word borrows from the high word unless low == 0.
    GotoIf(WordEqual(var_low->value(), IntPtrConstant(0)), &no_carry);
    *var_high = Unsigned(IntPtrSub(var_high->value(), IntPtrConstant(1)));
    Goto(&no_carry);
    BIND(&no_carry);
  }
  *var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low->value()));
  Goto(&done);
  BIND(&done);
}
10546 :
// Writes {bigint_value} as raw 64 bits at {offset} into {backing_store}.
// On 32-bit targets two word stores are emitted, ordered according to the
// target endianness; on 64-bit targets a single word store suffices.
void CodeStubAssembler::EmitBigTypedArrayElementStore(
    TNode<FixedTypedArrayBase> elements, TNode<RawPtrT> backing_store,
    TNode<IntPtrT> offset, TNode<BigInt> bigint_value) {
  TVARIABLE(UintPtrT, var_low);
  // Only used on 32-bit platforms.
  TVARIABLE(UintPtrT, var_high);
  BigIntToRawBytes(bigint_value, &var_low, &var_high);

  MachineRepresentation rep = WordT::kMachineRepresentation;
#if defined(V8_TARGET_BIG_ENDIAN)
  if (!Is64()) {
    // Big-endian 32-bit: high word precedes low word in memory.
    StoreNoWriteBarrier(rep, backing_store, offset, var_high.value());
    StoreNoWriteBarrier(rep, backing_store,
                        IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
                        var_low.value());
  } else {
    StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
  }
#else
  // Little-endian: low word first, high word (32-bit only) second.
  StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
  if (!Is64()) {
    StoreNoWriteBarrier(rep, backing_store,
                        IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
                        var_high.value());
  }
#endif
}
10574 :
// Stores {value} at {key} of {object} for a statically-known {elements_kind}
// and {store_mode}, jumping to {bailout} for anything this fast path cannot
// handle (bad keys, detached buffers, COW arrays it may not copy, failed
// growth, type mismatches).
void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
                                         ElementsKind elements_kind,
                                         KeyedAccessStoreMode store_mode,
                                         Label* bailout, Node* context) {
  CSA_ASSERT(this, Word32BinaryNot(IsJSProxy(object)));

  Node* elements = LoadElements(object);
  if (!IsSmiOrObjectElementsKind(elements_kind)) {
    CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
  } else if (!IsCOWHandlingStoreMode(store_mode)) {
    // This store mode may not copy-on-write; bail out for COW arrays.
    GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
  }

  // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
  ParameterMode parameter_mode = INTPTR_PARAMETERS;
  TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);

  if (IsFixedTypedArrayElementsKind(elements_kind)) {
    Label done(this);

    // IntegerIndexedElementSet converts value to a Number/BigInt prior to the
    // bounds check.
    value = PrepareValueForWriteToTypedArray(CAST(value), elements_kind,
                                             CAST(context));

    // There must be no allocations between the buffer load and
    // and the actual store to backing store, because GC may decide that
    // the buffer is not alive or move the elements.
    // TODO(ishell): introduce DisallowHeapAllocationCode scope here.

    // Check if buffer has been detached.
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), bailout);

    // Bounds check.
    Node* length =
        TaggedToParameter(LoadJSTypedArrayLength(CAST(object)), parameter_mode);

    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Skip the store if we write beyond the length or
      // to a property with a negative integer index.
      GotoIfNot(UintPtrLessThan(intptr_key, length), &done);
    } else if (store_mode == STANDARD_STORE) {
      GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
    } else {
      // This case is produced due to the dispatched call in
      // ElementsTransitionAndStore and StoreFastElement.
      // TODO(jgruber): Avoid generating unsupported combinations to save code
      // size.
      DebugBreak();
    }

    if (elements_kind == BIGINT64_ELEMENTS ||
        elements_kind == BIGUINT64_ELEMENTS) {
      TNode<BigInt> bigint_value = UncheckedCast<BigInt>(value);

      TNode<RawPtrT> backing_store =
          LoadFixedTypedArrayBackingStore(CAST(elements));
      TNode<IntPtrT> offset = ElementOffsetFromIndex(
          intptr_key, BIGINT64_ELEMENTS, INTPTR_PARAMETERS, 0);
      EmitBigTypedArrayElementStore(CAST(elements), backing_store, offset,
                                    bigint_value);
    } else {
      Node* backing_store = LoadFixedTypedArrayBackingStore(CAST(elements));
      StoreElement(backing_store, elements_kind, intptr_key, value,
                   parameter_mode);
    }
    Goto(&done);

    BIND(&done);
    return;
  }
  DCHECK(IsFastElementsKind(elements_kind) ||
         elements_kind == PACKED_SEALED_ELEMENTS);

  // For JSArrays use the array length; otherwise the backing store length.
  Node* length =
      SelectImpl(IsJSArray(object), [=]() { return LoadJSArrayLength(object); },
                 [=]() { return LoadFixedArrayBaseLength(elements); },
                 MachineRepresentation::kTagged);
  length = TaggedToParameter(length, parameter_mode);

  // In case value is stored into a fast smi array, assure that the value is
  // a smi before manipulating the backing store. Otherwise the backing store
  // may be left in an invalid state.
  if (IsSmiElementsKind(elements_kind)) {
    GotoIfNot(TaggedIsSmi(value), bailout);
  } else if (IsDoubleElementsKind(elements_kind)) {
    value = TryTaggedToFloat64(value, bailout);
  }

  if (IsGrowStoreMode(store_mode) &&
      !(elements_kind == PACKED_SEALED_ELEMENTS)) {
    elements = CheckForCapacityGrow(object, elements, elements_kind, length,
                                    intptr_key, parameter_mode, bailout);
  } else {
    GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
  }

  // If we didn't grow {elements}, it might still be COW, in which case we
  // copy it now.
  if (!IsSmiOrObjectElementsKind(elements_kind)) {
    CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
  } else if (IsCOWHandlingStoreMode(store_mode)) {
    elements = CopyElementsOnWrite(object, elements, elements_kind, length,
                                   parameter_mode, bailout);
  }

  CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
  StoreElement(elements, elements_kind, intptr_key, value, parameter_mode);
}
10685 :
// Returns an elements backing store that can hold index {key}, growing it
// (inline or via Runtime::kGrowArrayElements) when the store is an append /
// out-of-bounds write. Also bumps a JSArray's length to key + 1 after a grow.
// Jumps to {bailout} when growing is refused (runtime returned a Smi) or the
// key is simply out of bounds in the non-grow case.
Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
                                              ElementsKind kind, Node* length,
                                              Node* key, ParameterMode mode,
                                              Label* bailout) {
  DCHECK(IsFastElementsKind(kind));
  VARIABLE(checked_elements, MachineRepresentation::kTagged);
  Label grow_case(this), no_grow_case(this), done(this),
      grow_bailout(this, Label::kDeferred);

  Node* condition;
  if (IsHoleyElementsKind(kind)) {
    // Holey kinds may grow for any index at or past the current length.
    condition = UintPtrGreaterThanOrEqual(key, length);
  } else {
    // We don't support growing here unless the value is being appended.
    condition = WordEqual(key, length);
  }
  Branch(condition, &grow_case, &no_grow_case);

  BIND(&grow_case);
  {
    Node* current_capacity =
        TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
    checked_elements.Bind(elements);
    Label fits_capacity(this);
    // If key is negative, we will notice in Runtime::kGrowArrayElements.
    GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);

    {
      Node* new_elements = TryGrowElementsCapacity(
          object, elements, kind, key, current_capacity, mode, &grow_bailout);
      checked_elements.Bind(new_elements);
      Goto(&fits_capacity);
    }

    BIND(&grow_bailout);
    {
      // Inline growth failed; ask the runtime. A Smi result signals refusal.
      Node* tagged_key = mode == SMI_PARAMETERS
                             ? key
                             : ChangeInt32ToTagged(TruncateIntPtrToInt32(key));
      Node* maybe_elements = CallRuntime(
          Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
      GotoIf(TaggedIsSmi(maybe_elements), bailout);
      CSA_ASSERT(this, IsFixedArrayWithKind(maybe_elements, kind));
      checked_elements.Bind(maybe_elements);
      Goto(&fits_capacity);
    }

    BIND(&fits_capacity);
    GotoIfNot(IsJSArray(object), &done);

    // Appending past the length: update the JSArray length to key + 1.
    Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
    StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
                                   ParameterToTagged(new_length, mode));
    Goto(&done);
  }

  BIND(&no_grow_case);
  {
    GotoIfNot(UintPtrLessThan(key, length), bailout);
    checked_elements.Bind(elements);
    Goto(&done);
  }

  BIND(&done);
  return checked_elements.value();
}
10752 :
// If {elements} is a copy-on-write FixedArray, replaces it with a writable
// copy (via GrowElementsCapacity at the same capacity) and returns the copy;
// otherwise returns {elements} unchanged. Jumps to {bailout} if the copy
// cannot be allocated.
Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
                                             ElementsKind kind, Node* length,
                                             ParameterMode mode,
                                             Label* bailout) {
  VARIABLE(new_elements_var, MachineRepresentation::kTagged, elements);
  Label done(this);

  GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
  {
    Node* capacity =
        TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
    Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
                                              length, capacity, mode, bailout);
    new_elements_var.Bind(new_elements);
    Goto(&done);
  }

  BIND(&done);
  return new_elements_var.value();
}
10773 :
// Transitions {object} from {from_kind} to {to_kind}, reallocating the
// backing store for non-simple transitions (e.g. Smi/double representation
// changes) and finally installing {map}. Jumps to {bailout} when an
// AllocationMemento is found (for tracked transitions) or reallocation fails.
void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
                                               ElementsKind from_kind,
                                               ElementsKind to_kind,
                                               Label* bailout) {
  DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
  if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
    // Mementos must be handled by the runtime so the site records the
    // transition; bail out when one is present.
    TrapAllocationMemento(object, bailout);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    Comment("Non-simple map transition");
    Node* elements = LoadElements(object);

    Label done(this);
    // The empty fixed array needs no conversion.
    GotoIf(WordEqual(elements, EmptyFixedArrayConstant()), &done);

    // TODO(ishell): Use OptimalParameterMode().
    ParameterMode mode = INTPTR_PARAMETERS;
    Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
    Node* array_length = SelectImpl(
        IsJSArray(object),
        [=]() {
          CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(object)));
          return SmiUntag(LoadFastJSArrayLength(object));
        },
        [=]() { return elements_length; },
        MachineType::PointerRepresentation());

    CSA_ASSERT(this, WordNotEqual(elements_length, IntPtrConstant(0)));

    GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
                         elements_length, mode, bailout);
    Goto(&done);
    BIND(&done);
  }

  StoreMap(object, map);
}
10812 :
           : // Jumps to |memento_found| if an AllocationMemento directly follows
           : // |object| (which must be a JSArray, since the memento is probed at
           : : // JSArray::kSize). Falls through when the object is not in new
           : // space, sits on a large page, the candidate memento would lie past
           : // the page / allocation top, or the word after the object is not
           : // the AllocationMemento map.
10813 2744 : void CodeStubAssembler::TrapAllocationMemento(Node* object,
10814 : Label* memento_found) {
10815 2744 : Comment("[ TrapAllocationMemento");
10816 2744 : Label no_memento_found(this);
10817 2744 : Label top_check(this), map_check(this);
10818 :
10819 : TNode<ExternalReference> new_space_top_address = ExternalConstant(
10820 2744 : ExternalReference::new_space_allocation_top_address(isolate()));
10821 : const int kMementoMapOffset = JSArray::kSize;
10822 : const int kMementoLastWordOffset =
10823 : kMementoMapOffset + AllocationMemento::kSize - kTaggedSize;
10824 :
10825 : // Bail out if the object is not in new space.
10826 2744 : TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
10827 2744 : TNode<IntPtrT> object_page = PageFromAddress(object_word);
10828 : {
           : // The page flags live at a fixed offset from the page start.
10829 : TNode<IntPtrT> page_flags =
10830 : UncheckedCast<IntPtrT>(Load(MachineType::IntPtr(), object_page,
10831 5488 : IntPtrConstant(Page::kFlagsOffset)));
10832 5488 : GotoIf(WordEqual(
10833 : WordAnd(page_flags,
10834 2744 : IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
10835 5488 : IntPtrConstant(0)),
10836 2744 : &no_memento_found);
10837 : // TODO(ulan): Support allocation memento for a large object by allocating
10838 : // additional word for the memento after the large object.
10839 5488 : GotoIf(WordNotEqual(WordAnd(page_flags,
10840 2744 : IntPtrConstant(MemoryChunk::kIsLargePageMask)),
10841 5488 : IntPtrConstant(0)),
10842 2744 : &no_memento_found);
10843 : }
10844 :
           : // Address of the last word of the would-be memento.
10845 : TNode<IntPtrT> memento_last_word = IntPtrAdd(
10846 2744 : object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
10847 2744 : TNode<IntPtrT> memento_last_word_page = PageFromAddress(memento_last_word);
10848 :
10849 : TNode<IntPtrT> new_space_top = UncheckedCast<IntPtrT>(
10850 2744 : Load(MachineType::Pointer(), new_space_top_address));
10851 2744 : TNode<IntPtrT> new_space_top_page = PageFromAddress(new_space_top);
10852 :
10853 : // If the object is in new space, we need to check whether respective
10854 : // potential memento object is on the same page as the current top.
10855 5488 : GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);
10856 :
10857 : // The object is on a different page than allocation top. Bail out if the
10858 : // object sits on the page boundary as no memento can follow and we cannot
10859 : // touch the memory following it.
10860 5488 : Branch(WordEqual(object_page, memento_last_word_page), &map_check,
10861 2744 : &no_memento_found);
10862 :
10863 : // If top is on the same page as the current object, we need to check whether
10864 : // we are below top.
10865 : BIND(&top_check);
10866 : {
10867 5488 : Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
10868 2744 : &no_memento_found, &map_check);
10869 : }
10870 :
10871 : // Memento map check.
10872 : BIND(&map_check);
10873 : {
10874 : TNode<Object> memento_map = LoadObjectField(object, kMementoMapOffset);
10875 5488 : Branch(WordEqual(memento_map, LoadRoot(RootIndex::kAllocationMementoMap)),
10876 2744 : memento_found, &no_memento_found);
10877 : }
10878 : BIND(&no_memento_found);
10879 2744 : Comment("] TrapAllocationMemento");
10880 2744 : }
10881 :
           : // Returns the start address of the page containing |address| by
           : // masking off the low (in-page) bits.
10882 11628 : TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) {
10883 23256 : return WordAnd(address, IntPtrConstant(~kPageAlignmentMask));
10884 : }
10885 :
           : // Allocates a pretenured AllocationSite, initializes it to match
           : // AllocationSite::Initialize, links it at the head of the isolate's
           : // allocation-sites list, and stores it into |feedback_vector| at
           : // |slot|. Returns the new site.
10886 392 : TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
10887 : SloppyTNode<FeedbackVector> feedback_vector, TNode<Smi> slot) {
10888 392 : TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
           : // Sites must survive scavenges, hence the pretenured allocation.
10889 784 : Node* site = Allocate(size, CodeStubAssembler::kPretenured);
10890 392 : StoreMapNoWriteBarrier(site, RootIndex::kAllocationSiteWithWeakNextMap);
10891 : // Should match AllocationSite::Initialize.
10892 : TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
10893 1176 : IntPtrConstant(0), IntPtrConstant(GetInitialFastElementsKind()));
10894 : StoreObjectFieldNoWriteBarrier(
10895 : site, AllocationSite::kTransitionInfoOrBoilerplateOffset,
10896 784 : SmiTag(Signed(field)));
10897 :
10898 : // Unlike literals, constructed arrays don't have nested sites
10899 392 : TNode<Smi> zero = SmiConstant(0);
10900 : StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
10901 :
10902 : // Pretenuring calculation field.
10903 : StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
10904 784 : Int32Constant(0),
10905 : MachineRepresentation::kWord32);
10906 :
10907 : // Pretenuring memento creation count field.
10908 : StoreObjectFieldNoWriteBarrier(
10909 784 : site, AllocationSite::kPretenureCreateCountOffset, Int32Constant(0),
10910 : MachineRepresentation::kWord32);
10911 :
10912 : // Store an empty fixed array for the code dependency.
10913 : StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
10914 : RootIndex::kEmptyWeakFixedArray);
10915 :
10916 : // Link the object to the allocation site list
10917 : TNode<ExternalReference> site_list = ExternalConstant(
10918 392 : ExternalReference::allocation_sites_list_address(isolate()));
10919 392 : TNode<Object> next_site = CAST(LoadBufferObject(site_list, 0));
10920 :
10921 : // TODO(mvstanton): This is a store to a weak pointer, which we may want to
10922 : // mark as such in order to skip the write barrier, once we have a unified
10923 : // system for weakness. For now we decided to keep it like this because having
10924 : // an initial write barrier backed store makes this pointer strong until the
10925 : // next GC, and allocation sites are designed to survive several GCs anyway.
10926 : StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
10927 392 : StoreFullTaggedNoWriteBarrier(site_list, site);
10928 :
10929 : StoreFeedbackVectorSlot(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
10930 392 : SMI_PARAMETERS);
10931 392 : return CAST(site);
10932 : }
10933 :
           : // Stores |value| as a weak reference into |feedback_vector| at
           : // |slot| (+ |additional_offset|) and returns the weak reference.
10934 2240 : TNode<MaybeObject> CodeStubAssembler::StoreWeakReferenceInFeedbackVector(
10935 : SloppyTNode<FeedbackVector> feedback_vector, Node* slot,
10936 : SloppyTNode<HeapObject> value, int additional_offset,
10937 : ParameterMode parameter_mode) {
10938 2240 : TNode<MaybeObject> weak_value = MakeWeak(value);
10939 : StoreFeedbackVectorSlot(feedback_vector, slot, weak_value,
10940 : UPDATE_WRITE_BARRIER, additional_offset,
10941 2240 : parameter_mode);
10942 2240 : return weak_value;
10943 : }
10944 :
           : // Returns true iff |maybe_literal_site| is a Smi, i.e. the
           : // transition-info-or-boilerplate slot holds transition info rather
           : // than a boilerplate object.
10945 672 : TNode<BoolT> CodeStubAssembler::NotHasBoilerplate(
10946 : TNode<Object> maybe_literal_site) {
10947 672 : return TaggedIsSmi(maybe_literal_site);
10948 : }
10949 :
           : // Loads the transition info Smi from |allocation_site|. Callers must
           : // know the slot currently holds transition info, not a boilerplate.
10950 56 : TNode<Smi> CodeStubAssembler::LoadTransitionInfo(
10951 : TNode<AllocationSite> allocation_site) {
10952 : TNode<Smi> transition_info = CAST(LoadObjectField(
10953 : allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10954 56 : return transition_info;
10955 : }
10956 :
           : // Loads the boilerplate JSObject from |allocation_site|. Callers
           : // must know the slot currently holds a boilerplate (see
           : // NotHasBoilerplate), not transition info.
10957 448 : TNode<JSObject> CodeStubAssembler::LoadBoilerplate(
10958 : TNode<AllocationSite> allocation_site) {
10959 : TNode<JSObject> boilerplate = CAST(LoadObjectField(
10960 : allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10961 448 : return boilerplate;
10962 : }
10963 :
           : // Decodes the elements kind recorded in |allocation_site|'s
           : // transition info. Asserts the kind is a fast elements kind.
10964 280 : TNode<Int32T> CodeStubAssembler::LoadElementsKind(
10965 : TNode<AllocationSite> allocation_site) {
10966 : TNode<Smi> transition_info = LoadTransitionInfo(allocation_site);
10967 : TNode<Int32T> elements_kind =
10968 : Signed(DecodeWord32<AllocationSite::ElementsKindBits>(
10969 560 : SmiToInt32(transition_info)));
10970 : CSA_ASSERT(this, IsFastElementsKind(elements_kind));
10971 280 : return elements_kind;
10972 : }
10973 :
           : // Emits a counted loop running |body| for each index from
           : // |start_index| (inclusive) to |end_index| (exclusive), stepping by
           : // |increment|. |advance_mode| selects whether the index is bumped
           : // before (kPre) or after (kPost) the body. |vars| lists extra loop
           : // variables. Returns the final index value. Termination uses
           : // equality, so start/end must be reachable by exact |increment|
           : // steps.
10974 30792 : Node* CodeStubAssembler::BuildFastLoop(
10975 : const CodeStubAssembler::VariableList& vars, Node* start_index,
10976 : Node* end_index, const FastLoopBody& body, int increment,
10977 : ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
10978 : CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
10979 : CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
10980 : MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
10981 : ? MachineType::PointerRepresentation()
10982 30792 : : MachineRepresentation::kTaggedSigned;
10983 61584 : VARIABLE(var, index_rep, start_index);
10984 30792 : VariableList vars_copy(vars.begin(), vars.end(), zone());
10985 61584 : vars_copy.push_back(&var);
10986 30792 : Label loop(this, vars_copy);
10987 30792 : Label after_loop(this);
10988 : // Introduce an explicit second check of the termination condition before the
10989 : // loop that helps turbofan generate better code. If there's only a single
10990 : // check, then the CodeStubAssembler forces it to be at the beginning of the
10991 : // loop requiring a backwards branch at the end of the loop (it's not possible
10992 : // to force the loop header check at the end of the loop and branch forward to
10993 : // it from the pre-header). The extra branch is slower in the case that the
10994 : // loop actually iterates.
10995 92376 : Node* first_check = WordEqual(var.value(), end_index);
10996 : int32_t first_check_val;
10997 30792 : if (ToInt32Constant(first_check, first_check_val)) {
           : // Statically-empty loop: skip emitting the loop entirely.
10998 1036 : if (first_check_val) return var.value();
10999 72 : Goto(&loop);
11000 : } else {
11001 29756 : Branch(first_check, &after_loop, &loop);
11002 : }
11003 :
11004 : BIND(&loop);
11005 : {
11006 29828 : if (advance_mode == IndexAdvanceMode::kPre) {
11007 18192 : Increment(&var, increment, parameter_mode);
11008 : }
11009 29828 : body(var.value());
11010 29828 : if (advance_mode == IndexAdvanceMode::kPost) {
11011 11636 : Increment(&var, increment, parameter_mode);
11012 : }
11013 89484 : Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
11014 : }
11015 : BIND(&after_loop);
11016 29828 : return var.value();
11017 : }
11018 :
           : // Runs |body| for each element of |fixed_array| in
           : // [first_element_inclusive, last_element_exclusive), passing the
           : // array and the byte offset of the element. When both bounds are
           : // compile-time constants and the range is small, the loop is fully
           : // unrolled; otherwise it falls back to BuildFastLoop over byte
           : // offsets.
11019 16996 : void CodeStubAssembler::BuildFastFixedArrayForEach(
11020 : const CodeStubAssembler::VariableList& vars, Node* fixed_array,
11021 : ElementsKind kind, Node* first_element_inclusive,
11022 : Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
11023 : ParameterMode mode, ForEachDirection direction) {
11024 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
11025 : CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
11026 : CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
11027 : CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(fixed_array, kind),
11028 : IsPropertyArray(fixed_array)));
11029 : int32_t first_val;
11030 16996 : bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
11031 : int32_t last_val;
           : // NOTE(review): "constent_last" is a typo for "constant_last";
           : // left as-is to keep this coverage listing byte-accurate.
11032 16996 : bool constent_last = ToInt32Constant(last_element_exclusive, last_val);
11033 16996 : if (constant_first && constent_last) {
11034 1032 : int delta = last_val - first_val;
11035 : DCHECK_GE(delta, 0);
11036 1032 : if (delta <= kElementLoopUnrollThreshold) {
           : // Small constant range: unroll into straight-line stores/loads.
11037 976 : if (direction == ForEachDirection::kForward) {
11038 60 : for (int i = first_val; i < last_val; ++i) {
11039 48 : Node* index = IntPtrConstant(i);
11040 : Node* offset =
11041 48 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
11042 24 : FixedArray::kHeaderSize - kHeapObjectTag);
11043 : body(fixed_array, offset);
11044 : }
11045 : } else {
11046 3092 : for (int i = last_val - 1; i >= first_val; --i) {
11047 4256 : Node* index = IntPtrConstant(i);
11048 : Node* offset =
11049 4256 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
11050 2128 : FixedArray::kHeaderSize - kHeapObjectTag);
11051 : body(fixed_array, offset);
11052 : }
11053 : }
11054 976 : return;
11055 : }
11056 : }
11057 :
           : // General case: iterate over byte offsets with a real loop.
11058 : Node* start =
11059 32040 : ElementOffsetFromIndex(first_element_inclusive, kind, mode,
11060 16020 : FixedArray::kHeaderSize - kHeapObjectTag);
11061 : Node* limit =
11062 32040 : ElementOffsetFromIndex(last_element_exclusive, kind, mode,
11063 : FixedArray::kHeaderSize - kHeapObjectTag);
11064 16020 : if (direction == ForEachDirection::kReverse) std::swap(start, limit);
11065 :
11066 : int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kTaggedSize;
11067 32040 : BuildFastLoop(
11068 : vars, start, limit,
11069 31016 : [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
11070 : direction == ForEachDirection::kReverse ? -increment : increment,
11071 : INTPTR_PARAMETERS,
11072 : direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
11073 16020 : : IndexAdvanceMode::kPost);
11074 : }
11075 :
           : // Jumps to |doesnt_fit| when a fixed array of |element_count|
           : // elements (plus |base_size| header bytes) exceeds the new-space
           : // allocation limit.
11076 224 : void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
11077 : Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
11078 672 : GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size, mode),
11079 224 : doesnt_fit);
11080 224 : }
11081 :
           : // Fills |object|'s tagged fields in [start_offset, end_offset) with
           : // the root value |root_index|, iterating from the end backwards.
           : // Offsets are given with the heap-object tag; no write barrier is
           : // emitted, so this is only valid for freshly-allocated objects or
           : // root values that need no barrier.
11082 3876 : void CodeStubAssembler::InitializeFieldsWithRoot(Node* object,
11083 : Node* start_offset,
11084 : Node* end_offset,
11085 : RootIndex root_index) {
11086 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
           : // Convert tagged offsets to untagged addresses.
11087 11628 : start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
11088 11628 : end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
11089 7752 : Node* root_value = LoadRoot(root_index);
           : // Loop runs from end_offset down to start_offset (note the reversed
           : // bounds and negative step with pre-increment).
11090 3876 : BuildFastLoop(
11091 : end_offset, start_offset,
11092 3424 : [this, object, root_value](Node* current) {
11093 3424 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object, current,
11094 3424 : root_value);
11095 : },
11096 : -kTaggedSize, INTPTR_PARAMETERS,
11097 3876 : CodeStubAssembler::IndexAdvanceMode::kPre);
11098 3876 : }
11099 :
           : // Branches to |if_true|/|if_false| according to |op| applied to two
           : // Numbers (Smi or HeapNumber — asserted below). Smi/Smi pairs use
           : // fast Smi comparisons; any HeapNumber operand routes through a
           : // shared float64 comparison.
11100 8712 : void CodeStubAssembler::BranchIfNumberRelationalComparison(
11101 : Operation op, Node* left, Node* right, Label* if_true, Label* if_false) {
11102 : CSA_SLOW_ASSERT(this, IsNumber(left));
11103 : CSA_SLOW_ASSERT(this, IsNumber(right));
11104 :
11105 17424 : Label do_float_comparison(this);
11106 : TVARIABLE(Float64T, var_left_float);
11107 : TVARIABLE(Float64T, var_right_float);
11108 :
11109 43560 : Branch(
11110 : TaggedIsSmi(left),
11111 8712 : [&] {
11112 34848 : TNode<Smi> smi_left = CAST(left);
11113 :
11114 52272 : Branch(
11115 26136 : TaggedIsSmi(right),
11116 8712 : [&] {
11117 8712 : TNode<Smi> smi_right = CAST(right);
11118 :
11119 : // Both {left} and {right} are Smi, so just perform a fast
11120 : // Smi comparison.
11121 17424 : switch (op) {
11122 : case Operation::kEqual:
11123 30960 : BranchIfSmiEqual(smi_left, smi_right, if_true, if_false);
11124 348 : break;
11125 : case Operation::kLessThan:
11126 9576 : BranchIfSmiLessThan(smi_left, smi_right, if_true, if_false);
11127 3192 : break;
11128 : case Operation::kLessThanOrEqual:
11129 112 : BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true,
11130 56 : if_false);
11131 56 : break;
           : // Greater-than(-or-equal) is implemented by swapping the
           : // operands of less-than(-or-equal).
11132 : case Operation::kGreaterThan:
11133 5712 : BranchIfSmiLessThan(smi_right, smi_left, if_true, if_false);
11134 1904 : break;
11135 : case Operation::kGreaterThanOrEqual:
11136 6424 : BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true,
11137 3212 : if_false);
11138 3212 : break;
11139 : default:
11140 0 : UNREACHABLE();
11141 : }
11142 8712 : },
11143 8712 : [&] {
11144 : CSA_ASSERT(this, IsHeapNumber(right));
11145 52272 : var_left_float = SmiToFloat64(smi_left);
11146 26136 : var_right_float = LoadHeapNumberValue(right);
11147 26136 : Goto(&do_float_comparison);
11148 17424 : });
11149 8712 : },
11150 8712 : [&] {
11151 : CSA_ASSERT(this, IsHeapNumber(left));
11152 52272 : var_left_float = LoadHeapNumberValue(left);
11153 :
11154 52272 : Branch(
11155 26136 : TaggedIsSmi(right),
11156 8712 : [&] {
11157 52272 : var_right_float = SmiToFloat64(right);
11158 34848 : Goto(&do_float_comparison);
11159 8712 : },
11160 8712 : [&] {
11161 : CSA_ASSERT(this, IsHeapNumber(right));
11162 26136 : var_right_float = LoadHeapNumberValue(right);
11163 17424 : Goto(&do_float_comparison);
11164 17424 : });
11165 17424 : });
11166 :
           : // Shared slow path: both operands have been converted to float64.
11167 : BIND(&do_float_comparison);
11168 : {
11169 8712 : switch (op) {
11170 : case Operation::kEqual:
11171 696 : Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
11172 696 : if_true, if_false);
11173 348 : break;
11174 : case Operation::kLessThan:
11175 6384 : Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
11176 6384 : if_true, if_false);
11177 3192 : break;
11178 : case Operation::kLessThanOrEqual:
11179 112 : Branch(Float64LessThanOrEqual(var_left_float.value(),
11180 56 : var_right_float.value()),
11181 112 : if_true, if_false);
11182 56 : break;
11183 : case Operation::kGreaterThan:
11184 1904 : Branch(
11185 3808 : Float64GreaterThan(var_left_float.value(), var_right_float.value()),
11186 3808 : if_true, if_false);
11187 1904 : break;
11188 : case Operation::kGreaterThanOrEqual:
11189 6424 : Branch(Float64GreaterThanOrEqual(var_left_float.value(),
11190 3212 : var_right_float.value()),
11191 6424 : if_true, if_false);
11192 3212 : break;
11193 : default:
11194 0 : UNREACHABLE();
11195 : }
11196 : }
11197 8712 : }
11198 :
           : // Jumps to |if_true| when Number |left| >= Number |right|; falls
           : // through otherwise.
11199 2760 : void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(Node* left, Node* right,
11200 : Label* if_true) {
11201 5520 : Label if_false(this);
11202 : BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
11203 2760 : right, if_true, &if_false);
11204 : BIND(&if_false);
11205 2760 : }
11206 :
11207 : namespace {
           : // Mirrors a relational operation so that its operands can be
           : // swapped: a <op> b  ==  b <Reverse(op)> a. Only defined for the
           : // four ordering operations.
11208 2688 : Operation Reverse(Operation op) {
11209 2688 : switch (op) {
11210 : case Operation::kLessThan:
11211 : return Operation::kGreaterThan;
11212 : case Operation::kLessThanOrEqual:
11213 672 : return Operation::kGreaterThanOrEqual;
11214 : case Operation::kGreaterThan:
11215 672 : return Operation::kLessThan;
11216 : case Operation::kGreaterThanOrEqual:
11217 672 : return Operation::kLessThanOrEqual;
11218 : default:
11219 : break;
11220 : }
11221 0 : UNREACHABLE();
11222 : }
11223 : } // anonymous namespace
11224 :
11225 896 : Node* CodeStubAssembler::RelationalComparison(Operation op, Node* left,
11226 : Node* right, Node* context,
11227 : Variable* var_type_feedback) {
11228 1792 : Label return_true(this), return_false(this), do_float_comparison(this),
11229 896 : end(this);
11230 : TVARIABLE(Oddball, var_result); // Actually only "true" or "false".
11231 : TVARIABLE(Float64T, var_left_float);
11232 : TVARIABLE(Float64T, var_right_float);
11233 :
11234 : // We might need to loop several times due to ToPrimitive and/or ToNumeric
11235 : // conversions.
11236 1792 : VARIABLE(var_left, MachineRepresentation::kTagged, left);
11237 1792 : VARIABLE(var_right, MachineRepresentation::kTagged, right);
11238 1792 : VariableList loop_variable_list({&var_left, &var_right}, zone());
11239 896 : if (var_type_feedback != nullptr) {
11240 : // Initialize the type feedback to None. The current feedback is combined
11241 : // with the previous feedback.
11242 672 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
11243 672 : loop_variable_list.push_back(var_type_feedback);
11244 : }
11245 896 : Label loop(this, loop_variable_list);
11246 896 : Goto(&loop);
11247 : BIND(&loop);
11248 : {
11249 896 : left = var_left.value();
11250 896 : right = var_right.value();
11251 :
11252 896 : Label if_left_smi(this), if_left_not_smi(this);
11253 1792 : Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11254 :
11255 : BIND(&if_left_smi);
11256 : {
11257 : TNode<Smi> smi_left = CAST(left);
11258 896 : Label if_right_smi(this), if_right_heapnumber(this),
11259 896 : if_right_bigint(this, Label::kDeferred),
11260 896 : if_right_not_numeric(this, Label::kDeferred);
11261 1792 : GotoIf(TaggedIsSmi(right), &if_right_smi);
11262 : Node* right_map = LoadMap(right);
11263 1792 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11264 : Node* right_instance_type = LoadMapInstanceType(right_map);
11265 896 : Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11266 896 : &if_right_not_numeric);
11267 :
11268 : BIND(&if_right_smi);
11269 : {
11270 896 : TNode<Smi> smi_right = CAST(right);
11271 : CombineFeedback(var_type_feedback,
11272 896 : CompareOperationFeedback::kSignedSmall);
11273 896 : switch (op) {
11274 : case Operation::kLessThan:
11275 : BranchIfSmiLessThan(smi_left, smi_right, &return_true,
11276 224 : &return_false);
11277 224 : break;
11278 : case Operation::kLessThanOrEqual:
11279 : BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
11280 224 : &return_false);
11281 224 : break;
11282 : case Operation::kGreaterThan:
11283 : BranchIfSmiLessThan(smi_right, smi_left, &return_true,
11284 224 : &return_false);
11285 224 : break;
11286 : case Operation::kGreaterThanOrEqual:
11287 : BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
11288 224 : &return_false);
11289 224 : break;
11290 : default:
11291 0 : UNREACHABLE();
11292 : }
11293 : }
11294 :
11295 : BIND(&if_right_heapnumber);
11296 : {
11297 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11298 1792 : var_left_float = SmiToFloat64(smi_left);
11299 : var_right_float = LoadHeapNumberValue(right);
11300 896 : Goto(&do_float_comparison);
11301 : }
11302 :
11303 : BIND(&if_right_bigint);
11304 : {
11305 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11306 896 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11307 : NoContextConstant(),
11308 : SmiConstant(Reverse(op)), right, left));
11309 896 : Goto(&end);
11310 : }
11311 :
11312 : BIND(&if_right_not_numeric);
11313 : {
11314 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11315 : // Convert {right} to a Numeric; we don't need to perform the
11316 : // dedicated ToPrimitive(right, hint Number) operation, as the
11317 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11318 : // a Number hint.
11319 : var_right.Bind(
11320 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11321 896 : Goto(&loop);
11322 : }
11323 : }
11324 :
11325 : BIND(&if_left_not_smi);
11326 : {
11327 : Node* left_map = LoadMap(left);
11328 :
11329 896 : Label if_right_smi(this), if_right_not_smi(this);
11330 1792 : Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11331 :
11332 : BIND(&if_right_smi);
11333 : {
11334 896 : Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11335 896 : if_left_not_numeric(this, Label::kDeferred);
11336 1792 : GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11337 : Node* left_instance_type = LoadMapInstanceType(left_map);
11338 896 : Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
11339 896 : &if_left_not_numeric);
11340 :
11341 : BIND(&if_left_heapnumber);
11342 : {
11343 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11344 : var_left_float = LoadHeapNumberValue(left);
11345 1792 : var_right_float = SmiToFloat64(right);
11346 896 : Goto(&do_float_comparison);
11347 : }
11348 :
11349 : BIND(&if_left_bigint);
11350 : {
11351 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11352 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11353 : NoContextConstant(), SmiConstant(op),
11354 : left, right));
11355 896 : Goto(&end);
11356 : }
11357 :
11358 : BIND(&if_left_not_numeric);
11359 : {
11360 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11361 : // Convert {left} to a Numeric; we don't need to perform the
11362 : // dedicated ToPrimitive(left, hint Number) operation, as the
11363 : // ToNumeric(left) will by itself already invoke ToPrimitive with
11364 : // a Number hint.
11365 : var_left.Bind(
11366 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11367 896 : Goto(&loop);
11368 : }
11369 : }
11370 :
11371 : BIND(&if_right_not_smi);
11372 : {
11373 : Node* right_map = LoadMap(right);
11374 :
11375 896 : Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11376 896 : if_left_string(this), if_left_other(this, Label::kDeferred);
11377 1792 : GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11378 : Node* left_instance_type = LoadMapInstanceType(left_map);
11379 896 : GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
11380 1792 : Branch(IsStringInstanceType(left_instance_type), &if_left_string,
11381 896 : &if_left_other);
11382 :
11383 : BIND(&if_left_heapnumber);
11384 : {
11385 896 : Label if_right_heapnumber(this),
11386 896 : if_right_bigint(this, Label::kDeferred),
11387 896 : if_right_not_numeric(this, Label::kDeferred);
11388 1792 : GotoIf(WordEqual(right_map, left_map), &if_right_heapnumber);
11389 : Node* right_instance_type = LoadMapInstanceType(right_map);
11390 896 : Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11391 896 : &if_right_not_numeric);
11392 :
11393 : BIND(&if_right_heapnumber);
11394 : {
11395 : CombineFeedback(var_type_feedback,
11396 896 : CompareOperationFeedback::kNumber);
11397 : var_left_float = LoadHeapNumberValue(left);
11398 : var_right_float = LoadHeapNumberValue(right);
11399 896 : Goto(&do_float_comparison);
11400 : }
11401 :
11402 : BIND(&if_right_bigint);
11403 : {
11404 : OverwriteFeedback(var_type_feedback,
11405 896 : CompareOperationFeedback::kAny);
11406 896 : var_result = CAST(CallRuntime(
11407 : Runtime::kBigIntCompareToNumber, NoContextConstant(),
11408 : SmiConstant(Reverse(op)), right, left));
11409 896 : Goto(&end);
11410 : }
11411 :
11412 : BIND(&if_right_not_numeric);
11413 : {
11414 : OverwriteFeedback(var_type_feedback,
11415 896 : CompareOperationFeedback::kAny);
11416 : // Convert {right} to a Numeric; we don't need to perform
11417 : // dedicated ToPrimitive(right, hint Number) operation, as the
11418 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11419 : // a Number hint.
11420 : var_right.Bind(
11421 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11422 896 : Goto(&loop);
11423 : }
11424 : }
11425 :
11426 : BIND(&if_left_bigint);
11427 : {
11428 896 : Label if_right_heapnumber(this), if_right_bigint(this),
11429 896 : if_right_string(this), if_right_other(this);
11430 1792 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11431 : Node* right_instance_type = LoadMapInstanceType(right_map);
11432 896 : GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11433 1792 : Branch(IsStringInstanceType(right_instance_type), &if_right_string,
11434 896 : &if_right_other);
11435 :
11436 : BIND(&if_right_heapnumber);
11437 : {
11438 : OverwriteFeedback(var_type_feedback,
11439 896 : CompareOperationFeedback::kAny);
11440 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11441 : NoContextConstant(), SmiConstant(op),
11442 : left, right));
11443 896 : Goto(&end);
11444 : }
11445 :
11446 : BIND(&if_right_bigint);
11447 : {
11448 : CombineFeedback(var_type_feedback,
11449 896 : CompareOperationFeedback::kBigInt);
11450 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToBigInt,
11451 : NoContextConstant(), SmiConstant(op),
11452 : left, right));
11453 896 : Goto(&end);
11454 : }
11455 :
11456 : BIND(&if_right_string);
11457 : {
11458 : OverwriteFeedback(var_type_feedback,
11459 896 : CompareOperationFeedback::kAny);
11460 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,
11461 : NoContextConstant(), SmiConstant(op),
11462 : left, right));
11463 896 : Goto(&end);
11464 : }
11465 :
11466 : // {right} is not a Number, BigInt, or String.
11467 : BIND(&if_right_other);
11468 : {
11469 : OverwriteFeedback(var_type_feedback,
11470 896 : CompareOperationFeedback::kAny);
11471 : // Convert {right} to a Numeric; we don't need to perform
11472 : // dedicated ToPrimitive(right, hint Number) operation, as the
11473 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11474 : // a Number hint.
11475 : var_right.Bind(
11476 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11477 896 : Goto(&loop);
11478 : }
11479 : }
11480 :
11481 : BIND(&if_left_string);
11482 : {
11483 : Node* right_instance_type = LoadMapInstanceType(right_map);
11484 :
11485 896 : Label if_right_not_string(this, Label::kDeferred);
11486 1792 : GotoIfNot(IsStringInstanceType(right_instance_type),
11487 896 : &if_right_not_string);
11488 :
11489 : // Both {left} and {right} are strings.
11490 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
11491 : Builtins::Name builtin;
11492 896 : switch (op) {
11493 : case Operation::kLessThan:
11494 : builtin = Builtins::kStringLessThan;
11495 : break;
11496 : case Operation::kLessThanOrEqual:
11497 : builtin = Builtins::kStringLessThanOrEqual;
11498 224 : break;
11499 : case Operation::kGreaterThan:
11500 : builtin = Builtins::kStringGreaterThan;
11501 224 : break;
11502 : case Operation::kGreaterThanOrEqual:
11503 : builtin = Builtins::kStringGreaterThanOrEqual;
11504 224 : break;
11505 : default:
11506 0 : UNREACHABLE();
11507 : }
11508 1792 : var_result = CAST(CallBuiltin(builtin, context, left, right));
11509 896 : Goto(&end);
11510 :
11511 : BIND(&if_right_not_string);
11512 : {
11513 : OverwriteFeedback(var_type_feedback,
11514 896 : CompareOperationFeedback::kAny);
11515 : // {left} is a String, while {right} isn't. Check if {right} is
11516 : // a BigInt, otherwise call ToPrimitive(right, hint Number) if
11517 : // {right} is a receiver, or ToNumeric(left) and then
11518 : // ToNumeric(right) in the other cases.
11519 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11520 896 : Label if_right_bigint(this),
11521 896 : if_right_receiver(this, Label::kDeferred);
11522 896 : GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11523 1792 : GotoIf(IsJSReceiverInstanceType(right_instance_type),
11524 896 : &if_right_receiver);
11525 :
11526 : var_left.Bind(
11527 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11528 1792 : var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11529 896 : Goto(&loop);
11530 :
11531 : BIND(&if_right_bigint);
11532 : {
11533 896 : var_result = CAST(CallRuntime(
11534 : Runtime::kBigIntCompareToString, NoContextConstant(),
11535 : SmiConstant(Reverse(op)), right, left));
11536 896 : Goto(&end);
11537 : }
11538 :
11539 : BIND(&if_right_receiver);
11540 : {
11541 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11542 896 : isolate(), ToPrimitiveHint::kNumber);
11543 1792 : var_right.Bind(CallStub(callable, context, right));
11544 896 : Goto(&loop);
11545 : }
11546 : }
11547 : }
11548 :
11549 : BIND(&if_left_other);
11550 : {
11551 : // {left} is neither a Numeric nor a String, and {right} is not a Smi.
11552 896 : if (var_type_feedback != nullptr) {
11553 : // Collect NumberOrOddball feedback if {left} is an Oddball
11554 : // and {right} is either a HeapNumber or Oddball. Otherwise collect
11555 : // Any feedback.
11556 672 : Label collect_any_feedback(this), collect_oddball_feedback(this),
11557 672 : collect_feedback_done(this);
11558 1344 : GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
11559 672 : &collect_any_feedback);
11560 :
11561 1344 : GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
11562 : Node* right_instance_type = LoadMapInstanceType(right_map);
11563 1344 : Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
11564 672 : &collect_oddball_feedback, &collect_any_feedback);
11565 :
11566 : BIND(&collect_oddball_feedback);
11567 : {
11568 : CombineFeedback(var_type_feedback,
11569 672 : CompareOperationFeedback::kNumberOrOddball);
11570 672 : Goto(&collect_feedback_done);
11571 : }
11572 :
11573 : BIND(&collect_any_feedback);
11574 : {
11575 : OverwriteFeedback(var_type_feedback,
11576 672 : CompareOperationFeedback::kAny);
11577 672 : Goto(&collect_feedback_done);
11578 : }
11579 :
11580 : BIND(&collect_feedback_done);
11581 : }
11582 :
11583 : // If {left} is a receiver, call ToPrimitive(left, hint Number).
11584 : // Otherwise call ToNumeric(right) and then ToNumeric(left), the
11585 : // order here is important as it's observable by user code.
11586 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11587 896 : Label if_left_receiver(this, Label::kDeferred);
11588 1792 : GotoIf(IsJSReceiverInstanceType(left_instance_type),
11589 896 : &if_left_receiver);
11590 :
11591 1792 : var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11592 : var_left.Bind(
11593 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11594 896 : Goto(&loop);
11595 :
11596 : BIND(&if_left_receiver);
11597 : {
11598 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11599 896 : isolate(), ToPrimitiveHint::kNumber);
11600 1792 : var_left.Bind(CallStub(callable, context, left));
11601 896 : Goto(&loop);
11602 : }
11603 : }
11604 : }
11605 : }
11606 : }
11607 :
11608 : BIND(&do_float_comparison);
11609 : {
11610 896 : switch (op) {
11611 : case Operation::kLessThan:
11612 448 : Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
11613 224 : &return_true, &return_false);
11614 224 : break;
11615 : case Operation::kLessThanOrEqual:
11616 448 : Branch(Float64LessThanOrEqual(var_left_float.value(),
11617 224 : var_right_float.value()),
11618 224 : &return_true, &return_false);
11619 224 : break;
11620 : case Operation::kGreaterThan:
11621 224 : Branch(
11622 448 : Float64GreaterThan(var_left_float.value(), var_right_float.value()),
11623 224 : &return_true, &return_false);
11624 224 : break;
11625 : case Operation::kGreaterThanOrEqual:
11626 448 : Branch(Float64GreaterThanOrEqual(var_left_float.value(),
11627 224 : var_right_float.value()),
11628 224 : &return_true, &return_false);
11629 224 : break;
11630 : default:
11631 0 : UNREACHABLE();
11632 : }
11633 : }
11634 :
11635 : BIND(&return_true);
11636 : {
11637 : var_result = TrueConstant();
11638 896 : Goto(&end);
11639 : }
11640 :
11641 : BIND(&return_false);
11642 : {
11643 : var_result = FalseConstant();
11644 896 : Goto(&end);
11645 : }
11646 :
11647 : BIND(&end);
11648 896 : return var_result.value();
11649 : }
11650 :
11651 1120 : TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
11652 : SloppyTNode<Int32T> instance_type) {
11653 : TNode<Smi> feedback = SelectSmiConstant(
11654 2240 : Word32Equal(
11655 3360 : Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
11656 3360 : Int32Constant(kInternalizedTag)),
11657 : CompareOperationFeedback::kInternalizedString,
11658 : CompareOperationFeedback::kString);
11659 1120 : return feedback;
11660 : }
11661 :
// Handles the case where both operands of an (abstract or strict) equality
// comparison reference the very same object: jumps to {if_equal} or
// {if_notequal} accordingly. If {var_type_feedback} is non-null,
// CompareOperation feedback describing {value}'s type is combined into it
// along the way.
void CodeStubAssembler::GenerateEqual_Same(Node* value, Label* if_equal,
                                           Label* if_notequal,
                                           Variable* var_type_feedback) {
  // In case of abstract or strict equality checks, we need additional checks
  // for NaN values because they are not considered equal, even if both the
  // left and the right hand side reference exactly the same value.

  Label if_smi(this), if_heapnumber(this);
  GotoIf(TaggedIsSmi(value), &if_smi);

  Node* value_map = LoadMap(value);
  GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);

  // For non-HeapNumbers, all we do is collect type feedback.
  if (var_type_feedback != nullptr) {
    Node* instance_type = LoadMapInstanceType(value_map);

    Label if_string(this), if_receiver(this), if_oddball(this), if_symbol(this),
        if_bigint(this);
    GotoIf(IsStringInstanceType(instance_type), &if_string);
    GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
    GotoIf(IsOddballInstanceType(instance_type), &if_oddball);
    Branch(IsBigIntInstanceType(instance_type), &if_bigint, &if_symbol);

    BIND(&if_string);
    {
      CSA_ASSERT(this, IsString(value));
      CombineFeedback(var_type_feedback,
                      CollectFeedbackForString(instance_type));
      Goto(if_equal);
    }

    BIND(&if_symbol);
    {
      CSA_ASSERT(this, IsSymbol(value));
      CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
      Goto(if_equal);
    }

    BIND(&if_receiver);
    {
      CSA_ASSERT(this, IsJSReceiver(value));
      CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
      Goto(if_equal);
    }

    BIND(&if_bigint);
    {
      CSA_ASSERT(this, IsBigInt(value));
      CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
      Goto(if_equal);
    }

    BIND(&if_oddball);
    {
      CSA_ASSERT(this, IsOddball(value));
      Label if_boolean(this), if_not_boolean(this);
      Branch(IsBooleanMap(value_map), &if_boolean, &if_not_boolean);

      BIND(&if_boolean);
      {
        // For booleans, collect kAny feedback.
        CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
        Goto(if_equal);
      }

      BIND(&if_not_boolean);
      {
        CSA_ASSERT(this, IsNullOrUndefined(value));
        CombineFeedback(var_type_feedback,
                        CompareOperationFeedback::kReceiverOrNullOrUndefined);
        Goto(if_equal);
      }
    }
  } else {
    Goto(if_equal);
  }

  BIND(&if_heapnumber);
  {
    CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
    Node* number_value = LoadHeapNumberValue(value);
    // A HeapNumber equals itself unless its value is NaN.
    BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
  }

  BIND(&if_smi);
  {
    CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
    Goto(if_equal);
  }
}
11752 :
11753 : // ES6 section 7.2.12 Abstract Equality Comparison
11754 224 : Node* CodeStubAssembler::Equal(Node* left, Node* right, Node* context,
11755 : Variable* var_type_feedback) {
11756 : // This is a slightly optimized version of Object::Equals. Whenever you
11757 : // change something functionality wise in here, remember to update the
11758 : // Object::Equals method as well.
11759 :
11760 448 : Label if_equal(this), if_notequal(this), do_float_comparison(this),
11761 224 : do_right_stringtonumber(this, Label::kDeferred), end(this);
11762 448 : VARIABLE(result, MachineRepresentation::kTagged);
11763 : TVARIABLE(Float64T, var_left_float);
11764 : TVARIABLE(Float64T, var_right_float);
11765 :
11766 : // We can avoid code duplication by exploiting the fact that abstract equality
11767 : // is symmetric.
11768 224 : Label use_symmetry(this);
11769 :
11770 : // We might need to loop several times due to ToPrimitive and/or ToNumber
11771 : // conversions.
11772 448 : VARIABLE(var_left, MachineRepresentation::kTagged, left);
11773 448 : VARIABLE(var_right, MachineRepresentation::kTagged, right);
11774 448 : VariableList loop_variable_list({&var_left, &var_right}, zone());
11775 224 : if (var_type_feedback != nullptr) {
11776 : // Initialize the type feedback to None. The current feedback will be
11777 : // combined with the previous feedback.
11778 168 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
11779 168 : loop_variable_list.push_back(var_type_feedback);
11780 : }
11781 224 : Label loop(this, loop_variable_list);
11782 224 : Goto(&loop);
11783 : BIND(&loop);
11784 : {
11785 224 : left = var_left.value();
11786 224 : right = var_right.value();
11787 :
11788 224 : Label if_notsame(this);
11789 448 : GotoIf(WordNotEqual(left, right), &if_notsame);
11790 : {
11791 : // {left} and {right} reference the exact same value, yet we need special
11792 : // treatment for HeapNumber, as NaN is not equal to NaN.
11793 224 : GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
11794 : }
11795 :
11796 : BIND(&if_notsame);
11797 224 : Label if_left_smi(this), if_left_not_smi(this);
11798 448 : Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11799 :
11800 : BIND(&if_left_smi);
11801 : {
11802 224 : Label if_right_smi(this), if_right_not_smi(this);
11803 448 : Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11804 :
11805 : BIND(&if_right_smi);
11806 : {
11807 : // We have already checked for {left} and {right} being the same value,
11808 : // so when we get here they must be different Smis.
11809 : CombineFeedback(var_type_feedback,
11810 224 : CompareOperationFeedback::kSignedSmall);
11811 224 : Goto(&if_notequal);
11812 : }
11813 :
11814 : BIND(&if_right_not_smi);
11815 : Node* right_map = LoadMap(right);
11816 224 : Label if_right_heapnumber(this), if_right_boolean(this),
11817 224 : if_right_bigint(this, Label::kDeferred),
11818 224 : if_right_receiver(this, Label::kDeferred);
11819 448 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11820 : // {left} is Smi and {right} is not HeapNumber or Smi.
11821 224 : if (var_type_feedback != nullptr) {
11822 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
11823 : }
11824 448 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11825 : Node* right_type = LoadMapInstanceType(right_map);
11826 448 : GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11827 224 : GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11828 448 : Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
11829 224 : &if_notequal);
11830 :
11831 : BIND(&if_right_heapnumber);
11832 : {
11833 448 : var_left_float = SmiToFloat64(left);
11834 : var_right_float = LoadHeapNumberValue(right);
11835 224 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11836 224 : Goto(&do_float_comparison);
11837 : }
11838 :
11839 : BIND(&if_right_boolean);
11840 : {
11841 224 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11842 224 : Goto(&loop);
11843 : }
11844 :
11845 : BIND(&if_right_bigint);
11846 : {
11847 : result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
11848 224 : NoContextConstant(), right, left));
11849 224 : Goto(&end);
11850 : }
11851 :
11852 : BIND(&if_right_receiver);
11853 : {
11854 224 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
11855 448 : var_right.Bind(CallStub(callable, context, right));
11856 224 : Goto(&loop);
11857 : }
11858 : }
11859 :
11860 : BIND(&if_left_not_smi);
11861 : {
11862 448 : GotoIf(TaggedIsSmi(right), &use_symmetry);
11863 :
11864 224 : Label if_left_symbol(this), if_left_number(this), if_left_string(this),
11865 224 : if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
11866 224 : if_left_receiver(this);
11867 :
11868 : Node* left_map = LoadMap(left);
11869 : Node* right_map = LoadMap(right);
11870 : Node* left_type = LoadMapInstanceType(left_map);
11871 : Node* right_type = LoadMapInstanceType(right_map);
11872 :
11873 448 : GotoIf(IsStringInstanceType(left_type), &if_left_string);
11874 224 : GotoIf(IsSymbolInstanceType(left_type), &if_left_symbol);
11875 224 : GotoIf(IsHeapNumberInstanceType(left_type), &if_left_number);
11876 224 : GotoIf(IsOddballInstanceType(left_type), &if_left_oddball);
11877 224 : Branch(IsBigIntInstanceType(left_type), &if_left_bigint,
11878 224 : &if_left_receiver);
11879 :
11880 : BIND(&if_left_string);
11881 : {
11882 448 : GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
11883 448 : result.Bind(CallBuiltin(Builtins::kStringEqual, context, left, right));
11884 : CombineFeedback(var_type_feedback,
11885 448 : SmiOr(CollectFeedbackForString(left_type),
11886 896 : CollectFeedbackForString(right_type)));
11887 224 : Goto(&end);
11888 : }
11889 :
11890 : BIND(&if_left_number);
11891 : {
11892 224 : Label if_right_not_number(this);
11893 448 : GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
11894 :
11895 : var_left_float = LoadHeapNumberValue(left);
11896 : var_right_float = LoadHeapNumberValue(right);
11897 224 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11898 224 : Goto(&do_float_comparison);
11899 :
11900 : BIND(&if_right_not_number);
11901 : {
11902 224 : Label if_right_boolean(this);
11903 224 : if (var_type_feedback != nullptr) {
11904 168 : var_type_feedback->Bind(
11905 168 : SmiConstant(CompareOperationFeedback::kAny));
11906 : }
11907 448 : GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11908 448 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11909 224 : GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
11910 448 : Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
11911 224 : &if_notequal);
11912 :
11913 : BIND(&if_right_boolean);
11914 : {
11915 224 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11916 224 : Goto(&loop);
11917 : }
11918 : }
11919 : }
11920 :
11921 : BIND(&if_left_bigint);
11922 : {
11923 224 : Label if_right_heapnumber(this), if_right_bigint(this),
11924 224 : if_right_string(this), if_right_boolean(this);
11925 448 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11926 224 : GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11927 448 : GotoIf(IsStringInstanceType(right_type), &if_right_string);
11928 448 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11929 448 : Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
11930 224 : &if_notequal);
11931 :
11932 : BIND(&if_right_heapnumber);
11933 : {
11934 224 : if (var_type_feedback != nullptr) {
11935 168 : var_type_feedback->Bind(
11936 168 : SmiConstant(CompareOperationFeedback::kAny));
11937 : }
11938 : result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
11939 224 : NoContextConstant(), left, right));
11940 224 : Goto(&end);
11941 : }
11942 :
11943 : BIND(&if_right_bigint);
11944 : {
11945 224 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11946 : result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
11947 224 : NoContextConstant(), left, right));
11948 224 : Goto(&end);
11949 : }
11950 :
11951 : BIND(&if_right_string);
11952 : {
11953 224 : if (var_type_feedback != nullptr) {
11954 168 : var_type_feedback->Bind(
11955 168 : SmiConstant(CompareOperationFeedback::kAny));
11956 : }
11957 : result.Bind(CallRuntime(Runtime::kBigIntEqualToString,
11958 224 : NoContextConstant(), left, right));
11959 224 : Goto(&end);
11960 : }
11961 :
11962 : BIND(&if_right_boolean);
11963 : {
11964 224 : if (var_type_feedback != nullptr) {
11965 168 : var_type_feedback->Bind(
11966 168 : SmiConstant(CompareOperationFeedback::kAny));
11967 : }
11968 224 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11969 224 : Goto(&loop);
11970 : }
11971 : }
11972 :
11973 : BIND(&if_left_oddball);
11974 : {
11975 224 : Label if_left_boolean(this), if_left_not_boolean(this);
11976 448 : Branch(IsBooleanMap(left_map), &if_left_boolean, &if_left_not_boolean);
11977 :
11978 : BIND(&if_left_not_boolean);
11979 : {
11980 : // {left} is either Null or Undefined. Check if {right} is
11981 : // undetectable (which includes Null and Undefined).
11982 224 : Label if_right_undetectable(this), if_right_not_undetectable(this);
11983 448 : Branch(IsUndetectableMap(right_map), &if_right_undetectable,
11984 224 : &if_right_not_undetectable);
11985 :
11986 : BIND(&if_right_undetectable);
11987 : {
11988 224 : if (var_type_feedback != nullptr) {
11989 : // If {right} is undetectable, it must be either also
11990 : // Null or Undefined, or a Receiver (aka document.all).
11991 168 : var_type_feedback->Bind(SmiConstant(
11992 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
11993 : }
11994 224 : Goto(&if_equal);
11995 : }
11996 :
11997 : BIND(&if_right_not_undetectable);
11998 : {
11999 224 : if (var_type_feedback != nullptr) {
12000 : // Track whether {right} is Null, Undefined or Receiver.
12001 168 : var_type_feedback->Bind(SmiConstant(
12002 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
12003 336 : GotoIf(IsJSReceiverInstanceType(right_type), &if_notequal);
12004 336 : GotoIfNot(IsBooleanMap(right_map), &if_notequal);
12005 168 : var_type_feedback->Bind(
12006 168 : SmiConstant(CompareOperationFeedback::kAny));
12007 : }
12008 224 : Goto(&if_notequal);
12009 : }
12010 : }
12011 :
12012 : BIND(&if_left_boolean);
12013 : {
12014 224 : if (var_type_feedback != nullptr) {
12015 168 : var_type_feedback->Bind(
12016 168 : SmiConstant(CompareOperationFeedback::kAny));
12017 : }
12018 :
12019 : // If {right} is a Boolean too, it must be a different Boolean.
12020 448 : GotoIf(WordEqual(right_map, left_map), &if_notequal);
12021 :
12022 : // Otherwise, convert {left} to number and try again.
12023 224 : var_left.Bind(LoadObjectField(left, Oddball::kToNumberOffset));
12024 224 : Goto(&loop);
12025 : }
12026 : }
12027 :
12028 : BIND(&if_left_symbol);
12029 : {
12030 224 : Label if_right_receiver(this);
12031 448 : GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
12032 : // {right} is not a JSReceiver and also not the same Symbol as {left},
12033 : // so the result is "not equal".
12034 224 : if (var_type_feedback != nullptr) {
12035 168 : Label if_right_symbol(this);
12036 168 : GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
12037 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
12038 168 : Goto(&if_notequal);
12039 :
12040 : BIND(&if_right_symbol);
12041 : {
12042 : CombineFeedback(var_type_feedback,
12043 168 : CompareOperationFeedback::kSymbol);
12044 168 : Goto(&if_notequal);
12045 : }
12046 : } else {
12047 56 : Goto(&if_notequal);
12048 : }
12049 :
12050 : BIND(&if_right_receiver);
12051 : {
12052 : // {left} is a Primitive and {right} is a JSReceiver, so swapping
12053 : // the order is not observable.
12054 224 : if (var_type_feedback != nullptr) {
12055 168 : var_type_feedback->Bind(
12056 168 : SmiConstant(CompareOperationFeedback::kAny));
12057 : }
12058 224 : Goto(&use_symmetry);
12059 : }
12060 : }
12061 :
12062 : BIND(&if_left_receiver);
12063 : {
12064 : CSA_ASSERT(this, IsJSReceiverInstanceType(left_type));
12065 224 : Label if_right_receiver(this), if_right_not_receiver(this);
12066 448 : Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
12067 224 : &if_right_not_receiver);
12068 :
12069 : BIND(&if_right_receiver);
12070 : {
12071 : // {left} and {right} are different JSReceiver references.
12072 : CombineFeedback(var_type_feedback,
12073 224 : CompareOperationFeedback::kReceiver);
12074 224 : Goto(&if_notequal);
12075 : }
12076 :
12077 : BIND(&if_right_not_receiver);
12078 : {
12079 : // Check if {right} is undetectable, which means it must be Null
12080 : // or Undefined, since we already ruled out Receiver for {right}.
12081 224 : Label if_right_undetectable(this),
12082 224 : if_right_not_undetectable(this, Label::kDeferred);
12083 448 : Branch(IsUndetectableMap(right_map), &if_right_undetectable,
12084 224 : &if_right_not_undetectable);
12085 :
12086 : BIND(&if_right_undetectable);
12087 : {
12088 : // When we get here, {right} must be either Null or Undefined.
12089 : CSA_ASSERT(this, IsNullOrUndefined(right));
12090 224 : if (var_type_feedback != nullptr) {
12091 168 : var_type_feedback->Bind(SmiConstant(
12092 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
12093 : }
12094 448 : Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
12095 : }
12096 :
12097 : BIND(&if_right_not_undetectable);
12098 : {
12099 : // {right} is a Primitive, and neither Null or Undefined;
12100 : // convert {left} to Primitive too.
12101 224 : if (var_type_feedback != nullptr) {
12102 168 : var_type_feedback->Bind(
12103 168 : SmiConstant(CompareOperationFeedback::kAny));
12104 : }
12105 224 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
12106 448 : var_left.Bind(CallStub(callable, context, left));
12107 224 : Goto(&loop);
12108 : }
12109 : }
12110 : }
12111 : }
12112 :
12113 : BIND(&do_right_stringtonumber);
12114 : {
12115 448 : var_right.Bind(CallBuiltin(Builtins::kStringToNumber, context, right));
12116 224 : Goto(&loop);
12117 : }
12118 :
12119 : BIND(&use_symmetry);
12120 : {
12121 224 : var_left.Bind(right);
12122 224 : var_right.Bind(left);
12123 224 : Goto(&loop);
12124 : }
12125 : }
12126 :
12127 : BIND(&do_float_comparison);
12128 : {
12129 448 : Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
12130 224 : &if_equal, &if_notequal);
12131 : }
12132 :
12133 : BIND(&if_equal);
12134 : {
12135 224 : result.Bind(TrueConstant());
12136 224 : Goto(&end);
12137 : }
12138 :
12139 : BIND(&if_notequal);
12140 : {
12141 224 : result.Bind(FalseConstant());
12142 224 : Goto(&end);
12143 : }
12144 :
12145 : BIND(&end);
12146 448 : return result.value();
12147 : }
12148 :
// Strict Equality Comparison ({lhs} === {rhs}). Returns a Boolean. No
// conversions are performed, so no context is needed. If {var_type_feedback}
// is non-null, it is updated with CompareOperation feedback about the operand
// types encountered.
Node* CodeStubAssembler::StrictEqual(Node* lhs, Node* rhs,
                                     Variable* var_type_feedback) {
  // Pseudo-code for the algorithm below:
  //
  // if (lhs == rhs) {
  //   if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
  //   return true;
  // }
  // if (!lhs->IsSmi()) {
  //   if (lhs->IsHeapNumber()) {
  //     if (rhs->IsSmi()) {
  //       return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
  //     } else if (rhs->IsHeapNumber()) {
  //       return HeapNumber::cast(rhs)->value() ==
  //       HeapNumber::cast(lhs)->value();
  //     } else {
  //       return false;
  //     }
  //   } else {
  //     if (rhs->IsSmi()) {
  //       return false;
  //     } else {
  //       if (lhs->IsString()) {
  //         if (rhs->IsString()) {
  //           return %StringEqual(lhs, rhs);
  //         } else {
  //           return false;
  //         }
  //       } else if (lhs->IsBigInt()) {
  //         if (rhs->IsBigInt()) {
  //           return %BigIntEqualToBigInt(lhs, rhs);
  //         } else {
  //           return false;
  //         }
  //       } else {
  //         return false;
  //       }
  //     }
  //   }
  // } else {
  //   if (rhs->IsSmi()) {
  //     return false;
  //   } else {
  //     if (rhs->IsHeapNumber()) {
  //       return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
  //     } else {
  //       return false;
  //     }
  //   }
  // }

  Label if_equal(this), if_notequal(this), end(this);
  VARIABLE(result, MachineRepresentation::kTagged);

  // Check if {lhs} and {rhs} refer to the same object.
  Label if_same(this), if_notsame(this);
  Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);

  BIND(&if_same);
  {
    // The {lhs} and {rhs} reference the exact same value, yet we need special
    // treatment for HeapNumber, as NaN is not equal to NaN.
    if (var_type_feedback != nullptr) {
      var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
    }
    GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
  }

  BIND(&if_notsame);
  {
    // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
    // BigInt and String they can still be considered equal.

    if (var_type_feedback != nullptr) {
      var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
    }

    // Check if {lhs} is a Smi or a HeapObject.
    Label if_lhsissmi(this), if_lhsisnotsmi(this);
    Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);

    BIND(&if_lhsisnotsmi);
    {
      // Load the map of {lhs}.
      Node* lhs_map = LoadMap(lhs);

      // Check if {lhs} is a HeapNumber.
      Label if_lhsisnumber(this), if_lhsisnotnumber(this);
      Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);

      BIND(&if_lhsisnumber);
      {
        // Check if {rhs} is a Smi or a HeapObject.
        Label if_rhsissmi(this), if_rhsisnotsmi(this);
        Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

        BIND(&if_rhsissmi);
        {
          // Convert {lhs} and {rhs} to floating point values.
          Node* lhs_value = LoadHeapNumberValue(lhs);
          Node* rhs_value = SmiToFloat64(rhs);

          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kNumber));
          }

          // Perform a floating point comparison of {lhs} and {rhs}.
          Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
        }

        BIND(&if_rhsisnotsmi);
        {
          // Load the map of {rhs}.
          Node* rhs_map = LoadMap(rhs);

          // Check if {rhs} is also a HeapNumber.
          Label if_rhsisnumber(this), if_rhsisnotnumber(this);
          Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);

          BIND(&if_rhsisnumber);
          {
            // Convert {lhs} and {rhs} to floating point values.
            Node* lhs_value = LoadHeapNumberValue(lhs);
            Node* rhs_value = LoadHeapNumberValue(rhs);

            if (var_type_feedback != nullptr) {
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kNumber));
            }

            // Perform a floating point comparison of {lhs} and {rhs}.
            Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
          }

          BIND(&if_rhsisnotnumber);
          Goto(&if_notequal);
        }
      }

      BIND(&if_lhsisnotnumber);
      {
        // Check if {rhs} is a Smi or a HeapObject.
        Label if_rhsissmi(this), if_rhsisnotsmi(this);
        Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

        BIND(&if_rhsissmi);
        Goto(&if_notequal);

        BIND(&if_rhsisnotsmi);
        {
          // Load the instance type of {lhs}.
          Node* lhs_instance_type = LoadMapInstanceType(lhs_map);

          // Check if {lhs} is a String.
          Label if_lhsisstring(this), if_lhsisnotstring(this);
          Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
                 &if_lhsisnotstring);

          BIND(&if_lhsisstring);
          {
            // Load the instance type of {rhs}.
            Node* rhs_instance_type = LoadInstanceType(rhs);

            // Check if {rhs} is also a String.
            Label if_rhsisstring(this, Label::kDeferred),
                if_rhsisnotstring(this);
            Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
                   &if_rhsisnotstring);

            BIND(&if_rhsisstring);
            {
              if (var_type_feedback != nullptr) {
                TNode<Smi> lhs_feedback =
                    CollectFeedbackForString(lhs_instance_type);
                TNode<Smi> rhs_feedback =
                    CollectFeedbackForString(rhs_instance_type);
                var_type_feedback->Bind(SmiOr(lhs_feedback, rhs_feedback));
              }
              result.Bind(CallBuiltin(Builtins::kStringEqual,
                                      NoContextConstant(), lhs, rhs));
              Goto(&end);
            }

            BIND(&if_rhsisnotstring);
            Goto(&if_notequal);
          }

          BIND(&if_lhsisnotstring);

          // Check if {lhs} is a BigInt.
          Label if_lhsisbigint(this), if_lhsisnotbigint(this);
          Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
                 &if_lhsisnotbigint);

          BIND(&if_lhsisbigint);
          {
            // Load the instance type of {rhs}.
            Node* rhs_instance_type = LoadInstanceType(rhs);

            // Check if {rhs} is also a BigInt.
            Label if_rhsisbigint(this, Label::kDeferred),
                if_rhsisnotbigint(this);
            Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
                   &if_rhsisnotbigint);

            BIND(&if_rhsisbigint);
            {
              if (var_type_feedback != nullptr) {
                var_type_feedback->Bind(
                    SmiConstant(CompareOperationFeedback::kBigInt));
              }
              result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
                                      NoContextConstant(), lhs, rhs));
              Goto(&end);
            }

            BIND(&if_rhsisnotbigint);
            Goto(&if_notequal);
          }

          BIND(&if_lhsisnotbigint);
          if (var_type_feedback != nullptr) {
            // {lhs} and {rhs} cannot be strictly equal at this point; the
            // remaining checks only determine which feedback to record.
            // Load the instance type of {rhs}.
            Node* rhs_map = LoadMap(rhs);
            Node* rhs_instance_type = LoadMapInstanceType(rhs_map);

            Label if_lhsissymbol(this), if_lhsisreceiver(this),
                if_lhsisoddball(this);
            GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
                   &if_lhsisreceiver);
            GotoIf(IsBooleanMap(lhs_map), &if_notequal);
            GotoIf(IsOddballInstanceType(lhs_instance_type), &if_lhsisoddball);
            Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
                   &if_notequal);

            BIND(&if_lhsisreceiver);
            {
              GotoIf(IsBooleanMap(rhs_map), &if_notequal);
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kReceiver));
              GotoIf(IsJSReceiverInstanceType(rhs_instance_type), &if_notequal);
              var_type_feedback->Bind(SmiConstant(
                  CompareOperationFeedback::kReceiverOrNullOrUndefined));
              GotoIf(IsOddballInstanceType(rhs_instance_type), &if_notequal);
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kAny));
              Goto(&if_notequal);
            }

            BIND(&if_lhsisoddball);
            {
              STATIC_ASSERT(LAST_PRIMITIVE_TYPE == ODDBALL_TYPE);
              GotoIf(IsBooleanMap(rhs_map), &if_notequal);
              GotoIf(
                  Int32LessThan(rhs_instance_type, Int32Constant(ODDBALL_TYPE)),
                  &if_notequal);
              var_type_feedback->Bind(SmiConstant(
                  CompareOperationFeedback::kReceiverOrNullOrUndefined));
              Goto(&if_notequal);
            }

            BIND(&if_lhsissymbol);
            {
              GotoIfNot(IsSymbolInstanceType(rhs_instance_type), &if_notequal);
              var_type_feedback->Bind(
                  SmiConstant(CompareOperationFeedback::kSymbol));
              Goto(&if_notequal);
            }
          } else {
            Goto(&if_notequal);
          }
        }
      }
    }

    BIND(&if_lhsissmi);
    {
      // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
      // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
      // HeapNumber with an equal floating point value.

      // Check if {rhs} is a Smi or a HeapObject.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      BIND(&if_rhsissmi);
      if (var_type_feedback != nullptr) {
        var_type_feedback->Bind(
            SmiConstant(CompareOperationFeedback::kSignedSmall));
      }
      Goto(&if_notequal);

      BIND(&if_rhsisnotsmi);
      {
        // Load the map of the {rhs}.
        Node* rhs_map = LoadMap(rhs);

        // The {rhs} could be a HeapNumber with the same value as {lhs}.
        Label if_rhsisnumber(this), if_rhsisnotnumber(this);
        Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);

        BIND(&if_rhsisnumber);
        {
          // Convert {lhs} and {rhs} to floating point values.
          Node* lhs_value = SmiToFloat64(lhs);
          Node* rhs_value = LoadHeapNumberValue(rhs);

          if (var_type_feedback != nullptr) {
            var_type_feedback->Bind(
                SmiConstant(CompareOperationFeedback::kNumber));
          }

          // Perform a floating point comparison of {lhs} and {rhs}.
          Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
        }

        BIND(&if_rhsisnotnumber);
        Goto(&if_notequal);
      }
    }
  }

  BIND(&if_equal);
  {
    result.Bind(TrueConstant());
    Goto(&end);
  }

  BIND(&if_notequal);
  {
    result.Bind(FalseConstant());
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}
12487 :
// ECMA#sec-samevalue
// This algorithm differs from the Strict Equality Comparison Algorithm in its
// treatment of signed zeroes and NaNs.
// Branches to {if_true} iff SameValue(lhs, rhs) holds. In
// SameValueMode::kNumbersOnly the String/BigInt cases are statically
// omitted, so only Smi/HeapNumber (and identity) comparisons are emitted.
void CodeStubAssembler::BranchIfSameValue(Node* lhs, Node* rhs, Label* if_true,
                                          Label* if_false, SameValueMode mode) {
  VARIABLE(var_lhs_value, MachineRepresentation::kFloat64);
  VARIABLE(var_rhs_value, MachineRepresentation::kFloat64);
  Label do_fcmp(this);

  // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
  // StrictEqual - SameValue considers two NaNs to be equal.
  GotoIf(WordEqual(lhs, rhs), if_true);

  // Check if the {lhs} is a Smi.
  Label if_lhsissmi(this), if_lhsisheapobject(this);
  Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);

  BIND(&if_lhsissmi);
  {
    // Since {lhs} is a Smi, the comparison can only yield true
    // iff the {rhs} is a HeapNumber with the same float64 value.
    Branch(TaggedIsSmi(rhs), if_false, [&] {
      GotoIfNot(IsHeapNumber(rhs), if_false);
      var_lhs_value.Bind(SmiToFloat64(lhs));
      var_rhs_value.Bind(LoadHeapNumberValue(rhs));
      Goto(&do_fcmp);
    });
  }

  BIND(&if_lhsisheapobject);
  {
    // Check if the {rhs} is a Smi.
    Branch(TaggedIsSmi(rhs),
           [&] {
             // Since {rhs} is a Smi, the comparison can only yield true
             // iff the {lhs} is a HeapNumber with the same float64 value.
             GotoIfNot(IsHeapNumber(lhs), if_false);
             var_lhs_value.Bind(LoadHeapNumberValue(lhs));
             var_rhs_value.Bind(SmiToFloat64(rhs));
             Goto(&do_fcmp);
           },
           [&] {
             // Now this can only yield true if either both {lhs} and {rhs} are
             // HeapNumbers with the same value, or both are Strings with the
             // same character sequence, or both are BigInts with the same
             // value.
             Label if_lhsisheapnumber(this), if_lhsisstring(this),
                 if_lhsisbigint(this);
             Node* const lhs_map = LoadMap(lhs);
             GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
             // kNumbersOnly mode skips the String/BigInt dispatch entirely.
             if (mode != SameValueMode::kNumbersOnly) {
               Node* const lhs_instance_type = LoadMapInstanceType(lhs_map);
               GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
               GotoIf(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint);
             }
             Goto(if_false);

             BIND(&if_lhsisheapnumber);
             {
               GotoIfNot(IsHeapNumber(rhs), if_false);
               var_lhs_value.Bind(LoadHeapNumberValue(lhs));
               var_rhs_value.Bind(LoadHeapNumberValue(rhs));
               Goto(&do_fcmp);
             }

             if (mode != SameValueMode::kNumbersOnly) {
               BIND(&if_lhsisstring);
               {
                 // Now we can only yield true if {rhs} is also a String
                 // with the same sequence of characters.
                 GotoIfNot(IsString(rhs), if_false);
                 Node* const result = CallBuiltin(
                     Builtins::kStringEqual, NoContextConstant(), lhs, rhs);
                 Branch(IsTrue(result), if_true, if_false);
               }

               BIND(&if_lhsisbigint);
               {
                 // BigInt equality is delegated to the runtime.
                 GotoIfNot(IsBigInt(rhs), if_false);
                 Node* const result =
                     CallRuntime(Runtime::kBigIntEqualToBigInt,
                                 NoContextConstant(), lhs, rhs);
                 Branch(IsTrue(result), if_true, if_false);
               }
             }
           });
  }

  BIND(&do_fcmp);
  {
    // Both sides have been converted to float64; finish with the
    // SameValue-specific numeric comparison (NaN == NaN, +0 != -0).
    TNode<Float64T> lhs_value = UncheckedCast<Float64T>(var_lhs_value.value());
    TNode<Float64T> rhs_value = UncheckedCast<Float64T>(var_rhs_value.value());
    BranchIfSameNumberValue(lhs_value, rhs_value, if_true, if_false);
  }
}
12583 :
// Branches to {if_true} iff {lhs_value} and {rhs_value} are the same number
// in the SameValue sense: two NaNs compare equal, while +0.0 and -0.0 are
// distinguished (via their float64 high words).
void CodeStubAssembler::BranchIfSameNumberValue(TNode<Float64T> lhs_value,
                                                TNode<Float64T> rhs_value,
                                                Label* if_true,
                                                Label* if_false) {
  Label if_equal(this), if_notequal(this);
  Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);

  BIND(&if_equal);
  {
    // We still need to handle the case when {lhs} and {rhs} are -0.0 and
    // 0.0 (or vice versa). Compare the high word to
    // distinguish between the two.
    Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_value);
    Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_value);

    // If x is +0 and y is -0, return false.
    // If x is -0 and y is +0, return false.
    Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
  }

  BIND(&if_notequal);
  {
    // Return true iff both {rhs} and {lhs} are NaN.
    // (A float is NaN exactly when it does not compare equal to itself.)
    GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
    Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
  }
}
12611 :
// Determines whether {object} (or anything on its prototype chain) has the
// property {key}, returning the True/False oddball. {mode} selects between
// plain HasProperty and for-in semantics, which differ only in their proxy
// and runtime fallbacks below.
TNode<Oddball> CodeStubAssembler::HasProperty(SloppyTNode<Context> context,
                                              SloppyTNode<Object> object,
                                              SloppyTNode<Object> key,
                                              HasPropertyLookupMode mode) {
  Label call_runtime(this, Label::kDeferred), return_true(this),
      return_false(this), end(this), if_proxy(this, Label::kDeferred);

  // Per-holder callback used by TryPrototypeChainLookup for named lookups.
  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
      [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
                           Node* holder_instance_type, Node* unique_name,
                           Label* next_holder, Label* if_bailout) {
        TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
                          &return_true, next_holder, if_bailout);
      };

  // Per-holder callback used by TryPrototypeChainLookup for indexed lookups.
  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
      [this, &return_true, &return_false](
          Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* index, Label* next_holder,
          Label* if_bailout) {
        TryLookupElement(holder, holder_map, holder_instance_type, index,
                         &return_true, &return_false, next_holder, if_bailout);
      };

  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &return_false,
                          &call_runtime, &if_proxy);

  TVARIABLE(Oddball, result);

  BIND(&if_proxy);
  {
    TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
    switch (mode) {
      case kHasProperty:
        // Private symbols are never observable through a proxy.
        GotoIf(IsPrivateSymbol(name), &return_false);

        result = CAST(
            CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
        Goto(&end);
        break;
      case kForInHasProperty:
        // For-in on proxies is handled entirely by the runtime.
        Goto(&call_runtime);
        break;
    }
  }

  BIND(&return_true);
  {
    result = TrueConstant();
    Goto(&end);
  }

  BIND(&return_false);
  {
    result = FalseConstant();
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    // Slow path: pick the runtime function matching {mode}.
    Runtime::FunctionId fallback_runtime_function_id;
    switch (mode) {
      case kHasProperty:
        fallback_runtime_function_id = Runtime::kHasProperty;
        break;
      case kForInHasProperty:
        fallback_runtime_function_id = Runtime::kForInHasProperty;
        break;
    }

    result =
        CAST(CallRuntime(fallback_runtime_function_id, context, object, key));
    Goto(&end);
  }

  BIND(&end);
  CSA_ASSERT(this, IsBoolean(result.value()));
  return result.value();
}
12692 :
// Computes the result of the `typeof` operator for {value}, returning one of
// the interned result strings, or — for oddballs — the string precomputed in
// the Oddball::kTypeOfOffset field.
Node* CodeStubAssembler::Typeof(Node* value) {
  VARIABLE(result_var, MachineRepresentation::kTagged);

  Label return_number(this, Label::kDeferred), if_oddball(this),
      return_function(this), return_undefined(this), return_object(this),
      return_string(this), return_bigint(this), return_result(this);

  // Smis are numbers.
  GotoIf(TaggedIsSmi(value), &return_number);

  Node* map = LoadMap(value);

  GotoIf(IsHeapNumberMap(map), &return_number);

  Node* instance_type = LoadMapInstanceType(map);

  // Oddballs (undefined, null, booleans, ...) carry their own typeof string.
  GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);

  // Isolate the IsCallable and IsUndetectable bits of the map's bit field.
  Node* callable_or_undetectable_mask = Word32And(
      LoadMapBitField(map),
      Int32Constant(Map::IsCallableBit::kMask | Map::IsUndetectableBit::kMask));

  // "function" iff the map is callable and not undetectable.
  GotoIf(Word32Equal(callable_or_undetectable_mask,
                     Int32Constant(Map::IsCallableBit::kMask)),
         &return_function);

  // At this point a non-zero mask implies the undetectable bit is set, and
  // undetectable objects report "undefined".
  GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
            &return_undefined);

  GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);

  GotoIf(IsStringInstanceType(instance_type), &return_string);

  GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);

  // Only Symbols remain.
  CSA_ASSERT(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE));
  result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
  Goto(&return_result);

  BIND(&return_number);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
    Goto(&return_result);
  }

  BIND(&if_oddball);
  {
    // The oddball's typeof string is stored on the object itself.
    Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
    result_var.Bind(type);
    Goto(&return_result);
  }

  BIND(&return_function);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
    Goto(&return_result);
  }

  BIND(&return_undefined);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
    Goto(&return_result);
  }

  BIND(&return_object);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
    Goto(&return_result);
  }

  BIND(&return_string);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
    Goto(&return_result);
  }

  BIND(&return_bigint);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->bigint_string()));
    Goto(&return_result);
  }

  BIND(&return_result);
  return result_var.value();
}
12777 :
// Returns the super constructor of {active_function}, i.e. the prototype of
// its map; throws ThrowNotSuperConstructor (via the runtime) if that
// prototype is not a constructor.
TNode<Object> CodeStubAssembler::GetSuperConstructor(
    SloppyTNode<Context> context, SloppyTNode<JSFunction> active_function) {
  Label is_not_constructor(this, Label::kDeferred), out(this);
  TVARIABLE(Object, result);

  TNode<Map> map = LoadMap(active_function);
  TNode<Object> prototype = LoadMapPrototype(map);
  TNode<Map> prototype_map = LoadMap(CAST(prototype));
  GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);

  result = prototype;
  Goto(&out);

  BIND(&is_not_constructor);
  {
    // The runtime call throws; control never returns here.
    CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
                active_function);
    Unreachable();
  }

  BIND(&out);
  return result.value();
}
12801 :
// ES#sec-speciesconstructor
// Reads {object}.constructor[@@species], falling back to
// {default_constructor} when it is undefined/null, and throwing a TypeError
// when the result is not a constructor. (Step numbers below follow the
// spec algorithm.)
TNode<JSReceiver> CodeStubAssembler::SpeciesConstructor(
    SloppyTNode<Context> context, SloppyTNode<Object> object,
    SloppyTNode<JSReceiver> default_constructor) {
  Isolate* isolate = this->isolate();
  TVARIABLE(JSReceiver, var_result, default_constructor);

  // 2. Let C be ? Get(O, "constructor").
  TNode<Object> constructor =
      GetProperty(context, object, isolate->factory()->constructor_string());

  // 3. If C is undefined, return defaultConstructor.
  Label out(this);
  GotoIf(IsUndefined(constructor), &out);

  // 4. If Type(C) is not Object, throw a TypeError exception.
  ThrowIfNotJSReceiver(context, constructor,
                       MessageTemplate::kConstructorNotReceiver);

  // 5. Let S be ? Get(C, @@species).
  TNode<Object> species =
      GetProperty(context, constructor, isolate->factory()->species_symbol());

  // 6. If S is either undefined or null, return defaultConstructor.
  GotoIf(IsNullOrUndefined(species), &out);

  // 7. If IsConstructor(S) is true, return S.
  Label throw_error(this);
  GotoIf(TaggedIsSmi(species), &throw_error);
  GotoIfNot(IsConstructorMap(LoadMap(CAST(species))), &throw_error);
  var_result = CAST(species);
  Goto(&out);

  // 8. Throw a TypeError exception.
  BIND(&throw_error);
  ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);

  BIND(&out);
  return var_result.value();
}
12841 :
// Implements the `instanceof` operator: looks up {callable}'s @@hasInstance
// handler and applies it to {object}, with a fast path for the default
// Function.prototype[@@hasInstance] and an OrdinaryHasInstance fallback.
// Throws a TypeError when {callable} is not a receiver / not callable.
Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
                                    Node* context) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label if_notcallable(this, Label::kDeferred),
      if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
      if_nohandler(this, Label::kDeferred), return_true(this),
      return_false(this), return_result(this, &var_result);

  // Ensure that the {callable} is actually a JSReceiver.
  GotoIf(TaggedIsSmi(callable), &if_notreceiver);
  GotoIfNot(IsJSReceiver(callable), &if_notreceiver);

  // Load the @@hasInstance property from {callable}.
  Node* inst_of_handler =
      GetProperty(context, callable, HasInstanceSymbolConstant());

  // Optimize for the likely case where {inst_of_handler} is the builtin
  // Function.prototype[@@hasInstance] method, and emit a direct call in
  // that case without any additional checking.
  Node* native_context = LoadNativeContext(context);
  Node* function_has_instance =
      LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
  GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
            &if_otherhandler);
  {
    // Call to Function.prototype[@@hasInstance] directly.
    Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance),
                     CallTrampolineDescriptor{});
    Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_otherhandler);
  {
    // Check if there's actually an {inst_of_handler}.
    GotoIf(IsNull(inst_of_handler), &if_nohandler);
    GotoIf(IsUndefined(inst_of_handler), &if_nohandler);

    // Call the {inst_of_handler} for {callable} and {object}.
    Node* result = CallJS(
        CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
        context, inst_of_handler, callable, object);

    // Convert the {result} to a Boolean.
    BranchIfToBooleanIsTrue(result, &return_true, &return_false);
  }

  BIND(&if_nohandler);
  {
    // Ensure that the {callable} is actually Callable.
    GotoIfNot(IsCallable(callable), &if_notcallable);

    // Use the OrdinaryHasInstance algorithm.
    Node* result =
        CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_notcallable);
  { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }

  BIND(&if_notreceiver);
  { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }

  BIND(&return_true);
  var_result.Bind(TrueConstant());
  Goto(&return_result);

  BIND(&return_false);
  var_result.Bind(FalseConstant());
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
12919 :
12920 1064 : TNode<Number> CodeStubAssembler::NumberInc(SloppyTNode<Number> value) {
12921 1064 : TVARIABLE(Number, var_result);
12922 : TVARIABLE(Float64T, var_finc_value);
12923 1064 : Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
12924 2128 : Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12925 :
12926 : BIND(&if_issmi);
12927 : {
12928 1064 : Label if_overflow(this);
12929 : TNode<Smi> smi_value = CAST(value);
12930 1064 : TNode<Smi> one = SmiConstant(1);
12931 2128 : var_result = TrySmiAdd(smi_value, one, &if_overflow);
12932 1064 : Goto(&end);
12933 :
12934 : BIND(&if_overflow);
12935 : {
12936 2128 : var_finc_value = SmiToFloat64(smi_value);
12937 1064 : Goto(&do_finc);
12938 : }
12939 : }
12940 :
12941 : BIND(&if_isnotsmi);
12942 : {
12943 : TNode<HeapNumber> heap_number_value = CAST(value);
12944 :
12945 : // Load the HeapNumber value.
12946 : var_finc_value = LoadHeapNumberValue(heap_number_value);
12947 1064 : Goto(&do_finc);
12948 : }
12949 :
12950 : BIND(&do_finc);
12951 : {
12952 : TNode<Float64T> finc_value = var_finc_value.value();
12953 1064 : TNode<Float64T> one = Float64Constant(1.0);
12954 1064 : TNode<Float64T> finc_result = Float64Add(finc_value, one);
12955 2128 : var_result = AllocateHeapNumberWithValue(finc_result);
12956 1064 : Goto(&end);
12957 : }
12958 :
12959 : BIND(&end);
12960 1064 : return var_result.value();
12961 : }
12962 :
12963 224 : TNode<Number> CodeStubAssembler::NumberDec(SloppyTNode<Number> value) {
12964 224 : TVARIABLE(Number, var_result);
12965 : TVARIABLE(Float64T, var_fdec_value);
12966 224 : Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
12967 448 : Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12968 :
12969 : BIND(&if_issmi);
12970 : {
12971 : TNode<Smi> smi_value = CAST(value);
12972 224 : TNode<Smi> one = SmiConstant(1);
12973 224 : Label if_overflow(this);
12974 448 : var_result = TrySmiSub(smi_value, one, &if_overflow);
12975 224 : Goto(&end);
12976 :
12977 : BIND(&if_overflow);
12978 : {
12979 448 : var_fdec_value = SmiToFloat64(smi_value);
12980 224 : Goto(&do_fdec);
12981 : }
12982 : }
12983 :
12984 : BIND(&if_isnotsmi);
12985 : {
12986 : TNode<HeapNumber> heap_number_value = CAST(value);
12987 :
12988 : // Load the HeapNumber value.
12989 : var_fdec_value = LoadHeapNumberValue(heap_number_value);
12990 224 : Goto(&do_fdec);
12991 : }
12992 :
12993 : BIND(&do_fdec);
12994 : {
12995 : TNode<Float64T> fdec_value = var_fdec_value.value();
12996 224 : TNode<Float64T> minus_one = Float64Constant(-1.0);
12997 224 : TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
12998 448 : var_result = AllocateHeapNumberWithValue(fdec_result);
12999 224 : Goto(&end);
13000 : }
13001 :
13002 : BIND(&end);
13003 224 : return var_result.value();
13004 : }
13005 :
13006 2524 : TNode<Number> CodeStubAssembler::NumberAdd(SloppyTNode<Number> a,
13007 : SloppyTNode<Number> b) {
13008 2524 : TVARIABLE(Number, var_result);
13009 2524 : Label float_add(this, Label::kDeferred), end(this);
13010 5048 : GotoIf(TaggedIsNotSmi(a), &float_add);
13011 5048 : GotoIf(TaggedIsNotSmi(b), &float_add);
13012 :
13013 : // Try fast Smi addition first.
13014 5048 : var_result = TrySmiAdd(CAST(a), CAST(b), &float_add);
13015 2524 : Goto(&end);
13016 :
13017 : BIND(&float_add);
13018 : {
13019 5048 : var_result = ChangeFloat64ToTagged(
13020 10096 : Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
13021 2524 : Goto(&end);
13022 : }
13023 :
13024 : BIND(&end);
13025 2524 : return var_result.value();
13026 : }
13027 :
13028 2076 : TNode<Number> CodeStubAssembler::NumberSub(SloppyTNode<Number> a,
13029 : SloppyTNode<Number> b) {
13030 2076 : TVARIABLE(Number, var_result);
13031 2076 : Label float_sub(this, Label::kDeferred), end(this);
13032 4152 : GotoIf(TaggedIsNotSmi(a), &float_sub);
13033 4152 : GotoIf(TaggedIsNotSmi(b), &float_sub);
13034 :
13035 : // Try fast Smi subtraction first.
13036 4152 : var_result = TrySmiSub(CAST(a), CAST(b), &float_sub);
13037 2076 : Goto(&end);
13038 :
13039 : BIND(&float_sub);
13040 : {
13041 4152 : var_result = ChangeFloat64ToTagged(
13042 8304 : Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
13043 2076 : Goto(&end);
13044 : }
13045 :
13046 : BIND(&end);
13047 2076 : return var_result.value();
13048 : }
13049 :
13050 236 : void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
13051 472 : Label is_number(this);
13052 472 : GotoIf(TaggedIsSmi(input), &is_number);
13053 472 : Branch(IsHeapNumber(input), &is_number, is_not_number);
13054 : BIND(&is_number);
13055 236 : }
13056 :
// Jumps to {is_number} when {input} is a Smi or a HeapNumber; otherwise
// falls through.
void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
  GotoIf(TaggedIsSmi(input), is_number);
  GotoIf(IsHeapNumber(input), is_number);
}
13061 :
13062 2352 : TNode<Number> CodeStubAssembler::BitwiseOp(Node* left32, Node* right32,
13063 : Operation bitwise_op) {
13064 2352 : switch (bitwise_op) {
13065 : case Operation::kBitwiseAnd:
13066 784 : return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
13067 : case Operation::kBitwiseOr:
13068 784 : return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
13069 : case Operation::kBitwiseXor:
13070 784 : return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
13071 : case Operation::kShiftLeft:
13072 392 : if (!Word32ShiftIsSafe()) {
13073 0 : right32 = Word32And(right32, Int32Constant(0x1F));
13074 : }
13075 784 : return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
13076 : case Operation::kShiftRight:
13077 392 : if (!Word32ShiftIsSafe()) {
13078 0 : right32 = Word32And(right32, Int32Constant(0x1F));
13079 : }
13080 784 : return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
13081 : case Operation::kShiftRightLogical:
13082 392 : if (!Word32ShiftIsSafe()) {
13083 0 : right32 = Word32And(right32, Int32Constant(0x1F));
13084 : }
13085 784 : return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
13086 : default:
13087 : break;
13088 : }
13089 0 : UNREACHABLE();
13090 : }
13091 :
// ES #sec-createarrayiterator
// Allocates a fresh JSArrayIterator over {object} using the native
// context's initial array-iterator map, with next-index 0 and the given
// iteration {kind} stored as a Smi.
TNode<JSArrayIterator> CodeStubAssembler::CreateArrayIterator(
    TNode<Context> context, TNode<Object> object, IterationKind kind) {
  TNode<Context> native_context = LoadNativeContext(context);
  TNode<Map> iterator_map = CAST(LoadContextElement(
      native_context, Context::INITIAL_ARRAY_ITERATOR_MAP_INDEX));
  Node* iterator = Allocate(JSArrayIterator::kSize);
  StoreMapNoWriteBarrier(iterator, iterator_map);
  // Freshly allocated object: no write barriers needed for the stores below.
  StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kIteratedObjectOffset, object);
  StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
                                 SmiConstant(0));
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kKindOffset,
      SmiConstant(Smi::FromInt(static_cast<int>(kind))));
  return CAST(iterator);
}
13113 :
// Allocates a fresh JSIteratorResult {value, done} using the native
// context's iterator-result map. {done} must already be a boolean (asserted
// in debug builds).
Node* CodeStubAssembler::AllocateJSIteratorResult(Node* context, Node* value,
                                                  Node* done) {
  CSA_ASSERT(this, IsBoolean(done));
  Node* native_context = LoadNativeContext(context);
  Node* map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  Node* result = Allocate(JSIteratorResult::kSize);
  StoreMapNoWriteBarrier(result, map);
  // Freshly allocated object: plain no-write-barrier stores suffice.
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
  return result;
}
13130 :
// Allocates a JSIteratorResult whose value is the packed two-element JSArray
// [key, value] and whose done flag is false (the shape produced by entry
// iterators). The FixedArray backing store, the JSArray and the
// JSIteratorResult are carved out of a single contiguous allocation via
// InnerAllocate.
Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
                                                          Node* key,
                                                          Node* value) {
  Node* native_context = LoadNativeContext(context);
  Node* length = SmiConstant(2);
  int const elements_size = FixedArray::SizeFor(2);
  // One allocation covering elements + array + iterator result.
  TNode<FixedArray> elements = UncheckedCast<FixedArray>(
      Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize));
  // Initialize the FixedArray holding [key, value].
  StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
                       RootIndex::kFixedArrayMap);
  StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
  StoreFixedArrayElement(elements, 0, key);
  StoreFixedArrayElement(elements, 1, value);
  // Initialize the JSArray directly after the elements.
  Node* array_map = LoadContextElement(
      native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX);
  TNode<HeapObject> array = InnerAllocate(elements, elements_size);
  StoreMapNoWriteBarrier(array, array_map);
  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
  // Initialize the JSIteratorResult directly after the array.
  Node* iterator_map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  TNode<HeapObject> result = InnerAllocate(array, JSArray::kSize);
  StoreMapNoWriteBarrier(result, iterator_map);
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
  StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
                       RootIndex::kFalseValue);
  return result;
}
13165 :
13166 224 : TNode<JSReceiver> CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
13167 : TNode<Object> o,
13168 : TNode<Number> len) {
13169 : TNode<JSReceiver> constructor =
13170 224 : CAST(CallRuntime(Runtime::kArraySpeciesConstructor, context, o));
13171 224 : return Construct(context, constructor, len);
13172 : }
13173 :
13174 9636 : Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
13175 : CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
13176 : TNode<Uint32T> buffer_bit_field = LoadJSArrayBufferBitField(CAST(buffer));
13177 9636 : return IsSetWord32<JSArrayBuffer::WasDetachedBit>(buffer_bit_field);
13178 : }
13179 :
// Throws a TypeError (kDetachedOperation, naming {method_name}) if
// {array_buffer} has been detached; otherwise falls through.
void CodeStubAssembler::ThrowIfArrayBufferIsDetached(
    SloppyTNode<Context> context, TNode<JSArrayBuffer> array_buffer,
    const char* method_name) {
  Label if_detached(this, Label::kDeferred), if_not_detached(this);
  Branch(IsDetachedBuffer(array_buffer), &if_detached, &if_not_detached);
  BIND(&if_detached);
  ThrowTypeError(context, MessageTemplate::kDetachedOperation, method_name);
  BIND(&if_not_detached);
}
13189 :
13190 896 : void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
13191 : SloppyTNode<Context> context, TNode<JSArrayBufferView> array_buffer_view,
13192 : const char* method_name) {
13193 896 : TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
13194 896 : ThrowIfArrayBufferIsDetached(context, buffer, method_name);
13195 896 : }
13196 :
13197 504 : TNode<Uint32T> CodeStubAssembler::LoadJSArrayBufferBitField(
13198 : TNode<JSArrayBuffer> array_buffer) {
13199 504 : return LoadObjectField<Uint32T>(array_buffer, JSArrayBuffer::kBitFieldOffset);
13200 : }
13201 :
13202 504 : TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStore(
13203 : TNode<JSArrayBuffer> array_buffer) {
13204 : return LoadObjectField<RawPtrT>(array_buffer,
13205 504 : JSArrayBuffer::kBackingStoreOffset);
13206 : }
13207 :
13208 784 : TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(
13209 : TNode<JSArrayBufferView> array_buffer_view) {
13210 : return LoadObjectField<JSArrayBuffer>(array_buffer_view,
13211 784 : JSArrayBufferView::kBufferOffset);
13212 : }
13213 :
13214 56 : TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteLength(
13215 : TNode<JSArrayBufferView> array_buffer_view) {
13216 : return LoadObjectField<UintPtrT>(array_buffer_view,
13217 56 : JSArrayBufferView::kByteLengthOffset);
13218 : }
13219 :
13220 560 : TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteOffset(
13221 : TNode<JSArrayBufferView> array_buffer_view) {
13222 : return LoadObjectField<UintPtrT>(array_buffer_view,
13223 560 : JSArrayBufferView::kByteOffsetOffset);
13224 : }
13225 :
13226 1232 : TNode<Smi> CodeStubAssembler::LoadJSTypedArrayLength(
13227 : TNode<JSTypedArray> typed_array) {
13228 1232 : return LoadObjectField<Smi>(typed_array, JSTypedArray::kLengthOffset);
13229 : }
13230 :
// Captures the stack arguments of the frame described by {fp} (or the
// current frame pointer when {fp} is null). base_ is computed as fp_ plus
// the offset derived from {argc}; argument slots are then addressed
// relative to base_ (see AtIndexPtr) and the receiver one system-pointer
// slot above it (see GetReceiver).
CodeStubArguments::CodeStubArguments(
    CodeStubAssembler* assembler, Node* argc, Node* fp,
    CodeStubAssembler::ParameterMode param_mode, ReceiverMode receiver_mode)
    : assembler_(assembler),
      argc_mode_(param_mode),
      receiver_mode_(receiver_mode),
      argc_(argc),
      base_(),
      fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
  // Offset of the argument area above the frame pointer, scaled by {argc}.
  Node* offset = assembler_->ElementOffsetFromIndex(
      argc_, SYSTEM_POINTER_ELEMENTS, param_mode,
      (StandardFrameConstants::kFixedSlotCountAboveFp - 1) *
          kSystemPointerSize);
  base_ =
      assembler_->UncheckedCast<RawPtrT>(assembler_->IntPtrAdd(fp_, offset));
}
13247 :
// Loads the receiver, which lives one system-pointer slot above {base_}.
// Only valid when the arguments were constructed with kHasReceiver.
TNode<Object> CodeStubArguments::GetReceiver() const {
  DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
  return assembler_->UncheckedCast<Object>(assembler_->LoadFullTagged(
      base_, assembler_->IntPtrConstant(kSystemPointerSize)));
}
13253 :
// Overwrites the receiver slot (one system-pointer slot above {base_}).
// No write barrier: stack slots are not part of the heap.
void CodeStubArguments::SetReceiver(TNode<Object> object) const {
  DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
  assembler_->StoreFullTaggedNoWriteBarrier(
      base_, assembler_->IntPtrConstant(kSystemPointerSize), object);
}
13259 :
// Returns the address of argument {index}: base_ - index * kSystemPointerSize
// (arguments live at decreasing addresses from {base_}, hence the negated
// index).
TNode<WordT> CodeStubArguments::AtIndexPtr(
    Node* index, CodeStubAssembler::ParameterMode mode) const {
  typedef compiler::Node Node;
  Node* negated_index = assembler_->IntPtrOrSmiSub(
      assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
  Node* offset = assembler_->ElementOffsetFromIndex(
      negated_index, SYSTEM_POINTER_ELEMENTS, mode, 0);
  return assembler_->IntPtrAdd(assembler_->UncheckedCast<IntPtrT>(base_),
                               offset);
}
13270 :
// Loads argument {index}. Asserts (in debug builds) that the index is within
// the actual argument count.
TNode<Object> CodeStubArguments::AtIndex(
    Node* index, CodeStubAssembler::ParameterMode mode) const {
  DCHECK_EQ(argc_mode_, mode);
  CSA_ASSERT(assembler_,
             assembler_->UintPtrOrSmiLessThan(index, GetLength(mode), mode));
  return assembler_->UncheckedCast<Object>(
      assembler_->LoadFullTagged(AtIndexPtr(index, mode)));
}
13279 :
// Convenience overload: loads argument at a compile-time constant index.
TNode<Object> CodeStubArguments::AtIndex(int index) const {
  return AtIndex(assembler_->IntPtrConstant(index));
}
13283 :
// Returns argument {index}, or {default_value} when fewer than index + 1
// arguments were passed.
TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
    int index, TNode<Object> default_value) {
  CodeStubAssembler::TVariable<Object> result(assembler_);
  CodeStubAssembler::Label argument_missing(assembler_),
      argument_done(assembler_, &result);

  // index >= argc means the caller did not supply this argument.
  assembler_->GotoIf(assembler_->UintPtrOrSmiGreaterThanOrEqual(
                         assembler_->IntPtrOrSmiConstant(index, argc_mode_),
                         argc_, argc_mode_),
                     &argument_missing);
  result = AtIndex(index);
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_missing);
  result = default_value;
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_done);
  return result.value();
}
13304 :
// Dynamic-index variant of GetOptionalArgumentValue: returns argument
// {index}, or {default_value} when index >= argc.
TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
    TNode<IntPtrT> index, TNode<Object> default_value) {
  CodeStubAssembler::TVariable<Object> result(assembler_);
  CodeStubAssembler::Label argument_missing(assembler_),
      argument_done(assembler_, &result);

  // Convert the IntPtrT index to the arguments' parameter mode before the
  // bounds comparison.
  assembler_->GotoIf(
      assembler_->UintPtrOrSmiGreaterThanOrEqual(
          assembler_->IntPtrToParameter(index, argc_mode_), argc_, argc_mode_),
      &argument_missing);
  result = AtIndex(index);
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_missing);
  result = default_value;
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_done);
  return result.value();
}
13325 :
// Invokes {body} on each argument in [first, last). {first} defaults to 0 and
// {last} defaults to argc. Arguments live at decreasing addresses from
// {base_}, so the loop walks from base_ - first downwards in steps of
// -kSystemPointerSize.
void CodeStubArguments::ForEach(
    const CodeStubAssembler::VariableList& vars,
    const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
    CodeStubAssembler::ParameterMode mode) {
  assembler_->Comment("CodeStubArguments::ForEach");
  if (first == nullptr) {
    first = assembler_->IntPtrOrSmiConstant(0, mode);
  }
  if (last == nullptr) {
    // When iterating to the end, {mode} must match how argc_ was supplied.
    DCHECK_EQ(mode, argc_mode_);
    last = argc_;
  }
  Node* start = assembler_->IntPtrSub(
      assembler_->UncheckedCast<IntPtrT>(base_),
      assembler_->ElementOffsetFromIndex(first, SYSTEM_POINTER_ELEMENTS, mode));
  Node* end = assembler_->IntPtrSub(
      assembler_->UncheckedCast<IntPtrT>(base_),
      assembler_->ElementOffsetFromIndex(last, SYSTEM_POINTER_ELEMENTS, mode));
  assembler_->BuildFastLoop(
      vars, start, end,
      [this, &body](Node* current) {
        Node* arg = assembler_->Load(MachineType::AnyTagged(), current);
        body(arg);
      },
      -kSystemPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
      CodeStubAssembler::IndexAdvanceMode::kPost);
}
13353 :
// Pops all arguments (plus the receiver slot, when present) off the stack and
// returns {value} to the caller.
void CodeStubArguments::PopAndReturn(Node* value) {
  Node* pop_count;
  if (receiver_mode_ == ReceiverMode::kHasReceiver) {
    // +1 accounts for the receiver slot.
    pop_count = assembler_->IntPtrOrSmiAdd(
        argc_, assembler_->IntPtrOrSmiConstant(1, argc_mode_), argc_mode_);
  } else {
    pop_count = argc_;
  }

  assembler_->PopAndReturn(assembler_->ParameterToIntPtr(pop_count, argc_mode_),
                           value);
}
13366 :
// Returns true iff {elements_kind} is one of the fast elements kinds, relying
// on the fast kinds occupying the low end of the ElementsKind enum.
Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
  STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
  return Uint32LessThanOrEqual(elements_kind,
                               Int32Constant(LAST_FAST_ELEMENTS_KIND));
}
13372 :
// Returns true iff {elements_kind} is PACKED_DOUBLE_ELEMENTS or
// HOLEY_DOUBLE_ELEMENTS. The two kinds are consecutive with the packed one
// even, so a single shift-and-compare covers both.
TNode<BoolT> CodeStubAssembler::IsDoubleElementsKind(
    TNode<Int32T> elements_kind) {
  STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
  STATIC_ASSERT((PACKED_DOUBLE_ELEMENTS & 1) == 0);
  STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS);
  return Word32Equal(Word32Shr(elements_kind, Int32Constant(1)),
                     Int32Constant(PACKED_DOUBLE_ELEMENTS / 2));
}
13381 :
// Returns true iff {elements_kind} is a fast Smi or tagged (non-double)
// elements kind, i.e. at or below TERMINAL_FAST_ELEMENTS_KIND.
Node* CodeStubAssembler::IsFastSmiOrTaggedElementsKind(Node* elements_kind) {
  STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
  STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
  return Uint32LessThanOrEqual(elements_kind,
                               Int32Constant(TERMINAL_FAST_ELEMENTS_KIND));
}
13389 :
// Returns true iff {elements_kind} is PACKED_SMI_ELEMENTS or
// HOLEY_SMI_ELEMENTS (the kinds at or below HOLEY_SMI_ELEMENTS).
Node* CodeStubAssembler::IsFastSmiElementsKind(Node* elements_kind) {
  return Uint32LessThanOrEqual(elements_kind,
                               Int32Constant(HOLEY_SMI_ELEMENTS));
}
13394 :
// Returns true iff the (asserted fast) {elements_kind} is a holey kind.
// All holey fast kinds are the corresponding packed kind with bit 0 set.
Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
  CSA_ASSERT(this, IsFastElementsKind(elements_kind));

  STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
  STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
  return IsSetWord32(elements_kind, 1);
}
13403 :
// Returns true iff {target_kind} is strictly greater than {reference_kind}
// in ElementsKind enum order.
Node* CodeStubAssembler::IsElementsKindGreaterThan(
    Node* target_kind, ElementsKind reference_kind) {
  return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
}
13408 :
// Returns true iff {target_kind} is at or below {reference_kind} in
// ElementsKind enum order.
TNode<BoolT> CodeStubAssembler::IsElementsKindLessThanOrEqual(
    TNode<Int32T> target_kind, ElementsKind reference_kind) {
  return Int32LessThanOrEqual(target_kind, Int32Constant(reference_kind));
}
13413 :
// Returns true iff {target_kind} lies in the inclusive range
// [lower_reference_kind, higher_reference_kind]. Implemented as a single
// comparison by shifting the range down to start at zero; the signed
// comparison on the (possibly negative) difference also rejects kinds below
// the lower bound.
TNode<BoolT> CodeStubAssembler::IsElementsKindInRange(
    TNode<Int32T> target_kind, ElementsKind lower_reference_kind,
    ElementsKind higher_reference_kind) {
  return Int32LessThanOrEqual(
      Int32Sub(target_kind, Int32Constant(lower_reference_kind)),
      Int32Sub(Int32Constant(higher_reference_kind),
               Int32Constant(lower_reference_kind)));
}
13422 :
// Returns true iff the debugger is currently active, by reading the
// isolate's debug_is_active flag byte.
Node* CodeStubAssembler::IsDebugActive() {
  Node* is_debug_active = Load(
      MachineType::Uint8(),
      ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
  return Word32NotEqual(is_debug_active, Int32Constant(0));
}
13429 :
// Returns true iff runtime call statistics collection is enabled, by reading
// the global runtime-stats tracing flag.
TNode<BoolT> CodeStubAssembler::IsRuntimeCallStatsEnabled() {
  STATIC_ASSERT(sizeof(TracingFlags::runtime_stats) == kInt32Size);
  TNode<Word32T> flag_value = UncheckedCast<Word32T>(Load(
      MachineType::Int32(),
      ExternalConstant(ExternalReference::address_of_runtime_stats_flag())));
  return Word32NotEqual(flag_value, Int32Constant(0));
}
13437 :
// Returns true iff a promise hook is installed on the isolate (non-null
// promise hook pointer).
Node* CodeStubAssembler::IsPromiseHookEnabled() {
  Node* const promise_hook = Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::promise_hook_address(isolate())));
  return WordNotEqual(promise_hook, IntPtrConstant(0));
}
13444 :
// Returns true iff an async event delegate is installed on the isolate
// (non-null delegate pointer).
Node* CodeStubAssembler::HasAsyncEventDelegate() {
  Node* const async_event_delegate =
      Load(MachineType::Pointer(),
           ExternalConstant(
               ExternalReference::async_event_delegate_address(isolate())));
  return WordNotEqual(async_event_delegate, IntPtrConstant(0));
}
13452 :
// Returns true iff a promise hook or an async event delegate is installed.
// Reads a single pre-combined flag byte maintained by the isolate, avoiding
// two separate loads.
Node* CodeStubAssembler::IsPromiseHookEnabledOrHasAsyncEventDelegate() {
  Node* const promise_hook_or_async_event_delegate =
      Load(MachineType::Uint8(),
           ExternalConstant(
               ExternalReference::promise_hook_or_async_event_delegate_address(
                   isolate())));
  return Word32NotEqual(promise_hook_or_async_event_delegate, Int32Constant(0));
}
13461 :
// Returns true iff a promise hook is installed, the debugger is active, or an
// async event delegate is installed. Like the two-way variant above, this
// reads one pre-combined flag byte maintained by the isolate.
Node* CodeStubAssembler::
    IsPromiseHookEnabledOrDebugIsActiveOrHasAsyncEventDelegate() {
  Node* const promise_hook_or_debug_is_active_or_async_event_delegate = Load(
      MachineType::Uint8(),
      ExternalConstant(
          ExternalReference::
              promise_hook_or_debug_is_active_or_async_event_delegate_address(
                  isolate())));
  return Word32NotEqual(promise_hook_or_debug_is_active_or_async_event_delegate,
                        Int32Constant(0));
}
13473 :
// Loads the Code object for builtin {builtin_id} from the isolate's builtins
// table. The Smi-encoded id is converted directly into a byte index into the
// table by shifting: left when a tagged pointer slot is wider than a Smi
// shift, right otherwise (e.g. with Smi tagging schemes where the payload is
// in the upper bits).
TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
  CSA_ASSERT(this, SmiGreaterThanOrEqual(builtin_id, SmiConstant(0)));
  CSA_ASSERT(this,
             SmiLessThan(builtin_id, SmiConstant(Builtins::builtin_count)));

  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  int index_shift = kSystemPointerSizeLog2 - kSmiShiftBits;
  TNode<WordT> table_index =
      index_shift >= 0 ? WordShl(BitcastTaggedToWord(builtin_id), index_shift)
                       : WordSar(BitcastTaggedToWord(builtin_id), -index_shift);

  return CAST(
      Load(MachineType::TaggedPointer(),
           ExternalConstant(ExternalReference::builtins_address(isolate())),
           table_index));
}
13490 :
// Computes the Code object to execute for {shared_info} by dispatching on the
// type of its function-data field. When {if_compile_lazy} is non-null, jumps
// there instead of returning when the function still needs (lazy)
// compilation.
TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
    SloppyTNode<SharedFunctionInfo> shared_info, Label* if_compile_lazy) {
  TNode<Object> sfi_data =
      LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);

  TVARIABLE(Code, sfi_code);

  Label done(this);
  Label check_instance_type(this);

  // IsSmi: Is builtin
  GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
  if (if_compile_lazy) {
    GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtins::kCompileLazy)),
           if_compile_lazy);
  }
  sfi_code = LoadBuiltin(CAST(sfi_data));
  Goto(&done);

  // Switch on data's instance type.
  BIND(&check_instance_type);
  TNode<Int32T> data_type = LoadInstanceType(CAST(sfi_data));

  int32_t case_values[] = {BYTECODE_ARRAY_TYPE,
                           WASM_EXPORTED_FUNCTION_DATA_TYPE,
                           ASM_WASM_DATA_TYPE,
                           UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE,
                           UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE,
                           FUNCTION_TEMPLATE_INFO_TYPE};
  Label check_is_bytecode_array(this);
  Label check_is_exported_function_data(this);
  Label check_is_asm_wasm_data(this);
  Label check_is_uncompiled_data_without_preparse_data(this);
  Label check_is_uncompiled_data_with_preparse_data(this);
  Label check_is_function_template_info(this);
  Label check_is_interpreter_data(this);
  // The labels below must stay in sync (same order) with case_values above.
  Label* case_labels[] = {&check_is_bytecode_array,
                          &check_is_exported_function_data,
                          &check_is_asm_wasm_data,
                          &check_is_uncompiled_data_without_preparse_data,
                          &check_is_uncompiled_data_with_preparse_data,
                          &check_is_function_template_info};
  STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels));
  Switch(data_type, &check_is_interpreter_data, case_values, case_labels,
         arraysize(case_labels));

  // IsBytecodeArray: Interpret bytecode
  BIND(&check_is_bytecode_array);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
  Goto(&done);

  // IsWasmExportedFunctionData: Use the wrapper code
  BIND(&check_is_exported_function_data);
  sfi_code = CAST(LoadObjectField(
      CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset));
  Goto(&done);

  // IsAsmWasmData: Instantiate using AsmWasmData
  BIND(&check_is_asm_wasm_data);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs));
  Goto(&done);

  // IsUncompiledDataWithPreparseData | IsUncompiledDataWithoutPreparseData:
  // Compile lazy
  BIND(&check_is_uncompiled_data_with_preparse_data);
  Goto(&check_is_uncompiled_data_without_preparse_data);
  BIND(&check_is_uncompiled_data_without_preparse_data);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  Goto(if_compile_lazy ? if_compile_lazy : &done);

  // IsFunctionTemplateInfo: API call
  BIND(&check_is_function_template_info);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall));
  Goto(&done);

  // IsInterpreterData: Interpret bytecode
  BIND(&check_is_interpreter_data);
  // This is the default branch, so assert that we have the expected data type.
  CSA_ASSERT(this,
             Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE)));
  sfi_code = CAST(LoadObjectField(
      CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset));
  Goto(&done);

  BIND(&done);
  return sfi_code.value();
}
13578 :
// Allocates and fully initializes a JSFunction (without a prototype slot) for
// {shared_info} in {context}, using {map} as its map. The function's code is
// derived from the SharedFunctionInfo.
Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
                                                           Node* shared_info,
                                                           Node* context) {
  CSA_SLOW_ASSERT(this, IsMap(map));

  Node* const code = GetSharedFunctionInfoCode(shared_info);

  // TODO(ishell): All the callers of this function pass map loaded from
  // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove
  // map parameter.
  CSA_ASSERT(this, Word32BinaryNot(IsConstructorMap(map)));
  CSA_ASSERT(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)));
  Node* const fun = Allocate(JSFunction::kSizeWithoutPrototype);
  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
  // All stores below target a freshly allocated object, so no write barriers
  // are needed.
  StoreMapNoWriteBarrier(fun, map);
  StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
                       RootIndex::kManyClosuresCell);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
  return fun;
}
13606 :
13607 0 : Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
13608 : StackFrame::Type frame_type) {
13609 0 : return WordEqual(marker_or_function,
13610 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13611 : }
13612 :
13613 0 : Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
13614 : StackFrame::Type frame_type) {
13615 0 : return WordNotEqual(marker_or_function,
13616 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13617 : }
13618 :
// Walks the prototype chain of {receiver}, jumping to {if_fast} when every
// object on the chain (including the receiver) has no elements and — for all
// but the receiver itself — an empty enum cache. Jumps to {if_slow} on any
// object with elements, a non-zero JSArray length, or a non-empty enum cache.
void CodeStubAssembler::CheckPrototypeEnumCache(Node* receiver,
                                                Node* receiver_map,
                                                Label* if_fast,
                                                Label* if_slow) {
  VARIABLE(var_object, MachineRepresentation::kTagged, receiver);
  VARIABLE(var_object_map, MachineRepresentation::kTagged, receiver_map);

  Label loop(this, {&var_object, &var_object_map}), done_loop(this);
  Goto(&loop);
  BIND(&loop);
  {
    // Check that there are no elements on the current {object}.
    Label if_no_elements(this);
    Node* object = var_object.value();
    Node* object_map = var_object_map.value();

    // The following relies on the elements only aliasing with JSProxy::target,
    // which is a Javascript value and hence cannot be confused with an elements
    // backing store.
    STATIC_ASSERT(static_cast<int>(JSObject::kElementsOffset) ==
                  static_cast<int>(JSProxy::kTargetOffset));
    Node* object_elements = LoadObjectField(object, JSObject::kElementsOffset);
    GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
    GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);

    // It might still be an empty JSArray.
    GotoIfNot(IsJSArrayMap(object_map), if_slow);
    Node* object_length = LoadJSArrayLength(object);
    Branch(WordEqual(object_length, SmiConstant(0)), &if_no_elements, if_slow);

    // Continue with the {object}s prototype.
    BIND(&if_no_elements);
    object = LoadMapPrototype(object_map);
    GotoIf(IsNull(object), if_fast);

    // For all {object}s but the {receiver}, check that the cache is empty.
    var_object.Bind(object);
    object_map = LoadMap(object);
    var_object_map.Bind(object_map);
    Node* object_enum_length = LoadMapEnumLength(object_map);
    Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
  }
}
13662 :
// Checks whether {receiver} has a usable enum cache for fast for-in
// iteration. Returns the receiver's map on the fast path; jumps to
// {if_empty} when the receiver has no enumerable properties at all, and to
// {if_runtime} when the runtime must be consulted.
Node* CodeStubAssembler::CheckEnumCache(Node* receiver, Label* if_empty,
                                        Label* if_runtime) {
  Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
  Node* receiver_map = LoadMap(receiver);

  // Check if the enum length field of the {receiver} is properly initialized,
  // indicating that there is an enum cache.
  Node* receiver_enum_length = LoadMapEnumLength(receiver_map);
  Branch(WordEqual(receiver_enum_length,
                   IntPtrConstant(kInvalidEnumCacheSentinel)),
         &if_no_cache, &if_cache);

  BIND(&if_no_cache);
  {
    // Avoid runtime-call for empty dictionary receivers.
    GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
    TNode<NameDictionary> properties = CAST(LoadSlowProperties(receiver));
    TNode<Smi> length = GetNumberOfElements(properties);
    GotoIfNot(WordEqual(length, SmiConstant(0)), if_runtime);
    // Check that there are no elements on the {receiver} and its prototype
    // chain. Given that we do not create an EnumCache for dict-mode objects,
    // directly jump to {if_empty} if there are no elements and no properties
    // on the {receiver}.
    CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
  }

  // Check that there are no elements on the fast {receiver} and its
  // prototype chain.
  BIND(&if_cache);
  CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);

  BIND(&if_fast);
  return receiver_map;
}
13697 :
// Torque interface: returns argument {index} from {args}, or undefined (the
// GetOptionalArgumentValue default) when the argument was not passed.
TNode<Object> CodeStubAssembler::GetArgumentValue(
    BaseBuiltinsFromDSLAssembler::Arguments args, TNode<IntPtrT> index) {
  return CodeStubArguments(this, args).GetOptionalArgumentValue(index);
}
13702 :
// Torque interface: builds an Arguments struct for the stack frame at
// {frame} with {argc} arguments.
BaseBuiltinsFromDSLAssembler::Arguments CodeStubAssembler::GetFrameArguments(
    TNode<RawPtrT> frame, TNode<IntPtrT> argc) {
  return CodeStubArguments(this, argc, frame, INTPTR_PARAMETERS)
      .GetTorqueArguments();
}
13708 :
13709 0 : void CodeStubAssembler::Print(const char* s) {
13710 0 : std::string formatted(s);
13711 : formatted += "\n";
13712 : CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13713 0 : StringConstant(formatted.c_str()));
13714 0 : }
13715 :
// Emits code that prints {tagged_value} (via Runtime::kDebugPrint),
// optionally preceded by "{prefix}: ". Debugging aid.
void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
  if (prefix != nullptr) {
    std::string formatted(prefix);
    formatted += ": ";
    // The prefix string is allocated in old space so the HeapConstant below
    // stays valid for the generated code's lifetime.
    Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
        formatted.c_str(), AllocationType::kOld);
    CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
                HeapConstant(string));
  }
  CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
}
13727 :
// Emits a stack overflow check: when the stack pointer has gone past the
// isolate's stack limit, calls Runtime::kStackGuard (which may interrupt or
// throw), otherwise falls through.
void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
  Label ok(this), stack_check_interrupt(this, Label::kDeferred);

  // The instruction sequence below is carefully crafted to hit our pattern
  // matcher for stack checks within instruction selection.
  // See StackCheckMatcher::Matched and JSGenericLowering::LowerJSStackCheck.

  TNode<UintPtrT> sp = UncheckedCast<UintPtrT>(LoadStackPointer());
  TNode<UintPtrT> stack_limit = UncheckedCast<UintPtrT>(Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate()))));
  // Stacks grow downwards, so sp is "within limit" while it is above the
  // limit address.
  TNode<BoolT> sp_within_limit = UintPtrLessThan(stack_limit, sp);

  Branch(sp_within_limit, &ok, &stack_check_interrupt);

  BIND(&stack_check_interrupt);
  CallRuntime(Runtime::kStackGuard, context);
  Goto(&ok);

  BIND(&ok);
}
13749 :
// Initializes a freshly allocated function context of {slots} slots: sets
// its map and length, and fills the fixed header slots (scope info, previous
// context, extension, native context). No write barriers are needed on a new
// object.
void CodeStubAssembler::InitializeFunctionContext(Node* native_context,
                                                  Node* context, int slots) {
  DCHECK_GE(slots, Context::MIN_CONTEXT_SLOTS);
  StoreMapNoWriteBarrier(context, RootIndex::kFunctionContextMap);
  StoreObjectFieldNoWriteBarrier(context, FixedArray::kLengthOffset,
                                 SmiConstant(slots));

  Node* const empty_scope_info =
      LoadContextElement(native_context, Context::SCOPE_INFO_INDEX);
  StoreContextElementNoWriteBarrier(context, Context::SCOPE_INFO_INDEX,
                                    empty_scope_info);
  StoreContextElementNoWriteBarrier(context, Context::PREVIOUS_INDEX,
                                    UndefinedConstant());
  StoreContextElementNoWriteBarrier(context, Context::EXTENSION_INDEX,
                                    TheHoleConstant());
  StoreContextElementNoWriteBarrier(context, Context::NATIVE_CONTEXT_INDEX,
                                    native_context);
}
13768 :
// Implements ArrayCreate: allocates a new JSArray of {length} elements.
// Small lengths are allocated inline as PACKED_SMI_ELEMENTS; lengths at or
// above JSArray::kInitialMaxFastElementArray (or non-Smi lengths) go through
// Runtime::kNewArray.
TNode<JSArray> CodeStubAssembler::ArrayCreate(TNode<Context> context,
                                              TNode<Number> length) {
  TVARIABLE(JSArray, array);
  Label allocate_js_array(this);

  Label done(this), next(this), runtime(this, Label::kDeferred);
  TNode<Smi> limit = SmiConstant(JSArray::kInitialMaxFastElementArray);
  CSA_ASSERT_BRANCH(this, [=](Label* ok, Label* not_ok) {
    BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
                                       SmiConstant(0), ok, not_ok);
  });
  // This check also transitively covers the case where length is too big
  // to be representable by a SMI and so is not usable with
  // AllocateJSArray.
  BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
                                     limit, &runtime, &next);

  BIND(&runtime);
  {
    TNode<Context> native_context = LoadNativeContext(context);
    TNode<JSFunction> array_function =
        CAST(LoadContextElement(native_context, Context::ARRAY_FUNCTION_INDEX));
    array = CAST(CallRuntime(Runtime::kNewArray, context, array_function,
                             length, array_function, UndefinedConstant()));
    Goto(&done);
  }

  BIND(&next);
  CSA_ASSERT(this, TaggedIsSmi(length));

  TNode<Map> array_map = CAST(LoadContextElement(
      context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX));

  // TODO(delphick): Consider using
  // AllocateUninitializedJSArrayWithElements to avoid initializing an
  // array and then writing over it.
  array =
      AllocateJSArray(PACKED_SMI_ELEMENTS, array_map, length, SmiConstant(0),
                      nullptr, ParameterMode::SMI_PARAMETERS);
  Goto(&done);

  BIND(&done);
  return array.value();
}
13813 :
// Sets the "length" property of {array} to {length}. Skips the store when
// {array} is a fast JSArray whose length already equals {length} (the set
// would be unobservable); otherwise performs a strict SetProperty, which
// handles hole insertion / backing-store shrinking.
void CodeStubAssembler::SetPropertyLength(TNode<Context> context,
                                          TNode<Object> array,
                                          TNode<Number> length) {
  Label fast(this), runtime(this), done(this);
  // There's no need to set the length, if
  // 1) the array is a fast JS array and
  // 2) the new length is equal to the old length.
  // as the set is not observable. Otherwise fall back to the run-time.

  // 1) Check that the array has fast elements.
  // TODO(delphick): Consider changing this since it does an an unnecessary
  // check for SMIs.
  // TODO(delphick): Also we could hoist this to after the array construction
  // and copy the args into array in the same way as the Array constructor.
  BranchIfFastJSArray(array, context, &fast, &runtime);

  BIND(&fast);
  {
    TNode<JSArray> fast_array = CAST(array);

    TNode<Smi> length_smi = CAST(length);
    TNode<Smi> old_length = LoadFastJSArrayLength(fast_array);
    CSA_ASSERT(this, TaggedIsPositiveSmi(old_length));

    // 2) If the created array's length matches the required length, then
    //    there's nothing else to do. Otherwise use the runtime to set the
    //    property as that will insert holes into excess elements or shrink
    //    the backing store as appropriate.
    Branch(SmiNotEqual(length_smi, old_length), &runtime, &done);
  }

  BIND(&runtime);
  {
    SetPropertyStrict(context, array, CodeStubAssembler::LengthStringConstant(),
                      length);
    Goto(&done);
  }

  BIND(&done);
}
13854 :
// Single-property convenience wrapper around
// GotoIfInitialPrototypePropertiesModified.
void CodeStubAssembler::GotoIfInitialPrototypePropertyModified(
    TNode<Map> object_map, TNode<Map> initial_prototype_map, int descriptor,
    RootIndex field_name_root_index, Label* if_modified) {
  DescriptorIndexAndName index_name{descriptor, field_name_root_index};
  GotoIfInitialPrototypePropertiesModified(
      object_map, initial_prototype_map,
      Vector<DescriptorIndexAndName>(&index_name, 1), if_modified);
}
13863 :
// Jumps to {if_modified} when the prototype of {object_map} is no longer the
// initial prototype map, or — with constant field tracking — when any of the
// listed prototype {properties} has lost its const-ness (i.e. may have been
// overwritten).
void CodeStubAssembler::GotoIfInitialPrototypePropertiesModified(
    TNode<Map> object_map, TNode<Map> initial_prototype_map,
    Vector<DescriptorIndexAndName> properties, Label* if_modified) {
  TNode<Map> prototype_map = LoadMap(LoadMapPrototype(object_map));
  GotoIfNot(WordEqual(prototype_map, initial_prototype_map), if_modified);

  if (FLAG_track_constant_fields) {
    // With constant field tracking, we need to make sure that important
    // properties in the prototype has not been tampered with. We do this by
    // checking that their slots in the prototype's descriptor array are still
    // marked as const.
    TNode<DescriptorArray> descriptors = LoadMapDescriptors(prototype_map);

    TNode<Uint32T> combined_details;
    for (int i = 0; i < properties.length(); i++) {
      // Assert the descriptor index is in-bounds.
      int descriptor = properties[i].descriptor_index;
      CSA_ASSERT(this, Int32LessThan(Int32Constant(descriptor),
                                     LoadNumberOfDescriptors(descriptors)));
      // Assert that the name is correct. This essentially checks that
      // the descriptor index corresponds to the insertion order in
      // the bootstrapper.
      CSA_ASSERT(this,
                 WordEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
                           LoadRoot(properties[i].name_root_index)));

      TNode<Uint32T> details =
          DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
      // AND the details together so one comparison below checks const-ness of
      // all listed properties at once.
      if (i == 0) {
        combined_details = details;
      } else {
        combined_details = Unsigned(Word32And(combined_details, details));
      }
    }

    TNode<Uint32T> constness =
        DecodeWord32<PropertyDetails::ConstnessField>(combined_details);

    GotoIfNot(
        Word32Equal(constness,
                    Int32Constant(static_cast<int>(PropertyConstness::kConst))),
        if_modified);
  }
}
13908 :
// Casts {value} to a String whose characters can be accessed directly,
// jumping to {fail} when it is not a string or cannot be flattened/unpacked
// into a direct form.
TNode<String> CodeStubAssembler::TaggedToDirectString(TNode<Object> value,
                                                      Label* fail) {
  ToDirectStringAssembler to_direct(state(), value);
  to_direct.TryToDirect(fail);
  to_direct.PointerToData(fail);
  return CAST(value);
}
13916 :
13917 : } // namespace internal
13918 59456 : } // namespace v8
|