Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/code-stub-assembler.h"
6 :
7 : #include "src/code-factory.h"
8 : #include "src/counters.h"
9 : #include "src/frames-inl.h"
10 : #include "src/frames.h"
11 : #include "src/function-kind.h"
12 : #include "src/heap/heap-inl.h" // For Page/MemoryChunk. TODO(jkummerow): Drop.
13 : #include "src/objects/api-callbacks.h"
14 : #include "src/objects/cell.h"
15 : #include "src/objects/descriptor-array.h"
16 : #include "src/objects/heap-number.h"
17 : #include "src/objects/oddball.h"
18 : #include "src/objects/ordered-hash-table-inl.h"
19 : #include "src/objects/property-cell.h"
20 : #include "src/wasm/wasm-objects.h"
21 :
22 : namespace v8 {
23 : namespace internal {
24 :
25 : using compiler::Node;
26 : template <class T>
27 : using TNode = compiler::TNode<T>;
28 : template <class T>
29 : using SloppyTNode = compiler::SloppyTNode<T>;
30 :
31 534541 : CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
32 : : compiler::CodeAssembler(state), BaseBuiltinsFromDSLAssembler(state) {
33 : if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
34 : HandleBreakOnNode();
35 : }
36 534541 : }
37 :
38 0 : void CodeStubAssembler::HandleBreakOnNode() {
39 : // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
40 : // string specifying the name of a stub and NODE is number specifying node id.
41 : const char* name = state()->name();
42 0 : size_t name_length = strlen(name);
43 0 : if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
44 : // Different name.
45 0 : return;
46 : }
47 0 : size_t option_length = strlen(FLAG_csa_trap_on_node);
48 0 : if (option_length < name_length + 2 ||
49 0 : FLAG_csa_trap_on_node[name_length] != ',') {
50 : // Option is too short.
51 : return;
52 : }
53 0 : const char* start = &FLAG_csa_trap_on_node[name_length + 1];
54 : char* end;
55 0 : int node_id = static_cast<int>(strtol(start, &end, 10));
56 0 : if (start == end) {
57 : // Bad node id.
58 : return;
59 : }
60 0 : BreakOnNode(node_id);
61 : }
62 :
63 0 : void CodeStubAssembler::Assert(const BranchGenerator& branch,
64 : const char* message, const char* file, int line,
65 : Node* extra_node1, const char* extra_node1_name,
66 : Node* extra_node2, const char* extra_node2_name,
67 : Node* extra_node3, const char* extra_node3_name,
68 : Node* extra_node4, const char* extra_node4_name,
69 : Node* extra_node5,
70 : const char* extra_node5_name) {
71 : #if defined(DEBUG)
72 : if (FLAG_debug_code) {
73 : Check(branch, message, file, line, extra_node1, extra_node1_name,
74 : extra_node2, extra_node2_name, extra_node3, extra_node3_name,
75 : extra_node4, extra_node4_name, extra_node5, extra_node5_name);
76 : }
77 : #endif
78 0 : }
79 :
80 0 : void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
81 : const char* message, const char* file, int line,
82 : Node* extra_node1, const char* extra_node1_name,
83 : Node* extra_node2, const char* extra_node2_name,
84 : Node* extra_node3, const char* extra_node3_name,
85 : Node* extra_node4, const char* extra_node4_name,
86 : Node* extra_node5,
87 : const char* extra_node5_name) {
88 : #if defined(DEBUG)
89 : if (FLAG_debug_code) {
90 : Check(condition_body, message, file, line, extra_node1, extra_node1_name,
91 : extra_node2, extra_node2_name, extra_node3, extra_node3_name,
92 : extra_node4, extra_node4_name, extra_node5, extra_node5_name);
93 : }
94 : #endif
95 0 : }
96 :
#ifdef DEBUG
namespace {
// Emits a runtime call that prints |node| labeled with |node_name|; no code
// is generated when |node| was not supplied.
void MaybePrintNodeWithName(CodeStubAssembler* csa, Node* node,
                            const char* node_name) {
  if (node == nullptr) return;
  csa->CallRuntime(Runtime::kPrintWithNameForAssert, csa->SmiConstant(0),
                   csa->StringConstant(node_name), node);
}
}  // namespace
#endif
108 :
109 0 : void CodeStubAssembler::Check(const BranchGenerator& branch,
110 : const char* message, const char* file, int line,
111 : Node* extra_node1, const char* extra_node1_name,
112 : Node* extra_node2, const char* extra_node2_name,
113 : Node* extra_node3, const char* extra_node3_name,
114 : Node* extra_node4, const char* extra_node4_name,
115 : Node* extra_node5, const char* extra_node5_name) {
116 0 : Label ok(this);
117 0 : Label not_ok(this, Label::kDeferred);
118 0 : if (message != nullptr && FLAG_code_comments) {
119 0 : Comment("[ Assert: ", message);
120 : } else {
121 0 : Comment("[ Assert");
122 : }
123 : branch(&ok, ¬_ok);
124 :
125 : BIND(¬_ok);
126 : FailAssert(message, file, line, extra_node1, extra_node1_name, extra_node2,
127 : extra_node2_name, extra_node3, extra_node3_name, extra_node4,
128 0 : extra_node4_name, extra_node5, extra_node5_name);
129 :
130 : BIND(&ok);
131 0 : Comment("] Assert");
132 0 : }
133 :
134 0 : void CodeStubAssembler::Check(const NodeGenerator& condition_body,
135 : const char* message, const char* file, int line,
136 : Node* extra_node1, const char* extra_node1_name,
137 : Node* extra_node2, const char* extra_node2_name,
138 : Node* extra_node3, const char* extra_node3_name,
139 : Node* extra_node4, const char* extra_node4_name,
140 : Node* extra_node5, const char* extra_node5_name) {
141 0 : BranchGenerator branch = [=](Label* ok, Label* not_ok) {
142 : Node* condition = condition_body();
143 : DCHECK_NOT_NULL(condition);
144 0 : Branch(condition, ok, not_ok);
145 0 : };
146 :
147 : Check(branch, message, file, line, extra_node1, extra_node1_name, extra_node2,
148 : extra_node2_name, extra_node3, extra_node3_name, extra_node4,
149 0 : extra_node4_name, extra_node5, extra_node5_name);
150 0 : }
151 :
152 68676 : void CodeStubAssembler::FastCheck(TNode<BoolT> condition) {
153 137352 : Label ok(this), not_ok(this, Label::kDeferred);
154 68676 : Branch(condition, &ok, ¬_ok);
155 : BIND(¬_ok);
156 : {
157 68676 : DebugBreak();
158 68676 : Goto(&ok);
159 : }
160 : BIND(&ok);
161 68676 : }
162 :
163 440 : void CodeStubAssembler::FailAssert(
164 : const char* message, const char* file, int line, Node* extra_node1,
165 : const char* extra_node1_name, Node* extra_node2,
166 : const char* extra_node2_name, Node* extra_node3,
167 : const char* extra_node3_name, Node* extra_node4,
168 : const char* extra_node4_name, Node* extra_node5,
169 : const char* extra_node5_name) {
170 : DCHECK_NOT_NULL(message);
171 : char chars[1024];
172 : Vector<char> buffer(chars);
173 440 : if (file != nullptr) {
174 440 : SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
175 : } else {
176 0 : SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
177 : }
178 880 : Node* message_node = StringConstant(&(buffer[0]));
179 :
180 : #ifdef DEBUG
181 : // Only print the extra nodes in debug builds.
182 : MaybePrintNodeWithName(this, extra_node1, extra_node1_name);
183 : MaybePrintNodeWithName(this, extra_node2, extra_node2_name);
184 : MaybePrintNodeWithName(this, extra_node3, extra_node3_name);
185 : MaybePrintNodeWithName(this, extra_node4, extra_node4_name);
186 : MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
187 : #endif
188 :
189 440 : DebugAbort(message_node);
190 440 : Unreachable();
191 440 : }
192 :
193 47260 : Node* CodeStubAssembler::SelectImpl(TNode<BoolT> condition,
194 : const NodeGenerator& true_body,
195 : const NodeGenerator& false_body,
196 : MachineRepresentation rep) {
197 94520 : VARIABLE(value, rep);
198 47260 : Label vtrue(this), vfalse(this), end(this);
199 47260 : Branch(condition, &vtrue, &vfalse);
200 :
201 : BIND(&vtrue);
202 : {
203 47260 : value.Bind(true_body());
204 47260 : Goto(&end);
205 : }
206 : BIND(&vfalse);
207 : {
208 47260 : value.Bind(false_body());
209 47260 : Goto(&end);
210 : }
211 :
212 : BIND(&end);
213 94520 : return value.value();
214 : }
215 :
216 224 : TNode<Int32T> CodeStubAssembler::SelectInt32Constant(
217 : SloppyTNode<BoolT> condition, int true_value, int false_value) {
218 : return SelectConstant<Int32T>(condition, Int32Constant(true_value),
219 448 : Int32Constant(false_value));
220 : }
221 :
222 0 : TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(
223 : SloppyTNode<BoolT> condition, int true_value, int false_value) {
224 : return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
225 0 : IntPtrConstant(false_value));
226 : }
227 :
228 3084 : TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
229 : SloppyTNode<BoolT> condition) {
230 3084 : return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
231 : }
232 :
233 4144 : TNode<Smi> CodeStubAssembler::SelectSmiConstant(SloppyTNode<BoolT> condition,
234 : Smi true_value,
235 : Smi false_value) {
236 : return SelectConstant<Smi>(condition, SmiConstant(true_value),
237 8288 : SmiConstant(false_value));
238 : }
239 :
// The "no context" sentinel, encoded as the Smi Context::kNoContext.
TNode<Object> CodeStubAssembler::NoContextConstant() {
  return SmiConstant(Context::kNoContext);
}
243 :
// Generates CodeStubAssembler::<name>Constant() accessors for the mutable
// immovable heap objects; the TNode result type is deduced from the return
// type of the matching Heap accessor.
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)        \
  compiler::TNode<std::remove_pointer<std::remove_reference<decltype(        \
      std::declval<Heap>().rootAccessorName())>::type>::type>                \
      CodeStubAssembler::name##Constant() {                                  \
    return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
        std::declval<Heap>().rootAccessorName())>::type>::type>(             \
        LoadRoot(RootIndex::k##rootIndexName));                              \
  }
// Expand one accessor per entry in the mutable-immovable object list.
HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
#undef HEAP_CONSTANT_ACCESSOR
254 :
// Same as above, but for immutable immovable objects, whose accessors live
// on ReadOnlyRoots instead of Heap.
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)        \
  compiler::TNode<std::remove_pointer<std::remove_reference<decltype(        \
      std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>       \
      CodeStubAssembler::name##Constant() {                                  \
    return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
        std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>(    \
        LoadRoot(RootIndex::k##rootIndexName));                              \
  }
// Expand one accessor per entry in the immutable-immovable object list.
HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
#undef HEAP_CONSTANT_ACCESSOR
265 :
// Generates Is<name>() / IsNot<name>() predicates that compare a value
// against the corresponding immovable-object constant by pointer equality.
#define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name)  \
  compiler::TNode<BoolT> CodeStubAssembler::Is##name(              \
      SloppyTNode<Object> value) {                                 \
    return WordEqual(value, name##Constant());                     \
  }                                                                \
  compiler::TNode<BoolT> CodeStubAssembler::IsNot##name(           \
      SloppyTNode<Object> value) {                                 \
    return WordNotEqual(value, name##Constant());                  \
  }
// Expand one predicate pair per entry in the immovable object list.
HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST)
#undef HEAP_CONSTANT_TEST
277 :
278 110616 : Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
279 110616 : if (mode == SMI_PARAMETERS) {
280 6928 : return SmiConstant(value);
281 : } else {
282 : DCHECK_EQ(INTPTR_PARAMETERS, mode);
283 214304 : return IntPtrConstant(value);
284 : }
285 : }
286 :
287 2968 : bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
288 : ParameterMode mode) {
289 : int32_t constant_test;
290 2968 : Smi smi_test;
291 2968 : if (mode == INTPTR_PARAMETERS) {
292 1736 : if (ToInt32Constant(test, constant_test) && constant_test == 0) {
293 : return true;
294 : }
295 : } else {
296 : DCHECK_EQ(mode, SMI_PARAMETERS);
297 1736 : if (ToSmiConstant(test, &smi_test) && smi_test->value() == 0) {
298 : return true;
299 : }
300 : }
301 : return false;
302 : }
303 :
304 0 : bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
305 : int* value,
306 : ParameterMode mode) {
307 : int32_t int32_constant;
308 0 : if (mode == INTPTR_PARAMETERS) {
309 0 : if (ToInt32Constant(maybe_constant, int32_constant)) {
310 0 : *value = int32_constant;
311 0 : return true;
312 : }
313 : } else {
314 : DCHECK_EQ(mode, SMI_PARAMETERS);
315 0 : Smi smi_constant;
316 0 : if (ToSmiConstant(maybe_constant, &smi_constant)) {
317 0 : *value = Smi::ToInt(smi_constant);
318 0 : return true;
319 : }
320 : }
321 : return false;
322 : }
323 :
324 956 : TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
325 : TNode<IntPtrT> value) {
326 956 : Comment("IntPtrRoundUpToPowerOfTwo32");
327 : CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
328 956 : value = Signed(IntPtrSub(value, IntPtrConstant(1)));
329 10516 : for (int i = 1; i <= 16; i *= 2) {
330 9560 : value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
331 : }
332 1912 : return Signed(IntPtrAdd(value, IntPtrConstant(1)));
333 : }
334 :
335 0 : Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
336 0 : if (mode == SMI_PARAMETERS) {
337 0 : return TaggedIsSmi(value);
338 : } else {
339 0 : return Int32Constant(1);
340 : }
341 : }
342 :
// Returns true iff |value| is a nonzero power of two.
TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
  // value && !(value & (value - 1))
  // The zero case is routed through the constant 1 so the final comparison
  // against 0 yields false for value == 0.
  return WordEqual(
      Select<IntPtrT>(
          WordEqual(value, IntPtrConstant(0)),
          [=] { return IntPtrConstant(1); },
          [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
      IntPtrConstant(0));
}
352 :
// Rounds {x} to the nearest integer, ties towards +Infinity: computes
// ceil(x) and steps one down when the ceiling overshoots by more than 0.5.
TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
  Node* one = Float64Constant(1.0);
  Node* one_half = Float64Constant(0.5);

  Label return_x(this);

  // Round up {x} towards Infinity.
  VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));

  // ceil(x) is the answer when it is within 0.5 of {x}.
  GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
         &return_x);
  // Otherwise the nearest integer is one below the ceiling.
  var_x.Bind(Float64Sub(var_x.value(), one));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
370 :
// Rounds {x} towards +Infinity. Uses the hardware round-up instruction when
// available; otherwise emulates it with the 2^52 addition trick (adding and
// subtracting 2^52 rounds doubles in ]-2^52, 2^52[ to integer granularity).
TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundUpSupported()) {
    return Float64RoundUp(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    // The trick rounds to nearest; correct a downward result upwards.
    GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
422 :
// Rounds {x} towards -Infinity. Uses the hardware round-down instruction
// when available; otherwise emulates it with the 2^52 addition trick.
TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundDownSupported()) {
    return Float64RoundDown(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    // The trick rounds to nearest; correct an upward result downwards.
    GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
474 :
// Rounds {x} to the nearest integer with ties-to-even semantics. Uses the
// hardware instruction when available; otherwise decides between floor(x)
// and floor(x)+1 based on the fractional part, breaking ties on parity.
TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundTiesEvenSupported()) {
    return Float64RoundTiesEven(x);
  }
  // See ES#sec-touint8clamp for details.
  Node* f = Float64Floor(x);
  Node* f_and_half = Float64Add(f, Float64Constant(0.5));

  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label return_f(this), return_f_plus_one(this), done(this);

  // Strictly above the midpoint -> round up; strictly below -> round down.
  GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
  GotoIf(Float64LessThan(x, f_and_half), &return_f);
  {
    // Exactly on the midpoint: pick the even neighbor.
    Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
    Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
           &return_f_plus_one);
  }

  BIND(&return_f);
  var_result.Bind(f);
  Goto(&done);

  BIND(&return_f_plus_one);
  var_result.Bind(Float64Add(f, Float64Constant(1.0)));
  Goto(&done);

  BIND(&done);
  return TNode<Float64T>::UncheckedCast(var_result.value());
}
505 :
// Rounds {x} towards zero. Uses the hardware truncate instruction when
// available; otherwise rounds positive inputs down and negative inputs up,
// each via hardware rounding if supported or the 2^52 addition trick.
TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundTruncateSupported()) {
    return Float64RoundTruncate(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    if (IsFloat64RoundDownSupported()) {
      var_x.Bind(Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    if (IsFloat64RoundUpSupported()) {
      var_x.Bind(Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless its in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoIfNot(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
566 :
// Verifies that {smi} carries a well-formed Smi bit pattern. On 64-bit
// targets with 31-bit Smi payloads the upper word half must be the sign
// extension of the lower half; in other configurations any tagged Smi is
// valid by construction.
TNode<BoolT> CodeStubAssembler::IsValidSmi(TNode<Smi> smi) {
  if (SmiValuesAre31Bits() && kSystemPointerSize == kInt64Size) {
    // Check that the Smi value is properly sign-extended.
    TNode<IntPtrT> value = Signed(BitcastTaggedToWord(smi));
    return WordEqual(value, ChangeInt32ToIntPtr(TruncateIntPtrToInt32(value)));
  }
  return Int32TrueConstant();
}
575 :
576 0 : Node* CodeStubAssembler::SmiShiftBitsConstant() {
577 427728 : return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
578 : }
579 :
580 10768 : TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
581 10768 : TNode<IntPtrT> value_intptr = ChangeInt32ToIntPtr(value);
582 : TNode<Smi> smi =
583 21536 : BitcastWordToTaggedSigned(WordShl(value_intptr, SmiShiftBitsConstant()));
584 10768 : return smi;
585 : }
586 :
587 7332 : TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
588 : intptr_t constant_value;
589 7332 : if (ToIntPtrConstant(value, constant_value)) {
590 44 : return (static_cast<uintptr_t>(constant_value) <=
591 : static_cast<uintptr_t>(Smi::kMaxValue))
592 : ? Int32TrueConstant()
593 44 : : Int32FalseConstant();
594 : }
595 :
596 14576 : return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
597 : }
598 :
599 75769 : TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
600 : int32_t constant_value;
601 75769 : if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
602 6925 : return SmiConstant(constant_value);
603 : }
604 : TNode<Smi> smi =
605 137688 : BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
606 68844 : return smi;
607 : }
608 :
609 134329 : TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
610 : intptr_t constant_value;
611 134329 : if (ToIntPtrConstant(value, constant_value)) {
612 417 : return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
613 : }
614 267824 : return Signed(WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()));
615 : }
616 :
617 67620 : TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
618 67620 : TNode<IntPtrT> result = SmiUntag(value);
619 67620 : return TruncateIntPtrToInt32(result);
620 : }
621 :
622 48868 : TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
623 97736 : return ChangeInt32ToFloat64(SmiToInt32(value));
624 : }
625 :
626 2016 : TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
627 4032 : return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
628 : }
629 :
630 224 : TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
631 448 : return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
632 : }
633 :
634 4820 : TNode<IntPtrT> CodeStubAssembler::TryIntPtrAdd(TNode<IntPtrT> a,
635 : TNode<IntPtrT> b,
636 : Label* if_overflow) {
637 4820 : TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(a, b);
638 : TNode<BoolT> overflow = Projection<1>(pair);
639 4820 : GotoIf(overflow, if_overflow);
640 4820 : return Projection<0>(pair);
641 : }
642 :
// Adds two Smis, jumping to |if_overflow| when the result does not fit in a
// Smi; otherwise returns the sum as a Smi.
TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
                                        Label* if_overflow) {
  if (SmiValuesAre32Bits()) {
    // 32-bit payloads: add the full tagged words with an overflow check.
    return BitcastWordToTaggedSigned(TryIntPtrAdd(
        BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs), if_overflow));
  } else {
    DCHECK(SmiValuesAre31Bits());
    // 31-bit payloads: add the low 32 bits with an overflow check and
    // sign-extend the result back to word size.
    TNode<PairT<Int32T, BoolT>> pair =
        Int32AddWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
                             TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
    TNode<BoolT> overflow = Projection<1>(pair);
    GotoIf(overflow, if_overflow);
    TNode<Int32T> result = Projection<0>(pair);
    return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
  }
}
659 :
// Subtracts two Smis, jumping to |if_overflow| when the result does not fit
// in a Smi; otherwise returns the difference as a Smi.
TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
                                        Label* if_overflow) {
  if (SmiValuesAre32Bits()) {
    // 32-bit payloads: subtract the full tagged words with an overflow check.
    TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(
        BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
    TNode<BoolT> overflow = Projection<1>(pair);
    GotoIf(overflow, if_overflow);
    TNode<IntPtrT> result = Projection<0>(pair);
    return BitcastWordToTaggedSigned(result);
  } else {
    DCHECK(SmiValuesAre31Bits());
    // 31-bit payloads: subtract the low 32 bits with an overflow check and
    // sign-extend the result back to word size.
    TNode<PairT<Int32T, BoolT>> pair =
        Int32SubWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
                             TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
    TNode<BoolT> overflow = Projection<1>(pair);
    GotoIf(overflow, if_overflow);
    TNode<Int32T> result = Projection<0>(pair);
    return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
  }
}
680 :
// Returns the larger of {a} and {b}; when neither ordered comparison holds
// (i.e. a NaN is involved), returns NaN.
TNode<Number> CodeStubAssembler::NumberMax(SloppyTNode<Number> a,
                                           SloppyTNode<Number> b) {
  // TODO(danno): This could be optimized by specifically handling smi cases.
  TVARIABLE(Number, result);
  Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
  GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
  GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
  // Neither a >= b nor b >= a: at least one operand is NaN.
  result = NanConstant();
  Goto(&done);
  BIND(&greater_than_equal_a);
  result = a;
  Goto(&done);
  BIND(&greater_than_equal_b);
  result = b;
  Goto(&done);
  BIND(&done);
  return result.value();
}
699 :
// Returns the smaller of {a} and {b}; when neither ordered comparison holds
// (i.e. a NaN is involved), returns NaN.
TNode<Number> CodeStubAssembler::NumberMin(SloppyTNode<Number> a,
                                           SloppyTNode<Number> b) {
  // TODO(danno): This could be optimized by specifically handling smi cases.
  TVARIABLE(Number, result);
  Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
  GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
  GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
  // Neither a >= b nor b >= a: at least one operand is NaN.
  result = NanConstant();
  Goto(&done);
  BIND(&greater_than_equal_a);
  // a >= b, so the minimum is b.
  result = b;
  Goto(&done);
  BIND(&greater_than_equal_b);
  // b >= a, so the minimum is a.
  result = a;
  Goto(&done);
  BIND(&done);
  return result.value();
}
718 :
// Converts {index} to an integer and clamps it into [0, length], treating a
// negative index as relative to the end (length + index, floored at 0).
TNode<IntPtrT> CodeStubAssembler::ConvertToRelativeIndex(
    TNode<Context> context, TNode<Object> index, TNode<IntPtrT> length) {
  TVARIABLE(IntPtrT, result);

  // ToInteger with -0 truncated to +0.
  TNode<Number> const index_int =
      ToInteger_Inline(context, index, CodeStubAssembler::kTruncateMinusZero);
  TNode<IntPtrT> zero = IntPtrConstant(0);

  Label done(this);
  Label if_issmi(this), if_isheapnumber(this, Label::kDeferred);
  Branch(TaggedIsSmi(index_int), &if_issmi, &if_isheapnumber);

  BIND(&if_issmi);
  {
    TNode<Smi> const index_smi = CAST(index_int);
    // Negative: clamp length + index to >= 0; non-negative: clamp to length.
    result = Select<IntPtrT>(
        IntPtrLessThan(SmiUntag(index_smi), zero),
        [=] { return IntPtrMax(IntPtrAdd(length, SmiUntag(index_smi)), zero); },
        [=] { return IntPtrMin(SmiUntag(index_smi), length); });
    Goto(&done);
  }

  BIND(&if_isheapnumber);
  {
    // If {index} is a heap number, it is definitely out of bounds. If it is
    // negative, {index} = max({length} + {index}),0) = 0'. If it is positive,
    // set {index} to {length}.
    TNode<HeapNumber> const index_hn = CAST(index_int);
    TNode<Float64T> const float_zero = Float64Constant(0.);
    TNode<Float64T> const index_float = LoadHeapNumberValue(index_hn);
    result = SelectConstant<IntPtrT>(Float64LessThan(index_float, float_zero),
                                     zero, length);
    Goto(&done);
  }
  BIND(&done);
  return result.value();
}
756 :
// Computes {a} % {b} for Smi operands, returning a Number: NaN when {b} is
// zero, -0 when the result is zero with a negative left-hand side, and a
// tagged integer otherwise.
TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
  TVARIABLE(Number, var_result);
  Label return_result(this, &var_result),
      return_minuszero(this, Label::kDeferred),
      return_nan(this, Label::kDeferred);

  // Untag {a} and {b}.
  TNode<Int32T> int_a = SmiToInt32(a);
  TNode<Int32T> int_b = SmiToInt32(b);

  // Return NaN if {b} is zero.
  GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);

  // Check if {a} is non-negative.
  Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
  Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
         &if_aisnegative);

  BIND(&if_aisnotnegative);
  {
    // Fast case, don't need to check any other edge cases.
    TNode<Int32T> r = Int32Mod(int_a, int_b);
    var_result = SmiFromInt32(r);
    Goto(&return_result);
  }

  BIND(&if_aisnegative);
  {
    if (SmiValuesAre32Bits()) {
      // Check if {a} is kMinInt and {b} is -1 (only relevant if the
      // kMinInt is actually representable as a Smi).
      Label join(this);
      GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
      GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
      Goto(&join);
      BIND(&join);
    }

    // Perform the integer modulus operation.
    TNode<Int32T> r = Int32Mod(int_a, int_b);

    // Check if {r} is zero, and if so return -0, because we have to
    // take the sign of the left hand side {a}, which is negative.
    GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);

    // The remainder {r} can be outside the valid Smi range on 32bit
    // architectures, so we cannot just say SmiFromInt32(r) here.
    var_result = ChangeInt32ToTagged(r);
    Goto(&return_result);
  }

  BIND(&return_minuszero);
  var_result = MinusZeroConstant();
  Goto(&return_result);

  BIND(&return_nan);
  var_result = NanConstant();
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
819 :
// Multiplies two Smis, returning a Number: a tagged integer in the common
// case, -0 when the product is zero but an operand was negative, and a heap
// number (computed in float64) when the 32-bit product overflows.
TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
  TVARIABLE(Number, var_result);
  VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
  VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
  Label return_result(this, &var_result);

  // Both {a} and {b} are Smis. Convert them to integers and multiply.
  Node* lhs32 = SmiToInt32(a);
  Node* rhs32 = SmiToInt32(b);
  Node* pair = Int32MulWithOverflow(lhs32, rhs32);

  Node* overflow = Projection(1, pair);

  // Check if the multiplication overflowed.
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_notoverflow);
  {
    // If the answer is zero, we may need to return -0.0, depending on the
    // input.
    Label answer_zero(this), answer_not_zero(this);
    Node* answer = Projection(0, pair);
    Node* zero = Int32Constant(0);
    Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
    BIND(&answer_not_zero);
    {
      var_result = ChangeInt32ToTagged(answer);
      Goto(&return_result);
    }
    BIND(&answer_zero);
    {
      // The product is -0 iff exactly one operand was negative; the sign bit
      // of (lhs | rhs) captures that.
      Node* or_result = Word32Or(lhs32, rhs32);
      Label if_should_be_negative_zero(this), if_should_be_zero(this);
      Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
             &if_should_be_zero);
      BIND(&if_should_be_negative_zero);
      {
        var_result = MinusZeroConstant();
        Goto(&return_result);
      }
      BIND(&if_should_be_zero);
      {
        var_result = SmiConstant(0);
        Goto(&return_result);
      }
    }
  }
  BIND(&if_overflow);
  {
    // Overflowed: redo the multiplication in float64 and box the result.
    var_lhs_float64.Bind(SmiToFloat64(a));
    var_rhs_float64.Bind(SmiToFloat64(b));
    Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
    var_result = AllocateHeapNumberWithValue(value);
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
879 :
 : // Attempts exact Smi division {dividend} / {divisor}. Jumps to {bailout}
 : // (so the caller can do float64 division) whenever the result is not an
 : // exact Smi: divisor is 0, result would be -0.0 (0 / negative), kMinInt
 : // overflow on division by -1, or the division leaves a remainder.
880 336 : TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
881 : Label* bailout) {
882 : // Both {a} and {b} are Smis. Bailout to floating point division if {divisor}
883 : // is zero.
884 672 : GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);
885 :
886 : // Do floating point division if {dividend} is zero and {divisor} is
887 : // negative.
888 336 : Label dividend_is_zero(this), dividend_is_not_zero(this);
889 672 : Branch(WordEqual(dividend, SmiConstant(0)), &dividend_is_zero,
890 336 : &dividend_is_not_zero);
891 :
892 : BIND(&dividend_is_zero);
893 : {
894 672 : GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
895 336 : Goto(&dividend_is_not_zero);
896 : }
897 : BIND(&dividend_is_not_zero);
898 :
899 336 : TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
900 336 : TNode<Int32T> untagged_dividend = SmiToInt32(dividend);
901 :
902 : // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
903 : // if the Smi size is 31) and {divisor} is -1.
904 336 : Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
905 1008 : Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
906 336 : &divisor_is_minus_one, &divisor_is_not_minus_one);
907 :
908 : BIND(&divisor_is_minus_one);
909 : {
910 672 : GotoIf(Word32Equal(
911 : untagged_dividend,
912 672 : Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
913 336 : bailout);
914 336 : Goto(&divisor_is_not_minus_one);
915 : }
916 : BIND(&divisor_is_not_minus_one);
917 :
 : // Verify the division was exact by multiplying back and comparing.
918 336 : TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
919 336 : TNode<Int32T> truncated = Signed(Int32Mul(untagged_result, untagged_divisor));
920 :
921 : // Do floating point division if the remainder is not 0.
922 672 : GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);
923 :
924 672 : return SmiFromInt32(untagged_result);
925 : }
926 :
 : // Compares {x} and {y} lexicographically (as in Array.prototype.sort's
 : // default comparator) by calling the C function
 : // smi_lexicographic_compare; the C function's tagged result is returned.
927 56 : TNode<Smi> CodeStubAssembler::SmiLexicographicCompare(TNode<Smi> x,
928 : TNode<Smi> y) {
929 : TNode<ExternalReference> smi_lexicographic_compare =
930 56 : ExternalConstant(ExternalReference::smi_lexicographic_compare_function());
931 : TNode<ExternalReference> isolate_ptr =
932 56 : ExternalConstant(ExternalReference::isolate_address(isolate()));
933 56 : return CAST(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged(),
934 : std::make_pair(MachineType::Pointer(), isolate_ptr),
935 : std::make_pair(MachineType::AnyTagged(), x),
936 : std::make_pair(MachineType::AnyTagged(), y)));
937 : }
938 :
 : // Narrows an IntPtrT to Int32T: a real 64->32 truncation on 64-bit
 : // targets, a no-op reinterpretation on 32-bit targets.
939 92036 : TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
940 : SloppyTNode<IntPtrT> value) {
941 92036 : if (Is64()) {
942 92036 : return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
943 : }
944 : return ReinterpretCast<Int32T>(value);
945 : }
946 :
 : // True iff the tag bits of {a} (word & kSmiTagMask) are zero, i.e. {a}
 : // is a Smi rather than a heap object pointer.
947 167720 : TNode<BoolT> CodeStubAssembler::TaggedIsSmi(SloppyTNode<Object> a) {
948 167720 : return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
949 335440 : IntPtrConstant(0));
950 : }
951 :
 : // MaybeObject overload: same Smi-tag test, but bitcasts via
 : // BitcastMaybeObjectToWord since {a} may be a weak reference.
952 1680 : TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
953 : return WordEqual(
954 5040 : WordAnd(BitcastMaybeObjectToWord(a), IntPtrConstant(kSmiTagMask)),
955 5040 : IntPtrConstant(0));
956 : }
957 :
 : // Negation of TaggedIsSmi: true iff the Smi tag bits of {a} are set.
958 24684 : TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(SloppyTNode<Object> a) {
959 : return WordNotEqual(
960 24684 : WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
961 49368 : IntPtrConstant(0));
962 : }
963 :
 : // True iff {a} is a Smi with a clear sign bit, i.e. a non-negative Smi;
 : // checks the tag and sign bits in a single masked comparison.
964 1972 : TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
965 : return WordEqual(WordAnd(BitcastTaggedToWord(a),
966 1972 : IntPtrConstant(kSmiTagMask | kSmiSignMask)),
967 3944 : IntPtrConstant(0));
968 : }
969 :
 : // True iff {word} is a multiple of {alignment}; {alignment} must be a
 : // power of two (asserted) so the test is word & (alignment - 1) == 0.
970 0 : TNode<BoolT> CodeStubAssembler::WordIsAligned(SloppyTNode<WordT> word,
971 : size_t alignment) {
972 : DCHECK(base::bits::IsPowerOfTwo(alignment));
973 0 : return WordEqual(IntPtrConstant(0),
974 0 : WordAnd(word, IntPtrConstant(alignment - 1)));
975 : }
976 :
 : // Debug-only Bind overload that forwards the assembler debug info for
 : // better diagnostics in CSA verification failures.
977 : #if DEBUG
978 : void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
979 : CodeAssembler::Bind(label, debug_info);
980 : }
981 : #endif // DEBUG
982 :
 : // Plain label bind; simply forwards to CodeAssembler::Bind.
983 1456229 : void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
984 :
 : // Loads the float64 element of {array} at Smi {index}; jumps to
 : // {if_hole} when the slot contains the hole sentinel.
985 1512 : TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
986 : TNode<FixedDoubleArray> array, TNode<Smi> index, Label* if_hole) {
987 : return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
988 1512 : SMI_PARAMETERS, if_hole);
989 : }
990 :
 : // IntPtrT-index variant of the hole-checked float64 element load.
991 0 : TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
992 : TNode<FixedDoubleArray> array, TNode<IntPtrT> index, Label* if_hole) {
993 : return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
994 0 : INTPTR_PARAMETERS, if_hole);
995 : }
996 :
 : // Walks the prototype chain starting at {receiver_map}. Branches to
 : // {definitely_no_elements} when every prototype up to null has empty
 : // elements (empty fixed array or empty slow element dictionary), and to
 : // {possibly_elements} when any prototype has elements or is a Proxy /
 : // special API object / JSValue wrapper (except a wrapper of the empty
 : // string, which is still elements-free).
997 168 : void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
998 : Node* receiver_map, Label* definitely_no_elements,
999 : Label* possibly_elements) {
1000 : CSA_SLOW_ASSERT(this, IsMap(receiver_map));
1001 336 : VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
1002 168 : Label loop_body(this, &var_map);
1003 336 : Node* empty_fixed_array = LoadRoot(RootIndex::kEmptyFixedArray);
1004 : Node* empty_slow_element_dictionary =
1005 336 : LoadRoot(RootIndex::kEmptySlowElementDictionary);
1006 168 : Goto(&loop_body);
1007 :
1008 : BIND(&loop_body);
1009 : {
1010 168 : Node* map = var_map.value();
1011 : Node* prototype = LoadMapPrototype(map);
 : // Reached the end of the chain with no elements found.
1012 336 : GotoIf(IsNull(prototype), definitely_no_elements);
1013 : Node* prototype_map = LoadMap(prototype);
1014 : TNode<Int32T> prototype_instance_type = LoadMapInstanceType(prototype_map);
1015 :
1016 : // Pessimistically assume elements if a Proxy, Special API Object,
1017 : // or JSValue wrapper is found on the prototype chain. After this
1018 : // instance type check, it's not necessary to check for interceptors or
1019 : // access checks.
1020 168 : Label if_custom(this, Label::kDeferred), if_notcustom(this);
1021 336 : Branch(IsCustomElementsReceiverInstanceType(prototype_instance_type),
1022 168 : &if_custom, &if_notcustom);
1023 :
1024 : BIND(&if_custom);
1025 : {
1026 : // For string JSValue wrappers we still support the checks as long
1027 : // as they wrap the empty string.
1028 336 : GotoIfNot(InstanceTypeEqual(prototype_instance_type, JS_VALUE_TYPE),
1029 168 : possibly_elements);
1030 : Node* prototype_value = LoadJSValueValue(prototype);
1031 336 : Branch(IsEmptyString(prototype_value), &if_notcustom, possibly_elements);
1032 : }
1033 :
1034 : BIND(&if_notcustom);
1035 : {
 : // Continue up the chain only while the elements store is one of the
 : // two canonical empty backing stores.
1036 : Node* prototype_elements = LoadElements(prototype);
1037 168 : var_map.Bind(prototype_map);
1038 336 : GotoIf(WordEqual(prototype_elements, empty_fixed_array), &loop_body);
1039 336 : Branch(WordEqual(prototype_elements, empty_slow_element_dictionary),
1040 168 : &loop_body, possibly_elements);
1041 : }
1042 : }
1043 168 : }
1044 :
 : // Branches to {if_true} iff {object} is a JSReceiver (non-Smi with
 : // receiver instance type); Smis go to {if_false} immediately.
1045 1904 : void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
1046 : Label* if_false) {
1047 3808 : GotoIf(TaggedIsSmi(object), if_false);
1048 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1049 3808 : Branch(IsJSReceiver(object), if_true, if_false);
1050 1904 : }
1051 :
 : // Jumps to {if_true} when the runtime's force-slow-path byte is set.
 : // Compiles to a no-op unless V8_ENABLE_FORCE_SLOW_PATH is defined.
1052 2464 : void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
1053 : #ifdef V8_ENABLE_FORCE_SLOW_PATH
1054 : Node* const force_slow_path_addr =
1055 : ExternalConstant(ExternalReference::force_slow_path(isolate()));
1056 : Node* const force_slow = Load(MachineType::Uint8(), force_slow_path_addr);
1057 :
1058 : GotoIf(force_slow, if_true);
1059 : #endif
1060 2464 : }
1061 :
 : // Jumps to {if_true} when the debugger's current execution mode equals
 : // DebugInfo::kSideEffects (i.e. side-effect checking is active).
1062 4 : void CodeStubAssembler::GotoIfDebugExecutionModeChecksSideEffects(
1063 : Label* if_true) {
1064 : STATIC_ASSERT(sizeof(DebugInfo::ExecutionMode) >= sizeof(int32_t));
1065 :
1066 : TNode<ExternalReference> execution_mode_address = ExternalConstant(
1067 4 : ExternalReference::debug_execution_mode_address(isolate()));
1068 : TNode<Int32T> execution_mode =
1069 4 : UncheckedCast<Int32T>(Load(MachineType::Int32(), execution_mode_address));
1070 :
1071 12 : GotoIf(Word32Equal(execution_mode, Int32Constant(DebugInfo::kSideEffects)),
1072 4 : if_true);
1073 4 : }
1074 :
 : // Bump-pointer allocation of {size_in_bytes} in the space described by
 : // {top_address}/{limit_address}. Falls back to a runtime allocation when
 : // the size is not a valid positive Smi (fatal OOM), when large-object
 : // allocation is allowed and the size exceeds the regular-object limit,
 : // or when the bump region is exhausted. Optionally pads by 4 bytes with
 : // a one-pointer filler to achieve double alignment (kDoubleAlignment).
1075 7708 : TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
1076 : AllocationFlags flags,
1077 : TNode<RawPtrT> top_address,
1078 : TNode<RawPtrT> limit_address) {
1079 15416 : Label if_out_of_memory(this, Label::kDeferred);
1080 :
1081 : // TODO(jgruber,jkummerow): Extract the slow paths (= probably everything
1082 : // but bump pointer allocation) into a builtin to save code space. The
1083 : // size_in_bytes check may be moved there as well since a non-smi
1084 : // size_in_bytes probably doesn't fit into the bump pointer region
1085 : // (double-check that).
1086 :
 : // Constant sizes are validated at stub-generation time; dynamic sizes
 : // get a runtime Smi-range check instead.
1087 : intptr_t size_in_bytes_constant;
1088 : bool size_in_bytes_is_constant = false;
1089 7708 : if (ToIntPtrConstant(size_in_bytes, size_in_bytes_constant)) {
1090 : size_in_bytes_is_constant = true;
1091 1176 : CHECK(Internals::IsValidSmi(size_in_bytes_constant));
1092 588 : CHECK_GT(size_in_bytes_constant, 0);
1093 : } else {
1094 14240 : GotoIfNot(IsValidPositiveSmi(size_in_bytes), &if_out_of_memory);
1095 : }
1096 :
1097 : TNode<RawPtrT> top =
1098 7708 : UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), top_address));
1099 : TNode<RawPtrT> limit =
1100 7708 : UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), limit_address));
1101 :
1102 : // If there's not enough space, call the runtime.
1103 : TVARIABLE(Object, result);
1104 7708 : Label runtime_call(this, Label::kDeferred), no_runtime_call(this), out(this);
1105 :
1106 7708 : bool needs_double_alignment = flags & kDoubleAlignment;
1107 :
 : // Objects larger than the regular-object limit always go to the runtime.
1108 7708 : if (flags & kAllowLargeObjectAllocation) {
1109 5736 : Label next(this);
1110 11472 : GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
1111 :
1112 5736 : if (FLAG_young_generation_large_objects) {
1113 11472 : result = CallRuntime(Runtime::kAllocateInYoungGeneration,
1114 : NoContextConstant(), SmiTag(size_in_bytes));
1115 : } else {
1116 : TNode<Smi> alignment_flag = SmiConstant(Smi::FromInt(
1117 0 : AllocateDoubleAlignFlag::encode(needs_double_alignment)));
1118 0 : result =
1119 : CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
1120 : SmiTag(size_in_bytes), alignment_flag);
1121 : }
1122 5736 : Goto(&out);
1123 :
1124 : BIND(&next);
1125 : }
1126 :
 : // Reserve 4 extra bytes when top is misaligned so a filler can be
 : // inserted ahead of the object (32-bit double alignment).
1127 : TVARIABLE(IntPtrT, adjusted_size, size_in_bytes);
1128 :
1129 7708 : if (needs_double_alignment) {
1130 0 : Label next(this);
1131 0 : GotoIfNot(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &next);
1132 :
1133 0 : adjusted_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
1134 0 : Goto(&next);
1135 :
1136 : BIND(&next);
1137 : }
1138 :
1139 : TNode<IntPtrT> new_top =
1140 : IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value());
1141 :
1142 15416 : Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
1143 7708 : &no_runtime_call);
1144 :
1145 : BIND(&runtime_call);
1146 : {
1147 7708 : if (flags & kPretenured) {
1148 : TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
1149 0 : AllocateDoubleAlignFlag::encode(needs_double_alignment)));
1150 0 : result =
1151 : CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
1152 : SmiTag(size_in_bytes), runtime_flags);
1153 : } else {
1154 15416 : result = CallRuntime(Runtime::kAllocateInYoungGeneration,
1155 : NoContextConstant(), SmiTag(size_in_bytes));
1156 : }
1157 7708 : Goto(&out);
1158 : }
1159 :
1160 : // When there is enough space, return `top' and bump it up.
1161 : BIND(&no_runtime_call);
1162 : {
1163 : StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
1164 7708 : new_top);
1165 :
1166 : TVARIABLE(IntPtrT, address, UncheckedCast<IntPtrT>(top));
1167 :
1168 7708 : if (needs_double_alignment) {
1169 0 : Label next(this);
1170 0 : GotoIf(IntPtrEqual(adjusted_size.value(), size_in_bytes), &next);
1171 :
1172 : // Store a filler and increase the address by 4.
1173 : StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
1174 0 : LoadRoot(RootIndex::kOnePointerFillerMap));
1175 0 : address = IntPtrAdd(UncheckedCast<IntPtrT>(top), IntPtrConstant(4));
1176 0 : Goto(&next);
1177 :
1178 : BIND(&next);
1179 : }
1180 :
 : // Tag the raw address to form the HeapObject pointer.
1181 15416 : result = BitcastWordToTagged(
1182 7708 : IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
1183 7708 : Goto(&out);
1184 : }
1185 :
1186 7708 : if (!size_in_bytes_is_constant) {
1187 : BIND(&if_out_of_memory);
1188 : CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
1189 : NoContextConstant());
1190 7120 : Unreachable();
1191 : }
1192 :
1193 : BIND(&out);
1194 7708 : return UncheckedCast<HeapObject>(result.value());
1195 : }
1196 :
 : // AllocateRaw convenience wrapper for callers that must not request
 : // double alignment (asserted).
1197 0 : TNode<HeapObject> CodeStubAssembler::AllocateRawUnaligned(
1198 : TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1199 : TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1200 : DCHECK_EQ(flags & kDoubleAlignment, 0);
1201 4920 : return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
1202 : }
1203 :
 : // Double-aligned allocation: forces the kDoubleAlignment flag on 32-bit
 : // hosts; on 64-bit hosts allocations are naturally double aligned, so
 : // the flag is stripped instead.
1204 0 : TNode<HeapObject> CodeStubAssembler::AllocateRawDoubleAligned(
1205 : TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1206 : TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1207 : #if defined(V8_HOST_ARCH_32_BIT)
1208 : return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
1209 : limit_address);
1210 : #elif defined(V8_HOST_ARCH_64_BIT)
1211 : #ifdef V8_COMPRESS_POINTERS
1212 : // TODO(ishell, v8:8875): Consider using aligned allocations once the
1213 : // allocation alignment inconsistency is fixed. For now we keep using
1214 : // unaligned access since both x64 and arm64 architectures (where pointer
1215 : // compression is supported) allow unaligned access to doubles and full words.
1216 : #endif // V8_COMPRESS_POINTERS
1217 : // Allocation on 64 bit machine is naturally double aligned
1218 : return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
1219 2788 : limit_address);
1220 : #else
1221 : #error Architecture not supported
1222 : #endif
1223 : }
1224 :
 : // New-space allocation of a regular-sized object (size asserted to be
 : // within kMaxRegularHeapObjectSize); only kNone/kDoubleAlignment allowed.
1225 840 : TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(
1226 : TNode<IntPtrT> size_in_bytes, AllocationFlags flags) {
1227 : DCHECK(flags == kNone || flags == kDoubleAlignment);
1228 : CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
1229 12560 : return Allocate(size_in_bytes, flags);
1230 : }
1231 :
 : // Central allocation entry point. Chooses young vs. old generation from
 : // kPretenured, takes the optimized inline-allocation fast path when
 : // neither double alignment nor large-object support is requested, and
 : // otherwise computes the space's top/limit addresses and dispatches to
 : // AllocateRawDoubleAligned / AllocateRawUnaligned.
1232 65240 : TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
1233 : AllocationFlags flags) {
1234 65240 : Comment("Allocate");
1235 : bool const new_space = !(flags & kPretenured);
 : // Constant sizes must fit a regular heap object unless large objects
 : // are explicitly allowed.
1236 65240 : if (!(flags & kAllowLargeObjectAllocation)) {
1237 : intptr_t size_constant;
1238 59504 : if (ToIntPtrConstant(size_in_bytes, size_constant)) {
1239 44544 : CHECK_LE(size_constant, kMaxRegularHeapObjectSize);
1240 : }
1241 : }
1242 65240 : if (!(flags & kDoubleAlignment) && !(flags & kAllowLargeObjectAllocation)) {
1243 : return OptimizedAllocate(size_in_bytes, new_space ? AllocationType::kYoung
1244 57532 : : AllocationType::kOld);
1245 : }
 : // The limit pointer lives one system pointer after the top pointer in
 : // both spaces (asserted below), so it is derived by address arithmetic.
1246 : TNode<ExternalReference> top_address = ExternalConstant(
1247 : new_space
1248 7708 : ? ExternalReference::new_space_allocation_top_address(isolate())
1249 15416 : : ExternalReference::old_space_allocation_top_address(isolate()));
1250 : DCHECK_EQ(kSystemPointerSize,
1251 : ExternalReference::new_space_allocation_limit_address(isolate())
1252 : .address() -
1253 : ExternalReference::new_space_allocation_top_address(isolate())
1254 : .address());
1255 : DCHECK_EQ(kSystemPointerSize,
1256 : ExternalReference::old_space_allocation_limit_address(isolate())
1257 : .address() -
1258 : ExternalReference::old_space_allocation_top_address(isolate())
1259 : .address());
1260 : TNode<IntPtrT> limit_address =
1261 : IntPtrAdd(ReinterpretCast<IntPtrT>(top_address),
1262 7708 : IntPtrConstant(kSystemPointerSize));
1263 :
1264 7708 : if (flags & kDoubleAlignment) {
1265 : return AllocateRawDoubleAligned(size_in_bytes, flags,
1266 : ReinterpretCast<RawPtrT>(top_address),
1267 : ReinterpretCast<RawPtrT>(limit_address));
1268 : } else {
1269 : return AllocateRawUnaligned(size_in_bytes, flags,
1270 : ReinterpretCast<RawPtrT>(top_address),
1271 : ReinterpretCast<RawPtrT>(limit_address));
1272 : }
1273 : }
1274 :
 : // Constant-size convenience overload of AllocateInNewSpace.
1275 1696 : TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
1276 : AllocationFlags flags) {
1277 1696 : CHECK(flags == kNone || flags == kDoubleAlignment);
1278 : DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
1279 1696 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1280 : }
1281 :
 : // Constant-size convenience overload of Allocate.
1282 2320 : TNode<HeapObject> CodeStubAssembler::Allocate(int size_in_bytes,
1283 : AllocationFlags flags) {
1284 33368 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1285 : }
1286 :
 : // Returns a tagged pointer {offset} bytes into {previous}; performs no
 : // allocation itself — the caller must have reserved the space already.
1287 5432 : TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1288 : TNode<IntPtrT> offset) {
1289 : return UncheckedCast<HeapObject>(
1290 10864 : BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset)));
1291 : }
1292 :
 : // Constant-offset convenience overload of InnerAllocate.
1293 2240 : TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1294 : int offset) {
1295 4312 : return InnerAllocate(previous, IntPtrConstant(offset));
1296 : }
1297 :
 : // True iff {size} <= kMaxRegularHeapObjectSize, i.e. the object does not
 : // need the large-object space.
1298 5904 : TNode<BoolT> CodeStubAssembler::IsRegularHeapObjectSize(TNode<IntPtrT> size) {
1299 : return UintPtrLessThanOrEqual(size,
1300 11808 : IntPtrConstant(kMaxRegularHeapObjectSize));
1301 : }
1302 :
 : // Implements ToBoolean as a branch: false, Smi 0, the empty string,
 : // undetectable objects (null/undefined/document.all), ±0.0, NaN, and
 : // BigInts that convert to false all go to {if_false}; everything else
 : // goes to {if_true}. HeapNumber and BigInt checks are deferred paths.
1303 5152 : void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
1304 : Label* if_false) {
1305 10304 : Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
1306 5152 : if_bigint(this, Label::kDeferred);
1307 : // Rule out false {value}.
1308 5152 : GotoIf(WordEqual(value, FalseConstant()), if_false);
1309 :
1310 : // Check if {value} is a Smi or a HeapObject.
1311 10304 : Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);
1312 :
1313 : BIND(&if_smi);
1314 : {
1315 : // The {value} is a Smi, only need to check against zero.
1316 5152 : BranchIfSmiEqual(CAST(value), SmiConstant(0), if_false, if_true);
1317 : }
1318 :
1319 : BIND(&if_notsmi);
1320 : {
1321 : // Check if {value} is the empty string.
1322 10304 : GotoIf(IsEmptyString(value), if_false);
1323 :
1324 : // The {value} is a HeapObject, load its map.
1325 : Node* value_map = LoadMap(value);
1326 :
1327 : // Only null, undefined and document.all have the undetectable bit set,
1328 : // so we can return false immediately when that bit is set.
1329 10304 : GotoIf(IsUndetectableMap(value_map), if_false);
1330 :
1331 : // We still need to handle numbers specially, but all other {value}s
1332 : // that make it here yield true.
1333 10304 : GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
1334 10304 : Branch(IsBigInt(value), &if_bigint, if_true);
1335 :
1336 : BIND(&if_heapnumber);
1337 : {
1338 : // Load the floating point value of {value}.
1339 5152 : Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
1340 5152 : MachineType::Float64());
1341 :
1342 : // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
1343 20608 : Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
1344 5152 : if_true, if_false);
1345 : }
1346 :
1347 : BIND(&if_bigint);
1348 : {
 : // BigInt truthiness is decided by the runtime.
1349 : Node* result =
1350 : CallRuntime(Runtime::kBigIntToBoolean, NoContextConstant(), value);
1351 : CSA_ASSERT(this, IsBoolean(result));
1352 5152 : Branch(WordEqual(result, TrueConstant()), if_true, if_false);
1353 : }
1354 : }
1355 5152 : }
1356 :
 : // Loads {rep} at {offset} relative to the parent stack frame's pointer.
1357 2184 : Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
1358 2184 : Node* frame_pointer = LoadParentFramePointer();
1359 4368 : return Load(rep, frame_pointer, IntPtrConstant(offset));
1360 : }
1361 :
 : // Loads {rep} at raw byte {offset} from an untagged {buffer} pointer
 : // (no heap-object tag adjustment).
1362 3204 : Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
1363 : MachineType rep) {
1364 6408 : return Load(rep, buffer, IntPtrConstant(offset));
1365 : }
1366 :
 : // Loads {rep} from a field of tagged {object}; subtracts kHeapObjectTag
 : // from the constant {offset} to address the untagged memory.
1367 905672 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1368 : int offset, MachineType rep) {
1369 : CSA_ASSERT(this, IsStrong(object));
1370 1811344 : return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
1371 : }
1372 :
 : // Dynamic-offset variant of LoadObjectField; untags via IntPtrSub.
1373 66768 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1374 : SloppyTNode<IntPtrT> offset,
1375 : MachineType rep) {
1376 : CSA_ASSERT(this, IsStrong(object));
1377 133536 : return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
1378 : }
1379 :
 : // Loads a Smi field and returns it untagged as IntPtrT. With 32-bit Smi
 : // values (64-bit targets) only the 32-bit payload half is loaded (upper
 : // half on little-endian, hence the +4); otherwise the full tagged word
 : // is loaded and Smi-untagged.
1380 70476 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
1381 : SloppyTNode<HeapObject> object, int offset) {
1382 : if (SmiValuesAre32Bits()) {
1383 : #if V8_TARGET_LITTLE_ENDIAN
1384 70476 : offset += 4;
1385 : #endif
1386 : return ChangeInt32ToIntPtr(
1387 140952 : LoadObjectField(object, offset, MachineType::Int32()));
1388 : } else {
1389 : return SmiToIntPtr(
1390 : LoadObjectField(object, offset, MachineType::AnyTagged()));
1391 : }
1392 : }
1393 :
 : // Like LoadAndUntagObjectField but produces an Int32T: loads only the
 : // 32-bit Smi payload half when Smi values are 32 bits wide.
1394 3080 : TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
1395 : int offset) {
1396 : if (SmiValuesAre32Bits()) {
1397 : #if V8_TARGET_LITTLE_ENDIAN
1398 3080 : offset += 4;
1399 : #endif
1400 : return UncheckedCast<Int32T>(
1401 4264 : LoadObjectField(object, offset, MachineType::Int32()));
1402 : } else {
1403 : return SmiToInt32(
1404 : LoadObjectField(object, offset, MachineType::AnyTagged()));
1405 : }
1406 : }
1407 :
 : // Raw-pointer variant: loads and untags a Smi at {base}+{index}, using
 : // the payload-half trick when Smi values are 32 bits wide.
1408 1512 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
1409 : if (SmiValuesAre32Bits()) {
1410 : #if V8_TARGET_LITTLE_ENDIAN
1411 1512 : index += 4;
1412 : #endif
1413 : return ChangeInt32ToIntPtr(
1414 4536 : Load(MachineType::Int32(), base, IntPtrConstant(index)));
1415 : } else {
1416 : return SmiToIntPtr(
1417 : Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
1418 : }
1419 : }
1420 :
 : // Stores intptr {value} into a Smi field at {base}+{offset}. With 32-bit
 : // Smi values the store is split into two 32-bit writes (a zero half and
 : // a truncated payload half, swapped per endianness); otherwise the value
 : // is Smi-tagged and stored as one tagged word. No write barrier needed.
1421 56405 : void CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
1422 : if (SmiValuesAre32Bits()) {
1423 56405 : int zero_offset = offset + 4;
1424 : int payload_offset = offset;
1425 : #if V8_TARGET_LITTLE_ENDIAN
1426 : std::swap(zero_offset, payload_offset);
1427 : #endif
1428 : StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1429 169215 : IntPtrConstant(zero_offset), Int32Constant(0));
1430 : StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1431 112810 : IntPtrConstant(payload_offset),
1432 169215 : TruncateInt64ToInt32(value));
1433 : } else {
1434 : StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
1435 : IntPtrConstant(offset), SmiTag(value));
1436 : }
1437 56405 : }
1438 :
 : // Loads the float64 payload of a HeapNumber.
1439 13588 : TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1440 : SloppyTNode<HeapNumber> object) {
1441 : return TNode<Float64T>::UncheckedCast(LoadObjectField(
1442 88380 : object, HeapNumber::kValueOffset, MachineType::Float64()));
1443 : }
1444 :
 : // Loads the map (hidden class) of {object} from its map slot.
1445 27344 : TNode<Map> CodeStubAssembler::LoadMap(SloppyTNode<HeapObject> object) {
1446 : return UncheckedCast<Map>(LoadObjectField(object, HeapObject::kMapOffset,
1447 220428 : MachineType::TaggedPointer()));
1448 : }
1449 :
 : // Returns the instance type of {object}, read via its map.
1450 65608 : TNode<Int32T> CodeStubAssembler::LoadInstanceType(
1451 : SloppyTNode<HeapObject> object) {
1452 65608 : return LoadMapInstanceType(LoadMap(object));
1453 : }
1454 :
 : // True iff {object}'s instance type equals {instance_type}.
1455 10584 : TNode<BoolT> CodeStubAssembler::HasInstanceType(SloppyTNode<HeapObject> object,
1456 : InstanceType instance_type) {
1457 21168 : return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1458 : }
1459 :
 : // True iff {object}'s instance type differs from {instance_type}.
1460 504 : TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
1461 : SloppyTNode<HeapObject> object, InstanceType instance_type) {
1462 1512 : return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1463 : }
1464 :
 : // True when {any_tagged} is a Smi OR has a different instance type than
 : // {type}; the Select avoids dereferencing a Smi as a heap object.
1465 0 : TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
1466 : SloppyTNode<HeapObject> any_tagged, InstanceType type) {
1467 : /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
1468 0 : TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
1469 : return Select<BoolT>(
1470 0 : tagged_is_smi, [=]() { return tagged_is_smi; },
1471 0 : [=]() { return DoesntHaveInstanceType(any_tagged, type); });
1472 : }
1473 :
 : // Returns the property backing store of a fast-mode object. When the
 : // properties-or-hash slot holds a Smi (just a hash, no store yet), the
 : // empty fixed array is substituted.
1474 3756 : TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
1475 : SloppyTNode<JSObject> object) {
1476 : CSA_SLOW_ASSERT(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))));
1477 3756 : TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
1478 7512 : return Select<HeapObject>(TaggedIsSmi(properties),
1479 3756 : [=] { return EmptyFixedArrayConstant(); },
1480 11268 : [=] { return CAST(properties); });
1481 : }
1482 :
 : // Dictionary-mode counterpart of LoadFastProperties: a Smi slot yields
 : // the empty property dictionary instead.
1483 6344 : TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
1484 : SloppyTNode<JSObject> object) {
1485 : CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
1486 6344 : TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
1487 12688 : return Select<HeapObject>(TaggedIsSmi(properties),
1488 6344 : [=] { return EmptyPropertyDictionaryConstant(); },
1489 19032 : [=] { return CAST(properties); });
1490 : }
1491 :
 : // Loads the length field of a JSArray (a Number: Smi or HeapNumber).
1492 392 : TNode<Number> CodeStubAssembler::LoadJSArrayLength(SloppyTNode<JSArray> array) {
1493 : CSA_ASSERT(this, IsJSArray(array));
1494 392 : return CAST(LoadObjectField(array, JSArray::kLengthOffset));
1495 : }
1496 :
 : // Loads the length field of a JSArgumentsObjectWithLength.
1497 0 : TNode<Object> CodeStubAssembler::LoadJSArgumentsObjectWithLength(
1498 : SloppyTNode<JSArgumentsObjectWithLength> array) {
1499 0 : return LoadObjectField(array, JSArgumentsObjectWithLength::kLengthOffset);
1500 : }
1501 :
 : // Loads the length of a fast-elements (or packed sealed/frozen) JSArray;
 : // asserted to always be a positive Smi for such arrays.
1502 2408 : TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(
1503 : SloppyTNode<JSArray> array) {
1504 : TNode<Object> length = LoadJSArrayLength(array);
1505 : CSA_ASSERT(this, Word32Or(IsFastElementsKind(LoadElementsKind(array)),
1506 : IsElementsKindInRange(LoadElementsKind(array),
1507 : PACKED_SEALED_ELEMENTS,
1508 : PACKED_FROZEN_ELEMENTS)));
1509 : // JSArray length is always a positive Smi for fast arrays.
1510 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
1511 2408 : return UncheckedCast<Smi>(length);
1512 : }
1513 :
 : // Loads the Smi length field of a FixedArrayBase subclass.
1514 2520 : TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
1515 : SloppyTNode<FixedArrayBase> array) {
1516 : CSA_SLOW_ASSERT(this, IsNotWeakFixedArraySubclass(array));
1517 2520 : return CAST(LoadObjectField(array, FixedArrayBase::kLengthOffset));
1518 : }
1519 :
 : // Loads the FixedArrayBase length untagged to IntPtrT.
1520 1680 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
1521 : SloppyTNode<FixedArrayBase> array) {
1522 61788 : return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1523 : }
1524 :
 : // Loads a FeedbackVector's int32 length field widened to IntPtrT.
1525 0 : TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
1526 : TNode<FeedbackVector> vector) {
1527 : return ChangeInt32ToIntPtr(
1528 0 : LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
1529 : }
1530 :
 : // Loads the Smi length field of a WeakFixedArray.
1531 0 : TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
1532 : TNode<WeakFixedArray> array) {
1533 0 : return CAST(LoadObjectField(array, WeakFixedArray::kLengthOffset));
1534 : }
1535 :
 : // Loads the WeakFixedArray length untagged to IntPtrT.
1536 728 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
1537 : SloppyTNode<WeakFixedArray> array) {
1538 1296 : return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
1539 : }
1540 :
 : // Loads the 16-bit number-of-descriptors field of a DescriptorArray.
1541 56 : TNode<Int32T> CodeStubAssembler::LoadNumberOfDescriptors(
1542 : TNode<DescriptorArray> array) {
1543 : return UncheckedCast<Int32T>(
1544 : LoadObjectField(array, DescriptorArray::kNumberOfDescriptorsOffset,
1545 2024 : MachineType::Int16()));
1546 : }
1547 :
 : // Loads the map's 8-bit bit field.
1548 336 : TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
1549 : CSA_SLOW_ASSERT(this, IsMap(map));
1550 : return UncheckedCast<Int32T>(
1551 27068 : LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8()));
1552 : }
1553 :
 : // Loads the map's 8-bit bit field 2 (holds the elements kind, among
 : // other bits).
1554 504 : TNode<Int32T> CodeStubAssembler::LoadMapBitField2(SloppyTNode<Map> map) {
1555 : CSA_SLOW_ASSERT(this, IsMap(map));
1556 : return UncheckedCast<Int32T>(
1557 9916 : LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()));
1558 : }
1559 :
 : // Loads the map's 32-bit bit field 3 (holds the enum length, among
 : // other bits).
1560 1680 : TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(SloppyTNode<Map> map) {
1561 : CSA_SLOW_ASSERT(this, IsMap(map));
1562 : return UncheckedCast<Uint32T>(
1563 6952 : LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()));
1564 : }
1565 :
 : // Loads the map's 16-bit instance type field.
1566 6060 : TNode<Int32T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
1567 : return UncheckedCast<Int32T>(
1568 122588 : LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint16()));
1569 : }
1570 :
 : // Decodes the elements kind out of the map's bit field 2.
1571 9020 : TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
1572 : CSA_SLOW_ASSERT(this, IsMap(map));
1573 : Node* bit_field2 = LoadMapBitField2(map);
1574 9020 : return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
1575 : }
1576 :
 : // Returns the elements kind of {object}, read via its map.
1577 2576 : TNode<Int32T> CodeStubAssembler::LoadElementsKind(
1578 : SloppyTNode<HeapObject> object) {
1579 2576 : return LoadMapElementsKind(LoadMap(object));
1580 : }
1581 :
 : // Loads the map's descriptor array.
1582 6384 : TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
1583 : SloppyTNode<Map> map) {
1584 : CSA_SLOW_ASSERT(this, IsMap(map));
1585 6384 : return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
1586 : }
1587 :
 : // Loads the map's prototype slot.
1588 5432 : TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {
1589 : CSA_SLOW_ASSERT(this, IsMap(map));
1590 5432 : return CAST(LoadObjectField(map, Map::kPrototypeOffset));
1591 : }
1592 :
 : // Loads the PrototypeInfo of {map}. Jumps to {if_no_proto_info} when the
 : // transitions-or-prototype-info slot holds anything else: a Smi, a weak
 : // or cleared reference, or a strong object whose map is not the
 : // PrototypeInfo map.
1593 168 : TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
1594 : SloppyTNode<Map> map, Label* if_no_proto_info) {
1595 336 : Label if_strong_heap_object(this);
1596 : CSA_ASSERT(this, IsMap(map));
1597 : TNode<MaybeObject> maybe_prototype_info =
1598 168 : LoadMaybeWeakObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
1599 : TVARIABLE(Object, prototype_info);
1600 : DispatchMaybeObject(maybe_prototype_info, if_no_proto_info, if_no_proto_info,
1601 : if_no_proto_info, &if_strong_heap_object,
1602 168 : &prototype_info);
1603 :
1604 : BIND(&if_strong_heap_object);
1605 168 : GotoIfNot(WordEqual(LoadMap(CAST(prototype_info.value())),
1606 168 : LoadRoot(RootIndex::kPrototypeInfoMap)),
1607 168 : if_no_proto_info);
1608 168 : return CAST(prototype_info.value());
1609 : }
1610 :
 : // Loads the map's 8-bit instance size (in words) widened to IntPtrT.
1611 4720 : TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(
1612 : SloppyTNode<Map> map) {
1613 : CSA_SLOW_ASSERT(this, IsMap(map));
1614 : return ChangeInt32ToIntPtr(LoadObjectField(
1615 9440 : map, Map::kInstanceSizeInWordsOffset, MachineType::Uint8()));
1616 : }
1617 :
 : // For a JSObject map, loads where in-object properties start (in words);
 : // the byte is shared with the constructor-function index for primitives,
 : // hence the IsJSObjectMap assertion.
1618 2132 : TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
1619 : SloppyTNode<Map> map) {
1620 : CSA_SLOW_ASSERT(this, IsMap(map));
1621 : // See Map::GetInObjectPropertiesStartInWords() for details.
1622 : CSA_ASSERT(this, IsJSObjectMap(map));
1623 : return ChangeInt32ToIntPtr(LoadObjectField(
1624 : map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1625 4264 : MachineType::Uint8()));
1626 : }
1627 :
1628 56 : TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1629 : SloppyTNode<Map> map) {
1630 : CSA_SLOW_ASSERT(this, IsMap(map));
1631 : // See Map::GetConstructorFunctionIndex() for details.
1632 : CSA_ASSERT(this, IsPrimitiveInstanceType(LoadMapInstanceType(map)));
1633 : return ChangeInt32ToIntPtr(LoadObjectField(
1634 : map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1635 112 : MachineType::Uint8()));
1636 : }
1637 :
// Returns the constructor of |map|. The kConstructorOrBackPointer slot may
// hold a back pointer (another Map) instead; in that case, follow the chain
// of maps until a non-map value is found and return that.
TNode<Object> CodeStubAssembler::LoadMapConstructor(SloppyTNode<Map> map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  TVARIABLE(Object, result,
            LoadObjectField(map, Map::kConstructorOrBackPointerOffset));

  Label done(this), loop(this, &result);
  Goto(&loop);
  BIND(&loop);
  {
    // A Smi or any non-Map heap object terminates the walk.
    GotoIf(TaggedIsSmi(result.value()), &done);
    Node* is_map_type =
        InstanceTypeEqual(LoadInstanceType(CAST(result.value())), MAP_TYPE);
    GotoIfNot(is_map_type, &done);
    // Still a map: keep following the back-pointer chain.
    result = LoadObjectField(CAST(result.value()),
                             Map::kConstructorOrBackPointerOffset);
    Goto(&loop);
  }
  BIND(&done);
  return result.value();
}
1658 :
// Decodes the enum-cache length from bit field 3 of |map|.
Node* CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Node* bit_field3 = LoadMapBitField3(map);
  return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
}

// Returns the back pointer of |map| when the constructor-or-back-pointer
// slot holds a Map, otherwise undefined.
TNode<Object> CodeStubAssembler::LoadMapBackPointer(SloppyTNode<Map> map) {
  TNode<HeapObject> object =
      CAST(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
  return Select<Object>(IsMap(object), [=] { return object; },
                        [=] { return UndefinedConstant(); });
}
1671 :
// Jumps to |bailout| unless |map| describes an object with only "simple"
// properties: not a custom-elements receiver, not in dictionary mode, and
// without a hidden prototype. Returns bit field 3 so callers can reuse it
// without reloading.
TNode<Uint32T> CodeStubAssembler::EnsureOnlyHasSimpleProperties(
    TNode<Map> map, TNode<Int32T> instance_type, Label* bailout) {
  // This check can have false positives, since it applies to any JSValueType.
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), bailout);

  TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
  GotoIf(IsSetWord32(bit_field3, Map::IsDictionaryMapBit::kMask |
                                     Map::HasHiddenPrototypeBit::kMask),
         bailout);

  return bit_field3;
}
1684 :
// Returns the identity hash of a JSReceiver, decoding it from whatever the
// kPropertiesOrHash slot currently holds:
//   - a Smi: the hash itself,
//   - a PropertyArray: hash packed into the length-and-hash field,
//   - a NameDictionary: hash stored at kObjectHashIndex,
//   - any other FixedArray: no hash assigned yet (kNoHashSentinel).
// If |if_no_hash| is non-null, jumps there when no hash has been set.
TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
    SloppyTNode<Object> receiver, Label* if_no_hash) {
  TVARIABLE(IntPtrT, var_hash);
  Label done(this), if_smi(this), if_property_array(this),
      if_property_dictionary(this), if_fixed_array(this);

  TNode<Object> properties_or_hash =
      LoadObjectField(TNode<HeapObject>::UncheckedCast(receiver),
                      JSReceiver::kPropertiesOrHashOffset);
  GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);

  TNode<HeapObject> properties =
      TNode<HeapObject>::UncheckedCast(properties_or_hash);
  TNode<Int32T> properties_instance_type = LoadInstanceType(properties);

  GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
         &if_property_array);
  Branch(InstanceTypeEqual(properties_instance_type, NAME_DICTIONARY_TYPE),
         &if_property_dictionary, &if_fixed_array);

  BIND(&if_fixed_array);
  {
    // Plain FixedArray backing store: no hash has been assigned.
    var_hash = IntPtrConstant(PropertyArray::kNoHashSentinel);
    Goto(&done);
  }

  BIND(&if_smi);
  {
    var_hash = SmiUntag(TNode<Smi>::UncheckedCast(properties_or_hash));
    Goto(&done);
  }

  BIND(&if_property_array);
  {
    // The hash shares a word with the array length; decode it.
    TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
        properties, PropertyArray::kLengthAndHashOffset);
    var_hash = TNode<IntPtrT>::UncheckedCast(
        DecodeWord<PropertyArray::HashField>(length_and_hash));
    Goto(&done);
  }

  BIND(&if_property_dictionary);
  {
    var_hash = SmiUntag(CAST(LoadFixedArrayElement(
        CAST(properties), NameDictionary::kObjectHashIndex)));
    Goto(&done);
  }

  BIND(&done);
  if (if_no_hash != nullptr) {
    GotoIf(IntPtrEqual(var_hash.value(),
                       IntPtrConstant(PropertyArray::kNoHashSentinel)),
           if_no_hash);
  }
  return var_hash.value();
}
1741 :
// Loads the raw hash field of |name| (hash plus flag bits).
TNode<Uint32T> CodeStubAssembler::LoadNameHashField(SloppyTNode<Name> name) {
  CSA_ASSERT(this, IsName(name));
  return LoadObjectField<Uint32T>(name, Name::kHashFieldOffset);
}

// Returns the hash of |name|. If the hash has not been computed yet and
// |if_hash_not_computed| is provided, jumps there instead.
TNode<Uint32T> CodeStubAssembler::LoadNameHash(SloppyTNode<Name> name,
                                               Label* if_hash_not_computed) {
  TNode<Uint32T> hash_field = LoadNameHashField(name);
  if (if_hash_not_computed != nullptr) {
    GotoIf(IsSetWord32(hash_field, Name::kHashNotComputedMask),
           if_hash_not_computed);
  }
  // Shift out the flag bits; the hash proper lives above kHashShift.
  return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
}
1756 :
// Returns the length of |string| as a Smi.
TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(
    SloppyTNode<String> string) {
  return SmiFromIntPtr(LoadStringLengthAsWord(string));
}

// Returns the length of |string| as an untagged word.
TNode<IntPtrT> CodeStubAssembler::LoadStringLengthAsWord(
    SloppyTNode<String> string) {
  return Signed(ChangeUint32ToWord(LoadStringLengthAsWord32(string)));
}

// Returns the length of |string|; the length is stored as a raw uint32 field.
TNode<Uint32T> CodeStubAssembler::LoadStringLengthAsWord32(
    SloppyTNode<String> string) {
  CSA_ASSERT(this, IsString(string));
  return LoadObjectField<Uint32T>(string, String::kLengthOffset);
}

// Returns a raw pointer to the first character of a sequential string.
Node* CodeStubAssembler::PointerToSeqStringData(Node* seq_string) {
  CSA_ASSERT(this, IsString(seq_string));
  CSA_ASSERT(this,
             IsSequentialStringInstanceType(LoadInstanceType(seq_string)));
  // One- and two-byte sequential strings share the same header size, so the
  // data offset below works for both.
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  return IntPtrAdd(
      BitcastTaggedToWord(seq_string),
      IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag));
}

// Loads the wrapped primitive value of a JSValue wrapper object.
Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
  CSA_ASSERT(this, IsJSValue(object));
  return LoadObjectField(object, JSValue::kValueOffset);
}
1787 :
// Branches according to the kind of |maybe_object|: Smi, cleared weak
// reference, live weak reference, or strong reference. For the Smi, weak and
// strong cases the underlying value is written to |*extracted| before the
// jump (for the weak case, with the weak tag bit removed).
void CodeStubAssembler::DispatchMaybeObject(TNode<MaybeObject> maybe_object,
                                            Label* if_smi, Label* if_cleared,
                                            Label* if_weak, Label* if_strong,
                                            TVariable<Object>* extracted) {
  Label inner_if_smi(this), inner_if_strong(this);

  GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);

  GotoIf(IsCleared(maybe_object), if_cleared);

  // A strong reference carries kHeapObjectTag in its low tag bits.
  GotoIf(Word32Equal(Word32And(TruncateIntPtrToInt32(
                                   BitcastMaybeObjectToWord(maybe_object)),
                               Int32Constant(kHeapObjectTagMask)),
                     Int32Constant(kHeapObjectTag)),
         &inner_if_strong);

  // Remaining case: a live weak reference. Strip the weak bit to recover
  // the referenced heap object.
  *extracted =
      BitcastWordToTagged(WordAnd(BitcastMaybeObjectToWord(maybe_object),
                                  IntPtrConstant(~kWeakHeapObjectMask)));
  Goto(if_weak);

  BIND(&inner_if_smi);
  *extracted = CAST(maybe_object);
  Goto(if_smi);

  BIND(&inner_if_strong);
  *extracted = CAST(maybe_object);
  Goto(if_strong);
}
1817 :
// True if |value| is a strong (regular tagged) heap object reference.
TNode<BoolT> CodeStubAssembler::IsStrong(TNode<MaybeObject> value) {
  return WordEqual(WordAnd(BitcastMaybeObjectToWord(value),
                           IntPtrConstant(kHeapObjectTagMask)),
                   IntPtrConstant(kHeapObjectTag));
}

// Casts |value| to HeapObject, jumping to |if_not_strong| when it is a Smi
// or a weak/cleared reference.
TNode<HeapObject> CodeStubAssembler::GetHeapObjectIfStrong(
    TNode<MaybeObject> value, Label* if_not_strong) {
  GotoIfNot(IsStrong(value), if_not_strong);
  return CAST(value);
}

// True if |value| carries the weak tag (a live or cleared weak reference).
// Only the low 32 bits need to be inspected for the tag check.
TNode<BoolT> CodeStubAssembler::IsWeakOrCleared(TNode<MaybeObject> value) {
  return Word32Equal(
      Word32And(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
                Int32Constant(kHeapObjectTagMask)),
      Int32Constant(kWeakHeapObjectTag));
}

// True if |value| is the cleared weak reference sentinel.
TNode<BoolT> CodeStubAssembler::IsCleared(TNode<MaybeObject> value) {
  return Word32Equal(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
                     Int32Constant(kClearedWeakHeapObjectLower32));
}

TNode<BoolT> CodeStubAssembler::IsNotCleared(TNode<MaybeObject> value) {
  return Word32NotEqual(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
                        Int32Constant(kClearedWeakHeapObjectLower32));
}

// Strips the weak tag bit from |value|, which must be a live (non-cleared)
// weak reference (asserted in debug builds).
TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
    TNode<MaybeObject> value) {
  CSA_ASSERT(this, IsWeakOrCleared(value));
  CSA_ASSERT(this, IsNotCleared(value));
  return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
      BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
}

// Variant that jumps to |if_cleared| instead of asserting.
TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
    TNode<MaybeObject> value, Label* if_cleared) {
  GotoIf(IsCleared(value), if_cleared);
  return GetHeapObjectAssumeWeak(value);
}

// True if |object|, ignoring its weak bit, points at |value|.
TNode<BoolT> CodeStubAssembler::IsWeakReferenceTo(TNode<MaybeObject> object,
                                                  TNode<Object> value) {
  return WordEqual(WordAnd(BitcastMaybeObjectToWord(object),
                           IntPtrConstant(~kWeakHeapObjectMask)),
                   BitcastTaggedToWord(value));
}

// True if |object| is exactly the strong reference |value| (bit-identical;
// a weak reference to the same object does not match).
TNode<BoolT> CodeStubAssembler::IsStrongReferenceTo(TNode<MaybeObject> object,
                                                    TNode<Object> value) {
  return WordEqual(BitcastMaybeObjectToWord(object),
                   BitcastTaggedToWord(value));
}

TNode<BoolT> CodeStubAssembler::IsNotWeakReferenceTo(TNode<MaybeObject> object,
                                                     TNode<Object> value) {
  return WordNotEqual(WordAnd(BitcastMaybeObjectToWord(object),
                              IntPtrConstant(~kWeakHeapObjectMask)),
                      BitcastTaggedToWord(value));
}

// Sets the weak tag bit on |value|, producing a weak reference to it.
TNode<MaybeObject> CodeStubAssembler::MakeWeak(TNode<HeapObject> value) {
  return ReinterpretCast<MaybeObject>(BitcastWordToTagged(
      WordOr(BitcastTaggedToWord(value), IntPtrConstant(kWeakHeapObjectTag))));
}
1885 :
// LoadArrayLength specializations: each returns the number of elements in
// the given array type as an untagged IntPtrT.
template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<FixedArray> array) {
  return LoadAndUntagFixedArrayBaseLength(array);
}

template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<WeakFixedArray> array) {
  return LoadAndUntagWeakFixedArrayLength(array);
}

template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<PropertyArray> array) {
  return LoadPropertyArrayLength(array);
}

template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
    TNode<DescriptorArray> array) {
  // A descriptor array stores kEntrySize slots per descriptor.
  return IntPtrMul(ChangeInt32ToIntPtr(LoadNumberOfDescriptors(array)),
                   IntPtrConstant(DescriptorArray::kEntrySize));
}

template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
    TNode<TransitionArray> array) {
  return LoadAndUntagWeakFixedArrayLength(array);
}
1913 :
// Loads the element at |index_node| from |array|, whose header occupies
// |array_header_size| bytes. |additional_offset| is in bytes and must be
// tagged-size aligned. Bounds are asserted (debug only), not checked.
template <typename Array>
TNode<MaybeObject> CodeStubAssembler::LoadArrayElement(
    TNode<Array> array, int array_header_size, Node* index_node,
    int additional_offset, ParameterMode parameter_mode,
    LoadSensitivity needs_poisoning) {
  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
                       ParameterToIntPtr(index_node, parameter_mode),
                       IntPtrConstant(0)));
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  // Fold the header and extra offset into the element offset; subtract the
  // heap-object tag to get an untagged address offset.
  int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
  TNode<IntPtrT> offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
                                                 parameter_mode, header_size);
  CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(array),
                                    array_header_size));
  return UncheckedCast<MaybeObject>(
      Load(MachineType::AnyTagged(), array, offset, needs_poisoning));
}

// Explicit instantiations for the array types used from other files.
template TNode<MaybeObject>
CodeStubAssembler::LoadArrayElement<TransitionArray>(TNode<TransitionArray>,
                                                     int, Node*, int,
                                                     ParameterMode,
                                                     LoadSensitivity);

template TNode<MaybeObject>
CodeStubAssembler::LoadArrayElement<DescriptorArray>(TNode<DescriptorArray>,
                                                     int, Node*, int,
                                                     ParameterMode,
                                                     LoadSensitivity);
1943 :
// Emits a CSA_CHECK that |index| plus |additional_offset| (converted to
// elements) is within the bounds of |array|. A no-op when the
// --fixed-array-bounds-checks flag is disabled.
void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
                                              Node* index,
                                              int additional_offset,
                                              ParameterMode parameter_mode) {
  if (!FLAG_fixed_array_bounds_checks) return;
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  if (parameter_mode == ParameterMode::SMI_PARAMETERS) {
    TNode<Smi> effective_index;
    Smi constant_index;
    bool index_is_constant = ToSmiConstant(index, &constant_index);
    if (index_is_constant) {
      // Fold the offset into the constant at compile time.
      effective_index = SmiConstant(Smi::ToInt(constant_index) +
                                    additional_offset / kTaggedSize);
    } else if (additional_offset != 0) {
      effective_index =
          SmiAdd(CAST(index), SmiConstant(additional_offset / kTaggedSize));
    } else {
      effective_index = CAST(index);
    }
    // SmiBelow is an unsigned comparison, so it also rejects negative Smis.
    CSA_CHECK(this, SmiBelow(effective_index, LoadFixedArrayBaseLength(array)));
  } else {
    // IntPtrAdd does constant-folding automatically.
    TNode<IntPtrT> effective_index =
        IntPtrAdd(UncheckedCast<IntPtrT>(index),
                  IntPtrConstant(additional_offset / kTaggedSize));
    CSA_CHECK(this, UintPtrLessThan(effective_index,
                                    LoadAndUntagFixedArrayBaseLength(array)));
  }
}
1973 :
// Loads element |index_node| from a FixedArray, optionally emitting a bounds
// check first (controlled by |check_bounds|).
TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
    TNode<FixedArray> object, Node* index_node, int additional_offset,
    ParameterMode parameter_mode, LoadSensitivity needs_poisoning,
    CheckBounds check_bounds) {
  CSA_ASSERT(this, IsFixedArraySubclass(object));
  CSA_ASSERT(this, IsNotWeakFixedArraySubclass(object));
  if (NeedsBoundsCheck(check_bounds)) {
    FixedArrayBoundsCheck(object, index_node, additional_offset,
                          parameter_mode);
  }
  TNode<MaybeObject> element =
      LoadArrayElement(object, FixedArray::kHeaderSize, index_node,
                       additional_offset, parameter_mode, needs_poisoning);
  // FixedArray elements are always strong references.
  return CAST(element);
}

// Loads element |index| from a PropertyArray (intptr index, no extra offset,
// no poisoning).
TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
    TNode<PropertyArray> object, SloppyTNode<IntPtrT> index) {
  int additional_offset = 0;
  ParameterMode parameter_mode = INTPTR_PARAMETERS;
  LoadSensitivity needs_poisoning = LoadSensitivity::kSafe;
  return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
                               additional_offset, parameter_mode,
                               needs_poisoning));
}

// Decodes the length from the PropertyArray's combined length-and-hash field.
TNode<IntPtrT> CodeStubAssembler::LoadPropertyArrayLength(
    TNode<PropertyArray> object) {
  TNode<IntPtrT> value =
      LoadAndUntagObjectField(object, PropertyArray::kLengthAndHashOffset);
  return Signed(DecodeWord<PropertyArray::LengthField>(value));
}
2006 :
// Computes the backing-store address of a fixed typed array.
TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayBackingStore(
    TNode<FixedTypedArrayBase> typed_array) {
  // Backing store = external_pointer + base_pointer.
  Node* external_pointer =
      LoadObjectField(typed_array, FixedTypedArrayBase::kExternalPointerOffset,
                      MachineType::Pointer());
  Node* base_pointer =
      LoadObjectField(typed_array, FixedTypedArrayBase::kBasePointerOffset);
  return UncheckedCast<RawPtrT>(
      IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer)));
}

TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayOnHeapBackingStore(
    TNode<FixedTypedArrayBase> typed_array) {
  // This is specialized method of retrieving the backing store pointer for on
  // heap allocated typed array buffer. On heap allocated buffer's backing
  // stores are a fixed offset from the pointer to a typed array's elements. See
  // TypedArrayBuiltinsAssembler::AllocateOnHeapElements().
  TNode<WordT> backing_store =
      IntPtrAdd(BitcastTaggedToWord(typed_array),
                IntPtrConstant(
                    FixedTypedArrayBase::ExternalPointerValueForOnHeapArray()));

#ifdef DEBUG
  // Verify that this is an on heap backing store by comparing against the
  // general (external_pointer + base_pointer) computation.
  TNode<RawPtrT> expected_backing_store_pointer =
      LoadFixedTypedArrayBackingStore(typed_array);
  CSA_ASSERT(this, WordEqual(backing_store, expected_backing_store_pointer));
#endif

  return UncheckedCast<RawPtrT>(backing_store);
}
2039 :
// Loads a BigInt64 element at |offset| and boxes it as a BigInt. On 64-bit
// targets the digit is read as one word; on 32-bit targets as two words
// whose order depends on target endianness.
Node* CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
    Node* data_pointer, Node* offset) {
  if (Is64()) {
    TNode<IntPtrT> value = UncheckedCast<IntPtrT>(
        Load(MachineType::IntPtr(), data_pointer, offset));
    return BigIntFromInt64(value);
  } else {
    DCHECK(!Is64());
#if defined(V8_TARGET_BIG_ENDIAN)
    TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
        Load(MachineType::UintPtr(), data_pointer, offset));
    TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
        Load(MachineType::UintPtr(), data_pointer,
             Int32Add(offset, Int32Constant(kSystemPointerSize))));
#else
    TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
        Load(MachineType::UintPtr(), data_pointer, offset));
    TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
        Load(MachineType::UintPtr(), data_pointer,
             Int32Add(offset, Int32Constant(kSystemPointerSize))));
#endif
    return BigIntFromInt32Pair(low, high);
  }
}
2064 :
// Boxes a signed 64-bit value, given as two 32-bit halves, into a BigInt.
// 32-bit targets only. Allocates zero, one or two digits as needed; the sign
// is recorded in the bitfield and the digits hold the magnitude.
TNode<BigInt> CodeStubAssembler::BigIntFromInt32Pair(TNode<IntPtrT> low,
                                                     TNode<IntPtrT> high) {
  DCHECK(!Is64());
  TVARIABLE(BigInt, var_result);
  TVARIABLE(Word32T, var_sign, Int32Constant(BigInt::SignBits::encode(false)));
  TVARIABLE(IntPtrT, var_high, high);
  TVARIABLE(IntPtrT, var_low, low);
  Label high_zero(this), negative(this), allocate_one_digit(this),
      allocate_two_digits(this), if_zero(this), done(this);

  GotoIf(WordEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
  Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
         &allocate_two_digits);

  BIND(&high_zero);
  Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
         &allocate_one_digit);

  BIND(&negative);
  {
    var_sign = Int32Constant(BigInt::SignBits::encode(true));
    // We must negate the value by computing "0 - (high|low)", performing
    // both parts of the subtraction separately and manually taking care
    // of the carry bit (which is 1 iff low != 0).
    var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
    Label carry(this), no_carry(this);
    Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
    BIND(&carry);
    var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
    Goto(&no_carry);
    BIND(&no_carry);
    var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
    // var_high was non-zero going into this block, but subtracting the
    // carry bit from it could bring us back onto the "one digit" path.
    Branch(WordEqual(var_high.value(), IntPtrConstant(0)), &allocate_one_digit,
           &allocate_two_digits);
  }

  BIND(&allocate_one_digit);
  {
    var_result = AllocateRawBigInt(IntPtrConstant(1));
    StoreBigIntBitfield(var_result.value(),
                        Word32Or(var_sign.value(),
                                 Int32Constant(BigInt::LengthBits::encode(1))));
    StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
    Goto(&done);
  }

  BIND(&allocate_two_digits);
  {
    var_result = AllocateRawBigInt(IntPtrConstant(2));
    StoreBigIntBitfield(var_result.value(),
                        Word32Or(var_sign.value(),
                                 Int32Constant(BigInt::LengthBits::encode(2))));
    StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
    StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
    Goto(&done);
  }

  BIND(&if_zero);
  var_result = AllocateBigInt(IntPtrConstant(0));
  Goto(&done);

  BIND(&done);
  return var_result.value();
}
2131 :
// Boxes a signed 64-bit value into a one-digit BigInt (64-bit targets only);
// zero becomes the zero-digit BigInt.
TNode<BigInt> CodeStubAssembler::BigIntFromInt64(TNode<IntPtrT> value) {
  DCHECK(Is64());
  TVARIABLE(BigInt, var_result);
  Label done(this), if_positive(this), if_negative(this), if_zero(this);
  GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
  var_result = AllocateRawBigInt(IntPtrConstant(1));
  Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
         &if_negative);

  BIND(&if_positive);
  {
    StoreBigIntBitfield(var_result.value(),
                        Int32Constant(BigInt::SignBits::encode(false) |
                                      BigInt::LengthBits::encode(1)));
    StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
    Goto(&done);
  }

  BIND(&if_negative);
  {
    // The digit holds the magnitude, so negate the value and set the sign.
    StoreBigIntBitfield(var_result.value(),
                        Int32Constant(BigInt::SignBits::encode(true) |
                                      BigInt::LengthBits::encode(1)));
    StoreBigIntDigit(var_result.value(), 0,
                     Unsigned(IntPtrSub(IntPtrConstant(0), value)));
    Goto(&done);
  }

  BIND(&if_zero);
  {
    var_result = AllocateBigInt(IntPtrConstant(0));
    Goto(&done);
  }

  BIND(&done);
  return var_result.value();
}
2169 :
// Unsigned counterpart of LoadFixedBigInt64ArrayElementAsTagged: loads a
// BigUint64 element at |offset| and boxes it as a BigInt.
Node* CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
    Node* data_pointer, Node* offset) {
  Label if_zero(this), done(this);
  if (Is64()) {
    TNode<UintPtrT> value = UncheckedCast<UintPtrT>(
        Load(MachineType::UintPtr(), data_pointer, offset));
    return BigIntFromUint64(value);
  } else {
    DCHECK(!Is64());
#if defined(V8_TARGET_BIG_ENDIAN)
    TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
        Load(MachineType::UintPtr(), data_pointer, offset));
    TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
        Load(MachineType::UintPtr(), data_pointer,
             Int32Add(offset, Int32Constant(kSystemPointerSize))));
#else
    TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
        Load(MachineType::UintPtr(), data_pointer, offset));
    TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
        Load(MachineType::UintPtr(), data_pointer,
             Int32Add(offset, Int32Constant(kSystemPointerSize))));
#endif
    return BigIntFromUint32Pair(low, high);
  }
}
2195 :
// Boxes an unsigned 64-bit value, given as two 32-bit halves, into a BigInt.
// 32-bit targets only. Allocates zero, one or two digits as needed.
TNode<BigInt> CodeStubAssembler::BigIntFromUint32Pair(TNode<UintPtrT> low,
                                                      TNode<UintPtrT> high) {
  DCHECK(!Is64());
  TVARIABLE(BigInt, var_result);
  Label high_zero(this), if_zero(this), done(this);

  GotoIf(WordEqual(high, IntPtrConstant(0)), &high_zero);
  var_result = AllocateBigInt(IntPtrConstant(2));
  StoreBigIntDigit(var_result.value(), 0, low);
  StoreBigIntDigit(var_result.value(), 1, high);
  Goto(&done);

  BIND(&high_zero);
  GotoIf(WordEqual(low, IntPtrConstant(0)), &if_zero);
  var_result = AllocateBigInt(IntPtrConstant(1));
  StoreBigIntDigit(var_result.value(), 0, low);
  Goto(&done);

  BIND(&if_zero);
  var_result = AllocateBigInt(IntPtrConstant(0));
  Goto(&done);

  BIND(&done);
  return var_result.value();
}

// Boxes an unsigned 64-bit value into a BigInt (64-bit targets only).
TNode<BigInt> CodeStubAssembler::BigIntFromUint64(TNode<UintPtrT> value) {
  DCHECK(Is64());
  TVARIABLE(BigInt, var_result);
  Label done(this), if_zero(this);
  GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
  var_result = AllocateBigInt(IntPtrConstant(1));
  StoreBigIntDigit(var_result.value(), 0, value);
  Goto(&done);

  BIND(&if_zero);
  var_result = AllocateBigInt(IntPtrConstant(0));
  Goto(&done);
  BIND(&done);
  return var_result.value();
}
2237 :
// Loads one element of a fixed typed array and converts it to a tagged value
// (Smi, heap number, or BigInt, depending on |elements_kind|).
Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
    Node* data_pointer, Node* index_node, ElementsKind elements_kind,
    ParameterMode parameter_mode) {
  Node* offset =
      ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
  switch (elements_kind) {
    case UINT8_ELEMENTS: /* fall through */
    case UINT8_CLAMPED_ELEMENTS:
      return SmiFromInt32(Load(MachineType::Uint8(), data_pointer, offset));
    case INT8_ELEMENTS:
      return SmiFromInt32(Load(MachineType::Int8(), data_pointer, offset));
    case UINT16_ELEMENTS:
      return SmiFromInt32(Load(MachineType::Uint16(), data_pointer, offset));
    case INT16_ELEMENTS:
      return SmiFromInt32(Load(MachineType::Int16(), data_pointer, offset));
    case UINT32_ELEMENTS:
      return ChangeUint32ToTagged(
          Load(MachineType::Uint32(), data_pointer, offset));
    case INT32_ELEMENTS:
      return ChangeInt32ToTagged(
          Load(MachineType::Int32(), data_pointer, offset));
    case FLOAT32_ELEMENTS:
      // Float32 is widened to float64 before boxing.
      return AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(
          Load(MachineType::Float32(), data_pointer, offset)));
    case FLOAT64_ELEMENTS:
      return AllocateHeapNumberWithValue(
          Load(MachineType::Float64(), data_pointer, offset));
    case BIGINT64_ELEMENTS:
      return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
    case BIGUINT64_ELEMENTS:
      return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
    default:
      UNREACHABLE();
  }
}
2273 :
// Variant taking a dynamic |elements_kind|: builds a Switch over all typed
// array kinds and dispatches to the statically-typed overload above. The
// default (unknown kind) case is marked Unreachable.
TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
    TNode<WordT> data_pointer, TNode<Smi> index, TNode<Int32T> elements_kind) {
  TVARIABLE(Numeric, var_result);
  Label done(this), if_unknown_type(this, Label::kDeferred);
  int32_t elements_kinds[] = {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) TYPE##_ELEMENTS,
      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  };

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) Label if_##type##array(this);
  TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

  Label* elements_kind_labels[] = {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) &if_##type##array,
      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  };
  STATIC_ASSERT(arraysize(elements_kinds) == arraysize(elements_kind_labels));

  Switch(elements_kind, &if_unknown_type, elements_kinds, elements_kind_labels,
         arraysize(elements_kinds));

  BIND(&if_unknown_type);
  Unreachable();

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  BIND(&if_##type##array); \
  { \
    var_result = CAST(LoadFixedTypedArrayElementAsTagged( \
        data_pointer, index, TYPE##_ELEMENTS, SMI_PARAMETERS)); \
    Goto(&done); \
  }
  TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

  BIND(&done);
  return var_result.value();
}
2314 :
// Converts |value| to the raw representation required by |elements_kind| and
// stores it into the typed array's backing store at |index_node|.
void CodeStubAssembler::StoreFixedTypedArrayElementFromTagged(
    TNode<Context> context, TNode<FixedTypedArrayBase> elements,
    TNode<Object> index_node, TNode<Object> value, ElementsKind elements_kind,
    ParameterMode parameter_mode) {
  TNode<RawPtrT> data_pointer = LoadFixedTypedArrayBackingStore(elements);
  switch (elements_kind) {
    case UINT8_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
    case INT8_ELEMENTS:
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
      // Small integer kinds: |value| is cast to Smi and untagged.
      StoreElement(data_pointer, elements_kind, index_node,
                   SmiToInt32(CAST(value)), parameter_mode);
      break;
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
      StoreElement(data_pointer, elements_kind, index_node,
                   TruncateTaggedToWord32(context, value), parameter_mode);
      break;
    case FLOAT32_ELEMENTS:
      // |value| is cast to HeapNumber and narrowed to float32.
      StoreElement(data_pointer, elements_kind, index_node,
                   TruncateFloat64ToFloat32(LoadHeapNumberValue(CAST(value))),
                   parameter_mode);
      break;
    case FLOAT64_ELEMENTS:
      StoreElement(data_pointer, elements_kind, index_node,
                   LoadHeapNumberValue(CAST(value)), parameter_mode);
      break;
    case BIGUINT64_ELEMENTS:
    case BIGINT64_ELEMENTS: {
      // BigInt kinds need their own store path.
      TNode<IntPtrT> offset =
          ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
      EmitBigTypedArrayElementStore(elements, data_pointer, offset,
                                    CAST(value));
      break;
    }
    default:
      UNREACHABLE();
  }
}
2355 :
// Loads slot |slot_index_node| from a FeedbackVector. The result is a
// MaybeObject because feedback slots can hold weak references.
TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
    Node* object, Node* slot_index_node, int additional_offset,
    ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
  int32_t header_size =
      FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  CSA_SLOW_ASSERT(
      this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
                             FeedbackVector::kHeaderSize));
  return UncheckedCast<MaybeObject>(
      Load(MachineType::AnyTagged(), object, offset));
}
2371 :
// Loads a Smi element and untags it to int32. When Smi payloads are 32 bits
// wide, only the payload half of the tagged word is loaded directly
// (|endian_correction| selects that half on little-endian targets);
// otherwise the full tagged value is loaded and untagged.
template <typename Array>
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
    TNode<Array> object, int array_header_size, Node* index_node,
    int additional_offset, ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  int endian_correction = 0;
#if V8_TARGET_LITTLE_ENDIAN
  if (SmiValuesAre32Bits()) endian_correction = 4;
#endif
  int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
                        endian_correction;
  Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(object),
                                    array_header_size + endian_correction));
  if (SmiValuesAre32Bits()) {
    return UncheckedCast<Int32T>(Load(MachineType::Int32(), object, offset));
  } else {
    return SmiToInt32(Load(MachineType::AnyTagged(), object, offset));
  }
}

// FixedArray convenience wrapper around the template above.
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
    TNode<FixedArray> object, Node* index_node, int additional_offset,
    ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, IsFixedArraySubclass(object));
  return LoadAndUntagToWord32ArrayElement(object, FixedArray::kHeaderSize,
                                          index_node, additional_offset,
                                          parameter_mode);
}

// Loads element |index| from a WeakFixedArray; the result may be a weak
// reference, hence the MaybeObject return type.
TNode<MaybeObject> CodeStubAssembler::LoadWeakFixedArrayElement(
    TNode<WeakFixedArray> object, Node* index, int additional_offset,
    ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
  return LoadArrayElement(object, WeakFixedArray::kHeaderSize, index,
                          additional_offset, parameter_mode, needs_poisoning);
}
2410 :
// Loads the float64 at |index_node| from a FixedDoubleArray. When |if_hole|
// is non-null, jumps there if the slot contains the hole NaN bit pattern
// instead of a value (see LoadDoubleWithHoleCheck).
TNode<Float64T> CodeStubAssembler::LoadFixedDoubleArrayElement(
    SloppyTNode<FixedDoubleArray> object, Node* index_node,
    MachineType machine_type, int additional_offset,
    ParameterMode parameter_mode, Label* if_hole) {
  CSA_ASSERT(this, IsFixedDoubleArray(object));
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  // Bias by -kHeapObjectTag so the tagged object pointer is usable directly.
  int32_t header_size =
      FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
  TNode<IntPtrT> offset = ElementOffsetFromIndex(
      index_node, HOLEY_DOUBLE_ELEMENTS, parameter_mode, header_size);
  CSA_ASSERT(this, IsOffsetInBounds(
                       offset, LoadAndUntagFixedArrayBaseLength(object),
                       FixedDoubleArray::kHeaderSize, HOLEY_DOUBLE_ELEMENTS));
  return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
}
2427 :
// Loads the element at |index| from |elements| whose kind is only known at
// runtime (|elements_kind|), returning it as a tagged value; unboxed doubles
// are materialized as fresh HeapNumbers. Jumps to |if_hole| when a holey
// kind holds the hole at |index|; |if_accessor| is taken from within the
// dictionary path (see BasicLoadNumberDictionaryElement).
TNode<Object> CodeStubAssembler::LoadFixedArrayBaseElementAsTagged(
    TNode<FixedArrayBase> elements, TNode<IntPtrT> index,
    TNode<Int32T> elements_kind, Label* if_accessor, Label* if_hole) {
  TVARIABLE(Object, var_result);
  Label done(this), if_packed(this), if_holey(this), if_packed_double(this),
      if_holey_double(this), if_dictionary(this, Label::kDeferred);

  // Dispatch table: each kind maps onto one of the five handler labels;
  // anything else falls through to the (deferred) dictionary case.
  int32_t kinds[] = {// Handled by if_packed.
                     PACKED_SMI_ELEMENTS, PACKED_ELEMENTS,
                     PACKED_SEALED_ELEMENTS, PACKED_FROZEN_ELEMENTS,
                     // Handled by if_holey.
                     HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS,
                     // Handled by if_packed_double.
                     PACKED_DOUBLE_ELEMENTS,
                     // Handled by if_holey_double.
                     HOLEY_DOUBLE_ELEMENTS};
  Label* labels[] = {// PACKED_{SMI,}_ELEMENTS
                     &if_packed, &if_packed, &if_packed, &if_packed,
                     // HOLEY_{SMI,}_ELEMENTS
                     &if_holey, &if_holey,
                     // PACKED_DOUBLE_ELEMENTS
                     &if_packed_double,
                     // HOLEY_DOUBLE_ELEMENTS
                     &if_holey_double};
  Switch(elements_kind, &if_dictionary, kinds, labels, arraysize(kinds));

  BIND(&if_packed);
  {
    var_result = LoadFixedArrayElement(CAST(elements), index, 0);
    Goto(&done);
  }

  BIND(&if_holey);
  {
    // Holey arrays may contain the hole; check and bail out if found.
    var_result = LoadFixedArrayElement(CAST(elements), index);
    Branch(WordEqual(var_result.value(), TheHoleConstant()), if_hole, &done);
  }

  BIND(&if_packed_double);
  {
    // Doubles are stored unboxed; box the value in a new HeapNumber.
    var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
        CAST(elements), index, MachineType::Float64()));
    Goto(&done);
  }

  BIND(&if_holey_double);
  {
    // As above, but the inner load itself jumps to |if_hole| on the hole NaN.
    var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
        CAST(elements), index, MachineType::Float64(), 0, INTPTR_PARAMETERS,
        if_hole));
    Goto(&done);
  }

  BIND(&if_dictionary);
  {
    CSA_ASSERT(this, IsDictionaryElementsKind(elements_kind));
    var_result = BasicLoadNumberDictionaryElement(CAST(elements), index,
                                                  if_accessor, if_hole);
    Goto(&done);
  }

  BIND(&done);
  return var_result.value();
}
2492 :
// Loads a float64 from |base| + |offset|. If |if_hole| is given, first
// compares the raw bits against the hole NaN pattern — the full 64 bits on
// 64-bit targets, only the exponent (upper) word otherwise — and jumps to
// |if_hole| on a match. When |machine_type| is None the caller only wants
// the hole check, and an empty TNode is returned.
TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
    SloppyTNode<Object> base, SloppyTNode<IntPtrT> offset, Label* if_hole,
    MachineType machine_type) {
  if (if_hole) {
    // TODO(ishell): Compare only the upper part for the hole once the
    // compiler is able to fold addition of already complex |offset| with
    // |kIeeeDoubleExponentWordOffset| into one addressing mode.
    if (Is64()) {
      Node* element = Load(MachineType::Uint64(), base, offset);
      GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
    } else {
      // 32-bit: inspect only the upper word, which is sufficient to
      // distinguish the hole NaN.
      Node* element_upper = Load(
          MachineType::Uint32(), base,
          IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
      GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
             if_hole);
    }
  }
  if (machine_type.IsNone()) {
    // This means the actual value is not needed.
    return TNode<Float64T>();
  }
  return UncheckedCast<Float64T>(Load(machine_type, base, offset));
}
2517 :
2518 78908 : TNode<Object> CodeStubAssembler::LoadContextElement(
2519 : SloppyTNode<Context> context, int slot_index) {
2520 : int offset = Context::SlotOffset(slot_index);
2521 : return UncheckedCast<Object>(
2522 157816 : Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)));
2523 : }
2524 :
2525 5832 : TNode<Object> CodeStubAssembler::LoadContextElement(
2526 : SloppyTNode<Context> context, SloppyTNode<IntPtrT> slot_index) {
2527 11664 : Node* offset = ElementOffsetFromIndex(
2528 : slot_index, PACKED_ELEMENTS, INTPTR_PARAMETERS, Context::SlotOffset(0));
2529 5832 : return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2530 : }
2531 :
2532 56 : TNode<Object> CodeStubAssembler::LoadContextElement(TNode<Context> context,
2533 : TNode<Smi> slot_index) {
2534 112 : Node* offset = ElementOffsetFromIndex(slot_index, PACKED_ELEMENTS,
2535 : SMI_PARAMETERS, Context::SlotOffset(0));
2536 56 : return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2537 : }
2538 :
2539 224 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2540 : int slot_index,
2541 : SloppyTNode<Object> value) {
2542 : int offset = Context::SlotOffset(slot_index);
2543 448 : Store(context, IntPtrConstant(offset), value);
2544 224 : }
2545 :
2546 1008 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2547 : SloppyTNode<IntPtrT> slot_index,
2548 : SloppyTNode<Object> value) {
2549 : Node* offset = IntPtrAdd(TimesTaggedSize(slot_index),
2550 1008 : IntPtrConstant(Context::SlotOffset(0)));
2551 1008 : Store(context, offset, value);
2552 1008 : }
2553 :
2554 7384 : void CodeStubAssembler::StoreContextElementNoWriteBarrier(
2555 : SloppyTNode<Context> context, int slot_index, SloppyTNode<Object> value) {
2556 : int offset = Context::SlotOffset(slot_index);
2557 : StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
2558 14768 : IntPtrConstant(offset), value);
2559 7384 : }
2560 :
2561 22620 : TNode<Context> CodeStubAssembler::LoadNativeContext(
2562 : SloppyTNode<Context> context) {
2563 : return UncheckedCast<Context>(
2564 32588 : LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX));
2565 : }
2566 :
// Walks the context chain from |context| outwards until a context whose map
// is the module context map is found, and returns it. Slow-assert builds
// check that the native context is never reached, i.e. callers must only
// use this when a module context is known to exist on the chain.
TNode<Context> CodeStubAssembler::LoadModuleContext(
    SloppyTNode<Context> context) {
  Node* module_map = LoadRoot(RootIndex::kModuleContextMap);
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Label context_found(this);

  // cur_context is a loop variable: re-bound on every iteration.
  Variable* context_search_loop_variables[1] = {&cur_context};
  Label context_search(this, 1, context_search_loop_variables);

  // Loop until cur_context->map() is module_map.
  Goto(&context_search);
  BIND(&context_search);
  {
    CSA_ASSERT(this, Word32BinaryNot(IsNativeContext(cur_context.value())));
    GotoIf(WordEqual(LoadMap(cur_context.value()), module_map), &context_found);

    // Not a module context: continue with the enclosing (previous) context.
    cur_context.Bind(
        LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
    Goto(&context_search);
  }

  BIND(&context_found);
  return UncheckedCast<Context>(cur_context.value());
}
2593 :
2594 788 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2595 : SloppyTNode<Int32T> kind, SloppyTNode<Context> native_context) {
2596 : CSA_ASSERT(this, IsFastElementsKind(kind));
2597 : CSA_ASSERT(this, IsNativeContext(native_context));
2598 : Node* offset = IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
2599 1576 : ChangeInt32ToIntPtr(kind));
2600 788 : return UncheckedCast<Map>(LoadContextElement(native_context, offset));
2601 : }
2602 :
2603 4760 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2604 : ElementsKind kind, SloppyTNode<Context> native_context) {
2605 : CSA_ASSERT(this, IsNativeContext(native_context));
2606 : return UncheckedCast<Map>(
2607 4760 : LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
2608 : }
2609 :
// Returns whether |function|'s SharedFunctionInfo kind is one of the four
// generator kinds: (async) generator function or (async) concise generator
// method.
TNode<BoolT> CodeStubAssembler::IsGeneratorFunction(
    TNode<JSFunction> function) {
  TNode<SharedFunctionInfo> const shared_function_info =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));

  // Decode the FunctionKind bitfield out of the SFI flags word.
  TNode<Uint32T> const function_kind =
      DecodeWord32<SharedFunctionInfo::FunctionKindBits>(LoadObjectField(
          shared_function_info, SharedFunctionInfo::kFlagsOffset,
          MachineType::Uint32()));

  // Or together equality checks against each of the four generator kinds.
  return TNode<BoolT>::UncheckedCast(Word32Or(
      Word32Or(
          Word32Or(
              Word32Equal(function_kind,
                          Int32Constant(FunctionKind::kAsyncGeneratorFunction)),
              Word32Equal(
                  function_kind,
                  Int32Constant(FunctionKind::kAsyncConciseGeneratorMethod))),
          Word32Equal(function_kind,
                      Int32Constant(FunctionKind::kGeneratorFunction))),
      Word32Equal(function_kind,
                  Int32Constant(FunctionKind::kConciseGeneratorMethod))));
}
2633 :
// Returns whether |function| exposes a "prototype" property: either its map
// has a prototype slot and the function is a constructor, or the function
// is a generator (generators always have a prototype).
TNode<BoolT> CodeStubAssembler::HasPrototypeProperty(TNode<JSFunction> function,
                                                     TNode<Map> map) {
  // (has_prototype_slot() && IsConstructor()) ||
  // IsGeneratorFunction(shared()->kind())
  uint32_t mask =
      Map::HasPrototypeSlotBit::kMask | Map::IsConstructorBit::kMask;
  return TNode<BoolT>::UncheckedCast(
      Word32Or(IsAllSetWord32(LoadMapBitField(map), mask),
               IsGeneratorFunction(function)));
}
2644 :
// Jumps to |runtime| if |function|'s prototype cannot be read directly:
// either there is no prototype property at all, or the prototype is a
// non-instance prototype requiring runtime handling.
void CodeStubAssembler::GotoIfPrototypeRequiresRuntimeLookup(
    TNode<JSFunction> function, TNode<Map> map, Label* runtime) {
  // !has_prototype_property() || has_non_instance_prototype()
  GotoIfNot(HasPrototypeProperty(function, map), runtime);
  GotoIf(IsSetWord32<Map::HasNonInstancePrototypeBit>(LoadMapBitField(map)),
         runtime);
}
2652 :
// Returns |function|'s prototype. The prototype-or-initial-map slot holds
// either the prototype directly or the initial map (in which case the
// prototype is read off that map). Jumps to |if_bailout| if the slot holds
// the hole (prototype not yet computed/cached).
Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function,
                                                 Label* if_bailout) {
  CSA_ASSERT(this, TaggedIsNotSmi(function));
  CSA_ASSERT(this, IsJSFunction(function));
  CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
  CSA_ASSERT(this, IsClearWord32<Map::HasNonInstancePrototypeBit>(
                       LoadMapBitField(LoadMap(function))));
  Node* proto_or_map =
      LoadObjectField(function, JSFunction::kPrototypeOrInitialMapOffset);
  GotoIf(IsTheHole(proto_or_map), if_bailout);

  VARIABLE(var_result, MachineRepresentation::kTagged, proto_or_map);
  Label done(this, &var_result);
  // If the slot holds a map, the actual prototype is that map's prototype.
  GotoIfNot(IsMap(proto_or_map), &done);

  var_result.Bind(LoadMapPrototype(proto_or_map));
  Goto(&done);

  BIND(&done);
  return var_result.value();
}
2674 :
// Returns the BytecodeArray for |shared|. The function-data slot holds
// either the BytecodeArray directly or an InterpreterData object that
// references it; the latter is unwrapped here.
TNode<BytecodeArray> CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray(
    SloppyTNode<SharedFunctionInfo> shared) {
  Node* function_data =
      LoadObjectField(shared, SharedFunctionInfo::kFunctionDataOffset);

  VARIABLE(var_result, MachineRepresentation::kTagged, function_data);
  Label done(this, &var_result);

  // Fast path: the slot already holds the BytecodeArray.
  GotoIfNot(HasInstanceType(function_data, INTERPRETER_DATA_TYPE), &done);
  Node* bytecode_array =
      LoadObjectField(function_data, InterpreterData::kBytecodeArrayOffset);
  var_result.Bind(bytecode_array);
  Goto(&done);

  BIND(&done);
  return CAST(var_result.value());
}
2692 :
2693 24 : void CodeStubAssembler::StoreObjectByteNoWriteBarrier(TNode<HeapObject> object,
2694 : int offset,
2695 : TNode<Word32T> value) {
2696 : StoreNoWriteBarrier(MachineRepresentation::kWord8, object,
2697 48 : IntPtrConstant(offset - kHeapObjectTag), value);
2698 24 : }
2699 :
2700 784 : void CodeStubAssembler::StoreHeapNumberValue(SloppyTNode<HeapNumber> object,
2701 : SloppyTNode<Float64T> value) {
2702 : StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
2703 : MachineRepresentation::kFloat64);
2704 784 : }
2705 :
2706 0 : void CodeStubAssembler::StoreMutableHeapNumberValue(
2707 : SloppyTNode<MutableHeapNumber> object, SloppyTNode<Float64T> value) {
2708 : StoreObjectFieldNoWriteBarrier(object, MutableHeapNumber::kValueOffset, value,
2709 : MachineRepresentation::kFloat64);
2710 0 : }
2711 :
// Stores |value| into the tagged field at constant |offset|, with a full
// write barrier. Map stores must go through StoreMap instead.
void CodeStubAssembler::StoreObjectField(Node* object, int offset,
                                         Node* value) {
  DCHECK_NE(HeapObject::kMapOffset, offset);  // Use StoreMap instead.

  OptimizedStoreField(MachineRepresentation::kTagged,
                      UncheckedCast<HeapObject>(object), offset, value,
                      WriteBarrierKind::kFullWriteBarrier);
}
2720 :
// Stores |value| into the tagged field at dynamic |offset|, with a full
// write barrier.
void CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
                                         Node* value) {
  int const_offset;
  // Prefer the constant-offset path when |offset| folds to a constant.
  if (ToInt32Constant(offset, const_offset)) {
    StoreObjectField(object, const_offset, value);
  } else {
    // Dynamic offset: strip the heap-object tag bias by hand.
    Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
  }
}
2730 :
// Barrier-free field store at a constant offset. Callers must guarantee no
// write barrier is required (e.g. Smi value, untagged representation, or a
// freshly allocated object).
void CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
    Node* object, int offset, Node* value, MachineRepresentation rep) {
  OptimizedStoreField(rep, UncheckedCast<HeapObject>(object), offset, value,
                      WriteBarrierKind::kNoWriteBarrier);
}
2736 :
// Barrier-free field store at a dynamic offset; see the constant-offset
// overload for the barrier-freedom contract.
void CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
    Node* object, Node* offset, Node* value, MachineRepresentation rep) {
  int const_offset;
  // Prefer the constant-offset path when |offset| folds to a constant.
  if (ToInt32Constant(offset, const_offset)) {
    return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
  }
  // Dynamic offset: strip the heap-object tag bias by hand.
  StoreNoWriteBarrier(rep, object,
                      IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
}
2746 :
// Stores |map| as the object's map, with a full write barrier.
void CodeStubAssembler::StoreMap(Node* object, Node* map) {
  OptimizedStoreMap(UncheckedCast<HeapObject>(object), CAST(map));
}
2750 :
2751 51556 : void CodeStubAssembler::StoreMapNoWriteBarrier(Node* object,
2752 : RootIndex map_root_index) {
2753 103112 : StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
2754 51556 : }
2755 :
// Barrier-free map store; only valid when the map store is guaranteed not
// to require a write barrier (e.g. an immortal immovable root map).
void CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  OptimizedStoreField(MachineRepresentation::kTaggedPointer,
                      UncheckedCast<HeapObject>(object), HeapObject::kMapOffset,
                      map, WriteBarrierKind::kNoWriteBarrier);
}
2762 :
// Stores the root value |root_index| into the field at |offset|. Immortal
// immovable roots never need GC tracking, so the write barrier is skipped
// for them; all other roots take the barriered path.
void CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
                                             RootIndex root_index) {
  if (RootsTable::IsImmortalImmovable(root_index)) {
    return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
  } else {
    return StoreObjectField(object, offset, LoadRoot(root_index));
  }
}
2771 :
// Writes the JSArray length field; a Smi store needs no write barrier.
void CodeStubAssembler::StoreJSArrayLength(TNode<JSArray> array,
                                           TNode<Smi> length) {
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
}
2776 :
// Replaces the object's elements backing store, with a full write barrier.
void CodeStubAssembler::StoreElements(TNode<Object> object,
                                      TNode<FixedArrayBase> elements) {
  StoreObjectField(object, JSObject::kElementsOffset, elements);
}
2781 :
// Stores |value| at |index_node| (+ |additional_offset| bytes) into a
// FixedArray or PropertyArray, which share the same header layout (checked
// by the STATIC_ASSERTs below). |barrier_mode| selects between no barrier,
// a full barrier, and the ephemeron-key barrier used for EphemeronHashTable
// keys.
void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
    Node* object, Node* index_node, Node* value, WriteBarrierMode barrier_mode,
    int additional_offset, ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(
      this, Word32Or(IsFixedArraySubclass(object), IsPropertyArray(object)));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
         barrier_mode == UPDATE_WRITE_BARRIER ||
         barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER);
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  STATIC_ASSERT(static_cast<int>(FixedArray::kHeaderSize) ==
                static_cast<int>(PropertyArray::kHeaderSize));
  // Bias by -kHeapObjectTag so the tagged object pointer is usable directly.
  int header_size =
      FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
                static_cast<int>(WeakFixedArray::kLengthOffset));
  STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
                static_cast<int>(PropertyArray::kLengthAndHashOffset));
  // Check that index_node + additional_offset <= object.length.
  // TODO(cbruni): Use proper LoadXXLength helpers
  CSA_ASSERT(
      this,
      IsOffsetInBounds(
          offset,
          Select<IntPtrT>(
              IsPropertyArray(object),
              [=] {
                // PropertyArray packs its length together with a hash;
                // decode just the length bits for the bounds check.
                TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
                    object, PropertyArray::kLengthAndHashOffset);
                return TNode<IntPtrT>::UncheckedCast(
                    DecodeWord<PropertyArray::LengthField>(length_and_hash));
              },
              [=] {
                return LoadAndUntagObjectField(object,
                                               FixedArrayBase::kLengthOffset);
              }),
          FixedArray::kHeaderSize));
  if (barrier_mode == SKIP_WRITE_BARRIER) {
    StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
  } else if (barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER) {
    StoreEphemeronKey(object, offset, value);
  } else {
    Store(object, offset, value);
  }
}
2829 :
// Stores |value| at |index_node| in a FixedDoubleArray, bounds-checking
// when |check_bounds| requests it, and silencing signalling NaNs before the
// store. Unboxed float64 data needs no write barrier.
void CodeStubAssembler::StoreFixedDoubleArrayElement(
    TNode<FixedDoubleArray> object, Node* index_node, TNode<Float64T> value,
    ParameterMode parameter_mode, CheckBounds check_bounds) {
  CSA_ASSERT(this, IsFixedDoubleArray(object));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  if (NeedsBoundsCheck(check_bounds)) {
    FixedArrayBoundsCheck(object, index_node, 0, parameter_mode);
  }
  Node* offset =
      ElementOffsetFromIndex(index_node, PACKED_DOUBLE_ELEMENTS, parameter_mode,
                             FixedArray::kHeaderSize - kHeapObjectTag);
  MachineRepresentation rep = MachineRepresentation::kFloat64;
  // Make sure we do not store signalling NaNs into double arrays.
  TNode<Float64T> value_silenced = Float64SilenceNaN(value);
  StoreNoWriteBarrier(rep, object, offset, value_silenced);
}
2846 :
// Stores |value| into the feedback-vector slot |slot_index_node| (plus
// |additional_offset| bytes), with or without a write barrier depending on
// |barrier_mode|.
void CodeStubAssembler::StoreFeedbackVectorSlot(Node* object,
                                                Node* slot_index_node,
                                                Node* value,
                                                WriteBarrierMode barrier_mode,
                                                int additional_offset,
                                                ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
         barrier_mode == UPDATE_WRITE_BARRIER);
  // Bias by -kHeapObjectTag so the tagged object pointer is usable directly.
  int header_size =
      FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  // Check that slot_index_node <= object.length.
  CSA_ASSERT(this,
             IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
                              FeedbackVector::kHeaderSize));
  if (barrier_mode == SKIP_WRITE_BARRIER) {
    StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
  } else {
    Store(object, offset, value);
  }
}
2872 :
// Jumps to |bailout| if an array's "length" property may not be written:
// either the map uses dictionary (slow) properties, or the length
// descriptor — always the first descriptor on JSArray maps — is marked
// read-only.
void CodeStubAssembler::EnsureArrayLengthWritable(TNode<Map> map,
                                                  Label* bailout) {
  // Don't support arrays in dictionary named property mode.
  GotoIf(IsDictionaryMap(map), bailout);

  // Check whether the length property is writable. The length property is the
  // only default named property on arrays. It's nonconfigurable, hence is
  // guaranteed to stay the first property.
  TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);

  int length_index = JSArray::kLengthDescriptorIndex;
#ifdef DEBUG
  // Debug-only sanity check: the descriptor really is the "length" string.
  TNode<Name> maybe_length =
      LoadKeyByDescriptorEntry(descriptors, length_index);
  CSA_ASSERT(this,
             WordEqual(maybe_length, LoadRoot(RootIndex::klength_string)));
#endif

  TNode<Uint32T> details =
      LoadDetailsByDescriptorEntry(descriptors, length_index);
  GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
         bailout);
}
2896 :
// Verifies that elements can be appended to an object with map |map|:
// it must be extensible, not a prototype map, and its length must be
// writable. Jumps to |bailout| otherwise. Returns the map's elements kind.
TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Map> map,
                                                     Label* bailout) {
  // Disallow pushing onto prototypes. It might be the JSArray prototype.
  // Disallow pushing onto non-extensible objects.
  Comment("Disallow pushing onto prototypes");
  Node* bit_field2 = LoadMapBitField2(map);
  int mask = Map::IsPrototypeMapBit::kMask | Map::IsExtensibleBit::kMask;
  // Both bits at once: require IsExtensible set AND IsPrototypeMap clear.
  Node* test = Word32And(bit_field2, Int32Constant(mask));
  GotoIf(Word32NotEqual(test, Int32Constant(Map::IsExtensibleBit::kMask)),
         bailout);

  EnsureArrayLengthWritable(map, bailout);

  TNode<Uint32T> kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
  return Signed(kind);
}
2913 :
// Ensures the backing store in |var_elements| can hold |length| + |growth|
// elements. If the current capacity suffices nothing happens; otherwise the
// store is grown via GrowElementsCapacity and |var_elements| is re-bound to
// the new store. |bailout| is taken from inside GrowElementsCapacity —
// presumably when growing is not possible; confirm there.
void CodeStubAssembler::PossiblyGrowElementsCapacity(
    ParameterMode mode, ElementsKind kind, Node* array, Node* length,
    Variable* var_elements, Node* growth, Label* bailout) {
  Label fits(this, var_elements);
  Node* capacity =
      TaggedToParameter(LoadFixedArrayBaseLength(var_elements->value()), mode);
  // length and growth nodes are already in a ParameterMode appropriate
  // representation.
  Node* new_length = IntPtrOrSmiAdd(growth, length, mode);
  GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
  Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
  var_elements->Bind(GrowElementsCapacity(array, var_elements->value(), kind,
                                          kind, capacity, new_capacity, mode,
                                          bailout));
  Goto(&fits);
  BIND(&fits);
}
2931 :
// Appends the stub arguments starting at |*arg_index| to |array|, growing
// the backing store first if needed. On success, returns the new length as
// a Smi. On failure (a value does not fit |kind|, or capacity could not be
// grown), the length reached so far is committed to the array,
// |*arg_index| is advanced by the number of elements actually pushed, and
// control transfers to |bailout| so the caller can finish the job.
TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
                                                 SloppyTNode<JSArray> array,
                                                 CodeStubArguments* args,
                                                 TVariable<IntPtrT>* arg_index,
                                                 Label* bailout) {
  CSA_SLOW_ASSERT(this, IsJSArray(array));
  Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
  Label pre_bailout(this);
  Label success(this);
  TVARIABLE(Smi, var_tagged_length);
  ParameterMode mode = OptimalParameterMode();
  VARIABLE(var_length, OptimalParameterRepresentation(),
           TaggedToParameter(LoadFastJSArrayLength(array), mode));
  VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));

  // Resize the capacity of the fixed array if it doesn't fit.
  TNode<IntPtrT> first = arg_index->value();
  // Number of remaining arguments = total args - starting index.
  Node* growth = IntPtrToParameter(
      IntPtrSub(UncheckedCast<IntPtrT>(args->GetLength(INTPTR_PARAMETERS)),
                first),
      mode);
  PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
                               &var_elements, growth, &pre_bailout);

  // Push each argument onto the end of the array now that there is enough
  // capacity.
  CodeStubAssembler::VariableList push_vars({&var_length}, zone());
  Node* elements = var_elements.value();
  args->ForEach(
      push_vars,
      [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
        TryStoreArrayElement(kind, mode, &pre_bailout, elements,
                             var_length.value(), arg);
        Increment(&var_length, 1, mode);
      },
      first, nullptr);
  {
    TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length = length;
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    Goto(&success);
  }

  BIND(&pre_bailout);
  {
    // Partial failure: commit what was pushed so far and tell the caller
    // how far we got by advancing *arg_index.
    TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length = length;
    Node* diff = SmiSub(length, LoadFastJSArrayLength(array));
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
    Goto(bailout);
  }

  BIND(&success);
  return var_tagged_length.value();
}
2988 :
// Stores |value| at |index| in |elements|, bailing out if the value does
// not fit the target |kind|: a non-Smi into a Smi array, or a non-number
// into a double array. Numbers are unboxed to float64 for double kinds.
void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
                                             ParameterMode mode, Label* bailout,
                                             Node* elements, Node* index,
                                             Node* value) {
  if (IsSmiElementsKind(kind)) {
    GotoIf(TaggedIsNotSmi(value), bailout);
  } else if (IsDoubleElementsKind(kind)) {
    GotoIfNotNumber(value, bailout);
  }
  if (IsDoubleElementsKind(kind)) {
    value = ChangeNumberToFloat64(value);
  }
  StoreElement(elements, kind, index, value, mode);
}
3003 :
// Appends a single |value| to |array|, growing the backing store by one
// slot if needed, and updates the array length. Jumps to |bailout| if the
// value does not fit |kind| or the capacity cannot be grown.
void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
                                           Node* value, Label* bailout) {
  CSA_SLOW_ASSERT(this, IsJSArray(array));
  Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
  ParameterMode mode = OptimalParameterMode();
  VARIABLE(var_length, OptimalParameterRepresentation(),
           TaggedToParameter(LoadFastJSArrayLength(array), mode));
  VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));

  // Resize the capacity of the fixed array if it doesn't fit.
  Node* growth = IntPtrOrSmiConstant(1, mode);
  PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
                               &var_elements, growth, bailout);

  // Push each argument onto the end of the array now that there is enough
  // capacity.
  TryStoreArrayElement(kind, mode, bailout, var_elements.value(),
                       var_length.value(), value);
  Increment(&var_length, 1, mode);

  Node* length = ParameterToTagged(var_length.value(), mode);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
}
3027 :
// Allocates a Cell holding |value|; |mode| selects whether the value store
// uses a write barrier.
Node* CodeStubAssembler::AllocateCellWithValue(Node* value,
                                               WriteBarrierMode mode) {
  Node* result = Allocate(Cell::kSize, kNone);
  StoreMapNoWriteBarrier(result, RootIndex::kCellMap);
  StoreCellValue(result, value, mode);
  return result;
}
3035 :
// Reads the value stored in a Cell.
Node* CodeStubAssembler::LoadCellValue(Node* cell) {
  CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
  return LoadObjectField(cell, Cell::kValueOffset);
}
3040 :
// Stores |value| into a Cell; the write barrier is applied or skipped
// according to |mode| (ephemeron mode is not supported here).
void CodeStubAssembler::StoreCellValue(Node* cell, Node* value,
                                       WriteBarrierMode mode) {
  CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
  DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER);

  if (mode == UPDATE_WRITE_BARRIER) {
    StoreObjectField(cell, Cell::kValueOffset, value);
  } else {
    StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
  }
}
3052 :
3053 26108 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber() {
3054 : Node* result = Allocate(HeapNumber::kSize, kNone);
3055 : RootIndex heap_map_index = RootIndex::kHeapNumberMap;
3056 26108 : StoreMapNoWriteBarrier(result, heap_map_index);
3057 26108 : return UncheckedCast<HeapNumber>(result);
3058 : }
3059 :
3060 22884 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
3061 : SloppyTNode<Float64T> value) {
3062 22884 : TNode<HeapNumber> result = AllocateHeapNumber();
3063 : StoreHeapNumberValue(result, value);
3064 22884 : return result;
3065 : }
3066 :
3067 504 : TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumber() {
3068 : Node* result = Allocate(MutableHeapNumber::kSize, kNone);
3069 : RootIndex heap_map_index = RootIndex::kMutableHeapNumberMap;
3070 504 : StoreMapNoWriteBarrier(result, heap_map_index);
3071 504 : return UncheckedCast<MutableHeapNumber>(result);
3072 : }
3073 :
// Returns |object| unchanged unless it is a MutableHeapNumber, in which
// case a freshly allocated MutableHeapNumber with the same value is
// returned.
TNode<Object> CodeStubAssembler::CloneIfMutablePrimitive(TNode<Object> object) {
  TVARIABLE(Object, result, object);
  Label done(this);

  GotoIf(TaggedIsSmi(object), &done);
  GotoIfNot(IsMutableHeapNumber(UncheckedCast<HeapObject>(object)), &done);
  {
    // Mutable heap number found --- allocate a clone.
    TNode<Float64T> value =
        LoadHeapNumberValue(UncheckedCast<HeapNumber>(object));
    result = AllocateMutableHeapNumberWithValue(value);
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}
3091 :
3092 504 : TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumberWithValue(
3093 : SloppyTNode<Float64T> value) {
3094 504 : TNode<MutableHeapNumber> result = AllocateMutableHeapNumber();
3095 : StoreMutableHeapNumberValue(result, value);
3096 504 : return result;
3097 : }
3098 :
// Allocates a BigInt with |length| digit slots and initializes its
// bitfield: length << LengthBits::kShift is stored as the entire bitfield,
// so all other bitfield bits are cleared. Digits remain uninitialized.
TNode<BigInt> CodeStubAssembler::AllocateBigInt(TNode<IntPtrT> length) {
  TNode<BigInt> result = AllocateRawBigInt(length);
  StoreBigIntBitfield(result,
                      Word32Shl(TruncateIntPtrToInt32(length),
                                Int32Constant(BigInt::LengthBits::kShift)));
  return result;
}
3106 :
// Allocates an uninitialized BigInt with room for |length| digits; only the
// map (and any padding word) is written. The bitfield must be stored by the
// caller (see AllocateBigInt). Lengths must be < 3.
TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
  // This is currently used only for 64-bit wide BigInts. If more general
  // applicability is required, a large-object check must be added.
  CSA_ASSERT(this, UintPtrLessThan(length, IntPtrConstant(3)));

  // Total size = header + one pointer-sized word per digit.
  TNode<IntPtrT> size =
      IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
                Signed(WordShl(length, kSystemPointerSizeLog2)));
  Node* raw_result = Allocate(size, kNone);
  StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
  if (FIELD_SIZE(BigInt::kOptionalPaddingOffset) != 0) {
    // Zero the padding word so the object has no uninitialized tagged data.
    DCHECK_EQ(4, FIELD_SIZE(BigInt::kOptionalPaddingOffset));
    StoreObjectFieldNoWriteBarrier(raw_result, BigInt::kOptionalPaddingOffset,
                                   Int32Constant(0),
                                   MachineRepresentation::kWord32);
  }
  return UncheckedCast<BigInt>(raw_result);
}
3125 :
// Writes a BigInt's 32-bit sign/length bitfield (no barrier needed for raw
// word data).
void CodeStubAssembler::StoreBigIntBitfield(TNode<BigInt> bigint,
                                            TNode<Word32T> bitfield) {
  StoreObjectFieldNoWriteBarrier(bigint, BigInt::kBitfieldOffset, bitfield,
                                 MachineRepresentation::kWord32);
}
3131 :
     : // Stores {digit} into the digit slot at compile-time constant {digit_index}.
3132 112 : void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint, int digit_index,
3133 : TNode<UintPtrT> digit) {
3134 112 : StoreObjectFieldNoWriteBarrier(
3135 : bigint, BigInt::kDigitsOffset + digit_index * kSystemPointerSize, digit,
3136 : UintPtrT::kMachineRepresentation);
3137 112 : }
3138 :
     : // Reads the 32-bit bitfield word of {bigint}.
3139 224 : TNode<Word32T> CodeStubAssembler::LoadBigIntBitfield(TNode<BigInt> bigint) {
3140 : return UncheckedCast<Word32T>(
3141 1680 : LoadObjectField(bigint, BigInt::kBitfieldOffset, MachineType::Uint32()));
3142 : }
3143 :
     : // Loads the digit at compile-time constant {digit_index} from {bigint}.
3144 112 : TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
3145 : int digit_index) {
3146 : return UncheckedCast<UintPtrT>(LoadObjectField(
3147 : bigint, BigInt::kDigitsOffset + digit_index * kSystemPointerSize,
3148 1680 : MachineType::UintPtr()));
3149 : }
3150 :
     : // Allocates a SeqOneByteString of compile-time known {length} with empty
     : // hash field; returns the empty_string root when {length} == 0.
3151 784 : TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3152 : uint32_t length, AllocationFlags flags) {
3153 784 : Comment("AllocateSeqOneByteString");
3154 784 : if (length == 0) {
3155 0 : return CAST(LoadRoot(RootIndex::kempty_string));
3156 : }
3157 : Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
3158 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
3159 784 : StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
3160 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3161 : Uint32Constant(length),
3162 : MachineRepresentation::kWord32);
3163 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
3164 1568 : Int32Constant(String::kEmptyHashField),
3165 : MachineRepresentation::kWord32);
3166 : return CAST(result);
3167 : }
3168 :
     : // Returns true if {object} is Smi zero or a Context. Used by slow asserts
     : // to validate optional context arguments (callers may pass SmiConstant(0)).
3169 0 : TNode<BoolT> CodeStubAssembler::IsZeroOrContext(SloppyTNode<Object> object) {
3170 0 : return Select<BoolT>(WordEqual(object, SmiConstant(0)),
3171 0 : [=] { return Int32TrueConstant(); },
3172 0 : [=] { return IsContext(CAST(object)); });
3173 : }
3174 :
     : // Allocates a SeqOneByteString of runtime {length}. Small strings are
     : // allocated inline in new space; large ones fall back to the runtime
     : // (which may use large-object space). Returns empty_string for length 0.
3175 1624 : TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3176 : Node* context, TNode<Uint32T> length, AllocationFlags flags) {
3177 1624 : Comment("AllocateSeqOneByteString");
3178 : CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3179 3248 : VARIABLE(var_result, MachineRepresentation::kTagged);
3180 :
3181 : // Compute the SeqOneByteString size and check if it fits into new space.
3182 1624 : Label if_lengthiszero(this), if_sizeissmall(this),
3183 1624 : if_notsizeissmall(this, Label::kDeferred), if_join(this);
3184 3248 : GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);
3185 :
3186 : Node* raw_size = GetArrayAllocationSize(
3187 3248 : Signed(ChangeUint32ToWord(length)), UINT8_ELEMENTS, INTPTR_PARAMETERS,
3188 : SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
     : // Round the size down to object alignment (raw_size included the mask).
3189 3248 : TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3190 4872 : Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3191 1624 : &if_sizeissmall, &if_notsizeissmall);
3192 :
3193 : BIND(&if_sizeissmall);
3194 : {
3195 : // Just allocate the SeqOneByteString in new space.
3196 : TNode<Object> result =
3197 : AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3198 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
3199 1624 : StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
3200 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3201 : length, MachineRepresentation::kWord32);
3202 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
3203 3248 : Int32Constant(String::kEmptyHashField),
3204 : MachineRepresentation::kWord32);
3205 1624 : var_result.Bind(result);
3206 1624 : Goto(&if_join);
3207 : }
3208 :
3209 : BIND(&if_notsizeissmall);
3210 : {
3211 : // We might need to allocate in large object space, go to the runtime.
3212 : Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
3213 3248 : ChangeUint32ToTagged(length));
3214 1624 : var_result.Bind(result);
3215 1624 : Goto(&if_join);
3216 : }
3217 :
3218 : BIND(&if_lengthiszero);
3219 : {
3220 3248 : var_result.Bind(LoadRoot(RootIndex::kempty_string));
3221 1624 : Goto(&if_join);
3222 : }
3223 :
3224 : BIND(&if_join);
3225 3248 : return CAST(var_result.value());
3226 : }
3227 :
     : // Allocates a SeqTwoByteString of compile-time known {length} with empty
     : // hash field; returns the empty_string root when {length} == 0.
3228 896 : TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3229 : uint32_t length, AllocationFlags flags) {
3230 896 : Comment("AllocateSeqTwoByteString");
3231 896 : if (length == 0) {
3232 0 : return CAST(LoadRoot(RootIndex::kempty_string));
3233 : }
3234 896 : Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
3235 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3236 896 : StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3237 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3238 : Uint32Constant(length),
3239 : MachineRepresentation::kWord32);
3240 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3241 1792 : Int32Constant(String::kEmptyHashField),
3242 : MachineRepresentation::kWord32);
3243 : return CAST(result);
3244 : }
3245 :
     : // Allocates a SeqTwoByteString of runtime {length}. Mirrors the one-byte
     : // variant: inline new-space allocation for small sizes, runtime fallback
     : // for large ones, empty_string for length 0.
3246 1232 : TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3247 : Node* context, TNode<Uint32T> length, AllocationFlags flags) {
3248 : CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3249 1232 : Comment("AllocateSeqTwoByteString");
3250 2464 : VARIABLE(var_result, MachineRepresentation::kTagged);
3251 :
3252 : // Compute the SeqTwoByteString size and check if it fits into new space.
3253 1232 : Label if_lengthiszero(this), if_sizeissmall(this),
3254 1232 : if_notsizeissmall(this, Label::kDeferred), if_join(this);
3255 2464 : GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);
3256 :
3257 : Node* raw_size = GetArrayAllocationSize(
3258 2464 : Signed(ChangeUint32ToWord(length)), UINT16_ELEMENTS, INTPTR_PARAMETERS,
3259 : SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
     : // Round the size down to object alignment (raw_size included the mask).
3260 2464 : TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3261 3696 : Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3262 1232 : &if_sizeissmall, &if_notsizeissmall);
3263 :
3264 : BIND(&if_sizeissmall);
3265 : {
3266 : // Just allocate the SeqTwoByteString in new space.
3267 : TNode<Object> result =
3268 : AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3269 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3270 1232 : StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3271 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3272 : length, MachineRepresentation::kWord32);
3273 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3274 2464 : Int32Constant(String::kEmptyHashField),
3275 : MachineRepresentation::kWord32);
3276 1232 : var_result.Bind(result);
3277 1232 : Goto(&if_join);
3278 : }
3279 :
3280 : BIND(&if_notsizeissmall);
3281 : {
3282 : // We might need to allocate in large object space, go to the runtime.
3283 : Node* result = CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
3284 2464 : ChangeUint32ToTagged(length));
3285 1232 : var_result.Bind(result);
3286 1232 : Goto(&if_join);
3287 : }
3288 :
3289 : BIND(&if_lengthiszero);
3290 : {
3291 2464 : var_result.Bind(LoadRoot(RootIndex::kempty_string));
3292 1232 : Goto(&if_join);
3293 : }
3294 :
3295 : BIND(&if_join);
3296 2464 : return CAST(var_result.value());
3297 : }
3298 :
     : // Allocates a SlicedString (one- or two-byte per {map_root_index}) viewing
     : // {length} characters of {parent} starting at {offset}. No write barriers
     : // are needed: the object is freshly allocated.
3299 896 : TNode<String> CodeStubAssembler::AllocateSlicedString(RootIndex map_root_index,
3300 : TNode<Uint32T> length,
3301 : TNode<String> parent,
3302 : TNode<Smi> offset) {
3303 : DCHECK(map_root_index == RootIndex::kSlicedOneByteStringMap ||
3304 : map_root_index == RootIndex::kSlicedStringMap);
3305 : Node* result = Allocate(SlicedString::kSize);
3306 : DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
3307 896 : StoreMapNoWriteBarrier(result, map_root_index);
3308 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
3309 1792 : Int32Constant(String::kEmptyHashField),
3310 : MachineRepresentation::kWord32);
3311 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
3312 : MachineRepresentation::kWord32);
3313 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
3314 : MachineRepresentation::kTagged);
3315 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
3316 : MachineRepresentation::kTagged);
3317 896 : return CAST(result);
3318 : }
3319 :
     : // Convenience wrapper: one-byte sliced string over {parent}.
3320 0 : TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
3321 : TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3322 : return AllocateSlicedString(RootIndex::kSlicedOneByteStringMap, length,
3323 448 : parent, offset);
3324 : }
3325 :
     : // Convenience wrapper: two-byte sliced string over {parent}.
3326 0 : TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
3327 : TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3328 : return AllocateSlicedString(RootIndex::kSlicedStringMap, length, parent,
3329 448 : offset);
3330 : }
3331 :
     : // Allocates a ConsString of {left} + {right} with total {length}. The map
     : // (one-byte vs two-byte cons) is chosen from the AND of the two instance
     : // types: the result is one-byte only if both inputs are one-byte.
3332 56 : TNode<String> CodeStubAssembler::AllocateConsString(TNode<Uint32T> length,
3333 : TNode<String> left,
3334 : TNode<String> right) {
3335 : // Added string can be a cons string.
3336 56 : Comment("Allocating ConsString");
3337 112 : Node* left_instance_type = LoadInstanceType(left);
3338 112 : Node* right_instance_type = LoadInstanceType(right);
3339 :
3340 : // Determine the resulting ConsString map to use depending on whether
3341 : // any of {left} or {right} has two byte encoding.
3342 : STATIC_ASSERT(kOneByteStringTag != 0);
3343 : STATIC_ASSERT(kTwoByteStringTag == 0);
3344 : Node* combined_instance_type =
3345 112 : Word32And(left_instance_type, right_instance_type);
3346 280 : TNode<Map> result_map = CAST(Select<Object>(
3347 : IsSetWord32(combined_instance_type, kStringEncodingMask),
3348 : [=] { return LoadRoot(RootIndex::kConsOneByteStringMap); },
3349 : [=] { return LoadRoot(RootIndex::kConsStringMap); }));
3350 112 : Node* result = AllocateInNewSpace(ConsString::kSize);
3351 : StoreMapNoWriteBarrier(result, result_map);
3352 : StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
3353 : MachineRepresentation::kWord32);
3354 : StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
3355 112 : Int32Constant(String::kEmptyHashField),
3356 : MachineRepresentation::kWord32);
3357 : StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, left);
3358 : StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, right);
3359 56 : return CAST(result);
3360 : }
3361 :
     : // Convenience overload for a compile-time constant minimum size.
3362 616 : TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3363 : int at_least_space_for) {
3364 1232 : return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
3365 : }
3366 :
     : // Allocates a NameDictionary sized to hold at least {at_least_space_for}
     : // entries, rounding up to the required hash-table capacity.
3367 228 : TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3368 : TNode<IntPtrT> at_least_space_for) {
3369 : CSA_ASSERT(this, UintPtrLessThanOrEqual(
3370 : at_least_space_for,
3371 : IntPtrConstant(NameDictionary::kMaxCapacity)));
3372 844 : TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
3373 844 : return AllocateNameDictionaryWithCapacity(capacity);
3374 : }
3375 :
     : // Allocates a NameDictionary of exactly {capacity} (must be a power of
     : // two, > 0) in new space and initializes all header fields; the element
     : // slots are filled with undefined via StoreFieldsNoWriteBarrier.
3376 1068 : TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
3377 : TNode<IntPtrT> capacity) {
3378 : CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3379 : CSA_ASSERT(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
3380 : TNode<IntPtrT> length = EntryToIndex<NameDictionary>(capacity);
3381 : TNode<IntPtrT> store_size = IntPtrAdd(
3382 1068 : TimesTaggedSize(length), IntPtrConstant(NameDictionary::kHeaderSize));
3383 :
3384 : TNode<NameDictionary> result =
3385 : UncheckedCast<NameDictionary>(AllocateInNewSpace(store_size));
3386 1068 : Comment("Initialize NameDictionary");
3387 : // Initialize FixedArray fields.
3388 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kNameDictionaryMap));
3389 1068 : StoreMapNoWriteBarrier(result, RootIndex::kNameDictionaryMap);
3390 : StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
3391 : SmiFromIntPtr(length));
3392 : // Initialize HashTable fields.
3393 1068 : TNode<Smi> zero = SmiConstant(0);
3394 1068 : StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
3395 1068 : SKIP_WRITE_BARRIER);
3396 1068 : StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
3397 1068 : zero, SKIP_WRITE_BARRIER);
3398 1068 : StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
3399 3204 : SmiTag(capacity), SKIP_WRITE_BARRIER);
3400 : // Initialize Dictionary fields.
3401 : TNode<HeapObject> filler = UndefinedConstant();
3402 1068 : StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
3403 2136 : SmiConstant(PropertyDetails::kInitialIndex),
3404 1068 : SKIP_WRITE_BARRIER);
3405 1068 : StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
3406 2136 : SmiConstant(PropertyArray::kNoHashSentinel),
3407 1068 : SKIP_WRITE_BARRIER);
3408 :
3409 : // Initialize NameDictionary elements.
3410 2136 : TNode<WordT> result_word = BitcastTaggedToWord(result);
3411 : TNode<WordT> start_address = IntPtrAdd(
3412 2136 : result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
3413 : NameDictionary::kElementsStartIndex) -
3414 1068 : kHeapObjectTag));
3415 : TNode<WordT> end_address = IntPtrAdd(
3416 2136 : result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
3417 1068 : StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3418 1068 : return result;
3419 : }
3420 :
     : // Copies {dictionary} into a freshly allocated NameDictionary of the same
     : // capacity. Jumps to {large_object_fallback} if the capacity exceeds the
     : // regular-object limit (the copy would need large-object space).
3421 224 : TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
3422 : TNode<NameDictionary> dictionary, Label* large_object_fallback) {
3423 224 : Comment("Copy boilerplate property dict");
3424 224 : TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
3425 : CSA_ASSERT(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)));
3426 448 : GotoIf(UintPtrGreaterThan(
3427 448 : capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
3428 224 : large_object_fallback);
3429 : TNode<NameDictionary> properties =
3430 224 : AllocateNameDictionaryWithCapacity(capacity);
3431 224 : TNode<IntPtrT> length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
3432 : CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
3433 224 : SKIP_WRITE_BARRIER, INTPTR_PARAMETERS);
3434 224 : return properties;
3435 : }
3436 :
     : // Allocates an empty OrderedHashTable (map or set) at the minimum
     : // capacity, with all bucket heads set to kNotFound and the data table
     : // filled with undefined. All sizes are compile-time constants.
3437 : template <typename CollectionType>
3438 112 : Node* CodeStubAssembler::AllocateOrderedHashTable() {
3439 : static const int kCapacity = CollectionType::kMinCapacity;
3440 : static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
3441 : static const int kDataTableLength = kCapacity * CollectionType::kEntrySize;
3442 : static const int kFixedArrayLength =
3443 : CollectionType::HashTableStartIndex() + kBucketCount + kDataTableLength;
3444 : static const int kDataTableStartIndex =
3445 : CollectionType::HashTableStartIndex() + kBucketCount;
3446 :
3447 : STATIC_ASSERT(base::bits::IsPowerOfTwo(kCapacity));
3448 : STATIC_ASSERT(kCapacity <= CollectionType::MaxCapacity());
3449 :
3450 : // Allocate the table and add the proper map.
3451 : const ElementsKind elements_kind = HOLEY_ELEMENTS;
3452 112 : TNode<IntPtrT> length_intptr = IntPtrConstant(kFixedArrayLength);
3453 : TNode<Map> fixed_array_map =
3454 112 : CAST(LoadRoot(CollectionType::GetMapRootIndex()));
3455 : TNode<FixedArray> table =
3456 : CAST(AllocateFixedArray(elements_kind, length_intptr,
3457 : kAllowLargeObjectAllocation, fixed_array_map));
3458 :
3459 : // Initialize the OrderedHashTable fields.
3460 : const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
3461 224 : StoreFixedArrayElement(table, CollectionType::NumberOfElementsIndex(),
3462 : SmiConstant(0), barrier_mode);
3463 224 : StoreFixedArrayElement(table, CollectionType::NumberOfDeletedElementsIndex(),
3464 : SmiConstant(0), barrier_mode);
3465 224 : StoreFixedArrayElement(table, CollectionType::NumberOfBucketsIndex(),
3466 : SmiConstant(kBucketCount), barrier_mode);
3467 :
3468 : // Fill the buckets with kNotFound.
3469 112 : TNode<Smi> not_found = SmiConstant(CollectionType::kNotFound);
3470 : STATIC_ASSERT(CollectionType::HashTableStartIndex() ==
3471 : CollectionType::NumberOfBucketsIndex() + 1);
3472 : STATIC_ASSERT((CollectionType::HashTableStartIndex() + kBucketCount) ==
3473 : kDataTableStartIndex);
3474 560 : for (int i = 0; i < kBucketCount; i++) {
3475 224 : StoreFixedArrayElement(table, CollectionType::HashTableStartIndex() + i,
3476 : not_found, barrier_mode);
3477 : }
3478 :
3479 : // Fill the data table with undefined.
3480 : STATIC_ASSERT(kDataTableStartIndex + kDataTableLength == kFixedArrayLength);
3481 2352 : for (int i = 0; i < kDataTableLength; i++) {
3482 1120 : StoreFixedArrayElement(table, kDataTableStartIndex + i, UndefinedConstant(),
3483 : barrier_mode);
3484 : }
3485 :
3486 112 : return table;
3487 : }
3488 :
     : // Explicit instantiations for the two ordered collection types.
3489 : template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashMap>();
3490 : template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashSet>();
3491 :
     : // Allocates a SmallOrderedHashTable (map or set) with the given runtime
     : // {capacity} (power of two, below kMaxCapacity). The total size is
     : // rounded up to tagged-size alignment; the hash/chain area is memset to
     : // 0xFF (the "not found" byte pattern) and the data table is filled with
     : // the-hole.
3492 : template <typename CollectionType>
3493 8 : TNode<CollectionType> CodeStubAssembler::AllocateSmallOrderedHashTable(
3494 : TNode<IntPtrT> capacity) {
3495 : CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3496 : CSA_ASSERT(this, IntPtrLessThan(
3497 : capacity, IntPtrConstant(CollectionType::kMaxCapacity)));
3498 :
3499 : TNode<IntPtrT> data_table_start_offset =
3500 8 : IntPtrConstant(CollectionType::DataTableStartOffset());
3501 :
3502 : TNode<IntPtrT> data_table_size = IntPtrMul(
3503 8 : capacity, IntPtrConstant(CollectionType::kEntrySize * kTaggedSize));
3504 :
     : // Number of buckets = capacity / load factor (one byte per bucket).
3505 : TNode<Int32T> hash_table_size =
3506 8 : Int32Div(TruncateIntPtrToInt32(capacity),
3507 32 : Int32Constant(CollectionType::kLoadFactor));
3508 :
3509 : TNode<IntPtrT> hash_table_start_offset =
3510 : IntPtrAdd(data_table_start_offset, data_table_size);
3511 :
3512 : TNode<IntPtrT> hash_table_and_chain_table_size =
3513 16 : IntPtrAdd(ChangeInt32ToIntPtr(hash_table_size), capacity);
3514 :
3515 : TNode<IntPtrT> total_size =
3516 : IntPtrAdd(hash_table_start_offset, hash_table_and_chain_table_size);
3517 :
     : // Round total_size up to a multiple of kTaggedSize.
3518 : TNode<IntPtrT> total_size_word_aligned =
3519 8 : IntPtrAdd(total_size, IntPtrConstant(kTaggedSize - 1));
3520 16 : total_size_word_aligned = ChangeInt32ToIntPtr(
3521 24 : Int32Div(TruncateIntPtrToInt32(total_size_word_aligned),
3522 : Int32Constant(kTaggedSize)));
3523 : total_size_word_aligned =
3524 : UncheckedCast<IntPtrT>(TimesTaggedSize(total_size_word_aligned));
3525 :
3526 : // Allocate the table and add the proper map.
3527 : TNode<Map> small_ordered_hash_map =
3528 8 : CAST(LoadRoot(CollectionType::GetMapRootIndex()));
3529 : TNode<Object> table_obj = AllocateInNewSpace(total_size_word_aligned);
3530 : StoreMapNoWriteBarrier(table_obj, small_ordered_hash_map);
3531 : TNode<CollectionType> table = UncheckedCast<CollectionType>(table_obj);
3532 :
3533 : // Initialize the SmallOrderedHashTable fields.
3534 24 : StoreObjectByteNoWriteBarrier(
3535 : table, CollectionType::NumberOfBucketsOffset(),
3536 : Word32And(Int32Constant(0xFF), hash_table_size));
3537 16 : StoreObjectByteNoWriteBarrier(table, CollectionType::NumberOfElementsOffset(),
3538 : Int32Constant(0));
3539 16 : StoreObjectByteNoWriteBarrier(
3540 : table, CollectionType::NumberOfDeletedElementsOffset(), Int32Constant(0));
3541 :
3542 : TNode<IntPtrT> table_address =
3543 24 : IntPtrSub(BitcastTaggedToWord(table), IntPtrConstant(kHeapObjectTag));
3544 : TNode<IntPtrT> hash_table_start_address =
3545 : IntPtrAdd(table_address, hash_table_start_offset);
3546 :
3547 : // Initialize the HashTable part.
3548 16 : Node* memset = ExternalConstant(ExternalReference::libc_memset_function());
3549 16 : CallCFunction(
3550 : memset, MachineType::AnyTagged(),
3551 : std::make_pair(MachineType::Pointer(), hash_table_start_address),
3552 : std::make_pair(MachineType::IntPtr(), IntPtrConstant(0xFF)),
3553 : std::make_pair(MachineType::UintPtr(), hash_table_and_chain_table_size));
3554 :
3555 : // Initialize the DataTable part.
3556 : TNode<HeapObject> filler = TheHoleConstant();
3557 : TNode<WordT> data_table_start_address =
3558 : IntPtrAdd(table_address, data_table_start_offset);
3559 : TNode<WordT> data_table_end_address =
3560 8 : IntPtrAdd(data_table_start_address, data_table_size);
3561 8 : StoreFieldsNoWriteBarrier(data_table_start_address, data_table_end_address,
3562 : filler);
3563 :
3564 8 : return table;
3565 : }
3566 :
     : // Explicit instantiations for the two small ordered collection types.
3567 : template V8_EXPORT_PRIVATE TNode<SmallOrderedHashMap>
3568 : CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
3569 : TNode<IntPtrT> capacity);
3570 : template V8_EXPORT_PRIVATE TNode<SmallOrderedHashSet>
3571 : CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
3572 : TNode<IntPtrT> capacity);
3573 :
     : // Looks up an entry in an OrderedHashTable by walking the bucket chain
     : // for {hash}. For each candidate entry, {key_compare} decides match vs.
     : // continue. On success, binds the entry's start index (relative to
     : // HashTableStartIndex) into {entry_start_position} and jumps to
     : // {entry_found}; otherwise jumps to {not_found}.
3574 : template <typename CollectionType>
3575 1680 : void CodeStubAssembler::FindOrderedHashTableEntry(
3576 : Node* table, Node* hash,
3577 : const std::function<void(Node*, Label*, Label*)>& key_compare,
3578 : Variable* entry_start_position, Label* entry_found, Label* not_found) {
3579 : // Get the index of the bucket.
3580 : Node* const number_of_buckets = SmiUntag(CAST(UnsafeLoadFixedArrayElement(
3581 3360 : CAST(table), CollectionType::NumberOfBucketsIndex())));
3582 : Node* const bucket =
3583 6720 : WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
3584 : Node* const first_entry = SmiUntag(CAST(UnsafeLoadFixedArrayElement(
3585 : CAST(table), bucket,
3586 3360 : CollectionType::HashTableStartIndex() * kTaggedSize)));
3587 :
3588 : // Walk the bucket chain.
3589 : Node* entry_start;
3590 1680 : Label if_key_found(this);
3591 : {
3592 3360 : VARIABLE(var_entry, MachineType::PointerRepresentation(), first_entry);
3593 5040 : Label loop(this, {&var_entry, entry_start_position}),
3594 1680 : continue_next_entry(this);
3595 1680 : Goto(&loop);
3596 : BIND(&loop);
3597 :
3598 : // If the entry index is the not-found sentinel, we are done.
3599 6720 : GotoIf(
3600 : WordEqual(var_entry.value(), IntPtrConstant(CollectionType::kNotFound)),
3601 : not_found);
3602 :
3603 : // Make sure the entry index is within range.
3604 : CSA_ASSERT(
3605 : this,
3606 : UintPtrLessThan(
3607 : var_entry.value(),
3608 : SmiUntag(SmiAdd(
3609 : CAST(UnsafeLoadFixedArrayElement(
3610 : CAST(table), CollectionType::NumberOfElementsIndex())),
3611 : CAST(UnsafeLoadFixedArrayElement(
3612 : CAST(table),
3613 : CollectionType::NumberOfDeletedElementsIndex()))))));
3614 :
3615 : // Compute the index of the entry relative to kHashTableStartIndex.
3616 8400 : entry_start =
3617 : IntPtrAdd(IntPtrMul(var_entry.value(),
3618 : IntPtrConstant(CollectionType::kEntrySize)),
3619 : number_of_buckets);
3620 :
3621 : // Load the key from the entry.
3622 : Node* const candidate_key = UnsafeLoadFixedArrayElement(
3623 : CAST(table), entry_start,
3624 : CollectionType::HashTableStartIndex() * kTaggedSize);
3625 :
3626 : key_compare(candidate_key, &if_key_found, &continue_next_entry);
3627 :
3628 : BIND(&continue_next_entry);
3629 : // Load the index of the next entry in the bucket chain.
3630 3360 : var_entry.Bind(SmiUntag(CAST(UnsafeLoadFixedArrayElement(
3631 : CAST(table), entry_start,
3632 : (CollectionType::HashTableStartIndex() + CollectionType::kChainOffset) *
3633 : kTaggedSize))));
3634 :
3635 1680 : Goto(&loop);
3636 : }
3637 :
3638 : BIND(&if_key_found);
3639 1680 : entry_start_position->Bind(entry_start);
3640 1680 : Goto(entry_found);
3641 1680 : }
3642 :
     : // Explicit instantiations for the two ordered collection types.
3643 : template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashMap>(
3644 : Node* table, Node* hash,
3645 : const std::function<void(Node*, Label*, Label*)>& key_compare,
3646 : Variable* entry_start_position, Label* entry_found, Label* not_found);
3647 : template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashSet>(
3648 : Node* table, Node* hash,
3649 : const std::function<void(Node*, Label*, Label*)>& key_compare,
3650 : Variable* entry_start_position, Label* entry_found, Label* not_found);
3651 :
     : // Allocates a Struct of the size recorded in {map} and fills its body
     : // (everything past the header) with undefined.
3652 8 : Node* CodeStubAssembler::AllocateStruct(Node* map, AllocationFlags flags) {
3653 8 : Comment("AllocateStruct");
3654 : CSA_ASSERT(this, IsMap(map));
3655 16 : TNode<IntPtrT> size = TimesTaggedSize(LoadMapInstanceSizeInWords(map));
3656 16 : TNode<Object> object = Allocate(size, flags);
3657 : StoreMapNoWriteBarrier(object, map);
3658 8 : InitializeStructBody(object, map, size, Struct::kHeaderSize);
3659 8 : return object;
3660 : }
3661 :
     : // Fills the fields of {object} from {start_offset} to {size} with the
     : // undefined value, using untagged address arithmetic (no write barriers).
3662 8 : void CodeStubAssembler::InitializeStructBody(Node* object, Node* map,
3663 : Node* size, int start_offset) {
3664 : CSA_SLOW_ASSERT(this, IsMap(map));
3665 8 : Comment("InitializeStructBody");
3666 : Node* filler = UndefinedConstant();
3667 : // Calculate the untagged field addresses.
3668 16 : object = BitcastTaggedToWord(object);
3669 : Node* start_address =
3670 24 : IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
3671 : Node* end_address =
3672 32 : IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
3673 8 : StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3674 8 : }
3675 :
     : // Allocates a JSObject for {map} in new space and initializes it.
     : // {properties}/{elements} may be nullptr, in which case the empty fixed
     : // array is used. Not valid for JSFunction or JSGlobalObject maps.
3676 2244 : Node* CodeStubAssembler::AllocateJSObjectFromMap(
3677 : Node* map, Node* properties, Node* elements, AllocationFlags flags,
3678 : SlackTrackingMode slack_tracking_mode) {
3679 : CSA_ASSERT(this, IsMap(map));
3680 : CSA_ASSERT(this, Word32BinaryNot(IsJSFunctionMap(map)));
3681 : CSA_ASSERT(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
3682 : JS_GLOBAL_OBJECT_TYPE)));
3683 : TNode<IntPtrT> instance_size =
3684 4488 : TimesTaggedSize(LoadMapInstanceSizeInWords(map));
3685 : TNode<Object> object = AllocateInNewSpace(instance_size, flags);
3686 : StoreMapNoWriteBarrier(object, map);
3687 : InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
3688 2244 : slack_tracking_mode);
3689 2244 : return object;
3690 : }
3691 :
     : // Initializes the properties/elements slots of a freshly allocated
     : // JSObject (nullptr means use the empty fixed array root) and then fills
     : // the in-object fields, optionally performing slack tracking.
3692 2244 : void CodeStubAssembler::InitializeJSObjectFromMap(
3693 : Node* object, Node* map, Node* instance_size, Node* properties,
3694 : Node* elements, SlackTrackingMode slack_tracking_mode) {
3695 : CSA_SLOW_ASSERT(this, IsMap(map));
3696 : // This helper assumes that the object is in new-space, as guarded by the
3697 : // check in AllocatedJSObjectFromMap.
3698 2244 : if (properties == nullptr) {
3699 : CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
3700 : StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
3701 1232 : RootIndex::kEmptyFixedArray);
3702 : } else {
3703 : CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(properties),
3704 : IsNameDictionary(properties)),
3705 : IsEmptyFixedArray(properties)));
3706 : StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
3707 : properties);
3708 : }
3709 2244 : if (elements == nullptr) {
3710 : StoreObjectFieldRoot(object, JSObject::kElementsOffset,
3711 2072 : RootIndex::kEmptyFixedArray);
3712 : } else {
3713 : CSA_ASSERT(this, IsFixedArray(elements));
3714 : StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
3715 : }
3716 2244 : if (slack_tracking_mode == kNoSlackTracking) {
3717 1740 : InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3718 : } else {
3719 : DCHECK_EQ(slack_tracking_mode, kWithSlackTracking);
3720 504 : InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
3721 : }
3722 2244 : }
3723 :
     : // Fills all in-object fields from {start_offset} up to {instance_size}
     : // with undefined. Asserts the map has no active slack tracking.
3724 2300 : void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
3725 : Node* object, Node* map, Node* instance_size, int start_offset) {
3726 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3727 : CSA_ASSERT(
3728 : this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
3729 4600 : InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
3730 2300 : RootIndex::kUndefinedValue);
3731 2300 : }
3732 :
     : // Fills the in-object fields of {object}, honoring in-object slack
     : // tracking: the used portion gets undefined, the slack gets one-pointer
     : // fillers, and the map's construction counter is decremented (finalizing
     : // the instance size via the runtime when it reaches the end).
3733 504 : void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
3734 : Node* object, Node* map, Node* instance_size) {
3735 : CSA_SLOW_ASSERT(this, IsMap(map));
     : // NOTE(review): this Comment() string says "NoSlackTracking" although we
     : // are in the WithSlackTracking variant — looks like a copy/paste; it is
     : // runtime text, so it is left unchanged here. Confirm upstream.
3736 504 : Comment("InitializeJSObjectBodyNoSlackTracking");
3737 :
3738 : // Perform in-object slack tracking if requested.
3739 : int start_offset = JSObject::kHeaderSize;
3740 : Node* bit_field3 = LoadMapBitField3(map);
3741 504 : Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
3742 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3743 504 : GotoIf(IsSetWord32<Map::ConstructionCounterBits>(bit_field3),
3744 504 : &slack_tracking);
3745 504 : Comment("No slack tracking");
3746 504 : InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3747 504 : Goto(&end);
3748 :
3749 : BIND(&slack_tracking);
3750 : {
3751 504 : Comment("Decrease construction counter");
3752 : // Slack tracking is only done on initial maps.
3753 : CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
3754 : STATIC_ASSERT(Map::ConstructionCounterBits::kNext == 32);
3755 1008 : Node* new_bit_field3 = Int32Sub(
3756 1008 : bit_field3, Int32Constant(1 << Map::ConstructionCounterBits::kShift));
3757 : StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3,
3758 : MachineRepresentation::kWord32);
3759 : STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3760 :
3761 : // The object still has in-object slack therefore the |used_or_unused|
3762 : // field contains the "used" value.
3763 : Node* used_size = TimesTaggedSize(ChangeUint32ToWord(
3764 : LoadObjectField(map, Map::kUsedOrUnusedInstanceSizeInWordsOffset,
3765 1512 : MachineType::Uint8())));
3766 :
3767 504 : Comment("iInitialize filler fields");
3768 : InitializeFieldsWithRoot(object, used_size, instance_size,
3769 504 : RootIndex::kOnePointerFillerMap);
3770 :
3771 504 : Comment("Initialize undefined fields");
3772 1008 : InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
3773 504 : RootIndex::kUndefinedValue);
3774 :
3775 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3776 504 : GotoIf(IsClearWord32<Map::ConstructionCounterBits>(new_bit_field3),
3777 504 : &complete);
3778 504 : Goto(&end);
3779 : }
3780 :
3781 : // Finalize the instance size.
3782 : BIND(&complete);
3783 : {
3784 : // CompleteInobjectSlackTracking doesn't allocate and thus doesn't need a
3785 : // context.
3786 : CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
3787 : NoContextConstant(), map);
3788 504 : Goto(&end);
3789 : }
3790 :
3791 : BIND(&end);
3792 504 : }
3793 :
     : // Stores {value} into every tagged-size slot in [start_address,
     : // end_address) without write barriers. Both addresses must be
     : // tagged-size aligned.
3794 1084 : void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
3795 : Node* end_address,
3796 : Node* value) {
3797 1084 : Comment("StoreFieldsNoWriteBarrier");
3798 : CSA_ASSERT(this, WordIsAligned(start_address, kTaggedSize));
3799 : CSA_ASSERT(this, WordIsAligned(end_address, kTaggedSize));
3800 1084 : BuildFastLoop(
3801 : start_address, end_address,
3802 1084 : [this, value](Node* current) {
3803 1084 : StoreNoWriteBarrier(MachineRepresentation::kTagged, current, value);
3804 : },
3805 1084 : kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
3806 1084 : }
3807 :
     : // Returns true if {capacity} does not exceed JSArray::kMaxFastArrayLength.
3808 168 : TNode<BoolT> CodeStubAssembler::IsValidFastJSArrayCapacity(
3809 : Node* capacity, ParameterMode capacity_mode) {
3810 : return UncheckedCast<BoolT>(
3811 : UintPtrLessThanOrEqual(ParameterToIntPtr(capacity, capacity_mode),
3812 336 : IntPtrConstant(JSArray::kMaxFastArrayLength)));
3813 : }
3814 :
     : // Allocates a JSArray using an already-existing {elements} backing store.
     : // If {allocation_site} is non-null, space for an AllocationMemento is
     : // reserved right after the array.
3815 3808 : TNode<JSArray> CodeStubAssembler::AllocateJSArray(
3816 : TNode<Map> array_map, TNode<FixedArrayBase> elements, TNode<Smi> length,
3817 : Node* allocation_site) {
3818 3808 : Comment("begin allocation of JSArray passing in elements");
3819 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3820 :
3821 : int base_size = JSArray::kSize;
3822 3808 : if (allocation_site != nullptr) {
3823 : base_size += AllocationMemento::kSize;
3824 : }
3825 :
3826 3808 : TNode<IntPtrT> size = IntPtrConstant(base_size);
3827 : TNode<JSArray> result =
3828 3808 : AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3829 : StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, elements);
3830 3808 : return result;
3831 : }
3832 :
// Allocates a JSArray together with its elements backing store of the given
// |capacity| and |kind|. Returns both objects; the elements contents are left
// uninitialized except on the large-object path, which pre-initializes them.
// |allocation_flags| may only contain kAllowLargeObjectAllocation (CHECKed).
std::pair<TNode<JSArray>, TNode<FixedArrayBase>>
CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
    ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
    Node* allocation_site, Node* capacity, ParameterMode capacity_mode,
    AllocationFlags allocation_flags) {
  Comment("begin allocation of JSArray with elements");
  CHECK_EQ(allocation_flags & ~kAllowLargeObjectAllocation, 0);
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));

  TVARIABLE(JSArray, array);
  TVARIABLE(FixedArrayBase, elements);

  // Fast path: a compile-time-zero capacity needs no elements store at all.
  if (IsIntPtrOrSmiConstantZero(capacity, capacity_mode)) {
    TNode<FixedArrayBase> empty_array = EmptyFixedArrayConstant();
    array = AllocateJSArray(array_map, empty_array, length, allocation_site);
    return {array.value(), empty_array};
  }

  Label out(this), empty(this), nonempty(this);

  // Runtime check for a zero capacity that was not a compile-time constant.
  Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
         &empty, &nonempty);

  BIND(&empty);
  {
    // Zero capacity: share the canonical empty FixedArray.
    TNode<FixedArrayBase> empty_array = EmptyFixedArrayConstant();
    array = AllocateJSArray(array_map, empty_array, length, allocation_site);
    elements = empty_array;
    Goto(&out);
  }

  BIND(&nonempty);
  {
    int base_size = JSArray::kSize;
    if (allocation_site != nullptr) base_size += AllocationMemento::kSize;

    // The elements store will be folded in directly behind the JSArray (and
    // memento, if any) on the normal path below.
    const int elements_offset = base_size;

    // Compute space for elements
    base_size += FixedArray::kHeaderSize;
    TNode<IntPtrT> size =
        ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);

    // For very large arrays in which the requested allocation exceeds the
    // maximal size of a regular heap object, we cannot use the allocation
    // folding trick. Instead, we first allocate the elements in large object
    // space, and then allocate the JSArray (and possibly the allocation
    // memento) in new space.
    if (allocation_flags & kAllowLargeObjectAllocation) {
      Label next(this);
      GotoIf(IsRegularHeapObjectSize(size), &next);

      CSA_CHECK(this, IsValidFastJSArrayCapacity(capacity, capacity_mode));

      // Allocate and initialize the elements first. Full initialization is
      // needed because the upcoming JSArray allocation could trigger GC.
      elements =
          AllocateFixedArray(kind, capacity, capacity_mode, allocation_flags);

      if (IsDoubleElementsKind(kind)) {
        FillFixedDoubleArrayWithZero(
            CAST(elements.value()),
            ParameterToIntPtr(capacity, capacity_mode));
      } else {
        FillFixedArrayWithSmiZero(CAST(elements.value()),
                                  ParameterToIntPtr(capacity, capacity_mode));
      }

      // The JSArray and possibly allocation memento next. Note that
      // allocation_flags are *not* passed on here and the resulting JSArray
      // will always be in new space.
      array =
          AllocateJSArray(array_map, elements.value(), length, allocation_site);

      Goto(&out);

      BIND(&next);
    }

    // Fold all objects into a single new space allocation.
    array =
        AllocateUninitializedJSArray(array_map, length, allocation_site, size);
    elements = UncheckedCast<FixedArrayBase>(
        InnerAllocate(array.value(), elements_offset));

    StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
                                   elements.value());

    // Setup elements object.
    STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kTaggedSize);
    RootIndex elements_map_index = IsDoubleElementsKind(kind)
                                       ? RootIndex::kFixedDoubleArrayMap
                                       : RootIndex::kFixedArrayMap;
    DCHECK(RootsTable::IsImmortalImmovable(elements_map_index));
    StoreMapNoWriteBarrier(elements.value(), elements_map_index);

    TNode<Smi> capacity_smi = ParameterToTagged(capacity, capacity_mode);
    CSA_ASSERT(this, SmiGreaterThan(capacity_smi, SmiConstant(0)));
    StoreObjectFieldNoWriteBarrier(elements.value(), FixedArray::kLengthOffset,
                                   capacity_smi);
    Goto(&out);
  }

  BIND(&out);
  return {array.value(), elements.value()};
}
3938 :
3939 5544 : TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArray(
3940 : TNode<Map> array_map, TNode<Smi> length, Node* allocation_site,
3941 : TNode<IntPtrT> size_in_bytes) {
3942 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3943 :
3944 : // Allocate space for the JSArray and the elements FixedArray in one go.
3945 : TNode<Object> array = AllocateInNewSpace(size_in_bytes);
3946 :
3947 : StoreMapNoWriteBarrier(array, array_map);
3948 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3949 : StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
3950 5544 : RootIndex::kEmptyFixedArray);
3951 :
3952 5544 : if (allocation_site != nullptr) {
3953 1680 : InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
3954 840 : allocation_site);
3955 : }
3956 :
3957 5544 : return CAST(array);
3958 : }
3959 :
// Allocates a JSArray with a backing store of |capacity| elements of |kind|,
// then fills the (possibly uninitialized) backing store with the hole so the
// result is safe to expose.
TNode<JSArray> CodeStubAssembler::AllocateJSArray(
    ElementsKind kind, TNode<Map> array_map, Node* capacity, TNode<Smi> length,
    Node* allocation_site, ParameterMode capacity_mode,
    AllocationFlags allocation_flags) {
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));

  TNode<JSArray> array;
  TNode<FixedArrayBase> elements;

  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
      kind, array_map, length, allocation_site, capacity, capacity_mode,
      allocation_flags);

  Label out(this), nonempty(this);

  // A zero-capacity array got the shared empty FixedArray above, which must
  // not be written to — skip the fill in that case.
  Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
         &out, &nonempty);

  BIND(&nonempty);
  {
    // Hole-initialize the fresh backing store.
    FillFixedArrayWithValue(kind, elements,
                            IntPtrOrSmiConstant(0, capacity_mode), capacity,
                            RootIndex::kTheHoleValue, capacity_mode);
    Goto(&out);
  }

  BIND(&out);
  return array;
}
3990 :
3991 56 : Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
3992 : Node* begin, Node* count,
3993 : ParameterMode mode, Node* capacity,
3994 : Node* allocation_site) {
3995 : Node* original_array_map = LoadMap(array);
3996 112 : Node* elements_kind = LoadMapElementsKind(original_array_map);
3997 :
3998 : // Use the cannonical map for the Array's ElementsKind
3999 : Node* native_context = LoadNativeContext(context);
4000 56 : TNode<Map> array_map = LoadJSArrayElementsMap(elements_kind, native_context);
4001 :
4002 : TNode<FixedArrayBase> new_elements = ExtractFixedArray(
4003 : LoadElements(array), begin, count, capacity,
4004 56 : ExtractFixedArrayFlag::kAllFixedArrays, mode, nullptr, elements_kind);
4005 :
4006 112 : TNode<Object> result = AllocateJSArray(
4007 : array_map, new_elements, ParameterToTagged(count, mode), allocation_site);
4008 56 : return result;
4009 : }
4010 :
// Clones a (fast) JSArray: copies its elements store — optionally converting
// holes to undefined — and allocates a new JSArray of the resulting kind.
// When holes are converted, the clone's kind may be widened to
// PACKED_ELEMENTS.
Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
                                          ParameterMode mode,
                                          Node* allocation_site,
                                          HoleConversionMode convert_holes) {
  // TODO(dhai): we should be able to assert IsFastJSArray(array) here, but this
  // function is also used to copy boilerplates even when the no-elements
  // protector is invalid. This function should be renamed to reflect its uses.
  CSA_ASSERT(this, IsJSArray(array));

  Node* length = LoadJSArrayLength(array);
  Node* new_elements = nullptr;
  VARIABLE(var_new_elements, MachineRepresentation::kTagged);
  TVARIABLE(Int32T, var_elements_kind, LoadMapElementsKind(LoadMap(array)));

  Label allocate_jsarray(this), holey_extract(this);

  bool need_conversion =
      convert_holes == HoleConversionMode::kConvertToUndefined;
  if (need_conversion) {
    // We need to take care of holes, if the array is of holey elements kind.
    GotoIf(IsHoleyFastElementsKind(var_elements_kind.value()), &holey_extract);
  }

  // Simple extraction that preserves holes.
  new_elements =
      ExtractFixedArray(LoadElements(array), IntPtrOrSmiConstant(0, mode),
                        TaggedToParameter(length, mode), nullptr,
                        ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, mode,
                        nullptr, var_elements_kind.value());
  var_new_elements.Bind(new_elements);
  Goto(&allocate_jsarray);

  if (need_conversion) {
    BIND(&holey_extract);
    // Convert holes to undefined.
    TVARIABLE(BoolT, var_holes_converted, Int32FalseConstant());
    // Copy |array|'s elements store. The copy will be compatible with the
    // original elements kind unless there are holes in the source. Any holes
    // get converted to undefined, hence in that case the copy is compatible
    // only with PACKED_ELEMENTS and HOLEY_ELEMENTS, and we will choose
    // PACKED_ELEMENTS. Also, if we want to replace holes, we must not use
    // ExtractFixedArrayFlag::kDontCopyCOW.
    new_elements = ExtractFixedArray(
        LoadElements(array), IntPtrOrSmiConstant(0, mode),
        TaggedToParameter(length, mode), nullptr,
        ExtractFixedArrayFlag::kAllFixedArrays, mode, &var_holes_converted);
    var_new_elements.Bind(new_elements);
    // If the array type didn't change, use the original elements kind.
    GotoIfNot(var_holes_converted.value(), &allocate_jsarray);
    // Otherwise use PACKED_ELEMENTS for the target's elements kind.
    var_elements_kind = Int32Constant(PACKED_ELEMENTS);
    Goto(&allocate_jsarray);
  }

  BIND(&allocate_jsarray);
  // Use the canonical map for the chosen elements kind.
  Node* native_context = LoadNativeContext(context);
  TNode<Map> array_map =
      LoadJSArrayElementsMap(var_elements_kind.value(), native_context);

  TNode<Object> result = AllocateJSArray(
      array_map, CAST(var_new_elements.value()), CAST(length), allocation_site);
  return result;
}
4075 :
// Allocates a FixedArray or FixedDoubleArray (chosen by |kind|) of |capacity|
// elements. Aborts via the OOM runtime function when the capacity exceeds the
// kind's maximum length. The elements are NOT initialized. An explicit
// |fixed_array_map| overrides the default map for the kind.
TNode<FixedArrayBase> CodeStubAssembler::AllocateFixedArray(
    ElementsKind kind, Node* capacity, ParameterMode mode,
    AllocationFlags flags, SloppyTNode<Map> fixed_array_map) {
  Comment("AllocateFixedArray");
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity,
                                          IntPtrOrSmiConstant(0, mode), mode));

  const intptr_t kMaxLength = IsDoubleElementsKind(kind)
                                  ? FixedDoubleArray::kMaxLength
                                  : FixedArray::kMaxLength;
  intptr_t capacity_constant;
  if (ToParameterConstant(capacity, &capacity_constant, mode)) {
    // Compile-time capacity: the bound can be checked statically.
    CHECK_LE(capacity_constant, kMaxLength);
  } else {
    // Runtime capacity: emit a range check that funnels oversized requests
    // into a fatal OOM runtime call.
    Label if_out_of_memory(this, Label::kDeferred), next(this);
    Branch(IntPtrOrSmiGreaterThan(
               capacity,
               IntPtrOrSmiConstant(static_cast<int>(kMaxLength), mode), mode),
           &if_out_of_memory, &next);

    BIND(&if_out_of_memory);
    CallRuntime(Runtime::kFatalProcessOutOfMemoryInvalidArrayLength,
                NoContextConstant());
    Unreachable();

    BIND(&next);
  }

  TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind, mode);

  if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
  // Allocate both array and elements object, and initialize the JSArray.
  Node* array = Allocate(total_size, flags);
  if (fixed_array_map != nullptr) {
    // Conservatively only skip the write barrier if there are no allocation
    // flags, this ensures that the object hasn't ended up in LOS. Note that the
    // fixed array map is currently always immortal and technically wouldn't
    // need the write barrier even in LOS, but it's better to not take chances
    // in case this invariant changes later, since it's difficult to enforce
    // locally here.
    if (flags == CodeStubAssembler::kNone) {
      StoreMapNoWriteBarrier(array, fixed_array_map);
    } else {
      StoreMap(array, fixed_array_map);
    }
  } else {
    RootIndex map_index = IsDoubleElementsKind(kind)
                              ? RootIndex::kFixedDoubleArrayMap
                              : RootIndex::kFixedArrayMap;
    DCHECK(RootsTable::IsImmortalImmovable(map_index));
    StoreMapNoWriteBarrier(array, map_index);
  }
  StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
                                 ParameterToTagged(capacity, mode));
  return UncheckedCast<FixedArray>(array);
}
4133 :
// Copies |count| elements of |source| starting at |first| into a freshly
// allocated FixedArray of |capacity| slots. COW sources may be returned
// as-is when the flags allow it and the whole array is requested. Chooses
// young- vs old-generation copy strategies depending on size and flags.
TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
    Node* source, Node* first, Node* count, Node* capacity, Node* source_map,
    ElementsKind from_kind, AllocationFlags allocation_flags,
    ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
    HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted,
    Node* source_elements_kind) {
  DCHECK_NE(first, nullptr);
  DCHECK_NE(count, nullptr);
  DCHECK_NE(capacity, nullptr);
  DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
  CSA_ASSERT(this,
             WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity));
  CSA_ASSERT(this, WordEqual(source_map, LoadMap(source)));

  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_target_map, MachineRepresentation::kTagged, source_map);

  Label done(this, {&var_result}), is_cow(this),
      new_space_check(this, {&var_target_map});

  // If source_map is either FixedDoubleArrayMap, or FixedCOWArrayMap but
  // we can't just use COW, use FixedArrayMap as the target map. Otherwise, use
  // source_map as the target map.
  if (IsDoubleElementsKind(from_kind)) {
    CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
    var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
    Goto(&new_space_check);
  } else {
    CSA_ASSERT(this, Word32BinaryNot(IsFixedDoubleArrayMap(source_map)));
    Branch(WordEqual(var_target_map.value(),
                     LoadRoot(RootIndex::kFixedCOWArrayMap)),
           &is_cow, &new_space_check);

    BIND(&is_cow);
    {
      // |source| is a COW array, so we don't actually need to allocate a new
      // array unless:
      // 1) |extract_flags| forces us to, or
      // 2) we're asked to extract only part of the |source| (|first| != 0).
      if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
        Branch(WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), first),
               &new_space_check, [&] {
                 // Whole COW array requested: return it without copying.
                 var_result.Bind(source);
                 Goto(&done);
               });
      } else {
        var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
        Goto(&new_space_check);
      }
    }
  }

  BIND(&new_space_check);
  {
    // Old-space handling is only needed when the young generation cannot host
    // large objects and the copy might not fit in new space.
    bool handle_old_space = !FLAG_young_generation_large_objects;
    if (handle_old_space) {
      if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
        handle_old_space = false;
        CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
                             count, FixedArray::kHeaderSize, parameter_mode)));
      } else {
        int constant_count;
        handle_old_space =
            !TryGetIntPtrOrSmiConstantValue(count, &constant_count,
                                            parameter_mode) ||
            (constant_count >
             FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
      }
    }

    Label old_space(this, Label::kDeferred);
    if (handle_old_space) {
      GotoIfFixedArraySizeDoesntFitInNewSpace(
          capacity, &old_space, FixedArray::kHeaderSize, parameter_mode);
    }

    Comment("Copy FixedArray in young generation");
    // We use PACKED_ELEMENTS to tell AllocateFixedArray and
    // CopyFixedArrayElements that we want a FixedArray.
    const ElementsKind to_kind = PACKED_ELEMENTS;
    TNode<FixedArrayBase> to_elements =
        AllocateFixedArray(to_kind, capacity, parameter_mode, allocation_flags,
                           var_target_map.value());
    var_result.Bind(to_elements);

#ifdef DEBUG
    // Debug-only check that the freshly allocated object really is in the
    // young generation, as the fast copy below assumes.
    TNode<IntPtrT> object_word = BitcastTaggedToWord(to_elements);
    TNode<IntPtrT> object_page = PageFromAddress(object_word);
    TNode<IntPtrT> page_flags =
        UncheckedCast<IntPtrT>(Load(MachineType::IntPtr(), object_page,
                                    IntPtrConstant(Page::kFlagsOffset)));
    CSA_ASSERT(
        this,
        WordNotEqual(
            WordAnd(page_flags,
                    IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
            IntPtrConstant(0)));
#endif

    if (convert_holes == HoleConversionMode::kDontConvert &&
        !IsDoubleElementsKind(from_kind)) {
      // We can use CopyElements (memcpy) because we don't need to replace or
      // convert any values. Since {to_elements} is in new-space, CopyElements
      // will efficiently use memcpy.
      FillFixedArrayWithValue(to_kind, to_elements, count, capacity,
                              RootIndex::kTheHoleValue, parameter_mode);
      CopyElements(to_kind, to_elements, IntPtrConstant(0), CAST(source),
                   ParameterToIntPtr(first, parameter_mode),
                   ParameterToIntPtr(count, parameter_mode),
                   SKIP_WRITE_BARRIER);
    } else {
      CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
                             count, capacity, SKIP_WRITE_BARRIER,
                             parameter_mode, convert_holes,
                             var_holes_converted);
    }
    Goto(&done);

    if (handle_old_space) {
      BIND(&old_space);
      {
        Comment("Copy FixedArray in old generation");
        Label copy_one_by_one(this);

        // Try to use memcpy if we don't need to convert holes to undefined.
        if (convert_holes == HoleConversionMode::kDontConvert &&
            source_elements_kind != nullptr) {
          // Only try memcpy if we're not copying object pointers.
          GotoIfNot(IsFastSmiElementsKind(source_elements_kind),
                    &copy_one_by_one);

          const ElementsKind to_smi_kind = PACKED_SMI_ELEMENTS;
          to_elements =
              AllocateFixedArray(to_smi_kind, capacity, parameter_mode,
                                 allocation_flags, var_target_map.value());
          var_result.Bind(to_elements);

          FillFixedArrayWithValue(to_smi_kind, to_elements, count, capacity,
                                  RootIndex::kTheHoleValue, parameter_mode);
          // CopyElements will try to use memcpy if it's not conflicting with
          // GC. Otherwise it will copy elements by elements, but skip write
          // barriers (since we're copying smis to smis).
          CopyElements(to_smi_kind, to_elements, IntPtrConstant(0),
                       CAST(source), ParameterToIntPtr(first, parameter_mode),
                       ParameterToIntPtr(count, parameter_mode),
                       SKIP_WRITE_BARRIER);
          Goto(&done);
        } else {
          Goto(&copy_one_by_one);
        }

        BIND(&copy_one_by_one);
        {
          to_elements =
              AllocateFixedArray(to_kind, capacity, parameter_mode,
                                 allocation_flags, var_target_map.value());
          var_result.Bind(to_elements);
          CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
                                 count, capacity, UPDATE_WRITE_BARRIER,
                                 parameter_mode, convert_holes,
                                 var_holes_converted);
          Goto(&done);
        }
      }
    }
  }

  BIND(&done);
  return UncheckedCast<FixedArray>(var_result.value());
}
4304 :
// Copies a FixedDoubleArray slice into a new FixedDoubleArray, watching for
// holes. On encountering the first hole it abandons the double copy, redoes
// the extraction into a FixedArray with holes replaced by undefined, and
// reports that through |var_holes_converted|.
TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedDoubleArrayFillingHoles(
    Node* from_array, Node* first, Node* count, Node* capacity,
    Node* fixed_array_map, TVariable<BoolT>* var_holes_converted,
    AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags,
    ParameterMode mode) {
  DCHECK_NE(first, nullptr);
  DCHECK_NE(count, nullptr);
  DCHECK_NE(capacity, nullptr);
  DCHECK_NE(var_holes_converted, nullptr);
  CSA_ASSERT(this, IsFixedDoubleArrayMap(fixed_array_map));

  VARIABLE(var_result, MachineRepresentation::kTagged);
  const ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
  Node* to_elements = AllocateFixedArray(kind, capacity, mode, allocation_flags,
                                         fixed_array_map);
  var_result.Bind(to_elements);
  // We first try to copy the FixedDoubleArray to a new FixedDoubleArray.
  // |var_holes_converted| is set to False preliminarily.
  *var_holes_converted = Int32FalseConstant();

  // The construction of the loop and the offsets for double elements is
  // extracted from CopyFixedArrayElements.
  CSA_SLOW_ASSERT(this, MatchesParameterMode(count, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, kind));
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);

  Comment("[ ExtractFixedDoubleArrayFillingHoles");

  // This copy can trigger GC, so we pre-initialize the array with holes.
  FillFixedArrayWithValue(kind, to_elements, IntPtrOrSmiConstant(0, mode),
                          capacity, RootIndex::kTheHoleValue, mode);

  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
  Node* first_from_element_offset =
      ElementOffsetFromIndex(first, kind, mode, 0);
  // The loop below walks offsets downwards until it reaches |limit_offset|,
  // which corresponds to index |first|.
  Node* limit_offset = IntPtrAdd(first_from_element_offset,
                                 IntPtrConstant(first_element_offset));
  VARIABLE(var_from_offset, MachineType::PointerRepresentation(),
           ElementOffsetFromIndex(IntPtrOrSmiAdd(first, count, mode), kind,
                                  mode, first_element_offset));

  Label decrement(this, {&var_from_offset}), done(this);
  // Bias the destination base address so the same offset indexes both arrays
  // even though the copy does not start at element 0.
  Node* to_array_adjusted =
      IntPtrSub(BitcastTaggedToWord(to_elements), first_from_element_offset);

  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);

  BIND(&decrement);
  {
    Node* from_offset =
        IntPtrSub(var_from_offset.value(), IntPtrConstant(kDoubleSize));
    var_from_offset.Bind(from_offset);

    Node* to_offset = from_offset;

    Label if_hole(this);

    // Loads the double value, jumping to |if_hole| when the slot holds the
    // hole NaN.
    Node* value = LoadElementAndPrepareForStore(
        from_array, var_from_offset.value(), kind, kind, &if_hole);

    StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
                        to_offset, value);

    Node* compare = WordNotEqual(from_offset, limit_offset);
    Branch(compare, &decrement, &done);

    BIND(&if_hole);
    // We are unlucky: there are holes! We need to restart the copy, this time
    // we will copy the FixedDoubleArray to a new FixedArray with undefined
    // replacing holes. We signal this to the caller through
    // |var_holes_converted|.
    *var_holes_converted = Int32TrueConstant();
    to_elements =
        ExtractToFixedArray(from_array, first, count, capacity, fixed_array_map,
                            kind, allocation_flags, extract_flags, mode,
                            HoleConversionMode::kConvertToUndefined);
    var_result.Bind(to_elements);
    Goto(&done);
  }

  BIND(&done);
  Comment("] ExtractFixedDoubleArrayFillingHoles");
  return UncheckedCast<FixedArrayBase>(var_result.value());
}
4390 :
// General entry point for copying a slice of a FixedArray(-like) backing
// store. Dispatches on the source's map to the FixedArray or FixedDoubleArray
// copy path according to |extract_flags|. |first|, |count| and |capacity|
// default to 0, length-first, and count respectively when passed as nullptr.
TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
    Node* source, Node* first, Node* count, Node* capacity,
    ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
    TVariable<BoolT>* var_holes_converted, Node* source_runtime_kind) {
  DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays ||
         extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays);
  // If we want to replace holes, ExtractFixedArrayFlag::kDontCopyCOW should not
  // be used, because that disables the iteration which detects holes.
  DCHECK_IMPLIES(var_holes_converted != nullptr,
                 !(extract_flags & ExtractFixedArrayFlag::kDontCopyCOW));
  HoleConversionMode convert_holes =
      var_holes_converted != nullptr ? HoleConversionMode::kConvertToUndefined
                                     : HoleConversionMode::kDontConvert;
  VARIABLE(var_result, MachineRepresentation::kTagged);
  const AllocationFlags allocation_flags =
      (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
          ? CodeStubAssembler::kNone
          : CodeStubAssembler::kAllowLargeObjectAllocation;
  if (first == nullptr) {
    // Default: extract from the beginning.
    first = IntPtrOrSmiConstant(0, parameter_mode);
  }
  if (count == nullptr) {
    // Default: extract everything from |first| to the end of |source|.
    count = IntPtrOrSmiSub(
        TaggedToParameter(LoadFixedArrayBaseLength(source), parameter_mode),
        first, parameter_mode);

    CSA_ASSERT(
        this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant(0, parameter_mode),
                                         count, parameter_mode));
  }
  if (capacity == nullptr) {
    // Default: size the result exactly to the number of copied elements.
    capacity = count;
  } else {
    CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
                         IntPtrOrSmiAdd(first, count, parameter_mode), capacity,
                         parameter_mode)));
  }

  Label if_fixed_double_array(this), empty(this), done(this, {&var_result});
  Node* source_map = LoadMap(source);
  GotoIf(WordEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity), &empty);

  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
    if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
      GotoIf(IsFixedDoubleArrayMap(source_map), &if_fixed_double_array);
    } else {
      CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
    }
  }

  if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
    // Here we can only get |source| as FixedArray, never FixedDoubleArray.
    // PACKED_ELEMENTS is used to signify that the source is a FixedArray.
    Node* to_elements = ExtractToFixedArray(
        source, first, count, capacity, source_map, PACKED_ELEMENTS,
        allocation_flags, extract_flags, parameter_mode, convert_holes,
        var_holes_converted, source_runtime_kind);
    var_result.Bind(to_elements);
    Goto(&done);
  }

  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
    BIND(&if_fixed_double_array);
    Comment("Copy FixedDoubleArray");

    if (convert_holes == HoleConversionMode::kConvertToUndefined) {
      Node* to_elements = ExtractFixedDoubleArrayFillingHoles(
          source, first, count, capacity, source_map, var_holes_converted,
          allocation_flags, extract_flags, parameter_mode);
      var_result.Bind(to_elements);
    } else {
      // We use PACKED_DOUBLE_ELEMENTS to signify that both the source and
      // the target are FixedDoubleArray. That it is PACKED or HOLEY does not
      // matter.
      ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
      TNode<FixedArrayBase> to_elements = AllocateFixedArray(
          kind, capacity, parameter_mode, allocation_flags, source_map);
      FillFixedArrayWithValue(kind, to_elements, count, capacity,
                              RootIndex::kTheHoleValue, parameter_mode);
      CopyElements(kind, to_elements, IntPtrConstant(0), CAST(source),
                   ParameterToIntPtr(first, parameter_mode),
                   ParameterToIntPtr(count, parameter_mode));
      var_result.Bind(to_elements);
    }

    Goto(&done);
  }

  BIND(&empty);
  {
    Comment("Copy empty array");

    // Zero capacity: share the canonical empty FixedArray.
    var_result.Bind(EmptyFixedArrayConstant());
    Goto(&done);
  }

  BIND(&done);
  return UncheckedCast<FixedArray>(var_result.value());
}
4490 :
4491 504 : void CodeStubAssembler::InitializePropertyArrayLength(Node* property_array,
4492 : Node* length,
4493 : ParameterMode mode) {
4494 : CSA_SLOW_ASSERT(this, IsPropertyArray(property_array));
4495 : CSA_ASSERT(
4496 : this, IntPtrOrSmiGreaterThan(length, IntPtrOrSmiConstant(0, mode), mode));
4497 : CSA_ASSERT(
4498 : this,
4499 : IntPtrOrSmiLessThanOrEqual(
4500 : length, IntPtrOrSmiConstant(PropertyArray::LengthField::kMax, mode),
4501 : mode));
4502 : StoreObjectFieldNoWriteBarrier(
4503 : property_array, PropertyArray::kLengthAndHashOffset,
4504 : ParameterToTagged(length, mode), MachineRepresentation::kTaggedSigned);
4505 504 : }
4506 :
4507 504 : Node* CodeStubAssembler::AllocatePropertyArray(Node* capacity_node,
4508 : ParameterMode mode,
4509 : AllocationFlags flags) {
4510 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
4511 : CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
4512 : IntPtrOrSmiConstant(0, mode), mode));
4513 : TNode<IntPtrT> total_size =
4514 504 : GetPropertyArrayAllocationSize(capacity_node, mode);
4515 :
4516 1008 : TNode<Object> array = Allocate(total_size, flags);
4517 : RootIndex map_index = RootIndex::kPropertyArrayMap;
4518 : DCHECK(RootsTable::IsImmortalImmovable(map_index));
4519 504 : StoreMapNoWriteBarrier(array, map_index);
4520 504 : InitializePropertyArrayLength(array, capacity_node, mode);
4521 504 : return array;
4522 : }
4523 :
4524 504 : void CodeStubAssembler::FillPropertyArrayWithUndefined(Node* array,
4525 : Node* from_node,
4526 : Node* to_node,
4527 : ParameterMode mode) {
4528 : CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
4529 : CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
4530 : CSA_SLOW_ASSERT(this, IsPropertyArray(array));
4531 : ElementsKind kind = PACKED_ELEMENTS;
4532 : Node* value = UndefinedConstant();
4533 504 : BuildFastFixedArrayForEach(array, kind, from_node, to_node,
4534 504 : [this, value](Node* array, Node* offset) {
4535 504 : StoreNoWriteBarrier(
4536 : MachineRepresentation::kTagged, array,
4537 504 : offset, value);
4538 : },
4539 504 : mode);
4540 504 : }
4541 :
// Fills {array} slots in [from_node, to_node) with the root value named by
// {value_root_index} (restricted to the-hole or undefined). For double
// elements kinds the root's HeapNumber payload is stored as a raw float64;
// otherwise the tagged root pointer is stored. Roots are immortal immovable,
// so no write barrier is needed in either case.
void CodeStubAssembler::FillFixedArrayWithValue(ElementsKind kind, Node* array,
                                                Node* from_node, Node* to_node,
                                                RootIndex value_root_index,
                                                ParameterMode mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
  DCHECK(value_root_index == RootIndex::kTheHoleValue ||
         value_root_index == RootIndex::kUndefinedValue);

  // Determine the value to initialize the {array} based
  // on the {value_root_index} and the elements {kind}.
  Node* value = LoadRoot(value_root_index);
  if (IsDoubleElementsKind(kind)) {
    // For FixedDoubleArray targets, unbox the root HeapNumber once outside
    // the loop and store its float64 bits directly.
    value = LoadHeapNumberValue(value);
  }

  BuildFastFixedArrayForEach(
      array, kind, from_node, to_node,
      [this, value, kind](Node* array, Node* offset) {
        if (IsDoubleElementsKind(kind)) {
          StoreNoWriteBarrier(MachineRepresentation::kFloat64, array, offset,
                              value);
        } else {
          StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
                              value);
        }
      },
      mode);
}
4572 :
// Writes the hole NaN bit pattern into {array} at {index}. The hole is
// stored via integer (word) stores rather than a float64 store so the
// signaling-NaN bit pattern is preserved exactly; on 32-bit targets the
// two 32-bit halves are written separately.
void CodeStubAssembler::StoreFixedDoubleArrayHole(
    TNode<FixedDoubleArray> array, Node* index, ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index, parameter_mode));
  Node* offset =
      ElementOffsetFromIndex(index, PACKED_DOUBLE_ELEMENTS, parameter_mode,
                             FixedArray::kHeaderSize - kHeapObjectTag);
  CSA_ASSERT(this, IsOffsetInBounds(
                       offset, LoadAndUntagFixedArrayBaseLength(array),
                       FixedDoubleArray::kHeaderSize, PACKED_DOUBLE_ELEMENTS));
  Node* double_hole =
      Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
             : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
  // TODO(danno): When we have a Float32/Float64 wrapper class that
  // preserves double bits during manipulation, remove this code/change
  // this to an indexed Float64 store.
  if (Is64()) {
    StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
                        double_hole);
  } else {
    // 32-bit: store the hole pattern into both halves of the double slot.
    StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
                        double_hole);
    StoreNoWriteBarrier(MachineRepresentation::kWord32, array,
                        IntPtrAdd(offset, IntPtrConstant(kInt32Size)),
                        double_hole);
  }
}
4599 :
// Zero-fills the entire FixedArray {array} ({length} must equal the array's
// length) by calling out to libc memset. This works because Smi zero is the
// all-zero-bits tagged value, so a byte-wise memset(0) yields valid Smi
// elements and no write barrier is required.
void CodeStubAssembler::FillFixedArrayWithSmiZero(TNode<FixedArray> array,
                                                  TNode<IntPtrT> length) {
  CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));

  TNode<IntPtrT> byte_length = TimesTaggedSize(length);
  CSA_ASSERT(this, UintPtrLessThan(length, byte_length));

  static const int32_t fa_base_data_offset =
      FixedArray::kHeaderSize - kHeapObjectTag;
  // Raw (untagged) pointer to the first element's storage.
  TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
                                           IntPtrConstant(fa_base_data_offset));

  // Call out to memset to perform initialization.
  TNode<ExternalReference> memset =
      ExternalConstant(ExternalReference::libc_memset_function());
  STATIC_ASSERT(kSizetSize == kIntptrSize);
  CallCFunction(memset, MachineType::Pointer(),
                std::make_pair(MachineType::Pointer(), backing_store),
                std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
                std::make_pair(MachineType::UintPtr(), byte_length));
}
4621 :
// Zero-fills the entire FixedDoubleArray {array} ({length} must equal the
// array's length) via libc memset: +0.0 is the all-zero-bits float64, so a
// byte-wise memset(0) produces valid double elements. Raw doubles never need
// a write barrier.
void CodeStubAssembler::FillFixedDoubleArrayWithZero(
    TNode<FixedDoubleArray> array, TNode<IntPtrT> length) {
  CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));

  TNode<IntPtrT> byte_length = TimesDoubleSize(length);
  CSA_ASSERT(this, UintPtrLessThan(length, byte_length));

  static const int32_t fa_base_data_offset =
      FixedDoubleArray::kHeaderSize - kHeapObjectTag;
  // Raw (untagged) pointer to the first element's storage.
  TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
                                           IntPtrConstant(fa_base_data_offset));

  // Call out to memset to perform initialization.
  TNode<ExternalReference> memset =
      ExternalConstant(ExternalReference::libc_memset_function());
  STATIC_ASSERT(kSizetSize == kIntptrSize);
  CallCFunction(memset, MachineType::Pointer(),
                std::make_pair(MachineType::Pointer(), backing_store),
                std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
                std::make_pair(MachineType::UintPtr(), byte_length));
}
4643 :
// Jumps to {interesting} if the page containing {object} has the
// kPointersFromHereAreInterestingMask flag set in its MemoryChunk flags,
// i.e. if stores into {object} may require a write barrier; falls through
// otherwise.
void CodeStubAssembler::JumpIfPointersFromHereAreInteresting(
    TNode<Object> object, Label* interesting) {
  Label finished(this);
  TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
  // Mask the address down to its page, then read the page's flag word.
  TNode<IntPtrT> object_page = PageFromAddress(object_word);
  TNode<IntPtrT> page_flags = UncheckedCast<IntPtrT>(Load(
      MachineType::IntPtr(), object_page, IntPtrConstant(Page::kFlagsOffset)));
  Branch(
      WordEqual(WordAnd(page_flags,
                        IntPtrConstant(
                            MemoryChunk::kPointersFromHereAreInterestingMask)),
                IntPtrConstant(0)),
      &finished, interesting);
  BIND(&finished);
}
4659 :
// Moves {length} elements within {elements} from {src_index} to {dst_index}
// (overlapping ranges allowed). Fast path: a single libc memmove of the raw
// backing store. If {elements} may need write barriers (tagged kinds on a
// page with interesting pointers), falls back to an element-by-element
// barriered copy, walking forward or backward so the overlapping source is
// never clobbered before it is read.
void CodeStubAssembler::MoveElements(ElementsKind kind,
                                     TNode<FixedArrayBase> elements,
                                     TNode<IntPtrT> dst_index,
                                     TNode<IntPtrT> src_index,
                                     TNode<IntPtrT> length) {
  Label finished(this);
  Label needs_barrier(this);
  // Double arrays hold raw float64s, so they can never need a barrier.
  const bool needs_barrier_check = !IsDoubleElementsKind(kind);

  DCHECK(IsFastElementsKind(kind));
  CSA_ASSERT(this, IsFixedArrayWithKind(elements, kind));
  CSA_ASSERT(this,
             IntPtrLessThanOrEqual(IntPtrAdd(dst_index, length),
                                   LoadAndUntagFixedArrayBaseLength(elements)));
  CSA_ASSERT(this,
             IntPtrLessThanOrEqual(IntPtrAdd(src_index, length),
                                   LoadAndUntagFixedArrayBaseLength(elements)));

  // The write barrier can be ignored if {dst_elements} is in new space, or if
  // the elements pointer is FixedDoubleArray.
  if (needs_barrier_check) {
    JumpIfPointersFromHereAreInteresting(elements, &needs_barrier);
  }

  const TNode<IntPtrT> source_byte_length =
      IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
  static const int32_t fa_base_data_offset =
      FixedArrayBase::kHeaderSize - kHeapObjectTag;
  TNode<IntPtrT> elements_intptr = BitcastTaggedToWord(elements);
  TNode<IntPtrT> target_data_ptr =
      IntPtrAdd(elements_intptr,
                ElementOffsetFromIndex(dst_index, kind, INTPTR_PARAMETERS,
                                       fa_base_data_offset));
  TNode<IntPtrT> source_data_ptr =
      IntPtrAdd(elements_intptr,
                ElementOffsetFromIndex(src_index, kind, INTPTR_PARAMETERS,
                                       fa_base_data_offset));
  // memmove (not memcpy) because source and destination may overlap.
  TNode<ExternalReference> memmove =
      ExternalConstant(ExternalReference::libc_memmove_function());
  CallCFunction(memmove, MachineType::Pointer(),
                std::make_pair(MachineType::Pointer(), target_data_ptr),
                std::make_pair(MachineType::Pointer(), source_data_ptr),
                std::make_pair(MachineType::UintPtr(), source_byte_length));

  if (needs_barrier_check) {
    Goto(&finished);

    // Slow path: copy one element at a time with full write barriers.
    BIND(&needs_barrier);
    {
      const TNode<IntPtrT> begin = src_index;
      const TNode<IntPtrT> end = IntPtrAdd(begin, length);

      // If dst_index is less than src_index, then walk forward.
      const TNode<IntPtrT> delta =
          IntPtrMul(IntPtrSub(dst_index, begin),
                    IntPtrConstant(ElementsKindToByteSize(kind)));
      // Loads from the source offset and stores at source offset + delta,
      // i.e. at the corresponding destination slot.
      auto loop_body = [&](Node* array, Node* offset) {
        Node* const element = Load(MachineType::AnyTagged(), array, offset);
        Node* const delta_offset = IntPtrAdd(offset, delta);
        Store(array, delta_offset, element);
      };

      Label iterate_forward(this);
      Label iterate_backward(this);
      // Direction is chosen so the destination never overwrites not-yet-read
      // source elements: move-down walks forward, move-up walks backward.
      Branch(IntPtrLessThan(delta, IntPtrConstant(0)), &iterate_forward,
             &iterate_backward);
      BIND(&iterate_forward);
      {
        // Make a loop for the stores.
        BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
                                   INTPTR_PARAMETERS,
                                   ForEachDirection::kForward);
        Goto(&finished);
      }

      BIND(&iterate_backward);
      {
        BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
                                   INTPTR_PARAMETERS,
                                   ForEachDirection::kReverse);
        Goto(&finished);
      }
    }
    BIND(&finished);
  }
}
4746 :
// Copies {length} elements from {src_elements}[{src_index}...] into
// {dst_elements}[{dst_index}...]. The two arrays must be distinct (or the
// length zero). Fast path: raw libc memcpy of the backing stores. If the
// destination may need write barriers, falls back to an element-wise copy
// honoring {write_barrier}.
void CodeStubAssembler::CopyElements(ElementsKind kind,
                                     TNode<FixedArrayBase> dst_elements,
                                     TNode<IntPtrT> dst_index,
                                     TNode<FixedArrayBase> src_elements,
                                     TNode<IntPtrT> src_index,
                                     TNode<IntPtrT> length,
                                     WriteBarrierMode write_barrier) {
  Label finished(this);
  Label needs_barrier(this);
  // Double arrays hold raw float64s, so they can never need a barrier.
  const bool needs_barrier_check = !IsDoubleElementsKind(kind);

  DCHECK(IsFastElementsKind(kind));
  CSA_ASSERT(this, IsFixedArrayWithKind(dst_elements, kind));
  CSA_ASSERT(this, IsFixedArrayWithKind(src_elements, kind));
  CSA_ASSERT(this, IntPtrLessThanOrEqual(
                       IntPtrAdd(dst_index, length),
                       LoadAndUntagFixedArrayBaseLength(dst_elements)));
  CSA_ASSERT(this, IntPtrLessThanOrEqual(
                       IntPtrAdd(src_index, length),
                       LoadAndUntagFixedArrayBaseLength(src_elements)));
  // memcpy below requires non-overlapping buffers: arrays must differ unless
  // nothing is copied.
  CSA_ASSERT(this, Word32Or(WordNotEqual(dst_elements, src_elements),
                            WordEqual(length, IntPtrConstant(0))));

  // The write barrier can be ignored if {dst_elements} is in new space, or if
  // the elements pointer is FixedDoubleArray.
  if (needs_barrier_check) {
    JumpIfPointersFromHereAreInteresting(dst_elements, &needs_barrier);
  }

  TNode<IntPtrT> source_byte_length =
      IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
  static const int32_t fa_base_data_offset =
      FixedArrayBase::kHeaderSize - kHeapObjectTag;
  TNode<IntPtrT> src_offset_start = ElementOffsetFromIndex(
      src_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
  TNode<IntPtrT> dst_offset_start = ElementOffsetFromIndex(
      dst_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
  TNode<IntPtrT> src_elements_intptr = BitcastTaggedToWord(src_elements);
  TNode<IntPtrT> source_data_ptr =
      IntPtrAdd(src_elements_intptr, src_offset_start);
  TNode<IntPtrT> dst_elements_intptr = BitcastTaggedToWord(dst_elements);
  TNode<IntPtrT> dst_data_ptr =
      IntPtrAdd(dst_elements_intptr, dst_offset_start);
  TNode<ExternalReference> memcpy =
      ExternalConstant(ExternalReference::libc_memcpy_function());
  CallCFunction(memcpy, MachineType::Pointer(),
                std::make_pair(MachineType::Pointer(), dst_data_ptr),
                std::make_pair(MachineType::Pointer(), source_data_ptr),
                std::make_pair(MachineType::UintPtr(), source_byte_length));

  if (needs_barrier_check) {
    Goto(&finished);

    // Slow path: element-wise copy; {delta} maps a source offset to the
    // corresponding destination offset.
    BIND(&needs_barrier);
    {
      const TNode<IntPtrT> begin = src_index;
      const TNode<IntPtrT> end = IntPtrAdd(begin, length);
      const TNode<IntPtrT> delta =
          IntPtrMul(IntPtrSub(dst_index, src_index),
                    IntPtrConstant(ElementsKindToByteSize(kind)));
      BuildFastFixedArrayForEach(
          src_elements, kind, begin, end,
          [&](Node* array, Node* offset) {
            Node* const element = Load(MachineType::AnyTagged(), array, offset);
            Node* const delta_offset = IntPtrAdd(offset, delta);
            if (write_barrier == SKIP_WRITE_BARRIER) {
              StoreNoWriteBarrier(MachineRepresentation::kTagged, dst_elements,
                                  delta_offset, element);
            } else {
              Store(dst_elements, delta_offset, element);
            }
          },
          INTPTR_PARAMETERS, ForEachDirection::kForward);
      Goto(&finished);
    }
    BIND(&finished);
  }
}
4825 :
// Copies {element_count} elements starting at {first_element} from
// {from_array} into {to_array} (which has {capacity} slots), converting
// element representation between {from_kind} and {to_kind} as needed
// (Smi/tagged <-> unboxed double). Holes in the source are handled according
// to {convert_holes}: kConvertToUndefined writes undefined and, if
// {var_holes_converted} is given, records that at least one hole was seen.
// Uncopied slots [element_count, capacity) are pre-filled with the hole.
// The copy loop walks backwards from the end of the source range.
void CodeStubAssembler::CopyFixedArrayElements(
    ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
    Node* to_array, Node* first_element, Node* element_count, Node* capacity,
    WriteBarrierMode barrier_mode, ParameterMode mode,
    HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted) {
  DCHECK_IMPLIES(var_holes_converted != nullptr,
                 convert_holes == HoleConversionMode::kConvertToUndefined);
  CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
  Comment("[ CopyFixedArrayElements");

  // Typed array elements are not supported.
  DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
  DCHECK(!IsFixedTypedArrayElementsKind(to_kind));

  Label done(this);
  bool from_double_elements = IsDoubleElementsKind(from_kind);
  bool to_double_elements = IsDoubleElementsKind(to_kind);
  // double -> object requires allocating HeapNumbers, hence write barriers.
  bool doubles_to_objects_conversion =
      IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
  bool needs_write_barrier =
      doubles_to_objects_conversion ||
      (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
  // When source and target element sizes agree, a single offset variable can
  // drive both arrays.
  bool element_offset_matches =
      !needs_write_barrier &&
      (kTaggedSize == kDoubleSize ||
       IsDoubleElementsKind(from_kind) == IsDoubleElementsKind(to_kind));
  Node* double_hole =
      Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
             : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));

  // If copying might trigger a GC, we pre-initialize the FixedArray such that
  // it's always in a consistent state.
  if (convert_holes == HoleConversionMode::kConvertToUndefined) {
    DCHECK(IsObjectElementsKind(to_kind));
    // Use undefined for the part that we copy and holes for the rest.
    // Later if we run into a hole in the source we can just skip the writing
    // to the target and are still guaranteed that we get an undefined.
    FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
                            element_count, RootIndex::kUndefinedValue, mode);
    FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
                            RootIndex::kTheHoleValue, mode);
  } else if (doubles_to_objects_conversion) {
    // Pre-initialized the target with holes so later if we run into a hole in
    // the source we can just skip the writing to the target.
    FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
                            capacity, RootIndex::kTheHoleValue, mode);
  } else if (element_count != capacity) {
    FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
                            RootIndex::kTheHoleValue, mode);
  }

  // The loop runs from the offset just past the last source element down to
  // {limit_offset} (the offset of {first_element}).
  Node* first_from_element_offset =
      ElementOffsetFromIndex(first_element, from_kind, mode, 0);
  Node* limit_offset = IntPtrAdd(first_from_element_offset,
                                 IntPtrConstant(first_element_offset));
  VARIABLE(
      var_from_offset, MachineType::PointerRepresentation(),
      ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count, mode),
                             from_kind, mode, first_element_offset));
  // This second variable is used only when the element sizes of source and
  // destination arrays do not match.
  VARIABLE(var_to_offset, MachineType::PointerRepresentation());
  if (element_offset_matches) {
    var_to_offset.Bind(var_from_offset.value());
  } else {
    var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
                                              first_element_offset));
  }

  Variable* vars[] = {&var_from_offset, &var_to_offset, var_holes_converted};
  int num_vars =
      var_holes_converted != nullptr ? arraysize(vars) : arraysize(vars) - 1;
  Label decrement(this, num_vars, vars);

  // When offsets match, pre-bias the target pointer so the shared source
  // offset (which includes first_element's offset) indexes it correctly.
  Node* to_array_adjusted =
      element_offset_matches
          ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
          : to_array;

  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);

  BIND(&decrement);
  {
    // Step both offsets back by one element (element sizes may differ).
    Node* from_offset = IntPtrSub(
        var_from_offset.value(),
        IntPtrConstant(from_double_elements ? kDoubleSize : kTaggedSize));
    var_from_offset.Bind(from_offset);

    Node* to_offset;
    if (element_offset_matches) {
      to_offset = from_offset;
    } else {
      to_offset = IntPtrSub(
          var_to_offset.value(),
          IntPtrConstant(to_double_elements ? kDoubleSize : kTaggedSize));
      var_to_offset.Bind(to_offset);
    }

    Label next_iter(this), store_double_hole(this), signal_hole(this);
    Label* if_hole;
    if (convert_holes == HoleConversionMode::kConvertToUndefined) {
      // The target elements array is already preinitialized with undefined
      // so we only need to signal that a hole was found and continue the loop.
      if_hole = &signal_hole;
    } else if (doubles_to_objects_conversion) {
      // The target elements array is already preinitialized with holes, so we
      // can just proceed with the next iteration.
      if_hole = &next_iter;
    } else if (IsDoubleElementsKind(to_kind)) {
      if_hole = &store_double_hole;
    } else {
      // In all the other cases don't check for holes and copy the data as is.
      if_hole = nullptr;
    }

    Node* value = LoadElementAndPrepareForStore(
        from_array, var_from_offset.value(), from_kind, to_kind, if_hole);

    if (needs_write_barrier) {
      CHECK_EQ(to_array, to_array_adjusted);
      Store(to_array_adjusted, to_offset, value);
    } else if (to_double_elements) {
      StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
                          to_offset, value);
    } else {
      StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array_adjusted,
                          to_offset, value);
    }
    Goto(&next_iter);

    if (if_hole == &store_double_hole) {
      BIND(&store_double_hole);
      // Don't use doubles to store the hole double, since manipulating the
      // signaling NaN used for the hole in C++, e.g. with bit_cast, will
      // change its value on ia32 (the x87 stack is used to return values
      // and stores to the stack silently clear the signalling bit).
      //
      // TODO(danno): When we have a Float32/Float64 wrapper class that
      // preserves double bits during manipulation, remove this code/change
      // this to an indexed Float64 store.
      if (Is64()) {
        StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
                            to_offset, double_hole);
      } else {
        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
                            to_offset, double_hole);
        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
                            IntPtrAdd(to_offset, IntPtrConstant(kInt32Size)),
                            double_hole);
      }
      Goto(&next_iter);
    } else if (if_hole == &signal_hole) {
      // This case happens only when IsObjectElementsKind(to_kind).
      BIND(&signal_hole);
      if (var_holes_converted != nullptr) {
        *var_holes_converted = Int32TrueConstant();
      }
      Goto(&next_iter);
    }

    BIND(&next_iter);
    Node* compare = WordNotEqual(from_offset, limit_offset);
    Branch(compare, &decrement, &done);
  }

  BIND(&done);
  Comment("] CopyFixedArrayElements");
}
4999 :
5000 1312 : TNode<FixedArray> CodeStubAssembler::HeapObjectToFixedArray(
5001 : TNode<HeapObject> base, Label* cast_fail) {
5002 2624 : Label fixed_array(this);
5003 : TNode<Map> map = LoadMap(base);
5004 2624 : GotoIf(WordEqual(map, LoadRoot(RootIndex::kFixedArrayMap)), &fixed_array);
5005 2624 : GotoIf(WordNotEqual(map, LoadRoot(RootIndex::kFixedCOWArrayMap)), cast_fail);
5006 1312 : Goto(&fixed_array);
5007 : BIND(&fixed_array);
5008 1312 : return UncheckedCast<FixedArray>(base);
5009 : }
5010 :
// Copies {property_count} values from the PropertyArray (or empty FixedArray)
// {from_array} into PropertyArray {to_array}. When {destroy_source} is kNo,
// MutableHeapNumbers are cloned (the copy must not share mutable boxes) and a
// write barrier is forced; when kYes, the source is consumed and, in debug
// builds, zapped with undefined afterwards.
void CodeStubAssembler::CopyPropertyArrayValues(Node* from_array,
                                                Node* to_array,
                                                Node* property_count,
                                                WriteBarrierMode barrier_mode,
                                                ParameterMode mode,
                                                DestroySource destroy_source) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(property_count, mode));
  CSA_SLOW_ASSERT(this, Word32Or(IsPropertyArray(from_array),
                                 IsEmptyFixedArray(from_array)));
  CSA_SLOW_ASSERT(this, IsPropertyArray(to_array));
  Comment("[ CopyPropertyArrayValues");

  bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;

  if (destroy_source == DestroySource::kNo) {
    // PropertyArray may contain MutableHeapNumbers, which will be cloned on the
    // heap, requiring a write barrier.
    needs_write_barrier = true;
  }

  Node* start = IntPtrOrSmiConstant(0, mode);
  // PropertyArrays store tagged values; iterate with PACKED_ELEMENTS layout.
  ElementsKind kind = PACKED_ELEMENTS;
  BuildFastFixedArrayForEach(
      from_array, kind, start, property_count,
      [this, to_array, needs_write_barrier, destroy_source](Node* array,
                                                            Node* offset) {
        Node* value = Load(MachineType::AnyTagged(), array, offset);

        if (destroy_source == DestroySource::kNo) {
          value = CloneIfMutablePrimitive(CAST(value));
        }

        if (needs_write_barrier) {
          Store(to_array, offset, value);
        } else {
          StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
                              value);
        }
      },
      mode);

#ifdef DEBUG
  // Zap {from_array} if the copying above has made it invalid.
  if (destroy_source == DestroySource::kYes) {
    Label did_zap(this);
    GotoIf(IsEmptyFixedArray(from_array), &did_zap);
    FillPropertyArrayWithUndefined(from_array, start, property_count, mode);

    Goto(&did_zap);
    BIND(&did_zap);
  }
#endif
  Comment("] CopyPropertyArrayValues");
}
5065 :
// Copies {character_count} characters from {from_string} starting at
// {from_index} into {to_string} starting at {to_index}, converting between
// one-byte and two-byte encodings as dictated by {from_encoding} /
// {to_encoding}. Two-byte -> one-byte is disallowed (DCHECK below). When the
// encodings and start indices are provably identical, a single offset
// variable drives both strings; otherwise a separate target offset is kept.
void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
                                             TNode<IntPtrT> from_index,
                                             TNode<IntPtrT> to_index,
                                             TNode<IntPtrT> character_count,
                                             String::Encoding from_encoding,
                                             String::Encoding to_encoding) {
  // Cannot assert IsString(from_string) and IsString(to_string) here because
  // CSA::SubString can pass in faked sequential strings when handling external
  // subject strings.
  bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
  bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
  DCHECK_IMPLIES(to_one_byte, from_one_byte);
  Comment("CopyStringCharacters ",
          from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", " -> ",
          to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");

  // Reuse the fixed-array iteration machinery by treating characters as
  // uint8/uint16 elements.
  ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
  ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
  Node* from_offset = ElementOffsetFromIndex(from_index, from_kind,
                                             INTPTR_PARAMETERS, header_size);
  Node* to_offset =
      ElementOffsetFromIndex(to_index, to_kind, INTPTR_PARAMETERS, header_size);
  Node* byte_count =
      ElementOffsetFromIndex(character_count, from_kind, INTPTR_PARAMETERS);
  Node* limit_offset = IntPtrAdd(from_offset, byte_count);

  // Prepare the fast loop
  MachineType type =
      from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
  MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
                                          : MachineRepresentation::kWord16;
  int from_increment = 1 << ElementsKindToShiftSize(from_kind);
  int to_increment = 1 << ElementsKindToShiftSize(to_kind);

  VARIABLE(current_to_offset, MachineType::PointerRepresentation(), to_offset);
  VariableList vars({&current_to_offset}, zone());
  int to_index_constant = 0, from_index_constant = 0;
  // {index_same}: source and target offsets advance in lockstep, so the loop
  // can use one offset for both and skip updating {current_to_offset}.
  bool index_same = (from_encoding == to_encoding) &&
                    (from_index == to_index ||
                     (ToInt32Constant(from_index, from_index_constant) &&
                      ToInt32Constant(to_index, to_index_constant) &&
                      from_index_constant == to_index_constant));
  BuildFastLoop(vars, from_offset, limit_offset,
                [this, from_string, to_string, &current_to_offset, to_increment,
                 type, rep, index_same](Node* offset) {
                  Node* value = Load(type, from_string, offset);
                  StoreNoWriteBarrier(
                      rep, to_string,
                      index_same ? offset : current_to_offset.value(), value);
                  if (!index_same) {
                    Increment(&current_to_offset, to_increment);
                  }
                },
                from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
}
5123 :
// Loads the element at {offset} from {array} (of {from_kind}) and converts
// it to the representation required by {to_kind}: boxes raw doubles into
// HeapNumbers, or unboxes Smis/HeapNumbers into raw float64s. If {if_hole}
// is non-null, jumps there when the loaded element is the hole.
Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
                                                       Node* offset,
                                                       ElementsKind from_kind,
                                                       ElementsKind to_kind,
                                                       Label* if_hole) {
  CSA_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
  if (IsDoubleElementsKind(from_kind)) {
    // Source holds raw doubles; hole detection is by NaN bit pattern.
    Node* value =
        LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
    if (!IsDoubleElementsKind(to_kind)) {
      // Target holds tagged values: box the double (may allocate).
      value = AllocateHeapNumberWithValue(value);
    }
    return value;

  } else {
    // Source holds tagged values; hole detection is by identity with the
    // the-hole sentinel.
    Node* value = Load(MachineType::AnyTagged(), array, offset);
    if (if_hole) {
      GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
    }
    if (IsDoubleElementsKind(to_kind)) {
      // Target holds raw doubles: unbox a Smi or HeapNumber.
      if (IsSmiElementsKind(from_kind)) {
        value = SmiToFloat64(value);
      } else {
        value = LoadHeapNumberValue(value);
      }
    }
    return value;
  }
}
5153 :
5154 2676 : Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
5155 : ParameterMode mode) {
5156 : CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
5157 2676 : Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
5158 2676 : Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
5159 : Node* padding =
5160 2676 : IntPtrOrSmiConstant(JSObject::kMinAddedElementsCapacity, mode);
5161 2676 : return IntPtrOrSmiAdd(new_capacity, padding, mode);
5162 : }
5163 :
5164 112 : Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
5165 : ElementsKind kind, Node* key,
5166 : Label* bailout) {
5167 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
5168 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
5169 : CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
5170 : Node* capacity = LoadFixedArrayBaseLength(elements);
5171 :
5172 : ParameterMode mode = OptimalParameterMode();
5173 : capacity = TaggedToParameter(capacity, mode);
5174 : key = TaggedToParameter(key, mode);
5175 :
5176 112 : return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
5177 112 : bailout);
5178 : }
5179 :
// Grows {object}'s elements backing store (currently {capacity} slots of
// {kind}) so it can hold an element at {key}. Bails out to {bailout} if the
// growth would leave a gap larger than JSObject::kMaxGap past the current
// capacity. Returns the new elements array.
Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
                                                 ElementsKind kind, Node* key,
                                                 Node* capacity,
                                                 ParameterMode mode,
                                                 Label* bailout) {
  Comment("TryGrowElementsCapacity");
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));

  // If the gap growth is too big, fall back to the runtime.
  Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
  Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);

  // Calculate the capacity of the new backing store.
  // Sized from key+1 so the grown store definitely covers index {key}.
  Node* new_capacity = CalculateNewElementsCapacity(
      IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
  return GrowElementsCapacity(object, elements, kind, kind, capacity,
                              new_capacity, mode, bailout);
}
5202 :
// Allocates a new elements backing store of {new_capacity} slots of
// {to_kind}, copies the existing {capacity} elements over (converting from
// {from_kind} if necessary), installs it on {object}, and returns it.
// Bails out to {bailout} if the allocation would exceed the new-space
// bump-pointer limit.
Node* CodeStubAssembler::GrowElementsCapacity(
    Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
    Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
  Comment("[ GrowElementsCapacity");
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));

  // If size of the allocation for the new capacity doesn't fit in a page
  // that we can bump-pointer allocate from, fall back to the runtime.
  int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(
             new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
         bailout);

  // Allocate the new backing store.
  Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);

  // Copy the elements from the old elements store to the new.
  // The size-check above guarantees that the |new_elements| is allocated
  // in new space so we can skip the write barrier.
  CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
                         new_capacity, SKIP_WRITE_BARRIER, mode);

  StoreObjectField(object, JSObject::kElementsOffset, new_elements);
  Comment("] GrowElementsCapacity");
  return new_elements;
}
5232 :
// Writes an AllocationMemento immediately after {base} (at offset
// {base_allocation_size}) pointing at {allocation_site}, and, when
// allocation-site pretenuring is enabled, bumps the site's create count.
void CodeStubAssembler::InitializeAllocationMemento(Node* base,
                                                    Node* base_allocation_size,
                                                    Node* allocation_site) {
  Comment("[Initialize AllocationMemento");
  // The memento lives directly behind the object inside the same allocation.
  TNode<Object> memento =
      InnerAllocate(CAST(base), UncheckedCast<IntPtrT>(base_allocation_size));
  StoreMapNoWriteBarrier(memento, RootIndex::kAllocationMementoMap);
  StoreObjectFieldNoWriteBarrier(
      memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    // Count mementos created for this site; pretenuring decisions read this.
    TNode<Int32T> count = UncheckedCast<Int32T>(LoadObjectField(
        allocation_site, AllocationSite::kPretenureCreateCountOffset,
        MachineType::Int32()));

    TNode<Int32T> incremented_count = Int32Add(count, Int32Constant(1));
    StoreObjectFieldNoWriteBarrier(
        allocation_site, AllocationSite::kPretenureCreateCountOffset,
        incremented_count, MachineRepresentation::kWord32);
  }
  Comment("]");
}
5254 :
// Converts tagged {value} to a float64 if it is a Smi or HeapNumber;
// otherwise jumps to {if_valueisnotnumber}. Returns the float64 result node.
Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
                                            Label* if_valueisnotnumber) {
  Label out(this);
  VARIABLE(var_result, MachineRepresentation::kFloat64);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this), if_valueisnotsmi(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  BIND(&if_valueissmi);
  {
    // Convert the Smi {value}.
    var_result.Bind(SmiToFloat64(value));
    Goto(&out);
  }

  BIND(&if_valueisnotsmi);
  {
    // Check if {value} is a HeapNumber.
    Label if_valueisheapnumber(this);
    Branch(IsHeapNumber(value), &if_valueisheapnumber, if_valueisnotnumber);

    BIND(&if_valueisheapnumber);
    {
      // Load the floating point value.
      var_result.Bind(LoadHeapNumberValue(value));
      Goto(&out);
    }
  }
  BIND(&out);
  return var_result.value();
}
5287 :
 : // Converts an arbitrary tagged {value} to a Float64. Non-number inputs
 : // are run through the NonNumberToNumber builtin and the result fed back
 : // into the loop until TryTaggedToFloat64 succeeds.
5288 1680 : Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
5289 : // We might need to loop once due to ToNumber conversion.
5290 3360 : VARIABLE(var_value, MachineRepresentation::kTagged);
5291 3360 : VARIABLE(var_result, MachineRepresentation::kFloat64);
5292 1680 : Label loop(this, &var_value), done_loop(this, &var_result);
5293 1680 : var_value.Bind(value);
5294 1680 : Goto(&loop);
5295 : BIND(&loop);
5296 : {
5297 1680 : Label if_valueisnotnumber(this, Label::kDeferred);
5298 :
5299 : // Load the current {value}.
5300 1680 : value = var_value.value();
5301 :
5302 : // Convert {value} to Float64 if it is a number and convert it to a number
5303 : // otherwise.
5304 1680 : Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
5305 1680 : var_result.Bind(result);
5306 1680 : Goto(&done_loop);
5307 :
5308 : BIND(&if_valueisnotnumber);
5309 : {
5310 : // Convert the {value} to a Number first.
5311 3360 : var_value.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, value));
5312 1680 : Goto(&loop);
5313 : }
5314 : }
5315 : BIND(&done_loop);
5316 3360 : return var_result.value();
5317 : }
5318 :
 : // ToNumber-converts {value} and truncates it to a word32. Uses the
 : // kToNumber instantiation of TaggedToWord32OrBigIntImpl, so there is no
 : // BigInt path and no feedback is collected.
5319 1400 : Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
5320 2800 : VARIABLE(var_result, MachineRepresentation::kWord32);
5321 1400 : Label done(this);
5322 : TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumber>(context, value,
5323 1400 : &done, &var_result);
5324 : BIND(&done);
5325 2800 : return var_result.value();
5326 : }
5327 :
5328 : // Truncate {value} to word32 and jump to {if_number} if it is a Number,
5329 : // or find that it is a BigInt and jump to {if_bigint}.
 : // Thin wrapper over the kToNumeric instantiation of
 : // TaggedToWord32OrBigIntImpl; collects no type feedback.
5330 672 : void CodeStubAssembler::TaggedToWord32OrBigInt(Node* context, Node* value,
5331 : Label* if_number,
5332 : Variable* var_word32,
5333 : Label* if_bigint,
5334 : Variable* var_bigint) {
5335 : TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
5336 672 : context, value, if_number, var_word32, if_bigint, var_bigint);
5337 672 : }
5338 :
5339 : // Truncate {value} to word32 and jump to {if_number} if it is a Number,
5340 : // or find that it is a BigInt and jump to {if_bigint}. In either case,
5341 : // store the type feedback in {var_feedback}.
 : // Same as TaggedToWord32OrBigInt, but additionally records binary-op
 : // type feedback into {var_feedback}.
5342 3192 : void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
5343 : Node* context, Node* value, Label* if_number, Variable* var_word32,
5344 : Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
5345 : TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
5346 : context, value, if_number, var_word32, if_bigint, var_bigint,
5347 3192 : var_feedback);
5348 3192 : }
5349 :
 : // Shared implementation for the Tagged->word32 truncations above.
 : // Truncates {value} to word32 in {var_word32} and jumps to {if_number};
 : // for conversion == kToNumeric it instead recognizes BigInts, binding
 : // {var_bigint} and jumping to {if_bigint}. Non-numeric inputs are
 : // converted (Oddball via its cached ToNumber value, everything else via
 : // the NonNumberToNumber/NonNumberToNumeric builtin) and re-processed in
 : // a loop. If {var_feedback} is non-null, BinaryOperationFeedback is
 : // accumulated along the way.
5350 : template <Object::Conversion conversion>
5351 5264 : void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
5352 : Node* context, Node* value, Label* if_number, Variable* var_word32,
5353 : Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
5354 : DCHECK(var_word32->rep() == MachineRepresentation::kWord32);
5355 : DCHECK(var_bigint == nullptr ||
5356 : var_bigint->rep() == MachineRepresentation::kTagged);
5357 : DCHECK(var_feedback == nullptr ||
5358 : var_feedback->rep() == MachineRepresentation::kTaggedSigned);
5359 :
5360 : // We might need to loop after conversion.
5361 10528 : VARIABLE(var_value, MachineRepresentation::kTagged, value);
5362 5264 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNone);
5363 5264 : Variable* loop_vars[] = {&var_value, var_feedback};
5364 : int num_vars =
5365 5264 : var_feedback != nullptr ? arraysize(loop_vars) : arraysize(loop_vars) - 1;
5366 10528 : Label loop(this, num_vars, loop_vars);
5367 5264 : Goto(&loop);
5368 : BIND(&loop);
5369 : {
5370 5264 : value = var_value.value();
5371 5264 : Label not_smi(this), is_heap_number(this), is_oddball(this),
5372 5264 : is_bigint(this);
5373 10528 : GotoIf(TaggedIsNotSmi(value), &not_smi);
5374 :
5375 : // {value} is a Smi.
5376 10528 : var_word32->Bind(SmiToInt32(value));
5377 5264 : CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
5378 5264 : Goto(if_number);
5379 :
5380 : BIND(&not_smi);
5381 : Node* map = LoadMap(value);
5382 10528 : GotoIf(IsHeapNumberMap(map), &is_heap_number);
5383 : Node* instance_type = LoadMapInstanceType(map);
5384 : if (conversion == Object::Conversion::kToNumeric) {
5385 3864 : GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
5386 : }
5387 :
5388 : // Not HeapNumber (or BigInt if conversion == kToNumeric).
5389 : {
5390 : if (var_feedback != nullptr) {
5391 : // We do not require an Or with earlier feedback here because once we
5392 : // convert the value to a Numeric, we cannot reach this path. We can
5393 : // only reach this path on the first pass when the feedback is kNone.
5394 : CSA_ASSERT(this, SmiEqual(CAST(var_feedback->value()),
5395 : SmiConstant(BinaryOperationFeedback::kNone)));
5396 : }
5397 10528 : GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
5398 : // Not an oddball either -> convert.
5399 : auto builtin = conversion == Object::Conversion::kToNumeric
5400 : ? Builtins::kNonNumberToNumeric
5401 : : Builtins::kNonNumberToNumber;
5402 10528 : var_value.Bind(CallBuiltin(builtin, context, value));
5403 5264 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
5404 5264 : Goto(&loop);
5405 :
5406 : BIND(&is_oddball);
 : // Oddballs (undefined/null/true/false) carry a precomputed ToNumber
 : // value in a fixed field; no builtin call is needed.
5407 5264 : var_value.Bind(LoadObjectField(value, Oddball::kToNumberOffset));
5408 5264 : OverwriteFeedback(var_feedback,
5409 : BinaryOperationFeedback::kNumberOrOddball);
5410 5264 : Goto(&loop);
5411 : }
5412 :
5413 : BIND(&is_heap_number);
5414 5264 : var_word32->Bind(TruncateHeapNumberValueToWord32(value));
5415 5264 : CombineFeedback(var_feedback, BinaryOperationFeedback::kNumber);
5416 5264 : Goto(if_number);
5417 :
5418 : if (conversion == Object::Conversion::kToNumeric) {
5419 : BIND(&is_bigint);
5420 3864 : var_bigint->Bind(value);
5421 3864 : CombineFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
5422 3864 : Goto(if_bigint);
5423 : }
5424 : }
5425 5264 : }
5426 :
 : // Loads the float64 payload of a HeapNumber and truncates it to word32.
5427 5320 : Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
5428 : Node* value = LoadHeapNumberValue(object);
5429 10640 : return TruncateFloat64ToWord32(value);
5430 : }
5431 :
 : // Attempts to convert {number}'s float64 value to a Smi; on success
 : // binds {var_result_smi} and jumps to {if_smi}, otherwise falls through
 : // (see TryFloat64ToSmi for the exact fallthrough contract).
5432 340 : void CodeStubAssembler::TryHeapNumberToSmi(TNode<HeapNumber> number,
5433 : TVariable<Smi>& var_result_smi,
5434 : Label* if_smi) {
5435 340 : TNode<Float64T> value = LoadHeapNumberValue(number);
5436 340 : TryFloat64ToSmi(value, var_result_smi, if_smi);
5437 340 : }
5438 :
 : // Attempts to convert {value} to a Smi without losing information:
 : // jumps to {if_smi} with {var_result_smi} bound when {value} is an
 : // integral float in Smi range (and not -0.0); otherwise FALLS THROUGH
 : // at the end of this function (the heap-number path) without binding
 : // anything. Callers rely on that fallthrough.
5439 5672 : void CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value,
5440 : TVariable<Smi>& var_result_smi,
5441 : Label* if_smi) {
5442 5672 : TNode<Int32T> value32 = RoundFloat64ToInt32(value);
5443 5672 : TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
5444 :
5445 5672 : Label if_int32(this), if_heap_number(this, Label::kDeferred);
5446 :
 : // Round-trip check: non-integral or out-of-int32-range values differ
 : // after int32 round-trip and take the heap-number path.
5447 11344 : GotoIfNot(Float64Equal(value, value64), &if_heap_number);
 : // value32 == 0 may be -0.0: inspect the sign bit of the original
 : // float64 to keep -0.0 out of the Smi path.
5448 17016 : GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
5449 22688 : Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
5450 17016 : Int32Constant(0)),
5451 5672 : &if_heap_number, &if_int32);
5452 :
 : // NOTE(review): var_result below is never read or written in this
 : // function — looks like dead code; confirm before removing.
5453 : TVARIABLE(Number, var_result);
5454 : BIND(&if_int32);
5455 : {
5456 : if (SmiValuesAre32Bits()) {
5457 17016 : var_result_smi = SmiTag(ChangeInt32ToIntPtr(value32));
5458 : } else {
5459 : DCHECK(SmiValuesAre31Bits());
 : // 31-bit Smis: value + value doubles as an overflow-checked shl 1.
5460 : TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
5461 : TNode<BoolT> overflow = Projection<1>(pair);
5462 : GotoIf(overflow, &if_heap_number);
5463 : var_result_smi =
5464 : BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
5465 : }
5466 5672 : Goto(if_smi);
5467 : }
5468 : BIND(&if_heap_number);
5469 5672 : }
5470 :
 : // Boxes a Float64 as a Number: Smi when the value fits (per
 : // TryFloat64ToSmi), otherwise a freshly allocated HeapNumber.
5471 5332 : TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
5472 : SloppyTNode<Float64T> value) {
5473 10664 : Label if_smi(this), done(this);
5474 : TVARIABLE(Smi, var_smi_result);
5475 : TVARIABLE(Number, var_result);
5476 5332 : TryFloat64ToSmi(value, var_smi_result, &if_smi);
5477 :
 : // TryFloat64ToSmi falls through here on the heap-number path.
5478 10664 : var_result = AllocateHeapNumberWithValue(value);
5479 5332 : Goto(&done);
5480 :
5481 : BIND(&if_smi);
5482 : {
5483 : var_result = var_smi_result.value();
5484 5332 : Goto(&done);
5485 : }
5486 : BIND(&done);
5487 5332 : return var_result.value();
5488 : }
5489 :
 : // Boxes an Int32 as a Number. With 32-bit Smis every int32 fits and is
 : // tagged directly; with 31-bit Smis, value + value performs an
 : // overflow-checked tag, and overflowing values get a HeapNumber.
5490 4984 : TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(
5491 : SloppyTNode<Int32T> value) {
5492 : if (SmiValuesAre32Bits()) {
5493 14952 : return SmiTag(ChangeInt32ToIntPtr(value));
5494 : }
5495 : DCHECK(SmiValuesAre31Bits());
5496 : TVARIABLE(Number, var_result);
5497 : TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
5498 : TNode<BoolT> overflow = Projection<1>(pair);
5499 : Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
5500 : if_join(this);
5501 : Branch(overflow, &if_overflow, &if_notoverflow);
5502 : BIND(&if_overflow);
5503 : {
5504 : TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
5505 : TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
5506 : var_result = result;
5507 : Goto(&if_join);
5508 : }
5509 : BIND(&if_notoverflow);
5510 : {
5511 : TNode<IntPtrT> almost_tagged_value =
5512 : ChangeInt32ToIntPtr(Projection<0>(pair));
5513 : TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
5514 : var_result = result;
5515 : Goto(&if_join);
5516 : }
5517 : BIND(&if_join);
5518 : return var_result.value();
5519 : }
5520 :
 : // Boxes a Uint32 as a Number: Smi when <= Smi::kMaxValue, otherwise a
 : // HeapNumber (values above 2^31 - 1 cannot be represented as Smis).
5521 4144 : TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(
5522 : SloppyTNode<Uint32T> value) {
5523 8288 : Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
5524 4144 : if_join(this);
5525 : TVARIABLE(Number, var_result);
5526 : // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
5527 8288 : Branch(Uint32LessThan(Uint32Constant(Smi::kMaxValue), value), &if_overflow,
5528 4144 : &if_not_overflow);
5529 :
5530 : BIND(&if_not_overflow);
5531 : {
5532 : // The {value} is definitely in valid Smi range.
5533 12432 : var_result = SmiTag(Signed(ChangeUint32ToWord(value)));
5534 : }
5535 4144 : Goto(&if_join);
5536 :
5537 : BIND(&if_overflow);
5538 : {
5539 4144 : TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
5540 8288 : var_result = AllocateHeapNumberWithValue(float64_value);
5541 : }
5542 4144 : Goto(&if_join);
5543 :
5544 : BIND(&if_join);
5545 4144 : return var_result.value();
5546 : }
5547 :
 : // Boxes a UintPtr as a Number: Smi when <= Smi::kMaxValue, otherwise a
 : // HeapNumber. Pointer-width analogue of ChangeUint32ToTagged.
5548 616 : TNode<Number> CodeStubAssembler::ChangeUintPtrToTagged(TNode<UintPtrT> value) {
5549 1232 : Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
5550 616 : if_join(this);
5551 : TVARIABLE(Number, var_result);
5552 : // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
5553 1232 : Branch(UintPtrLessThan(UintPtrConstant(Smi::kMaxValue), value), &if_overflow,
5554 616 : &if_not_overflow);
5555 :
5556 : BIND(&if_not_overflow);
5557 : {
5558 : // The {value} is definitely in valid Smi range.
5559 1232 : var_result = SmiTag(Signed(value));
5560 : }
5561 616 : Goto(&if_join);
5562 :
5563 : BIND(&if_overflow);
5564 : {
5565 616 : TNode<Float64T> float64_value = ChangeUintPtrToFloat64(value);
5566 1232 : var_result = AllocateHeapNumberWithValue(float64_value);
5567 : }
5568 616 : Goto(&if_join);
5569 :
5570 : BIND(&if_join);
5571 616 : return var_result.value();
5572 : }
5573 :
 : // Implements the receiver coercion used by String.prototype methods:
 : // returns {value} if it is already a String, converts Smis via
 : // NumberToString and other objects via ToString, and throws a
 : // TypeError (kCalledOnNullOrUndefined, naming {method_name}) for
 : // null/undefined receivers.
5574 840 : TNode<String> CodeStubAssembler::ToThisString(TNode<Context> context,
5575 : TNode<Object> value,
5576 : TNode<String> method_name) {
5577 1680 : VARIABLE(var_value, MachineRepresentation::kTagged, value);
5578 :
5579 : // Check if the {value} is a Smi or a HeapObject.
5580 840 : Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
5581 840 : if_valueisstring(this);
5582 1680 : Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
5583 : BIND(&if_valueisnotsmi);
5584 : {
5585 : // Load the instance type of the {value}.
5586 1680 : Node* value_instance_type = LoadInstanceType(CAST(value));
5587 :
5588 : // Check if the {value} is already String.
5589 840 : Label if_valueisnotstring(this, Label::kDeferred);
5590 1680 : Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
5591 840 : &if_valueisnotstring);
5592 : BIND(&if_valueisnotstring);
5593 : {
5594 : // Check if the {value} is null.
5595 840 : Label if_valueisnullorundefined(this, Label::kDeferred);
5596 1680 : GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
5597 : // Convert the {value} to a String.
5598 1680 : var_value.Bind(CallBuiltin(Builtins::kToString, context, value));
5599 840 : Goto(&if_valueisstring);
5600 :
5601 : BIND(&if_valueisnullorundefined);
5602 : {
5603 : // The {value} is either null or undefined.
5604 : ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
5605 840 : method_name);
5606 : }
5607 : }
5608 : }
5609 : BIND(&if_valueissmi);
5610 : {
5611 : // The {value} is a Smi, convert it to a String.
5612 1680 : var_value.Bind(CallBuiltin(Builtins::kNumberToString, context, value));
5613 840 : Goto(&if_valueisstring);
5614 : }
5615 : BIND(&if_valueisstring);
5616 1680 : return CAST(var_value.value());
5617 : }
5618 :
 : // Converts a Number to Uint32: Smis via SmiToInt32 (reinterpreted as
 : // unsigned), HeapNumbers via ChangeFloat64ToUint32.
5619 112 : TNode<Uint32T> CodeStubAssembler::ChangeNumberToUint32(TNode<Number> value) {
5620 112 : TVARIABLE(Uint32T, var_result);
5621 112 : Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this);
5622 224 : Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber);
5623 : BIND(&if_smi);
5624 : {
5625 224 : var_result = Unsigned(SmiToInt32(CAST(value)));
5626 112 : Goto(&done);
5627 : }
5628 : BIND(&if_heapnumber);
5629 : {
5630 224 : var_result = ChangeFloat64ToUint32(LoadHeapNumberValue(CAST(value)));
5631 112 : Goto(&done);
5632 : }
5633 : BIND(&done);
5634 112 : return var_result.value();
5635 : }
5636 :
 : // Converts a Number to Float64: Smis via SmiToFloat64, HeapNumbers by
 : // loading their float64 payload. Asserts (slow mode) that the input is
 : // actually a Number.
5637 12628 : TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(
5638 : SloppyTNode<Number> value) {
5639 : // TODO(tebbi): Remove assert once argument is TNode instead of SloppyTNode.
5640 : CSA_SLOW_ASSERT(this, IsNumber(value));
5641 12628 : TVARIABLE(Float64T, result);
5642 12628 : Label smi(this);
5643 12628 : Label done(this, &result);
5644 25256 : GotoIf(TaggedIsSmi(value), &smi);
5645 : result = LoadHeapNumberValue(CAST(value));
5646 12628 : Goto(&done);
5647 :
5648 : BIND(&smi);
5649 : {
5650 12628 : result = SmiToFloat64(CAST(value));
5651 12628 : Goto(&done);
5652 : }
5653 :
5654 : BIND(&done);
5655 12628 : return result.value();
5656 : }
5657 :
 : // Converts a non-negative Number to UintPtrT. If {if_negative} is
 : // non-null, negative inputs (negative Smi or float < 0) jump there;
 : // otherwise negatives are only caught by a slow-mode assert on the Smi
 : // path. HeapNumbers go through ChangeFloat64ToUintPtr.
5658 336 : TNode<UintPtrT> CodeStubAssembler::TryNumberToUintPtr(TNode<Number> value,
5659 : Label* if_negative) {
5660 336 : TVARIABLE(UintPtrT, result);
5661 336 : Label done(this, &result);
5662 1344 : Branch(TaggedIsSmi(value),
5663 336 : [&] {
5664 1064 : TNode<Smi> value_smi = CAST(value);
5665 336 : if (if_negative == nullptr) {
5666 : CSA_SLOW_ASSERT(this, SmiLessThan(SmiConstant(-1), value_smi));
5667 : } else {
5668 112 : GotoIfNot(TaggedIsPositiveSmi(value), if_negative);
5669 : }
5670 336 : result = UncheckedCast<UintPtrT>(SmiToIntPtr(value_smi));
5671 672 : Goto(&done);
5672 336 : },
5673 336 : [&] {
5674 1176 : TNode<HeapNumber> value_hn = CAST(value);
5675 : TNode<Float64T> value = LoadHeapNumberValue(value_hn);
5676 336 : if (if_negative != nullptr) {
5677 168 : GotoIf(Float64LessThan(value, Float64Constant(0.0)), if_negative);
5678 : }
5679 672 : result = ChangeFloat64ToUintPtr(value);
5680 672 : Goto(&done);
5681 672 : });
5682 :
5683 : BIND(&done);
5684 336 : return result.value();
5685 : }
5686 :
 : // Scaling helpers: multiply {value} by a size constant via a left
 : // shift of the corresponding log2 constant.
5687 71736 : TNode<WordT> CodeStubAssembler::TimesSystemPointerSize(
5688 : SloppyTNode<WordT> value) {
5689 71736 : return WordShl(value, kSystemPointerSizeLog2);
5690 : }
5691 :
5692 2576 : TNode<WordT> CodeStubAssembler::TimesTaggedSize(SloppyTNode<WordT> value) {
5693 9896 : return WordShl(value, kTaggedSizeLog2);
5694 : }
5695 :
5696 0 : TNode<WordT> CodeStubAssembler::TimesDoubleSize(SloppyTNode<WordT> value) {
5697 56 : return WordShl(value, kDoubleSizeLog2);
5698 : }
5699 :
 : // Implements the receiver coercion used by primitive-wrapper prototype
 : // methods (e.g. Number.prototype.valueOf): unwraps JSValue wrappers in
 : // a loop and returns the primitive once it matches {primitive_type};
 : // any other receiver throws TypeError kNotGeneric naming {method_name}
 : // and the expected primitive.
5700 504 : Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
5701 : PrimitiveType primitive_type,
5702 : char const* method_name) {
5703 : // We might need to loop once due to JSValue unboxing.
5704 1008 : VARIABLE(var_value, MachineRepresentation::kTagged, value);
5705 504 : Label loop(this, &var_value), done_loop(this),
5706 504 : done_throw(this, Label::kDeferred);
5707 504 : Goto(&loop);
5708 : BIND(&loop);
5709 : {
5710 : // Load the current {value}.
5711 504 : value = var_value.value();
5712 :
 : // A Smi is only an acceptable receiver when a Number is expected.
5713 : // Check if the {value} is a Smi or a HeapObject.
5714 1512 : GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
5715 : ? &done_loop
5716 504 : : &done_throw);
5717 :
5718 : // Load the map of the {value}.
5719 : Node* value_map = LoadMap(value);
5720 :
5721 : // Load the instance type of the {value}.
5722 : Node* value_instance_type = LoadMapInstanceType(value_map);
5723 :
5724 : // Check if {value} is a JSValue.
5725 504 : Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
5726 1008 : Branch(InstanceTypeEqual(value_instance_type, JS_VALUE_TYPE),
5727 504 : &if_valueisvalue, &if_valueisnotvalue);
5728 :
5729 : BIND(&if_valueisvalue);
5730 : {
5731 : // Load the actual value from the {value}.
5732 504 : var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
5733 504 : Goto(&loop);
5734 : }
5735 :
5736 : BIND(&if_valueisnotvalue);
5737 : {
5738 504 : switch (primitive_type) {
5739 : case PrimitiveType::kBoolean:
5740 112 : GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
5741 112 : break;
5742 : case PrimitiveType::kNumber:
5743 56 : GotoIf(WordEqual(value_map, HeapNumberMapConstant()), &done_loop);
5744 56 : break;
5745 : case PrimitiveType::kString:
5746 224 : GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
5747 112 : break;
5748 : case PrimitiveType::kSymbol:
5749 224 : GotoIf(WordEqual(value_map, SymbolMapConstant()), &done_loop);
5750 224 : break;
5751 : }
5752 504 : Goto(&done_throw);
5753 : }
5754 : }
5755 :
5756 : BIND(&done_throw);
5757 : {
5758 : const char* primitive_name = nullptr;
5759 504 : switch (primitive_type) {
5760 : case PrimitiveType::kBoolean:
5761 : primitive_name = "Boolean";
5762 112 : break;
5763 : case PrimitiveType::kNumber:
5764 : primitive_name = "Number";
5765 56 : break;
5766 : case PrimitiveType::kString:
5767 : primitive_name = "String";
5768 112 : break;
5769 : case PrimitiveType::kSymbol:
5770 : primitive_name = "Symbol";
5771 224 : break;
5772 : }
5773 504 : CHECK_NOT_NULL(primitive_name);
5774 :
5775 : // The {value} is not a compatible receiver for this method.
5776 : ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
5777 504 : primitive_name);
5778 : }
5779 :
5780 : BIND(&done_loop);
5781 1008 : return var_value.value();
5782 : }
5783 :
 : // Throws TypeError kIncompatibleMethodReceiver (naming {method_name})
 : // unless {value} is a HeapObject of exactly {instance_type}. On the
 : // non-throwing path, returns {value}'s map.
5784 2912 : Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
5785 : InstanceType instance_type,
5786 : char const* method_name) {
5787 5824 : Label out(this), throw_exception(this, Label::kDeferred);
5788 5824 : VARIABLE(var_value_map, MachineRepresentation::kTagged);
5789 :
5790 5824 : GotoIf(TaggedIsSmi(value), &throw_exception);
5791 :
5792 : // Load the instance type of the {value}.
5793 2912 : var_value_map.Bind(LoadMap(value));
5794 2912 : Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
5795 :
5796 8736 : Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
5797 2912 : &throw_exception);
5798 :
5799 : // The {value} is not a compatible receiver for this method.
5800 : BIND(&throw_exception);
5801 : ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
5802 5824 : StringConstant(method_name), value);
5803 :
5804 : BIND(&out);
5805 5824 : return var_value_map.value();
5806 : }
5807 :
 : // Throws a TypeError with {msg_template} (naming {method_name}) unless
 : // {value} is a JSReceiver. On the non-throwing path, returns {value}'s
 : // map.
5808 896 : Node* CodeStubAssembler::ThrowIfNotJSReceiver(Node* context, Node* value,
5809 : MessageTemplate msg_template,
5810 : const char* method_name) {
5811 1792 : Label out(this), throw_exception(this, Label::kDeferred);
5812 1792 : VARIABLE(var_value_map, MachineRepresentation::kTagged);
5813 :
5814 1792 : GotoIf(TaggedIsSmi(value), &throw_exception);
5815 :
5816 : // Load the instance type of the {value}.
5817 896 : var_value_map.Bind(LoadMap(value));
5818 896 : Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
5819 :
5820 1792 : Branch(IsJSReceiverInstanceType(value_instance_type), &out, &throw_exception);
5821 :
5822 : // The {value} is not a compatible receiver for this method.
5823 : BIND(&throw_exception);
5824 896 : ThrowTypeError(context, msg_template, method_name);
5825 :
5826 : BIND(&out);
5827 1792 : return var_value_map.value();
5828 : }
5829 :
 : // Calls Runtime::kThrowRangeError with {message} and up to three
 : // optional arguments, then marks the path Unreachable. Trailing null
 : // arguments select the matching runtime-call arity.
5830 4032 : void CodeStubAssembler::ThrowRangeError(Node* context, MessageTemplate message,
5831 : Node* arg0, Node* arg1, Node* arg2) {
5832 8064 : Node* template_index = SmiConstant(static_cast<int>(message));
5833 4032 : if (arg0 == nullptr) {
5834 : CallRuntime(Runtime::kThrowRangeError, context, template_index);
5835 448 : } else if (arg1 == nullptr) {
5836 : CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0);
5837 0 : } else if (arg2 == nullptr) {
5838 : CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1);
5839 : } else {
5840 : CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1,
5841 : arg2);
5842 : }
5843 4032 : Unreachable();
5844 4032 : }
5845 :
 : // Convenience overload: converts the C-string arguments (when non-null)
 : // to String constants and delegates to the Node* overload below.
5846 10820 : void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
5847 : char const* arg0, char const* arg1) {
5848 : Node* arg0_node = nullptr;
5849 17204 : if (arg0) arg0_node = StringConstant(arg0);
5850 : Node* arg1_node = nullptr;
5851 11324 : if (arg1) arg1_node = StringConstant(arg1);
5852 11944 : ThrowTypeError(context, message, arg0_node, arg1_node);
5853 10820 : }
5854 :
 : // Calls Runtime::kThrowTypeError with {message} and up to three
 : // optional arguments, then marks the path Unreachable. Trailing null
 : // arguments select the matching runtime-call arity.
5855 24600 : void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
5856 : Node* arg0, Node* arg1, Node* arg2) {
5857 49200 : Node* template_index = SmiConstant(static_cast<int>(message));
5858 24600 : if (arg0 == nullptr) {
5859 : CallRuntime(Runtime::kThrowTypeError, context, template_index);
5860 19040 : } else if (arg1 == nullptr) {
5861 : CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0);
5862 4984 : } else if (arg2 == nullptr) {
5863 : CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1);
5864 : } else {
5865 : CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1,
5866 : arg2);
5867 : }
5868 24600 : Unreachable();
5869 24600 : }
5870 :
 : // --- Map predicate helpers: each tests a single bit (or bit pattern)
 : // --- in one of the map's bit-field words.
5871 99852 : TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(
5872 : SloppyTNode<Int32T> instance_type, int type) {
5873 199704 : return Word32Equal(instance_type, Int32Constant(type));
5874 : }
5875 :
5876 1568 : TNode<BoolT> CodeStubAssembler::IsDictionaryMap(SloppyTNode<Map> map) {
5877 : CSA_SLOW_ASSERT(this, IsMap(map));
5878 : Node* bit_field3 = LoadMapBitField3(map);
5879 1568 : return IsSetWord32<Map::IsDictionaryMapBit>(bit_field3);
5880 : }
5881 :
5882 168 : TNode<BoolT> CodeStubAssembler::IsExtensibleMap(SloppyTNode<Map> map) {
5883 : CSA_ASSERT(this, IsMap(map));
5884 168 : return IsSetWord32<Map::IsExtensibleBit>(LoadMapBitField2(map));
5885 : }
5886 :
 : // True when the map's elements kind lies in the
 : // [PACKED_SEALED_ELEMENTS, PACKED_FROZEN_ELEMENTS] range.
5887 0 : TNode<BoolT> CodeStubAssembler::IsPackedFrozenOrSealedElementsKindMap(
5888 : SloppyTNode<Map> map) {
5889 : CSA_ASSERT(this, IsMap(map));
5890 : return IsElementsKindInRange(LoadMapElementsKind(map), PACKED_SEALED_ELEMENTS,
5891 0 : PACKED_FROZEN_ELEMENTS);
5892 : }
5893 :
 : // Extensible AND not a prototype map, tested with one masked compare.
5894 0 : TNode<BoolT> CodeStubAssembler::IsExtensibleNonPrototypeMap(TNode<Map> map) {
5895 : int kMask = Map::IsExtensibleBit::kMask | Map::IsPrototypeMapBit::kMask;
5896 : int kExpected = Map::IsExtensibleBit::kMask;
5897 0 : return Word32Equal(Word32And(LoadMapBitField2(map), Int32Constant(kMask)),
5898 0 : Int32Constant(kExpected));
5899 : }
5900 :
5901 9132 : TNode<BoolT> CodeStubAssembler::IsCallableMap(SloppyTNode<Map> map) {
5902 : CSA_ASSERT(this, IsMap(map));
5903 9132 : return IsSetWord32<Map::IsCallableBit>(LoadMapBitField(map));
5904 : }
5905 :
5906 728 : TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(SloppyTNode<Map> map) {
5907 : CSA_ASSERT(this, IsMap(map));
5908 728 : return IsSetWord32<Map::IsDeprecatedBit>(LoadMapBitField3(map));
5909 : }
5910 :
5911 5936 : TNode<BoolT> CodeStubAssembler::IsUndetectableMap(SloppyTNode<Map> map) {
5912 : CSA_ASSERT(this, IsMap(map));
5913 5936 : return IsSetWord32<Map::IsUndetectableBit>(LoadMapBitField(map));
5914 : }
5915 :
 : // --- Protector-cell checks: each loads the named protector cell from
 : // --- the root list and reports whether its value field equals
 : // --- Isolate::kProtectorInvalid (i.e. the protector has been tripped).
5916 3528 : TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
5917 7056 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5918 7056 : Node* cell = LoadRoot(RootIndex::kNoElementsProtector);
5919 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5920 3528 : return WordEqual(cell_value, invalid);
5921 : }
5922 :
5923 448 : TNode<BoolT> CodeStubAssembler::IsArrayIteratorProtectorCellInvalid() {
5924 896 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5925 896 : Node* cell = LoadRoot(RootIndex::kArrayIteratorProtector);
5926 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5927 448 : return WordEqual(cell_value, invalid);
5928 : }
5929 :
 : // Note: this protector is a plain Cell, not a PropertyCell.
5930 280 : TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
5931 560 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5932 560 : Node* cell = LoadRoot(RootIndex::kPromiseResolveProtector);
5933 : Node* cell_value = LoadObjectField(cell, Cell::kValueOffset);
5934 280 : return WordEqual(cell_value, invalid);
5935 : }
5936 :
5937 448 : TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
5938 896 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5939 896 : Node* cell = LoadRoot(RootIndex::kPromiseThenProtector);
5940 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5941 448 : return WordEqual(cell_value, invalid);
5942 : }
5943 :
5944 280 : TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
5945 560 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5946 560 : Node* cell = LoadRoot(RootIndex::kArraySpeciesProtector);
5947 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5948 280 : return WordEqual(cell_value, invalid);
5949 : }
5950 :
5951 224 : TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
5952 448 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5953 448 : Node* cell = LoadRoot(RootIndex::kTypedArraySpeciesProtector);
5954 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5955 224 : return WordEqual(cell_value, invalid);
5956 : }
5957 :
5958 952 : TNode<BoolT> CodeStubAssembler::IsRegExpSpeciesProtectorCellInvalid() {
5959 1904 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5960 1904 : Node* cell = LoadRoot(RootIndex::kRegExpSpeciesProtector);
5961 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5962 952 : return WordEqual(cell_value, invalid);
5963 : }
5964 :
5965 672 : TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
5966 1344 : Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5967 1344 : Node* cell = LoadRoot(RootIndex::kPromiseSpeciesProtector);
5968 : Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5969 672 : return WordEqual(cell_value, invalid);
5970 : }
5971 :
 : // --- Identity checks against well-known maps/prototypes stored in the
 : // --- native context.
 :
 : // True when {map}'s prototype is the native context's initial
 : // Array.prototype.
5972 2688 : TNode<BoolT> CodeStubAssembler::IsPrototypeInitialArrayPrototype(
5973 : SloppyTNode<Context> context, SloppyTNode<Map> map) {
5974 : Node* const native_context = LoadNativeContext(context);
5975 5376 : Node* const initial_array_prototype = LoadContextElement(
5976 2688 : native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
5977 : Node* proto = LoadMapPrototype(map);
5978 2688 : return WordEqual(proto, initial_array_prototype);
5979 : }
5980 :
 : // Checks the prototype's prototype (typed-array instances sit one level
 : // below %TypedArrayPrototype%); non-JSObject protos compare against
 : // null and thus fail.
5981 224 : TNode<BoolT> CodeStubAssembler::IsPrototypeTypedArrayPrototype(
5982 : SloppyTNode<Context> context, SloppyTNode<Map> map) {
5983 : TNode<Context> const native_context = LoadNativeContext(context);
5984 : TNode<Object> const typed_array_prototype =
5985 224 : LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
5986 : TNode<HeapObject> proto = LoadMapPrototype(map);
5987 : TNode<HeapObject> proto_of_proto = Select<HeapObject>(
5988 1344 : IsJSObject(proto), [=] { return LoadMapPrototype(LoadMap(proto)); },
5989 896 : [=] { return NullConstant(); });
5990 448 : return WordEqual(proto_of_proto, typed_array_prototype);
5991 : }
5992 :
5993 1176 : TNode<BoolT> CodeStubAssembler::IsFastAliasedArgumentsMap(
5994 : TNode<Context> context, TNode<Map> map) {
5995 : TNode<Context> const native_context = LoadNativeContext(context);
5996 : TNode<Object> const arguments_map = LoadContextElement(
5997 1176 : native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
5998 2352 : return WordEqual(arguments_map, map);
5999 : }
6000 :
6001 1120 : TNode<BoolT> CodeStubAssembler::IsSlowAliasedArgumentsMap(
6002 : TNode<Context> context, TNode<Map> map) {
6003 : TNode<Context> const native_context = LoadNativeContext(context);
6004 : TNode<Object> const arguments_map = LoadContextElement(
6005 1120 : native_context, Context::SLOW_ALIASED_ARGUMENTS_MAP_INDEX);
6006 2240 : return WordEqual(arguments_map, map);
6007 : }
6008 :
6009 1176 : TNode<BoolT> CodeStubAssembler::IsSloppyArgumentsMap(TNode<Context> context,
6010 : TNode<Map> map) {
6011 : TNode<Context> const native_context = LoadNativeContext(context);
6012 : TNode<Object> const arguments_map =
6013 1176 : LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
6014 2352 : return WordEqual(arguments_map, map);
6015 : }
6016 :
6017 1176 : TNode<BoolT> CodeStubAssembler::IsStrictArgumentsMap(TNode<Context> context,
6018 : TNode<Map> map) {
6019 : TNode<Context> const native_context = LoadNativeContext(context);
6020 : TNode<Object> const arguments_map =
6021 1176 : LoadContextElement(native_context, Context::STRICT_ARGUMENTS_MAP_INDEX);
6022 2352 : return WordEqual(arguments_map, map);
6023 : }
6024 :
 : // --- Object-kind predicates on arbitrary tagged values / heap objects.
 :
 : // Smi-safe variant: Smis are never callable; heap objects defer to
 : // their map's IsCallable bit.
6025 168 : TNode<BoolT> CodeStubAssembler::TaggedIsCallable(TNode<Object> object) {
6026 : return Select<BoolT>(
6027 504 : TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
6028 168 : [=] {
6029 504 : return IsCallableMap(LoadMap(UncheckedCast<HeapObject>(object)));
6030 840 : });
6031 : }
6032 :
6033 4312 : TNode<BoolT> CodeStubAssembler::IsCallable(SloppyTNode<HeapObject> object) {
6034 4312 : return IsCallableMap(LoadMap(object));
6035 : }
6036 :
6037 0 : TNode<BoolT> CodeStubAssembler::IsCell(SloppyTNode<HeapObject> object) {
6038 0 : return WordEqual(LoadMap(object), LoadRoot(RootIndex::kCellMap));
6039 : }
6040 :
6041 616 : TNode<BoolT> CodeStubAssembler::IsCode(SloppyTNode<HeapObject> object) {
6042 616 : return HasInstanceType(object, CODE_TYPE);
6043 : }
6044 :
6045 1568 : TNode<BoolT> CodeStubAssembler::IsConstructorMap(SloppyTNode<Map> map) {
6046 : CSA_ASSERT(this, IsMap(map));
6047 1568 : return IsSetWord32<Map::IsConstructorBit>(LoadMapBitField(map));
6048 : }
6049 :
6050 728 : TNode<BoolT> CodeStubAssembler::IsConstructor(SloppyTNode<HeapObject> object) {
6051 728 : return IsConstructorMap(LoadMap(object));
6052 : }
6053 :
6054 112 : TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(
6055 : SloppyTNode<Map> map) {
6056 : CSA_ASSERT(this, IsMap(map));
6057 112 : return IsSetWord32<Map::HasPrototypeSlotBit>(LoadMapBitField(map));
6058 : }
6059 :
             : // Range checks against the instance-type enumeration. These rely on the
             : // ordering of InstanceType values, guarded by STATIC_ASSERTs.
             : // "Special receivers" (proxies, global objects, etc.) occupy the low end
             : // up to LAST_SPECIAL_RECEIVER_TYPE.
6060 2868 : TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
6061      :     TNode<Int32T> instance_type) {
6062      :   STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
6063      :   return Int32LessThanOrEqual(instance_type,
6064 5736 :                               Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
6065      : }
6066      :
             : // True iff {instance_type} may require custom elements handling
             : // (types ordered at or below LAST_CUSTOM_ELEMENTS_RECEIVER).
6067 1624 : TNode<BoolT> CodeStubAssembler::IsCustomElementsReceiverInstanceType(
6068      :     TNode<Int32T> instance_type) {
6069      :   return Int32LessThanOrEqual(instance_type,
6070 3248 :                               Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER));
6071      : }
6072      :
             : // All string instance types are below FIRST_NONSTRING_TYPE; the assert
             : // pins the expectation that strings start at FIRST_TYPE.
6073 20232 : TNode<BoolT> CodeStubAssembler::IsStringInstanceType(
6074      :     SloppyTNode<Int32T> instance_type) {
6075      :   STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
6076 40464 :   return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
6077      : }
6078 :
             : // String-shape predicates: each masks the instance type with the relevant
             : // tag mask (encoding or representation) and compares against the tag.
             : // Callers must pass a string instance type (checked by CSA_ASSERT).
6079 5488 : TNode<BoolT> CodeStubAssembler::IsOneByteStringInstanceType(
6080      :     SloppyTNode<Int32T> instance_type) {
6081      :   CSA_ASSERT(this, IsStringInstanceType(instance_type));
6082      :   return Word32Equal(
6083 16464 :       Word32And(instance_type, Int32Constant(kStringEncodingMask)),
6084 16464 :       Int32Constant(kOneByteStringTag));
6085      : }
6086      :
             : // Representation == sequential (flat, in-heap character storage).
6087 4704 : TNode<BoolT> CodeStubAssembler::IsSequentialStringInstanceType(
6088      :     SloppyTNode<Int32T> instance_type) {
6089      :   CSA_ASSERT(this, IsStringInstanceType(instance_type));
6090      :   return Word32Equal(
6091 14112 :       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6092 14112 :       Int32Constant(kSeqStringTag));
6093      : }
6094      :
             : // Representation == cons (lazy concatenation of two strings).
6095   56 : TNode<BoolT> CodeStubAssembler::IsConsStringInstanceType(
6096      :     SloppyTNode<Int32T> instance_type) {
6097      :   CSA_ASSERT(this, IsStringInstanceType(instance_type));
6098      :   return Word32Equal(
6099  168 :       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6100  168 :       Int32Constant(kConsStringTag));
6101      : }
6102      :
             : // "Indirect" = cons or sliced; both encode bit 0x1, so a single AND
             : // suffices (the STATIC_ASSERTs pin mask and tag to 0x1).
6103    0 : TNode<BoolT> CodeStubAssembler::IsIndirectStringInstanceType(
6104      :     SloppyTNode<Int32T> instance_type) {
6105      :   CSA_ASSERT(this, IsStringInstanceType(instance_type));
6106      :   STATIC_ASSERT(kIsIndirectStringMask == 0x1);
6107      :   STATIC_ASSERT(kIsIndirectStringTag == 0x1);
6108      :   return UncheckedCast<BoolT>(
6109    0 :       Word32And(instance_type, Int32Constant(kIsIndirectStringMask)));
6110      : }
6111      :
             : // Representation == external (characters stored outside the V8 heap).
6112    0 : TNode<BoolT> CodeStubAssembler::IsExternalStringInstanceType(
6113      :     SloppyTNode<Int32T> instance_type) {
6114      :   CSA_ASSERT(this, IsStringInstanceType(instance_type));
6115      :   return Word32Equal(
6116    0 :       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6117    0 :       Int32Constant(kExternalStringTag));
6118      : }
6119      :
             : // External string whose data pointer is not cached; tested via the
             : // combined kUncachedExternalStringMask bit pattern.
6120    0 : TNode<BoolT> CodeStubAssembler::IsUncachedExternalStringInstanceType(
6121      :     SloppyTNode<Int32T> instance_type) {
6122      :   CSA_ASSERT(this, IsStringInstanceType(instance_type));
6123      :   STATIC_ASSERT(kUncachedExternalStringTag != 0);
6124 4704 :   return IsSetWord32(instance_type, kUncachedExternalStringMask);
6125      : }
6126 :
             : // JSReceiver types occupy the top of the instance-type range, so a single
             : // lower-bound comparison suffices (pinned by the STATIC_ASSERT).
6127 15744 : TNode<BoolT> CodeStubAssembler::IsJSReceiverInstanceType(
6128      :     SloppyTNode<Int32T> instance_type) {
6129      :   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
6130      :   return Int32GreaterThanOrEqual(instance_type,
6131 31488 :                                  Int32Constant(FIRST_JS_RECEIVER_TYPE));
6132      : }
6133      :
6134 7784 : TNode<BoolT> CodeStubAssembler::IsJSReceiverMap(SloppyTNode<Map> map) {
6135 7784 :   return IsJSReceiverInstanceType(LoadMapInstanceType(map));
6136      : }
6137      :
6138 6832 : TNode<BoolT> CodeStubAssembler::IsJSReceiver(SloppyTNode<HeapObject> object) {
6139 6832 :   return IsJSReceiverMap(LoadMap(object));
6140      : }
6141      :
             : // True iff {object} is null or any JSReceiver (JSObject/JSProxy/...).
6142    0 : TNode<BoolT> CodeStubAssembler::IsNullOrJSReceiver(
6143      :     SloppyTNode<HeapObject> object) {
6144    0 :   return UncheckedCast<BoolT>(Word32Or(IsJSReceiver(object), IsNull(object)));
6145      : }
6146      :
             : // True iff {value} is the undefined or the null oddball.
6147 3584 : TNode<BoolT> CodeStubAssembler::IsNullOrUndefined(SloppyTNode<Object> value) {
6148 10752 :   return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
6149      : }
6150      :
6151    0 : TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyInstanceType(
6152      :     SloppyTNode<Int32T> instance_type) {
6153    0 :   return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
6154      : }
6155      :
             : // JSObject types also run to the end of the enum, so again only the lower
             : // bound needs checking.
6156  448 : TNode<BoolT> CodeStubAssembler::IsJSObjectInstanceType(
6157      :     SloppyTNode<Int32T> instance_type) {
6158      :   STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
6159      :   return Int32GreaterThanOrEqual(instance_type,
6160  896 :                                  Int32Constant(FIRST_JS_OBJECT_TYPE));
6161      : }
6162 :
             : // Thin wrappers layering object -> map -> instance-type predicates for the
             : // common JS object kinds. Each is a single delegation.
6163  336 : TNode<BoolT> CodeStubAssembler::IsJSObjectMap(SloppyTNode<Map> map) {
6164      :   CSA_ASSERT(this, IsMap(map));
6165  336 :   return IsJSObjectInstanceType(LoadMapInstanceType(map));
6166      : }
6167      :
6168  224 : TNode<BoolT> CodeStubAssembler::IsJSObject(SloppyTNode<HeapObject> object) {
6169  224 :   return IsJSObjectMap(LoadMap(object));
6170      : }
6171      :
6172  896 : TNode<BoolT> CodeStubAssembler::IsJSPromiseMap(SloppyTNode<Map> map) {
6173      :   CSA_ASSERT(this, IsMap(map));
6174  896 :   return InstanceTypeEqual(LoadMapInstanceType(map), JS_PROMISE_TYPE);
6175      : }
6176      :
6177    0 : TNode<BoolT> CodeStubAssembler::IsJSPromise(SloppyTNode<HeapObject> object) {
6178    0 :   return IsJSPromiseMap(LoadMap(object));
6179      : }
6180      :
6181  336 : TNode<BoolT> CodeStubAssembler::IsJSProxy(SloppyTNode<HeapObject> object) {
6182  336 :   return HasInstanceType(object, JS_PROXY_TYPE);
6183      : }
6184      :
6185  728 : TNode<BoolT> CodeStubAssembler::IsJSGlobalProxy(
6186      :     SloppyTNode<HeapObject> object) {
6187  728 :   return HasInstanceType(object, JS_GLOBAL_PROXY_TYPE);
6188      : }
6189      :
             : // A Map's own map is the meta map; that identity test recognizes maps.
6190 4596 : TNode<BoolT> CodeStubAssembler::IsMap(SloppyTNode<HeapObject> map) {
6191 4596 :   return IsMetaMap(LoadMap(map));
6192      : }
6193      :
6194    0 : TNode<BoolT> CodeStubAssembler::IsJSValueInstanceType(
6195      :     SloppyTNode<Int32T> instance_type) {
6196 3812 :   return InstanceTypeEqual(instance_type, JS_VALUE_TYPE);
6197      : }
6198      :
6199    0 : TNode<BoolT> CodeStubAssembler::IsJSValue(SloppyTNode<HeapObject> object) {
6200    0 :   return IsJSValueMap(LoadMap(object));
6201      : }
6202      :
6203    0 : TNode<BoolT> CodeStubAssembler::IsJSValueMap(SloppyTNode<Map> map) {
6204    0 :   return IsJSValueInstanceType(LoadMapInstanceType(map));
6205      : }
6206      :
6207    0 : TNode<BoolT> CodeStubAssembler::IsJSArrayInstanceType(
6208      :     SloppyTNode<Int32T> instance_type) {
6209 14736 :   return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
6210      : }
6211      :
6212 8012 : TNode<BoolT> CodeStubAssembler::IsJSArray(SloppyTNode<HeapObject> object) {
6213 8012 :   return IsJSArrayMap(LoadMap(object));
6214      : }
6215      :
6216 10924 : TNode<BoolT> CodeStubAssembler::IsJSArrayMap(SloppyTNode<Map> map) {
6217 10924 :   return IsJSArrayInstanceType(LoadMapInstanceType(map));
6218      : }
6219      :
6220    0 : TNode<BoolT> CodeStubAssembler::IsJSArrayIterator(
6221      :     SloppyTNode<HeapObject> object) {
6222    0 :   return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
6223      : }
6224      :
6225    0 : TNode<BoolT> CodeStubAssembler::IsJSAsyncGeneratorObject(
6226      :     SloppyTNode<HeapObject> object) {
6227    0 :   return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
6228      : }
6229      :
             : // Contexts occupy a contiguous instance-type range; test both bounds.
6230    4 : TNode<BoolT> CodeStubAssembler::IsContext(SloppyTNode<HeapObject> object) {
6231    8 :   Node* instance_type = LoadInstanceType(object);
6232      :   return UncheckedCast<BoolT>(Word32And(
6233   12 :       Int32GreaterThanOrEqual(instance_type, Int32Constant(FIRST_CONTEXT_TYPE)),
6234   16 :       Int32LessThanOrEqual(instance_type, Int32Constant(LAST_CONTEXT_TYPE))));
6235      : }
6236 :
             : // Exact FIXED_ARRAY_TYPE check (not subclasses).
6237    0 : TNode<BoolT> CodeStubAssembler::IsFixedArray(SloppyTNode<HeapObject> object) {
6238    0 :   return HasInstanceType(object, FIXED_ARRAY_TYPE);
6239      : }
6240      :
             : // Any type in the [FIRST_FIXED_ARRAY_TYPE, LAST_FIXED_ARRAY_TYPE] range.
6241    0 : TNode<BoolT> CodeStubAssembler::IsFixedArraySubclass(
6242      :     SloppyTNode<HeapObject> object) {
6243    0 :   Node* instance_type = LoadInstanceType(object);
6244      :   return UncheckedCast<BoolT>(
6245    0 :       Word32And(Int32GreaterThanOrEqual(instance_type,
6246    0 :                                         Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
6247    0 :                 Int32LessThanOrEqual(instance_type,
6248    0 :                                      Int32Constant(LAST_FIXED_ARRAY_TYPE))));
6249      : }
6250      :
             : // Negated range check: true iff {object} is outside the WeakFixedArray
             : // subclass range (note the Or of the two strict comparisons).
6251    0 : TNode<BoolT> CodeStubAssembler::IsNotWeakFixedArraySubclass(
6252      :     SloppyTNode<HeapObject> object) {
6253    0 :   Node* instance_type = LoadInstanceType(object);
6254      :   return UncheckedCast<BoolT>(Word32Or(
6255    0 :       Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
6256    0 :       Int32GreaterThan(instance_type,
6257    0 :                        Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
6258      : }
6259      :
6260  392 : TNode<BoolT> CodeStubAssembler::IsPromiseCapability(
6261      :     SloppyTNode<HeapObject> object) {
6262  392 :   return HasInstanceType(object, PROMISE_CAPABILITY_TYPE);
6263      : }
6264      :
6265    0 : TNode<BoolT> CodeStubAssembler::IsPropertyArray(
6266      :     SloppyTNode<HeapObject> object) {
6267    0 :   return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
6268      : }
6269 :
6270      : // This complicated check is due to elements oddities. If a smi array is empty
6271      : // after Array.p.shift, it is replaced by the empty array constant. If it is
6272      : // later filled with a double element, we try to grow it but pass in a double
6273      : // elements kind. Usually this would cause a size mismatch (since the source
6274      : // fixed array has HOLEY_ELEMENTS and destination has
6275      : // HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
6276      : // source array is empty.
6277      : // TODO(jgruber): It might we worth creating an empty_double_array constant to
6278      : // simplify this case.
             : // Returns true iff {object} matches {kind}, OR its length is zero
             : // (the empty-array special case described above).
6279    0 : TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKindOrEmpty(
6280      :     SloppyTNode<HeapObject> object, ElementsKind kind) {
6281    0 :   Label out(this);
6282      :   TVARIABLE(BoolT, var_result, Int32TrueConstant());
6283      :
             :   // Fast path: kind matches, result stays true.
6284    0 :   GotoIf(IsFixedArrayWithKind(object, kind), &out);
6285      :
             :   // Otherwise only an empty array is acceptable.
6286    0 :   TNode<Smi> const length = LoadFixedArrayBaseLength(CAST(object));
6287    0 :   GotoIf(SmiEqual(length, SmiConstant(0)), &out);
6288      :
6289      :   var_result = Int32FalseConstant();
6290    0 :   Goto(&out);
6291      :
6292      :   BIND(&out);
6293    0 :   return var_result.value();
6294      : }
6295      :
             : // Dispatches at C++ compile time on {kind}: double kinds must be backed by
             : // a FixedDoubleArray, Smi/object kinds by any FixedArray subclass.
6296    0 : TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKind(
6297      :     SloppyTNode<HeapObject> object, ElementsKind kind) {
6298    0 :   if (IsDoubleElementsKind(kind)) {
6299    0 :     return IsFixedDoubleArray(object);
6300      :   } else {
6301      :     DCHECK(IsSmiOrObjectElementsKind(kind));
6302    0 :     return IsFixedArraySubclass(object);
6303      :   }
6304      : }
6305 :
             : // Further single-delegation map/instance-type predicates.
6306  168 : TNode<BoolT> CodeStubAssembler::IsBoolean(SloppyTNode<HeapObject> object) {
6307  168 :   return IsBooleanMap(LoadMap(object));
6308      : }
6309      :
6310    0 : TNode<BoolT> CodeStubAssembler::IsPropertyCell(SloppyTNode<HeapObject> object) {
6311    0 :   return IsPropertyCellMap(LoadMap(object));
6312      : }
6313      :
6314  336 : TNode<BoolT> CodeStubAssembler::IsAccessorInfo(SloppyTNode<HeapObject> object) {
6315  336 :   return IsAccessorInfoMap(LoadMap(object));
6316      : }
6317      :
6318 3980 : TNode<BoolT> CodeStubAssembler::IsAccessorPair(SloppyTNode<HeapObject> object) {
6319 3980 :   return IsAccessorPairMap(LoadMap(object));
6320      : }
6321      :
6322  168 : TNode<BoolT> CodeStubAssembler::IsAllocationSite(
6323      :     SloppyTNode<HeapObject> object) {
6324  336 :   return IsAllocationSiteInstanceType(LoadInstanceType(object));
6325      : }
6326      :
             : // True for either flavor of heap number (immutable or mutable).
6327    0 : TNode<BoolT> CodeStubAssembler::IsAnyHeapNumber(
6328      :     SloppyTNode<HeapObject> object) {
6329      :   return UncheckedCast<BoolT>(
6330    0 :       Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object)));
6331      : }
6332      :
6333 33628 : TNode<BoolT> CodeStubAssembler::IsHeapNumber(SloppyTNode<HeapObject> object) {
6334 33628 :   return IsHeapNumberMap(LoadMap(object));
6335      : }
6336      :
6337   56 : TNode<BoolT> CodeStubAssembler::IsHeapNumberInstanceType(
6338      :     SloppyTNode<Int32T> instance_type) {
6339  452 :   return InstanceTypeEqual(instance_type, HEAP_NUMBER_TYPE);
6340      : }
6341      :
6342    0 : TNode<BoolT> CodeStubAssembler::IsOddball(SloppyTNode<HeapObject> object) {
6343    0 :   return IsOddballInstanceType(LoadInstanceType(object));
6344      : }
6345      :
6346    0 : TNode<BoolT> CodeStubAssembler::IsOddballInstanceType(
6347      :     SloppyTNode<Int32T> instance_type) {
6348 1068 :   return InstanceTypeEqual(instance_type, ODDBALL_TYPE);
6349      : }
6350      :
6351   56 : TNode<BoolT> CodeStubAssembler::IsMutableHeapNumber(
6352      :     SloppyTNode<HeapObject> object) {
6353   56 :   return IsMutableHeapNumberMap(LoadMap(object));
6354      : }
6355      :
6356    0 : TNode<BoolT> CodeStubAssembler::IsFeedbackCell(SloppyTNode<HeapObject> object) {
6357    0 :   return HasInstanceType(object, FEEDBACK_CELL_TYPE);
6358      : }
6359      :
6360 12612 : TNode<BoolT> CodeStubAssembler::IsFeedbackVector(
6361      :     SloppyTNode<HeapObject> object) {
6362 12612 :   return IsFeedbackVectorMap(LoadMap(object));
6363      : }
6364      :
6365   56 : TNode<BoolT> CodeStubAssembler::IsName(SloppyTNode<HeapObject> object) {
6366  112 :   return IsNameInstanceType(LoadInstanceType(object));
6367      : }
6368      :
             : // Names (strings and symbols) occupy the bottom of the instance-type enum,
             : // so an upper-bound comparison suffices.
6369  112 : TNode<BoolT> CodeStubAssembler::IsNameInstanceType(
6370      :     SloppyTNode<Int32T> instance_type) {
6371  224 :   return Int32LessThanOrEqual(instance_type, Int32Constant(LAST_NAME_TYPE));
6372      : }
6373 :
6374 9300 : TNode<BoolT> CodeStubAssembler::IsString(SloppyTNode<HeapObject> object) {
6375 18600 :   return IsStringInstanceType(LoadInstanceType(object));
6376      : }
6377      :
6378    0 : TNode<BoolT> CodeStubAssembler::IsSymbolInstanceType(
6379      :     SloppyTNode<Int32T> instance_type) {
6380 1068 :   return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
6381      : }
6382      :
6383 2920 : TNode<BoolT> CodeStubAssembler::IsSymbol(SloppyTNode<HeapObject> object) {
6384 2920 :   return IsSymbolMap(LoadMap(object));
6385      : }
6386      :
             : // One combined mask-and-compare tests "is a string" and "is internalized"
             : // simultaneously (internalized is encoded as an absent not-internalized bit).
6387  168 : TNode<BoolT> CodeStubAssembler::IsInternalizedStringInstanceType(
6388      :     TNode<Int32T> instance_type) {
6389      :   STATIC_ASSERT(kNotInternalizedTag != 0);
6390      :   return Word32Equal(
6391  336 :       Word32And(instance_type,
6392  336 :                 Int32Constant(kIsNotStringMask | kIsNotInternalizedMask)),
6393  504 :       Int32Constant(kStringTag | kInternalizedTag));
6394      : }
6395      :
             : // Unique name = internalized string or symbol.
6396    0 : TNode<BoolT> CodeStubAssembler::IsUniqueName(TNode<HeapObject> object) {
6397    0 :   TNode<Int32T> instance_type = LoadInstanceType(object);
6398      :   return Select<BoolT>(
6399    0 :       IsInternalizedStringInstanceType(instance_type),
6400    0 :       [=] { return Int32TrueConstant(); },
6401    0 :       [=] { return IsSymbolInstanceType(instance_type); });
6402      : }
6403      :
             : // Like IsUniqueName, but internalized strings that are array indices are
             : // excluded (checked via the Name::kIsNotArrayIndexMask bit of the hash).
6404  168 : TNode<BoolT> CodeStubAssembler::IsUniqueNameNoIndex(TNode<HeapObject> object) {
6405  168 :   TNode<Int32T> instance_type = LoadInstanceType(object);
6406      :   return Select<BoolT>(
6407  336 :       IsInternalizedStringInstanceType(instance_type),
6408  168 :       [=] {
6409  336 :         return IsSetWord32(LoadNameHashField(CAST(object)),
6410  168 :                            Name::kIsNotArrayIndexMask);
6411  336 :       },
6412  672 :       [=] { return IsSymbolInstanceType(instance_type); });
6413      : }
6414      :
6415 4760 : TNode<BoolT> CodeStubAssembler::IsBigIntInstanceType(
6416      :     SloppyTNode<Int32T> instance_type) {
6417 29516 :   return InstanceTypeEqual(instance_type, BIGINT_TYPE);
6418      : }
6419      :
6420 10584 : TNode<BoolT> CodeStubAssembler::IsBigInt(SloppyTNode<HeapObject> object) {
6421 21168 :   return IsBigIntInstanceType(LoadInstanceType(object));
6422      : }
6423      :
             : // Primitive types sort below LAST_PRIMITIVE_TYPE in the enum.
6424  448 : TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType(
6425      :     SloppyTNode<Int32T> instance_type) {
6426      :   return Int32LessThanOrEqual(instance_type,
6427  896 :                               Int32Constant(LAST_PRIMITIVE_TYPE));
6428      : }
6429      :
             : // A private symbol is a Symbol whose flags word has Symbol::IsPrivateBit
             : // set; non-symbols are never private.
6430 2636 : TNode<BoolT> CodeStubAssembler::IsPrivateSymbol(
6431      :     SloppyTNode<HeapObject> object) {
6432 5272 :   return Select<BoolT>(IsSymbol(object),
6433 2636 :                        [=] {
6434 5272 :                          TNode<Symbol> symbol = CAST(object);
6435      :                          TNode<Uint32T> flags = LoadObjectField<Uint32T>(
6436      :                              symbol, Symbol::kFlagsOffset);
6437 2636 :                          return IsSetWord32<Symbol::IsPrivateBit>(flags);
6438      :                        },
6439 10544 :                        [=] { return Int32FalseConstant(); });
6440      : }
6441 :
             : // Root-map identity check for native contexts.
6442  168 : TNode<BoolT> CodeStubAssembler::IsNativeContext(
6443      :     SloppyTNode<HeapObject> object) {
6444  336 :   return WordEqual(LoadMap(object), LoadRoot(RootIndex::kNativeContextMap));
6445      : }
6446      :
6447  112 : TNode<BoolT> CodeStubAssembler::IsFixedDoubleArray(
6448      :     SloppyTNode<HeapObject> object) {
6449  112 :   return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
6450      : }
6451      :
             : // Any type in the [FIRST_HASH_TABLE_TYPE, LAST_HASH_TABLE_TYPE] range.
6452    0 : TNode<BoolT> CodeStubAssembler::IsHashTable(SloppyTNode<HeapObject> object) {
6453    0 :   Node* instance_type = LoadInstanceType(object);
6454      :   return UncheckedCast<BoolT>(
6455    0 :       Word32And(Int32GreaterThanOrEqual(instance_type,
6456    0 :                                         Int32Constant(FIRST_HASH_TABLE_TYPE)),
6457    0 :                 Int32LessThanOrEqual(instance_type,
6458    0 :                                      Int32Constant(LAST_HASH_TABLE_TYPE))));
6459      : }
6460      :
6461    0 : TNode<BoolT> CodeStubAssembler::IsEphemeronHashTable(
6462      :     SloppyTNode<HeapObject> object) {
6463    0 :   return HasInstanceType(object, EPHEMERON_HASH_TABLE_TYPE);
6464      : }
6465      :
6466    0 : TNode<BoolT> CodeStubAssembler::IsNameDictionary(
6467      :     SloppyTNode<HeapObject> object) {
6468    0 :   return HasInstanceType(object, NAME_DICTIONARY_TYPE);
6469      : }
6470      :
6471    0 : TNode<BoolT> CodeStubAssembler::IsGlobalDictionary(
6472      :     SloppyTNode<HeapObject> object) {
6473    0 :   return HasInstanceType(object, GLOBAL_DICTIONARY_TYPE);
6474      : }
6475      :
6476    0 : TNode<BoolT> CodeStubAssembler::IsNumberDictionary(
6477      :     SloppyTNode<HeapObject> object) {
6478    0 :   return HasInstanceType(object, NUMBER_DICTIONARY_TYPE);
6479      : }
6480      :
6481    0 : TNode<BoolT> CodeStubAssembler::IsJSGeneratorObject(
6482      :     SloppyTNode<HeapObject> object) {
6483    0 :   return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
6484      : }
6485      :
6486    0 : TNode<BoolT> CodeStubAssembler::IsJSFunctionInstanceType(
6487      :     SloppyTNode<Int32T> instance_type) {
6488 3868 :   return InstanceTypeEqual(instance_type, JS_FUNCTION_TYPE);
6489      : }
6490      :
6491    0 : TNode<BoolT> CodeStubAssembler::IsAllocationSiteInstanceType(
6492      :     SloppyTNode<Int32T> instance_type) {
6493  168 :   return InstanceTypeEqual(instance_type, ALLOCATION_SITE_TYPE);
6494      : }
6495      :
6496   56 : TNode<BoolT> CodeStubAssembler::IsJSFunction(SloppyTNode<HeapObject> object) {
6497   56 :   return IsJSFunctionMap(LoadMap(object));
6498      : }
6499      :
6500   56 : TNode<BoolT> CodeStubAssembler::IsJSFunctionMap(SloppyTNode<Map> map) {
6501   56 :   return IsJSFunctionInstanceType(LoadMapInstanceType(map));
6502      : }
6503      :
6504 1064 : TNode<BoolT> CodeStubAssembler::IsJSTypedArray(SloppyTNode<HeapObject> object) {
6505 1064 :   return HasInstanceType(object, JS_TYPED_ARRAY_TYPE);
6506      : }
6507      :
6508  168 : TNode<BoolT> CodeStubAssembler::IsJSArrayBuffer(
6509      :     SloppyTNode<HeapObject> object) {
6510  168 :   return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
6511      : }
6512      :
6513 1288 : TNode<BoolT> CodeStubAssembler::IsJSDataView(TNode<HeapObject> object) {
6514 1288 :   return HasInstanceType(object, JS_DATA_VIEW_TYPE);
6515      : }
6516      :
             : // Range check over the fixed typed array instance types.
6517    0 : TNode<BoolT> CodeStubAssembler::IsFixedTypedArray(
6518      :     SloppyTNode<HeapObject> object) {
6519    0 :   TNode<Int32T> instance_type = LoadInstanceType(object);
6520      :   return UncheckedCast<BoolT>(Word32And(
6521    0 :       Int32GreaterThanOrEqual(instance_type,
6522    0 :                               Int32Constant(FIRST_FIXED_TYPED_ARRAY_TYPE)),
6523    0 :       Int32LessThanOrEqual(instance_type,
6524    0 :                            Int32Constant(LAST_FIXED_TYPED_ARRAY_TYPE))));
6525      : }
6526      :
6527 1008 : TNode<BoolT> CodeStubAssembler::IsJSRegExp(SloppyTNode<HeapObject> object) {
6528 1008 :   return HasInstanceType(object, JS_REGEXP_TYPE);
6529      : }
6530 :
             : // A Number is a Smi or a HeapNumber.
6531 3644 : TNode<BoolT> CodeStubAssembler::IsNumber(SloppyTNode<Object> object) {
6532 10932 :   return Select<BoolT>(TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6533 14576 :                        [=] { return IsHeapNumber(CAST(object)); });
6534      : }
6535      :
             : // A Numeric is a Number or a BigInt.
6536  112 : TNode<BoolT> CodeStubAssembler::IsNumeric(SloppyTNode<Object> object) {
6537      :   return Select<BoolT>(
6538  336 :       TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6539  112 :       [=] {
6540      :         return UncheckedCast<BoolT>(
6541  336 :             Word32Or(IsHeapNumber(CAST(object)), IsBigInt(CAST(object))));
6542  448 :       });
6543      : }
6544      :
             : // A Number is "normalized" when every Smi-representable value is actually
             : // stored as a Smi. Smis are trivially normalized; a HeapNumber is
             : // normalized iff its value is outside Smi range or is NaN (NaN fails all
             : // three comparisons below and exits with the initial true result).
6545    0 : TNode<BoolT> CodeStubAssembler::IsNumberNormalized(SloppyTNode<Number> number) {
6546    0 :   TVARIABLE(BoolT, var_result, Int32TrueConstant());
6547    0 :   Label out(this);
6548      :
6549    0 :   GotoIf(TaggedIsSmi(number), &out);
6550      :
6551      :   TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
6552      :   TNode<Float64T> smi_min =
6553    0 :       Float64Constant(static_cast<double>(Smi::kMinValue));
6554      :   TNode<Float64T> smi_max =
6555    0 :       Float64Constant(static_cast<double>(Smi::kMaxValue));
6556      :
6557    0 :   GotoIf(Float64LessThan(value, smi_min), &out);
6558    0 :   GotoIf(Float64GreaterThan(value, smi_max), &out);
6559    0 :   GotoIfNot(Float64Equal(value, value), &out);  // NaN.
6560      :
             :   // Value fits in Smi range but is boxed -> not normalized.
6561      :   var_result = Int32FalseConstant();
6562    0 :   Goto(&out);
6563      :
6564      :   BIND(&out);
6565    0 :   return var_result.value();
6566      : }
6567      :
             : // number >= 0 (Smi sign test, or Float64 comparison for HeapNumbers).
6568    0 : TNode<BoolT> CodeStubAssembler::IsNumberPositive(SloppyTNode<Number> number) {
6569    0 :   return Select<BoolT>(TaggedIsSmi(number),
6570    0 :                        [=] { return TaggedIsPositiveSmi(number); },
6571    0 :                        [=] { return IsHeapNumberPositive(CAST(number)); });
6572      : }
6573 :
6574      : // TODO(cbruni): Use TNode<HeapNumber> instead of custom name.
             : // value >= 0.0; note this is true for +0 and false for NaN.
6575    4 : TNode<BoolT> CodeStubAssembler::IsHeapNumberPositive(TNode<HeapNumber> number) {
6576      :   TNode<Float64T> value = LoadHeapNumberValue(number);
6577    4 :   TNode<Float64T> float_zero = Float64Constant(0.);
6578    4 :   return Float64GreaterThanOrEqual(value, float_zero);
6579      : }
6580      :
             : // Non-negative safe integer: positive Smi, or a HeapNumber that is both
             : // an integer and non-negative.
6581    0 : TNode<BoolT> CodeStubAssembler::IsNumberNonNegativeSafeInteger(
6582      :     TNode<Number> number) {
6583      :   return Select<BoolT>(
6584      :       // TODO(cbruni): Introduce TaggedIsNonNegateSmi to avoid confusion.
6585    0 :       TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
6586    0 :       [=] {
6587    0 :         TNode<HeapNumber> heap_number = CAST(number);
6588    0 :         return Select<BoolT>(IsInteger(heap_number),
6589    0 :                              [=] { return IsHeapNumberPositive(heap_number); },
6590    0 :                              [=] { return Int32FalseConstant(); });
6591    0 :       });
6592      : }
6593      :
             : // Safe integer test on an arbitrary tagged value: Smis always qualify;
             : // HeapNumbers defer to the HeapNumber overload; everything else is false.
6594   56 : TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<Object> number) {
6595      :   return Select<BoolT>(
6596  168 :       TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6597   56 :       [=] {
6598      :         return Select<BoolT>(
6599  224 :             IsHeapNumber(CAST(number)),
6600   56 :             [=] { return IsSafeInteger(UncheckedCast<HeapNumber>(number)); },
6601  224 :             [=] { return Int32FalseConstant(); });
6602  280 :       });
6603      : }
6604      :
             : // value is an integral double (truncation leaves it unchanged, which also
             : // rules out infinities) with |value| <= 2^53 - 1.
6605   56 : TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<HeapNumber> number) {
6606      :   // Load the actual value of {number}.
6607      :   TNode<Float64T> number_value = LoadHeapNumberValue(number);
6608      :   // Truncate the value of {number} to an integer (or an infinity).
6609   56 :   TNode<Float64T> integer = Float64Trunc(number_value);
6610      :
6611      :   return Select<BoolT>(
6612      :       // Check if {number}s value matches the integer (ruling out the
6613      :       // infinities).
6614  224 :       Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0)),
6615   56 :       [=] {
6616      :         // Check if the {integer} value is in safe integer range.
6617  224 :         return Float64LessThanOrEqual(Float64Abs(integer),
6618  224 :                                       Float64Constant(kMaxSafeInteger));
6619  112 :       },
6620  224 :       [=] { return Int32FalseConstant(); });
6621      : }
6622      :
             : // Integer test on an arbitrary tagged value; same dispatch shape as
             : // IsSafeInteger(TNode<Object>) above.
6623   56 : TNode<BoolT> CodeStubAssembler::IsInteger(TNode<Object> number) {
6624      :   return Select<BoolT>(
6625  168 :       TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6626   56 :       [=] {
6627      :         return Select<BoolT>(
6628  224 :             IsHeapNumber(CAST(number)),
6629   56 :             [=] { return IsInteger(UncheckedCast<HeapNumber>(number)); },
6630  224 :             [=] { return Int32FalseConstant(); });
6631  280 :       });
6632      : }
6633      :
             : // True iff the HeapNumber's value equals its truncation (integral, and
             : // infinities are excluded because inf - inf is NaN, which compares false).
6634   56 : TNode<BoolT> CodeStubAssembler::IsInteger(TNode<HeapNumber> number) {
6635      :   TNode<Float64T> number_value = LoadHeapNumberValue(number);
6636      :   // Truncate the value of {number} to an integer (or an infinity).
6637   56 :   TNode<Float64T> integer = Float64Trunc(number_value);
6638      :   // Check if {number}s value matches the integer (ruling out the infinities).
6639  168 :   return Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0));
6640      : }
6641      :
             : // True iff the HeapNumber holds an exact uint32: non-negative, and the
             : // round-trip through a 32-bit truncation reproduces the same double.
6642    4 : TNode<BoolT> CodeStubAssembler::IsHeapNumberUint32(TNode<HeapNumber> number) {
6643      :   // Check that the HeapNumber is a valid uint32
6644      :   return Select<BoolT>(
6645    8 :       IsHeapNumberPositive(number),
6646    4 :       [=] {
6647   16 :         TNode<Float64T> value = LoadHeapNumberValue(number);
6648    8 :         TNode<Uint32T> int_value = Unsigned(TruncateFloat64ToWord32(value));
6649    8 :         return Float64Equal(value, ChangeUint32ToFloat64(int_value));
6650      :       },
6651   16 :       [=] { return Int32FalseConstant(); });
6652      : }
6653      :
             : // Valid array index: positive Smi, or HeapNumber holding an exact uint32.
6654    4 : TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(TNode<Number> number) {
6655    8 :   return Select<BoolT>(TaggedIsSmi(number),
6656    4 :                        [=] { return TaggedIsPositiveSmi(number); },
6657   16 :                        [=] { return IsHeapNumberUint32(CAST(number)); });
6658      : }
6659 :
             : // Returns true iff a FixedArray with {element_count} tagged elements plus
             : // {base_size} header bytes would exceed the regular-object new-space limit
             : // (kMaxRegularHeapObjectSize). The threshold is computed at C++ compile
             : // time; only the comparison is emitted into the graph.
6660  224 : Node* CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(Node* element_count,
6661      :                                                            int base_size,
6662      :                                                            ParameterMode mode) {
6663      :   int max_newspace_elements =
6664  224 :       (kMaxRegularHeapObjectSize - base_size) / kTaggedSize;
6665  224 :   return IntPtrOrSmiGreaterThan(
6666  224 :       element_count, IntPtrOrSmiConstant(max_newspace_elements, mode), mode);
6667      : }
6668 :
             : // Returns the (unsigned 16-bit) character code of {string} at {index}.
             : // Flattens the string via ToDirectStringAssembler, then loads one byte or
             : // two bytes depending on the encoding; falls back to
             : // Runtime::kStringCharCodeAt when the string cannot be made direct.
             : // Preconditions (asserted): 0 <= index < string.length.
6669 2856 : TNode<Int32T> CodeStubAssembler::StringCharCodeAt(SloppyTNode<String> string,
6670      :                                                   SloppyTNode<IntPtrT> index) {
6671      :   CSA_ASSERT(this, IsString(string));
6672      :
6673      :   CSA_ASSERT(this, IntPtrGreaterThanOrEqual(index, IntPtrConstant(0)));
6674      :   CSA_ASSERT(this, IntPtrLessThan(index, LoadStringLengthAsWord(string)));
6675      :
6676 2856 :   TVARIABLE(Int32T, var_result);
6677      :
6678 2856 :   Label return_result(this), if_runtime(this, Label::kDeferred),
6679 2856 :       if_stringistwobyte(this), if_stringisonebyte(this);
6680      :
6681 5712 :   ToDirectStringAssembler to_direct(state(), string);
6682 2856 :   to_direct.TryToDirect(&if_runtime);
             :   // {offset} is relative to the direct string's character data and already
             :   // accounts for slices.
6683      :   Node* const offset = IntPtrAdd(index, to_direct.offset());
6684      :   Node* const instance_type = to_direct.instance_type();
6685      :
6686      :   Node* const string_data = to_direct.PointerToData(&if_runtime);
6687      :
6688      :   // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
6689 5712 :   Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
6690 2856 :          &if_stringistwobyte);
6691      :
6692      :   BIND(&if_stringisonebyte);
6693      :   {
6694 2856 :     var_result =
6695      :         UncheckedCast<Int32T>(Load(MachineType::Uint8(), string_data, offset));
6696 2856 :     Goto(&return_result);
6697      :   }
6698      :
6699      :   BIND(&if_stringistwobyte);
6700      :   {
             :     // Two-byte characters: scale the offset by 2 (WordShl by 1).
6701 2856 :     var_result =
6702      :         UncheckedCast<Int32T>(Load(MachineType::Uint16(), string_data,
6703 8568 :                                    WordShl(offset, IntPtrConstant(1))));
6704 2856 :     Goto(&return_result);
6705      :   }
6706      :
6707      :   BIND(&if_runtime);
6708      :   {
6709      :     Node* result = CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(),
6710 2856 :                                string, SmiTag(index));
6711 5712 :     var_result = SmiToInt32(result);
6712 2856 :     Goto(&return_result);
6713      :   }
6714      :
6715      :   BIND(&return_result);
6716 2856 :   return var_result.value();
6717      : }
6718 :
             : // Creates a one-character string from {code}. One-byte codes
             : // (<= String::kMaxOneByteCharCode) are served from / inserted into the
             : // isolate-wide single-character string cache; two-byte codes always
             : // allocate a fresh SeqTwoByteString.
6719  784 : TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
6720 1568 :   VARIABLE(var_result, MachineRepresentation::kTagged);
6721      :
6722      :   // Check if the {code} is a one-byte char code.
6723  784 :   Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
6724  784 :       if_done(this);
6725 2352 :   Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
6726  784 :          &if_codeisonebyte, &if_codeistwobyte);
6727      :   BIND(&if_codeisonebyte);
6728      :   {
6729      :     // Load the isolate wide single character string cache.
6730      :     TNode<FixedArray> cache =
6731  784 :         CAST(LoadRoot(RootIndex::kSingleCharacterStringCache));
6732 1568 :     TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));
6733      :
6734      :     // Check if we have an entry for the {code} in the single character string
6735      :     // cache already.
6736  784 :     Label if_entryisundefined(this, Label::kDeferred),
6737  784 :         if_entryisnotundefined(this);
6738      :     Node* entry = UnsafeLoadFixedArrayElement(cache, code_index);
6739 1568 :     Branch(IsUndefined(entry), &if_entryisundefined, &if_entryisnotundefined);
6740      :
6741      :     BIND(&if_entryisundefined);
6742      :     {
6743      :       // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
6744  784 :       TNode<String> result = AllocateSeqOneByteString(1);
6745      :       StoreNoWriteBarrier(
6746      :           MachineRepresentation::kWord8, result,
6747 1568 :           IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
6748  784 :       StoreFixedArrayElement(cache, code_index, result);
6749  784 :       var_result.Bind(result);
6750  784 :       Goto(&if_done);
6751      :     }
6752      :
6753      :     BIND(&if_entryisnotundefined);
6754      :     {
6755      :       // Return the entry from the {cache}.
6756  784 :       var_result.Bind(entry);
6757  784 :       Goto(&if_done);
6758      :     }
6759      :   }
6760      :
6761      :   BIND(&if_codeistwobyte);
6762      :   {
6763      :     // Allocate a new SeqTwoByteString for {code}.
6764 1568 :     Node* result = AllocateSeqTwoByteString(1);
6765      :     StoreNoWriteBarrier(
6766      :         MachineRepresentation::kWord16, result,
6767 1568 :         IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
6768  784 :     var_result.Bind(result);
6769  784 :     Goto(&if_done);
6770      :   }
6771      :
6772      :   BIND(&if_done);
6773      :   CSA_ASSERT(this, IsString(var_result.value()));
6774 1568 :   return CAST(var_result.value());
6775      : }
6776 :
6777      : // A wrapper around CopyStringCharacters which determines the correct string
6778      : // encoding, allocates a corresponding sequential string, and then copies the
6779      : // given character range using CopyStringCharacters.
6780      : // |from_string| must be a sequential string.
6781      : // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
6782  896 : TNode<String> CodeStubAssembler::AllocAndCopyStringCharacters(
6783      :     Node* from, Node* from_instance_type, TNode<IntPtrT> from_index,
6784      :     TNode<IntPtrT> character_count) {
6785 1792 :   Label end(this), one_byte_sequential(this), two_byte_sequential(this);
6786      :   TVARIABLE(String, var_result);
6787      :
             :   // Dispatch on the source encoding; the destination uses the same encoding.
6788 1792 :   Branch(IsOneByteStringInstanceType(from_instance_type), &one_byte_sequential,
6789  896 :          &two_byte_sequential);
6790      :
6791      :   // The subject string is a sequential one-byte string.
6792      :   BIND(&one_byte_sequential);
6793      :   {
6794      :     TNode<String> result = AllocateSeqOneByteString(
6795 1792 :         NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
6796  896 :     CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6797      :                          character_count, String::ONE_BYTE_ENCODING,
6798  896 :                          String::ONE_BYTE_ENCODING);
6799      :     var_result = result;
6800  896 :     Goto(&end);
6801      :   }
6802      :
6803      :   // The subject string is a sequential two-byte string.
6804      :   BIND(&two_byte_sequential);
6805      :   {
6806      :     TNode<String> result = AllocateSeqTwoByteString(
6807 1792 :         NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
6808  896 :     CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6809      :                          character_count, String::TWO_BYTE_ENCODING,
6810  896 :                          String::TWO_BYTE_ENCODING);
6811      :     var_result = result;
6812  896 :     Goto(&end);
6813      :   }
6814      :
6815      :   BIND(&end);
6816  896 :   return var_result.value();
6817      : }
6818 :
6819 448 : TNode<String> CodeStubAssembler::SubString(TNode<String> string,
6820 : TNode<IntPtrT> from,
6821 : TNode<IntPtrT> to) {
 : // Returns the substring of {string} over the half-open range [from, to).
 : // Dispatches on the substring length:
 : // - length 0: the canonical empty string,
 : // - length 1: via StringCharCodeAt + StringFromSingleCharCode,
 : // - length >= SlicedString::kMinLength (with FLAG_string_slices on):
 : // a SlicedString pointing into the flattened subject,
 : // - otherwise: a freshly allocated sequential string, filled by copying,
 : // - length == subject length and from == 0: the subject itself.
 : // Anything that cannot be handled inline (non-flattenable subjects,
 : // from > 0 with full length, i.e. invalid lengths) falls back to
 : // Runtime::kStringSubstring.
6822 448 : TVARIABLE(String, var_result);
6823 896 : ToDirectStringAssembler to_direct(state(), string);
6824 448 : Label end(this), runtime(this);
6825 :
 : // Note: substr_length is only meaningful when to >= from; the unsigned
 : // comparison below routes a negative (wrapped) length to the
 : // original_string_or_invalid_length / runtime path.
6826 : TNode<IntPtrT> const substr_length = IntPtrSub(to, from);
6827 448 : TNode<IntPtrT> const string_length = LoadStringLengthAsWord(string);
6828 :
6829 : // Begin dispatching based on substring length.
6830 :
6831 448 : Label original_string_or_invalid_length(this);
6832 896 : GotoIf(UintPtrGreaterThanOrEqual(substr_length, string_length),
6833 448 : &original_string_or_invalid_length);
6834 :
6835 : // A real substring (substr_length < string_length).
6836 448 : Label empty(this);
6837 1344 : GotoIf(IntPtrEqual(substr_length, IntPtrConstant(0)), &empty);
6838 :
6839 448 : Label single_char(this);
6840 1344 : GotoIf(IntPtrEqual(substr_length, IntPtrConstant(1)), &single_char);
6841 :
6842 : // Deal with different string types: update the index if necessary
6843 : // and extract the underlying string.
6844 :
 : // TryToDirect unwraps cons/sliced/thin wrappers; it jumps to {runtime}
 : // if the subject cannot be made direct. {offset} is adjusted by the
 : // accumulated slice offset so it indexes into the direct string.
6845 448 : TNode<String> direct_string = to_direct.TryToDirect(&runtime);
6846 : TNode<IntPtrT> offset = IntPtrAdd(from, to_direct.offset());
6847 : Node* const instance_type = to_direct.instance_type();
6848 :
6849 : // The subject string can only be external or sequential string of either
6850 : // encoding at this point.
6851 448 : Label external_string(this);
6852 : {
6853 : if (FLAG_string_slices) {
6854 448 : Label next(this);
6855 :
6856 : // Short slice. Copy instead of slicing.
6857 896 : GotoIf(IntPtrLessThan(substr_length,
6858 896 : IntPtrConstant(SlicedString::kMinLength)),
6859 448 : &next);
6860 :
6861 : // Allocate new sliced string.
6862 :
6863 448 : Counters* counters = isolate()->counters();
6864 448 : IncrementCounter(counters->sub_string_native(), 1);
6865 :
6866 448 : Label one_byte_slice(this), two_byte_slice(this);
6867 896 : Branch(IsOneByteStringInstanceType(to_direct.instance_type()),
6868 448 : &one_byte_slice, &two_byte_slice);
6869 :
6870 : BIND(&one_byte_slice);
6871 : {
6872 896 : var_result = AllocateSlicedOneByteString(
6873 896 : Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6874 : SmiTag(offset));
6875 448 : Goto(&end);
6876 : }
6877 :
6878 : BIND(&two_byte_slice);
6879 : {
6880 896 : var_result = AllocateSlicedTwoByteString(
6881 896 : Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6882 : SmiTag(offset));
6883 448 : Goto(&end);
6884 : }
6885 :
6886 : BIND(&next);
6887 : }
6888 :
6889 : // The subject string can only be external or sequential string of either
6890 : // encoding at this point.
6891 448 : GotoIf(to_direct.is_external(), &external_string);
6892 :
6893 448 : var_result = AllocAndCopyStringCharacters(direct_string, instance_type,
6894 : offset, substr_length);
6895 :
6896 448 : Counters* counters = isolate()->counters();
6897 448 : IncrementCounter(counters->sub_string_native(), 1);
6898 :
6899 448 : Goto(&end);
6900 : }
6901 :
6902 : // Handle external string.
6903 : BIND(&external_string);
6904 : {
 : // PointerToString yields an address usable like a sequential string
 : // header (bails to {runtime} for uncached external strings), so the
 : // same copy helper works for both sequential and external subjects.
6905 : Node* const fake_sequential_string = to_direct.PointerToString(&runtime);
6906 :
6907 448 : var_result = AllocAndCopyStringCharacters(
6908 : fake_sequential_string, instance_type, offset, substr_length);
6909 :
6910 448 : Counters* counters = isolate()->counters();
6911 448 : IncrementCounter(counters->sub_string_native(), 1);
6912 :
6913 448 : Goto(&end);
6914 : }
6915 :
6916 : BIND(&empty);
6917 : {
6918 : var_result = EmptyStringConstant();
6919 448 : Goto(&end);
6920 : }
6921 :
6922 : // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
6923 : BIND(&single_char);
6924 : {
6925 448 : TNode<Int32T> char_code = StringCharCodeAt(string, from);
6926 448 : var_result = StringFromSingleCharCode(char_code);
6927 448 : Goto(&end);
6928 : }
6929 :
6930 : BIND(&original_string_or_invalid_length);
6931 : {
6932 : CSA_ASSERT(this, IntPtrEqual(substr_length, string_length));
6933 :
6934 : // Equal length - check if {from, to} == {0, str.length}.
6935 1344 : GotoIf(UintPtrGreaterThan(from, IntPtrConstant(0)), &runtime);
6936 :
6937 : // Return the original string (substr_length == string_length).
6938 :
6939 448 : Counters* counters = isolate()->counters();
6940 448 : IncrementCounter(counters->sub_string_native(), 1);
6941 :
6942 : var_result = string;
6943 448 : Goto(&end);
6944 : }
6945 :
6946 : // Fall back to a runtime call.
6947 : BIND(&runtime);
6948 : {
6949 : var_result =
6950 1344 : CAST(CallRuntime(Runtime::kStringSubstring, NoContextConstant(), string,
6951 : SmiTag(from), SmiTag(to)));
6952 448 : Goto(&end);
6953 : }
6954 :
6955 : BIND(&end);
6956 448 : return var_result.value();
6957 : }
6958 :
 : // Helper that unwraps a (possibly indirect) string to a "direct" one,
 : // i.e. a sequential or external string, tracking the accumulated slice
 : // offset along the way. Initial state: the given string, offset 0, its
 : // cached instance type, and is_external == false.
6959 4704 : ToDirectStringAssembler::ToDirectStringAssembler(
6960 : compiler::CodeAssemblerState* state, Node* string, Flags flags)
6961 : : CodeStubAssembler(state),
6962 : var_string_(this, MachineRepresentation::kTagged, string),
6963 : var_instance_type_(this, MachineRepresentation::kWord32),
6964 : var_offset_(this, MachineType::PointerRepresentation()),
6965 : var_is_external_(this, MachineRepresentation::kWord32),
6966 4704 : flags_(flags) {
 : // The helper only works on actual strings.
6967 : CSA_ASSERT(this, TaggedIsNotSmi(string));
6968 : CSA_ASSERT(this, IsString(string));
6969 :
6970 4704 : var_string_.Bind(string);
6971 9408 : var_offset_.Bind(IntPtrConstant(0));
 : // Cache the instance type so later dispatch avoids reloading the map.
6972 9408 : var_instance_type_.Bind(LoadInstanceType(string));
6973 9408 : var_is_external_.Bind(Int32Constant(0));
6974 4704 : }
6975 :
 : // Iteratively unwraps the tracked string until it is direct (sequential
 : // or external), updating var_string_/var_instance_type_/var_offset_ as
 : // it goes:
 : // - flat cons string (empty second part): step to the first part,
 : // - sliced string: step to the parent and add the slice offset
 : // (unless FLAG_string_slices is off or kDontUnpackSlicedStrings),
 : // - thin string: step to the actual string.
 : // Jumps to {if_bailout} for non-flat cons strings, for sliced strings
 : // when unpacking is disabled, and for unknown representations.
6976 4704 : TNode<String> ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
 : // These variables are re-bound on each trip through {dispatch}.
6977 9408 : VariableList vars({&var_string_, &var_offset_, &var_instance_type_}, zone());
6978 4704 : Label dispatch(this, vars);
6979 4704 : Label if_iscons(this);
6980 4704 : Label if_isexternal(this);
6981 4704 : Label if_issliced(this);
6982 4704 : Label if_isthin(this);
6983 4704 : Label out(this);
6984 :
 : // Fast path: already sequential, nothing to unwrap.
6985 14112 : Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
6986 4704 : &dispatch);
6987 :
6988 : // Dispatch based on string representation.
6989 : BIND(&dispatch);
6990 : {
6991 : int32_t values[] = {
6992 : kSeqStringTag, kConsStringTag, kExternalStringTag,
6993 : kSlicedStringTag, kThinStringTag,
6994 4704 : };
6995 : Label* labels[] = {
6996 : &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
6997 4704 : };
6998 : STATIC_ASSERT(arraysize(values) == arraysize(labels));
6999 :
7000 9408 : Node* const representation = Word32And(
7001 14112 : var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
7002 4704 : Switch(representation, if_bailout, values, labels, arraysize(values));
7003 : }
7004 :
7005 : // Cons string. Check whether it is flat, then fetch first part.
7006 : // Flat cons strings have an empty second part.
7007 : BIND(&if_iscons);
7008 : {
7009 4704 : Node* const string = var_string_.value();
 : // Non-flat cons strings cannot be unwrapped here; bail out.
7010 9408 : GotoIfNot(IsEmptyString(LoadObjectField(string, ConsString::kSecondOffset)),
7011 4704 : if_bailout);
7012 :
7013 : Node* const lhs = LoadObjectField(string, ConsString::kFirstOffset);
7014 4704 : var_string_.Bind(lhs);
7015 9408 : var_instance_type_.Bind(LoadInstanceType(lhs));
7016 :
7017 4704 : Goto(&dispatch);
7018 : }
7019 :
7020 : // Sliced string. Fetch parent and correct start index by offset.
7021 : BIND(&if_issliced);
7022 : {
7023 4704 : if (!FLAG_string_slices || (flags_ & kDontUnpackSlicedStrings)) {
7024 56 : Goto(if_bailout);
7025 : } else {
7026 4648 : Node* const string = var_string_.value();
7027 : Node* const sliced_offset =
7028 9296 : LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
 : // Accumulate slice offsets; parents may themselves be indirect.
7029 13944 : var_offset_.Bind(IntPtrAdd(var_offset_.value(), sliced_offset));
7030 :
7031 : Node* const parent = LoadObjectField(string, SlicedString::kParentOffset);
7032 4648 : var_string_.Bind(parent);
7033 9296 : var_instance_type_.Bind(LoadInstanceType(parent));
7034 :
7035 4648 : Goto(&dispatch);
7036 : }
7037 : }
7038 :
7039 : // Thin string. Fetch the actual string.
7040 : BIND(&if_isthin);
7041 : {
7042 4704 : Node* const string = var_string_.value();
7043 : Node* const actual_string =
7044 : LoadObjectField(string, ThinString::kActualOffset);
7045 9408 : Node* const actual_instance_type = LoadInstanceType(actual_string);
7046 :
7047 4704 : var_string_.Bind(actual_string);
7048 4704 : var_instance_type_.Bind(actual_instance_type);
7049 :
7050 4704 : Goto(&dispatch);
7051 : }
7052 :
7053 : // External string.
7054 : BIND(&if_isexternal);
 : // Record externality so TryToSequential can pick the right data pointer.
7055 9408 : var_is_external_.Bind(Int32Constant(1));
7056 4704 : Goto(&out);
7057 :
7058 : BIND(&out);
7059 9408 : return CAST(var_string_.value());
7060 : }
7061 :
 : // Returns a raw pointer into the (already direct) string:
 : // - PTR_TO_DATA: points at the first character,
 : // - PTR_TO_STRING: points where a sequential string header would be, so
 : // the result can be indexed like a sequential string.
 : // Works for sequential strings and cached external strings; jumps to
 : // {if_bailout} for uncached external strings (no data pointer cached).
7062 4704 : TNode<RawPtrT> ToDirectStringAssembler::TryToSequential(
7063 : StringPointerKind ptr_kind, Label* if_bailout) {
7064 4704 : CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
7065 :
7066 4704 : TVARIABLE(RawPtrT, var_result);
7067 4704 : Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
7068 4704 : Branch(is_external(), &if_isexternal, &if_issequential);
7069 :
7070 : BIND(&if_issequential);
7071 : {
 : // Both encodings share the header layout, so one offset suffices.
7072 : STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
7073 : SeqTwoByteString::kHeaderSize);
7074 9408 : TNode<IntPtrT> result = BitcastTaggedToWord(var_string_.value());
7075 4704 : if (ptr_kind == PTR_TO_DATA) {
7076 4256 : result = IntPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7077 : kHeapObjectTag));
7078 : }
7079 : var_result = ReinterpretCast<RawPtrT>(result);
7080 4704 : Goto(&out);
7081 : }
7082 :
7083 : BIND(&if_isexternal);
7084 : {
7085 9408 : GotoIf(IsUncachedExternalStringInstanceType(var_instance_type_.value()),
7086 4704 : if_bailout);
7087 :
7088 4704 : TNode<String> string = CAST(var_string_.value());
 : // The cached resource data pointer already points at the characters;
 : // for PTR_TO_STRING we rewind it to a fake sequential-string header.
7089 : TNode<IntPtrT> result =
7090 : LoadObjectField<IntPtrT>(string, ExternalString::kResourceDataOffset);
7091 4704 : if (ptr_kind == PTR_TO_STRING) {
7092 448 : result = IntPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7093 : kHeapObjectTag));
7094 : }
7095 : var_result = ReinterpretCast<RawPtrT>(result);
7096 4704 : Goto(&out);
7097 : }
7098 :
7099 : BIND(&out);
7100 4704 : return var_result.value();
7101 : }
7102 :
 : // Branches to {can_deref} if {string} is an indirect string that can be
 : // dereferenced in one step: thin strings always can, cons strings only
 : // when flat (empty second part). All other cases go to {cannot_deref}.
7103 1008 : void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string,
7104 : Node* instance_type,
7105 : Label* can_deref,
7106 : Label* cannot_deref) {
7107 : CSA_ASSERT(this, IsString(string));
7108 : Node* representation =
7109 3024 : Word32And(instance_type, Int32Constant(kStringRepresentationMask));
7110 3024 : GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref);
7111 3024 : GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)),
7112 1008 : cannot_deref);
7113 : // Cons string.
 : // A cons string is dereferenceable iff it is flat, i.e. its second
 : // part is the empty string.
7114 : Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
7115 2016 : GotoIf(IsEmptyString(rhs), can_deref);
7116 1008 : Goto(cannot_deref);
7117 1008 : }
7118 :
 : // Returns the directly referenced string of an indirect (thin or flat
 : // cons) string; jumps to {cannot_deref} if it cannot be dereferenced.
7119 0 : Node* CodeStubAssembler::DerefIndirectString(TNode<String> string,
7120 : TNode<Int32T> instance_type,
7121 : Label* cannot_deref) {
7122 0 : Label deref(this);
7123 0 : BranchIfCanDerefIndirectString(string, instance_type, &deref, cannot_deref);
7124 : BIND(&deref);
 : // Thin and cons strings store their referent at the same offset, so a
 : // single load handles both representations.
7125 : STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7126 : static_cast<int>(ConsString::kFirstOffset));
7127 0 : return LoadObjectField(string, ThinString::kActualOffset);
7128 : }
7129 :
 : // Unconditionally replaces {var_string} with its directly referenced
 : // string. Callers must have established that the string is a thin or
 : // flat cons string; debug builds verify this and break otherwise.
7130 1008 : void CodeStubAssembler::DerefIndirectString(Variable* var_string,
7131 : Node* instance_type) {
7132 : #ifdef DEBUG
7133 : Label can_deref(this), cannot_deref(this);
7134 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &can_deref,
7135 : &cannot_deref);
7136 : BIND(&cannot_deref);
7137 : DebugBreak(); // Should be able to dereference string.
7138 : Goto(&can_deref);
7139 : BIND(&can_deref);
7140 : #endif // DEBUG
7141 :
 : // Thin and cons strings store their referent at the same offset.
7142 : STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7143 : static_cast<int>(ConsString::kFirstOffset));
7144 : var_string->Bind(
7145 2016 : LoadObjectField(var_string->value(), ThinString::kActualOffset));
7146 1008 : }
7147 :
 : // If {var_string} is a dereferenceable indirect string, replaces it with
 : // its referent and jumps to {did_deref}; otherwise jumps to
 : // {cannot_deref} leaving {var_string} unchanged.
7148 1008 : void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
7149 : Node* instance_type,
7150 : Label* did_deref,
7151 : Label* cannot_deref) {
7152 2016 : Label deref(this);
7153 1008 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &deref,
7154 1008 : cannot_deref);
7155 :
7156 : BIND(&deref);
7157 : {
7158 1008 : DerefIndirectString(var_string, instance_type);
7159 1008 : Goto(did_deref);
7160 : }
7161 1008 : }
7162 :
 : // Attempts to dereference both strings; jumps to {did_something} if at
 : // least one of them was dereferenced, and falls through if neither was.
 : // Note the instance types are NOT refreshed after a deref; the jump
 : // target is expected to reload them.
7163 336 : void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
7164 : Node* left_instance_type,
7165 : Variable* var_right,
7166 : Node* right_instance_type,
7167 : Label* did_something) {
7168 672 : Label did_nothing_left(this), did_something_left(this),
7169 336 : didnt_do_anything(this);
7170 : MaybeDerefIndirectString(var_left, left_instance_type, &did_something_left,
7171 336 : &did_nothing_left);
7172 :
7173 : BIND(&did_something_left);
7174 : {
 : // Left was dereferenced; try right too, but go to {did_something}
 : // either way.
7175 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7176 336 : did_something);
7177 : }
7178 :
7179 : BIND(&did_nothing_left);
7180 : {
 : // Left untouched; {did_something} only if right was dereferenced.
7181 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7182 336 : &didnt_do_anything);
7183 : }
7184 :
7185 : BIND(&didnt_do_anything);
7186 : // Fall through if neither string was an indirect string.
7187 336 : }
7188 :
 : // Returns the concatenation {left} + {right}. Fast paths:
 : // - either operand empty: return the other one,
 : // - combined length >= ConsString::kMinLength: allocate a ConsString,
 : // - both operands sequential with the same encoding: allocate a fresh
 : // sequential string and copy both character ranges into it.
 : // Indirect operands are unwrapped once and the flat attempt retried.
 : // Everything else — including lengths beyond String::kMaxLength, which
 : // must go through the runtime so the string-length protector can be
 : // invalidated before throwing — calls Runtime::kStringAdd.
7189 56 : TNode<String> CodeStubAssembler::StringAdd(Node* context, TNode<String> left,
7190 : TNode<String> right) {
7191 56 : TVARIABLE(String, result);
7192 56 : Label check_right(this), runtime(this, Label::kDeferred), cons(this),
7193 56 : done(this, &result), done_native(this, &result);
7194 56 : Counters* counters = isolate()->counters();
7195 :
7196 : TNode<Uint32T> left_length = LoadStringLengthAsWord32(left);
7197 112 : GotoIfNot(Word32Equal(left_length, Uint32Constant(0)), &check_right);
7198 : result = right;
7199 56 : Goto(&done_native);
7200 :
7201 : BIND(&check_right);
7202 : TNode<Uint32T> right_length = LoadStringLengthAsWord32(right);
7203 112 : GotoIfNot(Word32Equal(right_length, Uint32Constant(0)), &cons);
7204 : result = left;
7205 56 : Goto(&done_native);
7206 :
7207 : BIND(&cons);
7208 : {
7209 : TNode<Uint32T> new_length = Uint32Add(left_length, right_length);
7210 :
7211 : // If new length is greater than String::kMaxLength, goto runtime to
7212 : // throw. Note: we also need to invalidate the string length protector, so
7213 : // can't just throw here directly.
7214 112 : GotoIf(Uint32GreaterThan(new_length, Uint32Constant(String::kMaxLength)),
7215 56 : &runtime);
7216 :
 : // var_left/var_right may be rebound to dereferenced strings when the
 : // slow path retries via {non_cons}.
7217 : TVARIABLE(String, var_left, left);
7218 : TVARIABLE(String, var_right, right);
7219 56 : Variable* input_vars[2] = {&var_left, &var_right};
7220 112 : Label non_cons(this, 2, input_vars);
7221 56 : Label slow(this, Label::kDeferred);
7222 112 : GotoIf(Uint32LessThan(new_length, Uint32Constant(ConsString::kMinLength)),
7223 56 : &non_cons);
7224 :
7225 56 : result =
7226 : AllocateConsString(new_length, var_left.value(), var_right.value());
7227 56 : Goto(&done_native);
7228 :
7229 : BIND(&non_cons);
7230 :
7231 56 : Comment("Full string concatenate");
7232 112 : Node* left_instance_type = LoadInstanceType(var_left.value());
7233 112 : Node* right_instance_type = LoadInstanceType(var_right.value());
7234 : // Compute intersection and difference of instance types.
7235 :
7236 : Node* ored_instance_types =
7237 112 : Word32Or(left_instance_type, right_instance_type);
7238 : Node* xored_instance_types =
7239 112 : Word32Xor(left_instance_type, right_instance_type);
7240 :
7241 : // Check if both strings have the same encoding and both are sequential.
 : // Mixed encodings go straight to the runtime; non-sequential
 : // representations try the deref-and-retry slow path first.
7242 112 : GotoIf(IsSetWord32(xored_instance_types, kStringEncodingMask), &runtime);
7243 112 : GotoIf(IsSetWord32(ored_instance_types, kStringRepresentationMask), &slow);
7244 :
7245 112 : TNode<IntPtrT> word_left_length = Signed(ChangeUint32ToWord(left_length));
7246 112 : TNode<IntPtrT> word_right_length = Signed(ChangeUint32ToWord(right_length));
7247 :
7248 56 : Label two_byte(this);
7249 224 : GotoIf(Word32Equal(Word32And(ored_instance_types,
7250 112 : Int32Constant(kStringEncodingMask)),
7251 168 : Int32Constant(kTwoByteStringTag)),
7252 56 : &two_byte);
7253 : // One-byte sequential string case
7254 112 : result = AllocateSeqOneByteString(context, new_length);
7255 56 : CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7256 : IntPtrConstant(0), word_left_length,
7257 56 : String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7258 56 : CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7259 : word_left_length, word_right_length,
7260 56 : String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7261 56 : Goto(&done_native);
7262 :
7263 : BIND(&two_byte);
7264 : {
7265 : // Two-byte sequential string case
7266 112 : result = AllocateSeqTwoByteString(context, new_length);
7267 56 : CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7268 : IntPtrConstant(0), word_left_length,
7269 : String::TWO_BYTE_ENCODING,
7270 56 : String::TWO_BYTE_ENCODING);
7271 56 : CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7272 : word_left_length, word_right_length,
7273 : String::TWO_BYTE_ENCODING,
7274 56 : String::TWO_BYTE_ENCODING);
7275 56 : Goto(&done_native);
7276 : }
7277 :
7278 : BIND(&slow);
7279 : {
7280 : // Try to unwrap indirect strings, restart the above attempt on success.
7281 : MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
7282 56 : right_instance_type, &non_cons);
7283 56 : Goto(&runtime);
7284 : }
7285 : }
7286 : BIND(&runtime);
7287 : {
7288 : result = CAST(CallRuntime(Runtime::kStringAdd, context, left, right));
7289 56 : Goto(&done);
7290 : }
7291 :
7292 : BIND(&done_native);
7293 : {
 : // Only native (non-runtime) paths bump the counter.
7294 56 : IncrementCounter(counters->string_add_native(), 1);
7295 56 : Goto(&done);
7296 : }
7297 :
7298 : BIND(&done);
7299 56 : return result.value();
7300 : }
7301 :
 : // Returns a one-character string for a BMP code point (< 0x10000), or a
 : // two-character (surrogate pair) SeqTwoByteString otherwise. For
 : // UnicodeEncoding::UTF16 the supplementary {codepoint} is expected to
 : // already hold trail<<16|lead; for UTF32 it is converted here.
7302 112 : TNode<String> CodeStubAssembler::StringFromSingleCodePoint(
7303 : TNode<Int32T> codepoint, UnicodeEncoding encoding) {
7304 224 : VARIABLE(var_result, MachineRepresentation::kTagged, EmptyStringConstant());
7305 :
7306 112 : Label if_isword16(this), if_isword32(this), return_result(this);
7307 :
7308 336 : Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
7309 112 : &if_isword32);
7310 :
7311 : BIND(&if_isword16);
7312 : {
7313 224 : var_result.Bind(StringFromSingleCharCode(codepoint));
7314 112 : Goto(&return_result);
7315 : }
7316 :
7317 : BIND(&if_isword32);
7318 : {
7319 112 : switch (encoding) {
7320 : case UnicodeEncoding::UTF16:
7321 : break;
7322 : case UnicodeEncoding::UTF32: {
7323 : // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
 : // LEAD_OFFSET folds the -0x10000 bias into the lead-surrogate base.
7324 0 : Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
7325 :
7326 : // lead = (codepoint >> 10) + LEAD_OFFSET
7327 : Node* lead =
7328 0 : Int32Add(Word32Shr(codepoint, Int32Constant(10)), lead_offset);
7329 :
7330 : // trail = (codepoint & 0x3FF) + 0xDC00;
7331 0 : Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
7332 0 : Int32Constant(0xDC00));
7333 :
7334 : // codpoint = (trail << 16) | lead;
7335 0 : codepoint = Signed(Word32Or(Word32Shl(trail, Int32Constant(16)), lead));
7336 0 : break;
7337 : }
7338 : }
7339 :
 : // Store both UTF-16 code units with a single 32-bit write into the
 : // freshly allocated 2-character two-byte string.
7340 224 : Node* value = AllocateSeqTwoByteString(2);
7341 : StoreNoWriteBarrier(
7342 : MachineRepresentation::kWord32, value,
7343 224 : IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
7344 112 : codepoint);
7345 112 : var_result.Bind(value);
7346 112 : Goto(&return_result);
7347 : }
7348 :
7349 : BIND(&return_result);
7350 224 : return CAST(var_result.value());
7351 : }
7352 :
 : // Converts a String to a Number. Fast path: if the string's hash field
 : // caches an array index, return it as a Smi; otherwise call
 : // Runtime::kStringToNumber.
7353 900 : TNode<Number> CodeStubAssembler::StringToNumber(TNode<String> input) {
7354 1800 : Label runtime(this, Label::kDeferred);
7355 900 : Label end(this);
7356 :
7357 : TVARIABLE(Number, var_result);
7358 :
7359 : // Check if string has a cached array index.
7360 : TNode<Uint32T> hash = LoadNameHashField(input);
7361 1800 : GotoIf(IsSetWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
7362 900 : &runtime);
7363 :
 : // The array index is packed into the hash field; decode and Smi-tag it.
7364 : var_result =
7365 2700 : SmiTag(Signed(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
7366 900 : Goto(&end);
7367 :
7368 : BIND(&runtime);
7369 : {
7370 : var_result =
7371 : CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input));
7372 900 : Goto(&end);
7373 : }
7374 :
7375 : BIND(&end);
7376 900 : return var_result.value();
7377 : }
7378 :
 : // Converts a Number to a String via the isolate's number-string cache.
 : // Smis (and HeapNumbers normalizable to Smis) index the cache directly;
 : // other HeapNumbers hash the two 32-bit halves of the double. On any
 : // cache miss, falls back to Runtime::kNumberToString.
7379 340 : TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
7380 340 : TVARIABLE(String, result);
7381 : TVARIABLE(Smi, smi_input);
7382 340 : Label runtime(this, Label::kDeferred), if_smi(this), if_heap_number(this),
7383 340 : done(this, &result);
7384 :
7385 : // Load the number string cache.
7386 680 : Node* number_string_cache = LoadRoot(RootIndex::kNumberStringCache);
7387 :
7388 : // Make the hash mask from the length of the number string cache. It
7389 : // contains two elements (number and string) for each cache entry.
7390 : // TODO(ishell): cleanup mask handling.
7391 : Node* mask =
7392 680 : BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
7393 340 : TNode<IntPtrT> one = IntPtrConstant(1);
7394 680 : mask = IntPtrSub(mask, one);
7395 :
7396 680 : GotoIfNot(TaggedIsSmi(input), &if_heap_number);
7397 : smi_input = CAST(input);
7398 340 : Goto(&if_smi);
7399 :
7400 : BIND(&if_heap_number);
7401 : {
7402 : TNode<HeapNumber> heap_number_input = CAST(input);
7403 : // Try normalizing the HeapNumber.
7404 340 : TryHeapNumberToSmi(heap_number_input, smi_input, &if_smi);
7405 :
7406 : // Make a hash from the two 32-bit values of the double.
7407 : TNode<Int32T> low =
7408 : LoadObjectField<Int32T>(heap_number_input, HeapNumber::kValueOffset);
7409 : TNode<Int32T> high = LoadObjectField<Int32T>(
7410 : heap_number_input, HeapNumber::kValueOffset + kIntSize);
7411 340 : TNode<Word32T> hash = Word32Xor(low, high);
 : // Shift by one so {index} addresses (key, value) entry pairs.
7412 680 : TNode<WordT> word_hash = WordShl(ChangeInt32ToIntPtr(hash), one);
7413 : TNode<WordT> index =
7414 680 : WordAnd(word_hash, WordSar(mask, SmiShiftBitsConstant()));
7415 :
7416 : // Cache entry's key must be a heap number
7417 : Node* number_key =
7418 : UnsafeLoadFixedArrayElement(CAST(number_string_cache), index);
7419 680 : GotoIf(TaggedIsSmi(number_key), &runtime);
7420 680 : GotoIfNot(IsHeapNumber(number_key), &runtime);
7421 :
7422 : // Cache entry's key must match the heap number value we're looking for.
 : // Compare the raw bit halves rather than the doubles (exact match).
7423 340 : Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
7424 340 : MachineType::Int32());
7425 340 : Node* high_compare = LoadObjectField(
7426 340 : number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
7427 680 : GotoIfNot(Word32Equal(low, low_compare), &runtime);
7428 680 : GotoIfNot(Word32Equal(high, high_compare), &runtime);
7429 :
7430 : // Heap number match, return value from cache entry.
7431 : result = CAST(UnsafeLoadFixedArrayElement(CAST(number_string_cache), index,
7432 : kTaggedSize));
7433 340 : Goto(&done);
7434 : }
7435 :
7436 : BIND(&if_smi);
7437 : {
7438 : // Load the smi key, make sure it matches the smi we're looking for.
7439 680 : Node* smi_index = BitcastWordToTagged(
7440 1360 : WordAnd(WordShl(BitcastTaggedToWord(smi_input.value()), one), mask));
7441 : Node* smi_key = UnsafeLoadFixedArrayElement(CAST(number_string_cache),
7442 : smi_index, 0, SMI_PARAMETERS);
7443 340 : GotoIf(WordNotEqual(smi_key, smi_input.value()), &runtime);
7444 :
7445 : // Smi match, return value from cache entry.
7446 : result = CAST(UnsafeLoadFixedArrayElement(
7447 : CAST(number_string_cache), smi_index, kTaggedSize, SMI_PARAMETERS));
7448 340 : Goto(&done);
7449 : }
7450 :
7451 : BIND(&runtime);
7452 : {
7453 : // No cache entry, go to the runtime.
7454 : result =
7455 : CAST(CallRuntime(Runtime::kNumberToString, NoContextConstant(), input));
7456 340 : Goto(&done);
7457 : }
7458 : BIND(&done);
7459 340 : return result.value();
7460 : }
7461 :
 : // Shared implementation of ToNumber / ToNumeric for a HeapObject {input}
 : // that is already known to be neither a Smi nor a HeapNumber.
 : // - String: via StringToNumber.
 : // - BigInt: returned as-is for kToNumeric; for kToNumber either thrown
 : // in the runtime (kThrow) or converted (kConvertToNumber).
 : // - Oddball: its cached to-number value.
 : // - JSReceiver: ToPrimitive(hint Number), then loop on the result.
 : // - anything else: Runtime::kToNumber / kToNumeric, which raises the
 : // appropriate exception.
7462 844 : Node* CodeStubAssembler::NonNumberToNumberOrNumeric(
7463 : Node* context, Node* input, Object::Conversion mode,
7464 : BigIntHandling bigint_handling) {
7465 : CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
7466 : CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
7467 :
7468 : // We might need to loop once here due to ToPrimitive conversions.
7469 1688 : VARIABLE(var_input, MachineRepresentation::kTagged, input);
7470 1688 : VARIABLE(var_result, MachineRepresentation::kTagged);
7471 844 : Label loop(this, &var_input);
7472 844 : Label end(this);
7473 844 : Goto(&loop);
7474 : BIND(&loop);
7475 : {
7476 : // Load the current {input} value (known to be a HeapObject).
7477 844 : Node* input = var_input.value();
7478 :
7479 : // Dispatch on the {input} instance type.
7480 1688 : Node* input_instance_type = LoadInstanceType(input);
7481 844 : Label if_inputisstring(this), if_inputisoddball(this),
7482 844 : if_inputisbigint(this), if_inputisreceiver(this, Label::kDeferred),
7483 844 : if_inputisother(this, Label::kDeferred);
7484 1688 : GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
7485 844 : GotoIf(IsBigIntInstanceType(input_instance_type), &if_inputisbigint);
7486 1688 : GotoIf(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE),
7487 844 : &if_inputisoddball);
7488 1688 : Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
7489 844 : &if_inputisother);
7490 :
7491 : BIND(&if_inputisstring);
7492 : {
7493 : // The {input} is a String, use the fast stub to convert it to a Number.
7494 844 : TNode<String> string_input = CAST(input);
7495 1688 : var_result.Bind(StringToNumber(string_input));
7496 844 : Goto(&end);
7497 : }
7498 :
7499 : BIND(&if_inputisbigint);
7500 844 : if (mode == Object::Conversion::kToNumeric) {
 : // BigInt is already a Numeric; pass it through unchanged.
7501 112 : var_result.Bind(input);
7502 112 : Goto(&end);
7503 : } else {
7504 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
7505 732 : if (bigint_handling == BigIntHandling::kThrow) {
 : // Let the runtime raise the BigInt-to-Number TypeError.
7506 620 : Goto(&if_inputisother);
7507 : } else {
7508 : DCHECK_EQ(bigint_handling, BigIntHandling::kConvertToNumber);
7509 112 : var_result.Bind(CallRuntime(Runtime::kBigIntToNumber, context, input));
7510 112 : Goto(&end);
7511 : }
7512 : }
7513 :
7514 : BIND(&if_inputisoddball);
7515 : {
7516 : // The {input} is an Oddball, we just need to load the Number value of it.
7517 844 : var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
7518 844 : Goto(&end);
7519 : }
7520 :
7521 : BIND(&if_inputisreceiver);
7522 : {
7523 : // The {input} is a JSReceiver, we need to convert it to a Primitive first
7524 : // using the ToPrimitive type conversion, preferably yielding a Number.
7525 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
7526 844 : isolate(), ToPrimitiveHint::kNumber);
7527 1688 : Node* result = CallStub(callable, context, input);
7528 :
7529 : // Check if the {result} is already a Number/Numeric.
7530 844 : Label if_done(this), if_notdone(this);
7531 2532 : Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
7532 : : IsNumeric(result),
7533 844 : &if_done, &if_notdone);
7534 :
7535 : BIND(&if_done);
7536 : {
7537 : // The ToPrimitive conversion already gave us a Number/Numeric, so we're
7538 : // done.
7539 844 : var_result.Bind(result);
7540 844 : Goto(&end);
7541 : }
7542 :
7543 : BIND(&if_notdone);
7544 : {
7545 : // We now have a Primitive {result}, but it's not yet a Number/Numeric.
 : // Loop once more; a primitive never re-enters the receiver case, so
 : // this terminates.
7546 844 : var_input.Bind(result);
7547 844 : Goto(&loop);
7548 : }
7549 : }
7550 :
7551 : BIND(&if_inputisother);
7552 : {
7553 : // The {input} is something else (e.g. Symbol), let the runtime figure
7554 : // out the correct exception.
7555 : // Note: We cannot tail call to the runtime here, as js-to-wasm
7556 : // trampolines also use this code currently, and they declare all
7557 : // outgoing parameters as untagged, while we would push a tagged
7558 : // object here.
7559 : auto function_id = mode == Object::Conversion::kToNumber
7560 : ? Runtime::kToNumber
7561 844 : : Runtime::kToNumeric;
7562 844 : var_result.Bind(CallRuntime(function_id, context, input));
7563 844 : Goto(&end);
7564 : }
7565 : }
7566 :
7567 : BIND(&end);
7568 : if (mode == Object::Conversion::kToNumeric) {
7569 : CSA_ASSERT(this, IsNumeric(var_result.value()));
7570 : } else {
7571 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
7572 : CSA_ASSERT(this, IsNumber(var_result.value()));
7573 : }
7574 1688 : return var_result.value();
7575 : }
7576 :
 : // ToNumber for a non-Smi, non-HeapNumber input; thin wrapper over
 : // NonNumberToNumberOrNumeric in kToNumber mode.
7577 56 : TNode<Number> CodeStubAssembler::NonNumberToNumber(
7578 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input,
7579 : BigIntHandling bigint_handling) {
7580 732 : return CAST(NonNumberToNumberOrNumeric(
7581 : context, input, Object::Conversion::kToNumber, bigint_handling));
7582 : }
7583 :
 : // ToNumeric for a non-Smi, non-HeapNumber input; thin wrapper over
 : // NonNumberToNumberOrNumeric in kToNumeric mode (BigInts pass through).
7584 112 : TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(
7585 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input) {
7586 : Node* result = NonNumberToNumberOrNumeric(context, input,
7587 112 : Object::Conversion::kToNumeric);
7588 : CSA_SLOW_ASSERT(this, IsNumeric(result));
7589 112 : return UncheckedCast<Numeric>(result);
7590 : }
7591 :
 : // ToNumber with the Smi and HeapNumber cases handled inline; everything
 : // else goes through the NonNumberToNumber builtin (smaller code than the
 : // full ToNumber below, at the cost of a builtin call on the slow path).
7592 616 : TNode<Number> CodeStubAssembler::ToNumber_Inline(SloppyTNode<Context> context,
7593 : SloppyTNode<Object> input) {
7594 616 : TVARIABLE(Number, var_result);
7595 616 : Label end(this), not_smi(this, Label::kDeferred);
7596 :
7597 1232 : GotoIfNot(TaggedIsSmi(input), &not_smi);
7598 : var_result = CAST(input);
7599 616 : Goto(&end);
7600 :
7601 : BIND(&not_smi);
7602 : {
7603 2464 : var_result =
7604 1232 : Select<Number>(IsHeapNumber(CAST(input)), [=] { return CAST(input); },
7605 : [=] {
7606 616 : return CAST(CallBuiltin(Builtins::kNonNumberToNumber,
7607 : context, input));
7608 : });
7609 616 : Goto(&end);
7610 : }
7611 :
7612 : BIND(&end);
7613 616 : return var_result.value();
7614 : }
7615 :
 : // Full ToNumber: returns {input} unchanged if it is a Smi or HeapNumber,
 : // otherwise delegates to NonNumberToNumber (with the given BigInt
 : // handling). Both non-number paths are marked deferred.
7616 676 : TNode<Number> CodeStubAssembler::ToNumber(SloppyTNode<Context> context,
7617 : SloppyTNode<Object> input,
7618 : BigIntHandling bigint_handling) {
7619 676 : TVARIABLE(Number, var_result);
7620 676 : Label end(this);
7621 :
7622 676 : Label not_smi(this, Label::kDeferred);
7623 1352 : GotoIfNot(TaggedIsSmi(input), &not_smi);
7624 : TNode<Smi> input_smi = CAST(input);
7625 : var_result = input_smi;
7626 676 : Goto(&end);
7627 :
7628 : BIND(&not_smi);
7629 : {
7630 676 : Label not_heap_number(this, Label::kDeferred);
7631 : TNode<HeapObject> input_ho = CAST(input);
7632 1352 : GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
7633 :
7634 : TNode<HeapNumber> input_hn = CAST(input_ho);
7635 : var_result = input_hn;
7636 676 : Goto(&end);
7637 :
7638 : BIND(&not_heap_number);
7639 : {
7640 : var_result = NonNumberToNumber(context, input_ho, bigint_handling);
7641 676 : Goto(&end);
7642 : }
7643 : }
7644 :
7645 : BIND(&end);
7646 676 : return var_result.value();
7647 : }
7648 :
 : // ToBigInt: throws a TypeError for Smis, returns BigInt inputs as-is,
 : // and converts everything else via Runtime::kToBigInt (which may itself
 : // throw for unconvertible values).
7649 1568 : TNode<BigInt> CodeStubAssembler::ToBigInt(SloppyTNode<Context> context,
7650 : SloppyTNode<Object> input) {
7651 1568 : TVARIABLE(BigInt, var_result);
7652 1568 : Label if_bigint(this), done(this), if_throw(this);
7653 :
 : // Numbers (including Smis) cannot be converted to BigInt implicitly.
7654 3136 : GotoIf(TaggedIsSmi(input), &if_throw);
7655 3136 : GotoIf(IsBigInt(CAST(input)), &if_bigint);
7656 : var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input));
7657 1568 : Goto(&done);
7658 :
7659 : BIND(&if_bigint);
7660 : var_result = CAST(input);
7661 1568 : Goto(&done);
7662 :
7663 : BIND(&if_throw);
7664 1568 : ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
7665 :
7666 : BIND(&done);
7667 1568 : return var_result.value();
7668 : }
7669 :
 : // TaggedToNumeric without feedback collection; forwards to the full
 : // overload with a null feedback variable.
7670 336 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7671 : Variable* var_numeric) {
7672 336 : TaggedToNumeric(context, value, done, var_numeric, nullptr);
7673 336 : }
7674 :
 : // TaggedToNumeric that also records binary-op type feedback into
 : // {var_feedback} (must be non-null).
7675 1008 : void CodeStubAssembler::TaggedToNumericWithFeedback(Node* context, Node* value,
7676 : Label* done,
7677 : Variable* var_numeric,
7678 : Variable* var_feedback) {
7679 : DCHECK_NOT_NULL(var_feedback);
7680 1008 : TaggedToNumeric(context, value, done, var_numeric, var_feedback);
7681 1008 : }
7682 :
 : // Converts {value} to a Numeric in {var_numeric} and jumps to {done}.
 : // Smis, HeapNumbers and BigInts pass through unchanged; Oddballs use
 : // their cached to-number value; anything else goes through the
 : // NonNumberToNumeric builtin. If {var_feedback} is non-null, it is set
 : // to the BinaryOperationFeedback constant matching the taken path.
7683 1344 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7684 : Variable* var_numeric,
7685 : Variable* var_feedback) {
 : // Default: pass the value through unchanged (correct for Smi,
 : // HeapNumber and BigInt; overwritten on the other paths).
7686 1344 : var_numeric->Bind(value);
7687 2688 : Label if_smi(this), if_heapnumber(this), if_bigint(this), if_oddball(this);
7688 2688 : GotoIf(TaggedIsSmi(value), &if_smi);
7689 : Node* map = LoadMap(value);
7690 2688 : GotoIf(IsHeapNumberMap(map), &if_heapnumber);
7691 : Node* instance_type = LoadMapInstanceType(map);
7692 1344 : GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
7693 :
7694 : // {value} is not a Numeric yet.
7695 4032 : GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
7696 2688 : var_numeric->Bind(CallBuiltin(Builtins::kNonNumberToNumeric, context, value));
7697 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
7698 1344 : Goto(done);
7699 :
7700 : BIND(&if_smi);
7701 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
7702 1344 : Goto(done);
7703 :
7704 : BIND(&if_heapnumber);
7705 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumber);
7706 1344 : Goto(done);
7707 :
7708 : BIND(&if_bigint);
7709 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
7710 1344 : Goto(done);
7711 :
7712 : BIND(&if_oddball);
7713 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumberOrOddball);
7714 1344 : var_numeric->Bind(LoadObjectField(value, Oddball::kToNumberOffset));
7715 1344 : Goto(done);
7716 1344 : }
7717 :
7718 : // ES#sec-touint32
     : // Converts {input} to an integral Number in [0, 2^32). Positive Smis are
     : // returned as-is; otherwise {input} is first coerced via ToNumber, then
     : // +-0/NaN/+-Infinity map to 0 and other doubles are truncated modulo 2^32.
7719 60 : TNode<Number> CodeStubAssembler::ToUint32(SloppyTNode<Context> context,
7720 : SloppyTNode<Object> input) {
7721 120 : Node* const float_zero = Float64Constant(0.0);
7722 120 : Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
7723 :
7724 60 : Label out(this);
7725 :
7726 120 : VARIABLE(var_result, MachineRepresentation::kTagged, input);
7727 :
7728 : // Early exit for positive smis.
7729 : {
7730 : // TODO(jgruber): This branch and the recheck below can be removed once we
7731 : // have a ToNumber with multiple exits.
7732 60 : Label next(this, Label::kDeferred);
7733 120 : Branch(TaggedIsPositiveSmi(input), &out, &next);
7734 : BIND(&next);
7735 : }
7736 :
7737 120 : Node* const number = ToNumber(context, input);
7738 60 : var_result.Bind(number);
7739 :
7740 : // Perhaps we have a positive smi now.
7741 : {
7742 60 : Label next(this, Label::kDeferred);
7743 120 : Branch(TaggedIsPositiveSmi(number), &out, &next);
7744 : BIND(&next);
7745 : }
7746 :
7747 60 : Label if_isnegativesmi(this), if_isheapnumber(this);
7748 120 : Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
7749 :
7750 : BIND(&if_isnegativesmi);
7751 : {
     : // Reinterpret the negative int32 as uint32 (i.e. add 2^32) by going
     : // through an unsigned-to-float conversion; result needs a HeapNumber.
7752 120 : Node* const uint32_value = SmiToInt32(number);
7753 120 : Node* float64_value = ChangeUint32ToFloat64(uint32_value);
7754 120 : var_result.Bind(AllocateHeapNumberWithValue(float64_value));
7755 60 : Goto(&out);
7756 : }
7757 :
7758 : BIND(&if_isheapnumber);
7759 : {
7760 60 : Label return_zero(this);
7761 : Node* const value = LoadHeapNumberValue(number);
7762 :
7763 : {
7764 : // +-0.
7765 60 : Label next(this);
7766 120 : Branch(Float64Equal(value, float_zero), &return_zero, &next);
7767 : BIND(&next);
7768 : }
7769 :
7770 : {
7771 : // NaN.
     : // NaN is the only value not equal to itself.
7772 60 : Label next(this);
7773 120 : Branch(Float64Equal(value, value), &next, &return_zero);
7774 : BIND(&next);
7775 : }
7776 :
7777 : {
7778 : // +Infinity.
7779 60 : Label next(this);
7780 : Node* const positive_infinity =
7781 120 : Float64Constant(std::numeric_limits<double>::infinity());
7782 120 : Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
7783 : BIND(&next);
7784 : }
7785 :
7786 : {
7787 : // -Infinity.
7788 60 : Label next(this);
7789 : Node* const negative_infinity =
7790 120 : Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
7791 120 : Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
7792 : BIND(&next);
7793 : }
7794 :
7795 : // * Let int be the mathematical value that is the same sign as number and
7796 : // whose magnitude is floor(abs(number)).
7797 : // * Let int32bit be int modulo 2^32.
7798 : // * Return int32bit.
7799 : {
     : // Double mod/add/mod sequence maps negative remainders into [0, 2^32).
7800 120 : Node* x = Float64Trunc(value);
7801 120 : x = Float64Mod(x, float_two_32);
7802 120 : x = Float64Add(x, float_two_32);
7803 120 : x = Float64Mod(x, float_two_32);
7804 :
7805 120 : Node* const result = ChangeFloat64ToTagged(x);
7806 60 : var_result.Bind(result);
7807 60 : Goto(&out);
7808 : }
7809 :
7810 : BIND(&return_zero);
7811 : {
7812 120 : var_result.Bind(SmiConstant(0));
7813 60 : Goto(&out);
7814 : }
7815 : }
7816 :
7817 : BIND(&out);
7818 120 : return CAST(var_result.value());
7819 : }
7820 :
     : // ES ToString: converts {input} to a String. Strings pass through;
     : // Numbers/Oddballs convert directly; Symbols throw a TypeError;
     : // JSReceivers are first reduced to a primitive (hint String) and the
     : // loop re-dispatches on the primitive result; the rest hit the runtime.
7821 172 : TNode<String> CodeStubAssembler::ToString(SloppyTNode<Context> context,
7822 : SloppyTNode<Object> input) {
7823 172 : TVARIABLE(Object, result, input);
7824 172 : Label loop(this, &result), done(this);
7825 172 : Goto(&loop);
7826 : BIND(&loop);
7827 : {
7828 : // Load the current {input} value.
7829 : TNode<Object> input = result.value();
7830 :
7831 : // Dispatch based on the type of the {input.}
7832 172 : Label if_inputisnumber(this), if_inputisoddball(this),
7833 172 : if_inputissymbol(this), if_inputisreceiver(this, Label::kDeferred),
7834 172 : runtime(this, Label::kDeferred);
7835 344 : GotoIf(TaggedIsSmi(input), &if_inputisnumber);
7836 172 : TNode<Int32T> input_instance_type = LoadInstanceType(CAST(input));
7837 344 : GotoIf(IsStringInstanceType(input_instance_type), &done);
7838 344 : GotoIf(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver);
7839 172 : GotoIf(IsHeapNumberInstanceType(input_instance_type), &if_inputisnumber);
7840 172 : GotoIf(IsOddballInstanceType(input_instance_type), &if_inputisoddball);
7841 172 : Branch(IsSymbolInstanceType(input_instance_type), &if_inputissymbol,
7842 172 : &runtime);
7843 :
7844 : BIND(&if_inputisnumber);
7845 : {
7846 : // Convert the Number {input} to a String.
7847 172 : TNode<Number> number_input = CAST(input);
7848 344 : result = NumberToString(number_input);
7849 172 : Goto(&done);
7850 : }
7851 :
7852 : BIND(&if_inputisoddball);
7853 : {
7854 : // Just return the {input}'s string representation.
7855 : result = LoadObjectField(CAST(input), Oddball::kToStringOffset);
7856 172 : Goto(&done);
7857 : }
7858 :
7859 : BIND(&if_inputissymbol);
7860 : {
7861 : // Throw a type error when {input} is a Symbol.
7862 : ThrowTypeError(context, MessageTemplate::kSymbolToString);
7863 : }
7864 :
7865 : BIND(&if_inputisreceiver);
7866 : {
7867 : // Convert the JSReceiver {input} to a primitive first,
7868 : // and then run the loop again with the new {input},
7869 : // which is then a primitive value.
7870 344 : result = CallBuiltin(Builtins::kNonPrimitiveToPrimitive_String, context,
7871 : input);
7872 172 : Goto(&loop);
7873 : }
7874 :
7875 : BIND(&runtime);
7876 : {
7877 : result = CallRuntime(Runtime::kToString, context, input);
7878 172 : Goto(&done);
7879 : }
7880 : }
7881 :
7882 : BIND(&done);
7883 172 : return CAST(result.value());
7884 : }
7885 :
     : // Inline fast path for ToString: values that are already Strings are
     : // returned directly; everything else takes the deferred call to the
     : // kToString builtin.
7886 2800 : TNode<String> CodeStubAssembler::ToString_Inline(SloppyTNode<Context> context,
7887 : SloppyTNode<Object> input) {
7888 5600 : VARIABLE(var_result, MachineRepresentation::kTagged, input);
7889 2800 : Label stub_call(this, Label::kDeferred), out(this);
7890 :
7891 5600 : GotoIf(TaggedIsSmi(input), &stub_call);
7892 5600 : Branch(IsString(CAST(input)), &out, &stub_call);
7893 :
7894 : BIND(&stub_call);
7895 5600 : var_result.Bind(CallBuiltin(Builtins::kToString, context, input));
7896 2800 : Goto(&out);
7897 :
7898 : BIND(&out);
7899 5600 : return CAST(var_result.value());
7900 : }
7901 :
     : // Reduces {input} to a primitive: JSReceivers go through the
     : // NonPrimitiveToPrimitive stub (deferred path); primitives are
     : // returned unchanged.
7902 112 : Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
7903 224 : Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
7904 224 : VARIABLE(result, MachineRepresentation::kTagged);
7905 112 : Label done(this, &result);
7906 :
7907 112 : BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7908 :
7909 : BIND(&if_isreceiver);
7910 : {
7911 : // Convert {input} to a primitive first passing Number hint.
7912 112 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
7913 224 : result.Bind(CallStub(callable, context, input));
7914 112 : Goto(&done);
7915 : }
7916 :
7917 : BIND(&if_isnotreceiver);
7918 : {
7919 112 : result.Bind(input);
7920 112 : Goto(&done);
7921 : }
7922 :
7923 : BIND(&done);
7924 224 : return result.value();
7925 : }
7926 :
     : // ES ToObject: thin wrapper around the kToObject builtin (which handles
     : // wrapping primitives and throwing for undefined/null).
7927 224 : TNode<JSReceiver> CodeStubAssembler::ToObject(SloppyTNode<Context> context,
7928 : SloppyTNode<Object> input) {
7929 1904 : return CAST(CallBuiltin(Builtins::kToObject, context, input));
7930 : }
7931 :
     : // Inline fast path for ToObject: values that are already JSReceivers
     : // are returned directly; only non-receivers take the deferred call to
     : // the ToObject builtin.
7932 1680 : TNode<JSReceiver> CodeStubAssembler::ToObject_Inline(TNode<Context> context,
7933 : TNode<Object> input) {
7934 1680 : TVARIABLE(JSReceiver, result);
7935 1680 : Label if_isreceiver(this), if_isnotreceiver(this, Label::kDeferred);
7936 1680 : Label done(this);
7937 :
7938 1680 : BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7939 :
7940 : BIND(&if_isreceiver);
7941 : {
7942 : result = CAST(input);
7943 1680 : Goto(&done);
7944 : }
7945 :
7946 : BIND(&if_isnotreceiver);
7947 : {
7948 : result = ToObject(context, input);
7949 1680 : Goto(&done);
7950 : }
7951 :
7952 : BIND(&done);
7953 1680 : return result.value();
7954 : }
7955 :
     : // Converts {input} to a non-negative Smi index. undefined maps to 0;
     : // other values go through ToInteger (truncating -0). Jumps to
     : // {range_error} when the result is negative or does not fit in a Smi.
7956 560 : TNode<Smi> CodeStubAssembler::ToSmiIndex(TNode<Context> context,
7957 : TNode<Object> input,
7958 : Label* range_error) {
7959 560 : TVARIABLE(Smi, result);
7960 560 : Label check_undefined(this), return_zero(this), defined(this),
7961 560 : negative_check(this), done(this);
7962 :
7963 1120 : GotoIfNot(TaggedIsSmi(input), &check_undefined);
7964 : result = CAST(input);
7965 560 : Goto(&negative_check);
7966 :
7967 : BIND(&check_undefined);
7968 1120 : Branch(IsUndefined(input), &return_zero, &defined);
7969 :
7970 : BIND(&defined);
7971 : TNode<Number> integer_input =
7972 560 : CAST(CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
     : // A HeapNumber result here means the index is outside Smi range.
7973 1120 : GotoIfNot(TaggedIsSmi(integer_input), range_error);
7974 : result = CAST(integer_input);
7975 560 : Goto(&negative_check);
7976 :
7977 : BIND(&negative_check);
7978 1680 : Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);
7979 :
7980 : BIND(&return_zero);
7981 560 : result = SmiConstant(0);
7982 560 : Goto(&done);
7983 :
7984 : BIND(&done);
7985 560 : return result.value();
7986 : }
7987 :
     : // Converts {input} to a Smi length: ToInteger (truncating -0) followed
     : // by clamping negative values to 0. A non-Smi integer result that is
     : // negative clamps to 0; a non-negative one exceeds Smi range and jumps
     : // to {range_error}.
7988 168 : TNode<Smi> CodeStubAssembler::ToSmiLength(TNode<Context> context,
7989 : TNode<Object> input,
7990 : Label* range_error) {
7991 168 : TVARIABLE(Smi, result);
7992 168 : Label to_integer(this), negative_check(this),
7993 168 : heap_number_negative_check(this), return_zero(this), done(this);
7994 :
7995 336 : GotoIfNot(TaggedIsSmi(input), &to_integer);
7996 : result = CAST(input);
7997 168 : Goto(&negative_check);
7998 :
7999 : BIND(&to_integer);
8000 : {
8001 168 : TNode<Number> integer_input = CAST(
8002 : CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
8003 336 : GotoIfNot(TaggedIsSmi(integer_input), &heap_number_negative_check);
8004 : result = CAST(integer_input);
8005 168 : Goto(&negative_check);
8006 :
8007 : // integer_input can still be a negative HeapNumber here.
8008 : BIND(&heap_number_negative_check);
8009 168 : TNode<HeapNumber> heap_number_input = CAST(integer_input);
8010 672 : Branch(IsTrue(CallBuiltin(Builtins::kLessThan, context, heap_number_input,
8011 504 : SmiConstant(0))),
8012 168 : &return_zero, range_error);
8013 : }
8014 :
8015 : BIND(&negative_check);
8016 504 : Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
8017 :
8018 : BIND(&return_zero);
8019 168 : result = SmiConstant(0);
8020 168 : Goto(&done);
8021 :
8022 : BIND(&done);
8023 168 : return result.value();
8024 : }
8025 :
     : // Inline fast path for ES ToLength: Smi inputs are clamped to >= 0
     : // in place; everything else calls the kToLength builtin.
8026 1736 : TNode<Number> CodeStubAssembler::ToLength_Inline(SloppyTNode<Context> context,
8027 : SloppyTNode<Object> input) {
8028 1736 : TNode<Smi> smi_zero = SmiConstant(0);
8029 : return Select<Number>(
8030 5208 : TaggedIsSmi(input), [=] { return SmiMax(CAST(input), smi_zero); },
8031 6944 : [=] { return CAST(CallBuiltin(Builtins::kToLength, context, input)); });
8032 : }
8033 :
     : // Inline fast path for ToInteger: Smis are already integers and pass
     : // through; others call the ToInteger builtin (optionally the variant
     : // that truncates -0 to +0, selected by {mode}).
8034 3192 : TNode<Number> CodeStubAssembler::ToInteger_Inline(
8035 : SloppyTNode<Context> context, SloppyTNode<Object> input,
8036 : ToIntegerTruncationMode mode) {
8037 : Builtins::Name builtin = (mode == kNoTruncation)
8038 : ? Builtins::kToInteger
8039 3192 : : Builtins::kToInteger_TruncateMinusZero;
8040 : return Select<Number>(
8041 6384 : TaggedIsSmi(input), [=] { return CAST(input); },
8042 15960 : [=] { return CAST(CallBuiltin(builtin, context, input)); });
8043 : }
8044 :
     : // ES ToInteger: Smis pass through; HeapNumbers are truncated toward
     : // zero (NaN -> 0, and -0 -> 0 when {mode} == kTruncateMinusZero);
     : // non-numbers are first coerced via NonNumberToNumber and the loop
     : // re-dispatches on the result.
8045 112 : TNode<Number> CodeStubAssembler::ToInteger(SloppyTNode<Context> context,
8046 : SloppyTNode<Object> input,
8047 : ToIntegerTruncationMode mode) {
8048 : // We might need to loop once for ToNumber conversion.
8049 112 : TVARIABLE(Object, var_arg, input);
8050 112 : Label loop(this, &var_arg), out(this);
8051 112 : Goto(&loop);
8052 : BIND(&loop);
8053 : {
8054 : // Shared entry points.
8055 112 : Label return_zero(this, Label::kDeferred);
8056 :
8057 : // Load the current {arg} value.
8058 : TNode<Object> arg = var_arg.value();
8059 :
8060 : // Check if {arg} is a Smi.
8061 224 : GotoIf(TaggedIsSmi(arg), &out);
8062 :
8063 : // Check if {arg} is a HeapNumber.
8064 112 : Label if_argisheapnumber(this),
8065 112 : if_argisnotheapnumber(this, Label::kDeferred);
8066 224 : Branch(IsHeapNumber(CAST(arg)), &if_argisheapnumber,
8067 112 : &if_argisnotheapnumber);
8068 :
8069 : BIND(&if_argisheapnumber);
8070 : {
8071 : TNode<HeapNumber> arg_hn = CAST(arg);
8072 : // Load the floating-point value of {arg}.
8073 : Node* arg_value = LoadHeapNumberValue(arg_hn);
8074 :
8075 : // Check if {arg} is NaN.
8076 224 : GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);
8077 :
8078 : // Truncate {arg} towards zero.
8079 112 : TNode<Float64T> value = Float64Trunc(arg_value);
8080 :
8081 112 : if (mode == kTruncateMinusZero) {
8082 : // Truncate -0.0 to 0.
8083 168 : GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
8084 : }
8085 :
8086 224 : var_arg = ChangeFloat64ToTagged(value);
8087 112 : Goto(&out);
8088 : }
8089 :
8090 : BIND(&if_argisnotheapnumber);
8091 : {
8092 : // Need to convert {arg} to a Number first.
8093 224 : var_arg = UncheckedCast<Object>(
8094 : CallBuiltin(Builtins::kNonNumberToNumber, context, arg));
8095 112 : Goto(&loop);
8096 : }
8097 :
8098 : BIND(&return_zero);
8099 224 : var_arg = SmiConstant(0);
8100 112 : Goto(&out);
8101 : }
8102 :
8103 : BIND(&out);
8104 : if (mode == kTruncateMinusZero) {
8105 : CSA_ASSERT(this, IsNumberNormalized(CAST(var_arg.value())));
8106 : }
8107 112 : return CAST(var_arg.value());
8108 : }
8109 :
     : // Extracts a bit field from a 32-bit word: (word32 & mask) >> shift.
8110 35212 : TNode<Uint32T> CodeStubAssembler::DecodeWord32(SloppyTNode<Word32T> word32,
8111 : uint32_t shift, uint32_t mask) {
8112 : return UncheckedCast<Uint32T>(Word32Shr(
8113 105636 : Word32And(word32, Int32Constant(mask)), static_cast<int>(shift)));
8114 : }
8115 :
     : // Extracts a bit field from a machine word: (word & mask) >> shift.
8116 21352 : TNode<UintPtrT> CodeStubAssembler::DecodeWord(SloppyTNode<WordT> word,
8117 : uint32_t shift, uint32_t mask) {
8118 : return Unsigned(
8119 64056 : WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift)));
8120 : }
8121 :
     : // Replaces the bit field described by {shift}/{mask} in {word} with
     : // {value}: clears the field, then ORs in the shifted value. Asserts
     : // that the shifted value fits entirely inside the mask.
8122 392 : TNode<WordT> CodeStubAssembler::UpdateWord(TNode<WordT> word,
8123 : TNode<WordT> value, uint32_t shift,
8124 : uint32_t mask) {
8125 784 : TNode<WordT> encoded_value = WordShl(value, static_cast<int>(shift));
8126 392 : TNode<IntPtrT> inverted_mask = IntPtrConstant(~static_cast<intptr_t>(mask));
8127 : // Ensure the {value} fits fully in the mask.
8128 : CSA_ASSERT(this, WordEqual(WordAnd(encoded_value, inverted_mask),
8129 : IntPtrConstant(0)));
8130 784 : return WordOr(WordAnd(word, inverted_mask), encoded_value);
8131 : }
8132 :
     : // Emits a store of {value} into the native stats counter, but only
     : // when native code counters are enabled at stub-generation time.
8133 0 : void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
8134 0 : if (FLAG_native_code_counters && counter->Enabled()) {
8135 : Node* counter_address =
8136 0 : ExternalConstant(ExternalReference::Create(counter));
8137 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
8138 0 : Int32Constant(value));
8139 : }
8140 0 : }
8141 :
     : // Emits a load/add/store sequence that bumps the native stats counter
     : // by {delta} (> 0); a no-op unless native code counters are enabled.
8142 3480 : void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
8143 : DCHECK_GT(delta, 0);
8144 3480 : if (FLAG_native_code_counters && counter->Enabled()) {
8145 : Node* counter_address =
8146 0 : ExternalConstant(ExternalReference::Create(counter));
8147 : // This operation has to be exactly 32-bit wide in case the external
8148 : // reference table redirects the counter to a uint32_t dummy_stats_counter_
8149 : // field.
8150 0 : Node* value = Load(MachineType::Int32(), counter_address);
8151 0 : value = Int32Add(value, Int32Constant(delta));
8152 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8153 : }
8154 3480 : }
8155 :
     : // Mirror of IncrementCounter: decrements the native stats counter by
     : // {delta} (> 0) when native code counters are enabled.
8156 0 : void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
8157 : DCHECK_GT(delta, 0);
8158 0 : if (FLAG_native_code_counters && counter->Enabled()) {
8159 : Node* counter_address =
8160 0 : ExternalConstant(ExternalReference::Create(counter));
8161 : // This operation has to be exactly 32-bit wide in case the external
8162 : // reference table redirects the counter to a uint32_t dummy_stats_counter_
8163 : // field.
8164 0 : Node* value = Load(MachineType::Int32(), counter_address);
8165 0 : value = Int32Sub(value, Int32Constant(delta));
8166 0 : StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8167 : }
8168 0 : }
8169 :
     : // Adds {value} to a CSA variable in-place, using Smi or IntPtr
     : // arithmetic according to {mode}; asserts the variable's machine
     : // representation matches the chosen mode.
8170 42504 : void CodeStubAssembler::Increment(Variable* variable, int value,
8171 : ParameterMode mode) {
8172 : DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
8173 : variable->rep() == MachineType::PointerRepresentation());
8174 : DCHECK_IMPLIES(mode == SMI_PARAMETERS,
8175 : variable->rep() == MachineRepresentation::kTagged ||
8176 : variable->rep() == MachineRepresentation::kTaggedSigned);
8177 42504 : variable->Bind(IntPtrOrSmiAdd(variable->value(),
8178 42504 : IntPtrOrSmiConstant(value, mode), mode));
8179 42504 : }
8180 :
     : // Marks {label} as used by emitting a branch on a condition that is
     : // always false (0 == 1), so the label is never actually taken.
8181 56 : void CodeStubAssembler::Use(Label* label) {
8182 224 : GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
8183 56 : }
8184 :
     : // Classifies a property {key} as either an array index (result in
     : // {var_index}, jump to {if_keyisindex}) or a unique name (result in
     : // {var_unique}, jump to {if_keyisunique}). Symbols are unique;
     : // strings may carry a cached array index, be thin (dereferenced to
     : // their actual string), or be non-internalized ({if_notinternalized}
     : // when provided, else {if_bailout}); oddballs use their cached string.
8185 1460 : void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
8186 : Variable* var_index, Label* if_keyisunique,
8187 : Variable* var_unique, Label* if_bailout,
8188 : Label* if_notinternalized) {
8189 : DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
8190 : DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
8191 1460 : Comment("TryToName");
8192 :
8193 1460 : Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this),
8194 1460 : if_keyisother(this, Label::kDeferred);
8195 : // Handle Smi and HeapNumber keys.
8196 2920 : var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
8197 1460 : Goto(if_keyisindex);
8198 :
8199 : BIND(&if_keyisnotindex);
8200 : Node* key_map = LoadMap(key);
8201 1460 : var_unique->Bind(key);
8202 : // Symbols are unique.
8203 2920 : GotoIf(IsSymbolMap(key_map), if_keyisunique);
8204 : Node* key_instance_type = LoadMapInstanceType(key_map);
8205 : // Miss if |key| is not a String.
8206 : STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
8207 2920 : GotoIfNot(IsStringInstanceType(key_instance_type), &if_keyisother);
8208 :
8209 : // |key| is a String. Check if it has a cached array index.
8210 : Node* hash = LoadNameHashField(key);
8211 2920 : GotoIf(IsClearWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
8212 1460 : &if_hascachedindex);
8213 : // No cached array index. If the string knows that it contains an index,
8214 : // then it must be an uncacheable index. Handle this case in the runtime.
8215 2920 : GotoIf(IsClearWord32(hash, Name::kIsNotArrayIndexMask), if_bailout);
8216 : // Check if we have a ThinString.
8217 2920 : GotoIf(InstanceTypeEqual(key_instance_type, THIN_STRING_TYPE),
8218 1460 : &if_thinstring);
8219 2920 : GotoIf(InstanceTypeEqual(key_instance_type, THIN_ONE_BYTE_STRING_TYPE),
8220 1460 : &if_thinstring);
8221 : // Finally, check if |key| is internalized.
8222 : STATIC_ASSERT(kNotInternalizedTag != 0);
8223 4380 : GotoIf(IsSetWord32(key_instance_type, kIsNotInternalizedMask),
8224 1460 : if_notinternalized != nullptr ? if_notinternalized : if_bailout);
8225 1460 : Goto(if_keyisunique);
8226 :
8227 : BIND(&if_thinstring);
     : // Thin strings forward to the internalized string they reference.
8228 1460 : var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
8229 1460 : Goto(if_keyisunique);
8230 :
8231 : BIND(&if_hascachedindex);
8232 2920 : var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
8233 1460 : Goto(if_keyisindex);
8234 :
8235 : BIND(&if_keyisother);
8236 2920 : GotoIfNot(InstanceTypeEqual(key_instance_type, ODDBALL_TYPE), if_bailout);
8237 1460 : var_unique->Bind(LoadObjectField(key, Oddball::kToStringOffset));
8238 1460 : Goto(if_keyisunique);
8239 1460 : }
8240 :
     : // Calls the C function ExternalReference::try_internalize_string_function.
     : // A non-Smi result is the internalized string ({var_internalized} /
     : // {if_internalized}); a Smi result is either a sentinel (kNotFound ->
     : // {if_not_internalized}, kUnsupported -> {if_bailout}) or an array
     : // index ({var_index} / {if_index}).
8241 392 : void CodeStubAssembler::TryInternalizeString(
8242 : Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
8243 : Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
8244 : DCHECK(var_index->rep() == MachineType::PointerRepresentation());
8245 : DCHECK_EQ(var_internalized->rep(), MachineRepresentation::kTagged);
8246 : CSA_SLOW_ASSERT(this, IsString(string));
8247 : Node* function =
8248 784 : ExternalConstant(ExternalReference::try_internalize_string_function());
8249 : Node* const isolate_ptr =
8250 784 : ExternalConstant(ExternalReference::isolate_address(isolate()));
8251 : Node* result =
8252 : CallCFunction(function, MachineType::AnyTagged(),
8253 : std::make_pair(MachineType::Pointer(), isolate_ptr),
8254 392 : std::make_pair(MachineType::AnyTagged(), string));
8255 392 : Label internalized(this);
8256 784 : GotoIf(TaggedIsNotSmi(result), &internalized);
8257 784 : Node* word_result = SmiUntag(result);
8258 1176 : GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
8259 392 : if_not_internalized);
8260 1176 : GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
8261 392 : if_bailout);
8262 392 : var_index->Bind(word_result);
8263 392 : Goto(if_index);
8264 :
8265 : BIND(&internalized);
8266 392 : var_internalized->Bind(result);
8267 392 : Goto(if_internalized);
8268 392 : }
8269 :
8270 : template <typename Dictionary>
     : // Maps a dictionary entry number to a backing-array element index:
     : // entry * kEntrySize + kElementsStartIndex + field_index.
8271 9340 : TNode<IntPtrT> CodeStubAssembler::EntryToIndex(TNode<IntPtrT> entry,
8272 : int field_index) {
8273 : TNode<IntPtrT> entry_index =
8274 9340 : IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
8275 : return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
8276 18680 : field_index));
8277 : }
8278 :
     : // Loads one element from a DescriptorArray at {index} (plus a byte
     : // {additional_offset}) past the DescriptorArray header.
8279 0 : TNode<MaybeObject> CodeStubAssembler::LoadDescriptorArrayElement(
8280 : TNode<DescriptorArray> object, Node* index, int additional_offset) {
8281 : return LoadArrayElement(object, DescriptorArray::kHeaderSize, index,
8282 8180 : additional_offset);
8283 : }
8284 :
     : // Loads the Name stored at {key_index} in a DescriptorArray.
8285 392 : TNode<Name> CodeStubAssembler::LoadKeyByKeyIndex(
8286 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8287 392 : return CAST(LoadDescriptorArrayElement(container, key_index, 0));
8288 : }
8289 :
     : // Loads the PropertyDetails word for the entry whose key sits at
     : // {key_index}, using the fixed key->details offset within an entry.
8290 952 : TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
8291 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8292 : const int kKeyToDetails =
8293 : DescriptorArray::ToDetailsIndex(0) - DescriptorArray::ToKeyIndex(0);
8294 : return Unsigned(
8295 4264 : LoadAndUntagToWord32ArrayElement(container, DescriptorArray::kHeaderSize,
8296 952 : key_index, kKeyToDetails * kTaggedSize));
8297 : }
8298 :
     : // Loads the value for the entry whose key sits at {key_index}, using
     : // the fixed key->value offset within an entry.
8299 2020 : TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
8300 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8301 : const int kKeyToValue =
8302 : DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
8303 2020 : return CAST(LoadDescriptorArrayElement(container, key_index,
8304 : kKeyToValue * kTaggedSize));
8305 : }
8306 :
     : // Like LoadValueByKeyIndex, but returns the raw MaybeObject (the value
     : // slot may hold a weak field type reference).
8307 728 : TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByKeyIndex(
8308 : TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8309 : const int kKeyToValue =
8310 : DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
8311 : return LoadDescriptorArrayElement(container, key_index,
8312 728 : kKeyToValue * kTaggedSize);
8313 : }
8314 :
     : // Converts a descriptor entry number into an element index:
     : // entry * DescriptorArray::kEntrySize.
8315 4928 : TNode<IntPtrT> CodeStubAssembler::DescriptorEntryToIndex(
8316 : TNode<IntPtrT> descriptor_entry) {
8317 : return IntPtrMul(descriptor_entry,
8318 9856 : IntPtrConstant(DescriptorArray::kEntrySize));
8319 : }
8320 :
     : // Loads the key Name of descriptor entry {descriptor_entry} (dynamic).
8321 112 : TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8322 : TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8323 224 : return CAST(LoadDescriptorArrayElement(
8324 : container, DescriptorEntryToIndex(descriptor_entry),
8325 : DescriptorArray::ToKeyIndex(0) * kTaggedSize));
8326 : }
8327 :
     : // Loads the key Name of a compile-time-constant descriptor entry; the
     : // whole offset is folded into the additional-offset argument.
8328 112 : TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8329 : TNode<DescriptorArray> container, int descriptor_entry) {
8330 224 : return CAST(LoadDescriptorArrayElement(
8331 : container, IntPtrConstant(0),
8332 : DescriptorArray::ToKeyIndex(descriptor_entry) * kTaggedSize));
8333 : }
8334 :
     : // Loads the PropertyDetails word of descriptor entry {descriptor_entry}
     : // (dynamic), untagged to a 32-bit value.
8335 112 : TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8336 : TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8337 224 : return Unsigned(LoadAndUntagToWord32ArrayElement(
8338 : container, DescriptorArray::kHeaderSize,
8339 224 : DescriptorEntryToIndex(descriptor_entry),
8340 112 : DescriptorArray::ToDetailsIndex(0) * kTaggedSize));
8341 : }
8342 :
     : // Loads the PropertyDetails word of a compile-time-constant descriptor
     : // entry; the whole offset is folded into the additional-offset argument.
8343 672 : TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8344 : TNode<DescriptorArray> container, int descriptor_entry) {
8345 1344 : return Unsigned(LoadAndUntagToWord32ArrayElement(
8346 1344 : container, DescriptorArray::kHeaderSize, IntPtrConstant(0),
8347 1344 : DescriptorArray::ToDetailsIndex(descriptor_entry) * kTaggedSize));
8348 : }
8349 :
     : // Loads the value of a compile-time-constant descriptor entry.
8350 112 : TNode<Object> CodeStubAssembler::LoadValueByDescriptorEntry(
8351 : TNode<DescriptorArray> container, int descriptor_entry) {
8352 224 : return CAST(LoadDescriptorArrayElement(
8353 : container, IntPtrConstant(0),
8354 : DescriptorArray::ToValueIndex(descriptor_entry) * kTaggedSize));
8355 : }
8356 :
     : // Loads the (possibly weak) field type stored in the value slot of
     : // descriptor entry {descriptor_entry}.
8357 4704 : TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByDescriptorEntry(
8358 : TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8359 : return LoadDescriptorArrayElement(
8360 9408 : container, DescriptorEntryToIndex(descriptor_entry),
8361 4704 : DescriptorArray::ToValueIndex(0) * kTaggedSize);
8362 : }
8363 :
8364 : template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NameDictionary>(
8365 : TNode<IntPtrT>, int);
8366 : template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<GlobalDictionary>(
8367 : TNode<IntPtrT>, int);
8368 : template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NumberDictionary>(
8369 : TNode<IntPtrT>, int);
8370 :
8371 : // This must be kept in sync with HashTableBase::ComputeCapacity().
     : // Computes the next power of two >= at_least_space_for * 1.5, clamped
     : // from below by HashTableBase::kMinCapacity.
8372 956 : TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
8373 : TNode<IntPtrT> at_least_space_for) {
8374 : TNode<IntPtrT> capacity = IntPtrRoundUpToPowerOfTwo32(
8375 1912 : IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
8376 1912 : return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
8377 : }
8378 :
8379 1685 : TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
8380 : SloppyTNode<IntPtrT> right) {
8381 : intptr_t left_constant;
8382 : intptr_t right_constant;
8383 2302 : if (ToIntPtrConstant(left, left_constant) &&
8384 617 : ToIntPtrConstant(right, right_constant)) {
8385 617 : return IntPtrConstant(std::max(left_constant, right_constant));
8386 : }
8387 : return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
8388 2136 : right);
8389 : }
8390 :
     : // Returns min(left, right), constant-folding at stub-generation time
     : // when both operands are compile-time constants.
8391 1121 : TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
8392 : SloppyTNode<IntPtrT> right) {
8393 : intptr_t left_constant;
8394 : intptr_t right_constant;
8395 1122 : if (ToIntPtrConstant(left, left_constant) &&
8396 1 : ToIntPtrConstant(right, right_constant)) {
8397 1 : return IntPtrConstant(std::min(left_constant, right_constant));
8398 : }
8399 : return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
8400 2240 : right);
8401 : }
8402 :
8403 : template <>
     : // NameDictionary stores keys directly; the key is already the Name
     : // (or the-hole), so return it unchanged.
8404 0 : TNode<HeapObject> CodeStubAssembler::LoadName<NameDictionary>(
8405 : TNode<HeapObject> key) {
8406 : CSA_ASSERT(this, Word32Or(IsTheHole(key), IsName(key)));
8407 0 : return key;
8408 : }
8409 :
8410 : template <>
     : // GlobalDictionary stores PropertyCells as keys; extract the Name
     : // from the cell's name slot.
8411 0 : TNode<HeapObject> CodeStubAssembler::LoadName<GlobalDictionary>(
8412 : TNode<HeapObject> key) {
8413 : TNode<PropertyCell> property_cell = CAST(key);
8414 0 : return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset));
8415 : }
8416 :
8417 : template <typename Dictionary>
     : // Open-addressing lookup of {unique_name} in a Name/GlobalDictionary
     : // using quadratic probing seeded by the name's hash. In kFindExisting
     : // mode, jumps to {if_found} with the element index in {var_name_index};
     : // in kFindInsertionIndex mode, jumps to {if_not_found} on the first
     : // free slot (undefined or the-hole). The probe loop has no exit of its
     : // own: it always terminates through one of the labels.
8418 6744 : void CodeStubAssembler::NameDictionaryLookup(
8419 : TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
8420 : TVariable<IntPtrT>* var_name_index, Label* if_not_found, LookupMode mode) {
8421 : static_assert(std::is_same<Dictionary, NameDictionary>::value ||
8422 : std::is_same<Dictionary, GlobalDictionary>::value,
8423 : "Unexpected NameDictionary");
8424 : DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
8425 : DCHECK_IMPLIES(mode == kFindInsertionIndex, if_found == nullptr);
8426 6744 : Comment("NameDictionaryLookup");
8427 : CSA_ASSERT(this, IsUniqueName(unique_name));
8428 :
     : // Capacity is a power of two, so (capacity - 1) works as a bit mask.
8429 6744 : TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
8430 6744 : TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
8431 20232 : TNode<WordT> hash = ChangeUint32ToWord(LoadNameHash(unique_name));
8432 :
8433 : // See Dictionary::FirstProbe().
8434 6744 : TNode<IntPtrT> count = IntPtrConstant(0);
8435 6744 : TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
8436 : Node* undefined = UndefinedConstant();
8437 :
8438 : // Appease the variable merging algorithm for "Goto(&loop)" below.
8439 6744 : *var_name_index = IntPtrConstant(0);
8440 :
8441 : TVARIABLE(IntPtrT, var_count, count);
8442 : TVARIABLE(IntPtrT, var_entry, entry);
8443 6744 : Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
8444 13488 : Label loop(this, arraysize(loop_vars), loop_vars);
8445 6744 : Goto(&loop);
8446 : BIND(&loop);
8447 : {
8448 : TNode<IntPtrT> entry = var_entry.value();
8449 :
8450 : TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
8451 : *var_name_index = index;
8452 :
8453 : TNode<HeapObject> current =
8454 : CAST(UnsafeLoadFixedArrayElement(dictionary, index));
8455 6744 : GotoIf(WordEqual(current, undefined), if_not_found);
8456 6744 : if (mode == kFindExisting) {
8457 : current = LoadName<Dictionary>(current);
8458 5736 : GotoIf(WordEqual(current, unique_name), if_found);
8459 : } else {
8460 : DCHECK_EQ(kFindInsertionIndex, mode);
8461 1008 : GotoIf(WordEqual(current, TheHoleConstant()), if_not_found);
8462 : }
8463 :
8464 : // See Dictionary::NextProbe().
8465 6744 : Increment(&var_count);
8466 6744 : entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
8467 :
8468 : var_entry = entry;
8469 6744 : Goto(&loop);
8470 : }
8471 6744 : }
8472 :
8473 : // Instantiate template methods to workaround GCC compilation issue.
8474 : template V8_EXPORT_PRIVATE void
8475 : CodeStubAssembler::NameDictionaryLookup<NameDictionary>(TNode<NameDictionary>,
8476 : TNode<Name>, Label*,
8477 : TVariable<IntPtrT>*,
8478 : Label*, LookupMode);
8479 : template V8_EXPORT_PRIVATE void CodeStubAssembler::NameDictionaryLookup<
8480 : GlobalDictionary>(TNode<GlobalDictionary>, TNode<Name>, Label*,
8481 : TVariable<IntPtrT>*, Label*, LookupMode);
8482 :
8483 336 : Node* CodeStubAssembler::ComputeUnseededHash(Node* key) {
8484 : // See v8::internal::ComputeUnseededHash()
8485 672 : Node* hash = TruncateIntPtrToInt32(key);
8486 1680 : hash = Int32Add(Word32Xor(hash, Int32Constant(0xFFFFFFFF)),
8487 1344 : Word32Shl(hash, Int32Constant(15)));
8488 1344 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
8489 1344 : hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
8490 1344 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
8491 1008 : hash = Int32Mul(hash, Int32Constant(2057));
8492 1344 : hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
8493 1008 : return Word32And(hash, Int32Constant(0x3FFFFFFF));
8494 : }
8495 :
     : // Computes the isolate-seeded integer hash by calling out to the C
     : // function behind ExternalReference::compute_integer_hash().
8497 1132 : Node* CodeStubAssembler::ComputeSeededHash(Node* key) {
8498 : Node* const function_addr =
8499 2264 : ExternalConstant(ExternalReference::compute_integer_hash());
8500 : Node* const isolate_ptr =
8501 2264 : ExternalConstant(ExternalReference::isolate_address(isolate()));
8502 :
8503 : MachineType type_ptr = MachineType::Pointer();
8504 : MachineType type_uint32 = MachineType::Uint32();
8505 :
8506 : Node* const result = CallCFunction(
8507 : function_addr, type_uint32, std::make_pair(type_ptr, isolate_ptr),
8508 2264 : std::make_pair(type_uint32, TruncateIntPtrToInt32(key)));
8509 1132 : return result;
8510 : }
8510 :
// Looks up |intptr_index| in |dictionary| using the standard dictionary
// probe sequence. Jumps to |if_found| with *var_entry set to the entry
// number on success, or to |if_not_found| when an empty (undefined) slot is
// reached. Keys may be stored either as Smis or as HeapNumbers, so the
// index is compared in both representations.
void CodeStubAssembler::NumberDictionaryLookup(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
  CSA_ASSERT(this, IsNumberDictionary(dictionary));
  DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
  Comment("NumberDictionaryLookup");

  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NumberDictionary>(dictionary));
  // capacity - 1 is used as a mask, which assumes a power-of-two capacity.
  TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));

  TNode<WordT> hash = ChangeUint32ToWord(ComputeSeededHash(intptr_index));
  // Float64 form of the key, for comparison against HeapNumber keys.
  Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);

  // See Dictionary::FirstProbe().
  TNode<IntPtrT> count = IntPtrConstant(0);
  TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));

  Node* undefined = UndefinedConstant();
  Node* the_hole = TheHoleConstant();

  TVARIABLE(IntPtrT, var_count, count);
  Variable* loop_vars[] = {&var_count, var_entry};
  Label loop(this, 2, loop_vars);
  *var_entry = entry;
  Goto(&loop);
  BIND(&loop);
  {
    TNode<IntPtrT> entry = var_entry->value();

    TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(entry);
    Node* current = UnsafeLoadFixedArrayElement(dictionary, index);
    // Undefined marks a never-used slot: probing can stop, key is absent.
    GotoIf(WordEqual(current, undefined), if_not_found);
    Label next_probe(this);
    {
      Label if_currentissmi(this), if_currentisnotsmi(this);
      Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
      BIND(&if_currentissmi);
      {
        Node* current_value = SmiUntag(current);
        Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
      }
      BIND(&if_currentisnotsmi);
      {
        // The hole marks a deleted entry: keep probing past it.
        GotoIf(WordEqual(current, the_hole), &next_probe);
        // Current must be the Number.
        Node* current_value = LoadHeapNumberValue(current);
        Branch(Float64Equal(current_value, key_as_float64), if_found,
               &next_probe);
      }
    }

    BIND(&next_probe);
    // See Dictionary::NextProbe().
    Increment(&var_count);
    entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));

    *var_entry = entry;
    Goto(&loop);
  }
}
8571 :
// Loads the value stored for |intptr_index| in |dictionary|. Jumps to
// |if_hole| when the key is not present, and to |not_data| when the entry
// exists but its kind is not kData (e.g. an accessor).
TNode<Object> CodeStubAssembler::BasicLoadNumberDictionaryElement(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    Label* not_data, Label* if_hole) {
  TVARIABLE(IntPtrT, var_entry);
  Label if_found(this);
  NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
                         if_hole);
  BIND(&if_found);

  // Check that the value is a data property.
  TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
  TNode<Uint32T> details =
      LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
  TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
  // TODO(jkummerow): Support accessors without missing?
  GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
  // Finally, load the value.
  return LoadValueByKeyIndex<NumberDictionary>(dictionary, index);
}
8591 :
// Stores |value| for the existing key |intptr_index| in |dictionary|.
// Jumps to |if_hole| when the key is absent, to |not_data| for non-data
// entries (e.g. accessors), and to |read_only| when the property's
// attributes mark it read-only. Does not insert new entries.
void CodeStubAssembler::BasicStoreNumberDictionaryElement(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    TNode<Object> value, Label* not_data, Label* if_hole, Label* read_only) {
  TVARIABLE(IntPtrT, var_entry);
  Label if_found(this);
  NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
                         if_hole);
  BIND(&if_found);

  // Check that the value is a data property.
  TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
  TNode<Uint32T> details =
      LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
  TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
  // TODO(jkummerow): Support accessors without missing?
  GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);

  // Check that the property is writable.
  GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
         read_only);

  // Finally, store the value.
  StoreValueByKeyIndex<NumberDictionary>(dictionary, index, value);
}
8616 :
// Generic version must never be called; only the NameDictionary
// specialization below is implemented.
template <class Dictionary>
void CodeStubAssembler::FindInsertionEntry(TNode<Dictionary> dictionary,
                                           TNode<Name> key,
                                           TVariable<IntPtrT>* var_key_index) {
  UNREACHABLE();
}
8623 :
// Finds the slot in |dictionary| where |key| should be inserted and stores
// its index in *var_key_index. Uses NameDictionaryLookup in
// kFindInsertionIndex mode, which jumps to |done| on reaching a free slot;
// |if_found| is nullptr because no hit is expected for a new key.
template <>
void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
    TNode<NameDictionary> dictionary, TNode<Name> key,
    TVariable<IntPtrT>* var_key_index) {
  Label done(this);
  NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
                                       &done, kFindInsertionIndex);
  BIND(&done);
}
8633 :
// Generic version must never be called.
template <class Dictionary>
void CodeStubAssembler::InsertEntry(TNode<Dictionary> dictionary,
                                    TNode<Name> key, TNode<Object> value,
                                    TNode<IntPtrT> index,
                                    TNode<Smi> enum_index) {
  UNREACHABLE();  // Use specializations instead.
}
8641 :
// Writes a new data property (|name| -> |value|) into |dictionary| at the
// free slot |index| and initializes its PropertyDetails, embedding
// |enum_index| in the DictionaryStorageField. Private symbols additionally
// get the DONT_ENUM attribute.
template <>
void CodeStubAssembler::InsertEntry<NameDictionary>(
    TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
    TNode<IntPtrT> index, TNode<Smi> enum_index) {
  // Store name and value.
  StoreFixedArrayElement(dictionary, index, name);
  StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);

  // Prepare details of the new property.
  PropertyDetails d(kData, NONE, PropertyCellType::kNoCell);
  enum_index =
      SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
  // We OR over the actual index below, so we expect the initial value to be 0.
  DCHECK_EQ(0, d.dictionary_index());
  TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index));

  // Private names must be marked non-enumerable.
  Label not_private(this, &var_details);
  GotoIfNot(IsPrivateSymbol(name), &not_private);
  TNode<Smi> dont_enum =
      SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
  var_details = SmiOr(var_details.value(), dont_enum);
  Goto(&not_private);
  BIND(&not_private);

  // Finally, store the details.
  StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
                                         var_details.value());
}
8671 :
// GlobalDictionary insertion is not supported in CSA; callers must bail out
// to the runtime instead.
template <>
void CodeStubAssembler::InsertEntry<GlobalDictionary>(
    TNode<GlobalDictionary> dictionary, TNode<Name> key, TNode<Object> value,
    TNode<IntPtrT> index, TNode<Smi> enum_index) {
  UNIMPLEMENTED();
}
8678 :
// Adds the property |key| -> |value| to |dictionary|. Jumps to |bailout|
// (before any mutation) when the dictionary would need growing or
// rehashing, or when the enumeration index space is exhausted — those
// cases are left to the runtime.
template <class Dictionary>
void CodeStubAssembler::Add(TNode<Dictionary> dictionary, TNode<Name> key,
                            TNode<Object> value, Label* bailout) {
  CSA_ASSERT(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)));
  TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
  TNode<Smi> nof = GetNumberOfElements<Dictionary>(dictionary);
  TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
  // Require 33% to still be free after adding additional_elements.
  // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
  // But that's OK here because it's only used for a comparison.
  TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
  GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
  // Require rehashing if more than 50% of free elements are deleted elements.
  TNode<Smi> deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
  CSA_ASSERT(this, SmiAbove(capacity, new_nof));
  TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
  GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);

  TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
  TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
  TNode<Smi> max_enum_index =
      SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
  GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);

  // No more bailouts after this point.
  // Operations from here on can have side effects.

  SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
  SetNumberOfElements<Dictionary>(dictionary, new_nof);

  TVARIABLE(IntPtrT, var_key_index);
  FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
  InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
                          enum_index);
}
8714 :
// Explicit instantiation of Add for NameDictionary.
template void CodeStubAssembler::Add<NameDictionary>(TNode<NameDictionary>,
                                                     TNode<Name>, TNode<Object>,
                                                     Label*);
8718 :
// Linearly scans |array| (backwards, from the last valid entry to the
// first) for an entry whose key is identical to |unique_name|. On a hit,
// jumps to |if_found| with *var_name_index set to the key's element index;
// otherwise jumps to |if_not_found|. Works for entry-based layouts where
// keys are Array::kEntrySize elements apart starting at Array::ToKeyIndex(0).
template <typename Array>
void CodeStubAssembler::LookupLinear(TNode<Name> unique_name,
                                     TNode<Array> array,
                                     TNode<Uint32T> number_of_valid_entries,
                                     Label* if_found,
                                     TVariable<IntPtrT>* var_name_index,
                                     Label* if_not_found) {
  static_assert(std::is_base_of<FixedArray, Array>::value ||
                    std::is_base_of<WeakFixedArray, Array>::value ||
                    std::is_base_of<DescriptorArray, Array>::value,
                "T must be a descendant of FixedArray or a WeakFixedArray");
  Comment("LookupLinear");
  CSA_ASSERT(this, IsUniqueName(unique_name));
  TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
  TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
  TNode<IntPtrT> last_exclusive = IntPtrAdd(
      first_inclusive,
      IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));

  // Negative stride with kPre advance: the loop body sees indices
  // last_exclusive - kEntrySize down to first_inclusive.
  BuildFastLoop(last_exclusive, first_inclusive,
                [=](SloppyTNode<IntPtrT> name_index) {
                  TNode<MaybeObject> element =
                      LoadArrayElement(array, Array::kHeaderSize, name_index);
                  TNode<Name> candidate_name = CAST(element);
                  *var_name_index = name_index;
                  GotoIf(WordEqual(candidate_name, unique_name), if_found);
                },
                -Array::kEntrySize, INTPTR_PARAMETERS, IndexAdvanceMode::kPre);
  Goto(if_not_found);
}
8749 :
// The number of entries in a DescriptorArray is its stored descriptor count.
template <>
TNode<Uint32T> CodeStubAssembler::NumberOfEntries<DescriptorArray>(
    TNode<DescriptorArray> descriptors) {
  return Unsigned(LoadNumberOfDescriptors(descriptors));
}
8755 :
// The number of entries in a TransitionArray. Arrays shorter than
// kFirstIndex do not have a transition-length slot and thus hold zero
// transitions; otherwise the count is read from kTransitionLengthIndex.
template <>
TNode<Uint32T> CodeStubAssembler::NumberOfEntries<TransitionArray>(
    TNode<TransitionArray> transitions) {
  TNode<IntPtrT> length = LoadAndUntagWeakFixedArrayLength(transitions);
  return Select<Uint32T>(
      UintPtrLessThan(length, IntPtrConstant(TransitionArray::kFirstIndex)),
      [=] { return Unsigned(Int32Constant(0)); },
      [=] {
        return Unsigned(LoadAndUntagToWord32ArrayElement(
            transitions, WeakFixedArray::kHeaderSize,
            IntPtrConstant(TransitionArray::kTransitionLengthIndex)));
      });
}
8769 :
8770 : template <typename Array>
8771 12596 : TNode<IntPtrT> CodeStubAssembler::EntryIndexToIndex(
8772 : TNode<Uint32T> entry_index) {
8773 12596 : TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
8774 12596 : TNode<Word32T> index = Int32Mul(entry_index, entry_size);
8775 12596 : return ChangeInt32ToIntPtr(index);
8776 : }
8777 :
8778 : template <typename Array>
8779 2588 : TNode<IntPtrT> CodeStubAssembler::ToKeyIndex(TNode<Uint32T> entry_index) {
8780 : return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
8781 5176 : EntryIndexToIndex<Array>(entry_index));
8782 : }
8783 :
// Explicit instantiations of ToKeyIndex for the two supported array types.
template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<DescriptorArray>(
    TNode<Uint32T>);
template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<TransitionArray>(
    TNode<Uint32T>);
8788 :
// A descriptor's position in hash-sorted order is encoded in its
// PropertyDetails (DescriptorPointer field); decode and return it.
template <>
TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<DescriptorArray>(
    TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
  TNode<Uint32T> details =
      DescriptorArrayGetDetails(descriptors, descriptor_number);
  return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
}
8796 :
// Transition arrays store entries in sorted order directly (no indirection
// table), so an entry's sorted index is the entry number itself.
template <>
TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<TransitionArray>(
    TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
  return transition_number;
}
8802 :
// Loads the key (a Name) of entry |entry_index| from |array|.
template <typename Array>
TNode<Name> CodeStubAssembler::GetKey(TNode<Array> array,
                                      TNode<Uint32T> entry_index) {
  static_assert(std::is_base_of<TransitionArray, Array>::value ||
                    std::is_base_of<DescriptorArray, Array>::value,
                "T must be a descendant of DescriptorArray or TransitionArray");
  // Byte offset of the key slot within an entry.
  const int key_offset = Array::ToKeyIndex(0) * kTaggedSize;
  TNode<MaybeObject> element =
      LoadArrayElement(array, Array::kHeaderSize,
                       EntryIndexToIndex<Array>(entry_index), key_offset);
  return CAST(element);
}
8815 :
// Explicit instantiations of GetKey for the two supported array types.
template TNode<Name> CodeStubAssembler::GetKey<DescriptorArray>(
    TNode<DescriptorArray>, TNode<Uint32T>);
template TNode<Name> CodeStubAssembler::GetKey<TransitionArray>(
    TNode<TransitionArray>, TNode<Uint32T>);
8820 :
// Loads the PropertyDetails word of descriptor |descriptor_number| from
// |descriptors| as an untagged uint32.
TNode<Uint32T> CodeStubAssembler::DescriptorArrayGetDetails(
    TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
  // Byte offset of the details slot within an entry.
  const int details_offset = DescriptorArray::ToDetailsIndex(0) * kTaggedSize;
  return Unsigned(LoadAndUntagToWord32ArrayElement(
      descriptors, DescriptorArray::kHeaderSize,
      EntryIndexToIndex<DescriptorArray>(descriptor_number), details_offset));
}
8828 :
// Binary-searches |array| for |unique_name| by name hash, then scans
// forward through the run of entries sharing that hash, comparing
// identity. Entries are visited in sorted order via GetSortedKeyIndex.
// Jumps to |if_found| with *var_name_index set to the key's element index,
// or to |if_not_found|. Entries whose sorted index is >=
// |number_of_valid_entries| are treated as not found.
template <typename Array>
void CodeStubAssembler::LookupBinary(TNode<Name> unique_name,
                                     TNode<Array> array,
                                     TNode<Uint32T> number_of_valid_entries,
                                     Label* if_found,
                                     TVariable<IntPtrT>* var_name_index,
                                     Label* if_not_found) {
  Comment("LookupBinary");
  TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)));
  TNode<Uint32T> limit =
      Unsigned(Int32Sub(NumberOfEntries<Array>(array), Int32Constant(1)));
  TVARIABLE(Uint32T, var_high, limit);
  TNode<Uint32T> hash = LoadNameHashField(unique_name);
  CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));

  // Assume non-empty array.
  CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));

  Label binary_loop(this, {&var_high, &var_low});
  Goto(&binary_loop);
  BIND(&binary_loop);
  {
    // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
    TNode<Uint32T> mid = Unsigned(
        Int32Add(var_low.value(),
                 Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1)));
    // mid_name = array->GetSortedKey(mid).
    TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
    TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);

    TNode<Uint32T> mid_hash = LoadNameHashField(mid_name);

    Label mid_greater(this), mid_less(this), merge(this);
    Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
    BIND(&mid_greater);
    {
      var_high = mid;
      Goto(&merge);
    }
    BIND(&mid_less);
    {
      var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
      Goto(&merge);
    }
    BIND(&merge);
    GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
  }

  // var_low now points at the first entry whose hash is >= |hash|; scan
  // forward over entries with an equal hash until an identity match.
  Label scan_loop(this, &var_low);
  Goto(&scan_loop);
  BIND(&scan_loop);
  {
    GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);

    TNode<Uint32T> sort_index =
        GetSortedKeyIndex<Array>(array, var_low.value());
    TNode<Name> current_name = GetKey<Array>(array, sort_index);
    TNode<Uint32T> current_hash = LoadNameHashField(current_name);
    GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
    Label next(this);
    GotoIf(WordNotEqual(current_name, unique_name), &next);
    GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
           if_not_found);
    *var_name_index = ToKeyIndex<Array>(sort_index);
    Goto(if_found);

    BIND(&next);
    var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
    Goto(&scan_loop);
  }
}
8900 :
// Invokes |body| for every enumerable own property of |object| (whose map
// is |map|), passing the key and the value (accessors are evaluated via
// CallGetterIfAccessor, falling back to Runtime::kGetProperty). In
// kEnumerationOrder mode, string-keyed properties are visited before
// symbol-keyed ones; that requires a second pass over the descriptor range
// that contained symbols. Bails out to |bailout| when the object does not
// have only simple properties (see EnsureOnlyHasSimpleProperties). While
// iterating, a "stable" flag tracks whether |object|'s map is unchanged so
// property details can be decoded straight from the preloaded descriptors.
void CodeStubAssembler::ForEachEnumerableOwnProperty(
    TNode<Context> context, TNode<Map> map, TNode<JSObject> object,
    ForEachEnumerationMode mode, const ForEachKeyValueFunction& body,
    Label* bailout) {
  TNode<Int32T> type = LoadMapInstanceType(map);
  TNode<Uint32T> bit_field3 = EnsureOnlyHasSimpleProperties(map, type, bailout);

  TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
  TNode<Uint32T> nof_descriptors =
      DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3);

  TVARIABLE(BoolT, var_stable, Int32TrueConstant());

  TVARIABLE(BoolT, var_has_symbol, Int32FalseConstant());
  // false - iterate only string properties, true - iterate only symbol
  // properties
  TVARIABLE(BoolT, var_is_symbol_processing_loop, Int32FalseConstant());
  TVARIABLE(IntPtrT, var_start_key_index,
            ToKeyIndex<DescriptorArray>(Unsigned(Int32Constant(0))));
  // Note: var_end_key_index is exclusive for the loop
  TVARIABLE(IntPtrT, var_end_key_index,
            ToKeyIndex<DescriptorArray>(nof_descriptors));
  VariableList list(
      {&var_stable, &var_has_symbol, &var_is_symbol_processing_loop,
       &var_start_key_index, &var_end_key_index},
      zone());
  Label descriptor_array_loop(
      this, {&var_stable, &var_has_symbol, &var_is_symbol_processing_loop,
             &var_start_key_index, &var_end_key_index});

  Goto(&descriptor_array_loop);
  BIND(&descriptor_array_loop);

  BuildFastLoop(
      list, var_start_key_index.value(), var_end_key_index.value(),
      [=, &var_stable, &var_has_symbol, &var_is_symbol_processing_loop,
       &var_start_key_index, &var_end_key_index](Node* index) {
        TNode<IntPtrT> descriptor_key_index =
            TNode<IntPtrT>::UncheckedCast(index);
        TNode<Name> next_key =
            LoadKeyByKeyIndex(descriptors, descriptor_key_index);

        TVARIABLE(Object, var_value, SmiConstant(0));
        Label callback(this), next_iteration(this);

        if (mode == kEnumerationOrder) {
          // |next_key| is either a string or a symbol
          // Skip strings or symbols depending on
          // |var_is_symbol_processing_loop|.
          Label if_string(this), if_symbol(this), if_name_ok(this);
          Branch(IsSymbol(next_key), &if_symbol, &if_string);
          BIND(&if_symbol);
          {
            // Process symbol property when |var_is_symbol_processing_loop| is
            // true.
            GotoIf(var_is_symbol_processing_loop.value(), &if_name_ok);
            // First iteration need to calculate smaller range for processing
            // symbols
            Label if_first_symbol(this);
            // var_end_key_index is still inclusive at this point.
            var_end_key_index = descriptor_key_index;
            Branch(var_has_symbol.value(), &next_iteration, &if_first_symbol);
            BIND(&if_first_symbol);
            {
              var_start_key_index = descriptor_key_index;
              var_has_symbol = Int32TrueConstant();
              Goto(&next_iteration);
            }
          }
          BIND(&if_string);
          {
            CSA_ASSERT(this, IsString(next_key));
            // Process string property when |var_is_symbol_processing_loop| is
            // false.
            Branch(var_is_symbol_processing_loop.value(), &next_iteration,
                   &if_name_ok);
          }
          BIND(&if_name_ok);
        }
        {
          TVARIABLE(Map, var_map);
          TVARIABLE(HeapObject, var_meta_storage);
          TVARIABLE(IntPtrT, var_entry);
          TVARIABLE(Uint32T, var_details);
          Label if_found(this);

          Label if_found_fast(this), if_found_dict(this);

          Label if_stable(this), if_not_stable(this);
          Branch(var_stable.value(), &if_stable, &if_not_stable);
          BIND(&if_stable);
          {
            // Directly decode from the descriptor array if |object| did not
            // change shape.
            var_map = map;
            var_meta_storage = descriptors;
            var_entry = Signed(descriptor_key_index);
            Goto(&if_found_fast);
          }
          BIND(&if_not_stable);
          {
            // If the map did change, do a slower lookup. We are still
            // guaranteed that the object has a simple shape, and that the key
            // is a name.
            var_map = LoadMap(object);
            TryLookupPropertyInSimpleObject(
                object, var_map.value(), next_key, &if_found_fast,
                &if_found_dict, &var_meta_storage, &var_entry, &next_iteration);
          }

          BIND(&if_found_fast);
          {
            TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
            TNode<IntPtrT> name_index = var_entry.value();

            // Skip non-enumerable properties.
            var_details = LoadDetailsByKeyIndex(descriptors, name_index);
            GotoIf(IsSetWord32(var_details.value(),
                               PropertyDetails::kAttributesDontEnumMask),
                   &next_iteration);

            LoadPropertyFromFastObject(object, var_map.value(), descriptors,
                                       name_index, var_details.value(),
                                       &var_value);
            Goto(&if_found);
          }
          BIND(&if_found_dict);
          {
            TNode<NameDictionary> dictionary = CAST(var_meta_storage.value());
            TNode<IntPtrT> entry = var_entry.value();

            TNode<Uint32T> details =
                LoadDetailsByKeyIndex<NameDictionary>(dictionary, entry);
            // Skip non-enumerable properties.
            GotoIf(
                IsSetWord32(details, PropertyDetails::kAttributesDontEnumMask),
                &next_iteration);

            var_details = details;
            var_value = LoadValueByKeyIndex<NameDictionary>(dictionary, entry);
            Goto(&if_found);
          }

          // Here we have details and value which could be an accessor.
          BIND(&if_found);
          {
            Label slow_load(this, Label::kDeferred);

            var_value = CallGetterIfAccessor(var_value.value(),
                                             var_details.value(), context,
                                             object, &slow_load, kCallJSGetter);
            Goto(&callback);

            BIND(&slow_load);
            var_value =
                CallRuntime(Runtime::kGetProperty, context, object, next_key);
            Goto(&callback);

            BIND(&callback);
            body(next_key, var_value.value());

            // Check if |object| is still stable, i.e. we can proceed using
            // property details from preloaded |descriptors|.
            var_stable =
                Select<BoolT>(var_stable.value(),
                              [=] { return WordEqual(LoadMap(object), map); },
                              [=] { return Int32FalseConstant(); });

            Goto(&next_iteration);
          }
        }
        BIND(&next_iteration);
      },
      DescriptorArray::kEntrySize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);

  if (mode == kEnumerationOrder) {
    Label done(this);
    GotoIf(var_is_symbol_processing_loop.value(), &done);
    GotoIfNot(var_has_symbol.value(), &done);
    // All string properties are processed, now process symbol properties.
    var_is_symbol_processing_loop = Int32TrueConstant();
    // Add DescriptorArray::kEntrySize to make the var_end_key_index exclusive
    // as BuildFastLoop() expects.
    Increment(&var_end_key_index, DescriptorArray::kEntrySize,
              INTPTR_PARAMETERS);
    Goto(&descriptor_array_loop);

    BIND(&done);
  }
}
9091 :
// Looks up |unique_name| among the map's own descriptors in |descriptors|.
// The number of valid entries is decoded from the map's |bitfield3|.
void CodeStubAssembler::DescriptorLookup(
    SloppyTNode<Name> unique_name, SloppyTNode<DescriptorArray> descriptors,
    SloppyTNode<Uint32T> bitfield3, Label* if_found,
    TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
  Comment("DescriptorArrayLookup");
  TNode<Uint32T> nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
  Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
                          var_name_index, if_not_found);
}
9101 :
// Looks up |unique_name| among the valid transitions of |transitions|.
void CodeStubAssembler::TransitionLookup(
    SloppyTNode<Name> unique_name, SloppyTNode<TransitionArray> transitions,
    Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
  Comment("TransitionArrayLookup");
  TNode<Uint32T> number_of_valid_transitions =
      NumberOfEntries<TransitionArray>(transitions);
  Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
                          if_found, var_name_index, if_not_found);
}
9111 :
// Dispatches a name lookup in |array| to either a linear scan (for small
// arrays, at most kMaxElementsForLinearSearch entries) or a hash-based
// binary search. An empty array jumps straight to |if_not_found|.
template <typename Array>
void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
                               TNode<Uint32T> number_of_valid_entries,
                               Label* if_found,
                               TVariable<IntPtrT>* var_name_index,
                               Label* if_not_found) {
  Comment("ArrayLookup");
  if (!number_of_valid_entries) {
    // Caller passed no count node; compute it from the array itself.
    number_of_valid_entries = NumberOfEntries(array);
  }
  GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
  Label linear_search(this), binary_search(this);
  const int kMaxElementsForLinearSearch = 32;
  Branch(Uint32LessThanOrEqual(number_of_valid_entries,
                               Int32Constant(kMaxElementsForLinearSearch)),
         &linear_search, &binary_search);
  BIND(&linear_search);
  {
    LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
                        var_name_index, if_not_found);
  }
  BIND(&binary_search);
  {
    LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
                        var_name_index, if_not_found);
  }
}
9139 :
// Returns true when |map| describes a "simple" receiver: not a special
// receiver instance type, no named interceptor, and no access checks.
TNode<BoolT> CodeStubAssembler::IsSimpleObjectMap(TNode<Map> map) {
  uint32_t mask =
      Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
  // !IsSpecialReceiverType && !IsNamedInterceptor && !IsAccessCheckNeeded
  return Select<BoolT>(
      IsSpecialReceiverInstanceType(LoadMapInstanceType(map)),
      [=] { return Int32FalseConstant(); },
      [=] { return IsClearWord32(LoadMapBitField(map), mask); });
}
9149 :
// Looks up |unique_name| directly on |object|, which must have a simple map
// (asserted below). Dispatches on the map's dictionary-mode bit: fast-mode
// objects are searched through their descriptor array (jumps to
// |if_found_fast|, *var_meta_storage = descriptors), dictionary-mode
// objects through their NameDictionary (jumps to |if_found_dict|,
// *var_meta_storage = dictionary). *var_name_index receives the entry index
// in either case; absent keys jump to |if_not_found|.
void CodeStubAssembler::TryLookupPropertyInSimpleObject(
    TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
    Label* if_found_fast, Label* if_found_dict,
    TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
    Label* if_not_found) {
  CSA_ASSERT(this, IsSimpleObjectMap(map));
  CSA_ASSERT(this, IsUniqueNameNoIndex(unique_name));

  TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
  Label if_isfastmap(this), if_isslowmap(this);
  Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
         &if_isfastmap);
  BIND(&if_isfastmap);
  {
    TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
    *var_meta_storage = descriptors;

    DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
                     var_name_index, if_not_found);
  }
  BIND(&if_isslowmap);
  {
    TNode<NameDictionary> dictionary = CAST(LoadSlowProperties(object));
    *var_meta_storage = dictionary;

    NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
                                         var_name_index, if_not_found);
  }
}
9179 :
// Looks up |unique_name| as an own property of |object|. Simple receivers
// are handled by TryLookupPropertyInSimpleObject (jumping to
// |if_found_fast| or |if_found_dict|). Of the special receiver types, only
// the global object is handled here — via its GlobalDictionary, jumping to
// |if_found_global| — and only when it has no interceptors or access
// checks; everything else jumps to |if_bailout| for runtime handling.
void CodeStubAssembler::TryLookupProperty(
    SloppyTNode<JSObject> object, SloppyTNode<Map> map,
    SloppyTNode<Int32T> instance_type, SloppyTNode<Name> unique_name,
    Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
    TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
    Label* if_not_found, Label* if_bailout) {
  Label if_objectisspecial(this);
  GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);

  TryLookupPropertyInSimpleObject(object, map, unique_name, if_found_fast,
                                  if_found_dict, var_meta_storage,
                                  var_name_index, if_not_found);

  BIND(&if_objectisspecial);
  {
    // Handle global object here and bailout for other special objects.
    GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
              if_bailout);

    // Handle interceptors and access checks in runtime.
    TNode<Int32T> bit_field = LoadMapBitField(map);
    int mask =
        Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
    GotoIf(IsSetWord32(bit_field, mask), if_bailout);

    TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(object));
    *var_meta_storage = dictionary;

    NameDictionaryLookup<GlobalDictionary>(
        dictionary, unique_name, if_found_global, var_name_index, if_not_found);
  }
}
9212 :
// Checks whether |object| has |unique_name| as an own property, jumping to
// |if_found| / |if_not_found| accordingly, or to |if_bailout| when the
// lookup must be completed in the runtime. For global-object hits, the
// property cell is additionally loaded so that deleted cells count as
// not found.
void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
                                          Node* instance_type,
                                          Node* unique_name, Label* if_found,
                                          Label* if_not_found,
                                          Label* if_bailout) {
  Comment("TryHasOwnProperty");
  CSA_ASSERT(this, IsUniqueNameNoIndex(CAST(unique_name)));
  TVARIABLE(HeapObject, var_meta_storage);
  TVARIABLE(IntPtrT, var_name_index);

  Label if_found_global(this);
  // Fast and dictionary hits both count as found; only global hits need the
  // extra property-cell check below.
  TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
                    &if_found_global, &var_meta_storage, &var_name_index,
                    if_not_found, if_bailout);

  BIND(&if_found_global);
  {
    VARIABLE(var_value, MachineRepresentation::kTagged);
    VARIABLE(var_details, MachineRepresentation::kWord32);
    // Check if the property cell is not deleted.
    LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
                                     var_name_index.value(), &var_value,
                                     &var_details, if_not_found);
    Goto(if_found);
  }
}
9239 :
9240 392 : Node* CodeStubAssembler::GetMethod(Node* context, Node* object,
9241 : Handle<Name> name,
9242 : Label* if_null_or_undefined) {
9243 784 : Node* method = GetProperty(context, object, name);
9244 :
9245 784 : GotoIf(IsUndefined(method), if_null_or_undefined);
9246 784 : GotoIf(IsNull(method), if_null_or_undefined);
9247 :
9248 392 : return method;
9249 : }
9250 :
9251 56 : TNode<Object> CodeStubAssembler::GetIteratorMethod(
9252 : TNode<Context> context, TNode<HeapObject> heap_obj,
9253 : Label* if_iteratorundefined) {
9254 112 : return CAST(GetMethod(context, heap_obj,
9255 : isolate()->factory()->iterator_symbol(),
9256 : if_iteratorundefined));
9257 : }
9258 :
9259 1068 : void CodeStubAssembler::LoadPropertyFromFastObject(
9260 : Node* object, Node* map, TNode<DescriptorArray> descriptors,
9261 : Node* name_index, Variable* var_details, Variable* var_value) {
9262 : DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
9263 : DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
9264 :
9265 : Node* details =
9266 : LoadDetailsByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index));
9267 1068 : var_details->Bind(details);
9268 :
9269 : LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
9270 1068 : var_value);
9271 1068 : }
9272 :
// Loads the value of the fast-mode property described by the already-decoded
// PropertyDetails word {details} into {var_value}. Field properties may live
// in-object or in the property backing store; double-representation fields
// are reboxed into a freshly allocated HeapNumber.
void CodeStubAssembler::LoadPropertyFromFastObject(
    Node* object, Node* map, TNode<DescriptorArray> descriptors,
    Node* name_index, Node* details, Variable* var_value) {
  Comment("[ LoadPropertyFromFastObject");

  Node* location = DecodeWord32<PropertyDetails::LocationField>(details);

  Label if_in_field(this), if_in_descriptor(this), done(this);
  Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
         &if_in_descriptor);
  BIND(&if_in_field);
  {
    Node* field_index =
        DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
    Node* representation =
        DecodeWord32<PropertyDetails::RepresentationField>(details);

    // Field indices are relative to the start of in-object properties;
    // indices at or past the instance size refer to the backing store.
    field_index =
        IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
    Node* instance_size_in_words = LoadMapInstanceSizeInWords(map);

    Label if_inobject(this), if_backing_store(this);
    VARIABLE(var_double_value, MachineRepresentation::kFloat64);
    Label rebox_double(this, &var_double_value);
    Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
           &if_backing_store);
    BIND(&if_inobject);
    {
      Comment("if_inobject");
      Node* field_offset = TimesTaggedSize(field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        var_value->Bind(LoadObjectField(object, field_offset));
        Goto(&done);
      }
      BIND(&if_double);
      {
        // With unboxed double fields the raw float64 lives in-object;
        // otherwise the field holds a HeapNumber that we read through.
        if (FLAG_unbox_double_fields) {
          var_double_value.Bind(
              LoadObjectField(object, field_offset, MachineType::Float64()));
        } else {
          Node* mutable_heap_number = LoadObjectField(object, field_offset);
          var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
        }
        Goto(&rebox_double);
      }
    }
    BIND(&if_backing_store);
    {
      Comment("if_backing_store");
      TNode<HeapObject> properties = LoadFastProperties(object);
      // Rebase the index into the backing store's own indexing.
      field_index = IntPtrSub(field_index, instance_size_in_words);
      Node* value = LoadPropertyArrayElement(CAST(properties), field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        var_value->Bind(value);
        Goto(&done);
      }
      BIND(&if_double);
      {
        var_double_value.Bind(LoadHeapNumberValue(value));
        Goto(&rebox_double);
      }
    }
    BIND(&rebox_double);
    {
      Comment("rebox_double");
      // Allocate a fresh HeapNumber so callers never observe the field's
      // in-place (mutable) storage.
      Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
      var_value->Bind(heap_number);
      Goto(&done);
    }
  }
  BIND(&if_in_descriptor);
  {
    // kDescriptor location: the value is stored in the descriptor array.
    var_value->Bind(
        LoadValueByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index)));
    Goto(&done);
  }
  BIND(&done);

  Comment("] LoadPropertyFromFastObject");
}
9365 :
9366 2412 : void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
9367 : Node* name_index,
9368 : Variable* var_details,
9369 : Variable* var_value) {
9370 2412 : Comment("LoadPropertyFromNameDictionary");
9371 : CSA_ASSERT(this, IsNameDictionary(dictionary));
9372 :
9373 : var_details->Bind(
9374 2412 : LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
9375 2412 : var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
9376 :
9377 2412 : Comment("] LoadPropertyFromNameDictionary");
9378 2412 : }
9379 :
// Loads the value and details of the GlobalDictionary entry at {name_index}.
// Global properties are stored in PropertyCells; jumps to {if_deleted} when
// the cell's value is the hole (i.e. the property was deleted).
void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
                                                         Node* name_index,
                                                         Variable* var_details,
                                                         Variable* var_value,
                                                         Label* if_deleted) {
  Comment("[ LoadPropertyFromGlobalDictionary");
  CSA_ASSERT(this, IsGlobalDictionary(dictionary));

  Node* property_cell = LoadFixedArrayElement(CAST(dictionary), name_index);
  CSA_ASSERT(this, IsPropertyCell(property_cell));

  Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
  GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);

  var_value->Bind(value);

  Node* details = LoadAndUntagToWord32ObjectField(
      property_cell, PropertyCell::kPropertyDetailsRawOffset);
  var_details->Bind(details);

  Comment("] LoadPropertyFromGlobalDictionary");
}
9402 :
// |value| is the property backing store's contents, which is either a value
// or an accessor pair, as specified by |details|.
// Returns either the original value, or the result of the getter call.
// Bails out to {if_bailout} for accessor kinds that need the runtime
// (FunctionTemplateInfo getters, unsupported AccessorInfo receivers).
TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
    Node* value, Node* details, Node* context, Node* receiver,
    Label* if_bailout, GetOwnPropertyMode mode) {
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  Label done(this), if_accessor_info(this, Label::kDeferred);

  // Data properties are returned unchanged.
  Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
  GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);

  // Accessor case.
  GotoIfNot(IsAccessorPair(value), &if_accessor_info);

  // AccessorPair case.
  {
    if (mode == kCallJSGetter) {
      Node* accessor_pair = value;
      Node* getter =
          LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
      Node* getter_map = LoadMap(getter);
      Node* instance_type = LoadMapInstanceType(getter_map);
      // FunctionTemplateInfo getters are not supported yet.
      GotoIf(InstanceTypeEqual(instance_type, FUNCTION_TEMPLATE_INFO_TYPE),
             if_bailout);

      // Return undefined if the {getter} is not callable.
      var_value.Bind(UndefinedConstant());
      GotoIfNot(IsCallableMap(getter_map), &done);

      // Call the accessor.
      Callable callable = CodeFactory::Call(isolate());
      Node* result = CallJS(callable, context, getter, receiver);
      var_value.Bind(result);
    }
    // With mode != kCallJSGetter the raw AccessorPair is returned as-is.
    Goto(&done);
  }

  // AccessorInfo case. Only a small set of well-known native accessors is
  // handled here; everything else bails out.
  BIND(&if_accessor_info);
  {
    Node* accessor_info = value;
    CSA_ASSERT(this, IsAccessorInfo(value));
    CSA_ASSERT(this, TaggedIsNotSmi(receiver));
    Label if_array(this), if_function(this), if_value(this);

    // Dispatch based on {receiver} instance type.
    Node* receiver_map = LoadMap(receiver);
    Node* receiver_instance_type = LoadMapInstanceType(receiver_map);
    GotoIf(IsJSArrayInstanceType(receiver_instance_type), &if_array);
    GotoIf(IsJSFunctionInstanceType(receiver_instance_type), &if_function);
    Branch(IsJSValueInstanceType(receiver_instance_type), &if_value,
           if_bailout);

    // JSArray AccessorInfo case.
    BIND(&if_array);
    {
      // We only deal with the "length" accessor on JSArray.
      GotoIfNot(IsLengthString(
                    LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
                if_bailout);
      var_value.Bind(LoadJSArrayLength(receiver));
      Goto(&done);
    }

    // JSFunction AccessorInfo case.
    BIND(&if_function);
    {
      // We only deal with the "prototype" accessor on JSFunction here.
      GotoIfNot(IsPrototypeString(
                    LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
                if_bailout);

      GotoIfPrototypeRequiresRuntimeLookup(CAST(receiver), CAST(receiver_map),
                                           if_bailout);
      var_value.Bind(LoadJSFunctionPrototype(receiver, if_bailout));
      Goto(&done);
    }

    // JSValue AccessorInfo case.
    BIND(&if_value);
    {
      // We only deal with the "length" accessor on JSValue string wrappers.
      GotoIfNot(IsLengthString(
                    LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
                if_bailout);
      Node* receiver_value = LoadJSValueValue(receiver);
      GotoIfNot(TaggedIsNotSmi(receiver_value), if_bailout);
      GotoIfNot(IsString(receiver_value), if_bailout);
      var_value.Bind(LoadStringLengthAsSmi(receiver_value));
      Goto(&done);
    }
  }

  BIND(&done);
  return UncheckedCast<Object>(var_value.value());
}
9501 :
// Convenience overload of TryGetOwnProperty that discards the property
// details and raw (pre-getter) value, and always invokes JS getters.
void CodeStubAssembler::TryGetOwnProperty(
    Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
    Node* unique_name, Label* if_found_value, Variable* var_value,
    Label* if_not_found, Label* if_bailout) {
  TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
                    if_found_value, var_value, nullptr, nullptr, if_not_found,
                    if_bailout, kCallJSGetter);
}
9510 :
// Looks up {unique_name} as an own property of {object} and, on success,
// stores its (possibly getter-evaluated, see {mode}) value in {var_value}
// before jumping to {if_found_value}. {var_details} and {var_raw_value} are
// optional out-parameters; when given, they receive the PropertyDetails word
// and the pre-getter backing-store value respectively.
void CodeStubAssembler::TryGetOwnProperty(
    Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
    Node* unique_name, Label* if_found_value, Variable* var_value,
    Variable* var_details, Variable* var_raw_value, Label* if_not_found,
    Label* if_bailout, GetOwnPropertyMode mode) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
  Comment("TryGetOwnProperty");
  CSA_ASSERT(this, IsUniqueNameNoIndex(CAST(unique_name)));

  TVARIABLE(HeapObject, var_meta_storage);
  TVARIABLE(IntPtrT, var_entry);

  Label if_found_fast(this), if_found_dict(this), if_found_global(this);

  VARIABLE(local_var_details, MachineRepresentation::kWord32);
  // Details are needed by CallGetterIfAccessor below even when the caller
  // did not ask for them, so fall back to a local variable.
  if (!var_details) {
    var_details = &local_var_details;
  }
  Label if_found(this);

  TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
                    &if_found_dict, &if_found_global, &var_meta_storage,
                    &var_entry, if_not_found, if_bailout);
  BIND(&if_found_fast);
  {
    TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
    Node* name_index = var_entry.value();

    LoadPropertyFromFastObject(object, map, descriptors, name_index,
                               var_details, var_value);
    Goto(&if_found);
  }
  BIND(&if_found_dict);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();
    LoadPropertyFromNameDictionary(dictionary, entry, var_details, var_value);
    Goto(&if_found);
  }
  BIND(&if_found_global);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();

    // A deleted global property (hole in the PropertyCell) counts as
    // not found.
    LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
                                     if_not_found);
    Goto(&if_found);
  }
  // Here we have details and value which could be an accessor.
  BIND(&if_found);
  {
    // TODO(ishell): Execute C++ accessor in case of accessor info
    if (var_raw_value) {
      var_raw_value->Bind(var_value->value());
    }
    Node* value = CallGetterIfAccessor(var_value->value(), var_details->value(),
                                       context, receiver, if_bailout, mode);
    var_value->Bind(value);
    Goto(if_found_value);
  }
}
9572 :
// Checks whether {object} has an element at {intptr_index}. Jumps to
// {if_found}/{if_not_found} accordingly, to {if_absent} for typed-array
// accesses that are absent by spec (detached buffer, out-of-bounds index),
// and to {if_bailout} for cases that need the runtime (special receivers,
// negative indices, unhandled elements kinds).
void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
                                         SloppyTNode<Int32T> instance_type,
                                         SloppyTNode<IntPtrT> intptr_index,
                                         Label* if_found, Label* if_absent,
                                         Label* if_not_found,
                                         Label* if_bailout) {
  // Handle special objects in runtime.
  GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);

  Node* elements_kind = LoadMapElementsKind(map);

  // TODO(verwaest): Support other elements kinds as well.
  Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
      if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
      if_typedarray(this);
  // clang-format off
  int32_t values[] = {
      // Handled by {if_isobjectorsmi}.
      PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS,
      HOLEY_ELEMENTS,
      // Handled by {if_isdouble}.
      PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
      // Handled by {if_isdictionary}.
      DICTIONARY_ELEMENTS,
      // Handled by {if_isfaststringwrapper}.
      FAST_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_isslowstringwrapper}.
      SLOW_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_not_found}.
      NO_ELEMENTS,
      // Handled by {if_typed_array}.
      UINT8_ELEMENTS,
      INT8_ELEMENTS,
      UINT16_ELEMENTS,
      INT16_ELEMENTS,
      UINT32_ELEMENTS,
      INT32_ELEMENTS,
      FLOAT32_ELEMENTS,
      FLOAT64_ELEMENTS,
      UINT8_CLAMPED_ELEMENTS,
      BIGUINT64_ELEMENTS,
      BIGINT64_ELEMENTS,
  };
  // NOTE: entries must stay in one-to-one positional correspondence with
  // {values} above (checked by the STATIC_ASSERT below).
  Label* labels[] = {
      &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
      &if_isobjectorsmi,
      &if_isdouble, &if_isdouble,
      &if_isdictionary,
      &if_isfaststringwrapper,
      &if_isslowstringwrapper,
      if_not_found,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
      &if_typedarray,
  };
  // clang-format on
  STATIC_ASSERT(arraysize(values) == arraysize(labels));
  Switch(elements_kind, if_bailout, values, labels, arraysize(values));

  BIND(&if_isobjectorsmi);
  {
    TNode<FixedArray> elements = CAST(LoadElements(object));
    TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    // A hole in a holey array means the element is not present.
    TNode<Object> element = UnsafeLoadFixedArrayElement(elements, intptr_index);
    TNode<Oddball> the_hole = TheHoleConstant();
    Branch(WordEqual(element, the_hole), if_not_found, if_found);
  }
  BIND(&if_isdouble);
  {
    TNode<FixedArrayBase> elements = LoadElements(object);
    TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    // Check if the element is a double hole, but don't load it.
    LoadFixedDoubleArrayElement(CAST(elements), intptr_index,
                                MachineType::None(), 0, INTPTR_PARAMETERS,
                                if_not_found);
    Goto(if_found);
  }
  BIND(&if_isdictionary);
  {
    // Negative keys must be converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);

    TVARIABLE(IntPtrT, var_entry);
    TNode<NumberDictionary> elements = CAST(LoadElements(object));
    NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
                           if_not_found);
  }
  BIND(&if_isfaststringwrapper);
  {
    // Indices within the wrapped string resolve to characters; otherwise
    // fall through to the wrapper's own fast elements.
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsString(string));
    Node* length = LoadStringLengthAsWord(string);
    GotoIf(UintPtrLessThan(intptr_index, length), if_found);
    Goto(&if_isobjectorsmi);
  }
  BIND(&if_isslowstringwrapper);
  {
    // Same as above, but the wrapper's elements are in dictionary mode.
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsString(string));
    Node* length = LoadStringLengthAsWord(string);
    GotoIf(UintPtrLessThan(intptr_index, length), if_found);
    Goto(&if_isdictionary);
  }
  BIND(&if_typedarray);
  {
    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
    GotoIf(IsDetachedBuffer(buffer), if_absent);

    Node* length = SmiUntag(LoadJSTypedArrayLength(CAST(object)));
    Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
  }
  BIND(&if_oob);
  {
    // Positive OOB indices mean "not found", negative indices must be
    // converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
    Goto(if_not_found);
  }
}
9708 :
// Branches to {if_maybe_special_index} when {name_string} could be a
// "special index" name — it is short enough to be a canonical double and
// starts with a digit, '-', 'I' (Infinity) or 'N' (NaN) — and to
// {if_not_special_index} when it definitely cannot be one.
void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
                                                  Label* if_maybe_special_index,
                                                  Label* if_not_special_index) {
  // TODO(cwhan.tunz): Implement fast cases more.

  // If a name is empty or too long, it's not a special index
  // Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
  const int kBufferSize = 24;
  TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
  GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
  GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
         if_not_special_index);

  // If the first character of name is not a digit or '-', or we can't match it
  // to Infinity or NaN, then this is not a special index.
  TNode<Int32T> first_char = StringCharCodeAt(name_string, IntPtrConstant(0));
  // If the name starts with '-', it can be a negative index.
  GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
  // If the name starts with 'I', it can be "Infinity".
  GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
  // If the name starts with 'N', it can be "NaN".
  GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
  // Finally, if the first character is not a digit either, then we are sure
  // that the name is not a special index.
  GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
  GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
  Goto(if_maybe_special_index);
}
9737 :
// Walks {receiver}'s prototype chain, invoking {lookup_property_in_holder}
// (for unique-name keys) or {lookup_element_in_holder} (for integer-index
// keys) on each holder. Jumps to {if_end} when the chain terminates in null,
// to {if_proxy} (if non-null) when the receiver itself is a JSProxy, and to
// {if_bailout} for cases needing the runtime (Smi or non-JSReceiver
// receivers, possible integer-indexed-exotic names on typed arrays, ...).
void CodeStubAssembler::TryPrototypeChainLookup(
    Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
    const LookupInHolder& lookup_element_in_holder, Label* if_end,
    Label* if_bailout, Label* if_proxy) {
  // Ensure receiver is JSReceiver, otherwise bailout.
  Label if_objectisnotsmi(this);
  Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
  BIND(&if_objectisnotsmi);

  Node* map = LoadMap(receiver);
  Node* instance_type = LoadMapInstanceType(map);
  {
    Label if_objectisreceiver(this);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
    Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
           if_bailout);
    BIND(&if_objectisreceiver);

    if (if_proxy) {
      GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
    }
  }

  VARIABLE(var_index, MachineType::PointerRepresentation());
  VARIABLE(var_unique, MachineRepresentation::kTagged);

  // Split on the key's flavor: integer index vs. unique name.
  Label if_keyisindex(this), if_iskeyunique(this);
  TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
            if_bailout);

  BIND(&if_iskeyunique);
  {
    // Loop state: the current holder plus its map and instance type.
    VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
    VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
    VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
             instance_type);

    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    BIND(&loop);
    {
      Node* holder_map = var_holder_map.value();
      Node* holder_instance_type = var_holder_instance_type.value();

      Label next_proto(this), check_integer_indexed_exotic(this);
      lookup_property_in_holder(receiver, var_holder.value(), holder_map,
                                holder_instance_type, var_unique.value(),
                                &check_integer_indexed_exotic, if_bailout);

      BIND(&check_integer_indexed_exotic);
      {
        // Bailout if it can be an integer indexed exotic case.
        GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
                  &next_proto);
        GotoIfNot(IsString(var_unique.value()), &next_proto);
        BranchIfMaybeSpecialIndex(CAST(var_unique.value()), if_bailout,
                                  &next_proto);
      }

      BIND(&next_proto);

      Node* proto = LoadMapPrototype(holder_map);

      GotoIf(IsNull(proto), if_end);

      // Advance the loop state to the next holder in the chain.
      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
  BIND(&if_keyisindex);
  {
    // Same loop structure as above, but using the element lookup callback.
    VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
    VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
    VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
             instance_type);

    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    BIND(&loop);
    {
      Label next_proto(this);
      lookup_element_in_holder(receiver, var_holder.value(),
                               var_holder_map.value(),
                               var_holder_instance_type.value(),
                               var_index.value(), &next_proto, if_bailout);
      BIND(&next_proto);

      Node* proto = LoadMapPrototype(var_holder_map.value());

      GotoIf(IsNull(proto), if_end);

      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
}
9849 :
// Returns true/false depending on whether {prototype} occurs anywhere in
// {object}'s prototype chain. Falls back to Runtime::kHasInPrototypeChain
// for proxies and for maps with interceptors or access checks.
Node* CodeStubAssembler::HasInPrototypeChain(Node* context, Node* object,
                                             Node* prototype) {
  CSA_ASSERT(this, TaggedIsNotSmi(object));
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_false(this), return_true(this),
      return_runtime(this, Label::kDeferred), return_result(this);

  // Loop through the prototype chain looking for the {prototype}.
  VARIABLE(var_object_map, MachineRepresentation::kTagged, LoadMap(object));
  Label loop(this, &var_object_map);
  Goto(&loop);
  BIND(&loop);
  {
    // Check if we can determine the prototype directly from the {object_map}.
    Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
    Node* object_map = var_object_map.value();
    TNode<Int32T> object_instance_type = LoadMapInstanceType(object_map);
    Branch(IsSpecialReceiverInstanceType(object_instance_type),
           &if_objectisspecial, &if_objectisdirect);
    BIND(&if_objectisspecial);
    {
      // The {object_map} is a special receiver map or a primitive map, check
      // if we need to use the if_objectisspecial path in the runtime.
      GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
             &return_runtime);
      Node* object_bitfield = LoadMapBitField(object_map);
      int mask = Map::HasNamedInterceptorBit::kMask |
                 Map::IsAccessCheckNeededBit::kMask;
      Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
             &if_objectisdirect);
    }
    BIND(&if_objectisdirect);

    // Check the current {object} prototype.
    Node* object_prototype = LoadMapPrototype(object_map);
    GotoIf(IsNull(object_prototype), &return_false);
    GotoIf(WordEqual(object_prototype, prototype), &return_true);

    // Continue with the prototype.
    CSA_ASSERT(this, TaggedIsNotSmi(object_prototype));
    var_object_map.Bind(LoadMap(object_prototype));
    Goto(&loop);
  }

  BIND(&return_true);
  var_result.Bind(TrueConstant());
  Goto(&return_result);

  BIND(&return_false);
  var_result.Bind(FalseConstant());
  Goto(&return_result);

  BIND(&return_runtime);
  {
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
9913 :
// Implements the fast path of OrdinaryHasInstance (used by instanceof):
// resolves {callable}'s "prototype" and checks whether it occurs in
// {object}'s prototype chain. Falls back to Runtime::kOrdinaryHasInstance
// for all non-fast cases (Smis, non-JSFunction callables, functions whose
// prototype needs a runtime lookup, forced slow path).
Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
                                             Node* object) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label return_runtime(this, Label::kDeferred), return_result(this);

  GotoIfForceSlowPath(&return_runtime);

  // Goto runtime if {object} is a Smi.
  GotoIf(TaggedIsSmi(object), &return_runtime);

  // Goto runtime if {callable} is a Smi.
  GotoIf(TaggedIsSmi(callable), &return_runtime);

  // Load map of {callable}.
  Node* callable_map = LoadMap(callable);

  // Goto runtime if {callable} is not a JSFunction.
  Node* callable_instance_type = LoadMapInstanceType(callable_map);
  GotoIfNot(InstanceTypeEqual(callable_instance_type, JS_FUNCTION_TYPE),
            &return_runtime);

  GotoIfPrototypeRequiresRuntimeLookup(CAST(callable), CAST(callable_map),
                                       &return_runtime);

  // Get the "prototype" (or initial map) of the {callable}.
  Node* callable_prototype =
      LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
  {
    Label no_initial_map(this), walk_prototype_chain(this);
    VARIABLE(var_callable_prototype, MachineRepresentation::kTagged,
             callable_prototype);

    // Resolve the "prototype" if the {callable} has an initial map.
    GotoIfNot(IsMap(callable_prototype), &no_initial_map);
    var_callable_prototype.Bind(
        LoadObjectField(callable_prototype, Map::kPrototypeOffset));
    Goto(&walk_prototype_chain);

    BIND(&no_initial_map);
    // {callable_prototype} is the hole if the "prototype" property hasn't been
    // requested so far.
    Branch(WordEqual(callable_prototype, TheHoleConstant()), &return_runtime,
           &walk_prototype_chain);

    BIND(&walk_prototype_chain);
    callable_prototype = var_callable_prototype.value();
  }

  // Loop through the prototype chain looking for the {callable} prototype.
  CSA_ASSERT(this, IsJSReceiver(callable_prototype));
  var_result.Bind(HasInPrototypeChain(context, object, callable_prototype));
  Goto(&return_result);

  BIND(&return_runtime);
  {
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
9978 :
// Computes base_size + index * element_size for an array of the given
// {kind}, where {index_node} is interpreted per {mode} (tagged Smi or raw
// intptr). When the index is a compile-time constant the whole expression is
// folded into a single IntPtr constant; otherwise the scaling is done with a
// shift (Smi inputs fold their untagging into the shift amount).
TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
                                                         ElementsKind kind,
                                                         ParameterMode mode,
                                                         int base_size) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, mode));
  int element_size_shift = ElementsKindToShiftSize(kind);
  int element_size = 1 << element_size_shift;
  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  intptr_t index = 0;
  bool constant_index = false;
  if (mode == SMI_PARAMETERS) {
    // Compensate for the Smi tag/shift so the tagged word can be scaled
    // directly without a separate untag step.
    element_size_shift -= kSmiShiftBits;
    Smi smi_index;
    constant_index = ToSmiConstant(index_node, &smi_index);
    if (constant_index) index = smi_index->value();
    index_node = BitcastTaggedToWord(index_node);
  } else {
    DCHECK(mode == INTPTR_PARAMETERS);
    constant_index = ToIntPtrConstant(index_node, index);
  }
  if (constant_index) {
    // Fold the entire computation at compile time.
    return IntPtrConstant(base_size + element_size * index);
  }

  // Scale the dynamic index: left shift for growth, arithmetic right shift
  // when the (Smi-adjusted) shift is negative.
  TNode<WordT> shifted_index =
      (element_size_shift == 0)
          ? UncheckedCast<WordT>(index_node)
          : ((element_size_shift > 0)
                 ? WordShl(index_node, IntPtrConstant(element_size_shift))
                 : WordSar(index_node, IntPtrConstant(-element_size_shift)));
  return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
}
10011 :
10012 0 : TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
10013 : SloppyTNode<IntPtrT> length,
10014 : int header_size,
10015 : ElementsKind kind) {
10016 : // Make sure we point to the last field.
10017 0 : int element_size = 1 << ElementsKindToShiftSize(kind);
10018 0 : int correction = header_size - kHeapObjectTag - element_size;
10019 : TNode<IntPtrT> last_offset =
10020 0 : ElementOffsetFromIndex(length, kind, INTPTR_PARAMETERS, correction);
10021 0 : return IntPtrLessThanOrEqual(offset, last_offset);
10022 : }
10023 :
// Returns the value stored in {closure}'s FeedbackCell. Per the comments in
// the callers below, this is one of: a FeedbackVector, a
// ClosureFeedbackCellArray (before lazy vector allocation), or undefined.
TNode<HeapObject> CodeStubAssembler::LoadFeedbackCellValue(
    SloppyTNode<JSFunction> closure) {
  TNode<FeedbackCell> feedback_cell =
      CAST(LoadObjectField(closure, JSFunction::kFeedbackCellOffset));
  return CAST(LoadObjectField(feedback_cell, FeedbackCell::kValueOffset));
}
10030 :
// Returns {closure}'s FeedbackVector, or undefined when the vector has not
// been allocated yet.
TNode<HeapObject> CodeStubAssembler::LoadFeedbackVector(
    SloppyTNode<JSFunction> closure) {
  TVARIABLE(HeapObject, maybe_vector, LoadFeedbackCellValue(closure));
  Label done(this);

  // If the closure doesn't have a feedback vector allocated yet, return
  // undefined. FeedbackCell can contain Undefined / FixedArray (for lazy
  // allocations) / FeedbackVector.
  GotoIf(IsFeedbackVector(maybe_vector.value()), &done);

  // In all other cases return Undefined.
  maybe_vector = UndefinedConstant();
  Goto(&done);

  BIND(&done);
  return maybe_vector.value();
}
10048 :
      : // Returns the ClosureFeedbackCellArray for |closure|: taken straight from
      : // the FeedbackCell when the feedback vector is not yet allocated, otherwise
      : // loaded out of the allocated FeedbackVector.
10049 168 : TNode<ClosureFeedbackCellArray> CodeStubAssembler::LoadClosureFeedbackArray(
10050 : SloppyTNode<JSFunction> closure) {
10051 168 : TVARIABLE(HeapObject, feedback_cell_array, LoadFeedbackCellValue(closure));
10052 168 : Label end(this);
10053 :
10054 : // When feedback vectors are not yet allocated, the feedback cell contains
10055 : // an array of feedback cells used by create closures.
10056 336 : GotoIf(HasInstanceType(feedback_cell_array.value(),
10057 168 : CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
10058 168 : &end);
10059 :
10060 : // Load FeedbackCellArray from feedback vector.
10061 : TNode<FeedbackVector> vector = CAST(feedback_cell_array.value());
10062 : feedback_cell_array = CAST(
10063 : LoadObjectField(vector, FeedbackVector::kClosureFeedbackCellArrayOffset));
10064 168 : Goto(&end);
10065 :
10066 : BIND(&end);
10067 168 : return CAST(feedback_cell_array.value());
10068 : }
10069 :
      : // Loads the feedback vector of the JSFunction found in the parent JavaScript
      : // frame. Only valid for stubs invoked with such a frame on the stack.
10070 504 : TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorForStub() {
10071 : TNode<JSFunction> function =
10072 504 : CAST(LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset));
10073 504 : return CAST(LoadFeedbackVector(function));
10074 : }
10075 :
      : // ORs the Smi |feedback| into vector slot |slot_id| of |maybe_vector| and
      : // reports the update. No-op when |maybe_vector| is Undefined or when the
      : // combined feedback equals the existing feedback.
10076 8736 : void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* maybe_vector,
10077 : Node* slot_id) {
10078 17472 : Label end(this);
10079 : // If feedback_vector is not valid, then nothing to do.
10080 17472 : GotoIf(IsUndefined(maybe_vector), &end);
10081 :
10082 : // This method is used for binary op and compare feedback. These
10083 : // vector nodes are initialized with a smi 0, so we can simply OR
10084 : // our new feedback in place.
10085 : TNode<FeedbackVector> feedback_vector = CAST(maybe_vector);
10086 : TNode<MaybeObject> feedback_element =
10087 8736 : LoadFeedbackVectorSlot(feedback_vector, slot_id);
10088 8736 : TNode<Smi> previous_feedback = CAST(feedback_element);
10089 8736 : TNode<Smi> combined_feedback = SmiOr(previous_feedback, CAST(feedback));
10090 :
      : // Skip the store (and the profiler-tick reset) when nothing changed.
10091 17472 : GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
10092 : {
10093 : StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
10094 8736 : SKIP_WRITE_BARRIER);
10095 8736 : ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
10096 8736 : Goto(&end);
10097 : }
10098 :
10099 : BIND(&end);
10100 8736 : }
10101 :
      : // Records that feedback in |feedback_vector| changed: resets the profiler
      : // tick counter and, in V8_TRACE_FEEDBACK_UPDATES builds, traces the update
      : // tagged with |reason|.
10102 13272 : void CodeStubAssembler::ReportFeedbackUpdate(
10103 : SloppyTNode<FeedbackVector> feedback_vector, SloppyTNode<IntPtrT> slot_id,
10104 : const char* reason) {
10105 : // Reset profiler ticks.
10106 : StoreObjectFieldNoWriteBarrier(
10107 26544 : feedback_vector, FeedbackVector::kProfilerTicksOffset, Int32Constant(0),
10108 : MachineRepresentation::kWord32);
10109 :
10110 : #ifdef V8_TRACE_FEEDBACK_UPDATES
10111 : // Trace the update.
10112 : CallRuntime(Runtime::kInterpreterTraceUpdateFeedback, NoContextConstant(),
10113 : LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset),
10114 : SmiTag(slot_id), StringConstant(reason));
10115 : #endif // V8_TRACE_FEEDBACK_UPDATES
10116 13272 : }
10117 :
      : // Replaces the accumulated feedback with the Smi |new_feedback|.
      : // No-op when feedback collection is disabled (null variable).
10118 33320 : void CodeStubAssembler::OverwriteFeedback(Variable* existing_feedback,
10119 : int new_feedback) {
10120 33320 : if (existing_feedback == nullptr) return;
10121 46368 : existing_feedback->Bind(SmiConstant(new_feedback));
10122 : }
10123 :
      : // ORs the constant |feedback| bits into the accumulated feedback Smi.
      : // No-op when feedback collection is disabled (null variable).
10124 26320 : void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
10125 : int feedback) {
10126 26320 : if (existing_feedback == nullptr) return;
10127 : existing_feedback->Bind(
10128 38640 : SmiOr(CAST(existing_feedback->value()), SmiConstant(feedback)));
10129 : }
10130 :
      : // ORs the dynamic Smi |feedback| node into the accumulated feedback Smi.
      : // No-op when feedback collection is disabled (null variable).
10131 560 : void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
10132 : Node* feedback) {
10133 560 : if (existing_feedback == nullptr) return;
10134 : existing_feedback->Bind(
10135 1008 : SmiOr(CAST(existing_feedback->value()), CAST(feedback)));
10136 : }
10137 :
      : // Jumps to |if_protector| when |name| is one of the property names guarded
      : // by a protector cell (constructor, @@iterator, next, @@species,
      : // @@isConcatSpreadable, resolve, then); falls through otherwise.
10138 896 : void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
10139 : Label* if_protector) {
10140 : // This list must be kept in sync with LookupIterator::UpdateProtector!
10141 : // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
10142 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kconstructor_string)),
10143 896 : if_protector);
10144 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kiterator_symbol)), if_protector);
10145 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::knext_string)), if_protector);
10146 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kspecies_symbol)), if_protector);
10147 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kis_concat_spreadable_symbol)),
10148 896 : if_protector);
10149 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kresolve_string)), if_protector);
10150 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kthen_string)), if_protector);
10151 : // Fall through if no case matched.
10152 896 : }
10153 :
      : // Loads the map of |receiver|; Smi receivers report the HeapNumber map
      : // (the map their wrapper-less numeric value behaves as for IC purposes).
10154 728 : TNode<Map> CodeStubAssembler::LoadReceiverMap(SloppyTNode<Object> receiver) {
10155 : return Select<Map>(
10156 1456 : TaggedIsSmi(receiver),
10157 728 : [=] { return CAST(LoadRoot(RootIndex::kHeapNumberMap)); },
10158 3640 : [=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
10159 : }
10160 :
      : // Converts |key| (a Smi or a HeapNumber holding an exact int32 value) to an
      : // IntPtr; jumps to |miss| for any other key, including fractional numbers.
10161 8964 : TNode<IntPtrT> CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
10162 8964 : TVARIABLE(IntPtrT, var_intptr_key);
10163 8964 : Label done(this, &var_intptr_key), key_is_smi(this);
10164 17928 : GotoIf(TaggedIsSmi(key), &key_is_smi);
10165 : // Try to convert a heap number to a Smi.
10166 17928 : GotoIfNot(IsHeapNumber(key), miss);
10167 : {
10168 : TNode<Float64T> value = LoadHeapNumberValue(key);
10169 8964 : TNode<Int32T> int_value = RoundFloat64ToInt32(value);
      : // Round-trip check rejects values that don't fit exactly in an int32.
10170 26892 : GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
10171 17928 : var_intptr_key = ChangeInt32ToIntPtr(int_value);
10172 8964 : Goto(&done);
10173 : }
10174 :
10175 : BIND(&key_is_smi);
10176 : {
10177 17928 : var_intptr_key = SmiUntag(key);
10178 8964 : Goto(&done);
10179 : }
10180 :
10181 : BIND(&done);
10182 8964 : return var_intptr_key.value();
10183 : }
10184 :
      : // Loads, stores, or tests (per |access_mode|) element |key| of a sloppy
      : // arguments object |receiver|, handling both the mapped (context-backed)
      : // and unmapped (FixedArray-backed) ranges; jumps to |bailout| for cases
      : // the fast path cannot handle. Returns the loaded value / a boolean for
      : // kHas / |value| for kStore.
10185 336 : Node* CodeStubAssembler::EmitKeyedSloppyArguments(
10186 : Node* receiver, Node* key, Node* value, Label* bailout,
10187 : ArgumentsAccessMode access_mode) {
10188 : // Mapped arguments are actual arguments. Unmapped arguments are values added
10189 : // to the arguments object after it was created for the call. Mapped arguments
10190 : // are stored in the context at indexes given by elements[key + 2]. Unmapped
10191 : // arguments are stored as regular indexed properties in the arguments array,
10192 : // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
10193 : // look at argument object construction.
10194 : //
10195 : // The sloppy arguments elements array has a special format:
10196 : //
10197 : // 0: context
10198 : // 1: unmapped arguments array
10199 : // 2: mapped_index0,
10200 : // 3: mapped_index1,
10201 : // ...
10202 : //
10203 : // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
10204 : // If key + 2 >= elements.length then attempt to look in the unmapped
10205 : // arguments array (given by elements[1]) and return the value at key, missing
10206 : // to the runtime if the unmapped arguments array is not a fixed array or if
10207 : // key >= unmapped_arguments_array.length.
10208 : //
10209 : // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
10210 : // in the unmapped arguments array, as described above. Otherwise, t is a Smi
10211 : // index into the context array given at elements[0]. Return the value at
10212 : // context[t].
10213 :
      : // Only non-negative Smi keys take the fast path.
10214 672 : GotoIfNot(TaggedIsSmi(key), bailout);
10215 672 : key = SmiUntag(key);
10216 1008 : GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
10217 :
10218 : TNode<FixedArray> elements = CAST(LoadElements(receiver));
10219 : TNode<IntPtrT> elements_length = LoadAndUntagFixedArrayBaseLength(elements);
10220 :
10221 672 : VARIABLE(var_result, MachineRepresentation::kTagged);
10222 336 : if (access_mode == ArgumentsAccessMode::kStore) {
10223 224 : var_result.Bind(value);
10224 : } else {
10225 : DCHECK(access_mode == ArgumentsAccessMode::kLoad ||
10226 : access_mode == ArgumentsAccessMode::kHas);
10227 : }
10228 336 : Label if_mapped(this), if_unmapped(this), end(this, &var_result);
10229 672 : Node* intptr_two = IntPtrConstant(2);
10230 672 : Node* adjusted_length = IntPtrSub(elements_length, intptr_two);
10231 :
10232 672 : GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);
10233 :
      : // The hole marks entries that have been unmapped (e.g. deleted).
10234 : TNode<Object> mapped_index =
10235 672 : LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
10236 336 : Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);
10237 :
10238 : BIND(&if_mapped);
10239 : {
10240 336 : TNode<IntPtrT> mapped_index_intptr = SmiUntag(CAST(mapped_index));
10241 336 : TNode<Context> the_context = CAST(LoadFixedArrayElement(elements, 0));
10242 336 : if (access_mode == ArgumentsAccessMode::kLoad) {
10243 112 : Node* result = LoadContextElement(the_context, mapped_index_intptr);
10244 : CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
10245 56 : var_result.Bind(result);
10246 280 : } else if (access_mode == ArgumentsAccessMode::kHas) {
10247 : CSA_ASSERT(this, Word32BinaryNot(IsTheHole(LoadContextElement(
10248 : the_context, mapped_index_intptr))));
10249 56 : var_result.Bind(TrueConstant());
10250 : } else {
10251 224 : StoreContextElement(the_context, mapped_index_intptr, value);
10252 : }
10253 336 : Goto(&end);
10254 : }
10255 :
10256 : BIND(&if_unmapped);
10257 : {
10258 : TNode<HeapObject> backing_store_ho =
10259 336 : CAST(LoadFixedArrayElement(elements, 1));
      : // Bail to the runtime when the backing store is not a plain FixedArray
      : // (e.g. it is a dictionary).
10260 336 : GotoIf(WordNotEqual(LoadMap(backing_store_ho), FixedArrayMapConstant()),
10261 336 : bailout);
10262 : TNode<FixedArray> backing_store = CAST(backing_store_ho);
10263 :
10264 : TNode<IntPtrT> backing_store_length =
10265 : LoadAndUntagFixedArrayBaseLength(backing_store);
10266 336 : if (access_mode == ArgumentsAccessMode::kHas) {
10267 56 : Label out_of_bounds(this);
10268 112 : GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length),
10269 56 : &out_of_bounds);
10270 112 : Node* result = LoadFixedArrayElement(backing_store, key);
10271 : var_result.Bind(
10272 112 : SelectBooleanConstant(WordNotEqual(result, TheHoleConstant())));
10273 56 : Goto(&end);
10274 :
10275 : BIND(&out_of_bounds);
10276 56 : var_result.Bind(FalseConstant());
10277 56 : Goto(&end);
10278 : } else {
10279 560 : GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);
10280 :
10281 : // The key falls into unmapped range.
10282 280 : if (access_mode == ArgumentsAccessMode::kLoad) {
10283 112 : Node* result = LoadFixedArrayElement(backing_store, key);
10284 56 : GotoIf(WordEqual(result, TheHoleConstant()), bailout);
10285 56 : var_result.Bind(result);
10286 : } else {
10287 224 : StoreFixedArrayElement(backing_store, key, value);
10288 : }
10289 280 : Goto(&end);
10290 : }
10291 : }
10292 :
10293 : BIND(&end);
10294 672 : return var_result.value();
10295 : }
10296 :
      : // Returns the script context stored at |context_index| in the native
      : // context's ScriptContextTable.
10297 840 : TNode<Context> CodeStubAssembler::LoadScriptContext(
10298 : TNode<Context> context, TNode<IntPtrT> context_index) {
10299 : TNode<Context> native_context = LoadNativeContext(context);
10300 840 : TNode<ScriptContextTable> script_context_table = CAST(
10301 : LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX));
10302 :
10303 : TNode<Context> script_context = CAST(LoadFixedArrayElement(
10304 : script_context_table, context_index,
10305 : ScriptContextTable::kFirstContextSlotIndex * kTaggedSize));
10306 840 : return script_context;
10307 : }
10308 :
10309 : namespace {
10310 :
10311 : // Converts a typed-array elements kind to its machine representation.
10312 4032 : MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
10313 4032 : switch (kind) {
10314 : case UINT8_CLAMPED_ELEMENTS:
10315 : case UINT8_ELEMENTS:
10316 : case INT8_ELEMENTS:
10317 : return MachineRepresentation::kWord8;
10318 : case UINT16_ELEMENTS:
10319 : case INT16_ELEMENTS:
10320 896 : return MachineRepresentation::kWord16;
10321 : case UINT32_ELEMENTS:
10322 : case INT32_ELEMENTS:
10323 896 : return MachineRepresentation::kWord32;
10324 : case FLOAT32_ELEMENTS:
10325 448 : return MachineRepresentation::kFloat32;
10326 : case FLOAT64_ELEMENTS:
10327 448 : return MachineRepresentation::kFloat64;
10328 : default:
      : // BigInt kinds are handled separately by callers; anything else is a bug.
10329 0 : UNREACHABLE();
10330 : }
10331 : }
10332 :
10333 : } // namespace
10334 :
      : // Stores |value| at |index| into |elements| of the given |kind|:
      : // raw machine stores for typed arrays, unboxed doubles for double kinds,
      : // tagged stores (with write barrier unless Smi kind) otherwise.
10335 8828 : void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
10336 : Node* index, Node* value,
10337 : ParameterMode mode) {
10338 8828 : if (IsFixedTypedArrayElementsKind(kind)) {
10339 : if (kind == UINT8_CLAMPED_ELEMENTS) {
10340 : CSA_ASSERT(this,
10341 : Word32Equal(value, Word32And(Int32Constant(0xFF), value)));
10342 : }
10343 8064 : Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
10344 : // TODO(cbruni): Add OOB check once typed.
10345 4032 : MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
10346 4032 : StoreNoWriteBarrier(rep, elements, offset, value);
10347 4032 : return;
10348 4796 : } else if (IsDoubleElementsKind(kind)) {
10349 1524 : TNode<Float64T> value_float64 = UncheckedCast<Float64T>(value);
10350 1524 : StoreFixedDoubleArrayElement(CAST(elements), index, value_float64, mode);
10351 : } else {
      : // Smis never need a write barrier; other tagged values might.
10352 : WriteBarrierMode barrier_mode =
10353 3272 : IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
10354 3272 : StoreFixedArrayElement(CAST(elements), index, value, barrier_mode, 0, mode);
10355 : }
10356 : }
10357 :
      : // Clamps a signed int32 to [0, 255]: negative values become 0, values above
      : // 255 become 255 (Uint8Clamped typed-array semantics for integers).
10358 392 : Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
10359 784 : Label done(this);
10360 784 : Node* int32_zero = Int32Constant(0);
10361 784 : Node* int32_255 = Int32Constant(255);
10362 784 : VARIABLE(var_value, MachineRepresentation::kWord32, int32_value);
      : // Unsigned compare handles the common in-range case with one branch.
10363 784 : GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
10364 392 : var_value.Bind(int32_zero);
10365 784 : GotoIf(Int32LessThan(int32_value, int32_zero), &done);
10366 392 : var_value.Bind(int32_255);
10367 392 : Goto(&done);
10368 : BIND(&done);
10369 784 : return var_value.value();
10370 : }
10371 :
      : // Clamps a float64 to [0, 255] with round-half-to-even, as required for
      : // Uint8Clamped typed-array stores. NaN compares false both ways and thus
      : // falls through to the rounding path.
10372 392 : Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
10373 784 : Label done(this);
10374 1176 : VARIABLE(var_value, MachineRepresentation::kWord32, Int32Constant(0));
10375 1176 : GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
10376 784 : var_value.Bind(Int32Constant(255));
10377 1176 : GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
10378 : {
10379 784 : Node* rounded_value = Float64RoundToEven(float64_value);
10380 784 : var_value.Bind(TruncateFloat64ToWord32(rounded_value));
10381 392 : Goto(&done);
10382 : }
10383 : BIND(&done);
10384 784 : return var_value.value();
10385 : }
10386 :
      : // Converts JS value |input| into the raw machine value appropriate for a
      : // store into a typed array of |elements_kind|: word32 (possibly clamped)
      : // for integer kinds, float32/float64 for float kinds, or a BigInt for the
      : // 64-bit integer kinds. Non-numbers are converted via NonNumberToNumber,
      : // which may call back into JS.
10387 4088 : Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
10388 : TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
10389 : DCHECK(IsFixedTypedArrayElementsKind(elements_kind));
10390 :
10391 : MachineRepresentation rep;
10392 4088 : switch (elements_kind) {
10393 : case UINT8_ELEMENTS:
10394 : case INT8_ELEMENTS:
10395 : case UINT16_ELEMENTS:
10396 : case INT16_ELEMENTS:
10397 : case UINT32_ELEMENTS:
10398 : case INT32_ELEMENTS:
10399 : case UINT8_CLAMPED_ELEMENTS:
10400 : rep = MachineRepresentation::kWord32;
10401 : break;
10402 : case FLOAT32_ELEMENTS:
10403 : rep = MachineRepresentation::kFloat32;
10404 392 : break;
10405 : case FLOAT64_ELEMENTS:
10406 : rep = MachineRepresentation::kFloat64;
10407 392 : break;
10408 : case BIGINT64_ELEMENTS:
10409 : case BIGUINT64_ELEMENTS:
      : // BigInt kinds are returned tagged; callers store them separately.
10410 1120 : return ToBigInt(context, input);
10411 : default:
10412 0 : UNREACHABLE();
10413 : }
10414 :
10415 7056 : VARIABLE(var_result, rep);
10416 7056 : VARIABLE(var_input, MachineRepresentation::kTagged, input);
10417 3528 : Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
10418 3528 : convert(this), loop(this, &var_input);
10419 3528 : Goto(&loop);
10420 : BIND(&loop);
10421 10584 : GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
10422 : // We can handle both HeapNumber and Oddball here, since Oddball has the
10423 : // same layout as the HeapNumber for the HeapNumber::value field. This
10424 : // way we can also properly optimize stores of oddballs to typed arrays.
10425 10584 : GotoIf(IsHeapNumber(var_input.value()), &if_heapnumber_or_oddball);
10426 : STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
10427 : Oddball::kToNumberRawOffset);
10428 10584 : Branch(HasInstanceType(var_input.value(), ODDBALL_TYPE),
10429 3528 : &if_heapnumber_or_oddball, &convert);
10430 :
10431 : BIND(&if_heapnumber_or_oddball);
10432 : {
10433 : Node* value = UncheckedCast<Float64T>(LoadObjectField(
10434 7056 : var_input.value(), HeapNumber::kValueOffset, MachineType::Float64()));
10435 3528 : if (rep == MachineRepresentation::kWord32) {
10436 2744 : if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
10437 392 : value = Float64ToUint8Clamped(value);
10438 : } else {
10439 4704 : value = TruncateFloat64ToWord32(value);
10440 : }
10441 784 : } else if (rep == MachineRepresentation::kFloat32) {
10442 784 : value = TruncateFloat64ToFloat32(value);
10443 : } else {
10444 : DCHECK_EQ(MachineRepresentation::kFloat64, rep);
10445 : }
10446 3528 : var_result.Bind(value);
10447 3528 : Goto(&done);
10448 : }
10449 :
10450 : BIND(&if_smi);
10451 : {
10452 10584 : Node* value = SmiToInt32(var_input.value());
10453 3528 : if (rep == MachineRepresentation::kFloat32) {
10454 784 : value = RoundInt32ToFloat32(value);
10455 3136 : } else if (rep == MachineRepresentation::kFloat64) {
10456 784 : value = ChangeInt32ToFloat64(value);
10457 : } else {
10458 : DCHECK_EQ(MachineRepresentation::kWord32, rep);
10459 2744 : if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
10460 392 : value = Int32ToUint8Clamped(value);
10461 : }
10462 : }
10463 3528 : var_result.Bind(value);
10464 3528 : Goto(&done);
10465 : }
10466 :
10467 : BIND(&convert);
10468 : {
      : // May run arbitrary JS (valueOf/toString); loops back to re-dispatch.
10469 7056 : var_input.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, input));
10470 3528 : Goto(&loop);
10471 : }
10472 :
10473 : BIND(&done);
10474 3528 : return var_result.value();
10475 : }
10476 :
      : // Stores |value| (converted to a BigInt) at |intptr_key| into the BigInt
      : // typed array |object|. When |opt_if_detached| is given, jumps there if the
      : // backing buffer was detached — checked AFTER ToBigInt, which can run JS.
10477 224 : void CodeStubAssembler::EmitBigTypedArrayElementStore(
10478 : TNode<JSTypedArray> object, TNode<FixedTypedArrayBase> elements,
10479 : TNode<IntPtrT> intptr_key, TNode<Object> value, TNode<Context> context,
10480 : Label* opt_if_detached) {
10481 224 : TNode<BigInt> bigint_value = ToBigInt(context, value);
10482 :
10483 224 : if (opt_if_detached != nullptr) {
10484 : // Check if buffer has been detached. Must happen after {ToBigInt}!
10485 : Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
10486 448 : GotoIf(IsDetachedBuffer(buffer), opt_if_detached);
10487 : }
10488 :
10489 224 : TNode<RawPtrT> backing_store = LoadFixedTypedArrayBackingStore(elements);
10490 : TNode<IntPtrT> offset = ElementOffsetFromIndex(intptr_key, BIGINT64_ELEMENTS,
10491 224 : INTPTR_PARAMETERS, 0);
10492 224 : EmitBigTypedArrayElementStore(elements, backing_store, offset, bigint_value);
10493 : }
10494 :
      : // Decomposes |bigint| into its 64-bit two's-complement raw value:
      : // |var_low| receives the low word, |var_high| the high word (only
      : // meaningful on 32-bit targets). Negative BigInts are negated into
      : // two's-complement form, including the cross-word borrow.
10495 1456 : void CodeStubAssembler::BigIntToRawBytes(TNode<BigInt> bigint,
10496 : TVariable<UintPtrT>* var_low,
10497 : TVariable<UintPtrT>* var_high) {
10498 2912 : Label done(this);
10499 2912 : *var_low = Unsigned(IntPtrConstant(0));
10500 2912 : *var_high = Unsigned(IntPtrConstant(0));
10501 : TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
10502 : TNode<Uint32T> length = DecodeWord32<BigIntBase::LengthBits>(bitfield);
10503 : TNode<Uint32T> sign = DecodeWord32<BigIntBase::SignBits>(bitfield);
      : // Zero-length BigInt is the value 0; both words stay zero.
10504 4368 : GotoIf(Word32Equal(length, Int32Constant(0)), &done);
10505 : *var_low = LoadBigIntDigit(bigint, 0);
10506 1456 : if (!Is64()) {
10507 0 : Label load_done(this);
10508 0 : GotoIf(Word32Equal(length, Int32Constant(1)), &load_done);
10509 : *var_high = LoadBigIntDigit(bigint, 1);
10510 0 : Goto(&load_done);
10511 : BIND(&load_done);
10512 : }
10513 4368 : GotoIf(Word32Equal(sign, Int32Constant(0)), &done);
10514 : // Negative value. Simulate two's complement.
10515 1456 : if (!Is64()) {
10516 0 : *var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high->value()));
10517 0 : Label no_carry(this);
      : // Borrow from the high word only when the low word is non-zero.
10518 0 : GotoIf(WordEqual(var_low->value(), IntPtrConstant(0)), &no_carry);
10519 0 : *var_high = Unsigned(IntPtrSub(var_high->value(), IntPtrConstant(1)));
10520 0 : Goto(&no_carry);
10521 : BIND(&no_carry);
10522 : }
10523 4368 : *var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low->value()));
10524 1456 : Goto(&done);
10525 : BIND(&done);
10526 1456 : }
10527 :
      : // Writes |bigint_value| as raw 64-bit two's-complement data at |offset| in
      : // |backing_store|, honoring target endianness and word size (two word
      : // stores on 32-bit platforms).
10528 896 : void CodeStubAssembler::EmitBigTypedArrayElementStore(
10529 : TNode<FixedTypedArrayBase> elements, TNode<RawPtrT> backing_store,
10530 : TNode<IntPtrT> offset, TNode<BigInt> bigint_value) {
10531 896 : TVARIABLE(UintPtrT, var_low);
10532 : // Only used on 32-bit platforms.
10533 : TVARIABLE(UintPtrT, var_high);
10534 896 : BigIntToRawBytes(bigint_value, &var_low, &var_high);
10535 :
10536 : MachineRepresentation rep = WordT::kMachineRepresentation;
10537 : #if defined(V8_TARGET_BIG_ENDIAN)
10538 : if (!Is64()) {
10539 : StoreNoWriteBarrier(rep, backing_store, offset, var_high.value());
10540 : StoreNoWriteBarrier(rep, backing_store,
10541 : IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
10542 : var_low.value());
10543 : } else {
10544 : StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
10545 : }
10546 : #else
10547 896 : StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
10548 896 : if (!Is64()) {
10549 : StoreNoWriteBarrier(rep, backing_store,
10550 0 : IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
10551 0 : var_high.value());
10552 : }
10553 : #endif
10554 896 : }
10555 :
      : // Stores |value| at |key| into |object|'s elements for |elements_kind| under
      : // |store_mode|: handles typed arrays (with detach and bounds checks, BigInt
      : // kinds included) and fast tagged/double arrays (with COW copy and capacity
      : // growth where the store mode allows); jumps to |bailout| for anything the
      : // fast path cannot do.
10556 7336 : void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
10557 : ElementsKind elements_kind,
10558 : KeyedAccessStoreMode store_mode,
10559 : Label* bailout, Node* context) {
10560 : CSA_ASSERT(this, Word32BinaryNot(IsJSProxy(object)));
10561 :
10562 : Node* elements = LoadElements(object);
10563 7336 : if (!IsSmiOrObjectElementsKind(elements_kind)) {
10564 : CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10565 2688 : } else if (!IsCOWHandlingStoreMode(store_mode)) {
10566 2688 : GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
10567 : }
10568 :
10569 : // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
10570 : ParameterMode parameter_mode = INTPTR_PARAMETERS;
10571 7336 : TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);
10572 :
10573 7336 : if (IsFixedTypedArrayElementsKind(elements_kind)) {
10574 6160 : Label done(this);
10575 :
10576 : // IntegerIndexedElementSet converts value to a Number/BigInt prior to the
10577 : // bounds check.
10578 3080 : value = PrepareValueForWriteToTypedArray(CAST(value), elements_kind,
10579 3080 : CAST(context));
10580 :
10581 : // There must be no allocations between the buffer load
10582 : // and the actual store to backing store, because GC may decide that
10583 : // the buffer is not alive or move the elements.
10584 : // TODO(ishell): introduce DisallowHeapAllocationCode scope here.
10585 :
10586 : // Check if buffer has been detached.
10587 : Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
10588 6160 : GotoIf(IsDetachedBuffer(buffer), bailout);
10589 :
10590 : // Bounds check.
10591 : Node* length =
10592 : TaggedToParameter(LoadJSTypedArrayLength(CAST(object)), parameter_mode);
10593 :
10594 3080 : if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
10595 : // Skip the store if we write beyond the length or
10596 : // to a property with a negative integer index.
10597 1232 : GotoIfNot(UintPtrLessThan(intptr_key, length), &done);
10598 2464 : } else if (store_mode == STANDARD_STORE) {
10599 2464 : GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
10600 : } else {
10601 : // This case is produced due to the dispatched call in
10602 : // ElementsTransitionAndStore and StoreFastElement.
10603 : // TODO(jgruber): Avoid generating unsupported combinations to save code
10604 : // size.
10605 1232 : DebugBreak();
10606 : }
10607 :
10608 3080 : if (elements_kind == BIGINT64_ELEMENTS ||
10609 : elements_kind == BIGUINT64_ELEMENTS) {
10610 560 : TNode<BigInt> bigint_value = UncheckedCast<BigInt>(value);
10611 :
10612 : TNode<RawPtrT> backing_store =
10613 560 : LoadFixedTypedArrayBackingStore(CAST(elements));
10614 : TNode<IntPtrT> offset = ElementOffsetFromIndex(
10615 560 : intptr_key, BIGINT64_ELEMENTS, INTPTR_PARAMETERS, 0);
10616 : EmitBigTypedArrayElementStore(CAST(elements), backing_store, offset,
10617 560 : bigint_value);
10618 : } else {
10619 5040 : Node* backing_store = LoadFixedTypedArrayBackingStore(CAST(elements));
10620 : StoreElement(backing_store, elements_kind, intptr_key, value,
10621 2520 : parameter_mode);
10622 : }
10623 3080 : Goto(&done);
10624 :
10625 : BIND(&done);
10626 : return;
10627 : }
10628 : DCHECK(IsFastElementsKind(elements_kind) ||
10629 : elements_kind == PACKED_SEALED_ELEMENTS);
10630 :
      : // JSArrays track their own length; other receivers use the store's length.
10631 : Node* length =
10632 25536 : SelectImpl(IsJSArray(object), [=]() { return LoadJSArrayLength(object); },
10633 8512 : [=]() { return LoadFixedArrayBaseLength(elements); },
10634 4256 : MachineRepresentation::kTagged);
10635 : length = TaggedToParameter(length, parameter_mode);
10636 :
10637 : // In case value is stored into a fast smi array, assure that the value is
10638 : // a smi before manipulating the backing store. Otherwise the backing store
10639 : // may be left in an invalid state.
10640 4256 : if (IsSmiElementsKind(elements_kind)) {
10641 1344 : GotoIfNot(TaggedIsSmi(value), bailout);
10642 3584 : } else if (IsDoubleElementsKind(elements_kind)) {
10643 1344 : value = TryTaggedToFloat64(value, bailout);
10644 : }
10645 :
10646 4256 : if (IsGrowStoreMode(store_mode) &&
10647 : !(elements_kind == PACKED_SEALED_ELEMENTS) {
10648 1008 : elements = CheckForCapacityGrow(object, elements, elements_kind, length,
10649 1008 : intptr_key, parameter_mode, bailout);
10650 : } else {
10651 6496 : GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
10652 : }
10653 :
10654 : // If we didn't grow {elements}, it might still be COW, in which case we
10655 : // copy it now.
10656 4256 : if (!IsSmiOrObjectElementsKind(elements_kind)) {
10657 : CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10658 2688 : } else if (IsCOWHandlingStoreMode(store_mode)) {
10659 1344 : elements = CopyElementsOnWrite(object, elements, elements_kind, length,
10660 1344 : parameter_mode, bailout);
10661 : }
10662 :
10663 : CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10664 4256 : StoreElement(elements, elements_kind, intptr_key, value, parameter_mode);
10665 : }
10666 :
      : // Returns an elements store for |object| that can hold an element at |key|,
      : // growing capacity (inline or via Runtime::kGrowArrayElements) when needed
      : // and updating a JSArray's length; jumps to |bailout| when growth fails or
      : // the key is out of bounds for a non-grow case.
10667 1008 : Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
10668 : ElementsKind kind, Node* length,
10669 : Node* key, ParameterMode mode,
10670 : Label* bailout) {
10671 : DCHECK(IsFastElementsKind(kind));
10672 2016 : VARIABLE(checked_elements, MachineRepresentation::kTagged);
10673 1008 : Label grow_case(this), no_grow_case(this), done(this),
10674 1008 : grow_bailout(this, Label::kDeferred);
10675 :
10676 : Node* condition;
10677 1008 : if (IsHoleyElementsKind(kind)) {
10678 1344 : condition = UintPtrGreaterThanOrEqual(key, length);
10679 : } else {
10680 : // We don't support growing here unless the value is being appended.
10681 672 : condition = WordEqual(key, length);
10682 : }
10683 1008 : Branch(condition, &grow_case, &no_grow_case);
10684 :
10685 : BIND(&grow_case);
10686 : {
10687 : Node* current_capacity =
10688 : TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
10689 1008 : checked_elements.Bind(elements);
10690 1008 : Label fits_capacity(this);
10691 : // If key is negative, we will notice in Runtime::kGrowArrayElements.
10692 2016 : GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
10693 :
10694 : {
10695 1008 : Node* new_elements = TryGrowElementsCapacity(
10696 1008 : object, elements, kind, key, current_capacity, mode, &grow_bailout);
10697 1008 : checked_elements.Bind(new_elements);
10698 1008 : Goto(&fits_capacity);
10699 : }
10700 :
10701 : BIND(&grow_bailout);
10702 : {
      : // Slow path: ask the runtime to grow; a Smi result signals failure.
10703 : Node* tagged_key = mode == SMI_PARAMETERS
10704 : ? key
10705 3024 : : ChangeInt32ToTagged(TruncateIntPtrToInt32(key));
10706 : Node* maybe_elements = CallRuntime(
10707 : Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
10708 2016 : GotoIf(TaggedIsSmi(maybe_elements), bailout);
10709 : CSA_ASSERT(this, IsFixedArrayWithKind(maybe_elements, kind));
10710 1008 : checked_elements.Bind(maybe_elements);
10711 1008 : Goto(&fits_capacity);
10712 : }
10713 :
10714 : BIND(&fits_capacity);
      : // Only JSArrays carry a length field to update after growing.
10715 2016 : GotoIfNot(IsJSArray(object), &done);
10716 :
10717 3024 : Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
10718 : StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
10719 : ParameterToTagged(new_length, mode));
10720 1008 : Goto(&done);
10721 : }
10722 :
10723 : BIND(&no_grow_case);
10724 : {
10725 2016 : GotoIfNot(UintPtrLessThan(key, length), bailout);
10726 1008 : checked_elements.Bind(elements);
10727 1008 : Goto(&done);
10728 : }
10729 :
10730 : BIND(&done);
10731 2016 : return checked_elements.value();
10732 : }
10733 :
      : // Returns |elements| unchanged when it is not copy-on-write; otherwise
      : // returns a freshly grown private copy of the same kind and capacity,
      : // jumping to |bailout| if the copy cannot be made.
10734 1344 : Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
10735 : ElementsKind kind, Node* length,
10736 : ParameterMode mode,
10737 : Label* bailout) {
10738 2688 : VARIABLE(new_elements_var, MachineRepresentation::kTagged, elements);
10739 1344 : Label done(this);
10740 :
10741 2688 : GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
10742 : {
10743 : Node* capacity =
10744 : TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
10745 1344 : Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
10746 1344 : length, capacity, mode, bailout);
10747 1344 : new_elements_var.Bind(new_elements);
10748 1344 : Goto(&done);
10749 : }
10750 :
10751 : BIND(&done);
10752 2688 : return new_elements_var.value();
10753 : }
10754 :
      : // Transitions |object| from |from_kind| to |to_kind|, installing |map|.
      : // Traps on allocation mementos when the transition is tracked, and for
      : // non-simple transitions re-allocates the elements store in the target
      : // representation; jumps to |bailout| when that fails.
10755 2688 : void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
10756 : ElementsKind from_kind,
10757 : ElementsKind to_kind,
10758 : Label* bailout) {
10759 : DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
10760 2688 : if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
10761 1568 : TrapAllocationMemento(object, bailout);
10762 : }
10763 :
10764 2688 : if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
10765 1344 : Comment("Non-simple map transition");
10766 : Node* elements = LoadElements(object);
10767 :
10768 1344 : Label done(this);
      : // Nothing to convert for the canonical empty store.
10769 1344 : GotoIf(WordEqual(elements, EmptyFixedArrayConstant()), &done);
10770 :
10771 : // TODO(ishell): Use OptimalParameterMode().
10772 : ParameterMode mode = INTPTR_PARAMETERS;
10773 2688 : Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
10774 5376 : Node* array_length = SelectImpl(
10775 : IsJSArray(object),
10776 1344 : [=]() {
10777 : CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(object)));
10778 2688 : return SmiUntag(LoadFastJSArrayLength(object));
10779 : },
10780 1344 : [=]() { return elements_length; },
10781 1344 : MachineType::PointerRepresentation());
10782 :
10783 : CSA_ASSERT(this, WordNotEqual(elements_length, IntPtrConstant(0)));
10784 :
10785 : GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
10786 1344 : elements_length, mode, bailout);
10787 1344 : Goto(&done);
10788 : BIND(&done);
10789 : }
10790 :
      : // Installing the new map completes the transition.
10791 : StoreMap(object, map);
10792 2688 : }
10793 :
// Jumps to {memento_found} if a valid AllocationMemento directly follows
// {object} in new space; falls through otherwise. A memento can only be
// trusted if the word range it would occupy is below the current new-space
// allocation top (or on a different, fully-initialized page).
void CodeStubAssembler::TrapAllocationMemento(Node* object,
                                              Label* memento_found) {
  Comment("[ TrapAllocationMemento");
  Label no_memento_found(this);
  Label top_check(this), map_check(this);

  TNode<ExternalReference> new_space_top_address = ExternalConstant(
      ExternalReference::new_space_allocation_top_address(isolate()));
  // The memento, if present, is placed immediately after the JSArray header.
  const int kMementoMapOffset = JSArray::kSize;
  const int kMementoLastWordOffset =
      kMementoMapOffset + AllocationMemento::kSize - kTaggedSize;

  // Bail out if the object is not in new space.
  TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
  TNode<IntPtrT> object_page = PageFromAddress(object_word);
  {
    // Read the page header flags to classify the object's page.
    TNode<IntPtrT> page_flags =
        UncheckedCast<IntPtrT>(Load(MachineType::IntPtr(), object_page,
                                    IntPtrConstant(Page::kFlagsOffset)));
    GotoIf(WordEqual(
               WordAnd(page_flags,
                       IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
               IntPtrConstant(0)),
           &no_memento_found);
    // TODO(ulan): Support allocation memento for a large object by allocating
    // additional word for the memento after the large object.
    GotoIf(WordNotEqual(WordAnd(page_flags,
                                IntPtrConstant(MemoryChunk::kIsLargePageMask)),
                        IntPtrConstant(0)),
           &no_memento_found);
  }

  // Address of the last word the memento would occupy.
  TNode<IntPtrT> memento_last_word = IntPtrAdd(
      object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
  TNode<IntPtrT> memento_last_word_page = PageFromAddress(memento_last_word);

  TNode<IntPtrT> new_space_top = UncheckedCast<IntPtrT>(
      Load(MachineType::Pointer(), new_space_top_address));
  TNode<IntPtrT> new_space_top_page = PageFromAddress(new_space_top);

  // If the object is in new space, we need to check whether respective
  // potential memento object is on the same page as the current top.
  GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);

  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  Branch(WordEqual(object_page, memento_last_word_page), &map_check,
         &no_memento_found);

  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  BIND(&top_check);
  {
    Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
           &no_memento_found, &map_check);
  }

  // Memento map check.
  BIND(&map_check);
  {
    TNode<Object> memento_map = LoadObjectField(object, kMementoMapOffset);
    Branch(WordEqual(memento_map, LoadRoot(RootIndex::kAllocationMementoMap)),
           memento_found, &no_memento_found);
  }
  BIND(&no_memento_found);
  Comment("] TrapAllocationMemento");
}
10862 :
10863 11628 : TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) {
10864 23256 : return WordAnd(address, IntPtrConstant(~kPageAlignmentMask));
10865 : }
10866 :
// Allocates a pretenured AllocationSite, initializes it (mirroring
// AllocationSite::Initialize), links it into the isolate's allocation-site
// list, stores it into {feedback_vector} at {slot}, and returns it.
TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
    SloppyTNode<FeedbackVector> feedback_vector, TNode<Smi> slot) {
  TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
  // Sites must survive GCs, so allocate in old space.
  Node* site = Allocate(size, CodeStubAssembler::kPretenured);
  StoreMapNoWriteBarrier(site, RootIndex::kAllocationSiteWithWeakNextMap);
  // Should match AllocationSite::Initialize.
  TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
      IntPtrConstant(0), IntPtrConstant(GetInitialFastElementsKind()));
  StoreObjectFieldNoWriteBarrier(
      site, AllocationSite::kTransitionInfoOrBoilerplateOffset,
      SmiTag(Signed(field)));

  // Unlike literals, constructed arrays don't have nested sites
  TNode<Smi> zero = SmiConstant(0);
  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);

  // Pretenuring calculation field.
  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
                                 Int32Constant(0),
                                 MachineRepresentation::kWord32);

  // Pretenuring memento creation count field.
  StoreObjectFieldNoWriteBarrier(
      site, AllocationSite::kPretenureCreateCountOffset, Int32Constant(0),
      MachineRepresentation::kWord32);

  // Store an empty fixed array for the code dependency.
  StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
                       RootIndex::kEmptyWeakFixedArray);

  // Link the object to the allocation site list
  TNode<ExternalReference> site_list = ExternalConstant(
      ExternalReference::allocation_sites_list_address(isolate()));
  TNode<Object> next_site = CAST(LoadBufferObject(site_list, 0));

  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
  StoreFullTaggedNoWriteBarrier(site_list, site);

  StoreFeedbackVectorSlot(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
                          SMI_PARAMETERS);
  return CAST(site);
}
10914 :
10915 2240 : TNode<MaybeObject> CodeStubAssembler::StoreWeakReferenceInFeedbackVector(
10916 : SloppyTNode<FeedbackVector> feedback_vector, Node* slot,
10917 : SloppyTNode<HeapObject> value, int additional_offset,
10918 : ParameterMode parameter_mode) {
10919 2240 : TNode<MaybeObject> weak_value = MakeWeak(value);
10920 : StoreFeedbackVectorSlot(feedback_vector, slot, weak_value,
10921 : UPDATE_WRITE_BARRIER, additional_offset,
10922 2240 : parameter_mode);
10923 2240 : return weak_value;
10924 : }
10925 :
10926 672 : TNode<BoolT> CodeStubAssembler::NotHasBoilerplate(
10927 : TNode<Object> maybe_literal_site) {
10928 672 : return TaggedIsSmi(maybe_literal_site);
10929 : }
10930 :
10931 56 : TNode<Smi> CodeStubAssembler::LoadTransitionInfo(
10932 : TNode<AllocationSite> allocation_site) {
10933 : TNode<Smi> transition_info = CAST(LoadObjectField(
10934 : allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10935 56 : return transition_info;
10936 : }
10937 :
10938 448 : TNode<JSObject> CodeStubAssembler::LoadBoilerplate(
10939 : TNode<AllocationSite> allocation_site) {
10940 : TNode<JSObject> boilerplate = CAST(LoadObjectField(
10941 : allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10942 448 : return boilerplate;
10943 : }
10944 :
10945 280 : TNode<Int32T> CodeStubAssembler::LoadElementsKind(
10946 : TNode<AllocationSite> allocation_site) {
10947 : TNode<Smi> transition_info = LoadTransitionInfo(allocation_site);
10948 : TNode<Int32T> elements_kind =
10949 : Signed(DecodeWord32<AllocationSite::ElementsKindBits>(
10950 560 : SmiToInt32(transition_info)));
10951 : CSA_ASSERT(this, IsFastElementsKind(elements_kind));
10952 280 : return elements_kind;
10953 : }
10954 :
// Emits a counted loop running {body} for each index in
// [start_index, end_index), stepping by {increment}. {vars} are loop-carried
// CSA variables in addition to the implicit index variable. {advance_mode}
// selects whether the index is advanced before or after {body} runs. Returns
// the final index value.
Node* CodeStubAssembler::BuildFastLoop(
    const CodeStubAssembler::VariableList& vars, Node* start_index,
    Node* end_index, const FastLoopBody& body, int increment,
    ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
  // Index representation follows the parameter mode: raw pointer-sized word
  // or tagged Smi.
  MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
                                        ? MachineType::PointerRepresentation()
                                        : MachineRepresentation::kTaggedSigned;
  VARIABLE(var, index_rep, start_index);
  VariableList vars_copy(vars.begin(), vars.end(), zone());
  vars_copy.push_back(&var);
  Label loop(this, vars_copy);
  Label after_loop(this);
  // Introduce an explicit second check of the termination condition before the
  // loop that helps turbofan generate better code. If there's only a single
  // check, then the CodeStubAssembler forces it to be at the beginning of the
  // loop requiring a backwards branch at the end of the loop (it's not possible
  // to force the loop header check at the end of the loop and branch forward to
  // it from the pre-header). The extra branch is slower in the case that the
  // loop actually iterates.
  Node* first_check = WordEqual(var.value(), end_index);
  int32_t first_check_val;
  if (ToInt32Constant(first_check, first_check_val)) {
    // The bounds are compile-time constants: either skip the loop entirely or
    // enter it unconditionally.
    if (first_check_val) return var.value();
    Goto(&loop);
  } else {
    Branch(first_check, &after_loop, &loop);
  }

  BIND(&loop);
  {
    if (advance_mode == IndexAdvanceMode::kPre) {
      Increment(&var, increment, parameter_mode);
    }
    body(var.value());
    if (advance_mode == IndexAdvanceMode::kPost) {
      Increment(&var, increment, parameter_mode);
    }
    // Equality (not ordering) terminates the loop, so the index must land
    // exactly on end_index.
    Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
  }
  BIND(&after_loop);
  return var.value();
}
10999 :
11000 16996 : void CodeStubAssembler::BuildFastFixedArrayForEach(
11001 : const CodeStubAssembler::VariableList& vars, Node* fixed_array,
11002 : ElementsKind kind, Node* first_element_inclusive,
11003 : Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
11004 : ParameterMode mode, ForEachDirection direction) {
11005 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
11006 : CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
11007 : CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
11008 : CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(fixed_array, kind),
11009 : IsPropertyArray(fixed_array)));
11010 : int32_t first_val;
11011 16996 : bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
11012 : int32_t last_val;
11013 16996 : bool constent_last = ToInt32Constant(last_element_exclusive, last_val);
11014 16996 : if (constant_first && constent_last) {
11015 1032 : int delta = last_val - first_val;
11016 : DCHECK_GE(delta, 0);
11017 1032 : if (delta <= kElementLoopUnrollThreshold) {
11018 976 : if (direction == ForEachDirection::kForward) {
11019 60 : for (int i = first_val; i < last_val; ++i) {
11020 48 : Node* index = IntPtrConstant(i);
11021 : Node* offset =
11022 48 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
11023 24 : FixedArray::kHeaderSize - kHeapObjectTag);
11024 : body(fixed_array, offset);
11025 : }
11026 : } else {
11027 3092 : for (int i = last_val - 1; i >= first_val; --i) {
11028 4256 : Node* index = IntPtrConstant(i);
11029 : Node* offset =
11030 4256 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
11031 2128 : FixedArray::kHeaderSize - kHeapObjectTag);
11032 : body(fixed_array, offset);
11033 : }
11034 : }
11035 976 : return;
11036 : }
11037 : }
11038 :
11039 : Node* start =
11040 32040 : ElementOffsetFromIndex(first_element_inclusive, kind, mode,
11041 16020 : FixedArray::kHeaderSize - kHeapObjectTag);
11042 : Node* limit =
11043 32040 : ElementOffsetFromIndex(last_element_exclusive, kind, mode,
11044 : FixedArray::kHeaderSize - kHeapObjectTag);
11045 16020 : if (direction == ForEachDirection::kReverse) std::swap(start, limit);
11046 :
11047 : int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kTaggedSize;
11048 32040 : BuildFastLoop(
11049 : vars, start, limit,
11050 31016 : [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
11051 : direction == ForEachDirection::kReverse ? -increment : increment,
11052 : INTPTR_PARAMETERS,
11053 : direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
11054 16020 : : IndexAdvanceMode::kPost);
11055 : }
11056 :
11057 224 : void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
11058 : Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
11059 672 : GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size, mode),
11060 224 : doesnt_fit);
11061 224 : }
11062 :
11063 3876 : void CodeStubAssembler::InitializeFieldsWithRoot(Node* object,
11064 : Node* start_offset,
11065 : Node* end_offset,
11066 : RootIndex root_index) {
11067 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
11068 11628 : start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
11069 11628 : end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
11070 7752 : Node* root_value = LoadRoot(root_index);
11071 3876 : BuildFastLoop(
11072 : end_offset, start_offset,
11073 3424 : [this, object, root_value](Node* current) {
11074 3424 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object, current,
11075 3424 : root_value);
11076 : },
11077 : -kTaggedSize, INTPTR_PARAMETERS,
11078 3876 : CodeStubAssembler::IndexAdvanceMode::kPre);
11079 3876 : }
11080 :
// Branches to {if_true}/{if_false} according to the relational comparison
// {op} between {left} and {right}, both of which must already be Numbers
// (Smi or HeapNumber). Smi/Smi pairs are compared directly; any mixed or
// HeapNumber case falls through to a Float64 comparison.
void CodeStubAssembler::BranchIfNumberRelationalComparison(
    Operation op, Node* left, Node* right, Label* if_true, Label* if_false) {
  CSA_SLOW_ASSERT(this, IsNumber(left));
  CSA_SLOW_ASSERT(this, IsNumber(right));

  Label do_float_comparison(this);
  TVARIABLE(Float64T, var_left_float);
  TVARIABLE(Float64T, var_right_float);

  // Dispatch on the representations of {left} and {right}.
  Branch(
      TaggedIsSmi(left),
      [&] {
        TNode<Smi> smi_left = CAST(left);

        Branch(
            TaggedIsSmi(right),
            [&] {
              TNode<Smi> smi_right = CAST(right);

              // Both {left} and {right} are Smi, so just perform a fast
              // Smi comparison.
              switch (op) {
                case Operation::kEqual:
                  BranchIfSmiEqual(smi_left, smi_right, if_true, if_false);
                  break;
                case Operation::kLessThan:
                  BranchIfSmiLessThan(smi_left, smi_right, if_true, if_false);
                  break;
                case Operation::kLessThanOrEqual:
                  BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true,
                                             if_false);
                  break;
                case Operation::kGreaterThan:
                  // a > b is implemented as b < a.
                  BranchIfSmiLessThan(smi_right, smi_left, if_true, if_false);
                  break;
                case Operation::kGreaterThanOrEqual:
                  // a >= b is implemented as b <= a.
                  BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true,
                                             if_false);
                  break;
                default:
                  UNREACHABLE();
              }
            },
            [&] {
              // Smi vs HeapNumber: promote both to Float64.
              CSA_ASSERT(this, IsHeapNumber(right));
              var_left_float = SmiToFloat64(smi_left);
              var_right_float = LoadHeapNumberValue(right);
              Goto(&do_float_comparison);
            });
      },
      [&] {
        // {left} is a HeapNumber; {right} is either representation.
        CSA_ASSERT(this, IsHeapNumber(left));
        var_left_float = LoadHeapNumberValue(left);

        Branch(
            TaggedIsSmi(right),
            [&] {
              var_right_float = SmiToFloat64(right);
              Goto(&do_float_comparison);
            },
            [&] {
              CSA_ASSERT(this, IsHeapNumber(right));
              var_right_float = LoadHeapNumberValue(right);
              Goto(&do_float_comparison);
            });
      });

  BIND(&do_float_comparison);
  {
    switch (op) {
      case Operation::kEqual:
        Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
               if_true, if_false);
        break;
      case Operation::kLessThan:
        Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
               if_true, if_false);
        break;
      case Operation::kLessThanOrEqual:
        Branch(Float64LessThanOrEqual(var_left_float.value(),
                                      var_right_float.value()),
               if_true, if_false);
        break;
      case Operation::kGreaterThan:
        Branch(
            Float64GreaterThan(var_left_float.value(), var_right_float.value()),
            if_true, if_false);
        break;
      case Operation::kGreaterThanOrEqual:
        Branch(Float64GreaterThanOrEqual(var_left_float.value(),
                                         var_right_float.value()),
               if_true, if_false);
        break;
      default:
        UNREACHABLE();
    }
  }
}
11179 :
11180 2760 : void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(Node* left, Node* right,
11181 : Label* if_true) {
11182 5520 : Label if_false(this);
11183 : BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
11184 2760 : right, if_true, &if_false);
11185 : BIND(&if_false);
11186 2760 : }
11187 :
11188 : namespace {
11189 2688 : Operation Reverse(Operation op) {
11190 2688 : switch (op) {
11191 : case Operation::kLessThan:
11192 : return Operation::kGreaterThan;
11193 : case Operation::kLessThanOrEqual:
11194 672 : return Operation::kGreaterThanOrEqual;
11195 : case Operation::kGreaterThan:
11196 672 : return Operation::kLessThan;
11197 : case Operation::kGreaterThanOrEqual:
11198 672 : return Operation::kLessThanOrEqual;
11199 : default:
11200 : break;
11201 : }
11202 0 : UNREACHABLE();
11203 : }
11204 : } // anonymous namespace
11205 :
11206 896 : Node* CodeStubAssembler::RelationalComparison(Operation op, Node* left,
11207 : Node* right, Node* context,
11208 : Variable* var_type_feedback) {
11209 1792 : Label return_true(this), return_false(this), do_float_comparison(this),
11210 896 : end(this);
11211 : TVARIABLE(Oddball, var_result); // Actually only "true" or "false".
11212 : TVARIABLE(Float64T, var_left_float);
11213 : TVARIABLE(Float64T, var_right_float);
11214 :
11215 : // We might need to loop several times due to ToPrimitive and/or ToNumeric
11216 : // conversions.
11217 1792 : VARIABLE(var_left, MachineRepresentation::kTagged, left);
11218 1792 : VARIABLE(var_right, MachineRepresentation::kTagged, right);
11219 1792 : VariableList loop_variable_list({&var_left, &var_right}, zone());
11220 896 : if (var_type_feedback != nullptr) {
11221 : // Initialize the type feedback to None. The current feedback is combined
11222 : // with the previous feedback.
11223 672 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
11224 672 : loop_variable_list.push_back(var_type_feedback);
11225 : }
11226 896 : Label loop(this, loop_variable_list);
11227 896 : Goto(&loop);
11228 : BIND(&loop);
11229 : {
11230 896 : left = var_left.value();
11231 896 : right = var_right.value();
11232 :
11233 896 : Label if_left_smi(this), if_left_not_smi(this);
11234 1792 : Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11235 :
11236 : BIND(&if_left_smi);
11237 : {
11238 : TNode<Smi> smi_left = CAST(left);
11239 896 : Label if_right_smi(this), if_right_heapnumber(this),
11240 896 : if_right_bigint(this, Label::kDeferred),
11241 896 : if_right_not_numeric(this, Label::kDeferred);
11242 1792 : GotoIf(TaggedIsSmi(right), &if_right_smi);
11243 : Node* right_map = LoadMap(right);
11244 1792 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11245 : Node* right_instance_type = LoadMapInstanceType(right_map);
11246 896 : Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11247 896 : &if_right_not_numeric);
11248 :
11249 : BIND(&if_right_smi);
11250 : {
11251 896 : TNode<Smi> smi_right = CAST(right);
11252 : CombineFeedback(var_type_feedback,
11253 896 : CompareOperationFeedback::kSignedSmall);
11254 896 : switch (op) {
11255 : case Operation::kLessThan:
11256 : BranchIfSmiLessThan(smi_left, smi_right, &return_true,
11257 224 : &return_false);
11258 224 : break;
11259 : case Operation::kLessThanOrEqual:
11260 : BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
11261 224 : &return_false);
11262 224 : break;
11263 : case Operation::kGreaterThan:
11264 : BranchIfSmiLessThan(smi_right, smi_left, &return_true,
11265 224 : &return_false);
11266 224 : break;
11267 : case Operation::kGreaterThanOrEqual:
11268 : BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
11269 224 : &return_false);
11270 224 : break;
11271 : default:
11272 0 : UNREACHABLE();
11273 : }
11274 : }
11275 :
11276 : BIND(&if_right_heapnumber);
11277 : {
11278 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11279 1792 : var_left_float = SmiToFloat64(smi_left);
11280 : var_right_float = LoadHeapNumberValue(right);
11281 896 : Goto(&do_float_comparison);
11282 : }
11283 :
11284 : BIND(&if_right_bigint);
11285 : {
11286 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11287 896 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11288 : NoContextConstant(),
11289 : SmiConstant(Reverse(op)), right, left));
11290 896 : Goto(&end);
11291 : }
11292 :
11293 : BIND(&if_right_not_numeric);
11294 : {
11295 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11296 : // Convert {right} to a Numeric; we don't need to perform the
11297 : // dedicated ToPrimitive(right, hint Number) operation, as the
11298 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11299 : // a Number hint.
11300 : var_right.Bind(
11301 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11302 896 : Goto(&loop);
11303 : }
11304 : }
11305 :
11306 : BIND(&if_left_not_smi);
11307 : {
11308 : Node* left_map = LoadMap(left);
11309 :
11310 896 : Label if_right_smi(this), if_right_not_smi(this);
11311 1792 : Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11312 :
11313 : BIND(&if_right_smi);
11314 : {
11315 896 : Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11316 896 : if_left_not_numeric(this, Label::kDeferred);
11317 1792 : GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11318 : Node* left_instance_type = LoadMapInstanceType(left_map);
11319 896 : Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
11320 896 : &if_left_not_numeric);
11321 :
11322 : BIND(&if_left_heapnumber);
11323 : {
11324 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11325 : var_left_float = LoadHeapNumberValue(left);
11326 1792 : var_right_float = SmiToFloat64(right);
11327 896 : Goto(&do_float_comparison);
11328 : }
11329 :
11330 : BIND(&if_left_bigint);
11331 : {
11332 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11333 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11334 : NoContextConstant(), SmiConstant(op),
11335 : left, right));
11336 896 : Goto(&end);
11337 : }
11338 :
11339 : BIND(&if_left_not_numeric);
11340 : {
11341 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11342 : // Convert {left} to a Numeric; we don't need to perform the
11343 : // dedicated ToPrimitive(left, hint Number) operation, as the
11344 : // ToNumeric(left) will by itself already invoke ToPrimitive with
11345 : // a Number hint.
11346 : var_left.Bind(
11347 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11348 896 : Goto(&loop);
11349 : }
11350 : }
11351 :
11352 : BIND(&if_right_not_smi);
11353 : {
11354 : Node* right_map = LoadMap(right);
11355 :
11356 896 : Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11357 896 : if_left_string(this), if_left_other(this, Label::kDeferred);
11358 1792 : GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11359 : Node* left_instance_type = LoadMapInstanceType(left_map);
11360 896 : GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
11361 1792 : Branch(IsStringInstanceType(left_instance_type), &if_left_string,
11362 896 : &if_left_other);
11363 :
11364 : BIND(&if_left_heapnumber);
11365 : {
11366 896 : Label if_right_heapnumber(this),
11367 896 : if_right_bigint(this, Label::kDeferred),
11368 896 : if_right_not_numeric(this, Label::kDeferred);
11369 1792 : GotoIf(WordEqual(right_map, left_map), &if_right_heapnumber);
11370 : Node* right_instance_type = LoadMapInstanceType(right_map);
11371 896 : Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11372 896 : &if_right_not_numeric);
11373 :
11374 : BIND(&if_right_heapnumber);
11375 : {
11376 : CombineFeedback(var_type_feedback,
11377 896 : CompareOperationFeedback::kNumber);
11378 : var_left_float = LoadHeapNumberValue(left);
11379 : var_right_float = LoadHeapNumberValue(right);
11380 896 : Goto(&do_float_comparison);
11381 : }
11382 :
11383 : BIND(&if_right_bigint);
11384 : {
11385 : OverwriteFeedback(var_type_feedback,
11386 896 : CompareOperationFeedback::kAny);
11387 896 : var_result = CAST(CallRuntime(
11388 : Runtime::kBigIntCompareToNumber, NoContextConstant(),
11389 : SmiConstant(Reverse(op)), right, left));
11390 896 : Goto(&end);
11391 : }
11392 :
11393 : BIND(&if_right_not_numeric);
11394 : {
11395 : OverwriteFeedback(var_type_feedback,
11396 896 : CompareOperationFeedback::kAny);
11397 : // Convert {right} to a Numeric; we don't need to perform
11398 : // dedicated ToPrimitive(right, hint Number) operation, as the
11399 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11400 : // a Number hint.
11401 : var_right.Bind(
11402 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11403 896 : Goto(&loop);
11404 : }
11405 : }
11406 :
11407 : BIND(&if_left_bigint);
11408 : {
11409 896 : Label if_right_heapnumber(this), if_right_bigint(this),
11410 896 : if_right_string(this), if_right_other(this);
11411 1792 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11412 : Node* right_instance_type = LoadMapInstanceType(right_map);
11413 896 : GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11414 1792 : Branch(IsStringInstanceType(right_instance_type), &if_right_string,
11415 896 : &if_right_other);
11416 :
11417 : BIND(&if_right_heapnumber);
11418 : {
11419 : OverwriteFeedback(var_type_feedback,
11420 896 : CompareOperationFeedback::kAny);
11421 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11422 : NoContextConstant(), SmiConstant(op),
11423 : left, right));
11424 896 : Goto(&end);
11425 : }
11426 :
11427 : BIND(&if_right_bigint);
11428 : {
11429 : CombineFeedback(var_type_feedback,
11430 896 : CompareOperationFeedback::kBigInt);
11431 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToBigInt,
11432 : NoContextConstant(), SmiConstant(op),
11433 : left, right));
11434 896 : Goto(&end);
11435 : }
11436 :
11437 : BIND(&if_right_string);
11438 : {
11439 : OverwriteFeedback(var_type_feedback,
11440 896 : CompareOperationFeedback::kAny);
11441 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,
11442 : NoContextConstant(), SmiConstant(op),
11443 : left, right));
11444 896 : Goto(&end);
11445 : }
11446 :
11447 : // {right} is not a Number, BigInt, or String.
11448 : BIND(&if_right_other);
11449 : {
11450 : OverwriteFeedback(var_type_feedback,
11451 896 : CompareOperationFeedback::kAny);
11452 : // Convert {right} to a Numeric; we don't need to perform
11453 : // dedicated ToPrimitive(right, hint Number) operation, as the
11454 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11455 : // a Number hint.
11456 : var_right.Bind(
11457 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11458 896 : Goto(&loop);
11459 : }
11460 : }
11461 :
11462 : BIND(&if_left_string);
11463 : {
11464 : Node* right_instance_type = LoadMapInstanceType(right_map);
11465 :
11466 896 : Label if_right_not_string(this, Label::kDeferred);
11467 1792 : GotoIfNot(IsStringInstanceType(right_instance_type),
11468 896 : &if_right_not_string);
11469 :
11470 : // Both {left} and {right} are strings.
11471 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
11472 : Builtins::Name builtin;
11473 896 : switch (op) {
11474 : case Operation::kLessThan:
11475 : builtin = Builtins::kStringLessThan;
11476 : break;
11477 : case Operation::kLessThanOrEqual:
11478 : builtin = Builtins::kStringLessThanOrEqual;
11479 224 : break;
11480 : case Operation::kGreaterThan:
11481 : builtin = Builtins::kStringGreaterThan;
11482 224 : break;
11483 : case Operation::kGreaterThanOrEqual:
11484 : builtin = Builtins::kStringGreaterThanOrEqual;
11485 224 : break;
11486 : default:
11487 0 : UNREACHABLE();
11488 : }
11489 1792 : var_result = CAST(CallBuiltin(builtin, context, left, right));
11490 896 : Goto(&end);
11491 :
11492 : BIND(&if_right_not_string);
11493 : {
11494 : OverwriteFeedback(var_type_feedback,
11495 896 : CompareOperationFeedback::kAny);
11496 : // {left} is a String, while {right} isn't. Check if {right} is
11497 : // a BigInt, otherwise call ToPrimitive(right, hint Number) if
11498 : // {right} is a receiver, or ToNumeric(left) and then
11499 : // ToNumeric(right) in the other cases.
11500 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11501 896 : Label if_right_bigint(this),
11502 896 : if_right_receiver(this, Label::kDeferred);
11503 896 : GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11504 1792 : GotoIf(IsJSReceiverInstanceType(right_instance_type),
11505 896 : &if_right_receiver);
11506 :
11507 : var_left.Bind(
11508 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11509 1792 : var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11510 896 : Goto(&loop);
11511 :
11512 : BIND(&if_right_bigint);
11513 : {
11514 896 : var_result = CAST(CallRuntime(
11515 : Runtime::kBigIntCompareToString, NoContextConstant(),
11516 : SmiConstant(Reverse(op)), right, left));
11517 896 : Goto(&end);
11518 : }
11519 :
11520 : BIND(&if_right_receiver);
11521 : {
11522 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11523 896 : isolate(), ToPrimitiveHint::kNumber);
11524 1792 : var_right.Bind(CallStub(callable, context, right));
11525 896 : Goto(&loop);
11526 : }
11527 : }
11528 : }
11529 :
11530 : BIND(&if_left_other);
11531 : {
11532 : // {left} is neither a Numeric nor a String, and {right} is not a Smi.
11533 896 : if (var_type_feedback != nullptr) {
11534 : // Collect NumberOrOddball feedback if {left} is an Oddball
11535 : // and {right} is either a HeapNumber or Oddball. Otherwise collect
11536 : // Any feedback.
11537 672 : Label collect_any_feedback(this), collect_oddball_feedback(this),
11538 672 : collect_feedback_done(this);
11539 1344 : GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
11540 672 : &collect_any_feedback);
11541 :
11542 1344 : GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
11543 : Node* right_instance_type = LoadMapInstanceType(right_map);
11544 1344 : Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
11545 672 : &collect_oddball_feedback, &collect_any_feedback);
11546 :
11547 : BIND(&collect_oddball_feedback);
11548 : {
11549 : CombineFeedback(var_type_feedback,
11550 672 : CompareOperationFeedback::kNumberOrOddball);
11551 672 : Goto(&collect_feedback_done);
11552 : }
11553 :
11554 : BIND(&collect_any_feedback);
11555 : {
11556 : OverwriteFeedback(var_type_feedback,
11557 672 : CompareOperationFeedback::kAny);
11558 672 : Goto(&collect_feedback_done);
11559 : }
11560 :
11561 : BIND(&collect_feedback_done);
11562 : }
11563 :
11564 : // If {left} is a receiver, call ToPrimitive(left, hint Number).
11565 : // Otherwise call ToNumeric(right) and then ToNumeric(left), the
11566 : // order here is important as it's observable by user code.
11567 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11568 896 : Label if_left_receiver(this, Label::kDeferred);
11569 1792 : GotoIf(IsJSReceiverInstanceType(left_instance_type),
11570 896 : &if_left_receiver);
11571 :
11572 1792 : var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11573 : var_left.Bind(
11574 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11575 896 : Goto(&loop);
11576 :
11577 : BIND(&if_left_receiver);
11578 : {
11579 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11580 896 : isolate(), ToPrimitiveHint::kNumber);
11581 1792 : var_left.Bind(CallStub(callable, context, left));
11582 896 : Goto(&loop);
11583 : }
11584 : }
11585 : }
11586 : }
11587 : }
11588 :
11589 : BIND(&do_float_comparison);
11590 : {
11591 896 : switch (op) {
11592 : case Operation::kLessThan:
11593 448 : Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
11594 224 : &return_true, &return_false);
11595 224 : break;
11596 : case Operation::kLessThanOrEqual:
11597 448 : Branch(Float64LessThanOrEqual(var_left_float.value(),
11598 224 : var_right_float.value()),
11599 224 : &return_true, &return_false);
11600 224 : break;
11601 : case Operation::kGreaterThan:
11602 224 : Branch(
11603 448 : Float64GreaterThan(var_left_float.value(), var_right_float.value()),
11604 224 : &return_true, &return_false);
11605 224 : break;
11606 : case Operation::kGreaterThanOrEqual:
11607 448 : Branch(Float64GreaterThanOrEqual(var_left_float.value(),
11608 224 : var_right_float.value()),
11609 224 : &return_true, &return_false);
11610 224 : break;
11611 : default:
11612 0 : UNREACHABLE();
11613 : }
11614 : }
11615 :
11616 : BIND(&return_true);
11617 : {
11618 : var_result = TrueConstant();
11619 896 : Goto(&end);
11620 : }
11621 :
11622 : BIND(&return_false);
11623 : {
11624 : var_result = FalseConstant();
11625 896 : Goto(&end);
11626 : }
11627 :
11628 : BIND(&end);
11629 896 : return var_result.value();
11630 : }
11631 :
11632 1120 : TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
11633 : SloppyTNode<Int32T> instance_type) {
 : // Computes the CompareOperationFeedback to record for a comparison whose
 : // operand is a String with the given {instance_type}: selects
 : // kInternalizedString when the kIsNotInternalizedMask bits of the
 : // instance type equal kInternalizedTag (i.e. the string is
 : // internalized), and the generic kString feedback otherwise.
11634 : TNode<Smi> feedback = SelectSmiConstant(
11635 2240 : Word32Equal(
11636 3360 : Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
11637 3360 : Int32Constant(kInternalizedTag)),
11638 : CompareOperationFeedback::kInternalizedString,
11639 : CompareOperationFeedback::kString);
11640 1120 : return feedback;
11641 : }
11642 :
11643 616 : void CodeStubAssembler::GenerateEqual_Same(Node* value, Label* if_equal,
11644 : Label* if_notequal,
11645 : Variable* var_type_feedback) {
11646 : // In case of abstract or strict equality checks, we need additional checks
11647 : // for NaN values because they are not considered equal, even if both the
11648 : // left and the right hand side reference exactly the same value.
 : //
 : // {value} is both operands of the comparison (the caller established
 : // reference equality). Branches to {if_equal} except when {value} is a
 : // HeapNumber holding NaN, which branches to {if_notequal}. When
 : // {var_type_feedback} is non-null, the feedback for {value}'s type is
 : // combined into it on the way out.
11649 :
11650 1232 : Label if_smi(this), if_heapnumber(this);
11651 1232 : GotoIf(TaggedIsSmi(value), &if_smi);
11652 :
11653 : Node* value_map = LoadMap(value);
11654 1232 : GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
11655 :
11656 : // For non-HeapNumbers, all we do is collect type feedback.
11657 616 : if (var_type_feedback != nullptr) {
11658 : Node* instance_type = LoadMapInstanceType(value_map);
11659 :
 : // Dispatch on the instance type so each case records its own
 : // CompareOperationFeedback bucket before jumping to {if_equal}.
11660 336 : Label if_string(this), if_receiver(this), if_oddball(this), if_symbol(this),
11661 336 : if_bigint(this);
11662 672 : GotoIf(IsStringInstanceType(instance_type), &if_string);
11663 672 : GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
11664 336 : GotoIf(IsOddballInstanceType(instance_type), &if_oddball);
11665 336 : Branch(IsBigIntInstanceType(instance_type), &if_bigint, &if_symbol);
11666 :
11667 : BIND(&if_string);
11668 : {
11669 : CSA_ASSERT(this, IsString(value));
11670 : CombineFeedback(var_type_feedback,
11671 672 : CollectFeedbackForString(instance_type));
11672 336 : Goto(if_equal);
11673 : }
11674 :
11675 : BIND(&if_symbol);
11676 : {
11677 : CSA_ASSERT(this, IsSymbol(value));
11678 336 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
11679 336 : Goto(if_equal);
11680 : }
11681 :
11682 : BIND(&if_receiver);
11683 : {
11684 : CSA_ASSERT(this, IsJSReceiver(value));
11685 336 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
11686 336 : Goto(if_equal);
11687 : }
11688 :
11689 : BIND(&if_bigint);
11690 : {
11691 : CSA_ASSERT(this, IsBigInt(value));
11692 336 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11693 336 : Goto(if_equal);
11694 : }
11695 :
11696 : BIND(&if_oddball);
11697 : {
11698 : CSA_ASSERT(this, IsOddball(value));
11699 336 : Label if_boolean(this), if_not_boolean(this);
11700 672 : Branch(IsBooleanMap(value_map), &if_boolean, &if_not_boolean);
11701 :
11702 : BIND(&if_boolean);
11703 : {
 : // NOTE(review): a Boolean compared against itself records kAny
 : // feedback rather than a more precise bucket — confirm intended.
11704 336 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11705 336 : Goto(if_equal);
11706 : }
11707 :
11708 : BIND(&if_not_boolean);
11709 : {
11710 : CSA_ASSERT(this, IsNullOrUndefined(value));
11711 : CombineFeedback(var_type_feedback,
11712 336 : CompareOperationFeedback::kReceiverOrNullOrUndefined);
11713 336 : Goto(if_equal);
11714 : }
11715 : }
11716 : } else {
11717 280 : Goto(if_equal);
11718 : }
11719 :
11720 : BIND(&if_heapnumber);
11721 : {
11722 616 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11723 : Node* number_value = LoadHeapNumberValue(value);
 : // NaN is the only value that is not equal to itself.
11724 616 : BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
11725 : }
11726 :
11727 : BIND(&if_smi);
11728 : {
11729 616 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
11730 616 : Goto(if_equal);
11731 : }
11732 : }
11733 616 : }
11733 :
11734 : // ES6 section 7.2.12 Abstract Equality Comparison
11735 224 : Node* CodeStubAssembler::Equal(Node* left, Node* right, Node* context,
11736 : Variable* var_type_feedback) {
11737 : // This is a slightly optimized version of Object::Equals. Whenever you
11738 : // change something functionality wise in here, remember to update the
11739 : // Object::Equals method as well.
 : //
 : // Returns the JS boolean result of {left} == {right}. {context} is used
 : // for the ToPrimitive / ToNumeric / StringToNumber conversions. When
 : // {var_type_feedback} is non-null it receives the accumulated
 : // CompareOperationFeedback for the operands.
11740 :
11741 448 : Label if_equal(this), if_notequal(this), do_float_comparison(this),
11742 224 : do_right_stringtonumber(this, Label::kDeferred), end(this);
11743 448 : VARIABLE(result, MachineRepresentation::kTagged);
11744 : TVARIABLE(Float64T, var_left_float);
11745 : TVARIABLE(Float64T, var_right_float);
11746 :
11747 : // We can avoid code duplication by exploiting the fact that abstract equality
11748 : // is symmetric.
11749 224 : Label use_symmetry(this);
11750 :
11751 : // We might need to loop several times due to ToPrimitive and/or ToNumber
11752 : // conversions.
11753 448 : VARIABLE(var_left, MachineRepresentation::kTagged, left);
11754 448 : VARIABLE(var_right, MachineRepresentation::kTagged, right);
11755 448 : VariableList loop_variable_list({&var_left, &var_right}, zone());
11756 224 : if (var_type_feedback != nullptr) {
11757 : // Initialize the type feedback to None. The current feedback will be
11758 : // combined with the previous feedback.
11759 168 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
11760 168 : loop_variable_list.push_back(var_type_feedback);
11761 : }
11762 224 : Label loop(this, loop_variable_list);
11763 224 : Goto(&loop);
11764 : BIND(&loop);
11765 : {
11766 224 : left = var_left.value();
11767 224 : right = var_right.value();
11768 :
11769 224 : Label if_notsame(this);
11770 448 : GotoIf(WordNotEqual(left, right), &if_notsame);
11771 : {
11772 : // {left} and {right} reference the exact same value, yet we need special
11773 : // treatment for HeapNumber, as NaN is not equal to NaN.
11774 224 : GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
11775 : }
11776 :
11777 : BIND(&if_notsame);
11778 224 : Label if_left_smi(this), if_left_not_smi(this);
11779 448 : Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11780 :
11781 : BIND(&if_left_smi);
11782 : {
11783 224 : Label if_right_smi(this), if_right_not_smi(this);
11784 448 : Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11785 :
11786 : BIND(&if_right_smi);
11787 : {
11788 : // We have already checked for {left} and {right} being the same value,
11789 : // so when we get here they must be different Smis.
11790 : CombineFeedback(var_type_feedback,
11791 224 : CompareOperationFeedback::kSignedSmall);
11792 224 : Goto(&if_notequal);
11793 : }
11794 :
11795 : BIND(&if_right_not_smi);
11796 : Node* right_map = LoadMap(right);
11797 224 : Label if_right_heapnumber(this), if_right_boolean(this),
11798 224 : if_right_bigint(this, Label::kDeferred),
11799 224 : if_right_receiver(this, Label::kDeferred);
11800 448 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11801 : // {left} is Smi and {right} is not HeapNumber or Smi.
11802 224 : if (var_type_feedback != nullptr) {
11803 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
11804 : }
11805 448 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11806 : Node* right_type = LoadMapInstanceType(right_map);
11807 448 : GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11808 224 : GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11809 448 : Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
11810 224 : &if_notequal);
11811 :
11812 : BIND(&if_right_heapnumber);
11813 : {
11814 448 : var_left_float = SmiToFloat64(left);
11815 : var_right_float = LoadHeapNumberValue(right);
11816 224 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11817 224 : Goto(&do_float_comparison);
11818 : }
11819 :
11820 : BIND(&if_right_boolean);
11821 : {
 : // Convert the Boolean {right} to its number value and retry.
11822 224 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11823 224 : Goto(&loop);
11824 : }
11825 :
11826 : BIND(&if_right_bigint);
11827 : {
 : // {right} is the BigInt here, hence the swapped argument order.
11828 : result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
11829 224 : NoContextConstant(), right, left));
11830 224 : Goto(&end);
11831 : }
11832 :
11833 : BIND(&if_right_receiver);
11834 : {
11835 224 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
11836 448 : var_right.Bind(CallStub(callable, context, right));
11837 224 : Goto(&loop);
11838 : }
11839 : }
11840 :
11841 : BIND(&if_left_not_smi);
11842 : {
11843 448 : GotoIf(TaggedIsSmi(right), &use_symmetry);
11844 :
11845 224 : Label if_left_symbol(this), if_left_number(this), if_left_string(this),
11846 224 : if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
11847 224 : if_left_receiver(this);
11848 :
11849 : Node* left_map = LoadMap(left);
11850 : Node* right_map = LoadMap(right);
11851 : Node* left_type = LoadMapInstanceType(left_map);
11852 : Node* right_type = LoadMapInstanceType(right_map);
11853 :
11854 448 : GotoIf(IsStringInstanceType(left_type), &if_left_string);
11855 224 : GotoIf(IsSymbolInstanceType(left_type), &if_left_symbol);
11856 224 : GotoIf(IsHeapNumberInstanceType(left_type), &if_left_number);
11857 224 : GotoIf(IsOddballInstanceType(left_type), &if_left_oddball);
11858 224 : Branch(IsBigIntInstanceType(left_type), &if_left_bigint,
11859 224 : &if_left_receiver);
11860 :
11861 : BIND(&if_left_string);
11862 : {
11863 448 : GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
11864 448 : result.Bind(CallBuiltin(Builtins::kStringEqual, context, left, right));
11865 : CombineFeedback(var_type_feedback,
11866 448 : SmiOr(CollectFeedbackForString(left_type),
11867 896 : CollectFeedbackForString(right_type)));
11868 224 : Goto(&end);
11869 : }
11870 :
11871 : BIND(&if_left_number);
11872 : {
11873 224 : Label if_right_not_number(this);
11874 448 : GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
11875 :
11876 : var_left_float = LoadHeapNumberValue(left);
11877 : var_right_float = LoadHeapNumberValue(right);
11878 224 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11879 224 : Goto(&do_float_comparison);
11880 :
11881 : BIND(&if_right_not_number);
11882 : {
11883 224 : Label if_right_boolean(this);
11884 224 : if (var_type_feedback != nullptr) {
11885 168 : var_type_feedback->Bind(
11886 168 : SmiConstant(CompareOperationFeedback::kAny));
11887 : }
11888 448 : GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11889 448 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11890 224 : GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
11891 448 : Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
11892 224 : &if_notequal);
11893 :
11894 : BIND(&if_right_boolean);
11895 : {
11896 224 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11897 224 : Goto(&loop);
11898 : }
11899 : }
11900 : }
11901 :
11902 : BIND(&if_left_bigint);
11903 : {
11904 224 : Label if_right_heapnumber(this), if_right_bigint(this),
11905 224 : if_right_string(this), if_right_boolean(this);
11906 448 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11907 224 : GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11908 448 : GotoIf(IsStringInstanceType(right_type), &if_right_string);
11909 448 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11910 448 : Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
11911 224 : &if_notequal);
11912 :
11913 : BIND(&if_right_heapnumber);
11914 : {
11915 224 : if (var_type_feedback != nullptr) {
11916 168 : var_type_feedback->Bind(
11917 168 : SmiConstant(CompareOperationFeedback::kAny));
11918 : }
11919 : result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
11920 224 : NoContextConstant(), left, right));
11921 224 : Goto(&end);
11922 : }
11923 :
11924 : BIND(&if_right_bigint);
11925 : {
11926 224 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11927 : result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
11928 224 : NoContextConstant(), left, right));
11929 224 : Goto(&end);
11930 : }
11931 :
11932 : BIND(&if_right_string);
11933 : {
11934 224 : if (var_type_feedback != nullptr) {
11935 168 : var_type_feedback->Bind(
11936 168 : SmiConstant(CompareOperationFeedback::kAny));
11937 : }
11938 : result.Bind(CallRuntime(Runtime::kBigIntEqualToString,
11939 224 : NoContextConstant(), left, right));
11940 224 : Goto(&end);
11941 : }
11942 :
11943 : BIND(&if_right_boolean);
11944 : {
11945 224 : if (var_type_feedback != nullptr) {
11946 168 : var_type_feedback->Bind(
11947 168 : SmiConstant(CompareOperationFeedback::kAny));
11948 : }
11949 224 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11950 224 : Goto(&loop);
11951 : }
11952 : }
11953 :
11954 : BIND(&if_left_oddball);
11955 : {
11956 224 : Label if_left_boolean(this), if_left_not_boolean(this);
11957 448 : Branch(IsBooleanMap(left_map), &if_left_boolean, &if_left_not_boolean);
11958 :
11959 : BIND(&if_left_not_boolean);
11960 : {
11961 : // {left} is either Null or Undefined. Check if {right} is
11962 : // undetectable (which includes Null and Undefined).
11963 224 : Label if_right_undetectable(this), if_right_not_undetectable(this);
11964 448 : Branch(IsUndetectableMap(right_map), &if_right_undetectable,
11965 224 : &if_right_not_undetectable);
11966 :
11967 : BIND(&if_right_undetectable);
11968 : {
11969 224 : if (var_type_feedback != nullptr) {
11970 : // If {right} is undetectable, it must be either also
11971 : // Null or Undefined, or a Receiver (aka document.all).
11972 168 : var_type_feedback->Bind(SmiConstant(
11973 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
11974 : }
11975 224 : Goto(&if_equal);
11976 : }
11977 :
11978 : BIND(&if_right_not_undetectable);
11979 : {
11980 224 : if (var_type_feedback != nullptr) {
11981 : // Track whether {right} is Null, Undefined or Receiver.
11982 168 : var_type_feedback->Bind(SmiConstant(
11983 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
11984 336 : GotoIf(IsJSReceiverInstanceType(right_type), &if_notequal);
11985 336 : GotoIfNot(IsBooleanMap(right_map), &if_notequal);
11986 168 : var_type_feedback->Bind(
11987 168 : SmiConstant(CompareOperationFeedback::kAny));
11988 : }
11989 224 : Goto(&if_notequal);
11990 : }
11991 : }
11992 :
11993 : BIND(&if_left_boolean);
11994 : {
11995 224 : if (var_type_feedback != nullptr) {
11996 168 : var_type_feedback->Bind(
11997 168 : SmiConstant(CompareOperationFeedback::kAny));
11998 : }
11999 :
12000 : // If {right} is a Boolean too, it must be a different Boolean.
12001 448 : GotoIf(WordEqual(right_map, left_map), &if_notequal);
12002 :
12003 : // Otherwise, convert {left} to number and try again.
12004 224 : var_left.Bind(LoadObjectField(left, Oddball::kToNumberOffset));
12005 224 : Goto(&loop);
12006 : }
12007 : }
12008 :
12009 : BIND(&if_left_symbol);
12010 : {
12011 224 : Label if_right_receiver(this);
12012 448 : GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
12013 : // {right} is not a JSReceiver and also not the same Symbol as {left},
12014 : // so the result is "not equal".
12015 224 : if (var_type_feedback != nullptr) {
12016 168 : Label if_right_symbol(this);
12017 168 : GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
12018 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
12019 168 : Goto(&if_notequal);
12020 :
12021 : BIND(&if_right_symbol);
12022 : {
12023 : CombineFeedback(var_type_feedback,
12024 168 : CompareOperationFeedback::kSymbol);
12025 168 : Goto(&if_notequal);
12026 : }
12027 : } else {
12028 56 : Goto(&if_notequal);
12029 : }
12030 :
12031 : BIND(&if_right_receiver);
12032 : {
12033 : // {left} is a Primitive and {right} is a JSReceiver, so swapping
12034 : // the order is not observable.
12035 224 : if (var_type_feedback != nullptr) {
12036 168 : var_type_feedback->Bind(
12037 168 : SmiConstant(CompareOperationFeedback::kAny));
12038 : }
12039 224 : Goto(&use_symmetry);
12040 : }
12041 : }
12042 :
12043 : BIND(&if_left_receiver);
12044 : {
12045 : CSA_ASSERT(this, IsJSReceiverInstanceType(left_type));
12046 224 : Label if_right_receiver(this), if_right_not_receiver(this);
12047 448 : Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
12048 224 : &if_right_not_receiver);
12049 :
12050 : BIND(&if_right_receiver);
12051 : {
12052 : // {left} and {right} are different JSReceiver references.
12053 : CombineFeedback(var_type_feedback,
12054 224 : CompareOperationFeedback::kReceiver);
12055 224 : Goto(&if_notequal);
12056 : }
12057 :
12058 : BIND(&if_right_not_receiver);
12059 : {
12060 : // Check if {right} is undetectable, which means it must be Null
12061 : // or Undefined, since we already ruled out Receiver for {right}.
12062 224 : Label if_right_undetectable(this),
12063 224 : if_right_not_undetectable(this, Label::kDeferred);
12064 448 : Branch(IsUndetectableMap(right_map), &if_right_undetectable,
12065 224 : &if_right_not_undetectable);
12066 :
12067 : BIND(&if_right_undetectable);
12068 : {
12069 : // When we get here, {right} must be either Null or Undefined.
12070 : CSA_ASSERT(this, IsNullOrUndefined(right));
12071 224 : if (var_type_feedback != nullptr) {
12072 168 : var_type_feedback->Bind(SmiConstant(
12073 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
12074 : }
 : // An undetectable {left} (e.g. document.all) equals null/undefined.
12075 448 : Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
12076 : }
12077 :
12078 : BIND(&if_right_not_undetectable);
12079 : {
12080 : // {right} is a Primitive, and neither Null or Undefined;
12081 : // convert {left} to Primitive too.
12082 224 : if (var_type_feedback != nullptr) {
12083 168 : var_type_feedback->Bind(
12084 168 : SmiConstant(CompareOperationFeedback::kAny));
12085 : }
12086 224 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
12087 448 : var_left.Bind(CallStub(callable, context, left));
12088 224 : Goto(&loop);
12089 : }
12090 : }
12091 : }
12092 : }
12093 :
12094 : BIND(&do_right_stringtonumber);
12095 : {
12096 448 : var_right.Bind(CallBuiltin(Builtins::kStringToNumber, context, right));
12097 224 : Goto(&loop);
12098 : }
12099 :
12100 : BIND(&use_symmetry);
12101 : {
 : // Swap the operands and re-enter the loop; abstract equality is
 : // symmetric, so this halves the number of cases above.
12102 224 : var_left.Bind(right);
12103 224 : var_right.Bind(left);
12104 224 : Goto(&loop);
12105 : }
12106 : }
12107 :
12108 : BIND(&do_float_comparison);
12109 : {
12110 448 : Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
12111 224 : &if_equal, &if_notequal);
12112 : }
12113 :
12114 : BIND(&if_equal);
12115 : {
12116 224 : result.Bind(TrueConstant());
12117 224 : Goto(&end);
12118 : }
12119 :
12120 : BIND(&if_notequal);
12121 : {
12122 224 : result.Bind(FalseConstant());
12123 224 : Goto(&end);
12124 : }
12125 :
12126 : BIND(&end);
12127 448 : return result.value();
12128 : }
12129 :
12130 392 : Node* CodeStubAssembler::StrictEqual(Node* lhs, Node* rhs,
12131 : Variable* var_type_feedback) {
 : // Implements strict equality ({lhs} === {rhs}) and returns the JS
 : // boolean result. When {var_type_feedback} is non-null it receives the
 : // CompareOperationFeedback observed for the operands.
12132 : // Pseudo-code for the algorithm below:
12133 : //
12134 : // if (lhs == rhs) {
12135 : // if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
12136 : // return true;
12137 : // }
12138 : // if (!lhs->IsSmi()) {
12139 : // if (lhs->IsHeapNumber()) {
12140 : // if (rhs->IsSmi()) {
12141 : // return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
12142 : // } else if (rhs->IsHeapNumber()) {
12143 : // return HeapNumber::cast(rhs)->value() ==
12144 : // HeapNumber::cast(lhs)->value();
12145 : // } else {
12146 : // return false;
12147 : // }
12148 : // } else {
12149 : // if (rhs->IsSmi()) {
12150 : // return false;
12151 : // } else {
12152 : // if (lhs->IsString()) {
12153 : // if (rhs->IsString()) {
12154 : // return %StringEqual(lhs, rhs);
12155 : // } else {
12156 : // return false;
12157 : // }
12158 : // } else if (lhs->IsBigInt()) {
12159 : // if (rhs->IsBigInt()) {
12160 : // return %BigIntEqualToBigInt(lhs, rhs);
12161 : // } else {
12162 : // return false;
12163 : // }
12164 : // } else {
12165 : // return false;
12166 : // }
12167 : // }
12168 : // }
12169 : // } else {
12170 : // if (rhs->IsSmi()) {
12171 : // return false;
12172 : // } else {
12173 : // if (rhs->IsHeapNumber()) {
12174 : // return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
12175 : // } else {
12176 : // return false;
12177 : // }
12178 : // }
12179 : // }
12180 :
12181 784 : Label if_equal(this), if_notequal(this), end(this);
12182 784 : VARIABLE(result, MachineRepresentation::kTagged);
12183 :
12184 : // Check if {lhs} and {rhs} refer to the same object.
12185 392 : Label if_same(this), if_notsame(this);
12186 784 : Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
12187 :
12188 : BIND(&if_same);
12189 : {
12190 : // The {lhs} and {rhs} reference the exact same value, yet we need special
12191 : // treatment for HeapNumber, as NaN is not equal to NaN.
12192 392 : if (var_type_feedback != nullptr) {
12193 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
12194 : }
12195 392 : GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
12196 : }
12197 :
12198 : BIND(&if_notsame);
12199 : {
12200 : // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
12201 : // BigInt and String they can still be considered equal.
12202 :
 : // Start from kAny; the branches below narrow the feedback where a more
 : // precise bucket is known.
12203 392 : if (var_type_feedback != nullptr) {
12204 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
12205 : }
12206 :
12207 : // Check if {lhs} is a Smi or a HeapObject.
12208 392 : Label if_lhsissmi(this), if_lhsisnotsmi(this);
12209 784 : Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
12210 :
12211 : BIND(&if_lhsisnotsmi);
12212 : {
12213 : // Load the map of {lhs}.
12214 : Node* lhs_map = LoadMap(lhs);
12215 :
12216 : // Check if {lhs} is a HeapNumber.
12217 392 : Label if_lhsisnumber(this), if_lhsisnotnumber(this);
12218 784 : Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
12219 :
12220 : BIND(&if_lhsisnumber);
12221 : {
12222 : // Check if {rhs} is a Smi or a HeapObject.
12223 392 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
12224 784 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
12225 :
12226 : BIND(&if_rhsissmi);
12227 : {
12228 : // Convert {lhs} and {rhs} to floating point values.
12229 : Node* lhs_value = LoadHeapNumberValue(lhs);
12230 784 : Node* rhs_value = SmiToFloat64(rhs);
12231 :
12232 392 : if (var_type_feedback != nullptr) {
12233 : var_type_feedback->Bind(
12234 168 : SmiConstant(CompareOperationFeedback::kNumber));
12235 : }
12236 :
12237 : // Perform a floating point comparison of {lhs} and {rhs}.
12238 784 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12239 : }
12240 :
12241 : BIND(&if_rhsisnotsmi);
12242 : {
12243 : // Load the map of {rhs}.
12244 : Node* rhs_map = LoadMap(rhs);
12245 :
12246 : // Check if {rhs} is also a HeapNumber.
12247 392 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
12248 784 : Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
12249 :
12250 : BIND(&if_rhsisnumber);
12251 : {
12252 : // Convert {lhs} and {rhs} to floating point values.
12253 : Node* lhs_value = LoadHeapNumberValue(lhs);
12254 : Node* rhs_value = LoadHeapNumberValue(rhs);
12255 :
12256 392 : if (var_type_feedback != nullptr) {
12257 : var_type_feedback->Bind(
12258 168 : SmiConstant(CompareOperationFeedback::kNumber));
12259 : }
12260 :
12261 : // Perform a floating point comparison of {lhs} and {rhs}.
12262 784 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12263 : }
12264 :
12265 : BIND(&if_rhsisnotnumber);
12266 392 : Goto(&if_notequal);
12267 : }
12268 : }
12269 :
12270 : BIND(&if_lhsisnotnumber);
12271 : {
12272 : // Check if {rhs} is a Smi or a HeapObject.
12273 392 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
12274 784 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
12275 :
12276 : BIND(&if_rhsissmi);
12277 392 : Goto(&if_notequal);
12278 :
12279 : BIND(&if_rhsisnotsmi);
12280 : {
12281 : // Load the instance type of {lhs}.
12282 : Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
12283 :
12284 : // Check if {lhs} is a String.
12285 392 : Label if_lhsisstring(this), if_lhsisnotstring(this);
12286 784 : Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
12287 392 : &if_lhsisnotstring);
12288 :
12289 : BIND(&if_lhsisstring);
12290 : {
12291 : // Load the instance type of {rhs}.
12292 784 : Node* rhs_instance_type = LoadInstanceType(rhs);
12293 :
12294 : // Check if {rhs} is also a String.
12295 392 : Label if_rhsisstring(this, Label::kDeferred),
12296 392 : if_rhsisnotstring(this);
12297 784 : Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
12298 392 : &if_rhsisnotstring);
12299 :
12300 : BIND(&if_rhsisstring);
12301 : {
12302 392 : if (var_type_feedback != nullptr) {
12303 : TNode<Smi> lhs_feedback =
12304 168 : CollectFeedbackForString(lhs_instance_type);
12305 : TNode<Smi> rhs_feedback =
12306 168 : CollectFeedbackForString(rhs_instance_type);
12307 336 : var_type_feedback->Bind(SmiOr(lhs_feedback, rhs_feedback));
12308 : }
12309 784 : result.Bind(CallBuiltin(Builtins::kStringEqual,
12310 784 : NoContextConstant(), lhs, rhs));
12311 392 : Goto(&end);
12312 : }
12313 :
12314 : BIND(&if_rhsisnotstring);
12315 392 : Goto(&if_notequal);
12316 : }
12317 :
12318 : BIND(&if_lhsisnotstring);
12319 :
12320 : // Check if {lhs} is a BigInt.
12321 392 : Label if_lhsisbigint(this), if_lhsisnotbigint(this);
12322 392 : Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
12323 392 : &if_lhsisnotbigint);
12324 :
12325 : BIND(&if_lhsisbigint);
12326 : {
12327 : // Load the instance type of {rhs}.
12328 784 : Node* rhs_instance_type = LoadInstanceType(rhs);
12329 :
12330 : // Check if {rhs} is also a BigInt.
12331 392 : Label if_rhsisbigint(this, Label::kDeferred),
12332 392 : if_rhsisnotbigint(this);
12333 392 : Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
12334 392 : &if_rhsisnotbigint);
12335 :
12336 : BIND(&if_rhsisbigint);
12337 : {
12338 392 : if (var_type_feedback != nullptr) {
12339 : var_type_feedback->Bind(
12340 168 : SmiConstant(CompareOperationFeedback::kBigInt));
12341 : }
12342 : result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
12343 392 : NoContextConstant(), lhs, rhs));
12344 392 : Goto(&end);
12345 : }
12346 :
12347 : BIND(&if_rhsisnotbigint);
12348 392 : Goto(&if_notequal);
12349 : }
12350 :
 : // {lhs} is neither Number, String nor BigInt here, so the result is
 : // always "not equal"; the remaining work only refines the feedback.
12351 : BIND(&if_lhsisnotbigint);
12352 392 : if (var_type_feedback != nullptr) {
12353 : // Load the instance type of {rhs}.
12354 : Node* rhs_map = LoadMap(rhs);
12355 : Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
12356 :
12357 168 : Label if_lhsissymbol(this), if_lhsisreceiver(this),
12358 168 : if_lhsisoddball(this);
12359 336 : GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
12360 168 : &if_lhsisreceiver);
12361 336 : GotoIf(IsBooleanMap(lhs_map), &if_notequal);
12362 168 : GotoIf(IsOddballInstanceType(lhs_instance_type), &if_lhsisoddball);
12363 168 : Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
12364 168 : &if_notequal);
12365 :
12366 : BIND(&if_lhsisreceiver);
12367 : {
12368 336 : GotoIf(IsBooleanMap(rhs_map), &if_notequal);
12369 : var_type_feedback->Bind(
12370 168 : SmiConstant(CompareOperationFeedback::kReceiver));
12371 336 : GotoIf(IsJSReceiverInstanceType(rhs_instance_type), &if_notequal);
12372 : var_type_feedback->Bind(SmiConstant(
12373 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
12374 168 : GotoIf(IsOddballInstanceType(rhs_instance_type), &if_notequal);
12375 : var_type_feedback->Bind(
12376 168 : SmiConstant(CompareOperationFeedback::kAny));
12377 168 : Goto(&if_notequal);
12378 : }
12379 :
12380 : BIND(&if_lhsisoddball);
12381 : {
12382 : STATIC_ASSERT(LAST_PRIMITIVE_TYPE == ODDBALL_TYPE);
12383 336 : GotoIf(IsBooleanMap(rhs_map), &if_notequal);
12384 168 : GotoIf(
12385 504 : Int32LessThan(rhs_instance_type, Int32Constant(ODDBALL_TYPE)),
12386 168 : &if_notequal);
12387 : var_type_feedback->Bind(SmiConstant(
12388 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
12389 168 : Goto(&if_notequal);
12390 : }
12391 :
12392 : BIND(&if_lhsissymbol);
12393 : {
12394 168 : GotoIfNot(IsSymbolInstanceType(rhs_instance_type), &if_notequal);
12395 : var_type_feedback->Bind(
12396 168 : SmiConstant(CompareOperationFeedback::kSymbol));
12397 168 : Goto(&if_notequal);
12398 : }
12399 : } else {
12400 224 : Goto(&if_notequal);
12401 : }
12402 : }
12403 : }
12404 : }
12405 :
12406 : BIND(&if_lhsissmi);
12407 : {
12408 : // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
12409 : // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
12410 : // HeapNumber with an equal floating point value.
12411 :
12412 : // Check if {rhs} is a Smi or a HeapObject.
12413 392 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
12414 784 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
12415 :
12416 : BIND(&if_rhsissmi);
12417 392 : if (var_type_feedback != nullptr) {
12418 : var_type_feedback->Bind(
12419 168 : SmiConstant(CompareOperationFeedback::kSignedSmall));
12420 : }
12421 392 : Goto(&if_notequal);
12422 :
12423 : BIND(&if_rhsisnotsmi);
12424 : {
12425 : // Load the map of the {rhs}.
12426 : Node* rhs_map = LoadMap(rhs);
12427 :
12428 : // The {rhs} could be a HeapNumber with the same value as {lhs}.
12429 392 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
12430 784 : Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
12431 :
12432 : BIND(&if_rhsisnumber);
12433 : {
12434 : // Convert {lhs} and {rhs} to floating point values.
12435 784 : Node* lhs_value = SmiToFloat64(lhs);
12436 : Node* rhs_value = LoadHeapNumberValue(rhs);
12437 :
12438 392 : if (var_type_feedback != nullptr) {
12439 : var_type_feedback->Bind(
12440 168 : SmiConstant(CompareOperationFeedback::kNumber));
12441 : }
12442 :
12443 : // Perform a floating point comparison of {lhs} and {rhs}.
12444 784 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12445 : }
12446 :
12447 : BIND(&if_rhsisnotnumber);
12448 392 : Goto(&if_notequal);
12449 : }
12450 : }
12451 : }
12452 :
12453 : BIND(&if_equal);
12454 : {
12455 392 : result.Bind(TrueConstant());
12456 392 : Goto(&end);
12457 : }
12458 :
12459 : BIND(&if_notequal);
12460 : {
12461 392 : result.Bind(FalseConstant());
12462 392 : Goto(&end);
12463 : }
12464 :
12465 : BIND(&end);
12466 784 : return result.value();
12467 : }
12468 :
// ECMA#sec-samevalue
// This algorithm differs from the Strict Equality Comparison Algorithm in its
// treatment of signed zeroes and NaNs: SameValue(NaN, NaN) is true, while
// SameValue(+0, -0) is false.
// {mode} == SameValueMode::kNumbersOnly skips the String/BigInt cases, for
// callers that already know both inputs are numeric.
void CodeStubAssembler::BranchIfSameValue(Node* lhs, Node* rhs, Label* if_true,
                                          Label* if_false, SameValueMode mode) {
  // The two float64 slots feed the shared fcmp tail below.
  VARIABLE(var_lhs_value, MachineRepresentation::kFloat64);
  VARIABLE(var_rhs_value, MachineRepresentation::kFloat64);
  Label do_fcmp(this);

  // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
  // StrictEqual - SameValue considers two NaNs to be equal.
  GotoIf(WordEqual(lhs, rhs), if_true);

  // Check if the {lhs} is a Smi.
  Label if_lhsissmi(this), if_lhsisheapobject(this);
  Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);

  BIND(&if_lhsissmi);
  {
    // Since {lhs} is a Smi, the comparison can only yield true
    // iff the {rhs} is a HeapNumber with the same float64 value.
    Branch(TaggedIsSmi(rhs), if_false, [&] {
      GotoIfNot(IsHeapNumber(rhs), if_false);
      var_lhs_value.Bind(SmiToFloat64(lhs));
      var_rhs_value.Bind(LoadHeapNumberValue(rhs));
      Goto(&do_fcmp);
    });
  }

  BIND(&if_lhsisheapobject);
  {
    // Check if the {rhs} is a Smi.
    Branch(TaggedIsSmi(rhs),
           [&] {
             // Since {rhs} is a Smi, the comparison can only yield true
             // iff the {lhs} is a HeapNumber with the same float64 value.
             GotoIfNot(IsHeapNumber(lhs), if_false);
             var_lhs_value.Bind(LoadHeapNumberValue(lhs));
             var_rhs_value.Bind(SmiToFloat64(rhs));
             Goto(&do_fcmp);
           },
           [&] {
             // Now this can only yield true if either both {lhs} and {rhs} are
             // HeapNumbers with the same value, or both are Strings with the
             // same character sequence, or both are BigInts with the same
             // value.
             Label if_lhsisheapnumber(this), if_lhsisstring(this),
                 if_lhsisbigint(this);
             Node* const lhs_map = LoadMap(lhs);
             GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
             // String/BigInt dispatch is only emitted when the caller did not
             // restrict the comparison to numbers.
             if (mode != SameValueMode::kNumbersOnly) {
               Node* const lhs_instance_type = LoadMapInstanceType(lhs_map);
               GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
               GotoIf(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint);
             }
             Goto(if_false);

             BIND(&if_lhsisheapnumber);
             {
               GotoIfNot(IsHeapNumber(rhs), if_false);
               var_lhs_value.Bind(LoadHeapNumberValue(lhs));
               var_rhs_value.Bind(LoadHeapNumberValue(rhs));
               Goto(&do_fcmp);
             }

             if (mode != SameValueMode::kNumbersOnly) {
               BIND(&if_lhsisstring);
               {
                 // Now we can only yield true if {rhs} is also a String
                 // with the same sequence of characters.
                 GotoIfNot(IsString(rhs), if_false);
                 Node* const result = CallBuiltin(
                     Builtins::kStringEqual, NoContextConstant(), lhs, rhs);
                 Branch(IsTrue(result), if_true, if_false);
               }

               BIND(&if_lhsisbigint);
               {
                 // BigInt equality is delegated to the runtime.
                 GotoIfNot(IsBigInt(rhs), if_false);
                 Node* const result =
                     CallRuntime(Runtime::kBigIntEqualToBigInt,
                                 NoContextConstant(), lhs, rhs);
                 Branch(IsTrue(result), if_true, if_false);
               }
             }
           });
  }

  BIND(&do_fcmp);
  {
    // Shared tail: both sides have been converted to float64; apply the
    // SameValue number semantics (NaN == NaN, +0 != -0).
    TNode<Float64T> lhs_value = UncheckedCast<Float64T>(var_lhs_value.value());
    TNode<Float64T> rhs_value = UncheckedCast<Float64T>(var_rhs_value.value());
    BranchIfSameNumberValue(lhs_value, rhs_value, if_true, if_false);
  }
}
12564 :
12565 1848 : void CodeStubAssembler::BranchIfSameNumberValue(TNode<Float64T> lhs_value,
12566 : TNode<Float64T> rhs_value,
12567 : Label* if_true,
12568 : Label* if_false) {
12569 3696 : Label if_equal(this), if_notequal(this);
12570 3696 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12571 :
12572 : BIND(&if_equal);
12573 : {
12574 : // We still need to handle the case when {lhs} and {rhs} are -0.0 and
12575 : // 0.0 (or vice versa). Compare the high word to
12576 : // distinguish between the two.
12577 3696 : Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_value);
12578 3696 : Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_value);
12579 :
12580 : // If x is +0 and y is -0, return false.
12581 : // If x is -0 and y is +0, return false.
12582 3696 : Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
12583 : }
12584 :
12585 : BIND(&if_notequal);
12586 : {
12587 : // Return true iff both {rhs} and {lhs} are NaN.
12588 3696 : GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
12589 3696 : Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
12590 : }
12591 1848 : }
12592 :
// Emits a HasProperty-style lookup of {key} on {object}: walks the prototype
// chain inline for the fast cases and returns the True/False Oddball.
// Proxies and any bailout cases fall back to the runtime; {mode} selects
// which runtime fallback (plain HasProperty vs. the for-in variant) is used
// and whether proxies are handled via the ProxyHasProperty builtin.
TNode<Oddball> CodeStubAssembler::HasProperty(SloppyTNode<Context> context,
                                              SloppyTNode<Object> object,
                                              SloppyTNode<Object> key,
                                              HasPropertyLookupMode mode) {
  Label call_runtime(this, Label::kDeferred), return_true(this),
      return_false(this), end(this), if_proxy(this, Label::kDeferred);

  // Per-holder callback for named lookups: jumps to {return_true} when the
  // holder owns the property, otherwise continues to the next holder or
  // bails out to the runtime.
  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
      [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
                           Node* holder_instance_type, Node* unique_name,
                           Label* next_holder, Label* if_bailout) {
        TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
                          &return_true, next_holder, if_bailout);
      };

  // Per-holder callback for indexed (element) lookups; can also conclude
  // {return_false} directly (e.g. for out-of-bounds indices).
  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
      [this, &return_true, &return_false](
          Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* index, Label* next_holder,
          Label* if_bailout) {
        TryLookupElement(holder, holder_map, holder_instance_type, index,
                         &return_true, &return_false, next_holder, if_bailout);
      };

  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &return_false,
                          &call_runtime, &if_proxy);

  TVARIABLE(Oddball, result);

  BIND(&if_proxy);
  {
    TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
    switch (mode) {
      case kHasProperty:
        // Private symbols are never observable through a proxy.
        GotoIf(IsPrivateSymbol(name), &return_false);

        result = CAST(
            CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
        Goto(&end);
        break;
      case kForInHasProperty:
        // The for-in variant handles proxies in the runtime.
        Goto(&call_runtime);
        break;
    }
  }

  BIND(&return_true);
  {
    result = TrueConstant();
    Goto(&end);
  }

  BIND(&return_false);
  {
    result = FalseConstant();
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    // Slow path: pick the runtime function matching {mode}.
    Runtime::FunctionId fallback_runtime_function_id;
    switch (mode) {
      case kHasProperty:
        fallback_runtime_function_id = Runtime::kHasProperty;
        break;
      case kForInHasProperty:
        fallback_runtime_function_id = Runtime::kForInHasProperty;
        break;
    }

    result =
        CAST(CallRuntime(fallback_runtime_function_id, context, object, key));
    Goto(&end);
  }

  BIND(&end);
  CSA_ASSERT(this, IsBoolean(result.value()));
  return result.value();
}
12673 :
// Emits the typeof operator for {value}: returns one of the interned typeof
// result strings ("number", "function", "undefined", "object", "string",
// "bigint", "symbol"), or the Oddball's type_of field for oddballs
// (true/false/null/undefined).
Node* CodeStubAssembler::Typeof(Node* value) {
  VARIABLE(result_var, MachineRepresentation::kTagged);

  Label return_number(this, Label::kDeferred), if_oddball(this),
      return_function(this), return_undefined(this), return_object(this),
      return_string(this), return_bigint(this), return_result(this);

  // Smis are numbers.
  GotoIf(TaggedIsSmi(value), &return_number);

  Node* map = LoadMap(value);

  GotoIf(IsHeapNumberMap(map), &return_number);

  Node* instance_type = LoadMapInstanceType(map);

  // Oddballs carry their own typeof string in the type_of field.
  GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);

  // Dispatch on the callable/undetectable map bits: callable and not
  // undetectable => "function"; any undetectable bit set => "undefined"
  // (e.g. document.all).
  Node* callable_or_undetectable_mask = Word32And(
      LoadMapBitField(map),
      Int32Constant(Map::IsCallableBit::kMask | Map::IsUndetectableBit::kMask));

  GotoIf(Word32Equal(callable_or_undetectable_mask,
                     Int32Constant(Map::IsCallableBit::kMask)),
         &return_function);

  GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
            &return_undefined);

  GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);

  GotoIf(IsStringInstanceType(instance_type), &return_string);

  GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);

  // Only symbols remain at this point.
  CSA_ASSERT(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE));
  result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
  Goto(&return_result);

  BIND(&return_number);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
    Goto(&return_result);
  }

  BIND(&if_oddball);
  {
    Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
    result_var.Bind(type);
    Goto(&return_result);
  }

  BIND(&return_function);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
    Goto(&return_result);
  }

  BIND(&return_undefined);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
    Goto(&return_result);
  }

  BIND(&return_object);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
    Goto(&return_result);
  }

  BIND(&return_string);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
    Goto(&return_result);
  }

  BIND(&return_bigint);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->bigint_string()));
    Goto(&return_result);
  }

  BIND(&return_result);
  return result_var.value();
}
12758 :
12759 224 : TNode<Object> CodeStubAssembler::GetSuperConstructor(
12760 : SloppyTNode<Context> context, SloppyTNode<JSFunction> active_function) {
12761 448 : Label is_not_constructor(this, Label::kDeferred), out(this);
12762 : TVARIABLE(Object, result);
12763 :
12764 : TNode<Map> map = LoadMap(active_function);
12765 : TNode<Object> prototype = LoadMapPrototype(map);
12766 : TNode<Map> prototype_map = LoadMap(CAST(prototype));
12767 448 : GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);
12768 :
12769 : result = prototype;
12770 224 : Goto(&out);
12771 :
12772 : BIND(&is_not_constructor);
12773 : {
12774 : CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
12775 : active_function);
12776 224 : Unreachable();
12777 : }
12778 :
12779 : BIND(&out);
12780 224 : return result.value();
12781 : }
12782 :
12783 504 : TNode<JSReceiver> CodeStubAssembler::SpeciesConstructor(
12784 : SloppyTNode<Context> context, SloppyTNode<Object> object,
12785 : SloppyTNode<JSReceiver> default_constructor) {
12786 504 : Isolate* isolate = this->isolate();
12787 : TVARIABLE(JSReceiver, var_result, default_constructor);
12788 :
12789 : // 2. Let C be ? Get(O, "constructor").
12790 : TNode<Object> constructor =
12791 504 : GetProperty(context, object, isolate->factory()->constructor_string());
12792 :
12793 : // 3. If C is undefined, return defaultConstructor.
12794 504 : Label out(this);
12795 1008 : GotoIf(IsUndefined(constructor), &out);
12796 :
12797 : // 4. If Type(C) is not Object, throw a TypeError exception.
12798 : ThrowIfNotJSReceiver(context, constructor,
12799 504 : MessageTemplate::kConstructorNotReceiver);
12800 :
12801 : // 5. Let S be ? Get(C, @@species).
12802 : TNode<Object> species =
12803 504 : GetProperty(context, constructor, isolate->factory()->species_symbol());
12804 :
12805 : // 6. If S is either undefined or null, return defaultConstructor.
12806 1008 : GotoIf(IsNullOrUndefined(species), &out);
12807 :
12808 : // 7. If IsConstructor(S) is true, return S.
12809 504 : Label throw_error(this);
12810 1008 : GotoIf(TaggedIsSmi(species), &throw_error);
12811 1008 : GotoIfNot(IsConstructorMap(LoadMap(CAST(species))), &throw_error);
12812 : var_result = CAST(species);
12813 504 : Goto(&out);
12814 :
12815 : // 8. Throw a TypeError exception.
12816 : BIND(&throw_error);
12817 : ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);
12818 :
12819 : BIND(&out);
12820 504 : return var_result.value();
12821 : }
12822 :
// Emits the instanceof operator: {object} instanceof {callable}, dispatching
// through {callable}'s @@hasInstance handler. Returns the True/False Oddball
// and throws a TypeError when {callable} is not a receiver or not callable.
Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
                                    Node* context) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label if_notcallable(this, Label::kDeferred),
      if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
      if_nohandler(this, Label::kDeferred), return_true(this),
      return_false(this), return_result(this, &var_result);

  // Ensure that the {callable} is actually a JSReceiver.
  GotoIf(TaggedIsSmi(callable), &if_notreceiver);
  GotoIfNot(IsJSReceiver(callable), &if_notreceiver);

  // Load the @@hasInstance property from {callable}.
  Node* inst_of_handler =
      GetProperty(context, callable, HasInstanceSymbolConstant());

  // Optimize for the likely case where {inst_of_handler} is the builtin
  // Function.prototype[@@hasInstance] method, and emit a direct call in
  // that case without any additional checking.
  Node* native_context = LoadNativeContext(context);
  Node* function_has_instance =
      LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
  GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
            &if_otherhandler);
  {
    // Call to Function.prototype[@@hasInstance] directly, which already
    // returns a Boolean.
    Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance),
                     CallTrampolineDescriptor{});
    Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_otherhandler);
  {
    // Check if there's actually an {inst_of_handler}.
    GotoIf(IsNull(inst_of_handler), &if_nohandler);
    GotoIf(IsUndefined(inst_of_handler), &if_nohandler);

    // Call the {inst_of_handler} for {callable} and {object}.
    Node* result = CallJS(
        CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
        context, inst_of_handler, callable, object);

    // Convert the {result} to a Boolean.
    BranchIfToBooleanIsTrue(result, &return_true, &return_false);
  }

  BIND(&if_nohandler);
  {
    // Ensure that the {callable} is actually Callable.
    GotoIfNot(IsCallable(callable), &if_notcallable);

    // Use the OrdinaryHasInstance algorithm.
    Node* result =
        CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_notcallable);
  { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }

  BIND(&if_notreceiver);
  { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }

  BIND(&return_true);
  var_result.Bind(TrueConstant());
  Goto(&return_result);

  BIND(&return_false);
  var_result.Bind(FalseConstant());
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
12900 :
12901 1064 : TNode<Number> CodeStubAssembler::NumberInc(SloppyTNode<Number> value) {
12902 1064 : TVARIABLE(Number, var_result);
12903 : TVARIABLE(Float64T, var_finc_value);
12904 1064 : Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
12905 2128 : Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12906 :
12907 : BIND(&if_issmi);
12908 : {
12909 1064 : Label if_overflow(this);
12910 : TNode<Smi> smi_value = CAST(value);
12911 1064 : TNode<Smi> one = SmiConstant(1);
12912 2128 : var_result = TrySmiAdd(smi_value, one, &if_overflow);
12913 1064 : Goto(&end);
12914 :
12915 : BIND(&if_overflow);
12916 : {
12917 2128 : var_finc_value = SmiToFloat64(smi_value);
12918 1064 : Goto(&do_finc);
12919 : }
12920 : }
12921 :
12922 : BIND(&if_isnotsmi);
12923 : {
12924 : TNode<HeapNumber> heap_number_value = CAST(value);
12925 :
12926 : // Load the HeapNumber value.
12927 : var_finc_value = LoadHeapNumberValue(heap_number_value);
12928 1064 : Goto(&do_finc);
12929 : }
12930 :
12931 : BIND(&do_finc);
12932 : {
12933 : TNode<Float64T> finc_value = var_finc_value.value();
12934 1064 : TNode<Float64T> one = Float64Constant(1.0);
12935 1064 : TNode<Float64T> finc_result = Float64Add(finc_value, one);
12936 2128 : var_result = AllocateHeapNumberWithValue(finc_result);
12937 1064 : Goto(&end);
12938 : }
12939 :
12940 : BIND(&end);
12941 1064 : return var_result.value();
12942 : }
12943 :
12944 224 : TNode<Number> CodeStubAssembler::NumberDec(SloppyTNode<Number> value) {
12945 224 : TVARIABLE(Number, var_result);
12946 : TVARIABLE(Float64T, var_fdec_value);
12947 224 : Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
12948 448 : Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12949 :
12950 : BIND(&if_issmi);
12951 : {
12952 : TNode<Smi> smi_value = CAST(value);
12953 224 : TNode<Smi> one = SmiConstant(1);
12954 224 : Label if_overflow(this);
12955 448 : var_result = TrySmiSub(smi_value, one, &if_overflow);
12956 224 : Goto(&end);
12957 :
12958 : BIND(&if_overflow);
12959 : {
12960 448 : var_fdec_value = SmiToFloat64(smi_value);
12961 224 : Goto(&do_fdec);
12962 : }
12963 : }
12964 :
12965 : BIND(&if_isnotsmi);
12966 : {
12967 : TNode<HeapNumber> heap_number_value = CAST(value);
12968 :
12969 : // Load the HeapNumber value.
12970 : var_fdec_value = LoadHeapNumberValue(heap_number_value);
12971 224 : Goto(&do_fdec);
12972 : }
12973 :
12974 : BIND(&do_fdec);
12975 : {
12976 : TNode<Float64T> fdec_value = var_fdec_value.value();
12977 224 : TNode<Float64T> minus_one = Float64Constant(-1.0);
12978 224 : TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
12979 448 : var_result = AllocateHeapNumberWithValue(fdec_result);
12980 224 : Goto(&end);
12981 : }
12982 :
12983 : BIND(&end);
12984 224 : return var_result.value();
12985 : }
12986 :
12987 2524 : TNode<Number> CodeStubAssembler::NumberAdd(SloppyTNode<Number> a,
12988 : SloppyTNode<Number> b) {
12989 2524 : TVARIABLE(Number, var_result);
12990 2524 : Label float_add(this, Label::kDeferred), end(this);
12991 5048 : GotoIf(TaggedIsNotSmi(a), &float_add);
12992 5048 : GotoIf(TaggedIsNotSmi(b), &float_add);
12993 :
12994 : // Try fast Smi addition first.
12995 5048 : var_result = TrySmiAdd(CAST(a), CAST(b), &float_add);
12996 2524 : Goto(&end);
12997 :
12998 : BIND(&float_add);
12999 : {
13000 5048 : var_result = ChangeFloat64ToTagged(
13001 10096 : Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
13002 2524 : Goto(&end);
13003 : }
13004 :
13005 : BIND(&end);
13006 2524 : return var_result.value();
13007 : }
13008 :
13009 2076 : TNode<Number> CodeStubAssembler::NumberSub(SloppyTNode<Number> a,
13010 : SloppyTNode<Number> b) {
13011 2076 : TVARIABLE(Number, var_result);
13012 2076 : Label float_sub(this, Label::kDeferred), end(this);
13013 4152 : GotoIf(TaggedIsNotSmi(a), &float_sub);
13014 4152 : GotoIf(TaggedIsNotSmi(b), &float_sub);
13015 :
13016 : // Try fast Smi subtraction first.
13017 4152 : var_result = TrySmiSub(CAST(a), CAST(b), &float_sub);
13018 2076 : Goto(&end);
13019 :
13020 : BIND(&float_sub);
13021 : {
13022 4152 : var_result = ChangeFloat64ToTagged(
13023 8304 : Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
13024 2076 : Goto(&end);
13025 : }
13026 :
13027 : BIND(&end);
13028 2076 : return var_result.value();
13029 : }
13030 :
13031 236 : void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
13032 472 : Label is_number(this);
13033 472 : GotoIf(TaggedIsSmi(input), &is_number);
13034 472 : Branch(IsHeapNumber(input), &is_number, is_not_number);
13035 : BIND(&is_number);
13036 236 : }
13037 :
// Jumps to {is_number} when {input} is a Number (Smi or HeapNumber);
// falls through otherwise. The Smi check must come first so that the map
// load in IsHeapNumber only ever sees a HeapObject.
void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
  GotoIf(TaggedIsSmi(input), is_number);
  GotoIf(IsHeapNumber(input), is_number);
}
13042 :
13043 2352 : TNode<Number> CodeStubAssembler::BitwiseOp(Node* left32, Node* right32,
13044 : Operation bitwise_op) {
13045 2352 : switch (bitwise_op) {
13046 : case Operation::kBitwiseAnd:
13047 784 : return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
13048 : case Operation::kBitwiseOr:
13049 784 : return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
13050 : case Operation::kBitwiseXor:
13051 784 : return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
13052 : case Operation::kShiftLeft:
13053 392 : if (!Word32ShiftIsSafe()) {
13054 0 : right32 = Word32And(right32, Int32Constant(0x1F));
13055 : }
13056 784 : return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
13057 : case Operation::kShiftRight:
13058 392 : if (!Word32ShiftIsSafe()) {
13059 0 : right32 = Word32And(right32, Int32Constant(0x1F));
13060 : }
13061 784 : return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
13062 : case Operation::kShiftRightLogical:
13063 392 : if (!Word32ShiftIsSafe()) {
13064 0 : right32 = Word32And(right32, Int32Constant(0x1F));
13065 : }
13066 784 : return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
13067 : default:
13068 : break;
13069 : }
13070 0 : UNREACHABLE();
13071 : }
13072 :
// ES #sec-createarrayiterator
// Allocates a fresh JSArrayIterator over {object} with the initial array
// iterator map from the native context; {kind} (keys/values/entries) is
// stored as a Smi in the iterator's kind field. The iteration index starts
// at 0.
TNode<JSArrayIterator> CodeStubAssembler::CreateArrayIterator(
    TNode<Context> context, TNode<Object> object, IterationKind kind) {
  TNode<Context> native_context = LoadNativeContext(context);
  TNode<Map> iterator_map = CAST(LoadContextElement(
      native_context, Context::INITIAL_ARRAY_ITERATOR_MAP_INDEX));
  Node* iterator = Allocate(JSArrayIterator::kSize);
  // Freshly allocated object: plain no-write-barrier stores are fine.
  StoreMapNoWriteBarrier(iterator, iterator_map);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kIteratedObjectOffset, object);
  StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
                                 SmiConstant(0));
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kKindOffset,
      SmiConstant(Smi::FromInt(static_cast<int>(kind))));
  return CAST(iterator);
}
13094 :
// Allocates a fresh JSIteratorResult {value, done} object using the
// iterator-result map from the native context. {done} must be a Boolean.
Node* CodeStubAssembler::AllocateJSIteratorResult(Node* context, Node* value,
                                                  Node* done) {
  CSA_ASSERT(this, IsBoolean(done));
  Node* native_context = LoadNativeContext(context);
  Node* map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  Node* result = Allocate(JSIteratorResult::kSize);
  // Freshly allocated object: plain no-write-barrier stores are fine.
  StoreMapNoWriteBarrier(result, map);
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
  return result;
}
13111 :
// Allocates a JSIteratorResult whose value is the [key, value] entry array,
// i.e. {value: [key, value], done: false}. All three objects (the length-2
// FixedArray, the JSArray wrapping it, and the JSIteratorResult) are carved
// out of one contiguous allocation via InnerAllocate, so all stores can skip
// the write barrier.
Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
                                                          Node* key,
                                                          Node* value) {
  Node* native_context = LoadNativeContext(context);
  Node* length = SmiConstant(2);
  int const elements_size = FixedArray::SizeFor(2);
  // One allocation covering elements + array + iterator result.
  TNode<FixedArray> elements = UncheckedCast<FixedArray>(
      Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize));
  // Initialize the FixedArray [key, value].
  StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
                       RootIndex::kFixedArrayMap);
  StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
  StoreFixedArrayElement(elements, 0, key);
  StoreFixedArrayElement(elements, 1, value);
  // Initialize the JSArray directly after the elements.
  Node* array_map = LoadContextElement(
      native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX);
  TNode<HeapObject> array = InnerAllocate(elements, elements_size);
  StoreMapNoWriteBarrier(array, array_map);
  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
  // Initialize the JSIteratorResult directly after the array; done == false.
  Node* iterator_map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  TNode<HeapObject> result = InnerAllocate(array, JSArray::kSize);
  StoreMapNoWriteBarrier(result, iterator_map);
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
  StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
                       RootIndex::kFalseValue);
  return result;
}
13146 :
13147 224 : TNode<JSReceiver> CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
13148 : TNode<Object> o,
13149 : TNode<Number> len) {
13150 : TNode<JSReceiver> constructor =
13151 224 : CAST(CallRuntime(Runtime::kArraySpeciesConstructor, context, o));
13152 224 : return Construct(context, constructor, len);
13153 : }
13154 :
13155 9636 : Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
13156 : CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
13157 : TNode<Uint32T> buffer_bit_field = LoadJSArrayBufferBitField(CAST(buffer));
13158 9636 : return IsSetWord32<JSArrayBuffer::WasDetachedBit>(buffer_bit_field);
13159 : }
13160 :
// Throws a TypeError (kDetachedOperation, naming {method_name}) when
// {array_buffer} has been detached; otherwise falls through. The detached
// path is marked deferred to keep it out of the hot code layout.
void CodeStubAssembler::ThrowIfArrayBufferIsDetached(
    SloppyTNode<Context> context, TNode<JSArrayBuffer> array_buffer,
    const char* method_name) {
  Label if_detached(this, Label::kDeferred), if_not_detached(this);
  Branch(IsDetachedBuffer(array_buffer), &if_detached, &if_not_detached);
  BIND(&if_detached);
  ThrowTypeError(context, MessageTemplate::kDetachedOperation, method_name);
  BIND(&if_not_detached);
}
13170 :
13171 896 : void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
13172 : SloppyTNode<Context> context, TNode<JSArrayBufferView> array_buffer_view,
13173 : const char* method_name) {
13174 896 : TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
13175 896 : ThrowIfArrayBufferIsDetached(context, buffer, method_name);
13176 896 : }
13177 :
13178 504 : TNode<Uint32T> CodeStubAssembler::LoadJSArrayBufferBitField(
13179 : TNode<JSArrayBuffer> array_buffer) {
13180 504 : return LoadObjectField<Uint32T>(array_buffer, JSArrayBuffer::kBitFieldOffset);
13181 : }
13182 :
13183 504 : TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStore(
13184 : TNode<JSArrayBuffer> array_buffer) {
13185 : return LoadObjectField<RawPtrT>(array_buffer,
13186 504 : JSArrayBuffer::kBackingStoreOffset);
13187 : }
13188 :
13189 784 : TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(
13190 : TNode<JSArrayBufferView> array_buffer_view) {
13191 : return LoadObjectField<JSArrayBuffer>(array_buffer_view,
13192 784 : JSArrayBufferView::kBufferOffset);
13193 : }
13194 :
13195 56 : TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteLength(
13196 : TNode<JSArrayBufferView> array_buffer_view) {
13197 : return LoadObjectField<UintPtrT>(array_buffer_view,
13198 56 : JSArrayBufferView::kByteLengthOffset);
13199 : }
13200 :
13201 560 : TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteOffset(
13202 : TNode<JSArrayBufferView> array_buffer_view) {
13203 : return LoadObjectField<UintPtrT>(array_buffer_view,
13204 560 : JSArrayBufferView::kByteOffsetOffset);
13205 : }
13206 :
13207 1232 : TNode<Smi> CodeStubAssembler::LoadJSTypedArrayLength(
13208 : TNode<JSTypedArray> typed_array) {
13209 1232 : return LoadObjectField<Smi>(typed_array, JSTypedArray::kLengthOffset);
13210 : }
13211 :
// Builds an argument-access helper over a stub frame. {argc} is the argument
// count in the representation given by {param_mode}; when {fp} is null the
// current frame pointer is loaded instead. base_ is set to point at the
// first argument: fp plus the fixed frame slots above fp plus the scaled
// argument count (arguments grow downwards from base_).
CodeStubArguments::CodeStubArguments(
    CodeStubAssembler* assembler, Node* argc, Node* fp,
    CodeStubAssembler::ParameterMode param_mode, ReceiverMode receiver_mode)
    : assembler_(assembler),
      argc_mode_(param_mode),
      receiver_mode_(receiver_mode),
      argc_(argc),
      base_(),
      fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
  // Offset of the highest argument slot relative to fp.
  Node* offset = assembler_->ElementOffsetFromIndex(
      argc_, SYSTEM_POINTER_ELEMENTS, param_mode,
      (StandardFrameConstants::kFixedSlotCountAboveFp - 1) *
          kSystemPointerSize);
  base_ =
      assembler_->UncheckedCast<RawPtrT>(assembler_->IntPtrAdd(fp_, offset));
}
13228 :
13229 6508 : TNode<Object> CodeStubArguments::GetReceiver() const {
13230 : DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
13231 6508 : return assembler_->UncheckedCast<Object>(assembler_->LoadFullTagged(
13232 13016 : base_, assembler_->IntPtrConstant(kSystemPointerSize)));
13233 : }
13234 :
13235 224 : void CodeStubArguments::SetReceiver(TNode<Object> object) const {
13236 : DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
13237 224 : assembler_->StoreFullTaggedNoWriteBarrier(
13238 448 : base_, assembler_->IntPtrConstant(kSystemPointerSize), object);
13239 224 : }
13240 :
13241 12432 : TNode<WordT> CodeStubArguments::AtIndexPtr(
13242 : Node* index, CodeStubAssembler::ParameterMode mode) const {
 : // Returns the address of argument |index|. Arguments sit at decreasing
 : // addresses below base_, so the index is negated before being scaled
 : // into a byte offset.
13243 : typedef compiler::Node Node;
13244 12432 : Node* negated_index = assembler_->IntPtrOrSmiSub(
13245 24864 : assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
13246 24864 : Node* offset = assembler_->ElementOffsetFromIndex(
13247 : negated_index, SYSTEM_POINTER_ELEMENTS, mode, 0);
13248 12432 : return assembler_->IntPtrAdd(assembler_->UncheckedCast<IntPtrT>(base_),
13249 12432 : offset);
13250 : }
13251 :
13252 12376 : TNode<Object> CodeStubArguments::AtIndex(
13253 : Node* index, CodeStubAssembler::ParameterMode mode) const {
 : // Loads argument |index| as a tagged value; debug code asserts that
 : // the index is within the argument count.
13254 : DCHECK_EQ(argc_mode_, mode);
13255 : CSA_ASSERT(assembler_,
13256 : assembler_->UintPtrOrSmiLessThan(index, GetLength(mode), mode));
13257 : return assembler_->UncheckedCast<Object>(
13258 24752 : assembler_->LoadFullTagged(AtIndexPtr(index, mode)));
13259 : }
13260 :
13261 3808 : TNode<Object> CodeStubArguments::AtIndex(int index) const {
 : // Convenience overload for a compile-time-constant argument index.
13262 7616 : return AtIndex(assembler_->IntPtrConstant(index));
13263 : }
13264 :
13265 2856 : TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
13266 : int index, TNode<Object> default_value) {
 : // Returns argument |index|, or |default_value| when fewer than
 : // index + 1 arguments were passed (index >= argc).
13267 2856 : CodeStubAssembler::TVariable<Object> result(assembler_);
13268 5712 : CodeStubAssembler::Label argument_missing(assembler_),
13269 5712 : argument_done(assembler_, &result);
13270 :
13271 11424 : assembler_->GotoIf(assembler_->UintPtrOrSmiGreaterThanOrEqual(
13272 2856 : assembler_->IntPtrOrSmiConstant(index, argc_mode_),
13273 : argc_, argc_mode_),
13274 2856 : &argument_missing);
13275 2856 : result = AtIndex(index);
13276 2856 : assembler_->Goto(&argument_done);
13277 :
13278 2856 : assembler_->BIND(&argument_missing);
13279 : result = default_value;
13280 2856 : assembler_->Goto(&argument_done);
13281 :
13282 2856 : assembler_->BIND(&argument_done);
13283 2856 : return result.value();
13284 : }
13285 :
13286 7784 : TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
13287 : TNode<IntPtrT> index, TNode<Object> default_value) {
 : // Dynamic-index variant: returns argument |index| if index < argc,
 : // otherwise |default_value|. |index| is converted to the frame's
 : // parameter mode before the bounds check.
13288 7784 : CodeStubAssembler::TVariable<Object> result(assembler_);
13289 15568 : CodeStubAssembler::Label argument_missing(assembler_),
13290 15568 : argument_done(assembler_, &result);
13291 :
13292 31136 : assembler_->GotoIf(
13293 7784 : assembler_->UintPtrOrSmiGreaterThanOrEqual(
13294 7784 : assembler_->IntPtrToParameter(index, argc_mode_), argc_, argc_mode_),
13295 7784 : &argument_missing);
13296 7784 : result = AtIndex(index);
13297 7784 : assembler_->Goto(&argument_done);
13298 :
13299 7784 : assembler_->BIND(&argument_missing);
13300 : result = default_value;
13301 7784 : assembler_->Goto(&argument_done);
13302 :
13303 7784 : assembler_->BIND(&argument_done);
13304 7784 : return result.value();
13305 : }
13306 :
13307 1048 : void CodeStubArguments::ForEach(
13308 : const CodeStubAssembler::VariableList& vars,
13309 : const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
13310 : CodeStubAssembler::ParameterMode mode) {
 : // Invokes |body| on each argument in [first, last); |first| defaults
 : // to 0 and |last| to argc. The generated loop walks raw slot addresses
 : // downward from base_ (step -kSystemPointerSize), matching the layout
 : // used by AtIndexPtr.
13311 1048 : assembler_->Comment("CodeStubArguments::ForEach");
13312 1048 : if (first == nullptr) {
13313 452 : first = assembler_->IntPtrOrSmiConstant(0, mode);
13314 : }
13315 1048 : if (last == nullptr) {
13316 : DCHECK_EQ(mode, argc_mode_);
13317 1048 : last = argc_;
13318 : }
13319 : Node* start = assembler_->IntPtrSub(
13320 1048 : assembler_->UncheckedCast<IntPtrT>(base_),
13321 2096 : assembler_->ElementOffsetFromIndex(first, SYSTEM_POINTER_ELEMENTS, mode));
13322 : Node* end = assembler_->IntPtrSub(
13323 1048 : assembler_->UncheckedCast<IntPtrT>(base_),
13324 2096 : assembler_->ElementOffsetFromIndex(last, SYSTEM_POINTER_ELEMENTS, mode));
13325 2096 : assembler_->BuildFastLoop(
13326 : vars, start, end,
13327 2096 : [this, &body](Node* current) {
13328 1048 : Node* arg = assembler_->Load(MachineType::AnyTagged(), current);
13329 : body(arg);
13330 1048 : },
13331 : -kSystemPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
13332 1048 : CodeStubAssembler::IndexAdvanceMode::kPost);
13333 1048 : }
13334 :
13335 12728 : void CodeStubArguments::PopAndReturn(Node* value) {
 : // Returns |value| from the stub, popping the argument slots off the
 : // stack: argc slots, plus one extra for the receiver when the frame
 : // has one.
13336 : Node* pop_count;
13337 12728 : if (receiver_mode_ == ReceiverMode::kHasReceiver) {
13338 25456 : pop_count = assembler_->IntPtrOrSmiAdd(
13339 25456 : argc_, assembler_->IntPtrOrSmiConstant(1, argc_mode_), argc_mode_);
13340 : } else {
13341 0 : pop_count = argc_;
13342 : }
13343 :
13344 25456 : assembler_->PopAndReturn(assembler_->ParameterToIntPtr(pop_count, argc_mode_),
13345 12728 : value);
13346 12728 : }
13347 :
13348 3472 : Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
 : // Fast kinds occupy the low end of the ElementsKind enum (asserted
 : // below), so a single unsigned comparison suffices.
13349 : STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
13350 6944 : return Uint32LessThanOrEqual(elements_kind,
13351 6944 : Int32Constant(LAST_FAST_ELEMENTS_KIND));
13352 : }
13353 :
13354 284 : TNode<BoolT> CodeStubAssembler::IsDoubleElementsKind(
13355 : TNode<Int32T> elements_kind) {
 : // PACKED_DOUBLE_ELEMENTS and HOLEY_DOUBLE_ELEMENTS form an even/odd
 : // pair (asserted below), so shifting right by one maps both onto the
 : // same value, allowing a single equality check.
13356 : STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
13357 : STATIC_ASSERT((PACKED_DOUBLE_ELEMENTS & 1) == 0);
13358 : STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS);
13359 852 : return Word32Equal(Word32Shr(elements_kind, Int32Constant(1)),
13360 852 : Int32Constant(PACKED_DOUBLE_ELEMENTS / 2));
13361 : }
13362 :
13363 336 : Node* CodeStubAssembler::IsFastSmiOrTaggedElementsKind(Node* elements_kind) {
 : // True for the fast Smi/tagged kinds. The double kinds lie above
 : // TERMINAL_FAST_ELEMENTS_KIND (asserted below), so they are excluded
 : // by the comparison.
13364 : STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
13365 : STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
13366 : STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
13367 672 : return Uint32LessThanOrEqual(elements_kind,
13368 672 : Int32Constant(TERMINAL_FAST_ELEMENTS_KIND));
13369 : }
13370 :
13371 112 : Node* CodeStubAssembler::IsFastSmiElementsKind(Node* elements_kind) {
 : // True for PACKED_SMI_ELEMENTS and HOLEY_SMI_ELEMENTS, the kinds at
 : // or below HOLEY_SMI_ELEMENTS.
13372 224 : return Uint32LessThanOrEqual(elements_kind,
13373 224 : Int32Constant(HOLEY_SMI_ELEMENTS));
13374 : }
13375 :
13376 0 : Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
 : // Each holey fast kind is its packed counterpart with bit 0 set
 : // (asserted below), so testing bit 0 distinguishes holey from packed.
 : // Only valid for fast kinds (debug-asserted).
13377 : CSA_ASSERT(this, IsFastElementsKind(elements_kind));
13378 :
13379 : STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
13380 : STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
13381 : STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
13382 112 : return IsSetWord32(elements_kind, 1);
13383 : }
13384 :
13385 728 : Node* CodeStubAssembler::IsElementsKindGreaterThan(
13386 : Node* target_kind, ElementsKind reference_kind) {
 : // Signed comparison of a runtime kind against a compile-time constant
 : // reference kind.
13387 2184 : return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
13388 : }
13389 :
13390 1456 : TNode<BoolT> CodeStubAssembler::IsElementsKindLessThanOrEqual(
13391 : TNode<Int32T> target_kind, ElementsKind reference_kind) {
 : // Signed comparison of a runtime kind against a compile-time constant
 : // reference kind.
13392 2912 : return Int32LessThanOrEqual(target_kind, Int32Constant(reference_kind));
13393 : }
13394 :
13395 0 : TNode<BoolT> CodeStubAssembler::IsElementsKindInRange(
13396 : TNode<Int32T> target_kind, ElementsKind lower_reference_kind,
13397 : ElementsKind higher_reference_kind) {
 : // Checks lower_reference_kind <= target_kind <= higher_reference_kind
 : // with a single comparison: subtract the lower bound and compare the
 : // difference against the width of the range. The comparison must be
 : // *unsigned* so that kinds below the lower bound wrap around to large
 : // values and fail the test; a signed comparison would accept any kind
 : // below the lower bound, because a negative difference compares
 : // less-or-equal to the non-negative range width.
 : return Uint32LessThanOrEqual(
13398 0 : Int32Sub(target_kind, Int32Constant(lower_reference_kind)),
13399 0 : Int32Sub(Int32Constant(higher_reference_kind),
13400 0 : Int32Constant(lower_reference_kind)));
13401 : }
13403 :
13404 508 : Node* CodeStubAssembler::IsDebugActive() {
 : // Loads the isolate's debug_is_active byte and tests it for non-zero.
13405 : Node* is_debug_active = Load(
13406 : MachineType::Uint8(),
13407 1016 : ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
13408 1524 : return Word32NotEqual(is_debug_active, Int32Constant(0));
13409 : }
13410 :
13411 2576 : TNode<BoolT> CodeStubAssembler::IsRuntimeCallStatsEnabled() {
 : // Loads the 32-bit TracingFlags::runtime_stats flag word and tests it
 : // for non-zero.
13412 : STATIC_ASSERT(sizeof(TracingFlags::runtime_stats) == kInt32Size);
13413 : TNode<Word32T> flag_value = UncheckedCast<Word32T>(Load(
13414 : MachineType::Int32(),
13415 5152 : ExternalConstant(ExternalReference::address_of_runtime_stats_flag())));
13416 5152 : return Word32NotEqual(flag_value, Int32Constant(0));
13417 : }
13418 :
13419 56 : Node* CodeStubAssembler::IsPromiseHookEnabled() {
 : // True when the isolate's promise-hook pointer is non-null.
13420 : Node* const promise_hook = Load(
13421 : MachineType::Pointer(),
13422 112 : ExternalConstant(ExternalReference::promise_hook_address(isolate())));
13423 168 : return WordNotEqual(promise_hook, IntPtrConstant(0));
13424 : }
13425 :
13426 224 : Node* CodeStubAssembler::HasAsyncEventDelegate() {
 : // True when the isolate's async-event-delegate pointer is non-null.
13427 : Node* const async_event_delegate =
13428 : Load(MachineType::Pointer(),
13429 448 : ExternalConstant(
13430 448 : ExternalReference::async_event_delegate_address(isolate())));
13431 672 : return WordNotEqual(async_event_delegate, IntPtrConstant(0));
13432 : }
13433 :
13434 756 : Node* CodeStubAssembler::IsPromiseHookEnabledOrHasAsyncEventDelegate() {
 : // Tests a single combined byte exposed by the isolate for either
 : // condition, instead of performing two separate loads.
13435 : Node* const promise_hook_or_async_event_delegate =
13436 : Load(MachineType::Uint8(),
13437 1512 : ExternalConstant(
13438 : ExternalReference::promise_hook_or_async_event_delegate_address(
13439 1512 : isolate())));
13440 2268 : return Word32NotEqual(promise_hook_or_async_event_delegate, Int32Constant(0));
13441 : }
13442 :
13443 1232 : Node* CodeStubAssembler::
13444 : IsPromiseHookEnabledOrDebugIsActiveOrHasAsyncEventDelegate() {
 : // Tests a single combined byte exposed by the isolate covering all
 : // three conditions, instead of performing three separate loads.
13445 : Node* const promise_hook_or_debug_is_active_or_async_event_delegate = Load(
13446 : MachineType::Uint8(),
13447 2464 : ExternalConstant(
13448 : ExternalReference::
13449 : promise_hook_or_debug_is_active_or_async_event_delegate_address(
13450 2464 : isolate())));
13451 2464 : return Word32NotEqual(promise_hook_or_debug_is_active_or_async_event_delegate,
13452 2464 : Int32Constant(0));
13453 : }
13454 :
13455 2476 : TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
 : // Fetches the Code object for |builtin_id| from the isolate's builtins
 : // table. The Smi-encoded id is converted into a table byte offset by
 : // shifting: left when a pointer-sized slot is wider than the Smi
 : // encoding's shift, right otherwise.
13456 : CSA_ASSERT(this, SmiGreaterThanOrEqual(builtin_id, SmiConstant(0)));
13457 : CSA_ASSERT(this,
13458 : SmiLessThan(builtin_id, SmiConstant(Builtins::builtin_count)));
13459 :
13460 : int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
13461 : int index_shift = kSystemPointerSizeLog2 - kSmiShiftBits;
13462 : TNode<WordT> table_index =
13463 : index_shift >= 0 ? WordShl(BitcastTaggedToWord(builtin_id), index_shift)
13464 4952 : : WordSar(BitcastTaggedToWord(builtin_id), -index_shift);
13465 :
13466 4952 : return CAST(
13467 : Load(MachineType::TaggedPointer(),
13468 : ExternalConstant(ExternalReference::builtins_address(isolate())),
13469 : table_index));
13470 : }
13471 :
13472 1132 : TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
13473 : SloppyTNode<SharedFunctionInfo> shared_info, Label* if_compile_lazy) {
 : // Determines the Code object a function with this SharedFunctionInfo
 : // should run, by dispatching on the type of the function-data field:
 : // a Smi builtin id, bytecode, wasm-exported/asm-wasm data, uncompiled
 : // data, function template info, or interpreter data. When
 : // |if_compile_lazy| is non-null, control jumps there for functions
 : // that still need lazy compilation.
13474 : TNode<Object> sfi_data =
13475 : LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);
13476 :
13477 1132 : TVARIABLE(Code, sfi_code);
13478 :
13479 1132 : Label done(this);
13480 1132 : Label check_instance_type(this);
13481 :
13482 : // IsSmi: Is builtin
13483 2264 : GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
13484 1132 : if (if_compile_lazy) {
13485 112 : GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtins::kCompileLazy)),
13486 56 : if_compile_lazy);
13487 : }
13488 1132 : sfi_code = LoadBuiltin(CAST(sfi_data));
13489 1132 : Goto(&done);
13490 :
13491 : // Switch on data's instance type.
13492 : BIND(&check_instance_type);
13493 1132 : TNode<Int32T> data_type = LoadInstanceType(CAST(sfi_data));
13494 :
13495 : int32_t case_values[] = {BYTECODE_ARRAY_TYPE,
13496 : WASM_EXPORTED_FUNCTION_DATA_TYPE,
13497 : ASM_WASM_DATA_TYPE,
13498 : UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE,
13499 : UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE,
13500 1132 : FUNCTION_TEMPLATE_INFO_TYPE};
13501 1132 : Label check_is_bytecode_array(this);
13502 1132 : Label check_is_exported_function_data(this);
13503 1132 : Label check_is_asm_wasm_data(this);
13504 1132 : Label check_is_uncompiled_data_without_preparse_data(this);
13505 1132 : Label check_is_uncompiled_data_with_preparse_data(this);
13506 1132 : Label check_is_function_template_info(this);
13507 1132 : Label check_is_interpreter_data(this);
13508 : Label* case_labels[] = {&check_is_bytecode_array,
13509 : &check_is_exported_function_data,
13510 : &check_is_asm_wasm_data,
13511 : &check_is_uncompiled_data_without_preparse_data,
13512 : &check_is_uncompiled_data_with_preparse_data,
13513 1132 : &check_is_function_template_info};
13514 : STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels));
13515 : Switch(data_type, &check_is_interpreter_data, case_values, case_labels,
13516 1132 : arraysize(case_labels));
13517 :
13518 : // IsBytecodeArray: Interpret bytecode
13519 : BIND(&check_is_bytecode_array);
13520 2264 : sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
13521 1132 : Goto(&done);
13522 :
13523 : // IsWasmExportedFunctionData: Use the wrapper code
13524 : BIND(&check_is_exported_function_data);
13525 : sfi_code = CAST(LoadObjectField(
13526 : CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset));
13527 1132 : Goto(&done);
13528 :
13529 : // IsAsmWasmData: Instantiate using AsmWasmData
13530 : BIND(&check_is_asm_wasm_data);
13531 2264 : sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs));
13532 1132 : Goto(&done);
13533 :
13534 : // IsUncompiledDataWithPreparseData | IsUncompiledDataWithoutPreparseData:
13535 : // Compile lazy
13536 : BIND(&check_is_uncompiled_data_with_preparse_data);
13537 1132 : Goto(&check_is_uncompiled_data_without_preparse_data);
13538 : BIND(&check_is_uncompiled_data_without_preparse_data);
13539 2264 : sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
13540 1132 : Goto(if_compile_lazy ? if_compile_lazy : &done);
13541 :
13542 : // IsFunctionTemplateInfo: API call
13543 : BIND(&check_is_function_template_info);
13544 2264 : sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall));
13545 1132 : Goto(&done);
13546 :
13547 : // IsInterpreterData: Interpret bytecode
13548 : BIND(&check_is_interpreter_data);
13549 : // This is the default branch, so assert that we have the expected data type.
13550 : CSA_ASSERT(this,
13551 : Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE)));
13552 : sfi_code = CAST(LoadObjectField(
13553 : CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset));
13554 1132 : Goto(&done);
13555 :
13556 : BIND(&done);
13557 1132 : return sfi_code.value();
13558 : }
13559 :
13560 1020 : Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
13561 : Node* shared_info,
13562 : Node* context) {
 : // Allocates and fully initializes a JSFunction (without a prototype
 : // slot) for |shared_info| running in |context|, using |map|. The code
 : // field comes from GetSharedFunctionInfoCode(shared_info). The map must
 : // be a non-constructor map without a prototype slot (debug-asserted).
13563 : CSA_SLOW_ASSERT(this, IsMap(map));
13564 :
13565 2040 : Node* const code = GetSharedFunctionInfoCode(shared_info);
13566 :
13567 : // TODO(ishell): All the callers of this function pass map loaded from
13568 : // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove
13569 : // map parameter.
13570 : CSA_ASSERT(this, Word32BinaryNot(IsConstructorMap(map)));
13571 : CSA_ASSERT(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)));
13572 : Node* const fun = Allocate(JSFunction::kSizeWithoutPrototype);
13573 : STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
13574 : StoreMapNoWriteBarrier(fun, map);
13575 : StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
13576 1020 : RootIndex::kEmptyFixedArray);
13577 : StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
13578 1020 : RootIndex::kEmptyFixedArray);
13579 : StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
13580 1020 : RootIndex::kManyClosuresCell);
13581 : StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
13582 : shared_info);
13583 : StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
13584 : StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
13585 1020 : return fun;
13586 : }
13587 :
13588 0 : Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
13589 : StackFrame::Type frame_type) {
 : // Compares a frame marker word for equality against the marker encoding
 : // of |frame_type|.
13590 0 : return WordEqual(marker_or_function,
13591 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13592 : }
13593 :
13594 0 : Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
13595 : StackFrame::Type frame_type) {
 : // Negated counterpart of MarkerIsFrameType.
13596 0 : return WordNotEqual(marker_or_function,
13597 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13598 : }
13599 :
13600 448 : void CodeStubAssembler::CheckPrototypeEnumCache(Node* receiver,
13601 : Node* receiver_map,
13602 : Label* if_fast,
13603 : Label* if_slow) {
 : // Walks the prototype chain starting at |receiver|, jumping to
 : // |if_fast| once a null prototype is reached with every object on the
 : // chain having no elements (and, for every object after the receiver
 : // itself, an enum length of zero); jumps to |if_slow| otherwise.
13604 896 : VARIABLE(var_object, MachineRepresentation::kTagged, receiver);
13605 896 : VARIABLE(var_object_map, MachineRepresentation::kTagged, receiver_map);
13606 :
13607 1344 : Label loop(this, {&var_object, &var_object_map}), done_loop(this);
13608 448 : Goto(&loop);
13609 : BIND(&loop);
13610 : {
13611 : // Check that there are no elements on the current {object}.
13612 448 : Label if_no_elements(this);
13613 448 : Node* object = var_object.value();
13614 448 : Node* object_map = var_object_map.value();
13615 :
13616 : // The following relies on the elements only aliasing with JSProxy::target,
13617 : // which is a Javascript value and hence cannot be confused with an elements
13618 : // backing store.
13619 : STATIC_ASSERT(static_cast<int>(JSObject::kElementsOffset) ==
13620 : static_cast<int>(JSProxy::kTargetOffset));
13621 : Node* object_elements = LoadObjectField(object, JSObject::kElementsOffset);
13622 896 : GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
13623 896 : GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);
13624 :
13625 : // It might still be an empty JSArray.
13626 896 : GotoIfNot(IsJSArrayMap(object_map), if_slow);
13627 : Node* object_length = LoadJSArrayLength(object);
13628 896 : Branch(WordEqual(object_length, SmiConstant(0)), &if_no_elements, if_slow);
13629 :
13630 : // Continue with the {object}s prototype.
13631 : BIND(&if_no_elements);
13632 : object = LoadMapPrototype(object_map);
13633 896 : GotoIf(IsNull(object), if_fast);
13634 :
13635 : // For all {object}s but the {receiver}, check that the cache is empty.
13636 448 : var_object.Bind(object);
13637 : object_map = LoadMap(object);
13638 448 : var_object_map.Bind(object_map);
13639 448 : Node* object_enum_length = LoadMapEnumLength(object_map);
13640 1344 : Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
13641 : }
13642 448 : }
13643 :
13644 224 : Node* CodeStubAssembler::CheckEnumCache(Node* receiver, Label* if_empty,
13645 : Label* if_runtime) {
 : // Checks whether for-in iteration over |receiver| can use the fast
 : // path. Returns the receiver's map after verifying either a valid enum
 : // cache or an empty dictionary-mode receiver (in which case control
 : // jumps straight to |if_empty|). Jumps to |if_runtime| whenever a
 : // runtime call is needed instead.
13646 448 : Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
13647 : Node* receiver_map = LoadMap(receiver);
13648 :
13649 : // Check if the enum length field of the {receiver} is properly initialized,
13650 : // indicating that there is an enum cache.
13651 224 : Node* receiver_enum_length = LoadMapEnumLength(receiver_map);
13652 448 : Branch(WordEqual(receiver_enum_length,
13653 448 : IntPtrConstant(kInvalidEnumCacheSentinel)),
13654 224 : &if_no_cache, &if_cache);
13655 :
13656 : BIND(&if_no_cache);
13657 : {
13658 : // Avoid runtime-call for empty dictionary receivers.
13659 448 : GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
13660 224 : TNode<NameDictionary> properties = CAST(LoadSlowProperties(receiver));
13661 : TNode<Smi> length = GetNumberOfElements(properties);
13662 448 : GotoIfNot(WordEqual(length, SmiConstant(0)), if_runtime);
13663 : // Check that there are no elements on the {receiver} and its prototype
13664 : // chain. Given that we do not create an EnumCache for dict-mode objects,
13665 : // directly jump to {if_empty} if there are no elements and no properties
13666 : // on the {receiver}.
13667 224 : CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
13668 : }
13669 :
13670 : // Check that there are no elements on the fast {receiver} and its
13671 : // prototype chain.
13672 : BIND(&if_cache);
13673 224 : CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);
13674 :
13675 : BIND(&if_fast);
13676 224 : return receiver_map;
13677 : }
13678 :
13679 7784 : TNode<Object> CodeStubAssembler::GetArgumentValue(
13680 : BaseBuiltinsFromDSLAssembler::Arguments args, TNode<IntPtrT> index) {
 : // Torque interface: loads argument |index| from |args|, falling back
 : // to GetOptionalArgumentValue's default when index >= argc.
13681 7784 : return CodeStubArguments(this, args).GetOptionalArgumentValue(index);
13682 : }
13683 :
13684 4312 : BaseBuiltinsFromDSLAssembler::Arguments CodeStubAssembler::GetFrameArguments(
13685 : TNode<RawPtrT> frame, TNode<IntPtrT> argc) {
 : // Torque interface: materializes the |argc| arguments of |frame| as a
 : // Torque Arguments struct (INTPTR parameter mode).
13686 : return CodeStubArguments(this, argc, frame, INTPTR_PARAMETERS)
13687 8624 : .GetTorqueArguments();
13688 : }
13689 :
13690 0 : void CodeStubAssembler::Print(const char* s) {
 : // Debug helper: emits a runtime call that prints |s| followed by a
 : // newline.
13691 0 : std::string formatted(s);
13692 : formatted += "\n";
13693 : CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13694 0 : StringConstant(formatted.c_str()));
13695 0 : }
13696 :
13697 0 : void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
 : // Debug helper: emits runtime calls that print "prefix: " (when a
 : // prefix is given) followed by a debug dump of |tagged_value|.
13698 0 : if (prefix != nullptr) {
13699 0 : std::string formatted(prefix);
13700 : formatted += ": ";
 : // The prefix string is created at compile time in old space so the
 : // generated code can embed it as a HeapConstant.
13701 : Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
13702 0 : formatted.c_str(), AllocationType::kOld);
13703 : CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13704 : HeapConstant(string));
13705 : }
13706 : CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
13707 0 : }
13708 :
13709 19432 : void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
 : // Emits a stack-limit check: when the stack pointer is at or below the
 : // isolate's stack limit, calls Runtime::kStackGuard and then continues.
13710 38864 : Label ok(this), stack_check_interrupt(this, Label::kDeferred);
13711 :
13712 : // The instruction sequence below is carefully crafted to hit our pattern
13713 : // matcher for stack checks within instruction selection.
13714 : // See StackCheckMatcher::Matched and JSGenericLowering::LowerJSStackCheck.
13715 :
13716 19432 : TNode<UintPtrT> sp = UncheckedCast<UintPtrT>(LoadStackPointer());
13717 : TNode<UintPtrT> stack_limit = UncheckedCast<UintPtrT>(Load(
13718 : MachineType::Pointer(),
13719 38864 : ExternalConstant(ExternalReference::address_of_stack_limit(isolate()))));
 : // The stack grows downward, so the check passes while limit < sp.
13720 19432 : TNode<BoolT> sp_within_limit = UintPtrLessThan(stack_limit, sp);
13721 :
13722 19432 : Branch(sp_within_limit, &ok, &stack_check_interrupt);
13723 :
13724 : BIND(&stack_check_interrupt);
13725 : CallRuntime(Runtime::kStackGuard, context);
13726 19432 : Goto(&ok);
13727 :
13728 : BIND(&ok);
13729 19432 : }
13730 :
13731 744 : void CodeStubAssembler::InitializeFunctionContext(Node* native_context,
13732 : Node* context, int slots) {
 : // Initializes the header of a freshly allocated function context of
 : // |slots| total slots: map, length, scope info (copied from the native
 : // context), previous (undefined), extension (the hole), and the native
 : // context back-pointer. No write barriers are needed for a new object.
13733 : DCHECK_GE(slots, Context::MIN_CONTEXT_SLOTS);
13734 744 : StoreMapNoWriteBarrier(context, RootIndex::kFunctionContextMap);
13735 : StoreObjectFieldNoWriteBarrier(context, FixedArray::kLengthOffset,
13736 1488 : SmiConstant(slots));
13737 :
13738 : Node* const empty_scope_info =
13739 1488 : LoadContextElement(native_context, Context::SCOPE_INFO_INDEX);
13740 744 : StoreContextElementNoWriteBarrier(context, Context::SCOPE_INFO_INDEX,
13741 744 : empty_scope_info);
13742 744 : StoreContextElementNoWriteBarrier(context, Context::PREVIOUS_INDEX,
13743 744 : UndefinedConstant());
13744 744 : StoreContextElementNoWriteBarrier(context, Context::EXTENSION_INDEX,
13745 744 : TheHoleConstant());
13746 744 : StoreContextElementNoWriteBarrier(context, Context::NATIVE_CONTEXT_INDEX,
13747 744 : native_context);
13748 744 : }
13749 :
13750 112 : TNode<JSArray> CodeStubAssembler::ArrayCreate(TNode<Context> context,
13751 : TNode<Number> length) {
 : // Creates a new JSArray of the given |length| (asserted non-negative).
 : // Lengths at or above kInitialMaxFastElementArray (including any
 : // non-Smi length) are delegated to Runtime::kNewArray; smaller lengths
 : // get a packed-Smi array allocated inline.
13752 112 : TVARIABLE(JSArray, array);
13753 112 : Label allocate_js_array(this);
13754 :
13755 112 : Label done(this), next(this), runtime(this, Label::kDeferred);
13756 112 : TNode<Smi> limit = SmiConstant(JSArray::kInitialMaxFastElementArray);
13757 : CSA_ASSERT_BRANCH(this, [=](Label* ok, Label* not_ok) {
13758 : BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
13759 : SmiConstant(0), ok, not_ok);
13760 : });
13761 : // This check also transitively covers the case where length is too big
13762 : // to be representable by a SMI and so is not usable with
13763 : // AllocateJSArray.
13764 : BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
13765 112 : limit, &runtime, &next);
13766 :
13767 : BIND(&runtime);
13768 : {
13769 : TNode<Context> native_context = LoadNativeContext(context);
13770 : TNode<JSFunction> array_function =
13771 112 : CAST(LoadContextElement(native_context, Context::ARRAY_FUNCTION_INDEX));
13772 : array = CAST(CallRuntime(Runtime::kNewArray, context, array_function,
13773 : length, array_function, UndefinedConstant()));
13774 112 : Goto(&done);
13775 : }
13776 :
13777 : BIND(&next);
13778 : CSA_ASSERT(this, TaggedIsSmi(length));
13779 :
13780 112 : TNode<Map> array_map = CAST(LoadContextElement(
13781 : context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX));
13782 :
13783 : // TODO(delphick): Consider using
13784 : // AllocateUninitializedJSArrayWithElements to avoid initializing an
13785 : // array and then writing over it.
13786 224 : array =
13787 : AllocateJSArray(PACKED_SMI_ELEMENTS, array_map, length, SmiConstant(0),
13788 : nullptr, ParameterMode::SMI_PARAMETERS);
13789 112 : Goto(&done);
13790 :
13791 : BIND(&done);
13792 112 : return array.value();
13793 : }
13794 :
13795 112 : void CodeStubAssembler::SetPropertyLength(TNode<Context> context,
13796 : TNode<Object> array,
13797 : TNode<Number> length) {
 : // Sets array.length to |length| via SetPropertyStrict, skipping the
 : // store entirely when |array| is a fast JSArray whose length already
 : // equals |length| (the store would be unobservable in that case).
13798 224 : Label fast(this), runtime(this), done(this);
13799 : // There's no need to set the length, if
13800 : // 1) the array is a fast JS array and
13801 : // 2) the new length is equal to the old length.
13802 : // as the set is not observable. Otherwise fall back to the run-time.
13803 :
13804 : // 1) Check that the array has fast elements.
13805 : // TODO(delphick): Consider changing this since it does an an unnecessary
13806 : // check for SMIs.
13807 : // TODO(delphick): Also we could hoist this to after the array construction
13808 : // and copy the args into array in the same way as the Array constructor.
13809 112 : BranchIfFastJSArray(array, context, &fast, &runtime);
13810 :
13811 : BIND(&fast);
13812 : {
13813 : TNode<JSArray> fast_array = CAST(array);
13814 :
13815 112 : TNode<Smi> length_smi = CAST(length);
13816 112 : TNode<Smi> old_length = LoadFastJSArrayLength(fast_array);
13817 : CSA_ASSERT(this, TaggedIsPositiveSmi(old_length));
13818 :
13819 : // 2) If the created array's length matches the required length, then
13820 : // there's nothing else to do. Otherwise use the runtime to set the
13821 : // property as that will insert holes into excess elements or shrink
13822 : // the backing store as appropriate.
13823 224 : Branch(SmiNotEqual(length_smi, old_length), &runtime, &done);
13824 : }
13825 :
13826 : BIND(&runtime);
13827 : {
13828 : SetPropertyStrict(context, array, CodeStubAssembler::LengthStringConstant(),
13829 : length);
13830 112 : Goto(&done);
13831 : }
13832 :
13833 : BIND(&done);
13834 112 : }
13835 :
13836 224 : void CodeStubAssembler::GotoIfInitialPrototypePropertyModified(
13837 : TNode<Map> object_map, TNode<Map> initial_prototype_map, int descriptor,
13838 : RootIndex field_name_root_index, Label* if_modified) {
 : // Single-property convenience wrapper around
 : // GotoIfInitialPrototypePropertiesModified.
13839 : DescriptorIndexAndName index_name{descriptor, field_name_root_index};
13840 224 : GotoIfInitialPrototypePropertiesModified(
13841 : object_map, initial_prototype_map,
13842 224 : Vector<DescriptorIndexAndName>(&index_name, 1), if_modified);
13843 224 : }
13844 :
13845 1176 : void CodeStubAssembler::GotoIfInitialPrototypePropertiesModified(
13846 : TNode<Map> object_map, TNode<Map> initial_prototype_map,
13847 : Vector<DescriptorIndexAndName> properties, Label* if_modified) {
 : // Jumps to |if_modified| unless object_map's prototype still has its
 : // initial map and (with constant field tracking) each listed property
 : // is still marked const in the prototype's descriptor array.
13848 : TNode<Map> prototype_map = LoadMap(LoadMapPrototype(object_map));
13849 2352 : GotoIfNot(WordEqual(prototype_map, initial_prototype_map), if_modified);
13850 :
13851 : if (FLAG_track_constant_fields) {
13852 : // With constant field tracking, we need to make sure that important
13853 : // properties in the prototype has not been tampered with. We do this by
13854 : // checking that their slots in the prototype's descriptor array are still
13855 : // marked as const.
13856 1176 : TNode<DescriptorArray> descriptors = LoadMapDescriptors(prototype_map);
13857 :
 : // The details words are and-ed together so that a single constness
 : // check at the end covers all requested properties.
13858 : TNode<Uint32T> combined_details;
13859 4312 : for (int i = 0; i < properties.length(); i++) {
13860 : // Assert the descriptor index is in-bounds.
13861 3136 : int descriptor = properties[i].descriptor_index;
13862 : CSA_ASSERT(this, Int32LessThan(Int32Constant(descriptor),
13863 : LoadNumberOfDescriptors(descriptors)));
13864 : // Assert that the name is correct. This essentially checks that
13865 : // the descriptor index corresponds to the insertion order in
13866 : // the bootstrapper.
13867 : CSA_ASSERT(this,
13868 : WordEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
13869 : LoadRoot(properties[i].name_root_index)));
13870 :
13871 : TNode<Uint32T> details =
13872 1568 : DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
13873 1568 : if (i == 0) {
13874 : combined_details = details;
13875 : } else {
13876 392 : combined_details = Unsigned(Word32And(combined_details, details));
13877 : }
13878 : }
13879 :
13880 : TNode<Uint32T> constness =
13881 : DecodeWord32<PropertyDetails::ConstnessField>(combined_details);
13882 :
13883 1176 : GotoIfNot(
13884 2352 : Word32Equal(constness,
13885 2352 : Int32Constant(static_cast<int>(PropertyConstness::kConst))),
13886 1176 : if_modified);
13887 : }
13888 1176 : }
13889 :
13890 224 : TNode<String> CodeStubAssembler::TaggedToDirectString(TNode<Object> value,
13891 : Label* fail) {
 : // Casts |value| to a String that can be accessed directly, jumping to
 : // |fail| when ToDirectStringAssembler cannot unpack it or expose a raw
 : // data pointer (exact bailout conditions are defined by TryToDirect /
 : // PointerToData — see ToDirectStringAssembler).
13892 448 : ToDirectStringAssembler to_direct(state(), value);
13893 224 : to_direct.TryToDirect(fail);
13894 : to_direct.PointerToData(fail);
13895 224 : return CAST(value);
13896 : }
13897 :
13898 : } // namespace internal
13899 59480 : } // namespace v8
|