Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/code-stub-assembler.h"
6 :
7 : #include "src/code-factory.h"
8 : #include "src/counters.h"
9 : #include "src/frames-inl.h"
10 : #include "src/frames.h"
11 : #include "src/objects/api-callbacks.h"
12 : #include "src/objects/cell.h"
13 : #include "src/objects/descriptor-array.h"
14 : #include "src/objects/heap-number.h"
15 : #include "src/objects/oddball.h"
16 : #include "src/objects/ordered-hash-table-inl.h"
17 : #include "src/objects/property-cell.h"
18 : #include "src/wasm/wasm-objects.h"
19 :
20 : namespace v8 {
21 : namespace internal {
22 :
23 : using compiler::Node;
24 : template <class T>
25 : using TNode = compiler::TNode<T>;
26 : template <class T>
27 : using SloppyTNode = compiler::SloppyTNode<T>;
28 :
29 458764 : CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
30 : : compiler::CodeAssembler(state), BaseBuiltinsFromDSLAssembler(state) {
31 : if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
32 : HandleBreakOnNode();
33 : }
34 458764 : }
35 :
36 0 : void CodeStubAssembler::HandleBreakOnNode() {
37 : // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
38 : // string specifying the name of a stub and NODE is number specifying node id.
39 0 : const char* name = state()->name();
40 0 : size_t name_length = strlen(name);
41 0 : if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
42 : // Different name.
43 0 : return;
44 : }
45 0 : size_t option_length = strlen(FLAG_csa_trap_on_node);
46 0 : if (option_length < name_length + 2 ||
47 0 : FLAG_csa_trap_on_node[name_length] != ',') {
48 : // Option is too short.
49 : return;
50 : }
51 0 : const char* start = &FLAG_csa_trap_on_node[name_length + 1];
52 : char* end;
53 0 : int node_id = static_cast<int>(strtol(start, &end, 10));
54 0 : if (start == end) {
55 : // Bad node id.
56 : return;
57 : }
58 0 : BreakOnNode(node_id);
59 : }
60 :
61 0 : void CodeStubAssembler::Assert(const BranchGenerator& branch,
62 : const char* message, const char* file, int line,
63 : Node* extra_node1, const char* extra_node1_name,
64 : Node* extra_node2, const char* extra_node2_name,
65 : Node* extra_node3, const char* extra_node3_name,
66 : Node* extra_node4, const char* extra_node4_name,
67 : Node* extra_node5,
68 : const char* extra_node5_name) {
69 : #if defined(DEBUG)
70 : if (FLAG_debug_code) {
71 : Check(branch, message, file, line, extra_node1, extra_node1_name,
72 : extra_node2, extra_node2_name, extra_node3, extra_node3_name,
73 : extra_node4, extra_node4_name, extra_node5, extra_node5_name);
74 : }
75 : #endif
76 0 : }
77 :
78 0 : void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
79 : const char* message, const char* file, int line,
80 : Node* extra_node1, const char* extra_node1_name,
81 : Node* extra_node2, const char* extra_node2_name,
82 : Node* extra_node3, const char* extra_node3_name,
83 : Node* extra_node4, const char* extra_node4_name,
84 : Node* extra_node5,
85 : const char* extra_node5_name) {
86 : #if defined(DEBUG)
87 : if (FLAG_debug_code) {
88 : Check(condition_body, message, file, line, extra_node1, extra_node1_name,
89 : extra_node2, extra_node2_name, extra_node3, extra_node3_name,
90 : extra_node4, extra_node4_name, extra_node5, extra_node5_name);
91 : }
92 : #endif
93 0 : }
94 :
95 : #ifdef DEBUG
96 : namespace {
97 : void MaybePrintNodeWithName(CodeStubAssembler* csa, Node* node,
98 : const char* node_name) {
99 : if (node != nullptr) {
100 : csa->CallRuntime(Runtime::kPrintWithNameForAssert, csa->SmiConstant(0),
101 : csa->StringConstant(node_name), node);
102 : }
103 : }
104 : } // namespace
105 : #endif
106 :
107 0 : void CodeStubAssembler::Check(const BranchGenerator& branch,
108 : const char* message, const char* file, int line,
109 : Node* extra_node1, const char* extra_node1_name,
110 : Node* extra_node2, const char* extra_node2_name,
111 : Node* extra_node3, const char* extra_node3_name,
112 : Node* extra_node4, const char* extra_node4_name,
113 : Node* extra_node5, const char* extra_node5_name) {
114 0 : Label ok(this);
115 0 : Label not_ok(this, Label::kDeferred);
116 0 : if (message != nullptr && FLAG_code_comments) {
117 0 : Comment("[ Assert: ", message);
118 : } else {
119 0 : Comment("[ Assert");
120 : }
121 0 : branch(&ok, ¬_ok);
122 :
123 : BIND(¬_ok);
124 : FailAssert(message, file, line, extra_node1, extra_node1_name, extra_node2,
125 : extra_node2_name, extra_node3, extra_node3_name, extra_node4,
126 0 : extra_node4_name, extra_node5, extra_node5_name);
127 :
128 : BIND(&ok);
129 0 : Comment("] Assert");
130 0 : }
131 :
132 0 : void CodeStubAssembler::Check(const NodeGenerator& condition_body,
133 : const char* message, const char* file, int line,
134 : Node* extra_node1, const char* extra_node1_name,
135 : Node* extra_node2, const char* extra_node2_name,
136 : Node* extra_node3, const char* extra_node3_name,
137 : Node* extra_node4, const char* extra_node4_name,
138 : Node* extra_node5, const char* extra_node5_name) {
139 0 : BranchGenerator branch = [=](Label* ok, Label* not_ok) {
140 0 : Node* condition = condition_body();
141 : DCHECK_NOT_NULL(condition);
142 0 : Branch(condition, ok, not_ok);
143 0 : };
144 :
145 : Check(branch, message, file, line, extra_node1, extra_node1_name, extra_node2,
146 : extra_node2_name, extra_node3, extra_node3_name, extra_node4,
147 0 : extra_node4_name, extra_node5, extra_node5_name);
148 0 : }
149 :
150 1960 : void CodeStubAssembler::FastCheck(TNode<BoolT> condition) {
151 1960 : Label ok(this);
152 1960 : GotoIf(condition, &ok);
153 1960 : DebugBreak();
154 1960 : Goto(&ok);
155 1960 : BIND(&ok);
156 1960 : }
157 :
158 525 : void CodeStubAssembler::FailAssert(
159 : const char* message, const char* file, int line, Node* extra_node1,
160 : const char* extra_node1_name, Node* extra_node2,
161 : const char* extra_node2_name, Node* extra_node3,
162 : const char* extra_node3_name, Node* extra_node4,
163 : const char* extra_node4_name, Node* extra_node5,
164 : const char* extra_node5_name) {
165 : DCHECK_NOT_NULL(message);
166 : char chars[1024];
167 : Vector<char> buffer(chars);
168 525 : if (file != nullptr) {
169 525 : SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
170 : } else {
171 0 : SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
172 : }
173 1050 : Node* message_node = StringConstant(&(buffer[0]));
174 :
175 : #ifdef DEBUG
176 : // Only print the extra nodes in debug builds.
177 : MaybePrintNodeWithName(this, extra_node1, extra_node1_name);
178 : MaybePrintNodeWithName(this, extra_node2, extra_node2_name);
179 : MaybePrintNodeWithName(this, extra_node3, extra_node3_name);
180 : MaybePrintNodeWithName(this, extra_node4, extra_node4_name);
181 : MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
182 : #endif
183 :
184 525 : DebugAbort(message_node);
185 525 : Unreachable();
186 525 : }
187 :
188 44319 : Node* CodeStubAssembler::SelectImpl(TNode<BoolT> condition,
189 : const NodeGenerator& true_body,
190 : const NodeGenerator& false_body,
191 : MachineRepresentation rep) {
192 44319 : VARIABLE(value, rep);
193 44319 : Label vtrue(this), vfalse(this), end(this);
194 44319 : Branch(condition, &vtrue, &vfalse);
195 :
196 : BIND(&vtrue);
197 : {
198 44319 : value.Bind(true_body());
199 44319 : Goto(&end);
200 : }
201 : BIND(&vfalse);
202 : {
203 44319 : value.Bind(false_body());
204 44319 : Goto(&end);
205 : }
206 :
207 : BIND(&end);
208 88638 : return value.value();
209 : }
210 :
211 224 : TNode<Int32T> CodeStubAssembler::SelectInt32Constant(
212 : SloppyTNode<BoolT> condition, int true_value, int false_value) {
213 : return SelectConstant<Int32T>(condition, Int32Constant(true_value),
214 224 : Int32Constant(false_value));
215 : }
216 :
217 0 : TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(
218 : SloppyTNode<BoolT> condition, int true_value, int false_value) {
219 : return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
220 0 : IntPtrConstant(false_value));
221 : }
222 :
223 2875 : TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
224 : SloppyTNode<BoolT> condition) {
225 2875 : return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
226 : }
227 :
228 4144 : TNode<Smi> CodeStubAssembler::SelectSmiConstant(SloppyTNode<BoolT> condition,
229 : Smi true_value,
230 : Smi false_value) {
231 : return SelectConstant<Smi>(condition, SmiConstant(true_value),
232 4144 : SmiConstant(false_value));
233 : }
234 :
235 2352 : TNode<Object> CodeStubAssembler::NoContextConstant() {
236 120446 : return SmiConstant(Context::kNoContext);
237 : }
238 :
239 : #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
240 : compiler::TNode<std::remove_pointer<std::remove_reference<decltype( \
241 : std::declval<Heap>().rootAccessorName())>::type>::type> \
242 : CodeStubAssembler::name##Constant() { \
243 : return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
244 : std::declval<Heap>().rootAccessorName())>::type>::type>( \
245 : LoadRoot(RootIndex::k##rootIndexName)); \
246 : }
247 56 : HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR);
248 : #undef HEAP_CONSTANT_ACCESSOR
249 :
250 : #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
251 : compiler::TNode<std::remove_pointer<std::remove_reference<decltype( \
252 : std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type> \
253 : CodeStubAssembler::name##Constant() { \
254 : return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
255 : std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>( \
256 : LoadRoot(RootIndex::k##rootIndexName)); \
257 : }
258 284957 : HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR);
259 : #undef HEAP_CONSTANT_ACCESSOR
260 :
261 : #define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name) \
262 : compiler::TNode<BoolT> CodeStubAssembler::Is##name( \
263 : SloppyTNode<Object> value) { \
264 : return WordEqual(value, name##Constant()); \
265 : } \
266 : compiler::TNode<BoolT> CodeStubAssembler::IsNot##name( \
267 : SloppyTNode<Object> value) { \
268 : return WordNotEqual(value, name##Constant()); \
269 : }
270 301418 : HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST);
271 : #undef HEAP_CONSTANT_TEST
272 :
273 120727 : Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
274 120727 : if (mode == SMI_PARAMETERS) {
275 15728 : return SmiConstant(value);
276 : } else {
277 : DCHECK_EQ(INTPTR_PARAMETERS, mode);
278 225726 : return IntPtrConstant(value);
279 : }
280 : }
281 :
282 2800 : bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
283 : ParameterMode mode) {
284 : int32_t constant_test;
285 2800 : Smi smi_test;
286 2800 : if (mode == INTPTR_PARAMETERS) {
287 1512 : if (ToInt32Constant(test, constant_test) && constant_test == 0) {
288 : return true;
289 : }
290 : } else {
291 : DCHECK_EQ(mode, SMI_PARAMETERS);
292 1792 : if (ToSmiConstant(test, &smi_test) && smi_test->value() == 0) {
293 : return true;
294 : }
295 : }
296 : return false;
297 : }
298 :
299 4077 : bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
300 : int* value,
301 : ParameterMode mode) {
302 : int32_t int32_constant;
303 4077 : if (mode == INTPTR_PARAMETERS) {
304 2886 : if (ToInt32Constant(maybe_constant, int32_constant)) {
305 682 : *value = int32_constant;
306 682 : return true;
307 : }
308 : } else {
309 : DCHECK_EQ(mode, SMI_PARAMETERS);
310 1191 : Smi smi_constant;
311 1191 : if (ToSmiConstant(maybe_constant, &smi_constant)) {
312 5 : *value = Smi::ToInt(smi_constant);
313 5 : return true;
314 : }
315 : }
316 : return false;
317 : }
318 :
319 1013 : TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
320 : TNode<IntPtrT> value) {
321 1013 : Comment("IntPtrRoundUpToPowerOfTwo32");
322 : CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
323 1013 : value = Signed(IntPtrSub(value, IntPtrConstant(1)));
324 6078 : for (int i = 1; i <= 16; i *= 2) {
325 10130 : value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
326 : }
327 2026 : return Signed(IntPtrAdd(value, IntPtrConstant(1)));
328 : }
329 :
330 0 : Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
331 0 : if (mode == SMI_PARAMETERS) {
332 0 : return TaggedIsSmi(value);
333 : } else {
334 0 : return Int32Constant(1);
335 : }
336 : }
337 :
338 0 : TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
339 : // value && !(value & (value - 1))
340 : return WordEqual(
341 : Select<IntPtrT>(
342 0 : WordEqual(value, IntPtrConstant(0)),
343 0 : [=] { return IntPtrConstant(1); },
344 0 : [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
345 0 : IntPtrConstant(0));
346 : }
347 :
348 56 : TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
349 112 : Node* one = Float64Constant(1.0);
350 112 : Node* one_half = Float64Constant(0.5);
351 :
352 : Label return_x(this);
353 :
354 : // Round up {x} towards Infinity.
355 168 : VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));
356 :
357 168 : GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
358 112 : &return_x);
359 168 : var_x.Bind(Float64Sub(var_x.value(), one));
360 56 : Goto(&return_x);
361 :
362 : BIND(&return_x);
363 112 : return TNode<Float64T>::UncheckedCast(var_x.value());
364 : }
365 :
366 112 : TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
367 112 : if (IsFloat64RoundUpSupported()) {
368 110 : return Float64RoundUp(x);
369 : }
370 :
371 4 : Node* one = Float64Constant(1.0);
372 4 : Node* zero = Float64Constant(0.0);
373 4 : Node* two_52 = Float64Constant(4503599627370496.0E0);
374 4 : Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
375 :
376 2 : VARIABLE(var_x, MachineRepresentation::kFloat64, x);
377 2 : Label return_x(this), return_minus_x(this);
378 :
379 : // Check if {x} is greater than zero.
380 2 : Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
381 2 : Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
382 4 : &if_xnotgreaterthanzero);
383 :
384 : BIND(&if_xgreaterthanzero);
385 : {
386 : // Just return {x} unless it's in the range ]0,2^52[.
387 4 : GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
388 :
389 : // Round positive {x} towards Infinity.
390 6 : var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
391 6 : GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
392 6 : var_x.Bind(Float64Add(var_x.value(), one));
393 2 : Goto(&return_x);
394 : }
395 :
396 : BIND(&if_xnotgreaterthanzero);
397 : {
398 : // Just return {x} unless it's in the range ]-2^52,0[
399 4 : GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
400 4 : GotoIfNot(Float64LessThan(x, zero), &return_x);
401 :
402 : // Round negated {x} towards Infinity and return the result negated.
403 4 : Node* minus_x = Float64Neg(x);
404 6 : var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
405 6 : GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
406 6 : var_x.Bind(Float64Sub(var_x.value(), one));
407 2 : Goto(&return_minus_x);
408 : }
409 :
410 : BIND(&return_minus_x);
411 6 : var_x.Bind(Float64Neg(var_x.value()));
412 2 : Goto(&return_x);
413 :
414 : BIND(&return_x);
415 4 : return TNode<Float64T>::UncheckedCast(var_x.value());
416 : }
417 :
418 119 : TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
419 119 : if (IsFloat64RoundDownSupported()) {
420 110 : return Float64RoundDown(x);
421 : }
422 :
423 18 : Node* one = Float64Constant(1.0);
424 18 : Node* zero = Float64Constant(0.0);
425 18 : Node* two_52 = Float64Constant(4503599627370496.0E0);
426 18 : Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
427 :
428 9 : VARIABLE(var_x, MachineRepresentation::kFloat64, x);
429 9 : Label return_x(this), return_minus_x(this);
430 :
431 : // Check if {x} is greater than zero.
432 9 : Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
433 9 : Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
434 18 : &if_xnotgreaterthanzero);
435 :
436 : BIND(&if_xgreaterthanzero);
437 : {
438 : // Just return {x} unless it's in the range ]0,2^52[.
439 18 : GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
440 :
441 : // Round positive {x} towards -Infinity.
442 27 : var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
443 27 : GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
444 27 : var_x.Bind(Float64Sub(var_x.value(), one));
445 9 : Goto(&return_x);
446 : }
447 :
448 : BIND(&if_xnotgreaterthanzero);
449 : {
450 : // Just return {x} unless it's in the range ]-2^52,0[
451 18 : GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
452 18 : GotoIfNot(Float64LessThan(x, zero), &return_x);
453 :
454 : // Round negated {x} towards -Infinity and return the result negated.
455 18 : Node* minus_x = Float64Neg(x);
456 27 : var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
457 27 : GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
458 18 : var_x.Bind(Float64Add(var_x.value(), one));
459 9 : Goto(&return_minus_x);
460 : }
461 :
462 : BIND(&return_minus_x);
463 18 : var_x.Bind(Float64Neg(var_x.value()));
464 9 : Goto(&return_x);
465 :
466 : BIND(&return_x);
467 18 : return TNode<Float64T>::UncheckedCast(var_x.value());
468 : }
469 :
470 392 : TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
471 392 : if (IsFloat64RoundTiesEvenSupported()) {
472 385 : return Float64RoundTiesEven(x);
473 : }
474 : // See ES#sec-touint8clamp for details.
475 14 : Node* f = Float64Floor(x);
476 21 : Node* f_and_half = Float64Add(f, Float64Constant(0.5));
477 :
478 7 : VARIABLE(var_result, MachineRepresentation::kFloat64);
479 7 : Label return_f(this), return_f_plus_one(this), done(this);
480 :
481 14 : GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
482 14 : GotoIf(Float64LessThan(x, f_and_half), &return_f);
483 : {
484 21 : Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
485 14 : Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
486 14 : &return_f_plus_one);
487 : }
488 :
489 : BIND(&return_f);
490 7 : var_result.Bind(f);
491 7 : Goto(&done);
492 :
493 : BIND(&return_f_plus_one);
494 21 : var_result.Bind(Float64Add(f, Float64Constant(1.0)));
495 7 : Goto(&done);
496 :
497 : BIND(&done);
498 14 : return TNode<Float64T>::UncheckedCast(var_result.value());
499 : }
500 :
501 341 : TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
502 341 : if (IsFloat64RoundTruncateSupported()) {
503 335 : return Float64RoundTruncate(x);
504 : }
505 :
506 12 : Node* one = Float64Constant(1.0);
507 12 : Node* zero = Float64Constant(0.0);
508 12 : Node* two_52 = Float64Constant(4503599627370496.0E0);
509 12 : Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
510 :
511 6 : VARIABLE(var_x, MachineRepresentation::kFloat64, x);
512 6 : Label return_x(this), return_minus_x(this);
513 :
514 : // Check if {x} is greater than 0.
515 6 : Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
516 6 : Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
517 12 : &if_xnotgreaterthanzero);
518 :
519 : BIND(&if_xgreaterthanzero);
520 : {
521 6 : if (IsFloat64RoundDownSupported()) {
522 0 : var_x.Bind(Float64RoundDown(x));
523 : } else {
524 : // Just return {x} unless it's in the range ]0,2^52[.
525 12 : GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
526 :
527 : // Round positive {x} towards -Infinity.
528 18 : var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
529 18 : GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
530 18 : var_x.Bind(Float64Sub(var_x.value(), one));
531 : }
532 6 : Goto(&return_x);
533 : }
534 :
535 : BIND(&if_xnotgreaterthanzero);
536 : {
537 6 : if (IsFloat64RoundUpSupported()) {
538 0 : var_x.Bind(Float64RoundUp(x));
539 0 : Goto(&return_x);
540 : } else {
541 : // Just return {x} unless its in the range ]-2^52,0[.
542 12 : GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
543 12 : GotoIfNot(Float64LessThan(x, zero), &return_x);
544 :
545 : // Round negated {x} towards -Infinity and return result negated.
546 12 : Node* minus_x = Float64Neg(x);
547 18 : var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
548 18 : GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
549 18 : var_x.Bind(Float64Sub(var_x.value(), one));
550 6 : Goto(&return_minus_x);
551 : }
552 : }
553 :
554 : BIND(&return_minus_x);
555 18 : var_x.Bind(Float64Neg(var_x.value()));
556 6 : Goto(&return_x);
557 :
558 : BIND(&return_x);
559 12 : return TNode<Float64T>::UncheckedCast(var_x.value());
560 : }
561 :
562 0 : TNode<BoolT> CodeStubAssembler::IsValidSmi(TNode<Smi> smi) {
563 : if (SmiValuesAre31Bits() && kSystemPointerSize == kInt64Size) {
564 : // Check that the Smi value is properly sign-extended.
565 : TNode<IntPtrT> value = Signed(BitcastTaggedToWord(smi));
566 : return WordEqual(value, ChangeInt32ToIntPtr(TruncateIntPtrToInt32(value)));
567 : }
568 0 : return Int32TrueConstant();
569 : }
570 :
571 0 : Node* CodeStubAssembler::SmiShiftBitsConstant() {
572 436500 : return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
573 : }
574 :
575 13614 : TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
576 13614 : TNode<IntPtrT> value_intptr = ChangeInt32ToIntPtr(value);
577 : TNode<Smi> smi =
578 27228 : BitcastWordToTaggedSigned(WordShl(value_intptr, SmiShiftBitsConstant()));
579 13614 : return smi;
580 : }
581 :
582 7135 : TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
583 : intptr_t constant_value;
584 7135 : if (ToIntPtrConstant(value, constant_value)) {
585 55 : return (static_cast<uintptr_t>(constant_value) <=
586 : static_cast<uintptr_t>(Smi::kMaxValue))
587 : ? Int32TrueConstant()
588 55 : : Int32FalseConstant();
589 : }
590 :
591 14160 : return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
592 : }
593 :
594 77219 : TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
595 : int32_t constant_value;
596 77219 : if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
597 6124 : return SmiConstant(constant_value);
598 : }
599 : TNode<Smi> smi =
600 142190 : BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
601 71095 : return smi;
602 : }
603 :
604 133847 : TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
605 : intptr_t constant_value;
606 133847 : if (ToIntPtrConstant(value, constant_value)) {
607 647 : return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
608 : }
609 266400 : return Signed(WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()));
610 : }
611 :
612 67681 : TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
613 67681 : TNode<IntPtrT> result = SmiUntag(value);
614 67681 : return TruncateIntPtrToInt32(result);
615 : }
616 :
617 46509 : TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
618 93018 : return ChangeInt32ToFloat64(SmiToInt32(value));
619 : }
620 :
621 2072 : TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
622 2072 : return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
623 : }
624 :
625 224 : TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
626 224 : return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
627 : }
628 :
629 4765 : TNode<IntPtrT> CodeStubAssembler::TryIntPtrAdd(TNode<IntPtrT> a,
630 : TNode<IntPtrT> b,
631 : Label* if_overflow) {
632 4765 : TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(a, b);
633 : TNode<BoolT> overflow = Projection<1>(pair);
634 4765 : GotoIf(overflow, if_overflow);
635 4765 : return Projection<0>(pair);
636 : }
637 :
638 4093 : TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
639 : Label* if_overflow) {
640 : if (SmiValuesAre32Bits()) {
641 : return BitcastWordToTaggedSigned(TryIntPtrAdd(
642 12279 : BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs), if_overflow));
643 : } else {
644 : DCHECK(SmiValuesAre31Bits());
645 : TNode<PairT<Int32T, BoolT>> pair =
646 : Int32AddWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
647 : TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
648 : TNode<BoolT> overflow = Projection<1>(pair);
649 : GotoIf(overflow, if_overflow);
650 : TNode<Int32T> result = Projection<0>(pair);
651 : return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
652 : }
653 : }
654 :
655 2693 : TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
656 : Label* if_overflow) {
657 : if (SmiValuesAre32Bits()) {
658 : TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(
659 8079 : BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
660 : TNode<BoolT> overflow = Projection<1>(pair);
661 2693 : GotoIf(overflow, if_overflow);
662 : TNode<IntPtrT> result = Projection<0>(pair);
663 2693 : return BitcastWordToTaggedSigned(result);
664 : } else {
665 : DCHECK(SmiValuesAre31Bits());
666 : TNode<PairT<Int32T, BoolT>> pair =
667 : Int32SubWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
668 : TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
669 : TNode<BoolT> overflow = Projection<1>(pair);
670 : GotoIf(overflow, if_overflow);
671 : TNode<Int32T> result = Projection<0>(pair);
672 : return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
673 : }
674 : }
675 :
676 453 : TNode<Number> CodeStubAssembler::NumberMax(SloppyTNode<Number> a,
677 : SloppyTNode<Number> b) {
678 : // TODO(danno): This could be optimized by specifically handling smi cases.
679 453 : TVARIABLE(Number, result);
680 453 : Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
681 453 : GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
682 453 : GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
683 : result = NanConstant();
684 453 : Goto(&done);
685 : BIND(&greater_than_equal_a);
686 : result = a;
687 453 : Goto(&done);
688 : BIND(&greater_than_equal_b);
689 : result = b;
690 453 : Goto(&done);
691 : BIND(&done);
692 453 : return result.value();
693 : }
694 :
695 509 : TNode<Number> CodeStubAssembler::NumberMin(SloppyTNode<Number> a,
696 : SloppyTNode<Number> b) {
697 : // TODO(danno): This could be optimized by specifically handling smi cases.
698 509 : TVARIABLE(Number, result);
699 509 : Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
700 509 : GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
701 509 : GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
702 : result = NanConstant();
703 509 : Goto(&done);
704 : BIND(&greater_than_equal_a);
705 : result = b;
706 509 : Goto(&done);
707 : BIND(&greater_than_equal_b);
708 : result = a;
709 509 : Goto(&done);
710 : BIND(&done);
711 509 : return result.value();
712 : }
713 :
714 392 : TNode<IntPtrT> CodeStubAssembler::ConvertToRelativeIndex(
715 : TNode<Context> context, TNode<Object> index, TNode<IntPtrT> length) {
716 392 : TVARIABLE(IntPtrT, result);
717 :
718 : TNode<Number> const index_int =
719 392 : ToInteger_Inline(context, index, CodeStubAssembler::kTruncateMinusZero);
720 392 : TNode<IntPtrT> zero = IntPtrConstant(0);
721 :
722 392 : Label done(this);
723 392 : Label if_issmi(this), if_isheapnumber(this, Label::kDeferred);
724 784 : Branch(TaggedIsSmi(index_int), &if_issmi, &if_isheapnumber);
725 :
726 : BIND(&if_issmi);
727 : {
728 : TNode<Smi> const index_smi = CAST(index_int);
729 1960 : result = Select<IntPtrT>(
730 784 : IntPtrLessThan(SmiUntag(index_smi), zero),
731 1176 : [=] { return IntPtrMax(IntPtrAdd(length, SmiUntag(index_smi)), zero); },
732 1176 : [=] { return IntPtrMin(SmiUntag(index_smi), length); });
733 392 : Goto(&done);
734 : }
735 :
736 : BIND(&if_isheapnumber);
737 : {
738 : // If {index} is a heap number, it is definitely out of bounds. If it is
739 : // negative, {index} = max({length} + {index}),0) = 0'. If it is positive,
740 : // set {index} to {length}.
741 : TNode<HeapNumber> const index_hn = CAST(index_int);
742 392 : TNode<Float64T> const float_zero = Float64Constant(0.);
743 392 : TNode<Float64T> const index_float = LoadHeapNumberValue(index_hn);
744 784 : result = SelectConstant<IntPtrT>(Float64LessThan(index_float, float_zero),
745 : zero, length);
746 392 : Goto(&done);
747 : }
748 : BIND(&done);
749 392 : return result.value();
750 : }
751 :
752 504 : TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
753 504 : TVARIABLE(Number, var_result);
754 504 : Label return_result(this, &var_result),
755 504 : return_minuszero(this, Label::kDeferred),
756 504 : return_nan(this, Label::kDeferred);
757 :
758 : // Untag {a} and {b}.
759 504 : TNode<Int32T> int_a = SmiToInt32(a);
760 504 : TNode<Int32T> int_b = SmiToInt32(b);
761 :
762 : // Return NaN if {b} is zero.
763 1512 : GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);
764 :
765 : // Check if {a} is non-negative.
766 504 : Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
767 1008 : Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
768 1008 : &if_aisnegative);
769 :
770 : BIND(&if_aisnotnegative);
771 : {
772 : // Fast case, don't need to check any other edge cases.
773 504 : TNode<Int32T> r = Int32Mod(int_a, int_b);
774 1008 : var_result = SmiFromInt32(r);
775 504 : Goto(&return_result);
776 : }
777 :
778 : BIND(&if_aisnegative);
779 : {
780 : if (SmiValuesAre32Bits()) {
781 : // Check if {a} is kMinInt and {b} is -1 (only relevant if the
782 : // kMinInt is actually representable as a Smi).
783 : Label join(this);
784 1512 : GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
785 1512 : GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
786 504 : Goto(&join);
787 504 : BIND(&join);
788 : }
789 :
790 : // Perform the integer modulus operation.
791 504 : TNode<Int32T> r = Int32Mod(int_a, int_b);
792 :
793 : // Check if {r} is zero, and if so return -0, because we have to
794 : // take the sign of the left hand side {a}, which is negative.
795 1512 : GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);
796 :
797 : // The remainder {r} can be outside the valid Smi range on 32bit
798 : // architectures, so we cannot just say SmiFromInt32(r) here.
799 1008 : var_result = ChangeInt32ToTagged(r);
800 504 : Goto(&return_result);
801 : }
802 :
803 : BIND(&return_minuszero);
804 : var_result = MinusZeroConstant();
805 504 : Goto(&return_result);
806 :
807 : BIND(&return_nan);
808 : var_result = NanConstant();
809 504 : Goto(&return_result);
810 :
811 : BIND(&return_result);
812 504 : return var_result.value();
813 : }
814 :
: // Multiplies two Smis and returns a Number: a tagged integer when the
: // 32-bit product fits, a freshly allocated HeapNumber on overflow, and
: // -0.0 when the product is zero but either operand is negative (JS
: // multiplication semantics).
 815 840 : TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
 816 840 : TVARIABLE(Number, var_result);
 817 1680 : VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
 818 1680 : VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
 819 840 : Label return_result(this, &var_result);
 820 :
 821 : // Both {a} and {b} are Smis. Convert them to integers and multiply.
 822 1680 : Node* lhs32 = SmiToInt32(a);
 823 1680 : Node* rhs32 = SmiToInt32(b);
 824 1680 : Node* pair = Int32MulWithOverflow(lhs32, rhs32);
 825 :
 826 840 : Node* overflow = Projection(1, pair);
 827 :
 828 : // Check if the multiplication overflowed.
 829 840 : Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
 830 840 : Branch(overflow, &if_overflow, &if_notoverflow);
 831 : BIND(&if_notoverflow);
 832 : {
 833 : // If the answer is zero, we may need to return -0.0, depending on the
 834 : // input.
 835 840 : Label answer_zero(this), answer_not_zero(this);
 836 840 : Node* answer = Projection(0, pair);
 837 1680 : Node* zero = Int32Constant(0);
 838 1680 : Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
 839 : BIND(&answer_not_zero);
 840 : {
 841 1680 : var_result = ChangeInt32ToTagged(answer);
 842 840 : Goto(&return_result);
 843 : }
 844 : BIND(&answer_zero);
 845 : {
: // A zero product must be -0.0 iff exactly one operand is negative;
: // since the other operand is then 0, OR-ing the inputs has the sign
: // bit set exactly in that case.
 846 1680 : Node* or_result = Word32Or(lhs32, rhs32);
 847 840 : Label if_should_be_negative_zero(this), if_should_be_zero(this);
 848 840 : Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
 849 1680 : &if_should_be_zero);
 850 : BIND(&if_should_be_negative_zero);
 851 : {
 852 : var_result = MinusZeroConstant();
 853 840 : Goto(&return_result);
 854 : }
 855 : BIND(&if_should_be_zero);
 856 : {
 857 1680 : var_result = SmiConstant(0);
 858 840 : Goto(&return_result);
 859 840 : }
 860 840 : }
 861 : }
 862 : BIND(&if_overflow);
 863 : {
: // Overflow: redo the multiplication in double precision and box the
: // result in a HeapNumber.
 864 1680 : var_lhs_float64.Bind(SmiToFloat64(a));
 865 1680 : var_rhs_float64.Bind(SmiToFloat64(b));
 866 3360 : Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
 867 1680 : var_result = AllocateHeapNumberWithValue(value);
 868 840 : Goto(&return_result);
 869 : }
 870 :
 871 : BIND(&return_result);
 872 840 : return var_result.value();
 873 : }
874 :
875 336 : TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
876 : Label* bailout) {
877 : // Both {a} and {b} are Smis. Bailout to floating point division if {divisor}
878 : // is zero.
879 672 : GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);
880 :
881 : // Do floating point division if {dividend} is zero and {divisor} is
882 : // negative.
883 336 : Label dividend_is_zero(this), dividend_is_not_zero(this);
884 336 : Branch(WordEqual(dividend, SmiConstant(0)), ÷nd_is_zero,
885 336 : ÷nd_is_not_zero);
886 :
887 : BIND(÷nd_is_zero);
888 : {
889 672 : GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
890 336 : Goto(÷nd_is_not_zero);
891 : }
892 : BIND(÷nd_is_not_zero);
893 :
894 336 : TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
895 336 : TNode<Int32T> untagged_dividend = SmiToInt32(dividend);
896 :
897 : // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
898 : // if the Smi size is 31) and {divisor} is -1.
899 336 : Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
900 672 : Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
901 672 : &divisor_is_minus_one, &divisor_is_not_minus_one);
902 :
903 : BIND(&divisor_is_minus_one);
904 : {
905 : GotoIf(Word32Equal(
906 : untagged_dividend,
907 672 : Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
908 672 : bailout);
909 336 : Goto(&divisor_is_not_minus_one);
910 : }
911 : BIND(&divisor_is_not_minus_one);
912 :
913 336 : TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
914 336 : TNode<Int32T> truncated = Signed(Int32Mul(untagged_result, untagged_divisor));
915 :
916 : // Do floating point division if the remainder is not 0.
917 672 : GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);
918 :
919 672 : return SmiFromInt32(untagged_result);
920 : }
921 :
: // Compares two Smis lexicographically (i.e. by their decimal string
: // representations) by calling out to a C function through an external
: // reference; returns the comparison result as a Smi.
 922 56 : TNode<Smi> CodeStubAssembler::SmiLexicographicCompare(TNode<Smi> x,
 923 : TNode<Smi> y) {
 924 : TNode<ExternalReference> smi_lexicographic_compare =
 925 56 : ExternalConstant(ExternalReference::smi_lexicographic_compare_function());
 926 : TNode<ExternalReference> isolate_ptr =
 927 56 : ExternalConstant(ExternalReference::isolate_address(isolate()));
 928 56 : return CAST(CallCFunction3(MachineType::AnyTagged(), MachineType::Pointer(),
 929 : MachineType::AnyTagged(), MachineType::AnyTagged(),
 930 : smi_lexicographic_compare, isolate_ptr, x, y));
 931 : }
932 :
: // Narrows a pointer-sized integer to 32 bits: an explicit truncation on
: // 64-bit targets, a free reinterpretation on 32-bit targets.
 933 93063 : TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
 934 : SloppyTNode<IntPtrT> value) {
 935 93063 : if (Is64()) {
 936 93063 : return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
 937 : }
 938 : return ReinterpretCast<Int32T>(value);
 939 : }
940 :
: // Tests the Smi tag bit of a tagged value: true iff the low kSmiTagMask
: // bits are zero. Overloads for strong Object and MaybeObject inputs.
 941 160451 : TNode<BoolT> CodeStubAssembler::TaggedIsSmi(SloppyTNode<Object> a) {
 942 160451 : return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
 943 320902 : IntPtrConstant(0));
 944 : }
 945 :
 946 1624 : TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
 947 : return WordEqual(
 948 4872 : WordAnd(BitcastMaybeObjectToWord(a), IntPtrConstant(kSmiTagMask)),
 949 4872 : IntPtrConstant(0));
 950 : }
951 :
: // Negated Smi-tag test: true iff {a} is a heap object (tag bit set).
 952 22581 : TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(SloppyTNode<Object> a) {
 953 : return WordNotEqual(
 954 22581 : WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
 955 45162 : IntPtrConstant(0));
 956 : }
957 :
: // True iff {a} is a Smi with a clear sign bit, i.e. a non-negative Smi;
: // checks the tag and sign bits in a single masked compare.
 958 1863 : TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
 959 : return WordEqual(WordAnd(BitcastTaggedToWord(a),
 960 1863 : IntPtrConstant(kSmiTagMask | kSmiSignMask)),
 961 3726 : IntPtrConstant(0));
 962 : }
963 :
: // True iff {word} is a multiple of {alignment}; {alignment} must be a
: // power of two so the low-bits mask test is valid.
 964 0 : TNode<BoolT> CodeStubAssembler::WordIsAligned(SloppyTNode<WordT> word,
 965 : size_t alignment) {
 966 : DCHECK(base::bits::IsPowerOfTwo(alignment));
 967 : return WordEqual(IntPtrConstant(0),
 968 0 : WordAnd(word, IntPtrConstant(alignment - 1)));
 969 : }
970 :
: // Thin forwarders to CodeAssembler::Bind; the debug-only overload also
: // threads through AssemblerDebugInfo for better diagnostics.
 971 : #if DEBUG
 972 : void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
 973 : CodeAssembler::Bind(label, debug_info);
 974 : }
 975 : #endif // DEBUG
 976 :
 977 1353521 : void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
978 :
: // Loads a double element from a FixedDoubleArray, jumping to {if_hole}
: // when the slot holds the hole NaN. Overloads for Smi and IntPtr indices.
 979 392 : TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
 980 : TNode<FixedDoubleArray> array, TNode<Smi> index, Label* if_hole) {
 981 : return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
 982 392 : SMI_PARAMETERS, if_hole);
 983 : }
 984 :
 985 672 : TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
 986 : TNode<FixedDoubleArray> array, TNode<IntPtrT> index, Label* if_hole) {
 987 : return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
 988 672 : INTPTR_PARAMETERS, if_hole);
 989 : }
990 :
: // Walks the prototype chain starting from {receiver_map} and branches to
: // {definitely_no_elements} if every prototype up to null has empty
: // elements (empty FixedArray or empty slow-element dictionary); branches
: // to {possibly_elements} as soon as any prototype might carry elements.
 991 112 : void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
 992 : Node* receiver_map, Label* definitely_no_elements,
 993 : Label* possibly_elements) {
 994 : CSA_SLOW_ASSERT(this, IsMap(receiver_map));
 995 112 : VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
 996 112 : Label loop_body(this, &var_map);
 997 224 : Node* empty_fixed_array = LoadRoot(RootIndex::kEmptyFixedArray);
 998 : Node* empty_slow_element_dictionary =
 999 224 : LoadRoot(RootIndex::kEmptySlowElementDictionary);
 1000 112 : Goto(&loop_body);
 1001 :
 1002 : BIND(&loop_body);
 1003 : {
 1004 112 : Node* map = var_map.value();
 1005 224 : Node* prototype = LoadMapPrototype(map);
 1006 224 : GotoIf(IsNull(prototype), definitely_no_elements);
 1007 224 : Node* prototype_map = LoadMap(prototype);
 1008 112 : TNode<Int32T> prototype_instance_type = LoadMapInstanceType(prototype_map);
 1009 :
 1010 : // Pessimistically assume elements if a Proxy, Special API Object,
 1011 : // or JSValue wrapper is found on the prototype chain. After this
 1012 : // instance type check, it's not necessary to check for interceptors or
 1013 : // access checks.
 1014 112 : Label if_custom(this, Label::kDeferred), if_notcustom(this);
 1015 : Branch(IsCustomElementsReceiverInstanceType(prototype_instance_type),
 1016 224 : &if_custom, &if_notcustom);
 1017 :
 1018 : BIND(&if_custom);
 1019 : {
 1020 : // For string JSValue wrappers we still support the checks as long
 1021 : // as they wrap the empty string.
 1022 112 : GotoIfNot(InstanceTypeEqual(prototype_instance_type, JS_VALUE_TYPE),
 1023 224 : possibly_elements);
 1024 : Node* prototype_value = LoadJSValueValue(prototype);
 1025 224 : Branch(IsEmptyString(prototype_value), &if_notcustom, possibly_elements);
 1026 : }
 1027 :
 1028 : BIND(&if_notcustom);
 1029 : {
: // Elements are "empty" in two shapes: the canonical empty
: // FixedArray or the canonical empty slow-element dictionary.
: // Either way, continue with the next prototype.
 1030 : Node* prototype_elements = LoadElements(prototype);
 1031 112 : var_map.Bind(prototype_map);
 1032 224 : GotoIf(WordEqual(prototype_elements, empty_fixed_array), &loop_body);
 1033 112 : Branch(WordEqual(prototype_elements, empty_slow_element_dictionary),
 1034 224 : &loop_body, possibly_elements);
 1035 112 : }
 1036 112 : }
 1037 112 : }
1038 :
: // Branches to {if_true} when {object} is a JSReceiver (object or
: // callable), {if_false} otherwise; Smis are never receivers.
 1039 1792 : void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
 1040 : Label* if_false) {
 1041 3584 : GotoIf(TaggedIsSmi(object), if_false);
 1042 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
 1043 3584 : Branch(IsJSReceiver(object), if_true, if_false);
 1044 1792 : }
1045 :
: // Jumps to {if_true} when the runtime's force-slow-path flag is set;
: // compiles to a no-op unless V8_ENABLE_FORCE_SLOW_PATH is defined.
 1046 2688 : void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
 1047 : #ifdef V8_ENABLE_FORCE_SLOW_PATH
 1048 : Node* const force_slow_path_addr =
 1049 : ExternalConstant(ExternalReference::force_slow_path(isolate()));
 1050 : Node* const force_slow = Load(MachineType::Uint8(), force_slow_path_addr);
 1051 :
 1052 : GotoIf(force_slow, if_true);
 1053 : #endif
 1054 2688 : }
1055 :
: // Jumps to {if_true} when the debugger is running in side-effect-checking
: // mode (DebugInfo::kSideEffects), read from an isolate-global int32.
 1056 5 : void CodeStubAssembler::GotoIfDebugExecutionModeChecksSideEffects(
 1057 : Label* if_true) {
 1058 : STATIC_ASSERT(sizeof(DebugInfo::ExecutionMode) >= sizeof(int32_t));
 1059 :
 1060 : TNode<ExternalReference> execution_mode_address = ExternalConstant(
 1061 5 : ExternalReference::debug_execution_mode_address(isolate()));
 1062 : TNode<Int32T> execution_mode =
 1063 5 : UncheckedCast<Int32T>(Load(MachineType::Int32(), execution_mode_address));
 1064 :
 1065 10 : GotoIf(Word32Equal(execution_mode, Int32Constant(DebugInfo::kSideEffects)),
 1066 10 : if_true);
 1067 5 : }
1068 :
: // Core bump-pointer allocation: tries to carve {size_in_bytes} out of the
: // space identified by {top_address}/{limit_address}, falling back to the
: // runtime when the space is exhausted, the object is large (with
: // kAllowLargeObjectAllocation), or pretenuring is requested. Returns the
: // tagged HeapObject pointer; aborts via OOM for invalid dynamic sizes.
 1069 7716 : TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
 1070 : AllocationFlags flags,
 1071 : TNode<RawPtrT> top_address,
 1072 : TNode<RawPtrT> limit_address) {
 1073 7716 : Label if_out_of_memory(this, Label::kDeferred);
 1074 :
 1075 : // TODO(jgruber,jkummerow): Extract the slow paths (= probably everything
 1076 : // but bump pointer allocation) into a builtin to save code space. The
 1077 : // size_in_bytes check may be moved there as well since a non-smi
 1078 : // size_in_bytes probably doesn't fit into the bump pointer region
 1079 : // (double-check that).
 1080 :
 1081 : intptr_t size_in_bytes_constant;
 1082 : bool size_in_bytes_is_constant = false;
 1083 7716 : if (ToIntPtrConstant(size_in_bytes, size_in_bytes_constant)) {
: // Constant sizes are validated at stub-generation time ...
 1084 : size_in_bytes_is_constant = true;
 1085 1384 : CHECK(Internals::IsValidSmi(size_in_bytes_constant));
 1086 692 : CHECK_GT(size_in_bytes_constant, 0);
 1087 : } else {
: // ... dynamic sizes get a runtime positive-Smi range check.
 1088 14048 : GotoIfNot(IsValidPositiveSmi(size_in_bytes), &if_out_of_memory);
 1089 : }
 1090 :
 1091 : TNode<RawPtrT> top =
 1092 7716 : UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), top_address));
 1093 : TNode<RawPtrT> limit =
 1094 7716 : UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), limit_address));
 1095 :
 1096 : // If there's not enough space, call the runtime.
 1097 : TVARIABLE(Object, result);
 1098 7716 : Label runtime_call(this, Label::kDeferred), no_runtime_call(this), out(this);
 1099 :
 1100 7716 : bool needs_double_alignment = flags & kDoubleAlignment;
 1101 :
 1102 7716 : if (flags & kAllowLargeObjectAllocation) {
: // Objects above the regular-size limit go straight to large-object
: // space via the runtime.
 1103 : Label next(this);
 1104 11370 : GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
 1105 :
 1106 : TNode<Smi> runtime_flags = SmiConstant(
 1107 5685 : Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
 1108 11370 : AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
 1109 11370 : result = CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
 1110 : SmiTag(size_in_bytes), runtime_flags);
 1111 5685 : Goto(&out);
 1112 :
 1113 5685 : BIND(&next);
 1114 : }
 1115 :
 1116 : TVARIABLE(IntPtrT, adjusted_size, size_in_bytes);
 1117 :
 1118 7716 : if (needs_double_alignment) {
: // Reserve 4 extra bytes for a filler when top is not double-aligned.
 1119 : Label next(this);
 1120 0 : GotoIfNot(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &next);
 1121 :
 1122 0 : adjusted_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
 1123 0 : Goto(&next);
 1124 :
 1125 0 : BIND(&next);
 1126 : }
 1127 :
 1128 : TNode<IntPtrT> new_top =
 1129 : IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value());
 1130 :
 1131 7716 : Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
 1132 15432 : &no_runtime_call);
 1133 :
 1134 : BIND(&runtime_call);
 1135 : {
 1136 7716 : if (flags & kPretenured) {
 1137 : TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
 1138 0 : AllocateDoubleAlignFlag::encode(needs_double_alignment) |
 1139 0 : AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
 1140 0 : result = CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
 1141 : SmiTag(size_in_bytes), runtime_flags);
 1142 : } else {
 1143 15432 : result = CallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
 1144 : SmiTag(size_in_bytes));
 1145 : }
 1146 7716 : Goto(&out);
 1147 : }
 1148 :
 1149 : // When there is enough space, return `top' and bump it up.
 1150 : BIND(&no_runtime_call);
 1151 : {
 1152 : StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
 1153 7716 : new_top);
 1154 :
 1155 : TVARIABLE(IntPtrT, address, UncheckedCast<IntPtrT>(top));
 1156 :
 1157 7716 : if (needs_double_alignment) {
 1158 : Label next(this);
 1159 0 : GotoIf(IntPtrEqual(adjusted_size.value(), size_in_bytes), &next);
 1160 :
 1161 : // Store a filler and increase the address by 4.
 1162 : StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
 1163 0 : LoadRoot(RootIndex::kOnePointerFillerMap));
 1164 0 : address = IntPtrAdd(UncheckedCast<IntPtrT>(top), IntPtrConstant(4));
 1165 0 : Goto(&next);
 1166 :
 1167 0 : BIND(&next);
 1168 : }
 1169 :
 1170 15432 : result = BitcastWordToTagged(
 1171 7716 : IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
 1172 7716 : Goto(&out);
 1173 : }
 1174 :
 1175 7716 : if (!size_in_bytes_is_constant) {
 1176 : BIND(&if_out_of_memory);
 1177 : CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
 1178 : NoContextConstant());
 1179 7024 : Unreachable();
 1180 : }
 1181 :
 1182 : BIND(&out);
 1183 7716 : return UncheckedCast<HeapObject>(result.value());
 1184 : }
1185 :
: // AllocateRaw without the double-alignment flag (asserted clear).
 1186 0 : TNode<HeapObject> CodeStubAssembler::AllocateRawUnaligned(
 1187 : TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
 1188 : TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
 1189 : DCHECK_EQ(flags & kDoubleAlignment, 0);
 1190 4973 : return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
 1191 : }
1192 :
: // AllocateRaw with 8-byte alignment: forces the kDoubleAlignment flag on
: // 32-bit hosts; on 64-bit hosts allocation is naturally aligned so the
: // flag is stripped instead.
 1193 0 : TNode<HeapObject> CodeStubAssembler::AllocateRawDoubleAligned(
 1194 : TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
 1195 : TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
 1196 : #if defined(V8_HOST_ARCH_32_BIT)
 1197 : return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
 1198 : limit_address);
 1199 : #elif defined(V8_HOST_ARCH_64_BIT)
 1200 : // Allocation on 64 bit machine is naturally double aligned
 1201 : return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
 1202 2743 : limit_address);
 1203 : #else
 1204 : #error Architecture not supported
 1205 : #endif
 1206 : }
1207 :
: // Allocates a regular-sized object in new space; asserts the size is
: // within the regular-object limit so no large-object fallback is needed.
 1208 784 : TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(
 1209 : TNode<IntPtrT> size_in_bytes, AllocationFlags flags) {
 1210 : DCHECK(flags == kNone || flags == kDoubleAlignment);
 1211 : CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
 1212 12116 : return Allocate(size_in_bytes, flags);
 1213 : }
1214 :
: // General allocation entry point. The common case (no double alignment,
: // no large objects) uses the optimized inline allocation node; otherwise
: // the top/limit addresses of the chosen space are materialized and the
: // raw (un)aligned path is taken. kPretenured selects old space.
 1215 70737 : TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
 1216 : AllocationFlags flags) {
 1217 70737 : Comment("Allocate");
 1218 : bool const new_space = !(flags & kPretenured);
 1219 138731 : if (!(flags & kDoubleAlignment) && !(flags & kAllowLargeObjectAllocation)) {
 1220 : return OptimizedAllocate(size_in_bytes, new_space
 1221 : ? PretenureFlag::NOT_TENURED
 1222 63021 : : PretenureFlag::TENURED);
 1223 : }
 1224 : TNode<ExternalReference> top_address = ExternalConstant(
 1225 : new_space
 1226 7716 : ? ExternalReference::new_space_allocation_top_address(isolate())
 1227 15432 : : ExternalReference::old_space_allocation_top_address(isolate()));
: // The limit slot is computed as top + kTaggedSize; the DCHECKs verify
: // that layout assumption for both spaces.
 1228 : DCHECK_EQ(kTaggedSize,
 1229 : ExternalReference::new_space_allocation_limit_address(isolate())
 1230 : .address() -
 1231 : ExternalReference::new_space_allocation_top_address(isolate())
 1232 : .address());
 1233 : DCHECK_EQ(kTaggedSize,
 1234 : ExternalReference::old_space_allocation_limit_address(isolate())
 1235 : .address() -
 1236 : ExternalReference::old_space_allocation_top_address(isolate())
 1237 : .address());
 1238 : TNode<IntPtrT> limit_address = IntPtrAdd(
 1239 7716 : ReinterpretCast<IntPtrT>(top_address), IntPtrConstant(kTaggedSize));
 1240 :
 1241 7716 : if (flags & kDoubleAlignment) {
 1242 : return AllocateRawDoubleAligned(size_in_bytes, flags,
 1243 : ReinterpretCast<RawPtrT>(top_address),
 1244 : ReinterpretCast<RawPtrT>(limit_address));
 1245 : } else {
 1246 : return AllocateRawUnaligned(size_in_bytes, flags,
 1247 : ReinterpretCast<RawPtrT>(top_address),
 1248 : ReinterpretCast<RawPtrT>(limit_address));
 1249 : }
 1250 : }
1251 :
: // Constant-size convenience wrapper for new-space allocation.
 1252 1588 : TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
 1253 : AllocationFlags flags) {
 1254 1588 : CHECK(flags == kNone || flags == kDoubleAlignment);
 1255 : DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
 1256 1588 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
 1257 : }
1258 :
: // Constant-size convenience wrapper for the general Allocate path.
 1259 35838 : TNode<HeapObject> CodeStubAssembler::Allocate(int size_in_bytes,
 1260 : AllocationFlags flags) {
 1261 35838 : return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
 1262 : }
1263 :
: // Derives a tagged pointer {offset} bytes past {previous} — used to hand
: // out sub-objects inside an already-reserved allocation. No new memory
: // is reserved here; the caller must have allocated enough space.
 1264 5432 : TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
 1265 : TNode<IntPtrT> offset) {
 1266 : return UncheckedCast<HeapObject>(
 1267 10864 : BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset)));
 1268 : }
 1269 :
 1270 4424 : TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
 1271 : int offset) {
 1272 4424 : return InnerAllocate(previous, IntPtrConstant(offset));
 1273 : }
1274 :
: // True iff {size} fits in a regular (non-large-object-space) allocation.
 1275 5909 : TNode<BoolT> CodeStubAssembler::IsRegularHeapObjectSize(TNode<IntPtrT> size) {
 1276 : return UintPtrLessThanOrEqual(size,
 1277 11818 : IntPtrConstant(kMaxRegularHeapObjectSize));
 1278 : }
1279 :
: // Implements the JS ToBoolean branch: false, 0, -0, NaN, "", null,
: // undefined, undetectable objects, and 0n are falsy; everything else is
: // truthy. HeapNumber and BigInt checks are on deferred (cold) paths.
 1280 7504 : void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
 1281 : Label* if_false) {
 1282 15008 : Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
 1283 7504 : if_bigint(this, Label::kDeferred);
 1284 : // Rule out false {value}.
 1285 7504 : GotoIf(WordEqual(value, FalseConstant()), if_false);
 1286 :
 1287 : // Check if {value} is a Smi or a HeapObject.
 1288 15008 : Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);
 1289 :
 1290 : BIND(&if_smi);
 1291 : {
 1292 : // The {value} is a Smi, only need to check against zero.
 1293 7504 : BranchIfSmiEqual(CAST(value), SmiConstant(0), if_false, if_true);
 1294 : }
 1295 :
 1296 : BIND(&if_notsmi);
 1297 : {
 1298 : // Check if {value} is the empty string.
 1299 15008 : GotoIf(IsEmptyString(value), if_false);
 1300 :
 1301 : // The {value} is a HeapObject, load its map.
 1302 15008 : Node* value_map = LoadMap(value);
 1303 :
 1304 : // Only null, undefined and document.all have the undetectable bit set,
 1305 : // so we can return false immediately when that bit is set.
 1306 15008 : GotoIf(IsUndetectableMap(value_map), if_false);
 1307 :
 1308 : // We still need to handle numbers specially, but all other {value}s
 1309 : // that make it here yield true.
 1310 15008 : GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
 1311 15008 : Branch(IsBigInt(value), &if_bigint, if_true);
 1312 :
 1313 : BIND(&if_heapnumber);
 1314 : {
 1315 : // Load the floating point value of {value}.
 1316 : Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
 1317 7504 : MachineType::Float64());
 1318 :
 1319 : // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
 1320 22512 : Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
 1321 15008 : if_true, if_false);
 1322 : }
 1323 :
 1324 : BIND(&if_bigint);
 1325 : {
: // BigInt truthiness (non-zero digits) is decided by the runtime.
 1326 : Node* result =
 1327 : CallRuntime(Runtime::kBigIntToBoolean, NoContextConstant(), value);
 1328 : CSA_ASSERT(this, IsBoolean(result));
 1329 7504 : Branch(WordEqual(result, TrueConstant()), if_true, if_false);
 1330 : }
 1331 7504 : }
 1332 7504 : }
1333 :
: // Loads a value at {offset} from the caller's frame.
 1334 2016 : Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
 1335 2016 : Node* frame_pointer = LoadParentFramePointer();
 1336 4032 : return Load(rep, frame_pointer, IntPtrConstant(offset));
 1337 : }
1338 :
: // Raw (untagged-base) load at {buffer} + {offset}.
 1339 3655 : Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
 1340 : MachineType rep) {
 1341 7310 : return Load(rep, buffer, IntPtrConstant(offset));
 1342 : }
1343 :
: // Loads a field from a tagged heap object; the kHeapObjectTag bias is
: // subtracted so {offset} is a plain field offset. Overloads for constant
: // and dynamic offsets.
 1344 885722 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
 1345 : int offset, MachineType rep) {
 1346 : CSA_ASSERT(this, IsStrong(object));
 1347 1771444 : return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
 1348 : }
 1349 :
 1350 7290 : Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
 1351 : SloppyTNode<IntPtrT> offset,
 1352 : MachineType rep) {
 1353 : CSA_ASSERT(this, IsStrong(object));
 1354 14580 : return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
 1355 : }
1356 :
: // Loads a Smi field and returns its untagged IntPtr value. With 32-bit
: // Smis the payload half-word is read directly (+4 on little-endian to
: // skip the zero half); otherwise the whole tagged word is untagged.
 1357 11724 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
 1358 : SloppyTNode<HeapObject> object, int offset) {
 1359 : if (SmiValuesAre32Bits()) {
 1360 : #if V8_TARGET_LITTLE_ENDIAN
 1361 11724 : offset += 4;
 1362 : #endif
 1363 : return ChangeInt32ToIntPtr(
 1364 23448 : LoadObjectField(object, offset, MachineType::Int32()));
 1365 : } else {
 1366 : return SmiToIntPtr(
 1367 : LoadObjectField(object, offset, MachineType::AnyTagged()));
 1368 : }
 1369 : }
1370 :
: // Like LoadAndUntagObjectField, but the untagged result is an Int32.
 1371 4378 : TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
 1372 : int offset) {
 1373 : if (SmiValuesAre32Bits()) {
 1374 : #if V8_TARGET_LITTLE_ENDIAN
 1375 4378 : offset += 4;
 1376 : #endif
 1377 : return UncheckedCast<Int32T>(
 1378 4378 : LoadObjectField(object, offset, MachineType::Int32()));
 1379 : } else {
 1380 : return SmiToInt32(
 1381 : LoadObjectField(object, offset, MachineType::AnyTagged()));
 1382 : }
 1383 : }
1384 :
: // Loads and untags a Smi stored at {base} + {index} (raw address, not a
: // tagged object); same 32-bit-Smi payload-half trick as above.
 1385 1512 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
 1386 : if (SmiValuesAre32Bits()) {
 1387 : #if V8_TARGET_LITTLE_ENDIAN
 1388 1512 : index += 4;
 1389 : #endif
 1390 : return ChangeInt32ToIntPtr(
 1391 4536 : Load(MachineType::Int32(), base, IntPtrConstant(index)));
 1392 : } else {
 1393 : return SmiToIntPtr(
 1394 : Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
 1395 : }
 1396 : }
1397 :
: // Loads the Smi root at {root_index} from the isolate's root table and
: // returns it untagged as an Int32.
 1398 0 : TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32Root(
 1399 : RootIndex root_index) {
 1400 : Node* isolate_root =
 1401 0 : ExternalConstant(ExternalReference::isolate_root(isolate()));
 1402 : int offset = IsolateData::root_slot_offset(root_index);
 1403 : if (SmiValuesAre32Bits()) {
 1404 : #if V8_TARGET_LITTLE_ENDIAN
 1405 0 : offset += 4;
 1406 : #endif
 1407 : return UncheckedCast<Int32T>(
 1408 0 : Load(MachineType::Int32(), isolate_root, IntPtrConstant(offset)));
 1409 : } else {
 1410 : return SmiToInt32(
 1411 : Load(MachineType::AnyTagged(), isolate_root, IntPtrConstant(offset)));
 1412 : }
 1413 : }
1414 :
: // Stores {value} as a Smi at {base} + {offset}. With 32-bit Smis the
: // tagged word is written as two 32-bit halves (zero half + truncated
: // payload, order depending on endianness); no write barrier is needed
: // because Smis are not heap pointers.
 1415 59261 : void CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
 1416 : if (SmiValuesAre32Bits()) {
 1417 59261 : int zero_offset = offset + 4;
 1418 : int payload_offset = offset;
 1419 : #if V8_TARGET_LITTLE_ENDIAN
 1420 : std::swap(zero_offset, payload_offset);
 1421 : #endif
 1422 : StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
 1423 177783 : IntPtrConstant(zero_offset), Int32Constant(0));
 1424 : StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
 1425 59261 : IntPtrConstant(payload_offset),
 1426 177783 : TruncateInt64ToInt32(value));
 1427 : } else {
 1428 : StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
 1429 : IntPtrConstant(offset), SmiTag(value));
 1430 : }
 1431 59261 : }
1432 :
: // Loads the float64 payload of a HeapNumber.
 1433 84810 : TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
 1434 : SloppyTNode<HeapNumber> object) {
 1435 : return TNode<Float64T>::UncheckedCast(LoadObjectField(
 1436 84810 : object, HeapNumber::kValueOffset, MachineType::Float64()));
 1437 : }
1438 :
: // Loads the map word (first field) of a heap object.
 1439 208474 : TNode<Map> CodeStubAssembler::LoadMap(SloppyTNode<HeapObject> object) {
 1440 208474 : return UncheckedCast<Map>(LoadObjectField(object, HeapObject::kMapOffset));
 1441 : }
1442 :
: // Loads the instance type of {object} via its map.
 1443 66115 : TNode<Int32T> CodeStubAssembler::LoadInstanceType(
 1444 : SloppyTNode<HeapObject> object) {
 1445 132230 : return LoadMapInstanceType(LoadMap(object));
 1446 : }
1447 :
: // True iff {object}'s instance type equals {instance_type}.
 1448 9128 : TNode<BoolT> CodeStubAssembler::HasInstanceType(SloppyTNode<HeapObject> object,
 1449 : InstanceType instance_type) {
 1450 18256 : return InstanceTypeEqual(LoadInstanceType(object), instance_type);
 1451 : }
1452 :
: // True iff {object}'s instance type differs from {instance_type}.
 1453 896 : TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
 1454 : SloppyTNode<HeapObject> object, InstanceType instance_type) {
 1455 2688 : return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
 1456 : }
1457 :
: // Like DoesntHaveInstanceType, but also returns true for Smis (which
: // have no map); the Smi check short-circuits the map load.
 1458 0 : TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
 1459 : SloppyTNode<HeapObject> any_tagged, InstanceType type) {
 1460 : /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
 1461 0 : TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
 1462 : return Select<BoolT>(
 1463 0 : tagged_is_smi, [=]() { return tagged_is_smi; },
 1464 0 : [=]() { return DoesntHaveInstanceType(any_tagged, type); });
 1465 : }
1466 :
: // Loads the out-of-object property backing store of a fast-mode object;
: // a Smi hash in the properties slot means "no backing store yet", in
: // which case the canonical empty FixedArray is returned.
 1467 3757 : TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
 1468 : SloppyTNode<JSObject> object) {
 1469 : CSA_SLOW_ASSERT(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))));
 1470 3757 : TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
 1471 3757 : return Select<HeapObject>(TaggedIsSmi(properties),
 1472 3757 : [=] { return EmptyFixedArrayConstant(); },
 1473 15028 : [=] { return CAST(properties); });
 1474 : }
1475 :
: // Dictionary-mode counterpart of LoadFastProperties: a Smi slot yields
: // the canonical empty property dictionary.
 1476 6348 : TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
 1477 : SloppyTNode<JSObject> object) {
 1478 : CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
 1479 6348 : TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
 1480 6348 : return Select<HeapObject>(TaggedIsSmi(properties),
 1481 6348 : [=] { return EmptyPropertyDictionaryConstant(); },
 1482 25392 : [=] { return CAST(properties); });
 1483 : }
1484 :
: // Loads the "length" field of a JSArray (a Number).
 1485 14487 : TNode<Number> CodeStubAssembler::LoadJSArrayLength(SloppyTNode<JSArray> array) {
 1486 : CSA_ASSERT(this, IsJSArray(array));
 1487 14487 : return CAST(LoadObjectField(array, JSArray::kLengthOffset));
 1488 : }
1489 :
: // Loads the "length" field of an arguments object.
 1490 0 : TNode<Object> CodeStubAssembler::LoadJSArgumentsObjectWithLength(
 1491 : SloppyTNode<JSArgumentsObjectWithLength> array) {
 1492 0 : return LoadObjectField(array, JSArgumentsObjectWithLength::kLengthOffset);
 1493 : }
1494 :
: // Loads the length of a fast-elements JSArray, which is guaranteed to be
: // a positive Smi (asserted in slow/debug builds).
 1495 2072 : TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(
 1496 : SloppyTNode<JSArray> array) {
 1497 8468 : TNode<Object> length = LoadJSArrayLength(array);
 1498 : CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(array)));
 1499 : // JSArray length is always a positive Smi for fast arrays.
 1500 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
 1501 2072 : return UncheckedCast<Smi>(length);
 1502 : }
1503 :
: // Loads the Smi length of a FixedArrayBase-derived array.
 1504 13021 : TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
 1505 : SloppyTNode<FixedArrayBase> array) {
 1506 : CSA_SLOW_ASSERT(this, IsNotWeakFixedArraySubclass(array));
 1507 13021 : return CAST(LoadObjectField(array, FixedArrayBase::kLengthOffset));
 1508 : }
1509 :
: // Loads the FixedArrayBase length untagged as an IntPtr.
 1510 1736 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
 1511 : SloppyTNode<FixedArrayBase> array) {
 1512 3986 : return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
 1513 : }
1514 :
: // Loads a FeedbackVector's int32 length field, widened to IntPtr.
 1515 0 : TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
 1516 : TNode<FeedbackVector> vector) {
 1517 : return ChangeInt32ToIntPtr(
 1518 0 : LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
 1519 : }
1520 :
: // Loads the Smi length of a WeakFixedArray.
 1521 616 : TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
 1522 : TNode<WeakFixedArray> array) {
 1523 616 : return CAST(LoadObjectField(array, WeakFixedArray::kLengthOffset));
 1524 : }
1525 :
: // Loads the WeakFixedArray length untagged as an IntPtr.
 1526 616 : TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
 1527 : SloppyTNode<WeakFixedArray> array) {
 1528 1186 : return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
 1529 : }
1530 :
: // Loads the 16-bit number-of-descriptors field of a DescriptorArray.
 1531 2138 : TNode<Int32T> CodeStubAssembler::LoadNumberOfDescriptors(
 1532 : TNode<DescriptorArray> array) {
 1533 : return UncheckedCast<Int32T>(
 1534 : LoadObjectField(array, DescriptorArray::kNumberOfDescriptorsOffset,
 1535 2138 : MachineType::Int16()));
 1536 : }
1537 :
: // Loads a map's 8-bit "bit_field" (first bitfield byte).
 1538 28137 : TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
 1539 : CSA_SLOW_ASSERT(this, IsMap(map));
 1540 : return UncheckedCast<Int32T>(
 1541 28137 : LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8()));
 1542 : }
1543 :
: // Loads a map's 8-bit "bit_field2" (holds the elements kind, among others).
 1544 10253 : TNode<Int32T> CodeStubAssembler::LoadMapBitField2(SloppyTNode<Map> map) {
 1545 : CSA_SLOW_ASSERT(this, IsMap(map));
 1546 : return UncheckedCast<Int32T>(
 1547 10253 : LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()));
 1548 : }
1549 :
: // Loads a map's 32-bit "bit_field3".
 1550 6898 : TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(SloppyTNode<Map> map) {
 1551 : CSA_SLOW_ASSERT(this, IsMap(map));
 1552 : return UncheckedCast<Uint32T>(
 1553 6898 : LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()));
 1554 : }
1555 :
: // Loads a map's 16-bit instance type field.
 1556 120923 : TNode<Int32T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
 1557 : return UncheckedCast<Int32T>(
 1558 120923 : LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint16()));
 1559 : }
1560 :
: // Decodes the elements kind from a map's bit_field2.
 1561 9413 : TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
 1562 : CSA_SLOW_ASSERT(this, IsMap(map));
 1563 18826 : Node* bit_field2 = LoadMapBitField2(map);
 1564 9413 : return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
 1565 : }
1566 :
: // Loads the elements kind of {object} via its map.
 1567 2912 : TNode<Int32T> CodeStubAssembler::LoadElementsKind(
 1568 : SloppyTNode<HeapObject> object) {
 1569 5824 : return LoadMapElementsKind(LoadMap(object));
 1570 : }
1571 :
: // Loads a map's descriptor array.
 1572 8466 : TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
 1573 : SloppyTNode<Map> map) {
 1574 : CSA_SLOW_ASSERT(this, IsMap(map));
 1575 8466 : return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
 1576 : }
1577 :
: // Loads a map's prototype field.
 1578 16525 : TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {
 1579 : CSA_SLOW_ASSERT(this, IsMap(map));
 1580 16525 : return CAST(LoadObjectField(map, Map::kPrototypeOffset));
 1581 : }
1582 :
: // Loads the PrototypeInfo from a map's transitions-or-prototype-info
: // slot. Jumps to {if_no_proto_info} when the slot is a Smi, cleared or
: // weak reference, or a strong object that is not a PrototypeInfo (e.g. a
: // TransitionArray sharing the same slot).
 1583 112 : TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
 1584 : SloppyTNode<Map> map, Label* if_no_proto_info) {
 1585 112 : Label if_strong_heap_object(this);
 1586 : CSA_ASSERT(this, IsMap(map));
 1587 : TNode<MaybeObject> maybe_prototype_info =
 1588 112 : LoadMaybeWeakObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
 1589 : TVARIABLE(Object, prototype_info);
 1590 : DispatchMaybeObject(maybe_prototype_info, if_no_proto_info, if_no_proto_info,
 1591 : if_no_proto_info, &if_strong_heap_object,
 1592 112 : &prototype_info);
 1593 :
 1594 : BIND(&if_strong_heap_object);
 1595 : GotoIfNot(WordEqual(LoadMap(CAST(prototype_info.value())),
 1596 224 : LoadRoot(RootIndex::kPrototypeInfoMap)),
 1597 112 : if_no_proto_info);
 1598 112 : return CAST(prototype_info.value());
 1599 : }
1600 :
: // Loads a map's instance size (in tagged words) as an IntPtr.
 1601 4892 : TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(
 1602 : SloppyTNode<Map> map) {
 1603 : CSA_SLOW_ASSERT(this, IsMap(map));
 1604 : return ChangeInt32ToIntPtr(LoadObjectField(
 1605 9784 : map, Map::kInstanceSizeInWordsOffset, MachineType::Uint8()));
 1606 : }
1607 :
: // Loads the word offset at which a JSObject's in-object properties start;
: // this byte is overloaded (for primitive maps it holds the constructor
: // function index), hence the IsJSObjectMap assertion.
 1608 2133 : TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
 1609 : SloppyTNode<Map> map) {
 1610 : CSA_SLOW_ASSERT(this, IsMap(map));
 1611 : // See Map::GetInObjectPropertiesStartInWords() for details.
 1612 : CSA_ASSERT(this, IsJSObjectMap(map));
 1613 : return ChangeInt32ToIntPtr(LoadObjectField(
 1614 : map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
 1615 4266 : MachineType::Uint8()));
 1616 : }
1617 :
: // Loads the constructor-function index from the overloaded byte above;
: // only valid for primitive instance types (asserted).
 1618 56 : TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
 1619 : SloppyTNode<Map> map) {
 1620 : CSA_SLOW_ASSERT(this, IsMap(map));
 1621 : // See Map::GetConstructorFunctionIndex() for details.
 1622 : CSA_ASSERT(this, IsPrimitiveInstanceType(LoadMapInstanceType(map)));
 1623 : return ChangeInt32ToIntPtr(LoadObjectField(
 1624 : map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
 1625 112 : MachineType::Uint8()));
 1626 : }
1627 :
// Returns the constructor of |map|. The constructor-or-back-pointer field
// holds a back pointer (another Map) for transitioned maps, so follow the
// chain until a non-Map value (the actual constructor) is reached.
TNode<Object> CodeStubAssembler::LoadMapConstructor(SloppyTNode<Map> map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  TVARIABLE(Object, result,
            LoadObjectField(map, Map::kConstructorOrBackPointerOffset));

  Label done(this), loop(this, &result);
  Goto(&loop);
  BIND(&loop);
  {
    // A Smi or any non-Map heap object terminates the back-pointer chain.
    GotoIf(TaggedIsSmi(result.value()), &done);
    Node* is_map_type =
        InstanceTypeEqual(LoadInstanceType(CAST(result.value())), MAP_TYPE);
    GotoIfNot(is_map_type, &done);
    result = LoadObjectField(CAST(result.value()),
                             Map::kConstructorOrBackPointerOffset);
    Goto(&loop);
  }
  BIND(&done);
  return result.value();
}
1648 :
1649 840 : Node* CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
1650 : CSA_SLOW_ASSERT(this, IsMap(map));
1651 1680 : Node* bit_field3 = LoadMapBitField3(map);
1652 1680 : return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
1653 : }
1654 :
1655 0 : TNode<Object> CodeStubAssembler::LoadMapBackPointer(SloppyTNode<Map> map) {
1656 : TNode<HeapObject> object =
1657 : CAST(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1658 0 : return Select<Object>(IsMap(object), [=] { return object; },
1659 0 : [=] { return UndefinedConstant(); });
1660 : }
1661 :
// Bails out to |bailout| unless |map| describes a receiver with only fast
// ("simple") properties: not a custom-elements receiver, not in dictionary
// mode, and without a hidden prototype. Returns bit_field3 so the caller can
// reuse it without reloading.
TNode<Uint32T> CodeStubAssembler::EnsureOnlyHasSimpleProperties(
    TNode<Map> map, TNode<Int32T> instance_type, Label* bailout) {
  // This check can have false positives, since it applies to any JSValueType.
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), bailout);

  TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
  // Both conditions are tested with a single masked compare.
  GotoIf(IsSetWord32(bit_field3, Map::IsDictionaryMapBit::kMask |
                                     Map::HasHiddenPrototypeBit::kMask),
         bailout);

  return bit_field3;
}
1674 :
// Extracts the identity hash from a JSReceiver's properties-or-hash field.
// That field may hold: a Smi (the hash itself), a PropertyArray (hash packed
// into its length-and-hash word), a NameDictionary (hash in a reserved
// element), or an empty fixed array (no hash stored). When |if_no_hash| is
// non-null, jumps there if the resulting value is the no-hash sentinel.
TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
    SloppyTNode<Object> receiver, Label* if_no_hash) {
  TVARIABLE(IntPtrT, var_hash);
  Label done(this), if_smi(this), if_property_array(this),
      if_property_dictionary(this), if_fixed_array(this);

  TNode<Object> properties_or_hash =
      LoadObjectField(TNode<HeapObject>::UncheckedCast(receiver),
                      JSReceiver::kPropertiesOrHashOffset);
  GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);

  TNode<HeapObject> properties =
      TNode<HeapObject>::UncheckedCast(properties_or_hash);
  TNode<Int32T> properties_instance_type = LoadInstanceType(properties);

  GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
         &if_property_array);
  Branch(InstanceTypeEqual(properties_instance_type, NAME_DICTIONARY_TYPE),
         &if_property_dictionary, &if_fixed_array);

  BIND(&if_fixed_array);
  {
    // The empty fixed array placeholder: no hash has been stored.
    var_hash = IntPtrConstant(PropertyArray::kNoHashSentinel);
    Goto(&done);
  }

  BIND(&if_smi);
  {
    // The field holds the hash directly as a Smi.
    var_hash = SmiUntag(TNode<Smi>::UncheckedCast(properties_or_hash));
    Goto(&done);
  }

  BIND(&if_property_array);
  {
    // Hash shares a word with the array length; decode its bit field.
    TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
        properties, PropertyArray::kLengthAndHashOffset);
    var_hash = TNode<IntPtrT>::UncheckedCast(
        DecodeWord<PropertyArray::HashField>(length_and_hash));
    Goto(&done);
  }

  BIND(&if_property_dictionary);
  {
    // Dictionaries reserve a dedicated element for the object hash.
    var_hash = SmiUntag(CAST(LoadFixedArrayElement(
        CAST(properties), NameDictionary::kObjectHashIndex)));
    Goto(&done);
  }

  BIND(&done);
  if (if_no_hash != nullptr) {
    GotoIf(IntPtrEqual(var_hash.value(),
                       IntPtrConstant(PropertyArray::kNoHashSentinel)),
           if_no_hash);
  }
  return var_hash.value();
}
1731 :
1732 407 : TNode<Uint32T> CodeStubAssembler::LoadNameHashField(SloppyTNode<Name> name) {
1733 : CSA_ASSERT(this, IsName(name));
1734 407 : return LoadObjectField<Uint32T>(name, Name::kHashFieldOffset);
1735 : }
1736 :
// Returns the hash value of |name|, shifted down to strip the flag bits.
// When |if_hash_not_computed| is provided, jumps there if the hash has not
// been computed yet; otherwise the caller must guarantee it is present.
TNode<Uint32T> CodeStubAssembler::LoadNameHash(SloppyTNode<Name> name,
                                               Label* if_hash_not_computed) {
  TNode<Uint32T> hash_field = LoadNameHashField(name);
  if (if_hash_not_computed != nullptr) {
    GotoIf(IsSetWord32(hash_field, Name::kHashNotComputedMask),
           if_hash_not_computed);
  }
  return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
}
1746 :
1747 5997 : TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(
1748 : SloppyTNode<String> string) {
1749 11994 : return SmiFromIntPtr(LoadStringLengthAsWord(string));
1750 : }
1751 :
1752 11831 : TNode<IntPtrT> CodeStubAssembler::LoadStringLengthAsWord(
1753 : SloppyTNode<String> string) {
1754 23662 : return Signed(ChangeUint32ToWord(LoadStringLengthAsWord32(string)));
1755 : }
1756 :
1757 224 : TNode<Uint32T> CodeStubAssembler::LoadStringLengthAsWord32(
1758 : SloppyTNode<String> string) {
1759 : CSA_ASSERT(this, IsString(string));
1760 224 : return LoadObjectField<Uint32T>(string, String::kLengthOffset);
1761 : }
1762 :
1763 56 : Node* CodeStubAssembler::PointerToSeqStringData(Node* seq_string) {
1764 : CSA_ASSERT(this, IsString(seq_string));
1765 : CSA_ASSERT(this,
1766 : IsSequentialStringInstanceType(LoadInstanceType(seq_string)));
1767 : STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
1768 : return IntPtrAdd(
1769 : BitcastTaggedToWord(seq_string),
1770 168 : IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag));
1771 : }
1772 :
1773 112 : Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
1774 : CSA_ASSERT(this, IsJSValue(object));
1775 112 : return LoadObjectField(object, JSValue::kValueOffset);
1776 : }
1777 :
// Four-way dispatch on a MaybeObject: Smi -> |if_smi|; cleared weak reference
// -> |if_cleared|; live weak reference -> |if_weak| with the unmasked object
// stored in |*extracted|; strong reference -> |if_strong| with the object in
// |*extracted|.
void CodeStubAssembler::DispatchMaybeObject(TNode<MaybeObject> maybe_object,
                                            Label* if_smi, Label* if_cleared,
                                            Label* if_weak, Label* if_strong,
                                            TVariable<Object>* extracted) {
  Label inner_if_smi(this), inner_if_strong(this);

  GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);

  // Cleared must be checked before the weak/strong split: the cleared
  // sentinel carries the weak tag bits.
  GotoIf(IsCleared(maybe_object), if_cleared);

  // Strong references have the plain heap-object tag in the low bits.
  GotoIf(Word32Equal(Word32And(TruncateIntPtrToInt32(
                                   BitcastMaybeObjectToWord(maybe_object)),
                               Int32Constant(kHeapObjectTagMask)),
                     Int32Constant(kHeapObjectTag)),
         &inner_if_strong);

  // Weak case: strip the weak bit to recover the object pointer.
  *extracted =
      BitcastWordToTagged(WordAnd(BitcastMaybeObjectToWord(maybe_object),
                                  IntPtrConstant(~kWeakHeapObjectMask)));
  Goto(if_weak);

  BIND(&inner_if_smi);
  *extracted = CAST(maybe_object);
  Goto(if_smi);

  BIND(&inner_if_strong);
  *extracted = CAST(maybe_object);
  Goto(if_strong);
}
1807 :
1808 504 : TNode<BoolT> CodeStubAssembler::IsStrong(TNode<MaybeObject> value) {
1809 : return WordEqual(WordAnd(BitcastMaybeObjectToWord(value),
1810 1512 : IntPtrConstant(kHeapObjectTagMask)),
1811 1512 : IntPtrConstant(kHeapObjectTag));
1812 : }
1813 :
// Casts |value| to a strong HeapObject, branching to |if_not_strong| when it
// is a Smi or a weak/cleared reference instead.
TNode<HeapObject> CodeStubAssembler::GetHeapObjectIfStrong(
    TNode<MaybeObject> value, Label* if_not_strong) {
  GotoIfNot(IsStrong(value), if_not_strong);
  return CAST(value);
}
1819 :
1820 504 : TNode<BoolT> CodeStubAssembler::IsWeakOrCleared(TNode<MaybeObject> value) {
1821 : return Word32Equal(
1822 1008 : Word32And(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1823 2016 : Int32Constant(kHeapObjectTagMask)),
1824 2016 : Int32Constant(kWeakHeapObjectTag));
1825 : }
1826 :
1827 7000 : TNode<BoolT> CodeStubAssembler::IsCleared(TNode<MaybeObject> value) {
1828 14000 : return Word32Equal(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1829 28000 : Int32Constant(kClearedWeakHeapObjectLower32));
1830 : }
1831 :
1832 1288 : TNode<BoolT> CodeStubAssembler::IsNotCleared(TNode<MaybeObject> value) {
1833 2576 : return Word32NotEqual(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1834 5152 : Int32Constant(kClearedWeakHeapObjectLower32));
1835 : }
1836 :
1837 5992 : TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
1838 : TNode<MaybeObject> value) {
1839 : CSA_ASSERT(this, IsWeakOrCleared(value));
1840 : CSA_ASSERT(this, IsNotCleared(value));
1841 : return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
1842 17976 : BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
1843 : }
1844 :
// As GetHeapObjectAssumeWeak(value), but branches to |if_cleared| when the
// weak reference has been cleared instead of asserting.
TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
    TNode<MaybeObject> value, Label* if_cleared) {
  GotoIf(IsCleared(value), if_cleared);
  return GetHeapObjectAssumeWeak(value);
}
1850 :
1851 2184 : TNode<BoolT> CodeStubAssembler::IsWeakReferenceTo(TNode<MaybeObject> object,
1852 : TNode<Object> value) {
1853 : return WordEqual(WordAnd(BitcastMaybeObjectToWord(object),
1854 6552 : IntPtrConstant(~kWeakHeapObjectMask)),
1855 6552 : BitcastTaggedToWord(value));
1856 : }
1857 :
1858 1400 : TNode<BoolT> CodeStubAssembler::IsStrongReferenceTo(TNode<MaybeObject> object,
1859 : TNode<Object> value) {
1860 1400 : return WordEqual(BitcastMaybeObjectToWord(object),
1861 5600 : BitcastTaggedToWord(value));
1862 : }
1863 :
1864 3584 : TNode<BoolT> CodeStubAssembler::IsNotWeakReferenceTo(TNode<MaybeObject> object,
1865 : TNode<Object> value) {
1866 : return WordNotEqual(WordAnd(BitcastMaybeObjectToWord(object),
1867 10752 : IntPtrConstant(~kWeakHeapObjectMask)),
1868 10752 : BitcastTaggedToWord(value));
1869 : }
1870 :
1871 2240 : TNode<MaybeObject> CodeStubAssembler::MakeWeak(TNode<HeapObject> value) {
1872 : return ReinterpretCast<MaybeObject>(BitcastWordToTagged(
1873 11200 : WordOr(BitcastTaggedToWord(value), IntPtrConstant(kWeakHeapObjectTag))));
1874 : }
1875 :
// Per-array-type length accessors used by the bounds assert in the generic
// LoadArrayElement below. DescriptorArray has no plain length field; its
// element count is number-of-descriptors * entry-size.
template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<FixedArray> array) {
  return LoadAndUntagFixedArrayBaseLength(array);
}

template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<WeakFixedArray> array) {
  return LoadAndUntagWeakFixedArrayLength(array);
}

template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<PropertyArray> array) {
  return LoadPropertyArrayLength(array);
}

template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
    TNode<DescriptorArray> array) {
  return IntPtrMul(ChangeInt32ToIntPtr(LoadNumberOfDescriptors(array)),
                   IntPtrConstant(DescriptorArray::kEntrySize));
}

// TransitionArray shares WeakFixedArray's length layout.
template <>
TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
    TNode<TransitionArray> array) {
  return LoadAndUntagWeakFixedArrayLength(array);
}
1903 :
// Generic tagged-element load for array-like objects whose header occupies
// |array_header_size| bytes. |additional_offset| is a constant byte offset
// (tagged-size aligned) added to the element offset. Bounds are only checked
// by debug asserts; |needs_poisoning| controls speculation poisoning on the
// load.
template <typename Array>
TNode<MaybeObject> CodeStubAssembler::LoadArrayElement(
    TNode<Array> array, int array_header_size, Node* index_node,
    int additional_offset, ParameterMode parameter_mode,
    LoadSensitivity needs_poisoning) {
  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
                       ParameterToIntPtr(index_node, parameter_mode),
                       IntPtrConstant(0)));
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  // Fold the header size and heap-object tag into the constant part of the
  // offset computation.
  int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
  TNode<IntPtrT> offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
                                                 parameter_mode, header_size);
  CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(array),
                                    array_header_size));
  return UncheckedCast<MaybeObject>(
      Load(MachineType::AnyTagged(), array, offset, needs_poisoning));
}
1921 :
// Explicit instantiations of LoadArrayElement for the array types referenced
// from other translation units.
template TNode<MaybeObject>
CodeStubAssembler::LoadArrayElement<TransitionArray>(TNode<TransitionArray>,
                                                     int, Node*, int,
                                                     ParameterMode,
                                                     LoadSensitivity);

template TNode<MaybeObject>
CodeStubAssembler::LoadArrayElement<DescriptorArray>(TNode<DescriptorArray>,
                                                     int, Node*, int,
                                                     ParameterMode,
                                                     LoadSensitivity);
1933 :
// CSA_CHECKs that |index| plus |additional_offset| (a constant byte offset,
// converted to elements) is below |array|'s length. No-op when
// FLAG_fixed_array_bounds_checks is disabled.
void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
                                              Node* index,
                                              int additional_offset,
                                              ParameterMode parameter_mode) {
  if (!FLAG_fixed_array_bounds_checks) return;
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  if (parameter_mode == ParameterMode::SMI_PARAMETERS) {
    TNode<Smi> effective_index;
    Smi constant_index;
    bool index_is_constant = ToSmiConstant(index, &constant_index);
    if (index_is_constant) {
      // Fold constant index and offset at compile time.
      effective_index = SmiConstant(Smi::ToInt(constant_index) +
                                    additional_offset / kTaggedSize);
    } else if (additional_offset != 0) {
      effective_index =
          SmiAdd(CAST(index), SmiConstant(additional_offset / kTaggedSize));
    } else {
      effective_index = CAST(index);
    }
    // SmiBelow is an unsigned comparison, so it also rejects negative
    // indices.
    CSA_CHECK(this, SmiBelow(effective_index, LoadFixedArrayBaseLength(array)));
  } else {
    // IntPtrAdd does constant-folding automatically.
    TNode<IntPtrT> effective_index =
        IntPtrAdd(UncheckedCast<IntPtrT>(index),
                  IntPtrConstant(additional_offset / kTaggedSize));
    CSA_CHECK(this, UintPtrLessThan(effective_index,
                                    LoadAndUntagFixedArrayBaseLength(array)));
  }
}
1963 :
1964 49149 : TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
1965 : TNode<FixedArray> object, Node* index_node, int additional_offset,
1966 : ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
1967 : CSA_ASSERT(this, IsFixedArraySubclass(object));
1968 : CSA_ASSERT(this, IsNotWeakFixedArraySubclass(object));
1969 : FixedArrayBoundsCheck(object, index_node, additional_offset, parameter_mode);
1970 : TNode<MaybeObject> element =
1971 : LoadArrayElement(object, FixedArray::kHeaderSize, index_node,
1972 108021 : additional_offset, parameter_mode, needs_poisoning);
1973 49149 : return CAST(element);
1974 : }
1975 :
1976 1573 : TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
1977 : TNode<PropertyArray> object, SloppyTNode<IntPtrT> index) {
1978 : int additional_offset = 0;
1979 : ParameterMode parameter_mode = INTPTR_PARAMETERS;
1980 : LoadSensitivity needs_poisoning = LoadSensitivity::kSafe;
1981 1573 : return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
1982 : additional_offset, parameter_mode,
1983 : needs_poisoning));
1984 : }
1985 :
1986 56 : TNode<IntPtrT> CodeStubAssembler::LoadPropertyArrayLength(
1987 : TNode<PropertyArray> object) {
1988 : TNode<IntPtrT> value =
1989 56 : LoadAndUntagObjectField(object, PropertyArray::kLengthAndHashOffset);
1990 56 : return Signed(DecodeWord<PropertyArray::LengthField>(value));
1991 : }
1992 :
1993 7448 : TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayBackingStore(
1994 : TNode<FixedTypedArrayBase> typed_array) {
1995 : // Backing store = external_pointer + base_pointer.
1996 : Node* external_pointer =
1997 : LoadObjectField(typed_array, FixedTypedArrayBase::kExternalPointerOffset,
1998 7448 : MachineType::Pointer());
1999 : Node* base_pointer =
2000 : LoadObjectField(typed_array, FixedTypedArrayBase::kBasePointerOffset);
2001 : return UncheckedCast<RawPtrT>(
2002 14896 : IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer)));
2003 : }
2004 :
// Loads a signed 64-bit element from a BigInt64Array backing store and boxes
// it as a BigInt. 64-bit targets read a single word; 32-bit targets read a
// (low, high) pair whose memory order depends on target endianness.
Node* CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
    Node* data_pointer, Node* offset) {
  if (Is64()) {
    TNode<IntPtrT> value = UncheckedCast<IntPtrT>(
        Load(MachineType::IntPtr(), data_pointer, offset));
    return BigIntFromInt64(value);
  } else {
    DCHECK(!Is64());
#if defined(V8_TARGET_BIG_ENDIAN)
    TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
        Load(MachineType::UintPtr(), data_pointer, offset));
    TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
        Load(MachineType::UintPtr(), data_pointer,
             Int32Add(offset, Int32Constant(kSystemPointerSize))));
#else
    TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
        Load(MachineType::UintPtr(), data_pointer, offset));
    TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
        Load(MachineType::UintPtr(), data_pointer,
             Int32Add(offset, Int32Constant(kSystemPointerSize))));
#endif
    return BigIntFromInt32Pair(low, high);
  }
}
2029 :
// Boxes a signed 64-bit value, given as a (low, high) pair of 32-bit words,
// into a canonical BigInt (32-bit targets only): zero digits for 0, one digit
// when the magnitude fits in 32 bits, two digits otherwise; the sign lives in
// the bitfield.
TNode<BigInt> CodeStubAssembler::BigIntFromInt32Pair(TNode<IntPtrT> low,
                                                     TNode<IntPtrT> high) {
  DCHECK(!Is64());
  TVARIABLE(BigInt, var_result);
  TVARIABLE(Word32T, var_sign, Int32Constant(BigInt::SignBits::encode(false)));
  TVARIABLE(IntPtrT, var_high, high);
  TVARIABLE(IntPtrT, var_low, low);
  Label high_zero(this), negative(this), allocate_one_digit(this),
      allocate_two_digits(this), if_zero(this), done(this);

  GotoIf(WordEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
  Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
         &allocate_two_digits);

  BIND(&high_zero);
  Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
         &allocate_one_digit);

  BIND(&negative);
  {
    var_sign = Int32Constant(BigInt::SignBits::encode(true));
    // We must negate the value by computing "0 - (high|low)", performing
    // both parts of the subtraction separately and manually taking care
    // of the carry bit (which is 1 iff low != 0).
    var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
    Label carry(this), no_carry(this);
    Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
    BIND(&carry);
    var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
    Goto(&no_carry);
    BIND(&no_carry);
    var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
    // var_high was non-zero going into this block, but subtracting the
    // carry bit from it could bring us back onto the "one digit" path.
    Branch(WordEqual(var_high.value(), IntPtrConstant(0)), &allocate_one_digit,
           &allocate_two_digits);
  }

  BIND(&allocate_one_digit);
  {
    var_result = AllocateRawBigInt(IntPtrConstant(1));
    StoreBigIntBitfield(var_result.value(),
                        Word32Or(var_sign.value(),
                                 Int32Constant(BigInt::LengthBits::encode(1))));
    StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
    Goto(&done);
  }

  BIND(&allocate_two_digits);
  {
    var_result = AllocateRawBigInt(IntPtrConstant(2));
    StoreBigIntBitfield(var_result.value(),
                        Word32Or(var_sign.value(),
                                 Int32Constant(BigInt::LengthBits::encode(2))));
    StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
    StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
    Goto(&done);
  }

  BIND(&if_zero);
  // Canonical zero is a BigInt with no digits.
  var_result = AllocateBigInt(IntPtrConstant(0));
  Goto(&done);

  BIND(&done);
  return var_result.value();
}
2096 :
// Boxes a signed 64-bit value into a BigInt (64-bit targets only). The single
// digit stores the magnitude and the sign is encoded in the bitfield; zero
// yields a zero-length BigInt.
TNode<BigInt> CodeStubAssembler::BigIntFromInt64(TNode<IntPtrT> value) {
  DCHECK(Is64());
  TVARIABLE(BigInt, var_result);
  Label done(this), if_positive(this), if_negative(this), if_zero(this);
  GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
  var_result = AllocateRawBigInt(IntPtrConstant(1));
  Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
         &if_negative);

  BIND(&if_positive);
  {
    StoreBigIntBitfield(var_result.value(),
                        Int32Constant(BigInt::SignBits::encode(false) |
                                      BigInt::LengthBits::encode(1)));
    StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
    Goto(&done);
  }

  BIND(&if_negative);
  {
    // Digits store the magnitude, so negate the value for the digit and
    // record the sign separately.
    StoreBigIntBitfield(var_result.value(),
                        Int32Constant(BigInt::SignBits::encode(true) |
                                      BigInt::LengthBits::encode(1)));
    StoreBigIntDigit(var_result.value(), 0,
                     Unsigned(IntPtrSub(IntPtrConstant(0), value)));
    Goto(&done);
  }

  BIND(&if_zero);
  {
    // Canonical zero is a BigInt with no digits.
    var_result = AllocateBigInt(IntPtrConstant(0));
    Goto(&done);
  }

  BIND(&done);
  return var_result.value();
}
2134 :
2135 784 : Node* CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
2136 : Node* data_pointer, Node* offset) {
2137 1568 : Label if_zero(this), done(this);
2138 784 : if (Is64()) {
2139 : TNode<UintPtrT> value = UncheckedCast<UintPtrT>(
2140 784 : Load(MachineType::UintPtr(), data_pointer, offset));
2141 1568 : return BigIntFromUint64(value);
2142 : } else {
2143 : DCHECK(!Is64());
2144 : #if defined(V8_TARGET_BIG_ENDIAN)
2145 : TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2146 : Load(MachineType::UintPtr(), data_pointer, offset));
2147 : TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2148 : Load(MachineType::UintPtr(), data_pointer,
2149 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2150 : #else
2151 : TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2152 0 : Load(MachineType::UintPtr(), data_pointer, offset));
2153 : TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2154 : Load(MachineType::UintPtr(), data_pointer,
2155 0 : Int32Add(offset, Int32Constant(kSystemPointerSize))));
2156 : #endif
2157 0 : return BigIntFromUint32Pair(low, high);
2158 784 : }
2159 : }
2160 :
// Boxes an unsigned 64-bit value, given as a (low, high) pair of 32-bit
// words, into a canonical BigInt (32-bit targets only): zero digits for 0,
// one digit when high is 0, two digits otherwise.
TNode<BigInt> CodeStubAssembler::BigIntFromUint32Pair(TNode<UintPtrT> low,
                                                      TNode<UintPtrT> high) {
  DCHECK(!Is64());
  TVARIABLE(BigInt, var_result);
  Label high_zero(this), if_zero(this), done(this);

  GotoIf(WordEqual(high, IntPtrConstant(0)), &high_zero);
  var_result = AllocateBigInt(IntPtrConstant(2));
  StoreBigIntDigit(var_result.value(), 0, low);
  StoreBigIntDigit(var_result.value(), 1, high);
  Goto(&done);

  BIND(&high_zero);
  GotoIf(WordEqual(low, IntPtrConstant(0)), &if_zero);
  var_result = AllocateBigInt(IntPtrConstant(1));
  StoreBigIntDigit(var_result.value(), 0, low);
  Goto(&done);

  BIND(&if_zero);
  // Canonical zero is a BigInt with no digits.
  var_result = AllocateBigInt(IntPtrConstant(0));
  Goto(&done);

  BIND(&done);
  return var_result.value();
}
2186 :
// Boxes an unsigned 64-bit value into a BigInt (64-bit targets only): one
// digit for non-zero values, a zero-length BigInt for 0.
TNode<BigInt> CodeStubAssembler::BigIntFromUint64(TNode<UintPtrT> value) {
  DCHECK(Is64());
  TVARIABLE(BigInt, var_result);
  Label done(this), if_zero(this);
  GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
  var_result = AllocateBigInt(IntPtrConstant(1));
  StoreBigIntDigit(var_result.value(), 0, value);
  Goto(&done);

  BIND(&if_zero);
  // Canonical zero is a BigInt with no digits.
  var_result = AllocateBigInt(IntPtrConstant(0));
  Goto(&done);
  BIND(&done);
  return var_result.value();
}
2202 :
// Loads the element at |index_node| from a typed-array backing store and
// boxes it for |elements_kind|: Smi for 8/16-bit kinds, Smi-or-HeapNumber
// for 32-bit integer kinds, HeapNumber for float kinds, BigInt for 64-bit
// kinds. |elements_kind| must be known at stub-generation time.
Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
    Node* data_pointer, Node* index_node, ElementsKind elements_kind,
    ParameterMode parameter_mode) {
  Node* offset =
      ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
  switch (elements_kind) {
    case UINT8_ELEMENTS: /* fall through */
    case UINT8_CLAMPED_ELEMENTS:
      return SmiFromInt32(Load(MachineType::Uint8(), data_pointer, offset));
    case INT8_ELEMENTS:
      return SmiFromInt32(Load(MachineType::Int8(), data_pointer, offset));
    case UINT16_ELEMENTS:
      return SmiFromInt32(Load(MachineType::Uint16(), data_pointer, offset));
    case INT16_ELEMENTS:
      return SmiFromInt32(Load(MachineType::Int16(), data_pointer, offset));
    case UINT32_ELEMENTS:
      // 32-bit values may not fit in a Smi, so these helpers allocate a
      // HeapNumber when needed.
      return ChangeUint32ToTagged(
          Load(MachineType::Uint32(), data_pointer, offset));
    case INT32_ELEMENTS:
      return ChangeInt32ToTagged(
          Load(MachineType::Int32(), data_pointer, offset));
    case FLOAT32_ELEMENTS:
      return AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(
          Load(MachineType::Float32(), data_pointer, offset)));
    case FLOAT64_ELEMENTS:
      return AllocateHeapNumberWithValue(
          Load(MachineType::Float64(), data_pointer, offset));
    case BIGINT64_ELEMENTS:
      return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
    case BIGUINT64_ELEMENTS:
      return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
    default:
      UNREACHABLE();
  }
}
2238 :
// Runtime-dispatched variant of the loader above: |elements_kind| is only
// known when the generated code runs, so Switch over every typed-array kind
// and delegate to the compile-time-kind overload. An unknown kind is
// unreachable.
TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
    TNode<WordT> data_pointer, TNode<Smi> index, TNode<Int32T> elements_kind) {
  TVARIABLE(Numeric, var_result);
  Label done(this), if_unknown_type(this, Label::kDeferred);
  int32_t elements_kinds[] = {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) TYPE##_ELEMENTS,
      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  };

// One label per typed-array kind, generated from the master list so the
// three tables below stay in sync.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) Label if_##type##array(this);
  TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

  Label* elements_kind_labels[] = {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) &if_##type##array,
      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  };
  STATIC_ASSERT(arraysize(elements_kinds) == arraysize(elements_kind_labels));

  Switch(elements_kind, &if_unknown_type, elements_kinds, elements_kind_labels,
         arraysize(elements_kinds));

  BIND(&if_unknown_type);
  Unreachable();

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype)               \
  BIND(&if_##type##array);                                      \
  {                                                             \
    var_result = CAST(LoadFixedTypedArrayElementAsTagged(       \
        data_pointer, index, TYPE##_ELEMENTS, SMI_PARAMETERS)); \
    Goto(&done);                                                \
  }
  TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

  BIND(&done);
  return var_result.value();
}
2279 :
// Stores the tagged |value| into a typed array at |index_node|, converting it
// to the raw representation required by |elements_kind|. The caller must have
// already converted |value| to the appropriate tagged type (Smi, Number or
// BigInt) for the kind.
void CodeStubAssembler::StoreFixedTypedArrayElementFromTagged(
    TNode<Context> context, TNode<FixedTypedArrayBase> elements,
    TNode<Object> index_node, TNode<Object> value, ElementsKind elements_kind,
    ParameterMode parameter_mode) {
  TNode<RawPtrT> data_pointer = LoadFixedTypedArrayBackingStore(elements);
  switch (elements_kind) {
    case UINT8_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
    case INT8_ELEMENTS:
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
      // Small integer kinds: the value must already be a Smi.
      StoreElement(data_pointer, elements_kind, index_node,
                   SmiToInt32(CAST(value)), parameter_mode);
      break;
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
      StoreElement(data_pointer, elements_kind, index_node,
                   TruncateTaggedToWord32(context, value), parameter_mode);
      break;
    case FLOAT32_ELEMENTS:
      StoreElement(data_pointer, elements_kind, index_node,
                   TruncateFloat64ToFloat32(LoadHeapNumberValue(CAST(value))),
                   parameter_mode);
      break;
    case FLOAT64_ELEMENTS:
      StoreElement(data_pointer, elements_kind, index_node,
                   LoadHeapNumberValue(CAST(value)), parameter_mode);
      break;
    case BIGUINT64_ELEMENTS:
    case BIGINT64_ELEMENTS: {
      // BigInt stores need the raw byte offset rather than an element index.
      TNode<IntPtrT> offset =
          ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
      EmitBigTypedArrayElementStore(elements, data_pointer, offset,
                                    CAST(value));
      break;
    }
    default:
      UNREACHABLE();
  }
}
2320 :
// Loads slot |slot_index_node| from a FeedbackVector. Feedback slots may hold
// weak references, hence the MaybeObject result.
TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
    Node* object, Node* slot_index_node, int additional_offset,
    ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
  // Fold the slots-area offset, caller offset and heap-object tag into the
  // constant part of the element offset.
  int32_t header_size =
      FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  CSA_SLOW_ASSERT(
      this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
                             FeedbackVector::kHeaderSize));
  return UncheckedCast<MaybeObject>(
      Load(MachineType::AnyTagged(), object, offset));
}
2336 :
// Loads a Smi element from |array| directly as an int32, avoiding the untag
// shift: when Smis occupy the upper 32 bits of the tagged word, only the
// 4 payload bytes are read (shifted by 4 on little-endian targets).
template <typename Array>
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
    TNode<Array> object, int array_header_size, Node* index_node,
    int additional_offset, ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  int endian_correction = 0;
#if V8_TARGET_LITTLE_ENDIAN
  if (SmiValuesAre32Bits()) endian_correction = 4;
#endif
  int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
                        endian_correction;
  Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(object),
                                    array_header_size + endian_correction));
  if (SmiValuesAre32Bits()) {
    return UncheckedCast<Int32T>(Load(MachineType::Int32(), object, offset));
  } else {
    // 31-bit Smis: load the full tagged word and untag normally.
    return SmiToInt32(Load(MachineType::AnyTagged(), object, offset));
  }
}
2359 :
// FixedArray-specific wrapper around LoadAndUntagToWord32ArrayElement.
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
    TNode<FixedArray> object, Node* index_node, int additional_offset,
    ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, IsFixedArraySubclass(object));
  return LoadAndUntagToWord32ArrayElement(object, FixedArray::kHeaderSize,
                                          index_node, additional_offset,
                                          parameter_mode);
}
2368 :
// Loads element |index| from a WeakFixedArray; the result may be a weak or
// cleared reference, hence MaybeObject.
TNode<MaybeObject> CodeStubAssembler::LoadWeakFixedArrayElement(
    TNode<WeakFixedArray> object, Node* index, int additional_offset,
    ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
  return LoadArrayElement(object, WeakFixedArray::kHeaderSize, index,
                          additional_offset, parameter_mode, needs_poisoning);
}
2375 :
// Loads the double at |index_node| from a FixedDoubleArray. When |if_hole| is
// non-null, jumps there if the slot holds the hole NaN pattern; otherwise the
// caller asserts hole-freedom via LoadDoubleWithHoleCheck's machine type.
TNode<Float64T> CodeStubAssembler::LoadFixedDoubleArrayElement(
    SloppyTNode<FixedDoubleArray> object, Node* index_node,
    MachineType machine_type, int additional_offset,
    ParameterMode parameter_mode, Label* if_hole) {
  CSA_ASSERT(this, IsFixedDoubleArray(object));
  DCHECK(IsAligned(additional_offset, kTaggedSize));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
  int32_t header_size =
      FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
  TNode<IntPtrT> offset = ElementOffsetFromIndex(
      index_node, HOLEY_DOUBLE_ELEMENTS, parameter_mode, header_size);
  CSA_ASSERT(this, IsOffsetInBounds(
                       offset, LoadAndUntagFixedArrayBaseLength(object),
                       FixedDoubleArray::kHeaderSize, HOLEY_DOUBLE_ELEMENTS));
  return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
}
2392 :
// Loads element |index| from |elements| as a tagged value, dispatching on
// the runtime |elements_kind|:
// - packed SMI/object kinds: plain load;
// - holey SMI/object kinds: load, jumping to |if_hole| on the-hole;
// - packed doubles: load and box into a fresh HeapNumber;
// - holey doubles: as above, jumping to |if_hole| on the hole NaN;
// - any other kind is treated as dictionary elements and handled via
// BasicLoadNumberDictionaryElement (which may jump to |if_accessor| or
// |if_hole|).
2393 56 : TNode<Object> CodeStubAssembler::LoadFixedArrayBaseElementAsTagged(
2394 : TNode<FixedArrayBase> elements, TNode<IntPtrT> index,
2395 : TNode<Int32T> elements_kind, Label* if_accessor, Label* if_hole) {
2396 56 : TVARIABLE(Object, var_result);
2397 56 : Label done(this), if_packed(this), if_holey(this), if_packed_double(this),
2398 56 : if_holey_double(this), if_dictionary(this, Label::kDeferred);
2399 :
2400 : int32_t kinds[] = {// Handled by if_packed.
2401 : PACKED_SMI_ELEMENTS, PACKED_ELEMENTS,
2402 : // Handled by if_holey.
2403 : HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS,
2404 : // Handled by if_packed_double.
2405 : PACKED_DOUBLE_ELEMENTS,
2406 : // Handled by if_holey_double.
2407 56 : HOLEY_DOUBLE_ELEMENTS};
2408 : Label* labels[] = {// PACKED_{SMI,}_ELEMENTS
2409 : &if_packed, &if_packed,
2410 : // HOLEY_{SMI,}_ELEMENTS
2411 : &if_holey, &if_holey,
2412 : // PACKED_DOUBLE_ELEMENTS
2413 : &if_packed_double,
2414 : // HOLEY_DOUBLE_ELEMENTS
2415 56 : &if_holey_double};
2416 56 : Switch(elements_kind, &if_dictionary, kinds, labels, arraysize(kinds));
2417 :
2418 : BIND(&if_packed);
2419 : {
2420 56 : var_result = LoadFixedArrayElement(CAST(elements), index, 0);
2421 56 : Goto(&done);
2422 : }
2423 :
2424 : BIND(&if_holey);
2425 : {
2426 56 : var_result = LoadFixedArrayElement(CAST(elements), index);
2427 56 : Branch(WordEqual(var_result.value(), TheHoleConstant()), if_hole, &done);
2428 : }
2429 :
2430 : BIND(&if_packed_double);
2431 : {
// Doubles are unboxed in the backing store; box into a HeapNumber.
2432 112 : var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
2433 112 : CAST(elements), index, MachineType::Float64()));
2434 56 : Goto(&done);
2435 : }
2436 :
2437 : BIND(&if_holey_double);
2438 : {
2439 112 : var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
2440 : CAST(elements), index, MachineType::Float64(), 0, INTPTR_PARAMETERS,
2441 112 : if_hole));
2442 56 : Goto(&done);
2443 : }
2444 :
2445 : BIND(&if_dictionary);
2446 : {
2447 : CSA_ASSERT(this, IsDictionaryElementsKind(elements_kind));
2448 56 : var_result = BasicLoadNumberDictionaryElement(CAST(elements), index,
2449 : if_accessor, if_hole);
2450 56 : Goto(&done);
2451 : }
2452 :
2453 : BIND(&done);
2454 56 : return var_result.value();
2455 : }
2456 :
// Loads a raw double at |base|+|offset|. If |if_hole| is non-null, first
// checks for the hole NaN bit pattern: on 64-bit targets the full word is
// compared against kHoleNanInt64, otherwise only the upper (exponent) word
// is compared against kHoleNanUpper32. If |machine_type| is None the caller
// only wanted the hole check, so an empty TNode is returned and no value
// load is emitted.
2457 5676 : TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
2458 : SloppyTNode<Object> base, SloppyTNode<IntPtrT> offset, Label* if_hole,
2459 : MachineType machine_type) {
2460 5676 : if (if_hole) {
2461 : // TODO(ishell): Compare only the upper part for the hole once the
2462 : // compiler is able to fold addition of already complex |offset| with
2463 : // |kIeeeDoubleExponentWordOffset| into one addressing mode.
2464 5172 : if (Is64()) {
2465 5172 : Node* element = Load(MachineType::Uint64(), base, offset);
2466 15516 : GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
2467 : } else {
2468 : Node* element_upper = Load(
2469 : MachineType::Uint32(), base,
2470 0 : IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
2471 0 : GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
2472 0 : if_hole);
2473 : }
2474 : }
2475 5676 : if (machine_type.IsNone()) {
2476 : // This means the actual value is not needed.
2477 1069 : return TNode<Float64T>();
2478 : }
2479 4607 : return UncheckedCast<Float64T>(Load(machine_type, base, offset));
2480 : }
2481 :
// Loads the context slot at compile-time-constant |slot_index|.
2482 75017 : TNode<Object> CodeStubAssembler::LoadContextElement(
2483 : SloppyTNode<Context> context, int slot_index) {
2484 : int offset = Context::SlotOffset(slot_index);
2485 : return UncheckedCast<Object>(
2486 150034 : Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)));
2487 : }
2488 :
// Loads the context slot at a dynamic IntPtr |slot_index|; the byte offset
// is derived via the tagged-element scaling of ElementOffsetFromIndex.
2489 5274 : TNode<Object> CodeStubAssembler::LoadContextElement(
2490 : SloppyTNode<Context> context, SloppyTNode<IntPtrT> slot_index) {
2491 : Node* offset = ElementOffsetFromIndex(
2492 10548 : slot_index, PACKED_ELEMENTS, INTPTR_PARAMETERS, Context::SlotOffset(0));
2493 5274 : return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2494 : }
2495 :
// Loads the context slot at a dynamic Smi |slot_index| (SMI_PARAMETERS
// variant of the IntPtr overload above).
2496 56 : TNode<Object> CodeStubAssembler::LoadContextElement(TNode<Context> context,
2497 : TNode<Smi> slot_index) {
2498 : Node* offset = ElementOffsetFromIndex(slot_index, PACKED_ELEMENTS,
2499 112 : SMI_PARAMETERS, Context::SlotOffset(0));
2500 56 : return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2501 : }
2502 :
// Stores |value| into the context slot at compile-time-constant
// |slot_index|, with a full write barrier (Store).
2503 112 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2504 : int slot_index,
2505 : SloppyTNode<Object> value) {
2506 : int offset = Context::SlotOffset(slot_index);
2507 224 : Store(context, IntPtrConstant(offset), value);
2508 112 : }
2509 :
// Stores |value| into the context slot at a dynamic IntPtr |slot_index|,
// with a full write barrier.
2510 1008 : void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2511 : SloppyTNode<IntPtrT> slot_index,
2512 : SloppyTNode<Object> value) {
2513 : Node* offset = IntPtrAdd(TimesTaggedSize(slot_index),
2514 1008 : IntPtrConstant(Context::SlotOffset(0)));
2515 1008 : Store(context, offset, value);
2516 1008 : }
2517 :
// Stores |value| into a constant context slot WITHOUT a write barrier.
// Caller must guarantee the barrier is unnecessary (e.g. |value| is a Smi
// or the context is in new space).
2518 6906 : void CodeStubAssembler::StoreContextElementNoWriteBarrier(
2519 : SloppyTNode<Context> context, int slot_index, SloppyTNode<Object> value) {
2520 : int offset = Context::SlotOffset(slot_index);
2521 : StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
2522 13812 : IntPtrConstant(offset), value);
2523 6906 : }
2524 :
// Returns the native context reachable from any |context| via the
// NATIVE_CONTEXT_INDEX slot.
2525 21849 : TNode<Context> CodeStubAssembler::LoadNativeContext(
2526 : SloppyTNode<Context> context) {
2527 : return UncheckedCast<Context>(
2528 30473 : LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX));
2529 : }
2530 :
// Walks the context chain from |context| via PREVIOUS_INDEX until a context
// whose map is the module-context map is found, and returns it. The loop
// asserts it never reaches the native context, i.e. callers must only use
// this when a module context is known to be on the chain.
2531 168 : TNode<Context> CodeStubAssembler::LoadModuleContext(
2532 : SloppyTNode<Context> context) {
2533 336 : Node* module_map = LoadRoot(RootIndex::kModuleContextMap);
2534 168 : Variable cur_context(this, MachineRepresentation::kTaggedPointer);
2535 168 : cur_context.Bind(context);
2536 :
2537 168 : Label context_found(this);
2538 :
2539 168 : Variable* context_search_loop_variables[1] = {&cur_context};
2540 336 : Label context_search(this, 1, context_search_loop_variables);
2541 :
2542 : // Loop until cur_context->map() is module_map.
2543 168 : Goto(&context_search);
2544 : BIND(&context_search);
2545 : {
2546 : CSA_ASSERT(this, Word32BinaryNot(IsNativeContext(cur_context.value())));
2547 504 : GotoIf(WordEqual(LoadMap(cur_context.value()), module_map), &context_found);
2548 :
2549 : cur_context.Bind(
2550 504 : LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
2551 168 : Goto(&context_search);
2552 : }
2553 :
2554 : BIND(&context_found);
2555 336 : return UncheckedCast<Context>(cur_context.value());
2556 : }
2557 :
// Returns the JSArray map for a dynamic elements |kind| by indexing the
// native context's JS-array-map slots starting at FIRST_JS_ARRAY_MAP_SLOT.
// |kind| must be a fast elements kind (asserted).
2558 677 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2559 : SloppyTNode<Int32T> kind, SloppyTNode<Context> native_context) {
2560 : CSA_ASSERT(this, IsFastElementsKind(kind));
2561 : CSA_ASSERT(this, IsNativeContext(native_context));
2562 : Node* offset = IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
2563 1354 : ChangeInt32ToIntPtr(kind));
2564 677 : return UncheckedCast<Map>(LoadContextElement(native_context, offset));
2565 : }
2566 :
// Compile-time-constant |kind| variant: looks up the JSArray map via
// Context::ArrayMapIndex.
2567 4816 : TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2568 : ElementsKind kind, SloppyTNode<Context> native_context) {
2569 : CSA_ASSERT(this, IsNativeContext(native_context));
2570 : return UncheckedCast<Map>(
2571 4816 : LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
2572 : }
2573 :
// Returns true if |function|'s SharedFunctionInfo kind is any of the four
// generator flavors: (async) generator function or (async) concise
// generator method. The kind is decoded from the SFI flags word.
2574 3981 : TNode<BoolT> CodeStubAssembler::IsGeneratorFunction(
2575 : TNode<JSFunction> function) {
2576 : TNode<SharedFunctionInfo> const shared_function_info =
2577 : CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
2578 :
2579 : TNode<Uint32T> const function_kind =
2580 : DecodeWord32<SharedFunctionInfo::FunctionKindBits>(LoadObjectField(
2581 : shared_function_info, SharedFunctionInfo::kFlagsOffset,
2582 7962 : MachineType::Uint32()));
2583 :
2584 : return TNode<BoolT>::UncheckedCast(Word32Or(
2585 : Word32Or(
2586 : Word32Or(
2587 : Word32Equal(function_kind,
2588 7962 : Int32Constant(FunctionKind::kAsyncGeneratorFunction)),
2589 : Word32Equal(
2590 : function_kind,
2591 19905 : Int32Constant(FunctionKind::kAsyncConciseGeneratorMethod))),
2592 : Word32Equal(function_kind,
2593 19905 : Int32Constant(FunctionKind::kGeneratorFunction))),
2594 : Word32Equal(function_kind,
2595 23886 : Int32Constant(FunctionKind::kConciseGeneratorMethod))));
2596 : }
2597 :
// Returns true iff |function| has a "prototype" property, i.e.
// (map has a prototype slot AND is a constructor) OR the function is a
// generator (generators get a prototype even though not constructors).
2598 3981 : TNode<BoolT> CodeStubAssembler::HasPrototypeProperty(TNode<JSFunction> function,
2599 : TNode<Map> map) {
2600 : // (has_prototype_slot() && IsConstructor()) ||
2601 : // IsGeneratorFunction(shared()->kind())
2602 : uint32_t mask =
2603 : Map::HasPrototypeSlotBit::kMask | Map::IsConstructorBit::kMask;
2604 : return TNode<BoolT>::UncheckedCast(
2605 7962 : Word32Or(IsAllSetWord32(LoadMapBitField(map), mask),
2606 19905 : IsGeneratorFunction(function)));
2607 : }
2608 :
// Jumps to |runtime| when the function's prototype cannot be read directly:
// either there is no prototype property at all, or the map has a
// non-instance prototype.
2609 3981 : void CodeStubAssembler::GotoIfPrototypeRequiresRuntimeLookup(
2610 : TNode<JSFunction> function, TNode<Map> map, Label* runtime) {
2611 : // !has_prototype_property() || has_non_instance_prototype()
2612 7962 : GotoIfNot(HasPrototypeProperty(function, map), runtime);
2613 7962 : GotoIf(IsSetWord32<Map::HasNonInstancePrototypeBit>(LoadMapBitField(map)),
2614 3981 : runtime);
2615 3981 : }
2616 :
// Reads |function|'s prototype from its prototype-or-initial-map slot.
// Bails out to |if_bailout| if the slot holds the hole (lazily-created
// prototype). If the slot holds the initial map, the prototype is read off
// that map instead.
2617 3925 : Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function,
2618 : Label* if_bailout) {
2619 : CSA_ASSERT(this, TaggedIsNotSmi(function));
2620 : CSA_ASSERT(this, IsJSFunction(function));
2621 : CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
2622 : CSA_ASSERT(this, IsClearWord32<Map::HasNonInstancePrototypeBit>(
2623 : LoadMapBitField(LoadMap(function))));
2624 : Node* proto_or_map =
2625 : LoadObjectField(function, JSFunction::kPrototypeOrInitialMapOffset);
2626 7850 : GotoIf(IsTheHole(proto_or_map), if_bailout);
2627 :
2628 3925 : VARIABLE(var_result, MachineRepresentation::kTagged, proto_or_map);
2629 3925 : Label done(this, &var_result);
2630 7850 : GotoIfNot(IsMap(proto_or_map), &done);
2631 :
2632 7850 : var_result.Bind(LoadMapPrototype(proto_or_map));
2633 3925 : Goto(&done);
2634 :
2635 : BIND(&done);
2636 7850 : return var_result.value();
2637 : }
2638 :
// Returns the BytecodeArray from |shared|'s function-data slot. When the
// function data is an InterpreterData (debug/interpreted-entry trampoline
// case), the bytecode array is unwrapped from it; otherwise the function
// data itself is assumed to be the BytecodeArray (checked by the CAST).
2639 112 : TNode<BytecodeArray> CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray(
2640 : SloppyTNode<SharedFunctionInfo> shared) {
2641 : Node* function_data =
2642 : LoadObjectField(shared, SharedFunctionInfo::kFunctionDataOffset);
2643 :
2644 112 : VARIABLE(var_result, MachineRepresentation::kTagged, function_data);
2645 112 : Label done(this, &var_result);
2646 :
2647 224 : GotoIfNot(HasInstanceType(function_data, INTERPRETER_DATA_TYPE), &done);
2648 : Node* bytecode_array =
2649 : LoadObjectField(function_data, InterpreterData::kBytecodeArrayOffset);
2650 112 : var_result.Bind(bytecode_array);
2651 112 : Goto(&done);
2652 :
2653 : BIND(&done);
2654 224 : return CAST(var_result.value());
2655 : }
2656 :
// Stores a single byte into |object| at field |offset| (untagging the
// pointer by subtracting kHeapObjectTag), without a write barrier.
2657 30 : void CodeStubAssembler::StoreObjectByteNoWriteBarrier(TNode<HeapObject> object,
2658 : int offset,
2659 : TNode<Word32T> value) {
2660 : StoreNoWriteBarrier(MachineRepresentation::kWord8, object,
2661 60 : IntPtrConstant(offset - kHeapObjectTag), value);
2662 30 : }
2663 :
// Writes the float64 payload of a HeapNumber. No write barrier needed for
// a raw float64 field.
2664 784 : void CodeStubAssembler::StoreHeapNumberValue(SloppyTNode<HeapNumber> object,
2665 : SloppyTNode<Float64T> value) {
2666 : StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
2667 : MachineRepresentation::kFloat64);
2668 784 : }
2669 :
// Writes the float64 payload of a MutableHeapNumber (same as
// StoreHeapNumberValue, for the mutable variant).
2670 0 : void CodeStubAssembler::StoreMutableHeapNumberValue(
2671 : SloppyTNode<MutableHeapNumber> object, SloppyTNode<Float64T> value) {
2672 : StoreObjectFieldNoWriteBarrier(object, MutableHeapNumber::kValueOffset, value,
2673 : MachineRepresentation::kFloat64);
2674 0 : }
2675 :
// Stores a tagged |value| into |object| at constant |offset| with a full
// write barrier. The map field must be written via StoreMap instead
// (DCHECK below).
2676 9148 : void CodeStubAssembler::StoreObjectField(Node* object, int offset,
2677 : Node* value) {
2678 : DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead.
2679 :
2680 : OptimizedStoreField(MachineRepresentation::kTagged,
2681 : UncheckedCast<HeapObject>(object), offset, value,
2682 15241 : WriteBarrierKind::kFullWriteBarrier);
2683 9148 : }
2684 :
// Dynamic-offset variant of StoreObjectField. If the offset node folds to a
// compile-time constant, dispatches to the constant-offset overload;
// otherwise emits a barriered Store at the untagged address.
2685 1400 : void CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
2686 : Node* value) {
2687 : int const_offset;
2688 1400 : if (ToInt32Constant(offset, const_offset)) {
2689 0 : StoreObjectField(object, const_offset, value);
2690 : } else {
2691 4200 : Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
2692 : }
2693 1400 : }
2694 :
// Stores |value| (representation |rep|) at constant |offset| without a
// write barrier. Caller guarantees the barrier is unnecessary.
2695 32147 : void CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2696 : Node* object, int offset, Node* value, MachineRepresentation rep) {
2697 : OptimizedStoreField(rep, UncheckedCast<HeapObject>(object), offset, value,
2698 159437 : WriteBarrierKind::kNoWriteBarrier);
2699 32147 : }
2700 :
// Dynamic-offset variant: folds a constant offset to the overload above,
// otherwise emits an unbarriered store at the untagged address.
2701 2128 : void CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2702 : Node* object, Node* offset, Node* value, MachineRepresentation rep) {
2703 : int const_offset;
2704 2128 : if (ToInt32Constant(offset, const_offset)) {
2705 2464 : return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
2706 : }
2707 : StoreNoWriteBarrier(rep, object,
2708 5880 : IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
2709 : }
2710 :
// Writes |object|'s map with the appropriate (map-specific) store handling.
2711 4093 : void CodeStubAssembler::StoreMap(Node* object, Node* map) {
2712 10715 : OptimizedStoreMap(UncheckedCast<HeapObject>(object), CAST(map));
2713 4093 : }
2714 :
// Convenience overload: stores the root-table map identified by
// |map_root_index| without a write barrier (roots are immortal immovable).
2715 55315 : void CodeStubAssembler::StoreMapNoWriteBarrier(Node* object,
2716 : RootIndex map_root_index) {
2717 110630 : StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
2718 55315 : }
2719 :
// Stores |map| into |object|'s map slot without a write barrier. Typically
// used right after allocation, when the object is in new space.
2720 5070 : void CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
2721 : CSA_SLOW_ASSERT(this, IsMap(map));
2722 : OptimizedStoreField(MachineRepresentation::kTaggedPointer,
2723 : UncheckedCast<HeapObject>(object), HeapObject::kMapOffset,
2724 72342 : map, WriteBarrierKind::kNoWriteBarrier);
2725 5070 : }
2726 :
// Stores the root value at |root_index| into a field. Immortal-immovable
// roots never need a write barrier; other roots take the barriered path.
2727 24409 : void CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
2728 : RootIndex root_index) {
2729 24409 : if (RootsTable::IsImmortalImmovable(root_index)) {
2730 48818 : return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
2731 : } else {
2732 0 : return StoreObjectField(object, offset, LoadRoot(root_index));
2733 : }
2734 : }
2735 :
// Writes a JSArray's length field. |length| is a Smi, so no barrier needed.
2736 0 : void CodeStubAssembler::StoreJSArrayLength(TNode<JSArray> array,
2737 : TNode<Smi> length) {
2738 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2739 0 : }
2740 :
// Writes a JSObject's elements backing store (with write barrier).
2741 0 : void CodeStubAssembler::StoreElements(TNode<Object> object,
2742 : TNode<FixedArrayBase> elements) {
2743 : StoreObjectField(object, JSObject::kElementsOffset, elements);
2744 0 : }
2745 :
// Stores a tagged |value| into element |index_node| of a FixedArray or
// PropertyArray (the two share the same header layout, asserted below).
// Debug builds bounds-check the computed offset against the array's length
// (decoded from the length-and-hash word for PropertyArray). The write
// barrier is emitted or skipped according to |barrier_mode|.
2746 40661 : void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
2747 : Node* object, Node* index_node, Node* value, WriteBarrierMode barrier_mode,
2748 : int additional_offset, ParameterMode parameter_mode) {
2749 : CSA_SLOW_ASSERT(
2750 : this, Word32Or(IsFixedArraySubclass(object), IsPropertyArray(object)));
2751 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2752 : DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2753 : barrier_mode == UPDATE_WRITE_BARRIER);
2754 : DCHECK(IsAligned(additional_offset, kTaggedSize));
2755 : STATIC_ASSERT(static_cast<int>(FixedArray::kHeaderSize) ==
2756 : static_cast<int>(PropertyArray::kHeaderSize));
2757 : int header_size =
2758 40661 : FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
2759 : Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2760 81322 : parameter_mode, header_size);
2761 : STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
2762 : static_cast<int>(WeakFixedArray::kLengthOffset));
2763 : STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
2764 : static_cast<int>(PropertyArray::kLengthAndHashOffset));
2765 : // Check that index_node + additional_offset <= object.length.
2766 : // TODO(cbruni): Use proper LoadXXLength helpers
2767 : CSA_ASSERT(
2768 : this,
2769 : IsOffsetInBounds(
2770 : offset,
2771 : Select<IntPtrT>(
2772 : IsPropertyArray(object),
2773 : [=] {
2774 : TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
2775 : object, PropertyArray::kLengthAndHashOffset);
2776 : return TNode<IntPtrT>::UncheckedCast(
2777 : DecodeWord<PropertyArray::LengthField>(length_and_hash));
2778 : },
2779 : [=] {
2780 : return LoadAndUntagObjectField(object,
2781 : FixedArrayBase::kLengthOffset);
2782 : }),
2783 : FixedArray::kHeaderSize));
2784 40661 : if (barrier_mode == SKIP_WRITE_BARRIER) {
2785 23541 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
2786 : } else {
2787 17120 : Store(object, offset, value);
2788 : }
2789 40661 : }
2790 :
// Stores a raw float64 into element |index_node| of a FixedDoubleArray,
// after a bounds check. Raw float stores never need a write barrier.
2791 1807 : void CodeStubAssembler::StoreFixedDoubleArrayElement(
2792 : TNode<FixedDoubleArray> object, Node* index_node, TNode<Float64T> value,
2793 : ParameterMode parameter_mode) {
2794 : CSA_ASSERT(this, IsFixedDoubleArray(object));
2795 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2796 : FixedArrayBoundsCheck(object, index_node, 0, parameter_mode);
2797 : Node* offset =
2798 : ElementOffsetFromIndex(index_node, PACKED_DOUBLE_ELEMENTS, parameter_mode,
2799 3614 : FixedArray::kHeaderSize - kHeapObjectTag);
2800 : MachineRepresentation rep = MachineRepresentation::kFloat64;
2801 1807 : StoreNoWriteBarrier(rep, object, offset, value);
2802 1807 : }
2803 :
// Stores |value| into feedback slot |slot_index_node| of a FeedbackVector,
// with or without a write barrier per |barrier_mode|. The slot offset is
// computed from kFeedbackSlotsOffset and bounds-checked (debug only)
// against the vector's length.
2804 15960 : void CodeStubAssembler::StoreFeedbackVectorSlot(Node* object,
2805 : Node* slot_index_node,
2806 : Node* value,
2807 : WriteBarrierMode barrier_mode,
2808 : int additional_offset,
2809 : ParameterMode parameter_mode) {
2810 : CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2811 : CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2812 : DCHECK(IsAligned(additional_offset, kTaggedSize));
2813 : DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2814 : barrier_mode == UPDATE_WRITE_BARRIER);
2815 : int header_size =
2816 15960 : FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2817 : Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2818 31920 : parameter_mode, header_size);
2819 : // Check that slot_index_node <= object.length.
2820 : CSA_ASSERT(this,
2821 : IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2822 : FeedbackVector::kHeaderSize));
2823 15960 : if (barrier_mode == SKIP_WRITE_BARRIER) {
2824 13160 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
2825 : } else {
2826 2800 : Store(object, offset, value);
2827 : }
2828 15960 : }
2829 :
// Jumps to |bailout| unless the JSArray's "length" property is writable:
// dictionary-mode maps bail out, and the read-only attribute bit of the
// length descriptor (always descriptor 0 on arrays) is checked.
2830 336 : void CodeStubAssembler::EnsureArrayLengthWritable(TNode<Map> map,
2831 : Label* bailout) {
2832 : // Don't support arrays in dictionary named property mode.
2833 672 : GotoIf(IsDictionaryMap(map), bailout);
2834 :
2835 : // Check whether the length property is writable. The length property is the
2836 : // only default named property on arrays. It's nonconfigurable, hence is
2837 : // guaranteed to stay the first property.
2838 336 : TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
2839 :
2840 : int length_index = JSArray::kLengthDescriptorIndex;
2841 : #ifdef DEBUG
2842 : TNode<Name> maybe_length =
2843 : LoadKeyByDescriptorEntry(descriptors, length_index);
2844 : CSA_ASSERT(this,
2845 : WordEqual(maybe_length, LoadRoot(RootIndex::klength_string)));
2846 : #endif
2847 :
2848 : TNode<Uint32T> details =
2849 336 : LoadDetailsByDescriptorEntry(descriptors, length_index);
2850 336 : GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
2851 672 : bailout);
2852 336 : }
2853 :
// Checks that elements may be appended to an object with |map|: bails out
// for prototype maps, non-extensible maps, and arrays with a read-only
// length. Returns the map's elements kind on success.
2854 168 : TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Map> map,
2855 : Label* bailout) {
2856 : // Disallow pushing onto prototypes. It might be the JSArray prototype.
2857 : // Disallow pushing onto non-extensible objects.
2858 168 : Comment("Disallow pushing onto prototypes");
2859 336 : Node* bit_field2 = LoadMapBitField2(map);
2860 : int mask = Map::IsPrototypeMapBit::kMask | Map::IsExtensibleBit::kMask;
2861 504 : Node* test = Word32And(bit_field2, Int32Constant(mask));
// Requires: extensible bit set AND prototype-map bit clear.
2862 336 : GotoIf(Word32NotEqual(test, Int32Constant(Map::IsExtensibleBit::kMask)),
2863 336 : bailout);
2864 :
2865 168 : EnsureArrayLengthWritable(map, bailout);
2866 :
2867 : TNode<Uint32T> kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
2868 168 : return Signed(kind);
2869 : }
2870 :
// Ensures |var_elements| (the backing store of |array|) has capacity for
// |length| + |growth| elements, growing it via GrowElementsCapacity if not.
// |length| and |growth| are already in |mode| representation. On allocation
// failure the grow path jumps to |bailout|.
2871 381 : void CodeStubAssembler::PossiblyGrowElementsCapacity(
2872 : ParameterMode mode, ElementsKind kind, Node* array, Node* length,
2873 : Variable* var_elements, Node* growth, Label* bailout) {
2874 381 : Label fits(this, var_elements);
2875 : Node* capacity =
2876 1143 : TaggedToParameter(LoadFixedArrayBaseLength(var_elements->value()), mode);
2877 : // length and growth nodes are already in a ParameterMode appropriate
2878 : // representation.
2879 381 : Node* new_length = IntPtrOrSmiAdd(growth, length, mode);
2880 762 : GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
2881 381 : Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
2882 : var_elements->Bind(GrowElementsCapacity(array, var_elements->value(), kind,
2883 : kind, capacity, new_capacity, mode,
2884 381 : bailout));
2885 381 : Goto(&fits);
2886 381 : BIND(&fits);
2887 381 : }
2888 :
// Appends the stub arguments from |arg_index| onward to |array| (fast
// path of Array.prototype.push-style builtins). Grows the backing store up
// front, then stores each argument, bumping the length as it goes. If an
// argument cannot be stored with elements kind |kind| (wrong type), the
// length written so far is committed, |arg_index| is advanced past the
// arguments already consumed, and control jumps to |bailout| so the caller
// can continue in the runtime. Returns the new array length as a Smi.
2889 213 : TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
2890 : SloppyTNode<JSArray> array,
2891 213 : CodeStubArguments* args,
2892 : TVariable<IntPtrT>* arg_index,
2893 : Label* bailout) {
2894 : CSA_SLOW_ASSERT(this, IsJSArray(array));
2895 213 : Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
2896 : Label pre_bailout(this);
2897 213 : Label success(this);
2898 : TVARIABLE(Smi, var_tagged_length);
2899 : ParameterMode mode = OptimalParameterMode();
2900 426 : VARIABLE(var_length, OptimalParameterRepresentation(),
2901 : TaggedToParameter(LoadFastJSArrayLength(array), mode));
2902 426 : VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
2903 :
2904 : // Resize the capacity of the fixed array if it doesn't fit.
2905 : TNode<IntPtrT> first = arg_index->value();
2906 : Node* growth = IntPtrToParameter(
2907 : IntPtrSub(UncheckedCast<IntPtrT>(args->GetLength(INTPTR_PARAMETERS)),
2908 : first),
2909 : mode);
2910 : PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
2911 213 : &var_elements, growth, &pre_bailout);
2912 :
2913 : // Push each argument onto the end of the array now that there is enough
2914 : // capacity.
2915 426 : CodeStubAssembler::VariableList push_vars({&var_length}, zone());
2916 213 : Node* elements = var_elements.value();
2917 : args->ForEach(
2918 : push_vars,
2919 213 : [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
2920 : TryStoreArrayElement(kind, mode, &pre_bailout, elements,
2921 213 : var_length.value(), arg);
2922 213 : Increment(&var_length, 1, mode);
2923 213 : },
2924 426 : first, nullptr);
2925 : {
2926 213 : TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2927 : var_tagged_length = length;
2928 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2929 213 : Goto(&success);
2930 : }
2931 :
2932 : BIND(&pre_bailout);
2933 : {
// Partial success: record how many args were consumed before bailing.
2934 213 : TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2935 : var_tagged_length = length;
2936 426 : Node* diff = SmiSub(length, LoadFastJSArrayLength(array));
2937 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2938 426 : *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
2939 213 : Goto(bailout);
2940 : }
2941 :
2942 : BIND(&success);
2943 213 : return var_tagged_length.value();
2944 : }
2945 :
// Stores |value| into |elements| at |index| for elements kind |kind|,
// jumping to |bailout| when the value's type is incompatible: non-Smi for
// Smi kinds, non-number for double kinds. Numbers are unboxed to float64
// before a double-kind store.
2946 717 : void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
2947 : ParameterMode mode, Label* bailout,
2948 : Node* elements, Node* index,
2949 : Node* value) {
2950 717 : if (IsSmiElementsKind(kind)) {
2951 488 : GotoIf(TaggedIsNotSmi(value), bailout);
2952 473 : } else if (IsDoubleElementsKind(kind)) {
2953 239 : GotoIfNotNumber(value, bailout);
2954 : }
2955 1195 : if (IsDoubleElementsKind(kind)) value = ChangeNumberToFloat64(value);
2956 717 : StoreElement(elements, kind, index, value, mode);
2957 717 : }
2958 :
// Single-value variant: appends one |value| to |array|, growing the
// backing store by one if needed and bumping the length. Jumps to |bailout|
// if the value doesn't fit elements kind |kind| or the grow fails; unlike
// the arguments variant, nothing is committed on bailout.
2959 168 : void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
2960 : Node* value, Label* bailout) {
2961 : CSA_SLOW_ASSERT(this, IsJSArray(array));
2962 168 : Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
2963 : ParameterMode mode = OptimalParameterMode();
2964 168 : VARIABLE(var_length, OptimalParameterRepresentation(),
2965 : TaggedToParameter(LoadFastJSArrayLength(array), mode));
2966 336 : VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
2967 :
2968 : // Resize the capacity of the fixed array if it doesn't fit.
2969 168 : Node* growth = IntPtrOrSmiConstant(1, mode);
2970 : PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
2971 168 : &var_elements, growth, bailout);
2972 :
2973 : // Push each argument onto the end of the array now that there is enough
2974 : // capacity.
2975 : TryStoreArrayElement(kind, mode, bailout, var_elements.value(),
2976 168 : var_length.value(), value);
2977 168 : Increment(&var_length, 1, mode);
2978 :
2979 168 : Node* length = ParameterToTagged(var_length.value(), mode);
2980 168 : StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2981 168 : }
2982 :
// Allocates a Cell holding |value|; |mode| selects whether the value store
// gets a write barrier (fresh allocations typically skip it).
2983 0 : Node* CodeStubAssembler::AllocateCellWithValue(Node* value,
2984 : WriteBarrierMode mode) {
2985 0 : Node* result = Allocate(Cell::kSize, kNone);
2986 0 : StoreMapNoWriteBarrier(result, RootIndex::kCellMap);
2987 0 : StoreCellValue(result, value, mode);
2988 0 : return result;
2989 : }
2990 :
// Reads the value slot of a Cell.
2991 1288 : Node* CodeStubAssembler::LoadCellValue(Node* cell) {
2992 : CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
2993 1288 : return LoadObjectField(cell, Cell::kValueOffset);
2994 : }
2995 :
// Writes the value slot of a Cell, with or without a write barrier per
// |mode|.
2996 0 : void CodeStubAssembler::StoreCellValue(Node* cell, Node* value,
2997 : WriteBarrierMode mode) {
2998 : CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
2999 : DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER);
3000 :
3001 0 : if (mode == UPDATE_WRITE_BARRIER) {
3002 : StoreObjectField(cell, Cell::kValueOffset, value);
3003 : } else {
3004 : StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
3005 : }
3006 0 : }
3007 :
// Allocates an uninitialized HeapNumber (map set, value slot not written).
3008 28401 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber() {
3009 56802 : Node* result = Allocate(HeapNumber::kSize, kNone);
3010 : RootIndex heap_map_index = RootIndex::kHeapNumberMap;
3011 28401 : StoreMapNoWriteBarrier(result, heap_map_index);
3012 28401 : return UncheckedCast<HeapNumber>(result);
3013 : }
3014 :
// Allocates a HeapNumber initialized to |value|.
3015 24385 : TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
3016 : SloppyTNode<Float64T> value) {
3017 24385 : TNode<HeapNumber> result = AllocateHeapNumber();
3018 : StoreHeapNumberValue(result, value);
3019 24385 : return result;
3020 : }
3021 :
// Allocates an uninitialized MutableHeapNumber (map set, value not
// written).
3022 504 : TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumber() {
3023 1008 : Node* result = Allocate(MutableHeapNumber::kSize, kNone);
3024 : RootIndex heap_map_index = RootIndex::kMutableHeapNumberMap;
3025 504 : StoreMapNoWriteBarrier(result, heap_map_index);
3026 504 : return UncheckedCast<MutableHeapNumber>(result);
3027 : }
3028 :
// If |object| is a MutableHeapNumber, returns a freshly allocated copy of
// it (so the caller can share the value without aliasing the mutable box);
// Smis and all other heap objects are returned unchanged.
3029 56 : TNode<Object> CodeStubAssembler::CloneIfMutablePrimitive(TNode<Object> object) {
3030 56 : TVARIABLE(Object, result, object);
3031 56 : Label done(this);
3032 :
3033 112 : GotoIf(TaggedIsSmi(object), &done);
3034 112 : GotoIfNot(IsMutableHeapNumber(UncheckedCast<HeapObject>(object)), &done);
3035 : {
3036 : // Mutable heap number found --- allocate a clone.
3037 : TNode<Float64T> value =
3038 56 : LoadHeapNumberValue(UncheckedCast<HeapNumber>(object));
3039 112 : result = AllocateMutableHeapNumberWithValue(value);
3040 56 : Goto(&done);
3041 : }
3042 :
3043 : BIND(&done);
3044 56 : return result.value();
3045 : }
3046 :
// Allocates a MutableHeapNumber initialized to |value|.
3047 504 : TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumberWithValue(
3048 : SloppyTNode<Float64T> value) {
3049 504 : TNode<MutableHeapNumber> result = AllocateMutableHeapNumber();
3050 : StoreMutableHeapNumberValue(result, value);
3051 504 : return result;
3052 : }
3053 :
// Allocates a BigInt with |length| digits and initializes its bitfield so
// that the length bits are set and all other flag bits (e.g. sign) are
// zero.
3054 3976 : TNode<BigInt> CodeStubAssembler::AllocateBigInt(TNode<IntPtrT> length) {
3055 3976 : TNode<BigInt> result = AllocateRawBigInt(length);
3056 : StoreBigIntBitfield(result,
3057 3976 : Word32Shl(TruncateIntPtrToInt32(length),
3058 19880 : Int32Constant(BigInt::LengthBits::kShift)));
3059 3976 : return result;
3060 : }
3061 :
// Allocates the storage for a BigInt with |length| digits, setting only
// the map and (when the layout has padding) zeroing the padding word; the
// bitfield and digits are left for the caller. Supports only small lengths
// (< 3, asserted) since no large-object-space check is made.
3062 5264 : TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
3063 : // This is currently used only for 64-bit wide BigInts. If more general
3064 : // applicability is required, a large-object check must be added.
3065 : CSA_ASSERT(this, UintPtrLessThan(length, IntPtrConstant(3)));
3066 :
3067 : TNode<IntPtrT> size =
3068 : IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
3069 10528 : Signed(WordShl(length, kSystemPointerSizeLog2)));
3070 10528 : Node* raw_result = Allocate(size, kNone);
3071 5264 : StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
3072 : if (FIELD_SIZE(BigInt::kOptionalPaddingOffset) != 0) {
3073 : DCHECK_EQ(4, FIELD_SIZE(BigInt::kOptionalPaddingOffset));
3074 : StoreObjectFieldNoWriteBarrier(raw_result, BigInt::kOptionalPaddingOffset,
3075 : Int32Constant(0),
3076 10528 : MachineRepresentation::kWord32);
3077 : }
3078 5264 : return UncheckedCast<BigInt>(raw_result);
3079 : }
3080 :
// Writes a BigInt's 32-bit bitfield (sign + length bits).
3081 224 : void CodeStubAssembler::StoreBigIntBitfield(TNode<BigInt> bigint,
3082 : TNode<Word32T> bitfield) {
3083 : StoreObjectFieldNoWriteBarrier(bigint, BigInt::kBitfieldOffset, bitfield,
3084 : MachineRepresentation::kWord32);
3085 224 : }
3086 :
// Writes the pointer-sized digit at compile-time index |digit_index| of a
// BigInt.
3087 112 : void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint, int digit_index,
3088 : TNode<UintPtrT> digit) {
3089 : StoreObjectFieldNoWriteBarrier(
3090 : bigint, BigInt::kDigitsOffset + digit_index * kSystemPointerSize, digit,
3091 112 : UintPtrT::kMachineRepresentation);
3092 112 : }
3093 :
// Reads a BigInt's 32-bit bitfield (sign + length bits).
3094 1680 : TNode<Word32T> CodeStubAssembler::LoadBigIntBitfield(TNode<BigInt> bigint) {
3095 : return UncheckedCast<Word32T>(
3096 1680 : LoadObjectField(bigint, BigInt::kBitfieldOffset, MachineType::Uint32()));
3097 : }
3098 :
// Reads the pointer-sized digit at compile-time index |digit_index| of a
// BigInt.
3099 1568 : TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
3100 : int digit_index) {
3101 : return UncheckedCast<UintPtrT>(LoadObjectField(
3102 : bigint, BigInt::kDigitsOffset + digit_index * kSystemPointerSize,
3103 3136 : MachineType::UintPtr()));
3104 : }
3105 :
// Allocates a SeqOneByteString of compile-time-constant |length| and
// initializes map, length and (empty) hash field; the character payload is
// left uninitialized. Returns the canonical empty string for length 0.
3106 784 : TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3107 : uint32_t length, AllocationFlags flags) {
3108 784 : Comment("AllocateSeqOneByteString");
3109 784 : if (length == 0) {
3110 0 : return CAST(LoadRoot(RootIndex::kempty_string));
3111 : }
3112 1568 : Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
3113 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
3114 784 : StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
3115 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3116 : Uint32Constant(length),
3117 : MachineRepresentation::kWord32);
3118 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
3119 : Int32Constant(String::kEmptyHashField),
3120 1568 : MachineRepresentation::kWord32);
3121 : return CAST(result);
3122 : }
3123 :
// Returns true if |object| is Smi zero or a Context. Used by assertions on
// context parameters where Smi 0 stands in for "no context".
3124 0 : TNode<BoolT> CodeStubAssembler::IsZeroOrContext(SloppyTNode<Object> object) {
3125 0 : return Select<BoolT>(WordEqual(object, SmiConstant(0)),
3126 0 : [=] { return Int32TrueConstant(); },
3127 0 : [=] { return IsContext(CAST(object)); });
3128 : }
3129 :
    : // Allocates an uninitialized SeqOneByteString of dynamic |length|.
    : // |context| may be the Smi zero when none is available; it is only used
    : // by the runtime fallback. Zero length yields the canonical empty
    : // string; sizes above kMaxRegularHeapObjectSize go through
    : // Runtime::kAllocateSeqOneByteString (large object space).
3130 1624 : TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3131 : Node* context, TNode<Uint32T> length, AllocationFlags flags) {
3132 1624 : Comment("AllocateSeqOneByteString");
3133 : CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3134 1624 : VARIABLE(var_result, MachineRepresentation::kTagged);
3135 :
3136 : // Compute the SeqOneByteString size and check if it fits into new space.
3137 1624 : Label if_lengthiszero(this), if_sizeissmall(this),
3138 1624 : if_notsizeissmall(this, Label::kDeferred), if_join(this);
3139 3248 : GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);
3140 :
    : // Adding kObjectAlignmentMask then masking it off below rounds the size
    : // up to the next object-alignment boundary.
3141 : Node* raw_size = GetArrayAllocationSize(
3142 3248 : Signed(ChangeUint32ToWord(length)), UINT8_ELEMENTS, INTPTR_PARAMETERS,
3143 : SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
3144 3248 : TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3145 3248 : Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3146 3248 : &if_sizeissmall, &if_notsizeissmall);
3147 :
3148 : BIND(&if_sizeissmall);
3149 : {
3150 : // Just allocate the SeqOneByteString in new space.
3151 : TNode<Object> result =
3152 : AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3153 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
3154 1624 : StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
3155 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3156 : length, MachineRepresentation::kWord32);
3157 : StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
3158 : Int32Constant(String::kEmptyHashField),
3159 3248 : MachineRepresentation::kWord32);
3160 1624 : var_result.Bind(result);
3161 1624 : Goto(&if_join);
3162 : }
3163 :
3164 : BIND(&if_notsizeissmall);
3165 : {
3166 : // We might need to allocate in large object space, go to the runtime.
3167 : Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
3168 3248 : ChangeUint32ToTagged(length));
3169 1624 : var_result.Bind(result);
3170 1624 : Goto(&if_join);
3171 : }
3172 :
3173 : BIND(&if_lengthiszero);
3174 : {
3175 3248 : var_result.Bind(LoadRoot(RootIndex::kempty_string));
3176 1624 : Goto(&if_join);
3177 : }
3178 :
3179 : BIND(&if_join);
3180 3248 : return CAST(var_result.value());
3181 : }
3182 :
    : // Allocates an uninitialized SeqTwoByteString for a compile-time-constant
    : // |length|; returns the canonical empty string for length 0. Mirrors the
    : // one-byte variant above; write barriers are skipped for the same
    : // reasons (immortal-immovable map root, word32 payloads).
3183 896 : TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3184 : uint32_t length, AllocationFlags flags) {
3185 896 : Comment("AllocateSeqTwoByteString");
3186 896 : if (length == 0) {
3187 0 : return CAST(LoadRoot(RootIndex::kempty_string));
3188 : }
3189 2688 : Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
3190 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3191 896 : StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3192 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3193 : Uint32Constant(length),
3194 : MachineRepresentation::kWord32);
3195 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3196 : Int32Constant(String::kEmptyHashField),
3197 1792 : MachineRepresentation::kWord32);
3198 : return CAST(result);
3199 : }
3200 :
3201 1232 : TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3202 : Node* context, TNode<Uint32T> length, AllocationFlags flags) {
3203 : CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3204 1232 : Comment("AllocateSeqTwoByteString");
3205 1232 : VARIABLE(var_result, MachineRepresentation::kTagged);
3206 :
3207 : // Compute the SeqTwoByteString size and check if it fits into new space.
3208 1232 : Label if_lengthiszero(this), if_sizeissmall(this),
3209 1232 : if_notsizeissmall(this, Label::kDeferred), if_join(this);
3210 2464 : GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);
3211 :
3212 : Node* raw_size = GetArrayAllocationSize(
3213 2464 : Signed(ChangeUint32ToWord(length)), UINT16_ELEMENTS, INTPTR_PARAMETERS,
3214 : SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
3215 2464 : TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3216 2464 : Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3217 2464 : &if_sizeissmall, &if_notsizeissmall);
3218 :
3219 : BIND(&if_sizeissmall);
3220 : {
3221 : // Just allocate the SeqTwoByteString in new space.
3222 : TNode<Object> result =
3223 : AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3224 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3225 1232 : StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3226 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3227 : length, MachineRepresentation::kWord32);
3228 : StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3229 : Int32Constant(String::kEmptyHashField),
3230 2464 : MachineRepresentation::kWord32);
3231 1232 : var_result.Bind(result);
3232 1232 : Goto(&if_join);
3233 : }
3234 :
3235 : BIND(&if_notsizeissmall);
3236 : {
3237 : // We might need to allocate in large object space, go to the runtime.
3238 : Node* result = CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
3239 2464 : ChangeUint32ToTagged(length));
3240 1232 : var_result.Bind(result);
3241 1232 : Goto(&if_join);
3242 : }
3243 :
3244 : BIND(&if_lengthiszero);
3245 : {
3246 2464 : var_result.Bind(LoadRoot(RootIndex::kempty_string));
3247 1232 : Goto(&if_join);
3248 : }
3249 :
3250 : BIND(&if_join);
3251 2464 : return CAST(var_result.value());
3252 : }
3253 :
    : // Allocates a SlicedString (a view of |parent| starting at |offset| with
    : // the given |length|) using the one-byte or two-byte sliced-string map
    : // selected by |map_root_index|. All stores skip the write barrier: the
    : // object was just allocated (presumably in new space — the barrier-free
    : // tagged stores of |parent|/|offset| rely on that).
3254 896 : TNode<String> CodeStubAssembler::AllocateSlicedString(RootIndex map_root_index,
3255 : TNode<Uint32T> length,
3256 : TNode<String> parent,
3257 : TNode<Smi> offset) {
3258 : DCHECK(map_root_index == RootIndex::kSlicedOneByteStringMap ||
3259 : map_root_index == RootIndex::kSlicedStringMap);
3260 1792 : Node* result = Allocate(SlicedString::kSize);
3261 : DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
3262 896 : StoreMapNoWriteBarrier(result, map_root_index);
3263 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
3264 896 : Int32Constant(String::kEmptyHashField),
3265 1792 : MachineRepresentation::kWord32);
3266 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
3267 : MachineRepresentation::kWord32);
3268 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
3269 : MachineRepresentation::kTagged);
3270 : StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
3271 : MachineRepresentation::kTagged);
3272 896 : return CAST(result);
3273 : }
3274 :
    : // Convenience wrapper: one-byte SlicedString.
3275 0 : TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
3276 : TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3277 : return AllocateSlicedString(RootIndex::kSlicedOneByteStringMap, length,
3278 448 : parent, offset);
3279 : }
3280 :
    : // Convenience wrapper: two-byte SlicedString.
3281 0 : TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
3282 : TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3283 : return AllocateSlicedString(RootIndex::kSlicedStringMap, length, parent,
3284 448 : offset);
3285 : }
3286 :
    : // Allocates a ConsString of |first| + |second| with the given map root
    : // (one-byte or two-byte cons map) and total |length|. When the
    : // allocation may be pretenured (kPretenured flag) the result can live in
    : // old space, so the child pointers are stored with the full
    : // write-barriered StoreObjectField; otherwise the barrier is skipped.
3287 112 : TNode<String> CodeStubAssembler::AllocateConsString(RootIndex map_root_index,
3288 : TNode<Uint32T> length,
3289 : TNode<String> first,
3290 : TNode<String> second,
3291 : AllocationFlags flags) {
3292 : DCHECK(map_root_index == RootIndex::kConsOneByteStringMap ||
3293 : map_root_index == RootIndex::kConsStringMap);
3294 224 : Node* result = Allocate(ConsString::kSize, flags);
3295 : DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
3296 112 : StoreMapNoWriteBarrier(result, map_root_index);
3297 : StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
3298 : MachineRepresentation::kWord32);
3299 : StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
3300 : Int32Constant(String::kEmptyHashField),
3301 224 : MachineRepresentation::kWord32);
3302 : bool const new_space = !(flags & kPretenured);
3303 112 : if (new_space) {
3304 : StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
3305 : MachineRepresentation::kTagged);
3306 : StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
3307 : MachineRepresentation::kTagged);
3308 : } else {
3309 : StoreObjectField(result, ConsString::kFirstOffset, first);
3310 : StoreObjectField(result, ConsString::kSecondOffset, second);
3311 : }
3312 112 : return CAST(result);
3313 : }
3314 :
    : // Convenience wrapper: one-byte ConsString.
3315 0 : TNode<String> CodeStubAssembler::AllocateOneByteConsString(
3316 : TNode<Uint32T> length, TNode<String> first, TNode<String> second,
3317 : AllocationFlags flags) {
3318 : return AllocateConsString(RootIndex::kConsOneByteStringMap, length, first,
3319 56 : second, flags);
3320 : }
3321 :
    : // Convenience wrapper: two-byte ConsString.
3322 0 : TNode<String> CodeStubAssembler::AllocateTwoByteConsString(
3323 : TNode<Uint32T> length, TNode<String> first, TNode<String> second,
3324 : AllocationFlags flags) {
3325 : return AllocateConsString(RootIndex::kConsStringMap, length, first, second,
3326 56 : flags);
3327 : }
3328 :
    : // Allocates a new ConsString over |left| and |right| with total |length|,
    : // picking the one-byte cons map when the instance-type checks below show
    : // both inputs are (or can be treated as) one-byte, else the two-byte map.
    : // NOTE(review): callers are expected to have decided that a cons string
    : // (rather than a flat copy) is appropriate — no length threshold is
    : // checked here; confirm at call sites.
3329 56 : TNode<String> CodeStubAssembler::NewConsString(TNode<Uint32T> length,
3330 : TNode<String> left,
3331 : TNode<String> right,
3332 : AllocationFlags flags) {
3333 : // Added string can be a cons string.
3334 56 : Comment("Allocating ConsString");
3335 112 : Node* left_instance_type = LoadInstanceType(left);
3336 112 : Node* right_instance_type = LoadInstanceType(right);
3337 :
3338 : // Compute intersection and difference of instance types.
3339 : Node* anded_instance_types =
3340 112 : Word32And(left_instance_type, right_instance_type);
3341 : Node* xored_instance_types =
3342 112 : Word32Xor(left_instance_type, right_instance_type);
3343 :
3344 : // We create a one-byte cons string if
3345 : // 1. both strings are one-byte, or
3346 : // 2. at least one of the strings is two-byte, but happens to contain only
3347 : // one-byte characters.
3348 : // To do this, we check
3349 : // 1. if both strings are one-byte, or if the one-byte data hint is set in
3350 : // both strings, or
3351 : // 2. if one of the strings has the one-byte data hint set and the other
3352 : // string is one-byte.
3353 : STATIC_ASSERT(kOneByteStringTag != 0);
3354 : STATIC_ASSERT(kOneByteDataHintTag != 0);
3355 : Label one_byte_map(this);
3356 56 : Label two_byte_map(this);
3357 : TVARIABLE(String, result);
3358 56 : Label done(this, &result);
3359 : GotoIf(IsSetWord32(anded_instance_types,
3360 56 : kStringEncodingMask | kOneByteDataHintTag),
3361 112 : &one_byte_map);
3362 : Branch(Word32NotEqual(Word32And(xored_instance_types,
3363 : Int32Constant(kStringEncodingMask |
3364 112 : kOneByteDataHintMask)),
3365 224 : Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
3366 112 : &two_byte_map, &one_byte_map);
3367 :
3368 : BIND(&one_byte_map);
3369 56 : Comment("One-byte ConsString");
3370 : result = AllocateOneByteConsString(length, left, right, flags);
3371 56 : Goto(&done);
3372 :
3373 : BIND(&two_byte_map);
3374 56 : Comment("Two-byte ConsString");
3375 : result = AllocateTwoByteConsString(length, left, right, flags);
3376 56 : Goto(&done);
3377 :
3378 : BIND(&done);
3379 :
3380 56 : return result.value();
3381 : }
3382 :
    : // Convenience overload taking a C++ compile-time element count.
3383 896 : TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3384 : int at_least_space_for) {
3385 896 : return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
3386 : }
3387 :
    : // Allocates a NameDictionary with room for |at_least_space_for| elements;
    : // the actual backing capacity is rounded up by HashTableComputeCapacity.
3388 901 : TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3389 : TNode<IntPtrT> at_least_space_for) {
3390 : CSA_ASSERT(this, UintPtrLessThanOrEqual(
3391 : at_least_space_for,
3392 : IntPtrConstant(NameDictionary::kMaxCapacity)));
3393 901 : TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
3394 901 : return AllocateNameDictionaryWithCapacity(capacity);
3395 : }
3396 :
    : // Allocates a NameDictionary with exactly |capacity| (a power of two)
    : // entry slots in new space and initializes the header fields; all entry
    : // slots are filled with undefined via StoreFieldsNoWriteBarrier.
3397 1125 : TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
3398 : TNode<IntPtrT> capacity) {
3399 : CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3400 : CSA_ASSERT(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
3401 : TNode<IntPtrT> length = EntryToIndex<NameDictionary>(capacity);
3402 : TNode<IntPtrT> store_size = IntPtrAdd(
3403 1125 : TimesTaggedSize(length), IntPtrConstant(NameDictionary::kHeaderSize));
3404 :
3405 : TNode<NameDictionary> result =
3406 : UncheckedCast<NameDictionary>(AllocateInNewSpace(store_size));
3407 1125 : Comment("Initialize NameDictionary");
3408 : // Initialize FixedArray fields.
3409 : DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kNameDictionaryMap));
3410 1125 : StoreMapNoWriteBarrier(result, RootIndex::kNameDictionaryMap);
3411 : StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
3412 : SmiFromIntPtr(length));
3413 : // Initialize HashTable fields.
3414 1125 : TNode<Smi> zero = SmiConstant(0);
3415 : StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
3416 1125 : SKIP_WRITE_BARRIER);
3417 : StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
3418 1125 : zero, SKIP_WRITE_BARRIER);
3419 : StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
3420 2250 : SmiTag(capacity), SKIP_WRITE_BARRIER);
3421 : // Initialize Dictionary fields.
3422 : TNode<HeapObject> filler = UndefinedConstant();
3423 : StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
3424 : SmiConstant(PropertyDetails::kInitialIndex),
3425 2250 : SKIP_WRITE_BARRIER);
3426 : StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
3427 : SmiConstant(PropertyArray::kNoHashSentinel),
3428 2250 : SKIP_WRITE_BARRIER);
3429 :
3430 : // Initialize NameDictionary elements.
3431 2250 : TNode<WordT> result_word = BitcastTaggedToWord(result);
3432 : TNode<WordT> start_address = IntPtrAdd(
3433 : result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
3434 : NameDictionary::kElementsStartIndex) -
3435 2250 : kHeapObjectTag));
3436 : TNode<WordT> end_address = IntPtrAdd(
3437 2250 : result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
3438 1125 : StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3439 1125 : return result;
3440 : }
3441 :
    : // Makes a shallow copy of |dictionary|, allocating a new backing store of
    : // the same capacity and copying all elements with the write barrier
    : // skipped. Jumps to |large_object_fallback| if the capacity exceeds
    : // kMaxRegularCapacity (copy would not fit a regular heap object).
3442 224 : TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
3443 : TNode<NameDictionary> dictionary, Label* large_object_fallback) {
3444 224 : Comment("Copy boilerplate property dict");
3445 448 : TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
3446 : CSA_ASSERT(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)));
3447 : GotoIf(UintPtrGreaterThan(
3448 448 : capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
3449 448 : large_object_fallback);
3450 : TNode<NameDictionary> properties =
3451 224 : AllocateNameDictionaryWithCapacity(capacity);
3452 448 : TNode<IntPtrT> length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
3453 : CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
3454 224 : SKIP_WRITE_BARRIER, INTPTR_PARAMETERS);
3455 224 : return properties;
3456 : }
3457 :
    : // Allocates an empty OrderedHashMap/OrderedHashSet at the minimum
    : // capacity. The header counters are zeroed, every bucket is set to
    : // kNotFound and the data table is filled with undefined. All sizes are
    : // C++ compile-time constants, so the loops below fully unroll.
3458 : template <typename CollectionType>
3459 112 : Node* CodeStubAssembler::AllocateOrderedHashTable() {
3460 : static const int kCapacity = CollectionType::kMinCapacity;
3461 : static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
3462 : static const int kDataTableLength = kCapacity * CollectionType::kEntrySize;
3463 : static const int kFixedArrayLength =
3464 : CollectionType::HashTableStartIndex() + kBucketCount + kDataTableLength;
3465 : static const int kDataTableStartIndex =
3466 : CollectionType::HashTableStartIndex() + kBucketCount;
3467 :
3468 : STATIC_ASSERT(base::bits::IsPowerOfTwo(kCapacity));
3469 : STATIC_ASSERT(kCapacity <= CollectionType::MaxCapacity());
3470 :
3471 : // Allocate the table and add the proper map.
3472 : const ElementsKind elements_kind = HOLEY_ELEMENTS;
3473 112 : TNode<IntPtrT> length_intptr = IntPtrConstant(kFixedArrayLength);
3474 : TNode<Map> fixed_array_map =
3475 112 : CAST(LoadRoot(CollectionType::GetMapRootIndex()));
3476 : TNode<FixedArray> table =
3477 : CAST(AllocateFixedArray(elements_kind, length_intptr,
3478 : kAllowLargeObjectAllocation, fixed_array_map));
3479 :
3480 : // Initialize the OrderedHashTable fields.
3481 : const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
3482 224 : StoreFixedArrayElement(table, CollectionType::NumberOfElementsIndex(),
3483 : SmiConstant(0), barrier_mode);
3484 224 : StoreFixedArrayElement(table, CollectionType::NumberOfDeletedElementsIndex(),
3485 : SmiConstant(0), barrier_mode);
3486 224 : StoreFixedArrayElement(table, CollectionType::NumberOfBucketsIndex(),
3487 : SmiConstant(kBucketCount), barrier_mode);
3488 :
3489 : // Fill the buckets with kNotFound.
3490 112 : TNode<Smi> not_found = SmiConstant(CollectionType::kNotFound);
3491 : STATIC_ASSERT(CollectionType::HashTableStartIndex() ==
3492 : CollectionType::NumberOfBucketsIndex() + 1);
3493 : STATIC_ASSERT((CollectionType::HashTableStartIndex() + kBucketCount) ==
3494 : kDataTableStartIndex);
3495 336 : for (int i = 0; i < kBucketCount; i++) {
3496 224 : StoreFixedArrayElement(table, CollectionType::HashTableStartIndex() + i,
3497 : not_found, barrier_mode);
3498 : }
3499 :
3500 : // Fill the data table with undefined.
3501 : STATIC_ASSERT(kDataTableStartIndex + kDataTableLength == kFixedArrayLength);
3502 1120 : for (int i = 0; i < kDataTableLength; i++) {
3503 1120 : StoreFixedArrayElement(table, kDataTableStartIndex + i, UndefinedConstant(),
3504 2240 : barrier_mode);
3505 : }
3506 :
3507 112 : return table;
3508 : }
3509 :
3510 : template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashMap>();
3511 : template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashSet>();
3512 :
    : // Allocates an empty SmallOrderedHashMap/Set for |capacity| entries
    : // (a dynamic power of two below kMaxCapacity). Layout: header, data
    : // table, then hash table + chain table. The hash/chain region is
    : // memset to 0xFF (the not-found marker per byte) via libc memset, and
    : // the data table is filled with the-hole.
3513 : template <typename CollectionType>
3514 10 : TNode<CollectionType> CodeStubAssembler::AllocateSmallOrderedHashTable(
3515 : TNode<IntPtrT> capacity) {
3516 : CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3517 : CSA_ASSERT(this, IntPtrLessThan(
3518 : capacity, IntPtrConstant(CollectionType::kMaxCapacity)));
3519 :
3520 : TNode<IntPtrT> data_table_start_offset =
3521 10 : IntPtrConstant(CollectionType::DataTableStartOffset());
3522 :
3523 : TNode<IntPtrT> data_table_size = IntPtrMul(
3524 10 : capacity, IntPtrConstant(CollectionType::kEntrySize * kTaggedSize));
3525 :
    : // Number of buckets = capacity / load factor.
3526 : TNode<Int32T> hash_table_size =
3527 10 : Int32Div(TruncateIntPtrToInt32(capacity),
3528 40 : Int32Constant(CollectionType::kLoadFactor));
3529 :
3530 : TNode<IntPtrT> hash_table_start_offset =
3531 : IntPtrAdd(data_table_start_offset, data_table_size);
3532 :
3533 : TNode<IntPtrT> hash_table_and_chain_table_size =
3534 20 : IntPtrAdd(ChangeInt32ToIntPtr(hash_table_size), capacity);
3535 :
3536 : TNode<IntPtrT> total_size =
3537 : IntPtrAdd(hash_table_start_offset, hash_table_and_chain_table_size);
3538 :
    : // Round the total size up to the next tagged-size boundary.
3539 : TNode<IntPtrT> total_size_word_aligned =
3540 10 : IntPtrAdd(total_size, IntPtrConstant(kTaggedSize - 1));
3541 20 : total_size_word_aligned = ChangeInt32ToIntPtr(
3542 10 : Int32Div(TruncateIntPtrToInt32(total_size_word_aligned),
3543 40 : Int32Constant(kTaggedSize)));
3544 : total_size_word_aligned =
3545 : UncheckedCast<IntPtrT>(TimesTaggedSize(total_size_word_aligned));
3546 :
3547 : // Allocate the table and add the proper map.
3548 : TNode<Map> small_ordered_hash_map =
3549 10 : CAST(LoadRoot(CollectionType::GetMapRootIndex()));
3550 : TNode<Object> table_obj = AllocateInNewSpace(total_size_word_aligned);
3551 : StoreMapNoWriteBarrier(table_obj, small_ordered_hash_map);
3552 : TNode<CollectionType> table = UncheckedCast<CollectionType>(table_obj);
3553 :
3554 : // Initialize the SmallOrderedHashTable fields.
3555 30 : StoreObjectByteNoWriteBarrier(
3556 : table, CollectionType::NumberOfBucketsOffset(),
3557 : Word32And(Int32Constant(0xFF), hash_table_size));
3558 20 : StoreObjectByteNoWriteBarrier(table, CollectionType::NumberOfElementsOffset(),
3559 : Int32Constant(0));
3560 20 : StoreObjectByteNoWriteBarrier(
3561 : table, CollectionType::NumberOfDeletedElementsOffset(), Int32Constant(0));
3562 :
3563 : TNode<IntPtrT> table_address =
3564 30 : IntPtrSub(BitcastTaggedToWord(table), IntPtrConstant(kHeapObjectTag));
3565 : TNode<IntPtrT> hash_table_start_address =
3566 : IntPtrAdd(table_address, hash_table_start_offset);
3567 :
3568 : // Initialize the HashTable part.
3569 20 : Node* memset = ExternalConstant(ExternalReference::libc_memset_function());
3570 10 : CallCFunction3(MachineType::AnyTagged(), MachineType::Pointer(),
3571 : MachineType::IntPtr(), MachineType::UintPtr(), memset,
3572 : hash_table_start_address, IntPtrConstant(0xFF),
3573 20 : hash_table_and_chain_table_size);
3574 :
3575 : // Initialize the DataTable part.
3576 : TNode<HeapObject> filler = TheHoleConstant();
3577 : TNode<WordT> data_table_start_address =
3578 : IntPtrAdd(table_address, data_table_start_offset);
3579 : TNode<WordT> data_table_end_address =
3580 10 : IntPtrAdd(data_table_start_address, data_table_size);
3581 10 : StoreFieldsNoWriteBarrier(data_table_start_address, data_table_end_address,
3582 : filler);
3583 :
3584 10 : return table;
3585 : }
3586 :
3587 : template TNode<SmallOrderedHashMap>
3588 : CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
3589 : TNode<IntPtrT> capacity);
3590 : template TNode<SmallOrderedHashSet>
3591 : CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
3592 : TNode<IntPtrT> capacity);
3593 :
    : // Walks the bucket chain of an OrderedHashMap/Set looking for an entry
    : // whose key satisfies |key_compare|. On success binds the entry's index
    : // (relative to HashTableStartIndex) into |entry_start_position| and jumps
    : // to |entry_found|; jumps to |not_found| when the chain ends. |hash| must
    : // already be an untagged intptr hash value.
3594 : template <typename CollectionType>
3595 1680 : void CodeStubAssembler::FindOrderedHashTableEntry(
3596 : Node* table, Node* hash,
3597 : const std::function<void(Node*, Label*, Label*)>& key_compare,
3598 : Variable* entry_start_position, Label* entry_found, Label* not_found) {
3599 : // Get the index of the bucket.
3600 : Node* const number_of_buckets = SmiUntag(CAST(LoadFixedArrayElement(
3601 3360 : CAST(table), CollectionType::NumberOfBucketsIndex())));
3602 : Node* const bucket =
3603 6720 : WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
3604 : Node* const first_entry = SmiUntag(CAST(LoadFixedArrayElement(
3605 : CAST(table), bucket,
3606 3360 : CollectionType::HashTableStartIndex() * kTaggedSize)));
3607 :
3608 : // Walk the bucket chain.
3609 : Node* entry_start;
3610 : Label if_key_found(this);
3611 : {
3612 1680 : VARIABLE(var_entry, MachineType::PointerRepresentation(), first_entry);
3613 5040 : Label loop(this, {&var_entry, entry_start_position}),
3614 1680 : continue_next_entry(this);
3615 1680 : Goto(&loop);
3616 : BIND(&loop);
3617 :
3618 : // If the entry index is the not-found sentinel, we are done.
3619 6720 : GotoIf(
3620 : WordEqual(var_entry.value(), IntPtrConstant(CollectionType::kNotFound)),
3621 : not_found);
3622 :
3623 : // Make sure the entry index is within range.
3624 : CSA_ASSERT(
3625 : this,
3626 : UintPtrLessThan(
3627 : var_entry.value(),
3628 : SmiUntag(SmiAdd(
3629 : CAST(LoadFixedArrayElement(
3630 : CAST(table), CollectionType::NumberOfElementsIndex())),
3631 : CAST(LoadFixedArrayElement(
3632 : CAST(table),
3633 : CollectionType::NumberOfDeletedElementsIndex()))))));
3634 :
3635 : // Compute the index of the entry relative to kHashTableStartIndex.
3636 8400 : entry_start =
3637 : IntPtrAdd(IntPtrMul(var_entry.value(),
3638 : IntPtrConstant(CollectionType::kEntrySize)),
3639 : number_of_buckets);
3640 :
3641 : // Load the key from the entry.
3642 : Node* const candidate_key = LoadFixedArrayElement(
3643 : CAST(table), entry_start,
3644 : CollectionType::HashTableStartIndex() * kTaggedSize);
3645 :
3646 1680 : key_compare(candidate_key, &if_key_found, &continue_next_entry);
3647 :
3648 : BIND(&continue_next_entry);
3649 : // Load the index of the next entry in the bucket chain.
3650 3360 : var_entry.Bind(SmiUntag(CAST(LoadFixedArrayElement(
3651 : CAST(table), entry_start,
3652 : (CollectionType::HashTableStartIndex() + CollectionType::kChainOffset) *
3653 : kTaggedSize))));
3654 :
3655 3360 : Goto(&loop);
3656 : }
3657 :
3658 : BIND(&if_key_found);
3659 1680 : entry_start_position->Bind(entry_start);
3660 1680 : Goto(entry_found);
3661 1680 : }
3662 :
3663 : template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashMap>(
3664 : Node* table, Node* hash,
3665 : const std::function<void(Node*, Label*, Label*)>& key_compare,
3666 : Variable* entry_start_position, Label* entry_found, Label* not_found);
3667 : template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashSet>(
3668 : Node* table, Node* hash,
3669 : const std::function<void(Node*, Label*, Label*)>& key_compare,
3670 : Variable* entry_start_position, Label* entry_found, Label* not_found);
3671 :
    : // Allocates a Struct-derived object of the size recorded in |map| and
    : // fills its body (past the header) with undefined.
3672 10 : Node* CodeStubAssembler::AllocateStruct(Node* map, AllocationFlags flags) {
3673 10 : Comment("AllocateStruct");
3674 : CSA_ASSERT(this, IsMap(map));
3675 20 : TNode<IntPtrT> size = TimesTaggedSize(LoadMapInstanceSizeInWords(map));
3676 20 : TNode<Object> object = Allocate(size, flags);
3677 : StoreMapNoWriteBarrier(object, map);
3678 10 : InitializeStructBody(object, map, size, Struct::kHeaderSize);
3679 10 : return object;
3680 : }
3681 :
3682 10 : void CodeStubAssembler::InitializeStructBody(Node* object, Node* map,
3683 : Node* size, int start_offset) {
3684 : CSA_SLOW_ASSERT(this, IsMap(map));
3685 10 : Comment("InitializeStructBody");
3686 : Node* filler = UndefinedConstant();
3687 : // Calculate the untagged field addresses.
3688 20 : object = BitcastTaggedToWord(object);
3689 : Node* start_address =
3690 30 : IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
3691 : Node* end_address =
3692 40 : IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
3693 10 : StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3694 10 : }
3695 :
    : // Allocates a JSObject for |map| in new space. |properties| / |elements|
    : // may be nullptr, in which case the empty fixed array root is stored.
    : // JSFunction and JSGlobalObject maps are explicitly disallowed (asserts).
3696 2525 : Node* CodeStubAssembler::AllocateJSObjectFromMap(
3697 : Node* map, Node* properties, Node* elements, AllocationFlags flags,
3698 : SlackTrackingMode slack_tracking_mode) {
3699 : CSA_ASSERT(this, IsMap(map));
3700 : CSA_ASSERT(this, Word32BinaryNot(IsJSFunctionMap(map)));
3701 : CSA_ASSERT(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
3702 : JS_GLOBAL_OBJECT_TYPE)));
3703 : TNode<IntPtrT> instance_size =
3704 5050 : TimesTaggedSize(LoadMapInstanceSizeInWords(map));
3705 : TNode<Object> object = AllocateInNewSpace(instance_size, flags);
3706 : StoreMapNoWriteBarrier(object, map);
3707 : InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
3708 2525 : slack_tracking_mode);
3709 2525 : return object;
3710 : }
3711 :
    : // Initializes the properties/elements header fields and the in-object
    : // body of a freshly allocated JSObject. nullptr |properties|/|elements|
    : // mean "store the empty fixed array root".
3712 2525 : void CodeStubAssembler::InitializeJSObjectFromMap(
3713 : Node* object, Node* map, Node* instance_size, Node* properties,
3714 : Node* elements, SlackTrackingMode slack_tracking_mode) {
3715 : CSA_SLOW_ASSERT(this, IsMap(map));
3716 : // This helper assumes that the object is in new-space, as guarded by the
3717 : // check in AllocateJSObjectFromMap.
3718 2525 : if (properties == nullptr) {
3719 : CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
3720 : StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
3721 1456 : RootIndex::kEmptyFixedArray);
3722 : } else {
3723 : CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(properties),
3724 : IsNameDictionary(properties)),
3725 : IsEmptyFixedArray(properties)));
3726 : StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
3727 : properties);
3728 : }
3729 2525 : if (elements == nullptr) {
3730 : StoreObjectFieldRoot(object, JSObject::kElementsOffset,
3731 2352 : RootIndex::kEmptyFixedArray);
3732 : } else {
3733 : CSA_ASSERT(this, IsFixedArray(elements));
3734 : StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
3735 : }
3736 2525 : if (slack_tracking_mode == kNoSlackTracking) {
3737 2021 : InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3738 : } else {
3739 : DCHECK_EQ(slack_tracking_mode, kWithSlackTracking);
3740 504 : InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
3741 : }
3742 2525 : }
3743 :
    : // Fills all in-object fields from |start_offset| to |instance_size| with
    : // undefined. Asserts the map's construction counter is clear, i.e. the
    : // map is not undergoing slack tracking.
3744 2581 : void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
3745 : Node* object, Node* map, Node* instance_size, int start_offset) {
3746 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3747 : CSA_ASSERT(
3748 : this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
3749 2581 : InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
3750 5162 : RootIndex::kUndefinedValue);
3751 2581 : }
3752 :
3753 504 : void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
3754 : Node* object, Node* map, Node* instance_size) {
3755 : CSA_SLOW_ASSERT(this, IsMap(map));
3756 504 : Comment("InitializeJSObjectBodyNoSlackTracking");
3757 :
3758 : // Perform in-object slack tracking if requested.
3759 : int start_offset = JSObject::kHeaderSize;
3760 1008 : Node* bit_field3 = LoadMapBitField3(map);
3761 504 : Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
3762 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3763 : GotoIf(IsSetWord32<Map::ConstructionCounterBits>(bit_field3),
3764 504 : &slack_tracking);
3765 504 : Comment("No slack tracking");
3766 504 : InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3767 504 : Goto(&end);
3768 :
3769 : BIND(&slack_tracking);
3770 : {
3771 504 : Comment("Decrease construction counter");
3772 : // Slack tracking is only done on initial maps.
3773 : CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
3774 : STATIC_ASSERT(Map::ConstructionCounterBits::kNext == 32);
3775 : Node* new_bit_field3 = Int32Sub(
3776 1512 : bit_field3, Int32Constant(1 << Map::ConstructionCounterBits::kShift));
3777 : StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3,
3778 : MachineRepresentation::kWord32);
3779 : STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3780 :
3781 : // The object still has in-object slack therefore the |unsed_or_unused|
3782 : // field contain the "used" value.
3783 : Node* used_size = TimesTaggedSize(ChangeUint32ToWord(
3784 : LoadObjectField(map, Map::kUsedOrUnusedInstanceSizeInWordsOffset,
3785 1512 : MachineType::Uint8())));
3786 :
3787 504 : Comment("iInitialize filler fields");
3788 : InitializeFieldsWithRoot(object, used_size, instance_size,
3789 504 : RootIndex::kOnePointerFillerMap);
3790 :
3791 504 : Comment("Initialize undefined fields");
3792 : InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
3793 1008 : RootIndex::kUndefinedValue);
3794 :
3795 : STATIC_ASSERT(Map::kNoSlackTracking == 0);
3796 : GotoIf(IsClearWord32<Map::ConstructionCounterBits>(new_bit_field3),
3797 504 : &complete);
3798 504 : Goto(&end);
3799 : }
3800 :
3801 : // Finalize the instance size.
3802 : BIND(&complete);
3803 : {
3804 : // ComplextInobjectSlackTracking doesn't allocate and thus doesn't need a
3805 : // context.
3806 : CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
3807 : NoContextConstant(), map);
3808 504 : Goto(&end);
3809 : }
3810 :
3811 504 : BIND(&end);
3812 504 : }
3813 :
    : // Stores |value| into every tagged-size slot in [start_address,
    : // end_address) without write barriers. Both addresses must be
    : // tagged-size aligned (asserted). Callers use this only with values
    : // that need no barrier (roots, or stores into new objects).
3814 1145 : void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
3815 : Node* end_address,
3816 : Node* value) {
3817 1145 : Comment("StoreFieldsNoWriteBarrier");
3818 : CSA_ASSERT(this, WordIsAligned(start_address, kTaggedSize));
3819 : CSA_ASSERT(this, WordIsAligned(end_address, kTaggedSize));
3820 : BuildFastLoop(
3821 : start_address, end_address,
3822 : [this, value](Node* current) {
3823 1145 : StoreNoWriteBarrier(MachineRepresentation::kTagged, current, value);
3824 : },
3825 2290 : kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
3826 1145 : }
3827 :
    : // Returns true iff |capacity| (interpreted per |capacity_mode|) does not
    : // exceed JSArray::kMaxFastArrayLength; the unsigned compare also rejects
    : // negative intptr values.
3828 280 : TNode<BoolT> CodeStubAssembler::IsValidFastJSArrayCapacity(
3829 : Node* capacity, ParameterMode capacity_mode) {
3830 : return UncheckedCast<BoolT>(
3831 : UintPtrLessThanOrEqual(ParameterToIntPtr(capacity, capacity_mode),
3832 560 : IntPtrConstant(JSArray::kMaxFastArrayLength)));
3833 : }
3834 :
    : // Allocates a JSArray header (no elements backing store). When
    : // |allocation_site| is non-null, space for an AllocationMemento is
    : // reserved directly after the array.
3835 3024 : TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
3836 : TNode<Map> array_map, TNode<Smi> length, Node* allocation_site) {
3837 3024 : Comment("begin allocation of JSArray without elements");
3838 : CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3839 :
3840 : int base_size = JSArray::kSize;
3841 3024 : if (allocation_site != nullptr) {
3842 : base_size += AllocationMemento::kSize;
3843 : }
3844 :
3845 3024 : TNode<IntPtrT> size = IntPtrConstant(base_size);
3846 3024 : return AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3847 : }
3848 :
// Allocates a JSArray together with its elements backing store (and,
// when |allocation_site| is non-null, an AllocationMemento between the
// two). Returns {array, elements}; neither the array fields beyond
// map/length/properties nor the element slots are initialized on the
// folded fast path, so callers must fill them in. With
// kAllowLargeObjectAllocation set, oversized requests take a separate
// large-object path whose elements ARE fully zero/hole-initialized.
std::pair<TNode<JSArray>, TNode<FixedArrayBase>>
CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
    ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
    Node* allocation_site, Node* capacity, ParameterMode capacity_mode,
    AllocationFlags allocation_flags) {
  Comment("begin allocation of JSArray with elements");
  // The only allocation flag supported here is kAllowLargeObjectAllocation.
  CHECK_EQ(allocation_flags & ~kAllowLargeObjectAllocation, 0);
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));

  int base_size = JSArray::kSize;
  if (allocation_site != nullptr) base_size += AllocationMemento::kSize;

  // The elements store is laid out immediately after the JSArray (and
  // optional memento) in the folded allocation.
  const int elements_offset = base_size;

  // Compute space for elements
  base_size += FixedArray::kHeaderSize;
  TNode<IntPtrT> size =
      ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);

  TVARIABLE(JSArray, array);
  TVARIABLE(FixedArrayBase, elements);

  Label out(this);

  // For very large arrays in which the requested allocation exceeds the
  // maximal size of a regular heap object, we cannot use the allocation
  // folding trick. Instead, we first allocate the elements in large object
  // space, and then allocate the JSArray (and possibly the allocation memento)
  // in new space.
  if (allocation_flags & kAllowLargeObjectAllocation) {
    Label next(this);
    GotoIf(IsRegularHeapObjectSize(size), &next);

    // Large-object path: the capacity must still be a valid fast-array
    // capacity; crash otherwise.
    CSA_CHECK(this, IsValidFastJSArrayCapacity(capacity, capacity_mode));

    // Allocate and initialize the elements first. Full initialization is needed
    // because the upcoming JSArray allocation could trigger GC.
    elements =
        AllocateFixedArray(kind, capacity, capacity_mode, allocation_flags);

    if (IsDoubleElementsKind(kind)) {
      FillFixedDoubleArrayWithZero(CAST(elements.value()),
                                   ParameterToIntPtr(capacity, capacity_mode));
    } else {
      FillFixedArrayWithSmiZero(CAST(elements.value()),
                                ParameterToIntPtr(capacity, capacity_mode));
    }

    // The JSArray and possibly allocation memento next. Note that
    // allocation_flags are *not* passed on here and the resulting JSArray will
    // always be in new space.
    array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
                                                        allocation_site);
    StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
                                   elements.value());

    Goto(&out);

    BIND(&next);
  }

  // Fold all objects into a single new space allocation.
  array =
      AllocateUninitializedJSArray(array_map, length, allocation_site, size);
  // The elements store lives inside the same allocation, right after the
  // JSArray header (and optional memento).
  elements = UncheckedCast<FixedArrayBase>(
      InnerAllocate(array.value(), elements_offset));

  StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
                                 elements.value());

  // Setup elements object.
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kTaggedSize);
  RootIndex elements_map_index = IsDoubleElementsKind(kind)
                                     ? RootIndex::kFixedDoubleArrayMap
                                     : RootIndex::kFixedArrayMap;
  // Immortal immovable maps never need a write barrier.
  DCHECK(RootsTable::IsImmortalImmovable(elements_map_index));
  StoreMapNoWriteBarrier(elements.value(), elements_map_index);

  TNode<Smi> capacity_smi = ParameterToTagged(capacity, capacity_mode);
  CSA_ASSERT(this, SmiGreaterThan(capacity_smi, SmiConstant(0)));
  StoreObjectFieldNoWriteBarrier(elements.value(), FixedArray::kLengthOffset,
                                 capacity_smi);
  Goto(&out);

  BIND(&out);
  return {array.value(), elements.value()};
}
3936 :
// Allocates |size_in_bytes| in new space and initializes only the JSArray
// header fields (map, length, empty properties). When |allocation_site| is
// non-null, an AllocationMemento is written immediately after the JSArray.
// The elements field is NOT initialized — the caller must set it.
TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArray(
    TNode<Map> array_map, TNode<Smi> length, Node* allocation_site,
    TNode<IntPtrT> size_in_bytes) {
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));

  // Allocate space for the JSArray and the elements FixedArray in one go.
  TNode<Object> array = AllocateInNewSpace(size_in_bytes);

  // New-space allocation: no write barriers needed for these stores.
  StoreMapNoWriteBarrier(array, array_map);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);

  if (allocation_site != nullptr) {
    // The memento sits directly after the fixed-size JSArray header.
    InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
                                allocation_site);
  }

  return CAST(array);
}
3957 :
// Allocates a fully initialized JSArray of the given |capacity|, with all
// element slots filled with the hole. Three paths, chosen by what is known
// about |capacity| at compile time:
//  1) statically zero       -> share the empty_fixed_array, no elements alloc;
//  2) statically non-zero   -> fold array + elements into one allocation;
//  3) dynamic               -> branch at runtime between the two cases above.
TNode<JSArray> CodeStubAssembler::AllocateJSArray(
    ElementsKind kind, TNode<Map> array_map, Node* capacity, TNode<Smi> length,
    Node* allocation_site, ParameterMode capacity_mode,
    AllocationFlags allocation_flags) {
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));

  TNode<JSArray> array;
  TNode<FixedArrayBase> elements;
  int capacity_as_constant;

  if (IsIntPtrOrSmiConstantZero(capacity, capacity_mode)) {
    // Array is empty. Use the shared empty fixed array instead of allocating a
    // new one.
    array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
                                                        allocation_site);
    StoreObjectFieldRoot(array, JSArray::kElementsOffset,
                         RootIndex::kEmptyFixedArray);
  } else if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_as_constant,
                                            capacity_mode)) {
    // Zero constants were handled above, so the constant must be positive.
    CHECK_GT(capacity_as_constant, 0);
    // Allocate both array and elements object, and initialize the JSArray.
    std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
        kind, array_map, length, allocation_site, capacity, capacity_mode,
        allocation_flags);
    // Fill in the elements with holes.
    FillFixedArrayWithValue(kind, elements,
                            IntPtrOrSmiConstant(0, capacity_mode), capacity,
                            RootIndex::kTheHoleValue, capacity_mode);
  } else {
    // Capacity is only known at runtime: emit both the empty and the
    // non-empty variant behind a runtime branch.
    Label out(this), empty(this), nonempty(this);
    TVARIABLE(JSArray, var_array);

    Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
           &empty, &nonempty);

    BIND(&empty);
    {
      // Array is empty. Use the shared empty fixed array instead of allocating
      // a new one.
      var_array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
                                                              allocation_site);
      StoreObjectFieldRoot(var_array.value(), JSArray::kElementsOffset,
                           RootIndex::kEmptyFixedArray);
      Goto(&out);
    }

    BIND(&nonempty);
    {
      // Allocate both array and elements object, and initialize the JSArray.
      TNode<JSArray> array;
      std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
          kind, array_map, length, allocation_site, capacity, capacity_mode,
          allocation_flags);
      var_array = array;
      // Fill in the elements with holes.
      FillFixedArrayWithValue(kind, elements,
                              IntPtrOrSmiConstant(0, capacity_mode), capacity,
                              RootIndex::kTheHoleValue, capacity_mode);
      Goto(&out);
    }

    BIND(&out);
    array = var_array.value();
  }

  return array;
}
4026 :
// Creates a new JSArray holding a copy of |array|'s elements in the range
// [begin, begin + count). The result uses the native context's canonical
// map for the source's elements kind; holes are preserved as-is.
Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
                                            Node* begin, Node* count,
                                            ParameterMode mode, Node* capacity,
                                            Node* allocation_site) {
  Node* original_array_map = LoadMap(array);
  Node* elements_kind = LoadMapElementsKind(original_array_map);

  // Use the canonical map for the Array's ElementsKind
  Node* native_context = LoadNativeContext(context);
  TNode<Map> array_map = LoadJSArrayElementsMap(elements_kind, native_context);

  // Copy the requested slice of the backing store; passing the elements
  // kind lets ExtractFixedArray pick a faster (memcpy-based) copy path.
  Node* new_elements = ExtractFixedArray(
      LoadElements(array), begin, count, capacity,
      ExtractFixedArrayFlag::kAllFixedArrays, mode, nullptr, elements_kind);

  // The new array's length is |count|, independent of the source's length.
  TNode<Object> result = AllocateUninitializedJSArrayWithoutElements(
      array_map, ParameterToTagged(count, mode), allocation_site);
  StoreObjectField(result, JSObject::kElementsOffset, new_elements);
  return result;
}
4047 :
// Clones |array| (elements store plus a fresh JSArray header). When
// |convert_holes| is kConvertToUndefined and the source is holey, holes
// are rewritten to undefined and the clone's elements kind is downgraded
// to PACKED_ELEMENTS if any hole was actually found.
Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
                                          ParameterMode mode,
                                          Node* allocation_site,
                                          HoleConversionMode convert_holes) {
  // TODO(dhai): we should be able to assert IsFastJSArray(array) here, but this
  // function is also used to copy boilerplates even when the no-elements
  // protector is invalid. This function should be renamed to reflect its uses.
  CSA_ASSERT(this, IsJSArray(array));

  Node* length = LoadJSArrayLength(array);
  Node* new_elements = nullptr;
  VARIABLE(var_new_elements, MachineRepresentation::kTagged);
  TVARIABLE(Int32T, var_elements_kind, LoadMapElementsKind(LoadMap(array)));

  Label allocate_jsarray(this), holey_extract(this);

  bool need_conversion =
      convert_holes == HoleConversionMode::kConvertToUndefined;
  if (need_conversion) {
    // We need to take care of holes, if the array is of holey elements kind.
    GotoIf(IsHoleyFastElementsKind(var_elements_kind.value()), &holey_extract);
  }

  // Simple extraction that preserves holes. COW stores may be shared
  // instead of copied (kAllFixedArraysDontCopyCOW).
  new_elements =
      ExtractFixedArray(LoadElements(array), IntPtrOrSmiConstant(0, mode),
                        TaggedToParameter(length, mode), nullptr,
                        ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, mode,
                        nullptr, var_elements_kind.value());
  var_new_elements.Bind(new_elements);
  Goto(&allocate_jsarray);

  if (need_conversion) {
    BIND(&holey_extract);
    // Convert holes to undefined.
    TVARIABLE(BoolT, var_holes_converted, Int32FalseConstant());
    // Copy |array|'s elements store. The copy will be compatible with the
    // original elements kind unless there are holes in the source. Any holes
    // get converted to undefined, hence in that case the copy is compatible
    // only with PACKED_ELEMENTS and HOLEY_ELEMENTS, and we will choose
    // PACKED_ELEMENTS. Also, if we want to replace holes, we must not use
    // ExtractFixedArrayFlag::kDontCopyCOW.
    new_elements = ExtractFixedArray(
        LoadElements(array), IntPtrOrSmiConstant(0, mode),
        TaggedToParameter(length, mode), nullptr,
        ExtractFixedArrayFlag::kAllFixedArrays, mode, &var_holes_converted);
    var_new_elements.Bind(new_elements);
    // If the array type didn't change, use the original elements kind.
    GotoIfNot(var_holes_converted.value(), &allocate_jsarray);
    // Otherwise use PACKED_ELEMENTS for the target's elements kind.
    var_elements_kind = Int32Constant(PACKED_ELEMENTS);
    Goto(&allocate_jsarray);
  }

  BIND(&allocate_jsarray);
  // Use the canonical map for the chosen elements kind.
  Node* native_context = LoadNativeContext(context);
  TNode<Map> array_map =
      LoadJSArrayElementsMap(var_elements_kind.value(), native_context);

  TNode<Object> result = AllocateUninitializedJSArrayWithoutElements(
      array_map, CAST(length), allocation_site);
  StoreObjectField(result, JSObject::kElementsOffset, var_new_elements.value());
  return result;
}
4113 :
// Allocates a FixedArray or FixedDoubleArray (selected by |kind|) of the
// given positive |capacity| and initializes its map and length. Element
// slots are left uninitialized. Capacities above the kind's kMaxLength
// abort via a fatal OOM runtime call (checked at compile time for constant
// capacities, at runtime otherwise).
TNode<FixedArrayBase> CodeStubAssembler::AllocateFixedArray(
    ElementsKind kind, Node* capacity, ParameterMode mode,
    AllocationFlags flags, SloppyTNode<Map> fixed_array_map) {
  Comment("AllocateFixedArray");
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity,
                                          IntPtrOrSmiConstant(0, mode), mode));

  const intptr_t kMaxLength = IsDoubleElementsKind(kind)
                                  ? FixedDoubleArray::kMaxLength
                                  : FixedArray::kMaxLength;
  intptr_t capacity_constant;
  if (ToParameterConstant(capacity, &capacity_constant, mode)) {
    // Constant capacity: enforce the limit when building the stub.
    CHECK_LE(capacity_constant, kMaxLength);
  } else {
    // Dynamic capacity: emit a runtime range check.
    Label if_out_of_memory(this, Label::kDeferred), next(this);
    Branch(IntPtrOrSmiGreaterThan(
               capacity,
               IntPtrOrSmiConstant(static_cast<int>(kMaxLength), mode), mode),
           &if_out_of_memory, &next);

    BIND(&if_out_of_memory);
    CallRuntime(Runtime::kFatalProcessOutOfMemoryInvalidArrayLength,
                NoContextConstant());
    Unreachable();

    BIND(&next);
  }

  TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind, mode);

  if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
  // Allocate both array and elements object, and initialize the JSArray.
  Node* array = Allocate(total_size, flags);
  if (fixed_array_map != nullptr) {
    // Conservatively only skip the write barrier if there are no allocation
    // flags, this ensures that the object hasn't ended up in LOS. Note that the
    // fixed array map is currently always immortal and technically wouldn't
    // need the write barrier even in LOS, but it's better to not take chances
    // in case this invariant changes later, since it's difficult to enforce
    // locally here.
    if (flags == CodeStubAssembler::kNone) {
      StoreMapNoWriteBarrier(array, fixed_array_map);
    } else {
      StoreMap(array, fixed_array_map);
    }
  } else {
    // No explicit map given: pick the canonical map for |kind|.
    RootIndex map_index = IsDoubleElementsKind(kind)
                              ? RootIndex::kFixedDoubleArrayMap
                              : RootIndex::kFixedArrayMap;
    DCHECK(RootsTable::IsImmortalImmovable(map_index));
    StoreMapNoWriteBarrier(array, map_index);
  }
  StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
                                 ParameterToTagged(capacity, mode));
  return UncheckedCast<FixedArray>(array);
}
4171 :
// Copies the range [first, first + count) of |source| (a FixedArray or
// FixedDoubleArray) into a freshly allocated FixedArray of size |capacity|.
// COW sources may be returned unchanged when flags and range allow.
// Picks between a new-space fast path (memcpy-style CopyElements) and a
// deferred old-space path; when |convert_holes| requests it, holes are
// rewritten to undefined and *var_holes_converted reports whether any
// conversion happened.
TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
    Node* source, Node* first, Node* count, Node* capacity, Node* source_map,
    ElementsKind from_kind, AllocationFlags allocation_flags,
    ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
    HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted,
    Node* source_elements_kind) {
  DCHECK_NE(first, nullptr);
  DCHECK_NE(count, nullptr);
  DCHECK_NE(capacity, nullptr);
  DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
  CSA_ASSERT(this,
             WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity));
  CSA_ASSERT(this, WordEqual(source_map, LoadMap(source)));

  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_target_map, MachineRepresentation::kTagged, source_map);

  Label done(this, {&var_result}), is_cow(this),
      new_space_check(this, {&var_target_map});

  // If source_map is either FixedDoubleArrayMap, or FixedCOWArrayMap but
  // we can't just use COW, use FixedArrayMap as the target map. Otherwise, use
  // source_map as the target map.
  if (IsDoubleElementsKind(from_kind)) {
    CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
    var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
    Goto(&new_space_check);
  } else {
    CSA_ASSERT(this, Word32BinaryNot(IsFixedDoubleArrayMap(source_map)));
    Branch(WordEqual(var_target_map.value(),
                     LoadRoot(RootIndex::kFixedCOWArrayMap)),
           &is_cow, &new_space_check);

    BIND(&is_cow);
    {
      // |source| is a COW array, so we don't actually need to allocate a new
      // array unless:
      // 1) |extract_flags| forces us to, or
      // 2) we're asked to extract only part of the |source| (|first| != 0).
      if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
        // Whole-array extraction of a COW store: just return the source.
        Branch(WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), first),
               &new_space_check, [&] {
                 var_result.Bind(source);
                 Goto(&done);
               });
      } else {
        // Forced copy: the copy is a plain (non-COW) FixedArray.
        var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
        Goto(&new_space_check);
      }
    }
  }

  BIND(&new_space_check);
  {
    // Decide statically whether an old-space (large object) path must be
    // emitted at all; skip it when |count| is a small enough constant.
    bool handle_old_space = true;
    if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
      handle_old_space = false;
      CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
                           count, FixedArray::kHeaderSize, parameter_mode)));
    } else {
      int constant_count;
      handle_old_space =
          !TryGetIntPtrOrSmiConstantValue(count, &constant_count,
                                          parameter_mode) ||
          (constant_count >
           FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
    }

    Label old_space(this, Label::kDeferred);
    if (handle_old_space) {
      GotoIfFixedArraySizeDoesntFitInNewSpace(
          capacity, &old_space, FixedArray::kHeaderSize, parameter_mode);
    }

    Comment("Copy FixedArray new space");
    // We use PACKED_ELEMENTS to tell AllocateFixedArray and
    // CopyFixedArrayElements that we want a FixedArray.
    const ElementsKind to_kind = PACKED_ELEMENTS;
    TNode<FixedArrayBase> to_elements =
        AllocateFixedArray(to_kind, capacity, parameter_mode,
                           AllocationFlag::kNone, var_target_map.value());
    var_result.Bind(to_elements);

    if (convert_holes == HoleConversionMode::kDontConvert &&
        !IsDoubleElementsKind(from_kind)) {
      // We can use CopyElements (memcpy) because we don't need to replace or
      // convert any values. Since {to_elements} is in new-space, CopyElements
      // will efficiently use memcpy.
      FillFixedArrayWithValue(to_kind, to_elements, count, capacity,
                              RootIndex::kTheHoleValue, parameter_mode);
      CopyElements(to_kind, to_elements, IntPtrConstant(0), CAST(source),
                   ParameterToIntPtr(first, parameter_mode),
                   ParameterToIntPtr(count, parameter_mode),
                   SKIP_WRITE_BARRIER);
    } else {
      // Element-by-element copy: handles double unboxing and/or hole
      // conversion, reporting conversions via |var_holes_converted|.
      CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
                             count, capacity, SKIP_WRITE_BARRIER,
                             parameter_mode, convert_holes,
                             var_holes_converted);
    }
    Goto(&done);

    if (handle_old_space) {
      BIND(&old_space);
      {
        Comment("Copy FixedArray old space");
        Label copy_one_by_one(this);

        // Try to use memcpy if we don't need to convert holes to undefined.
        if (convert_holes == HoleConversionMode::kDontConvert &&
            source_elements_kind != nullptr) {
          // Only try memcpy if we're not copying object pointers.
          GotoIfNot(IsFastSmiElementsKind(source_elements_kind),
                    &copy_one_by_one);

          const ElementsKind to_smi_kind = PACKED_SMI_ELEMENTS;
          to_elements =
              AllocateFixedArray(to_smi_kind, capacity, parameter_mode,
                                 allocation_flags, var_target_map.value());
          var_result.Bind(to_elements);

          FillFixedArrayWithValue(to_smi_kind, to_elements, count, capacity,
                                  RootIndex::kTheHoleValue, parameter_mode);
          // CopyElements will try to use memcpy if it's not conflicting with
          // GC. Otherwise it will copy elements by elements, but skip write
          // barriers (since we're copying smis to smis).
          CopyElements(to_smi_kind, to_elements, IntPtrConstant(0),
                       CAST(source), ParameterToIntPtr(first, parameter_mode),
                       ParameterToIntPtr(count, parameter_mode),
                       SKIP_WRITE_BARRIER);
          Goto(&done);
        } else {
          Goto(&copy_one_by_one);
        }

        BIND(&copy_one_by_one);
        {
          // Old-space target may contain object pointers: copy with full
          // write barriers.
          to_elements =
              AllocateFixedArray(to_kind, capacity, parameter_mode,
                                 allocation_flags, var_target_map.value());
          var_result.Bind(to_elements);
          CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
                                 count, capacity, UPDATE_WRITE_BARRIER,
                                 parameter_mode, convert_holes,
                                 var_holes_converted);
          Goto(&done);
        }
      }
    }
  }

  BIND(&done);
  return UncheckedCast<FixedArray>(var_result.value());
}
4326 :
// Copies [first, first + count) of a FixedDoubleArray into a new backing
// store of size |capacity|. First attempts a raw double copy into a new
// FixedDoubleArray (walking backwards through the range); if a hole is
// encountered mid-copy, it restarts via ExtractToFixedArray, producing a
// tagged FixedArray with holes replaced by undefined, and sets
// *var_holes_converted to true.
TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedDoubleArrayFillingHoles(
    Node* from_array, Node* first, Node* count, Node* capacity,
    Node* fixed_array_map, TVariable<BoolT>* var_holes_converted,
    AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags,
    ParameterMode mode) {
  DCHECK_NE(first, nullptr);
  DCHECK_NE(count, nullptr);
  DCHECK_NE(capacity, nullptr);
  DCHECK_NE(var_holes_converted, nullptr);
  CSA_ASSERT(this, IsFixedDoubleArrayMap(fixed_array_map));

  VARIABLE(var_result, MachineRepresentation::kTagged);
  const ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
  Node* to_elements = AllocateFixedArray(kind, capacity, mode, allocation_flags,
                                         fixed_array_map);
  var_result.Bind(to_elements);
  // We first try to copy the FixedDoubleArray to a new FixedDoubleArray.
  // |var_holes_converted| is set to False preliminarily.
  *var_holes_converted = Int32FalseConstant();

  // The construction of the loop and the offsets for double elements is
  // extracted from CopyFixedArrayElements.
  CSA_SLOW_ASSERT(this, MatchesParameterMode(count, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, kind));
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);

  Comment("[ ExtractFixedDoubleArrayFillingHoles");

  // This copy can trigger GC, so we pre-initialize the array with holes.
  FillFixedArrayWithValue(kind, to_elements, IntPtrOrSmiConstant(0, mode),
                          capacity, RootIndex::kTheHoleValue, mode);

  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
  Node* first_from_element_offset =
      ElementOffsetFromIndex(first, kind, mode, 0);
  // The loop stops when the (untagged) offset reaches element |first|.
  Node* limit_offset = IntPtrAdd(first_from_element_offset,
                                 IntPtrConstant(first_element_offset));
  // Start past the last element; the loop pre-decrements before loading.
  VARIABLE(var_from_offset, MachineType::PointerRepresentation(),
           ElementOffsetFromIndex(IntPtrOrSmiAdd(first, count, mode), kind,
                                  mode, first_element_offset));

  Label decrement(this, {&var_from_offset}), done(this);
  // Bias the target base pointer so the same offset indexes both arrays
  // even though the copy starts at element |first| of the source.
  Node* to_array_adjusted =
      IntPtrSub(BitcastTaggedToWord(to_elements), first_from_element_offset);

  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);

  BIND(&decrement);
  {
    Node* from_offset =
        IntPtrSub(var_from_offset.value(), IntPtrConstant(kDoubleSize));
    var_from_offset.Bind(from_offset);

    Node* to_offset = from_offset;

    Label if_hole(this);

    // Jumps to |if_hole| when the loaded double is the hole NaN.
    Node* value = LoadElementAndPrepareForStore(
        from_array, var_from_offset.value(), kind, kind, &if_hole);

    StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
                        to_offset, value);

    Node* compare = WordNotEqual(from_offset, limit_offset);
    Branch(compare, &decrement, &done);

    BIND(&if_hole);
    // We are unlucky: there are holes! We need to restart the copy, this time
    // we will copy the FixedDoubleArray to a new FixedArray with undefined
    // replacing holes. We signal this to the caller through
    // |var_holes_converted|.
    *var_holes_converted = Int32TrueConstant();
    to_elements =
        ExtractToFixedArray(from_array, first, count, capacity, fixed_array_map,
                            kind, allocation_flags, extract_flags, mode,
                            HoleConversionMode::kConvertToUndefined);
    var_result.Bind(to_elements);
    Goto(&done);
  }

  BIND(&done);
  Comment("] ExtractFixedDoubleArrayFillingHoles");
  return UncheckedCast<FixedArrayBase>(var_result.value());
}
4412 :
// Top-level backing-store extraction: copies [first, first + count) of
// |source| (FixedArray or FixedDoubleArray, selected at runtime when both
// flags are set) into a new store of size |capacity|. Null |first|, |count|
// and |capacity| default to 0, length - first, and count respectively.
// A zero capacity yields the shared empty_fixed_array. Passing
// |var_holes_converted| requests hole-to-undefined conversion.
TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
    Node* source, Node* first, Node* count, Node* capacity,
    ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
    TVariable<BoolT>* var_holes_converted, Node* source_runtime_kind) {
  DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays ||
         extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays);
  // If we want to replace holes, ExtractFixedArrayFlag::kDontCopyCOW should not
  // be used, because that disables the iteration which detects holes.
  DCHECK_IMPLIES(var_holes_converted != nullptr,
                 !(extract_flags & ExtractFixedArrayFlag::kDontCopyCOW));
  HoleConversionMode convert_holes =
      var_holes_converted != nullptr ? HoleConversionMode::kConvertToUndefined
                                     : HoleConversionMode::kDontConvert;
  VARIABLE(var_result, MachineRepresentation::kTagged);
  // New-space-only extraction cannot tolerate large-object allocation.
  const AllocationFlags allocation_flags =
      (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
          ? CodeStubAssembler::kNone
          : CodeStubAssembler::kAllowLargeObjectAllocation;
  if (first == nullptr) {
    first = IntPtrOrSmiConstant(0, parameter_mode);
  }
  if (count == nullptr) {
    // Default: copy everything from |first| to the end of |source|.
    count = IntPtrOrSmiSub(
        TaggedToParameter(LoadFixedArrayBaseLength(source), parameter_mode),
        first, parameter_mode);

    CSA_ASSERT(
        this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant(0, parameter_mode),
                                         count, parameter_mode));
  }
  if (capacity == nullptr) {
    capacity = count;
  } else {
    // An explicit capacity must cover the copied range.
    CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
                         IntPtrOrSmiAdd(first, count, parameter_mode), capacity,
                         parameter_mode)));
  }

  Label if_fixed_double_array(this), empty(this), done(this, {&var_result});
  Node* source_map = LoadMap(source);
  GotoIf(WordEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity), &empty);

  // Runtime dispatch on the source's map is only emitted when both array
  // flavours are statically possible.
  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
    if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
      GotoIf(IsFixedDoubleArrayMap(source_map), &if_fixed_double_array);
    } else {
      CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
    }
  }

  if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
    // Here we can only get |source| as FixedArray, never FixedDoubleArray.
    // PACKED_ELEMENTS is used to signify that the source is a FixedArray.
    Node* to_elements = ExtractToFixedArray(
        source, first, count, capacity, source_map, PACKED_ELEMENTS,
        allocation_flags, extract_flags, parameter_mode, convert_holes,
        var_holes_converted, source_runtime_kind);
    var_result.Bind(to_elements);
    Goto(&done);
  }

  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
    BIND(&if_fixed_double_array);
    Comment("Copy FixedDoubleArray");

    if (convert_holes == HoleConversionMode::kConvertToUndefined) {
      // May fall back to a tagged FixedArray if holes are found; reported
      // via |var_holes_converted|.
      Node* to_elements = ExtractFixedDoubleArrayFillingHoles(
          source, first, count, capacity, source_map, var_holes_converted,
          allocation_flags, extract_flags, parameter_mode);
      var_result.Bind(to_elements);
    } else {
      // We use PACKED_DOUBLE_ELEMENTS to signify that both the source and
      // the target are FixedDoubleArray. That it is PACKED or HOLEY does not
      // matter.
      ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
      TNode<FixedArrayBase> to_elements = AllocateFixedArray(
          kind, capacity, parameter_mode, allocation_flags, source_map);
      FillFixedArrayWithValue(kind, to_elements, count, capacity,
                              RootIndex::kTheHoleValue, parameter_mode);
      CopyElements(kind, to_elements, IntPtrConstant(0), CAST(source),
                   ParameterToIntPtr(first, parameter_mode),
                   ParameterToIntPtr(count, parameter_mode));
      var_result.Bind(to_elements);
    }

    Goto(&done);
  }

  BIND(&empty);
  {
    Comment("Copy empty array");

    var_result.Bind(EmptyFixedArrayConstant());
    Goto(&done);
  }

  BIND(&done);
  return UncheckedCast<FixedArray>(var_result.value());
}
4512 :
// Stores |length| (0 < length <= LengthField::kMax) into the combined
// length-and-hash field of |property_array| as a Smi, with no write
// barrier. NOTE(review): this overwrites the whole combined field, which
// appears to leave the hash portion at its field-encoding for a plain
// length value — confirm against PropertyArray's bitfield layout.
void CodeStubAssembler::InitializePropertyArrayLength(Node* property_array,
                                                      Node* length,
                                                      ParameterMode mode) {
  CSA_SLOW_ASSERT(this, IsPropertyArray(property_array));
  CSA_ASSERT(
      this, IntPtrOrSmiGreaterThan(length, IntPtrOrSmiConstant(0, mode), mode));
  CSA_ASSERT(
      this,
      IntPtrOrSmiLessThanOrEqual(
          length, IntPtrOrSmiConstant(PropertyArray::LengthField::kMax, mode),
          mode));
  // Smi stores never need a write barrier (kTaggedSigned representation).
  StoreObjectFieldNoWriteBarrier(
      property_array, PropertyArray::kLengthAndHashOffset,
      ParameterToTagged(length, mode), MachineRepresentation::kTaggedSigned);
}
4528 :
4529 504 : Node* CodeStubAssembler::AllocatePropertyArray(Node* capacity_node,
4530 : ParameterMode mode,
4531 : AllocationFlags flags) {
4532 : CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
4533 : CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
4534 : IntPtrOrSmiConstant(0, mode), mode));
4535 : TNode<IntPtrT> total_size =
4536 504 : GetPropertyArrayAllocationSize(capacity_node, mode);
4537 :
4538 1008 : TNode<Object> array = Allocate(total_size, flags);
4539 : RootIndex map_index = RootIndex::kPropertyArrayMap;
4540 : DCHECK(RootsTable::IsImmortalImmovable(map_index));
4541 504 : StoreMapNoWriteBarrier(array, map_index);
4542 504 : InitializePropertyArrayLength(array, capacity_node, mode);
4543 504 : return array;
4544 : }
4545 :
4546 504 : void CodeStubAssembler::FillPropertyArrayWithUndefined(Node* array,
4547 : Node* from_node,
4548 : Node* to_node,
4549 : ParameterMode mode) {
4550 : CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
4551 : CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
4552 : CSA_SLOW_ASSERT(this, IsPropertyArray(array));
4553 : ElementsKind kind = PACKED_ELEMENTS;
4554 : Node* value = UndefinedConstant();
4555 : BuildFastFixedArrayForEach(array, kind, from_node, to_node,
4556 : [this, value](Node* array, Node* offset) {
4557 : StoreNoWriteBarrier(
4558 : MachineRepresentation::kTagged, array,
4559 504 : offset, value);
4560 : },
4561 1008 : mode);
4562 504 : }
4563 :
// Fills element slots [from_node, to_node) of |array| with the root named
// by |value_root_index| (the hole or undefined). For double-elements
// stores, the root's unboxed float64 payload is written instead of the
// tagged pointer. Root values never need a write barrier.
void CodeStubAssembler::FillFixedArrayWithValue(ElementsKind kind, Node* array,
                                                Node* from_node, Node* to_node,
                                                RootIndex value_root_index,
                                                ParameterMode mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
  DCHECK(value_root_index == RootIndex::kTheHoleValue ||
         value_root_index == RootIndex::kUndefinedValue);

  // Determine the value to initialize the {array} based
  // on the {value_root_index} and the elements {kind}.
  Node* value = LoadRoot(value_root_index);
  if (IsDoubleElementsKind(kind)) {
    // Double stores write the raw float64 bits of the root HeapNumber.
    value = LoadHeapNumberValue(value);
  }

  BuildFastFixedArrayForEach(
      array, kind, from_node, to_node,
      [this, value, kind](Node* array, Node* offset) {
        if (IsDoubleElementsKind(kind)) {
          StoreNoWriteBarrier(MachineRepresentation::kFloat64, array, offset,
                              value);
        } else {
          StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
                              value);
        }
      },
      mode);
}
4594 :
// Stores the double-hole bit pattern into {array} at {index} using raw word
// stores so the signaling-NaN payload is preserved bit-exactly.
void CodeStubAssembler::StoreFixedDoubleArrayHole(
    TNode<FixedDoubleArray> array, Node* index, ParameterMode parameter_mode) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(index, parameter_mode));
  // FixedArray::kHeaderSize is used here; it equals
  // FixedDoubleArray::kHeaderSize (see the STATIC_ASSERT in
  // CopyFixedArrayElements).
  Node* offset =
      ElementOffsetFromIndex(index, PACKED_DOUBLE_ELEMENTS, parameter_mode,
                             FixedArray::kHeaderSize - kHeapObjectTag);
  CSA_ASSERT(this, IsOffsetInBounds(
                       offset, LoadAndUntagFixedArrayBaseLength(array),
                       FixedDoubleArray::kHeaderSize, PACKED_DOUBLE_ELEMENTS));
  // The hole constant as a raw 64-bit word, or as a 32-bit word on 32-bit
  // targets (stored twice below to fill the full double slot).
  Node* double_hole =
      Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
             : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
  // TODO(danno): When we have a Float32/Float64 wrapper class that
  // preserves double bits during manipulation, remove this code/change
  // this to an indexed Float64 store.
  if (Is64()) {
    StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
                        double_hole);
  } else {
    StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
                        double_hole);
    StoreNoWriteBarrier(MachineRepresentation::kWord32, array,
                        IntPtrAdd(offset, IntPtrConstant(kInt32Size)),
                        double_hole);
  }
}
4621 :
// Fills the whole {array} with Smi zero via one libc memset call. {length}
// must equal the array's length (asserted). Smi zero is the all-zero bit
// pattern, so memset with 0 yields a valid array of Smi zeros.
void CodeStubAssembler::FillFixedArrayWithSmiZero(TNode<FixedArray> array,
                                                  TNode<IntPtrT> length) {
  CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));

  TNode<IntPtrT> byte_length = TimesTaggedSize(length);
  CSA_ASSERT(this, UintPtrLessThan(length, byte_length));

  static const int32_t fa_base_data_offset =
      FixedArray::kHeaderSize - kHeapObjectTag;
  // Untagged pointer to the first element.
  TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
                                           IntPtrConstant(fa_base_data_offset));

  // Call out to memset to perform initialization.
  TNode<ExternalReference> memset =
      ExternalConstant(ExternalReference::libc_memset_function());
  STATIC_ASSERT(kSizetSize == kIntptrSize);
  CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
                 MachineType::IntPtr(), MachineType::UintPtr(), memset,
                 backing_store, IntPtrConstant(0), byte_length);
}
4642 :
// Fills the whole {array} with 0.0 via one libc memset call. {length} must
// equal the array's length (asserted). The double 0.0 is the all-zero bit
// pattern, so memset with 0 is equivalent to storing 0.0 everywhere.
void CodeStubAssembler::FillFixedDoubleArrayWithZero(
    TNode<FixedDoubleArray> array, TNode<IntPtrT> length) {
  CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));

  TNode<IntPtrT> byte_length = TimesDoubleSize(length);
  CSA_ASSERT(this, UintPtrLessThan(length, byte_length));

  static const int32_t fa_base_data_offset =
      FixedDoubleArray::kHeaderSize - kHeapObjectTag;
  // Untagged pointer to the first element.
  TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
                                           IntPtrConstant(fa_base_data_offset));

  // Call out to memset to perform initialization.
  TNode<ExternalReference> memset =
      ExternalConstant(ExternalReference::libc_memset_function());
  STATIC_ASSERT(kSizetSize == kIntptrSize);
  CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
                 MachineType::IntPtr(), MachineType::UintPtr(), memset,
                 backing_store, IntPtrConstant(0), byte_length);
}
4663 :
// Jumps to {interesting} if the page containing {object} has the
// kPointersFromHereAreInterestingMask flag set, i.e. if stores out of this
// object may require a write barrier; otherwise falls through.
void CodeStubAssembler::JumpIfPointersFromHereAreInteresting(
    TNode<Object> object, Label* interesting) {
  Label finished(this);
  TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
  TNode<IntPtrT> object_page = PageFromAddress(object_word);
  // The flags word lives at a fixed offset inside the page header.
  TNode<IntPtrT> page_flags = UncheckedCast<IntPtrT>(Load(
      MachineType::IntPtr(), object_page, IntPtrConstant(Page::kFlagsOffset)));
  Branch(
      WordEqual(WordAnd(page_flags,
                        IntPtrConstant(
                            MemoryChunk::kPointersFromHereAreInterestingMask)),
                IntPtrConstant(0)),
      &finished, interesting);
  BIND(&finished);
}
4679 :
// Moves {length} elements within {elements} from {src_index} to {dst_index}
// with memmove semantics (the two ranges may overlap). Takes a memmove fast
// path when no write barrier is needed, otherwise copies element-by-element
// with barriers, choosing the walk direction so that not-yet-copied source
// elements are never clobbered.
void CodeStubAssembler::MoveElements(ElementsKind kind,
                                     TNode<FixedArrayBase> elements,
                                     TNode<IntPtrT> dst_index,
                                     TNode<IntPtrT> src_index,
                                     TNode<IntPtrT> length) {
  Label finished(this);
  Label needs_barrier(this);
  // Double arrays hold raw values, so they can never need a write barrier.
  const bool needs_barrier_check = !IsDoubleElementsKind(kind);

  DCHECK(IsFastElementsKind(kind));
  CSA_ASSERT(this, IsFixedArrayWithKind(elements, kind));
  // Both the source and the destination range must lie within the array.
  CSA_ASSERT(this,
             IntPtrLessThanOrEqual(IntPtrAdd(dst_index, length),
                                   LoadAndUntagFixedArrayBaseLength(elements)));
  CSA_ASSERT(this,
             IntPtrLessThanOrEqual(IntPtrAdd(src_index, length),
                                   LoadAndUntagFixedArrayBaseLength(elements)));

  // The write barrier can be ignored if {dst_elements} is in new space, or if
  // the elements pointer is FixedDoubleArray.
  if (needs_barrier_check) {
    JumpIfPointersFromHereAreInteresting(elements, &needs_barrier);
  }

  // Fast path: a single raw memmove over the backing store.
  const TNode<IntPtrT> source_byte_length =
      IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
  static const int32_t fa_base_data_offset =
      FixedArrayBase::kHeaderSize - kHeapObjectTag;
  TNode<IntPtrT> elements_intptr = BitcastTaggedToWord(elements);
  TNode<IntPtrT> target_data_ptr =
      IntPtrAdd(elements_intptr,
                ElementOffsetFromIndex(dst_index, kind, INTPTR_PARAMETERS,
                                       fa_base_data_offset));
  TNode<IntPtrT> source_data_ptr =
      IntPtrAdd(elements_intptr,
                ElementOffsetFromIndex(src_index, kind, INTPTR_PARAMETERS,
                                       fa_base_data_offset));
  TNode<ExternalReference> memmove =
      ExternalConstant(ExternalReference::libc_memmove_function());
  CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
                 MachineType::Pointer(), MachineType::UintPtr(), memmove,
                 target_data_ptr, source_data_ptr, source_byte_length);

  if (needs_barrier_check) {
    Goto(&finished);

    // Slow path: per-element copy with full write barriers.
    BIND(&needs_barrier);
    {
      const TNode<IntPtrT> begin = src_index;
      const TNode<IntPtrT> end = IntPtrAdd(begin, length);

      // If dst_index is less than src_index, then walk forward.
      // {delta} is the byte distance from a source slot to its target slot.
      const TNode<IntPtrT> delta =
          IntPtrMul(IntPtrSub(dst_index, begin),
                    IntPtrConstant(ElementsKindToByteSize(kind)));
      auto loop_body = [&](Node* array, Node* offset) {
        Node* const element = Load(MachineType::AnyTagged(), array, offset);
        Node* const delta_offset = IntPtrAdd(offset, delta);
        Store(array, delta_offset, element);
      };

      Label iterate_forward(this);
      Label iterate_backward(this);
      Branch(IntPtrLessThan(delta, IntPtrConstant(0)), &iterate_forward,
             &iterate_backward);
      BIND(&iterate_forward);
      {
        // Make a loop for the stores.
        BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
                                   INTPTR_PARAMETERS,
                                   ForEachDirection::kForward);
        Goto(&finished);
      }

      BIND(&iterate_backward);
      {
        BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
                                   INTPTR_PARAMETERS,
                                   ForEachDirection::kReverse);
        Goto(&finished);
      }
    }
    BIND(&finished);
  }
}
4765 :
// Copies {length} elements from {src_elements}@{src_index} into
// {dst_elements}@{dst_index}. The arrays must be distinct unless length is
// zero (asserted); use MoveElements for overlapping ranges within one
// array. Takes a memcpy fast path when no write barrier is needed,
// otherwise copies element-by-element honoring {write_barrier}.
void CodeStubAssembler::CopyElements(ElementsKind kind,
                                     TNode<FixedArrayBase> dst_elements,
                                     TNode<IntPtrT> dst_index,
                                     TNode<FixedArrayBase> src_elements,
                                     TNode<IntPtrT> src_index,
                                     TNode<IntPtrT> length,
                                     WriteBarrierMode write_barrier) {
  Label finished(this);
  Label needs_barrier(this);
  // Double arrays hold raw values, so they can never need a write barrier.
  const bool needs_barrier_check = !IsDoubleElementsKind(kind);

  DCHECK(IsFastElementsKind(kind));
  CSA_ASSERT(this, IsFixedArrayWithKind(dst_elements, kind));
  CSA_ASSERT(this, IsFixedArrayWithKind(src_elements, kind));
  // Both ranges must be within bounds of their respective arrays.
  CSA_ASSERT(this, IntPtrLessThanOrEqual(
                       IntPtrAdd(dst_index, length),
                       LoadAndUntagFixedArrayBaseLength(dst_elements)));
  CSA_ASSERT(this, IntPtrLessThanOrEqual(
                       IntPtrAdd(src_index, length),
                       LoadAndUntagFixedArrayBaseLength(src_elements)));
  // Source and destination must not alias (unless nothing is copied).
  CSA_ASSERT(this, Word32Or(WordNotEqual(dst_elements, src_elements),
                            WordEqual(length, IntPtrConstant(0))));

  // The write barrier can be ignored if {dst_elements} is in new space, or if
  // the elements pointer is FixedDoubleArray.
  if (needs_barrier_check) {
    JumpIfPointersFromHereAreInteresting(dst_elements, &needs_barrier);
  }

  // Fast path: a single raw memcpy between the backing stores.
  TNode<IntPtrT> source_byte_length =
      IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
  static const int32_t fa_base_data_offset =
      FixedArrayBase::kHeaderSize - kHeapObjectTag;
  TNode<IntPtrT> src_offset_start = ElementOffsetFromIndex(
      src_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
  TNode<IntPtrT> dst_offset_start = ElementOffsetFromIndex(
      dst_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
  TNode<IntPtrT> src_elements_intptr = BitcastTaggedToWord(src_elements);
  TNode<IntPtrT> source_data_ptr =
      IntPtrAdd(src_elements_intptr, src_offset_start);
  TNode<IntPtrT> dst_elements_intptr = BitcastTaggedToWord(dst_elements);
  TNode<IntPtrT> dst_data_ptr =
      IntPtrAdd(dst_elements_intptr, dst_offset_start);
  TNode<ExternalReference> memcpy =
      ExternalConstant(ExternalReference::libc_memcpy_function());
  CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
                 MachineType::Pointer(), MachineType::UintPtr(), memcpy,
                 dst_data_ptr, source_data_ptr, source_byte_length);

  if (needs_barrier_check) {
    Goto(&finished);

    // Slow path: per-element copy, honoring {write_barrier}.
    BIND(&needs_barrier);
    {
      const TNode<IntPtrT> begin = src_index;
      const TNode<IntPtrT> end = IntPtrAdd(begin, length);
      // Byte distance from a source slot to its destination slot.
      const TNode<IntPtrT> delta =
          IntPtrMul(IntPtrSub(dst_index, src_index),
                    IntPtrConstant(ElementsKindToByteSize(kind)));
      BuildFastFixedArrayForEach(
          src_elements, kind, begin, end,
          [&](Node* array, Node* offset) {
            Node* const element = Load(MachineType::AnyTagged(), array, offset);
            Node* const delta_offset = IntPtrAdd(offset, delta);
            if (write_barrier == SKIP_WRITE_BARRIER) {
              StoreNoWriteBarrier(MachineRepresentation::kTagged, dst_elements,
                                  delta_offset, element);
            } else {
              Store(dst_elements, delta_offset, element);
            }
          },
          INTPTR_PARAMETERS, ForEachDirection::kForward);
      Goto(&finished);
    }
    BIND(&finished);
  }
}
4843 :
// Copies {element_count} elements starting at {first_element} from
// {from_array} into {to_array}, converting element representation between
// {from_kind} and {to_kind} as needed (Smi/object <-> raw double).
// {capacity} is the capacity of {to_array}; slots beyond the copied range
// are pre-filled with holes. When {convert_holes} is kConvertToUndefined,
// source holes become undefined in the target and {var_holes_converted}
// (if non-null) is set to true when any hole was encountered.
void CodeStubAssembler::CopyFixedArrayElements(
    ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
    Node* to_array, Node* first_element, Node* element_count, Node* capacity,
    WriteBarrierMode barrier_mode, ParameterMode mode,
    HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted) {
  DCHECK_IMPLIES(var_holes_converted != nullptr,
                 convert_holes == HoleConversionMode::kConvertToUndefined);
  CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
  Comment("[ CopyFixedArrayElements");

  // Typed array elements are not supported.
  DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
  DCHECK(!IsFixedTypedArrayElementsKind(to_kind));

  Label done(this);
  bool from_double_elements = IsDoubleElementsKind(from_kind);
  bool to_double_elements = IsDoubleElementsKind(to_kind);
  // Double-to-object conversion allocates HeapNumbers for the loaded values
  // (see LoadElementAndPrepareForStore), so the target stores need a write
  // barrier regardless of {barrier_mode}.
  bool doubles_to_objects_conversion =
      IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
  bool needs_write_barrier =
      doubles_to_objects_conversion ||
      (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
  // When source and target elements have the same byte size, a single
  // running offset can address both arrays (always true on 64-bit, where
  // tagged values and doubles are both 8 bytes wide).
  bool element_offset_matches =
      !needs_write_barrier && (Is64() || IsDoubleElementsKind(from_kind) ==
                                             IsDoubleElementsKind(to_kind));
  Node* double_hole =
      Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
             : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));

  // If copying might trigger a GC, we pre-initialize the FixedArray such that
  // it's always in a consistent state.
  if (convert_holes == HoleConversionMode::kConvertToUndefined) {
    DCHECK(IsObjectElementsKind(to_kind));
    // Use undefined for the part that we copy and holes for the rest.
    // Later if we run into a hole in the source we can just skip the writing
    // to the target and are still guaranteed that we get an undefined.
    FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
                            element_count, RootIndex::kUndefinedValue, mode);
    FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
                            RootIndex::kTheHoleValue, mode);
  } else if (doubles_to_objects_conversion) {
    // Pre-initialized the target with holes so later if we run into a hole in
    // the source we can just skip the writing to the target.
    FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
                            capacity, RootIndex::kTheHoleValue, mode);
  } else if (element_count != capacity) {
    FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
                            RootIndex::kTheHoleValue, mode);
  }

  // The copy loop below walks BACKWARDS, from just past the last copied
  // element down to {first_element} ({limit_offset}).
  Node* first_from_element_offset =
      ElementOffsetFromIndex(first_element, from_kind, mode, 0);
  Node* limit_offset = IntPtrAdd(first_from_element_offset,
                                 IntPtrConstant(first_element_offset));
  VARIABLE(
      var_from_offset, MachineType::PointerRepresentation(),
      ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count, mode),
                             from_kind, mode, first_element_offset));
  // This second variable is used only when the element sizes of source and
  // destination arrays do not match.
  VARIABLE(var_to_offset, MachineType::PointerRepresentation());
  if (element_offset_matches) {
    var_to_offset.Bind(var_from_offset.value());
  } else {
    var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
                                              first_element_offset));
  }

  Variable* vars[] = {&var_from_offset, &var_to_offset, var_holes_converted};
  int num_vars =
      var_holes_converted != nullptr ? arraysize(vars) : arraysize(vars) - 1;
  Label decrement(this, num_vars, vars);

  // When the offsets match, bias the target pointer once up-front so the
  // shared offset addresses both arrays directly.
  Node* to_array_adjusted =
      element_offset_matches
          ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
          : to_array;

  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);

  BIND(&decrement);
  {
    // Step both offsets back by one element width.
    Node* from_offset = IntPtrSub(
        var_from_offset.value(),
        IntPtrConstant(from_double_elements ? kDoubleSize : kTaggedSize));
    var_from_offset.Bind(from_offset);

    Node* to_offset;
    if (element_offset_matches) {
      to_offset = from_offset;
    } else {
      to_offset = IntPtrSub(
          var_to_offset.value(),
          IntPtrConstant(to_double_elements ? kDoubleSize : kTaggedSize));
      var_to_offset.Bind(to_offset);
    }

    // Select what happens when a hole is loaded from the source.
    Label next_iter(this), store_double_hole(this), signal_hole(this);
    Label* if_hole;
    if (convert_holes == HoleConversionMode::kConvertToUndefined) {
      // The target elements array is already preinitialized with undefined
      // so we only need to signal that a hole was found and continue the loop.
      if_hole = &signal_hole;
    } else if (doubles_to_objects_conversion) {
      // The target elements array is already preinitialized with holes, so we
      // can just proceed with the next iteration.
      if_hole = &next_iter;
    } else if (IsDoubleElementsKind(to_kind)) {
      if_hole = &store_double_hole;
    } else {
      // In all the other cases don't check for holes and copy the data as is.
      if_hole = nullptr;
    }

    Node* value = LoadElementAndPrepareForStore(
        from_array, var_from_offset.value(), from_kind, to_kind, if_hole);

    if (needs_write_barrier) {
      CHECK_EQ(to_array, to_array_adjusted);
      Store(to_array_adjusted, to_offset, value);
    } else if (to_double_elements) {
      StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
                          to_offset, value);
    } else {
      StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array_adjusted,
                          to_offset, value);
    }
    Goto(&next_iter);

    if (if_hole == &store_double_hole) {
      BIND(&store_double_hole);
      // Don't use doubles to store the hole double, since manipulating the
      // signaling NaN used for the hole in C++, e.g. with bit_cast, will
      // change its value on ia32 (the x87 stack is used to return values
      // and stores to the stack silently clear the signalling bit).
      //
      // TODO(danno): When we have a Float32/Float64 wrapper class that
      // preserves double bits during manipulation, remove this code/change
      // this to an indexed Float64 store.
      if (Is64()) {
        StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
                            to_offset, double_hole);
      } else {
        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
                            to_offset, double_hole);
        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
                            IntPtrAdd(to_offset, IntPtrConstant(kInt32Size)),
                            double_hole);
      }
      Goto(&next_iter);
    } else if (if_hole == &signal_hole) {
      // This case happens only when IsObjectElementsKind(to_kind).
      BIND(&signal_hole);
      if (var_holes_converted != nullptr) {
        *var_holes_converted = Int32TrueConstant();
      }
      Goto(&next_iter);
    }

    BIND(&next_iter);
    Node* compare = WordNotEqual(from_offset, limit_offset);
    Branch(compare, &decrement, &done);
  }

  BIND(&done);
  Comment("] CopyFixedArrayElements");
}
5016 :
5017 758 : TNode<FixedArray> CodeStubAssembler::HeapObjectToFixedArray(
5018 : TNode<HeapObject> base, Label* cast_fail) {
5019 758 : Label fixed_array(this);
5020 758 : TNode<Map> map = LoadMap(base);
5021 1516 : GotoIf(WordEqual(map, LoadRoot(RootIndex::kFixedArrayMap)), &fixed_array);
5022 1516 : GotoIf(WordNotEqual(map, LoadRoot(RootIndex::kFixedCOWArrayMap)), cast_fail);
5023 758 : Goto(&fixed_array);
5024 : BIND(&fixed_array);
5025 758 : return UncheckedCast<FixedArray>(base);
5026 : }
5027 :
// Copies {property_count} values from {from_array} into {to_array}. When
// {destroy_source} is kNo the source must stay usable, so MutableHeapNumber
// values are cloned (forcing write barriers on the target stores); when
// kYes, debug builds zap the source with undefined after the copy.
void CodeStubAssembler::CopyPropertyArrayValues(Node* from_array,
                                                Node* to_array,
                                                Node* property_count,
                                                WriteBarrierMode barrier_mode,
                                                ParameterMode mode,
                                                DestroySource destroy_source) {
  CSA_SLOW_ASSERT(this, MatchesParameterMode(property_count, mode));
  CSA_SLOW_ASSERT(this, Word32Or(IsPropertyArray(from_array),
                                 IsEmptyFixedArray(from_array)));
  CSA_SLOW_ASSERT(this, IsPropertyArray(to_array));
  Comment("[ CopyPropertyArrayValues");

  bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;

  if (destroy_source == DestroySource::kNo) {
    // PropertyArray may contain MutableHeapNumbers, which will be cloned on the
    // heap, requiring a write barrier.
    needs_write_barrier = true;
  }

  Node* start = IntPtrOrSmiConstant(0, mode);
  ElementsKind kind = PACKED_ELEMENTS;
  BuildFastFixedArrayForEach(
      from_array, kind, start, property_count,
      [this, to_array, needs_write_barrier, destroy_source](Node* array,
                                                            Node* offset) {
        Node* value = Load(MachineType::AnyTagged(), array, offset);

        if (destroy_source == DestroySource::kNo) {
          // Keep the source intact: clone mutable boxes instead of sharing.
          value = CloneIfMutablePrimitive(CAST(value));
        }

        if (needs_write_barrier) {
          Store(to_array, offset, value);
        } else {
          StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
                              value);
        }
      },
      mode);

#ifdef DEBUG
  // Zap {from_array} if the copying above has made it invalid.
  if (destroy_source == DestroySource::kYes) {
    Label did_zap(this);
    GotoIf(IsEmptyFixedArray(from_array), &did_zap);
    FillPropertyArrayWithUndefined(from_array, start, property_count, mode);

    Goto(&did_zap);
    BIND(&did_zap);
  }
#endif
  Comment("] CopyPropertyArrayValues");
}
5082 :
// Copies {character_count} characters from {from_string}@{from_index} to
// {to_string}@{to_index}. Only widening or same-width copies are supported
// (two-byte -> one-byte is forbidden by the DCHECK).
void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
                                             TNode<IntPtrT> from_index,
                                             TNode<IntPtrT> to_index,
                                             TNode<IntPtrT> character_count,
                                             String::Encoding from_encoding,
                                             String::Encoding to_encoding) {
  // Cannot assert IsString(from_string) and IsString(to_string) here because
  // CSA::SubString can pass in faked sequential strings when handling external
  // subject strings.
  bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
  bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
  DCHECK_IMPLIES(to_one_byte, from_one_byte);
  Comment("CopyStringCharacters ",
          from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", " -> ",
          to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");

  // Model the character storage as UINT8/UINT16 element arrays so the
  // generic offset helpers can be reused.
  ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
  ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
  Node* from_offset = ElementOffsetFromIndex(from_index, from_kind,
                                             INTPTR_PARAMETERS, header_size);
  Node* to_offset =
      ElementOffsetFromIndex(to_index, to_kind, INTPTR_PARAMETERS, header_size);
  Node* byte_count =
      ElementOffsetFromIndex(character_count, from_kind, INTPTR_PARAMETERS);
  Node* limit_offset = IntPtrAdd(from_offset, byte_count);

  // Prepare the fast loop
  MachineType type =
      from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
  MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
                                          : MachineRepresentation::kWord16;
  int from_increment = 1 << ElementsKindToShiftSize(from_kind);
  int to_increment = 1 << ElementsKindToShiftSize(to_kind);

  VARIABLE(current_to_offset, MachineType::PointerRepresentation(), to_offset);
  VariableList vars({&current_to_offset}, zone());
  int to_index_constant = 0, from_index_constant = 0;
  // When both sides use the same encoding and provably start at the same
  // index, the loop's source offset can drive the store as well and the
  // separate destination offset bookkeeping is skipped.
  bool index_same = (from_encoding == to_encoding) &&
                    (from_index == to_index ||
                     (ToInt32Constant(from_index, from_index_constant) &&
                      ToInt32Constant(to_index, to_index_constant) &&
                      from_index_constant == to_index_constant));
  BuildFastLoop(vars, from_offset, limit_offset,
                [this, from_string, to_string, &current_to_offset, to_increment,
                 type, rep, index_same](Node* offset) {
                  Node* value = Load(type, from_string, offset);
                  StoreNoWriteBarrier(
                      rep, to_string,
                      index_same ? offset : current_to_offset.value(), value);
                  if (!index_same) {
                    Increment(&current_to_offset, to_increment);
                  }
                },
                from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
}
5140 :
// Loads the element at {offset} in {array} (elements kind {from_kind}) and
// converts it into the representation required by an array of {to_kind}:
// raw doubles are boxed into HeapNumbers for tagged targets, and Smis /
// HeapNumbers are unboxed to float64 for double targets. Jumps to {if_hole}
// (when non-null) if the loaded element is the hole.
Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
                                                       Node* offset,
                                                       ElementsKind from_kind,
                                                       ElementsKind to_kind,
                                                       Label* if_hole) {
  CSA_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
  if (IsDoubleElementsKind(from_kind)) {
    Node* value =
        LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
    if (!IsDoubleElementsKind(to_kind)) {
      // Box the raw double since the target array stores tagged values.
      value = AllocateHeapNumberWithValue(value);
    }
    return value;

  } else {
    Node* value = Load(MachineType::AnyTagged(), array, offset);
    if (if_hole) {
      GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
    }
    if (IsDoubleElementsKind(to_kind)) {
      if (IsSmiElementsKind(from_kind)) {
        value = SmiToFloat64(value);
      } else {
        // The non-Smi element is assumed to be a HeapNumber here.
        value = LoadHeapNumberValue(value);
      }
    }
    return value;
  }
}
5170 :
5171 2407 : Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
5172 : ParameterMode mode) {
5173 : CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
5174 2407 : Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
5175 2407 : Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
5176 : Node* padding =
5177 2407 : IntPtrOrSmiConstant(JSObject::kMinAddedElementsCapacity, mode);
5178 2407 : return IntPtrOrSmiAdd(new_capacity, padding, mode);
5179 : }
5180 :
5181 112 : Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
5182 : ElementsKind kind, Node* key,
5183 : Label* bailout) {
5184 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
5185 : CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
5186 : CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
5187 224 : Node* capacity = LoadFixedArrayBaseLength(elements);
5188 :
5189 : ParameterMode mode = OptimalParameterMode();
5190 : capacity = TaggedToParameter(capacity, mode);
5191 : key = TaggedToParameter(key, mode);
5192 :
5193 : return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
5194 112 : bailout);
5195 : }
5196 :
// Grows {object}'s elements backing store so that index {key} fits and
// returns the new backing store. Jumps to {bailout} when {key} lies more
// than JSObject::kMaxGap beyond the current {capacity}, leaving such large
// gap growth to the runtime.
Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
                                                 ElementsKind kind, Node* key,
                                                 Node* capacity,
                                                 ParameterMode mode,
                                                 Label* bailout) {
  Comment("TryGrowElementsCapacity");
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));

  // If the gap growth is too big, fall back to the runtime.
  Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
  Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);

  // Calculate the capacity of the new backing store.
  Node* new_capacity = CalculateNewElementsCapacity(
      IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
  return GrowElementsCapacity(object, elements, kind, kind, capacity,
                              new_capacity, mode, bailout);
}
5219 :
// Allocates a new backing store of {new_capacity} (kind {to_kind}), copies
// the existing {capacity} elements over (converting from {from_kind}), and
// installs it as {object}'s elements. Jumps to {bailout} when the new
// store would be too large for new-space allocation. Returns the new store.
Node* CodeStubAssembler::GrowElementsCapacity(
    Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
    Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
  Comment("[ GrowElementsCapacity");
  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
  CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));

  // If size of the allocation for the new capacity doesn't fit in a page
  // that we can bump-pointer allocate from, fall back to the runtime.
  int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(
             new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
         bailout);

  // Allocate the new backing store.
  Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);

  // Copy the elements from the old elements store to the new.
  // The size-check above guarantees that the |new_elements| is allocated
  // in new space so we can skip the write barrier.
  CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
                         new_capacity, SKIP_WRITE_BARRIER, mode);

  StoreObjectField(object, JSObject::kElementsOffset, new_elements);
  Comment("] GrowElementsCapacity");
  return new_elements;
}
5249 :
// Writes an AllocationMemento immediately behind {base} (at offset
// {base_allocation_size}) pointing at {allocation_site}, and, when
// allocation-site pretenuring is enabled, bumps the site's create count.
void CodeStubAssembler::InitializeAllocationMemento(Node* base,
                                                    Node* base_allocation_size,
                                                    Node* allocation_site) {
  Comment("[Initialize AllocationMemento");
  // The memento space was reserved as part of the enclosing allocation, so
  // InnerAllocate just carves it out — no separate GC-visible allocation.
  TNode<Object> memento =
      InnerAllocate(CAST(base), UncheckedCast<IntPtrT>(base_allocation_size));
  StoreMapNoWriteBarrier(memento, RootIndex::kAllocationMementoMap);
  StoreObjectFieldNoWriteBarrier(
      memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    TNode<Int32T> count = UncheckedCast<Int32T>(LoadObjectField(
        allocation_site, AllocationSite::kPretenureCreateCountOffset,
        MachineType::Int32()));

    TNode<Int32T> incremented_count = Int32Add(count, Int32Constant(1));
    StoreObjectFieldNoWriteBarrier(
        allocation_site, AllocationSite::kPretenureCreateCountOffset,
        incremented_count, MachineRepresentation::kWord32);
  }
  Comment("]");
}
5271 :
// Returns the float64 value of {value} when it is a Smi or a HeapNumber;
// jumps to {if_valueisnotnumber} for any other input. Performs no
// user-visible conversions.
Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
                                            Label* if_valueisnotnumber) {
  Label out(this);
  VARIABLE(var_result, MachineRepresentation::kFloat64);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this), if_valueisnotsmi(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  BIND(&if_valueissmi);
  {
    // Convert the Smi {value}.
    var_result.Bind(SmiToFloat64(value));
    Goto(&out);
  }

  BIND(&if_valueisnotsmi);
  {
    // Check if {value} is a HeapNumber.
    Label if_valueisheapnumber(this);
    Branch(IsHeapNumber(value), &if_valueisheapnumber, if_valueisnotnumber);

    BIND(&if_valueisheapnumber);
    {
      // Load the floating point value.
      var_result.Bind(LoadHeapNumberValue(value));
      Goto(&out);
    }
  }
  BIND(&out);
  return var_result.value();
}
5304 :
// Converts any tagged {value} to a Float64, calling the NonNumberToNumber
// builtin (which may run arbitrary JS via valueOf/toString) as needed.
// The loop re-tests the converted result until a Number is obtained.
Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged);
  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  BIND(&loop);
  {
    Label if_valueisnotnumber(this, Label::kDeferred);

    // Load the current {value}.
    value = var_value.value();

    // Convert {value} to Float64 if it is a number and convert it to a number
    // otherwise.
    Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
    var_result.Bind(result);
    Goto(&done_loop);

    BIND(&if_valueisnotnumber);
    {
      // Convert the {value} to a Number first.
      var_value.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, value));
      Goto(&loop);
    }
  }
  BIND(&done_loop);
  return var_result.value();
}
5335 :
// Truncates a tagged {value} to a Word32 using ToNumber semantics
// (kToNumber: BigInts are not accepted and end up in the conversion path).
Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
  VARIABLE(var_result, MachineRepresentation::kWord32);
  Label done(this);
  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumber>(context, value,
                                                            &done, &var_result);
  BIND(&done);
  return var_result.value();
}
5344 :
// Truncate {value} to word32 and jump to {if_number} if it is a Number,
// or find that it is a BigInt and jump to {if_bigint}.
void CodeStubAssembler::TaggedToWord32OrBigInt(Node* context, Node* value,
                                               Label* if_number,
                                               Variable* var_word32,
                                               Label* if_bigint,
                                               Variable* var_bigint) {
  // Thin wrapper: same as the implementation below, but without feedback.
  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
      context, value, if_number, var_word32, if_bigint, var_bigint);
}

// Truncate {value} to word32 and jump to {if_number} if it is a Number,
// or find that it is a BigInt and jump to {if_bigint}. In either case,
// store the type feedback in {var_feedback}.
void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
    Node* context, Node* value, Label* if_number, Variable* var_word32,
    Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
      context, value, if_number, var_word32, if_bigint, var_bigint,
      var_feedback);
}
5366 :
// Shared implementation for the TaggedToWord32* helpers above.
// Dispatches on the tag/map of {value}: Smis and HeapNumbers are truncated
// to word32 and control continues at {if_number}; with kToNumeric, BigInts
// are passed through to {if_bigint}. Anything else (oddballs and arbitrary
// objects) is converted via a builtin and the loop is retried. When
// {var_feedback} is non-null, binary-op type feedback is accumulated.
template <Object::Conversion conversion>
void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
    Node* context, Node* value, Label* if_number, Variable* var_word32,
    Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
  DCHECK(var_word32->rep() == MachineRepresentation::kWord32);
  DCHECK(var_bigint == nullptr ||
         var_bigint->rep() == MachineRepresentation::kTagged);
  DCHECK(var_feedback == nullptr ||
         var_feedback->rep() == MachineRepresentation::kTaggedSigned);

  // We might need to loop after conversion.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNone);
  // The loop header only needs {var_feedback} as a loop variable when
  // feedback collection is requested.
  Variable* loop_vars[] = {&var_value, var_feedback};
  int num_vars =
      var_feedback != nullptr ? arraysize(loop_vars) : arraysize(loop_vars) - 1;
  Label loop(this, num_vars, loop_vars);
  Goto(&loop);
  BIND(&loop);
  {
    value = var_value.value();
    Label not_smi(this), is_heap_number(this), is_oddball(this),
        is_bigint(this);
    GotoIf(TaggedIsNotSmi(value), &not_smi);

    // {value} is a Smi.
    var_word32->Bind(SmiToInt32(value));
    CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
    Goto(if_number);

    BIND(&not_smi);
    Node* map = LoadMap(value);
    GotoIf(IsHeapNumberMap(map), &is_heap_number);
    Node* instance_type = LoadMapInstanceType(map);
    if (conversion == Object::Conversion::kToNumeric) {
      GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
    }

    // Not HeapNumber (or BigInt if conversion == kToNumeric).
    {
      if (var_feedback != nullptr) {
        // We do not require an Or with earlier feedback here because once we
        // convert the value to a Numeric, we cannot reach this path. We can
        // only reach this path on the first pass when the feedback is kNone.
        CSA_ASSERT(this, SmiEqual(CAST(var_feedback->value()),
                                  SmiConstant(BinaryOperationFeedback::kNone)));
      }
      GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
      // Not an oddball either -> convert.
      auto builtin = conversion == Object::Conversion::kToNumeric
                         ? Builtins::kNonNumberToNumeric
                         : Builtins::kNonNumberToNumber;
      var_value.Bind(CallBuiltin(builtin, context, value));
      OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
      Goto(&loop);

      BIND(&is_oddball);
      // Oddballs (undefined/null/booleans) carry a cached ToNumber value.
      var_value.Bind(LoadObjectField(value, Oddball::kToNumberOffset));
      OverwriteFeedback(var_feedback,
                        BinaryOperationFeedback::kNumberOrOddball);
      Goto(&loop);
    }

    BIND(&is_heap_number);
    var_word32->Bind(TruncateHeapNumberValueToWord32(value));
    CombineFeedback(var_feedback, BinaryOperationFeedback::kNumber);
    Goto(if_number);

    if (conversion == Object::Conversion::kToNumeric) {
      BIND(&is_bigint);
      var_bigint->Bind(value);
      CombineFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
      Goto(if_bigint);
    }
  }
}
5443 :
// Loads the Float64 payload of a HeapNumber and truncates it to Word32
// (JS ToInt32-style truncation).
Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
  Node* value = LoadHeapNumberValue(object);
  return TruncateFloat64ToWord32(value);
}

// Jumps to {if_smi} with {var_result_smi} set when the HeapNumber's value
// can be represented losslessly as a Smi; otherwise falls through.
void CodeStubAssembler::TryHeapNumberToSmi(TNode<HeapNumber> number,
                                           TVariable<Smi>& var_result_smi,
                                           Label* if_smi) {
  TNode<Float64T> value = LoadHeapNumberValue(number);
  TryFloat64ToSmi(value, var_result_smi, if_smi);
}
5455 :
5456 4724 : void CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value,
5457 : TVariable<Smi>& var_result_smi,
5458 : Label* if_smi) {
5459 4724 : TNode<Int32T> value32 = RoundFloat64ToInt32(value);
5460 4724 : TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
5461 :
5462 4724 : Label if_int32(this), if_heap_number(this, Label::kDeferred);
5463 :
5464 9448 : GotoIfNot(Float64Equal(value, value64), &if_heap_number);
5465 14172 : GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
5466 9448 : Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
5467 14172 : Int32Constant(0)),
5468 9448 : &if_heap_number, &if_int32);
5469 :
5470 : TVARIABLE(Number, var_result);
5471 : BIND(&if_int32);
5472 : {
5473 : if (SmiValuesAre32Bits()) {
5474 14172 : var_result_smi = SmiTag(ChangeInt32ToIntPtr(value32));
5475 : } else {
5476 : DCHECK(SmiValuesAre31Bits());
5477 : TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
5478 : TNode<BoolT> overflow = Projection<1>(pair);
5479 : GotoIf(overflow, &if_heap_number);
5480 : var_result_smi =
5481 : BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
5482 : }
5483 4724 : Goto(if_smi);
5484 : }
5485 4724 : BIND(&if_heap_number);
5486 4724 : }
5487 :
// Boxes a Float64 as a tagged Number: a Smi when the value fits losslessly,
// otherwise a freshly allocated HeapNumber.
TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
    SloppyTNode<Float64T> value) {
  Label if_smi(this), done(this);
  TVARIABLE(Smi, var_smi_result);
  TVARIABLE(Number, var_result);
  TryFloat64ToSmi(value, var_smi_result, &if_smi);

  // Fall-through from TryFloat64ToSmi: not Smi-representable, allocate.
  var_result = AllocateHeapNumberWithValue(value);
  Goto(&done);

  BIND(&if_smi);
  {
    var_result = var_smi_result.value();
    Goto(&done);
  }
  BIND(&done);
  return var_result.value();
}
5506 :
// Boxes an Int32 as a tagged Number. With 32-bit Smis every int32 fits;
// with 31-bit Smis an overflow check decides between Smi and HeapNumber.
TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(
    SloppyTNode<Int32T> value) {
  if (SmiValuesAre32Bits()) {
    // Every int32 is Smi-representable; no overflow possible.
    return SmiTag(ChangeInt32ToIntPtr(value));
  }
  DCHECK(SmiValuesAre31Bits());
  TVARIABLE(Number, var_result);
  // Smi-tagging is value << 1, implemented as a checked self-add.
  TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
  TNode<BoolT> overflow = Projection<1>(pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_overflow);
  {
    TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
    TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
    var_result = result;
    Goto(&if_join);
  }
  BIND(&if_notoverflow);
  {
    TNode<IntPtrT> almost_tagged_value =
        ChangeInt32ToIntPtr(Projection<0>(pair));
    TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
    var_result = result;
    Goto(&if_join);
  }
  BIND(&if_join);
  return var_result.value();
}
5537 :
// Boxes a Uint32 as a tagged Number: a Smi for values <= Smi::kMaxValue,
// otherwise a HeapNumber (the deferred path).
TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(
    SloppyTNode<Uint32T> value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  TVARIABLE(Number, var_result);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(Uint32LessThan(Uint32Constant(Smi::kMaxValue), value), &if_overflow,
         &if_not_overflow);

  BIND(&if_not_overflow);
  {
    // The {value} is definitely in valid Smi range.
    var_result = SmiTag(Signed(ChangeUint32ToWord(value)));
  }
  Goto(&if_join);

  BIND(&if_overflow);
  {
    TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
    var_result = AllocateHeapNumberWithValue(float64_value);
  }
  Goto(&if_join);

  BIND(&if_join);
  return var_result.value();
}
5564 :
// Boxes a UintPtr as a tagged Number — same shape as ChangeUint32ToTagged
// but with pointer-width comparison and float conversion.
TNode<Number> CodeStubAssembler::ChangeUintPtrToTagged(TNode<UintPtrT> value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  TVARIABLE(Number, var_result);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(UintPtrLessThan(UintPtrConstant(Smi::kMaxValue), value), &if_overflow,
         &if_not_overflow);

  BIND(&if_not_overflow);
  {
    // The {value} is definitely in valid Smi range.
    var_result = SmiTag(Signed(value));
  }
  Goto(&if_join);

  BIND(&if_overflow);
  {
    TNode<Float64T> float64_value = ChangeUintPtrToFloat64(value);
    var_result = AllocateHeapNumberWithValue(float64_value);
  }
  Goto(&if_join);

  BIND(&if_join);
  return var_result.value();
}
5590 :
// Implements the "coerce receiver to String" pattern used by String.prototype
// methods: returns {value} if it is already a String, converts Smis and
// other objects via the NumberToString/ToString builtins, and throws a
// TypeError mentioning {method_name} for null/undefined receivers.
TNode<String> CodeStubAssembler::ToThisString(Node* context, Node* value,
                                              char const* method_name) {
  VARIABLE(var_value, MachineRepresentation::kTagged, value);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
      if_valueisstring(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
  BIND(&if_valueisnotsmi);
  {
    // Load the instance type of the {value}.
    Node* value_instance_type = LoadInstanceType(value);

    // Check if the {value} is already String.
    Label if_valueisnotstring(this, Label::kDeferred);
    Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
           &if_valueisnotstring);
    BIND(&if_valueisnotstring);
    {
      // Check if the {value} is null.
      Label if_valueisnullorundefined(this, Label::kDeferred);
      GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
      // Convert the {value} to a String.
      var_value.Bind(CallBuiltin(Builtins::kToString, context, value));
      Goto(&if_valueisstring);

      BIND(&if_valueisnullorundefined);
      {
        // The {value} is either null or undefined.
        ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
                       method_name);
      }
    }
  }
  BIND(&if_valueissmi);
  {
    // The {value} is a Smi, convert it to a String.
    var_value.Bind(CallBuiltin(Builtins::kNumberToString, context, value));
    Goto(&if_valueisstring);
  }
  BIND(&if_valueisstring);
  return CAST(var_value.value());
}
5634 :
// Converts a tagged Number (Smi or HeapNumber) to an unsigned 32-bit
// machine value. The HeapNumber path is deferred as the uncommon case.
TNode<Uint32T> CodeStubAssembler::ChangeNumberToUint32(TNode<Number> value) {
  TVARIABLE(Uint32T, var_result);
  Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this);
  Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber);
  BIND(&if_smi);
  {
    var_result = Unsigned(SmiToInt32(CAST(value)));
    Goto(&done);
  }
  BIND(&if_heapnumber);
  {
    var_result = ChangeFloat64ToUint32(LoadHeapNumberValue(CAST(value)));
    Goto(&done);
  }
  BIND(&done);
  return var_result.value();
}
5652 :
// Converts a tagged Number (Smi or HeapNumber) to a Float64 machine value.
TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(
    SloppyTNode<Number> value) {
  // TODO(tebbi): Remove assert once argument is TNode instead of SloppyTNode.
  CSA_SLOW_ASSERT(this, IsNumber(value));
  TVARIABLE(Float64T, result);
  Label smi(this);
  Label done(this, &result);
  GotoIf(TaggedIsSmi(value), &smi);
  // HeapNumber path: load the boxed double directly.
  result = LoadHeapNumberValue(CAST(value));
  Goto(&done);

  BIND(&smi);
  {
    result = SmiToFloat64(CAST(value));
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}
5673 :
// Converts a non-negative tagged Number to a UintPtr. The Smi branch
// asserts (in slow-assert builds) that the value is >= 0; callers must
// guarantee non-negativity.
TNode<UintPtrT> CodeStubAssembler::ChangeNonnegativeNumberToUintPtr(
    TNode<Number> value) {
  TVARIABLE(UintPtrT, result);
  Label done(this, &result);
  Branch(TaggedIsSmi(value),
         [&] {
           TNode<Smi> value_smi = CAST(value);
           CSA_SLOW_ASSERT(this, SmiLessThan(SmiConstant(-1), value_smi));
           result = UncheckedCast<UintPtrT>(SmiToIntPtr(value_smi));
           Goto(&done);
         },
         [&] {
           TNode<HeapNumber> value_hn = CAST(value);
           result = ChangeFloat64ToUintPtr(LoadHeapNumberValue(value_hn));
           Goto(&done);
         });

  BIND(&done);
  return result.value();
}
5694 :
// Multiplies {value} by the system pointer size (via a shift).
TNode<WordT> CodeStubAssembler::TimesSystemPointerSize(
    SloppyTNode<WordT> value) {
  return WordShl(value, kSystemPointerSizeLog2);
}

// Multiplies {value} by the tagged-value size (via a shift).
TNode<WordT> CodeStubAssembler::TimesTaggedSize(SloppyTNode<WordT> value) {
  return WordShl(value, kTaggedSizeLog2);
}

// Multiplies {value} by sizeof(double) (via a shift).
TNode<WordT> CodeStubAssembler::TimesDoubleSize(SloppyTNode<WordT> value) {
  return WordShl(value, kDoubleSizeLog2);
}
5707 :
// Implements the "unbox receiver to primitive" pattern used by
// Boolean/Number/String/Symbol prototype methods: unwraps JSValue wrappers
// (looping, since the wrapped value is reloaded and re-checked) until a
// primitive of {primitive_type} is found, and throws a kNotGeneric
// TypeError mentioning {method_name} for anything else.
Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
                                     PrimitiveType primitive_type,
                                     char const* method_name) {
  // We might need to loop once due to JSValue unboxing.
  VARIABLE(var_value, MachineRepresentation::kTagged, value);
  Label loop(this, &var_value), done_loop(this),
      done_throw(this, Label::kDeferred);
  Goto(&loop);
  BIND(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    // A Smi is only acceptable when a Number is expected.
    GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
                                   ? &done_loop
                                   : &done_throw);

    // Load the map of the {value}.
    Node* value_map = LoadMap(value);

    // Load the instance type of the {value}.
    Node* value_instance_type = LoadMapInstanceType(value_map);

    // Check if {value} is a JSValue.
    Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
    Branch(InstanceTypeEqual(value_instance_type, JS_VALUE_TYPE),
           &if_valueisvalue, &if_valueisnotvalue);

    BIND(&if_valueisvalue);
    {
      // Load the actual value from the {value} wrapper and retry.
      var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
      Goto(&loop);
    }

    BIND(&if_valueisnotvalue);
    {
      // Accept only the primitive matching {primitive_type}.
      switch (primitive_type) {
        case PrimitiveType::kBoolean:
          GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
          break;
        case PrimitiveType::kNumber:
          GotoIf(WordEqual(value_map, HeapNumberMapConstant()), &done_loop);
          break;
        case PrimitiveType::kString:
          GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
          break;
        case PrimitiveType::kSymbol:
          GotoIf(WordEqual(value_map, SymbolMapConstant()), &done_loop);
          break;
      }
      Goto(&done_throw);
    }
  }

  BIND(&done_throw);
  {
    // Pick the human-readable type name for the error message.
    const char* primitive_name = nullptr;
    switch (primitive_type) {
      case PrimitiveType::kBoolean:
        primitive_name = "Boolean";
        break;
      case PrimitiveType::kNumber:
        primitive_name = "Number";
        break;
      case PrimitiveType::kString:
        primitive_name = "String";
        break;
      case PrimitiveType::kSymbol:
        primitive_name = "Symbol";
        break;
    }
    CHECK_NOT_NULL(primitive_name);

    // The {value} is not a compatible receiver for this method.
    ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
                   primitive_name);
  }

  BIND(&done_loop);
  return var_value.value();
}
5791 :
// Throws a kIncompatibleMethodReceiver TypeError unless {value} is a
// HeapObject with exactly {instance_type}. Returns the value's map on
// success (handy for callers that need it anyway).
Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
                                                InstanceType instance_type,
                                                char const* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  VARIABLE(var_value_map, MachineRepresentation::kTagged);

  // Smis have no map/instance type and are never acceptable here.
  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
         &throw_exception);

  // The {value} is not a compatible receiver for this method.
  BIND(&throw_exception);
  ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
                 StringConstant(method_name), value);

  BIND(&out);
  return var_value_map.value();
}
5815 :
// Throws a TypeError built from {msg_template}/{method_name} unless
// {value} is a JSReceiver. Returns the value's map on success.
Node* CodeStubAssembler::ThrowIfNotJSReceiver(Node* context, Node* value,
                                              MessageTemplate msg_template,
                                              const char* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  VARIABLE(var_value_map, MachineRepresentation::kTagged);

  // Smis are not receivers.
  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(IsJSReceiverInstanceType(value_instance_type), &out, &throw_exception);

  // The {value} is not a compatible receiver for this method.
  BIND(&throw_exception);
  ThrowTypeError(context, msg_template, method_name);

  BIND(&out);
  return var_value_map.value();
}
5837 :
// Emits a runtime call that throws a RangeError with up to three message
// arguments, then marks the current block unreachable (the runtime call
// never returns).
void CodeStubAssembler::ThrowRangeError(Node* context, MessageTemplate message,
                                        Node* arg0, Node* arg1, Node* arg2) {
  Node* template_index = SmiConstant(static_cast<int>(message));
  // Dispatch on how many optional args were supplied; nullptr args must
  // not be passed to the runtime.
  if (arg0 == nullptr) {
    CallRuntime(Runtime::kThrowRangeError, context, template_index);
  } else if (arg1 == nullptr) {
    CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0);
  } else if (arg2 == nullptr) {
    CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1);
  } else {
    CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1,
                arg2);
  }
  Unreachable();
}
5853 :
// Convenience overload: wraps C-string message args as String constants
// and forwards to the Node*-based overload below.
void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
                                       char const* arg0, char const* arg1) {
  Node* arg0_node = nullptr;
  if (arg0) arg0_node = StringConstant(arg0);
  Node* arg1_node = nullptr;
  if (arg1) arg1_node = StringConstant(arg1);
  ThrowTypeError(context, message, arg0_node, arg1_node);
}

// Emits a runtime call that throws a TypeError with up to three message
// arguments, then marks the current block unreachable (mirrors
// ThrowRangeError above).
void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
                                       Node* arg0, Node* arg1, Node* arg2) {
  Node* template_index = SmiConstant(static_cast<int>(message));
  if (arg0 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index);
  } else if (arg1 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0);
  } else if (arg2 == nullptr) {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1);
  } else {
    CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1,
                arg2);
  }
  Unreachable();
}
5878 :
// True iff {instance_type} equals the given InstanceType constant.
TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(
    SloppyTNode<Int32T> instance_type, int type) {
  return Word32Equal(instance_type, Int32Constant(type));
}

// True iff the map's properties are stored in dictionary mode.
TNode<BoolT> CodeStubAssembler::IsDictionaryMap(SloppyTNode<Map> map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Node* bit_field3 = LoadMapBitField3(map);
  return IsSetWord32<Map::IsDictionaryMapBit>(bit_field3);
}

// True iff objects with this map are extensible (bit field 2).
TNode<BoolT> CodeStubAssembler::IsExtensibleMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsExtensibleBit>(LoadMapBitField2(map));
}

// True iff the map is extensible AND not a prototype map — both bits are
// tested in one masked compare.
TNode<BoolT> CodeStubAssembler::IsExtensibleNonPrototypeMap(TNode<Map> map) {
  int kMask = Map::IsExtensibleBit::kMask | Map::IsPrototypeMapBit::kMask;
  int kExpected = Map::IsExtensibleBit::kMask;
  return Word32Equal(Word32And(LoadMapBitField2(map), Int32Constant(kMask)),
                     Int32Constant(kExpected));
}

// True iff objects with this map are callable.
TNode<BoolT> CodeStubAssembler::IsCallableMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsCallableBit>(LoadMapBitField(map));
}

// True iff the map has been deprecated (bit field 3).
TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsDeprecatedBit>(LoadMapBitField3(map));
}

// True iff objects with this map are undetectable (e.g. document.all).
TNode<BoolT> CodeStubAssembler::IsUndetectableMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsUndetectableBit>(LoadMapBitField(map));
}
5916 :
// The predicates below all follow the same pattern: load a protector cell
// from the root list and report whether its value equals kProtectorInvalid,
// i.e. whether the corresponding fast-path assumption has been broken.

TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kNoElementsProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsArrayIteratorProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kArrayIteratorProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

// Note: this protector is a plain Cell, hence Cell::kValueOffset.
TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kPromiseResolveProtector);
  Node* cell_value = LoadObjectField(cell, Cell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kPromiseThenProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kArraySpeciesProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kTypedArraySpeciesProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsRegExpSpeciesProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kRegExpSpeciesProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
  Node* cell = LoadRoot(RootIndex::kPromiseSpeciesProtector);
  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
  return WordEqual(cell_value, invalid);
}
5972 :
// True iff {map}'s prototype is the native context's initial
// Array.prototype (identity comparison).
TNode<BoolT> CodeStubAssembler::IsPrototypeInitialArrayPrototype(
    SloppyTNode<Context> context, SloppyTNode<Map> map) {
  Node* const native_context = LoadNativeContext(context);
  Node* const initial_array_prototype = LoadContextElement(
      native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
  Node* proto = LoadMapPrototype(map);
  return WordEqual(proto, initial_array_prototype);
}

// True iff {map}'s prototype's prototype is %TypedArray%.prototype —
// i.e. the map belongs to an untampered typed-array subclass instance.
TNode<BoolT> CodeStubAssembler::IsPrototypeTypedArrayPrototype(
    SloppyTNode<Context> context, SloppyTNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const typed_array_prototype =
      LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
  TNode<HeapObject> proto = LoadMapPrototype(map);
  // If the immediate prototype is not a JSObject (e.g. null), substitute
  // null so the final comparison simply fails.
  TNode<HeapObject> proto_of_proto = Select<HeapObject>(
      IsJSObject(proto), [=] { return LoadMapPrototype(LoadMap(proto)); },
      [=] { return NullConstant(); });
  return WordEqual(proto_of_proto, typed_array_prototype);
}
5993 :
// The four predicates below compare {map} against one of the canonical
// arguments-object maps cached on the native context.

TNode<BoolT> CodeStubAssembler::IsFastAliasedArgumentsMap(
    TNode<Context> context, TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map = LoadContextElement(
      native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}

TNode<BoolT> CodeStubAssembler::IsSlowAliasedArgumentsMap(
    TNode<Context> context, TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map = LoadContextElement(
      native_context, Context::SLOW_ALIASED_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}

TNode<BoolT> CodeStubAssembler::IsSloppyArgumentsMap(TNode<Context> context,
                                                     TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map =
      LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}

TNode<BoolT> CodeStubAssembler::IsStrictArgumentsMap(TNode<Context> context,
                                                     TNode<Map> map) {
  TNode<Context> const native_context = LoadNativeContext(context);
  TNode<Object> const arguments_map =
      LoadContextElement(native_context, Context::STRICT_ARGUMENTS_MAP_INDEX);
  return WordEqual(arguments_map, map);
}
6025 :
// True iff {object} is callable; Smis are never callable, so the Smi case
// short-circuits to false without loading a map.
TNode<BoolT> CodeStubAssembler::TaggedIsCallable(TNode<Object> object) {
  return Select<BoolT>(
      TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
      [=] {
        return IsCallableMap(LoadMap(UncheckedCast<HeapObject>(object)));
      });
}

// True iff the HeapObject {object} is callable (caller guarantees non-Smi).
TNode<BoolT> CodeStubAssembler::IsCallable(SloppyTNode<HeapObject> object) {
  return IsCallableMap(LoadMap(object));
}

// True iff {object} is a Cell (map identity check).
TNode<BoolT> CodeStubAssembler::IsCell(SloppyTNode<HeapObject> object) {
  return WordEqual(LoadMap(object), LoadRoot(RootIndex::kCellMap));
}

// True iff {object} is a Code object.
TNode<BoolT> CodeStubAssembler::IsCode(SloppyTNode<HeapObject> object) {
  return HasInstanceType(object, CODE_TYPE);
}

// True iff objects with this map can be used as constructors.
TNode<BoolT> CodeStubAssembler::IsConstructorMap(SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::IsConstructorBit>(LoadMapBitField(map));
}

// True iff the HeapObject {object} can be used as a constructor.
TNode<BoolT> CodeStubAssembler::IsConstructor(SloppyTNode<HeapObject> object) {
  return IsConstructorMap(LoadMap(object));
}

// True iff functions with this map have a prototype slot.
TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(
    SloppyTNode<Map> map) {
  CSA_ASSERT(this, IsMap(map));
  return IsSetWord32<Map::HasPrototypeSlotBit>(LoadMapBitField(map));
}
6060 :
6061 3095 : TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
6062 : TNode<Int32T> instance_type) {
6063 : STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
6064 : return Int32LessThanOrEqual(instance_type,
6065 6190 : Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
6066 : }
6067 :
6068 1568 : TNode<BoolT> CodeStubAssembler::IsCustomElementsReceiverInstanceType(
6069 : TNode<Int32T> instance_type) {
6070 : return Int32LessThanOrEqual(instance_type,
6071 3136 : Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER));
6072 : }
6073 :
// --- String instance-type predicates --------------------------------------
// These decode the packed string instance-type bits (representation,
// encoding, hints). Most CSA_ASSERT that the input really is a string type.

// True iff |instance_type| is any string type. Works because string types
// occupy the lowest instance-type values (asserted below).
6074 21020 : TNode<BoolT> CodeStubAssembler::IsStringInstanceType(
6075 : SloppyTNode<Int32T> instance_type) {
6076 : STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
6077 42040 : return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
6078 : }
6079 :
// True iff the string's character encoding bit says one-byte.
6080 4928 : TNode<BoolT> CodeStubAssembler::IsOneByteStringInstanceType(
6081 : SloppyTNode<Int32T> instance_type) {
6082 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6083 : return Word32Equal(
6084 9856 : Word32And(instance_type, Int32Constant(kStringEncodingMask)),
6085 19712 : Int32Constant(kOneByteStringTag));
6086 : }
6087 :
// True iff the string is known to contain only one-byte characters: either
// the encoding bit or the one-byte-data hint bit is set (combined mask).
6088 448 : TNode<BoolT> CodeStubAssembler::HasOnlyOneByteChars(
6089 : TNode<Int32T> instance_type) {
6090 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6091 448 : return IsSetWord32(instance_type, kStringEncodingMask | kOneByteDataHintMask);
6092 : }
6093 :
// True iff the representation bits say sequential (flat, in-heap) string.
6094 4368 : TNode<BoolT> CodeStubAssembler::IsSequentialStringInstanceType(
6095 : SloppyTNode<Int32T> instance_type) {
6096 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6097 : return Word32Equal(
6098 8736 : Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6099 17472 : Int32Constant(kSeqStringTag));
6100 : }
6101 :
// True iff the representation bits say cons string.
6102 56 : TNode<BoolT> CodeStubAssembler::IsConsStringInstanceType(
6103 : SloppyTNode<Int32T> instance_type) {
6104 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6105 : return Word32Equal(
6106 112 : Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6107 224 : Int32Constant(kConsStringTag));
6108 : }
6109 :
// True iff the string is indirect (thin or sliced); relies on the indirect
// tag being the single low bit, so the masked value itself is the boolean.
6110 0 : TNode<BoolT> CodeStubAssembler::IsIndirectStringInstanceType(
6111 : SloppyTNode<Int32T> instance_type) {
6112 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6113 : STATIC_ASSERT(kIsIndirectStringMask == 0x1);
6114 : STATIC_ASSERT(kIsIndirectStringTag == 0x1);
6115 : return UncheckedCast<BoolT>(
6116 0 : Word32And(instance_type, Int32Constant(kIsIndirectStringMask)));
6117 : }
6118 :
// True iff the representation bits say external string (data outside heap).
6119 0 : TNode<BoolT> CodeStubAssembler::IsExternalStringInstanceType(
6120 : SloppyTNode<Int32T> instance_type) {
6121 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6122 : return Word32Equal(
6123 0 : Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6124 0 : Int32Constant(kExternalStringTag));
6125 : }
6126 :
// True iff the string is an external string whose data pointer is not
// cached on-heap (must go through the resource to read characters).
6127 0 : TNode<BoolT> CodeStubAssembler::IsUncachedExternalStringInstanceType(
6128 : SloppyTNode<Int32T> instance_type) {
6129 : CSA_ASSERT(this, IsStringInstanceType(instance_type));
6130 : STATIC_ASSERT(kUncachedExternalStringTag != 0);
6131 4368 : return IsSetWord32(instance_type, kUncachedExternalStringMask);
6132 : }
6133 :
6133 :
// True iff |instance_type| is any JSReceiver (object or proxy). A single
// lower-bound compare suffices because receivers occupy the top of the
// instance-type enum (asserted below).
6134 14173 : TNode<BoolT> CodeStubAssembler::IsJSReceiverInstanceType(
6135 : SloppyTNode<Int32T> instance_type) {
6136 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
6137 : return Int32GreaterThanOrEqual(instance_type,
6138 28346 : Int32Constant(FIRST_JS_RECEIVER_TYPE));
6139 : }
6140 :
// Map-level and object-level wrappers around the predicate above.
6141 6272 : TNode<BoolT> CodeStubAssembler::IsJSReceiverMap(SloppyTNode<Map> map) {
6142 12544 : return IsJSReceiverInstanceType(LoadMapInstanceType(map));
6143 : }
6144 :
6145 5488 : TNode<BoolT> CodeStubAssembler::IsJSReceiver(SloppyTNode<HeapObject> object) {
6146 10976 : return IsJSReceiverMap(LoadMap(object));
6147 : }
6148 :
// True iff |object| is null or any JSReceiver (e.g. for prototype checks).
6149 0 : TNode<BoolT> CodeStubAssembler::IsNullOrJSReceiver(
6150 : SloppyTNode<HeapObject> object) {
6151 0 : return UncheckedCast<BoolT>(Word32Or(IsJSReceiver(object), IsNull(object)));
6152 : }
6153 :
// True iff |value| is the undefined or null oddball (two root comparisons).
6154 4144 : TNode<BoolT> CodeStubAssembler::IsNullOrUndefined(SloppyTNode<Object> value) {
6155 12432 : return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
6156 : }
6157 :
// True iff |instance_type| is JS_GLOBAL_PROXY_TYPE.
6158 0 : TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyInstanceType(
6159 : SloppyTNode<Int32T> instance_type) {
6160 0 : return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
6161 : }
6162 :
6162 :
// True iff |instance_type| is a JSObject proper (excludes proxies and other
// lower receiver types); single lower-bound compare, layout asserted.
6163 448 : TNode<BoolT> CodeStubAssembler::IsJSObjectInstanceType(
6164 : SloppyTNode<Int32T> instance_type) {
6165 : STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
6166 : return Int32GreaterThanOrEqual(instance_type,
6167 896 : Int32Constant(FIRST_JS_OBJECT_TYPE));
6168 : }
6169 :
6170 336 : TNode<BoolT> CodeStubAssembler::IsJSObjectMap(SloppyTNode<Map> map) {
6171 : CSA_ASSERT(this, IsMap(map));
6172 672 : return IsJSObjectInstanceType(LoadMapInstanceType(map));
6173 : }
6174 :
6175 224 : TNode<BoolT> CodeStubAssembler::IsJSObject(SloppyTNode<HeapObject> object) {
6176 448 : return IsJSObjectMap(LoadMap(object));
6177 : }
6178 :
// Exact instance-type checks for JSPromise at map and object level.
6179 840 : TNode<BoolT> CodeStubAssembler::IsJSPromiseMap(SloppyTNode<Map> map) {
6180 : CSA_ASSERT(this, IsMap(map));
6181 1680 : return InstanceTypeEqual(LoadMapInstanceType(map), JS_PROMISE_TYPE);
6182 : }
6183 :
6184 0 : TNode<BoolT> CodeStubAssembler::IsJSPromise(SloppyTNode<HeapObject> object) {
6185 0 : return IsJSPromiseMap(LoadMap(object));
6186 : }
6187 :
6188 336 : TNode<BoolT> CodeStubAssembler::IsJSProxy(SloppyTNode<HeapObject> object) {
6189 336 : return HasInstanceType(object, JS_PROXY_TYPE);
6190 : }
6191 :
6192 616 : TNode<BoolT> CodeStubAssembler::IsJSGlobalProxy(
6193 : SloppyTNode<HeapObject> object) {
6194 616 : return HasInstanceType(object, JS_GLOBAL_PROXY_TYPE);
6195 : }
6196 :
// True iff |map| is itself a Map: a Map's map is the meta map.
6197 4485 : TNode<BoolT> CodeStubAssembler::IsMap(SloppyTNode<HeapObject> map) {
6198 8970 : return IsMetaMap(LoadMap(map));
6199 : }
6200 :
// JSValue (primitive wrapper object) checks at type, object and map level.
6201 0 : TNode<BoolT> CodeStubAssembler::IsJSValueInstanceType(
6202 : SloppyTNode<Int32T> instance_type) {
6203 3813 : return InstanceTypeEqual(instance_type, JS_VALUE_TYPE);
6204 : }
6205 :
6206 0 : TNode<BoolT> CodeStubAssembler::IsJSValue(SloppyTNode<HeapObject> object) {
6207 0 : return IsJSValueMap(LoadMap(object));
6208 : }
6209 :
6210 0 : TNode<BoolT> CodeStubAssembler::IsJSValueMap(SloppyTNode<Map> map) {
6211 0 : return IsJSValueInstanceType(LoadMapInstanceType(map));
6212 : }
6213 :
// JSArray checks at type, object and map level.
6214 0 : TNode<BoolT> CodeStubAssembler::IsJSArrayInstanceType(
6215 : SloppyTNode<Int32T> instance_type) {
6216 14570 : return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
6217 : }
6218 :
6219 7453 : TNode<BoolT> CodeStubAssembler::IsJSArray(SloppyTNode<HeapObject> object) {
6220 14906 : return IsJSArrayMap(LoadMap(object));
6221 : }
6222 :
6223 10757 : TNode<BoolT> CodeStubAssembler::IsJSArrayMap(SloppyTNode<Map> map) {
6224 21514 : return IsJSArrayInstanceType(LoadMapInstanceType(map));
6225 : }
6226 :
6227 0 : TNode<BoolT> CodeStubAssembler::IsJSArrayIterator(
6228 : SloppyTNode<HeapObject> object) {
6229 0 : return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
6230 : }
6231 :
6232 0 : TNode<BoolT> CodeStubAssembler::IsJSAsyncGeneratorObject(
6233 : SloppyTNode<HeapObject> object) {
6234 0 : return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
6235 : }
6236 :
6236 :
// True iff |object|'s instance type falls in the [FIRST_CONTEXT_TYPE,
// LAST_CONTEXT_TYPE] range (any flavor of Context).
6237 5 : TNode<BoolT> CodeStubAssembler::IsContext(SloppyTNode<HeapObject> object) {
6238 10 : Node* instance_type = LoadInstanceType(object);
6239 : return UncheckedCast<BoolT>(Word32And(
6240 10 : Int32GreaterThanOrEqual(instance_type, Int32Constant(FIRST_CONTEXT_TYPE)),
6241 25 : Int32LessThanOrEqual(instance_type, Int32Constant(LAST_CONTEXT_TYPE))));
6242 : }
6243 :
// Exact FIXED_ARRAY_TYPE check (does NOT match subclasses; see below).
6244 0 : TNode<BoolT> CodeStubAssembler::IsFixedArray(SloppyTNode<HeapObject> object) {
6245 0 : return HasInstanceType(object, FIXED_ARRAY_TYPE);
6246 : }
6247 :
// True iff the instance type is anywhere in the FixedArray subclass range.
6248 0 : TNode<BoolT> CodeStubAssembler::IsFixedArraySubclass(
6249 : SloppyTNode<HeapObject> object) {
6250 0 : Node* instance_type = LoadInstanceType(object);
6251 : return UncheckedCast<BoolT>(
6252 : Word32And(Int32GreaterThanOrEqual(instance_type,
6253 0 : Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
6254 : Int32LessThanOrEqual(instance_type,
6255 0 : Int32Constant(LAST_FIXED_ARRAY_TYPE))));
6256 : }
6257 :
// True iff the instance type lies OUTSIDE the WeakFixedArray subclass range
// (negated range check via Or of the two out-of-range compares).
6258 0 : TNode<BoolT> CodeStubAssembler::IsNotWeakFixedArraySubclass(
6259 : SloppyTNode<HeapObject> object) {
6260 0 : Node* instance_type = LoadInstanceType(object);
6261 : return UncheckedCast<BoolT>(Word32Or(
6262 0 : Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
6263 : Int32GreaterThan(instance_type,
6264 0 : Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
6265 : }
6266 :
6267 392 : TNode<BoolT> CodeStubAssembler::IsPromiseCapability(
6268 : SloppyTNode<HeapObject> object) {
6269 392 : return HasInstanceType(object, PROMISE_CAPABILITY_TYPE);
6270 : }
6271 :
6272 0 : TNode<BoolT> CodeStubAssembler::IsPropertyArray(
6273 : SloppyTNode<HeapObject> object) {
6274 0 : return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
6275 : }
6276 :
6276 :
6277 : // This complicated check is due to elements oddities. If a smi array is empty
6278 : // after Array.p.shift, it is replaced by the empty array constant. If it is
6279 : // later filled with a double element, we try to grow it but pass in a double
6280 : // elements kind. Usually this would cause a size mismatch (since the source
6281 : // fixed array has HOLEY_ELEMENTS and destination has
6282 : // HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
6283 : // source array is empty.
6284 : // TODO(jgruber): It might we worth creating an empty_double_array constant to
6285 : // simplify this case.
// Result is true when either the kind matches exactly (first GotoIf) or the
// array is empty (length == 0); only the fall-through path flips the default
// true result to false.
6286 0 : TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKindOrEmpty(
6287 : SloppyTNode<HeapObject> object, ElementsKind kind) {
6288 0 : Label out(this);
6289 : TVARIABLE(BoolT, var_result, Int32TrueConstant());
6290 :
6291 0 : GotoIf(IsFixedArrayWithKind(object, kind), &out);
6292 :
6293 0 : TNode<Smi> const length = LoadFixedArrayBaseLength(CAST(object));
6294 0 : GotoIf(SmiEqual(length, SmiConstant(0)), &out);
6295 :
6296 : var_result = Int32FalseConstant();
6297 0 : Goto(&out);
6298 :
6299 : BIND(&out);
6300 0 : return var_result.value();
6301 : }
6302 :
6302 :
// Dispatches at stub-compile time (|kind| is a C++ constant): double kinds
// must be backed by FixedDoubleArray, smi/object kinds by a FixedArray
// subclass.
6303 0 : TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKind(
6304 : SloppyTNode<HeapObject> object, ElementsKind kind) {
6305 0 : if (IsDoubleElementsKind(kind)) {
6306 0 : return IsFixedDoubleArray(object);
6307 : } else {
6308 : DCHECK(IsSmiOrObjectElementsKind(kind));
6309 0 : return IsFixedArraySubclass(object);
6310 : }
6311 : }
6312 :
6312 :
// --- Misc heap-object map / instance-type predicates ----------------------
// Thin wrappers: load the object's map (or instance type) and delegate to
// the corresponding map-level predicate or an exact type compare.

6313 168 : TNode<BoolT> CodeStubAssembler::IsBoolean(SloppyTNode<HeapObject> object) {
6314 336 : return IsBooleanMap(LoadMap(object));
6315 : }
6316 :
6317 0 : TNode<BoolT> CodeStubAssembler::IsPropertyCell(SloppyTNode<HeapObject> object) {
6318 0 : return IsPropertyCellMap(LoadMap(object));
6319 : }
6320 :
6321 336 : TNode<BoolT> CodeStubAssembler::IsAccessorInfo(SloppyTNode<HeapObject> object) {
6322 672 : return IsAccessorInfoMap(LoadMap(object));
6323 : }
6324 :
6325 3981 : TNode<BoolT> CodeStubAssembler::IsAccessorPair(SloppyTNode<HeapObject> object) {
6326 7962 : return IsAccessorPairMap(LoadMap(object));
6327 : }
6328 :
6329 168 : TNode<BoolT> CodeStubAssembler::IsAllocationSite(
6330 : SloppyTNode<HeapObject> object) {
6331 336 : return IsAllocationSiteInstanceType(LoadInstanceType(object));
6332 : }
6333 :
// True for either HeapNumber flavor (immutable or mutable boxed double).
6334 0 : TNode<BoolT> CodeStubAssembler::IsAnyHeapNumber(
6335 : SloppyTNode<HeapObject> object) {
6336 : return UncheckedCast<BoolT>(
6337 0 : Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object)));
6338 : }
6339 :
6340 29995 : TNode<BoolT> CodeStubAssembler::IsHeapNumber(SloppyTNode<HeapObject> object) {
6341 59990 : return IsHeapNumberMap(LoadMap(object));
6342 : }
6343 :
6344 56 : TNode<BoolT> CodeStubAssembler::IsHeapNumberInstanceType(
6345 : SloppyTNode<Int32T> instance_type) {
6346 280 : return InstanceTypeEqual(instance_type, HEAP_NUMBER_TYPE);
6347 : }
6348 :
6349 0 : TNode<BoolT> CodeStubAssembler::IsOddball(SloppyTNode<HeapObject> object) {
6350 0 : return IsOddballInstanceType(LoadInstanceType(object));
6351 : }
6352 :
6353 0 : TNode<BoolT> CodeStubAssembler::IsOddballInstanceType(
6354 : SloppyTNode<Int32T> instance_type) {
6355 896 : return InstanceTypeEqual(instance_type, ODDBALL_TYPE);
6356 : }
6357 :
6358 56 : TNode<BoolT> CodeStubAssembler::IsMutableHeapNumber(
6359 : SloppyTNode<HeapObject> object) {
6360 112 : return IsMutableHeapNumberMap(LoadMap(object));
6361 : }
6362 :
6363 0 : TNode<BoolT> CodeStubAssembler::IsFeedbackCell(SloppyTNode<HeapObject> object) {
6364 0 : return HasInstanceType(object, FEEDBACK_CELL_TYPE);
6365 : }
6366 :
6367 0 : TNode<BoolT> CodeStubAssembler::IsFeedbackVector(
6368 : SloppyTNode<HeapObject> object) {
6369 0 : return IsFeedbackVectorMap(LoadMap(object));
6370 : }
6371 :
6372 56 : TNode<BoolT> CodeStubAssembler::IsName(SloppyTNode<HeapObject> object) {
6373 112 : return IsNameInstanceType(LoadInstanceType(object));
6374 : }
6375 :
// Name types (strings and symbols) occupy the bottom of the enum, so an
// upper-bound compare suffices.
6376 112 : TNode<BoolT> CodeStubAssembler::IsNameInstanceType(
6377 : SloppyTNode<Int32T> instance_type) {
6378 224 : return Int32LessThanOrEqual(instance_type, Int32Constant(LAST_NAME_TYPE))
6379 : }
6380 :
6381 9357 : TNode<BoolT> CodeStubAssembler::IsString(SloppyTNode<HeapObject> object) {
6382 18714 : return IsStringInstanceType(LoadInstanceType(object));
6383 : }
6384 :
6385 0 : TNode<BoolT> CodeStubAssembler::IsSymbolInstanceType(
6386 : SloppyTNode<Int32T> instance_type) {
6387 728 : return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
6388 : }
6389 :
6390 2698 : TNode<BoolT> CodeStubAssembler::IsSymbol(SloppyTNode<HeapObject> object) {
6391 5396 : return IsSymbolMap(LoadMap(object));
6392 : }
6393 :
6394 4760 : TNode<BoolT> CodeStubAssembler::IsBigIntInstanceType(
6395 : SloppyTNode<Int32T> instance_type) {
6396 31869 : return InstanceTypeEqual(instance_type, BIGINT_TYPE);
6397 : }
6398 :
6399 12936 : TNode<BoolT> CodeStubAssembler::IsBigInt(SloppyTNode<HeapObject> object) {
6400 25872 : return IsBigIntInstanceType(LoadInstanceType(object));
6401 : }
6402 :
// Primitive types precede all object types, so an upper-bound compare.
6403 448 : TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType(
6404 : SloppyTNode<Int32T> instance_type) {
6405 : return Int32LessThanOrEqual(instance_type,
6406 896 : Int32Constant(LAST_PRIMITIVE_TYPE));
6407 : }
6408 :
6408 :
// True iff |object| is a Symbol whose flags word has the is-private bit set;
// non-symbols are false without touching the flags field.
6409 2469 : TNode<BoolT> CodeStubAssembler::IsPrivateSymbol(
6410 : SloppyTNode<HeapObject> object) {
6411 : return Select<BoolT>(IsSymbol(object),
6412 2469 : [=] {
6413 : TNode<Symbol> symbol = CAST(object);
6414 : TNode<Uint32T> flags = LoadObjectField<Uint32T>(
6415 2469 : symbol, Symbol::kFlagsOffset);
6416 4938 : return IsSetWord32<Symbol::IsPrivateBit>(flags);
6417 : },
6418 7407 : [=] { return Int32FalseConstant(); });
6419 : }
6420 :
6420 :
// --- Context / dictionary / array-buffer predicates -----------------------

// Map identity check against the native-context root map.
6421 56 : TNode<BoolT> CodeStubAssembler::IsNativeContext(
6422 : SloppyTNode<HeapObject> object) {
6423 112 : return WordEqual(LoadMap(object), LoadRoot(RootIndex::kNativeContextMap));
6424 : }
6425 :
// Map identity check against the FixedDoubleArray map constant.
6426 112 : TNode<BoolT> CodeStubAssembler::IsFixedDoubleArray(
6427 : SloppyTNode<HeapObject> object) {
6428 224 : return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
6429 : }
6430 :
// Range check over all hash-table instance types.
6431 0 : TNode<BoolT> CodeStubAssembler::IsHashTable(SloppyTNode<HeapObject> object) {
6432 0 : Node* instance_type = LoadInstanceType(object);
6433 : return UncheckedCast<BoolT>(
6434 : Word32And(Int32GreaterThanOrEqual(instance_type,
6435 0 : Int32Constant(FIRST_HASH_TABLE_TYPE)),
6436 : Int32LessThanOrEqual(instance_type,
6437 0 : Int32Constant(LAST_HASH_TABLE_TYPE))));
6438 : }
6439 :
// Exact instance-type checks for specific dictionary/table flavors.
6440 0 : TNode<BoolT> CodeStubAssembler::IsEphemeronHashTable(
6441 : SloppyTNode<HeapObject> object) {
6442 0 : return HasInstanceType(object, EPHEMERON_HASH_TABLE_TYPE);
6443 : }
6444 :
6445 0 : TNode<BoolT> CodeStubAssembler::IsNameDictionary(
6446 : SloppyTNode<HeapObject> object) {
6447 0 : return HasInstanceType(object, NAME_DICTIONARY_TYPE);
6448 : }
6449 :
6450 0 : TNode<BoolT> CodeStubAssembler::IsGlobalDictionary(
6451 : SloppyTNode<HeapObject> object) {
6452 0 : return HasInstanceType(object, GLOBAL_DICTIONARY_TYPE);
6453 : }
6454 :
6455 0 : TNode<BoolT> CodeStubAssembler::IsNumberDictionary(
6456 : SloppyTNode<HeapObject> object) {
6457 0 : return HasInstanceType(object, NUMBER_DICTIONARY_TYPE);
6458 : }
6459 :
6460 0 : TNode<BoolT> CodeStubAssembler::IsJSGeneratorObject(
6461 : SloppyTNode<HeapObject> object) {
6462 0 : return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
6463 : }
6464 :
6465 0 : TNode<BoolT> CodeStubAssembler::IsJSFunctionInstanceType(
6466 : SloppyTNode<Int32T> instance_type) {
6467 3869 : return InstanceTypeEqual(instance_type, JS_FUNCTION_TYPE);
6468 : }
6469 :
6470 0 : TNode<BoolT> CodeStubAssembler::IsAllocationSiteInstanceType(
6471 : SloppyTNode<Int32T> instance_type) {
6472 168 : return InstanceTypeEqual(instance_type, ALLOCATION_SITE_TYPE);
6473 : }
6474 :
6475 56 : TNode<BoolT> CodeStubAssembler::IsJSFunction(SloppyTNode<HeapObject> object) {
6476 112 : return IsJSFunctionMap(LoadMap(object));
6477 : }
6478 :
6479 56 : TNode<BoolT> CodeStubAssembler::IsJSFunctionMap(SloppyTNode<Map> map) {
6480 112 : return IsJSFunctionInstanceType(LoadMapInstanceType(map));
6481 : }
6482 :
6483 616 : TNode<BoolT> CodeStubAssembler::IsJSTypedArray(SloppyTNode<HeapObject> object) {
6484 616 : return HasInstanceType(object, JS_TYPED_ARRAY_TYPE);
6485 : }
6486 :
6487 56 : TNode<BoolT> CodeStubAssembler::IsJSArrayBuffer(
6488 : SloppyTNode<HeapObject> object) {
6489 56 : return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
6490 : }
6491 :
6492 1288 : TNode<BoolT> CodeStubAssembler::IsJSDataView(TNode<HeapObject> object) {
6493 1288 : return HasInstanceType(object, JS_DATA_VIEW_TYPE);
6494 : }
6495 :
// Range check over the on-heap fixed typed-array backing-store types.
6496 0 : TNode<BoolT> CodeStubAssembler::IsFixedTypedArray(
6497 : SloppyTNode<HeapObject> object) {
6498 0 : TNode<Int32T> instance_type = LoadInstanceType(object);
6499 : return UncheckedCast<BoolT>(Word32And(
6500 : Int32GreaterThanOrEqual(instance_type,
6501 0 : Int32Constant(FIRST_FIXED_TYPED_ARRAY_TYPE)),
6502 : Int32LessThanOrEqual(instance_type,
6503 0 : Int32Constant(LAST_FIXED_TYPED_ARRAY_TYPE))));
6504 : }
6505 :
6506 672 : TNode<BoolT> CodeStubAssembler::IsJSRegExp(SloppyTNode<HeapObject> object) {
6507 672 : return HasInstanceType(object, JS_REGEXP_TYPE);
6508 : }
6509 :
6509 :
// ECMAScript Number check: Smi, or a HeapNumber.
6510 1797 : TNode<BoolT> CodeStubAssembler::IsNumber(SloppyTNode<Object> object) {
6511 1797 : return Select<BoolT>(TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6512 5391 : [=] { return IsHeapNumber(CAST(object)); });
6513 : }
6514 :
// ECMAScript Numeric check: Smi, HeapNumber, or BigInt.
6515 112 : TNode<BoolT> CodeStubAssembler::IsNumeric(SloppyTNode<Object> object) {
6516 : return Select<BoolT>(
6517 112 : TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6518 112 : [=] {
6519 : return UncheckedCast<BoolT>(
6520 336 : Word32Or(IsHeapNumber(CAST(object)), IsBigInt(CAST(object))));
6521 336 : });
6522 : }
6523 :
6523 :
// True iff |number| is in canonical form: Smis are always normalized; a
// HeapNumber is normalized only if its value does NOT fit in a Smi (outside
// [Smi::kMinValue, Smi::kMaxValue]) or is NaN. A HeapNumber holding a value
// that fits in Smi range is denormalized and yields false.
6524 0 : TNode<BoolT> CodeStubAssembler::IsNumberNormalized(SloppyTNode<Number> number) {
6525 0 : TVARIABLE(BoolT, var_result, Int32TrueConstant());
6526 0 : Label out(this);
6527 :
6528 0 : GotoIf(TaggedIsSmi(number), &out);
6529 :
6530 0 : TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
6531 : TNode<Float64T> smi_min =
6532 0 : Float64Constant(static_cast<double>(Smi::kMinValue));
6533 : TNode<Float64T> smi_max =
6534 0 : Float64Constant(static_cast<double>(Smi::kMaxValue));
6535 :
6536 0 : GotoIf(Float64LessThan(value, smi_min), &out);
6537 0 : GotoIf(Float64GreaterThan(value, smi_max), &out);
// Float64Equal(value, value) is false only for NaN, so this jump keeps the
// default true result for NaN heap numbers.
6538 0 : GotoIfNot(Float64Equal(value, value), &out); // NaN.
6539 :
6540 : var_result = Int32FalseConstant();
6541 0 : Goto(&out);
6542 :
6543 : BIND(&out);
6544 0 : return var_result.value();
6545 : }
6546 :
6546 :
// True iff |number| >= 0 (Smi fast path, else compare the HeapNumber value).
6547 0 : TNode<BoolT> CodeStubAssembler::IsNumberPositive(SloppyTNode<Number> number) {
6548 0 : return Select<BoolT>(TaggedIsSmi(number),
6549 0 : [=] { return TaggedIsPositiveSmi(number); },
6550 0 : [=] { return IsHeapNumberPositive(CAST(number)); });
6551 : }
6552 :
6553 : // TODO(cbruni): Use TNode<HeapNumber> instead of custom name.
// Note: >= 0.0 — treats +0 (and, per IEEE compare, -0) as positive.
6554 5 : TNode<BoolT> CodeStubAssembler::IsHeapNumberPositive(TNode<HeapNumber> number) {
6555 5 : TNode<Float64T> value = LoadHeapNumberValue(number);
6556 5 : TNode<Float64T> float_zero = Float64Constant(0.);
6557 5 : return Float64GreaterThanOrEqual(value, float_zero);
6558 : }
6559 :
// True iff |number| is a non-negative safe integer: positive Smi, or a
// HeapNumber that is an integer AND >= 0.
6560 0 : TNode<BoolT> CodeStubAssembler::IsNumberNonNegativeSafeInteger(
6561 : TNode<Number> number) {
6562 : return Select<BoolT>(
6563 : // TODO(cbruni): Introduce TaggedIsNonNegateSmi to avoid confusion.
6564 0 : TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
6565 0 : [=] {
6566 : TNode<HeapNumber> heap_number = CAST(number);
6567 : return Select<BoolT>(IsInteger(heap_number),
6568 0 : [=] { return IsHeapNumberPositive(heap_number); },
6569 0 : [=] { return Int32FalseConstant(); });
6570 0 : });
6571 : }
6572 :
6572 :
// Number.isSafeInteger semantics on an arbitrary tagged value: Smis always
// qualify; HeapNumbers defer to the overload below; everything else is false.
6573 56 : TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<Object> number) {
6574 : return Select<BoolT>(
6575 112 : TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6576 56 : [=] {
6577 : return Select<BoolT>(
6578 : IsHeapNumber(CAST(number)),
6579 56 : [=] { return IsSafeInteger(UncheckedCast<HeapNumber>(number)); },
6580 224 : [=] { return Int32FalseConstant(); });
6581 336 : });
6582 : }
6583 :
// HeapNumber overload: value is a safe integer iff it is integral (the
// trunc-subtract check also rules out infinities and NaN) and its magnitude
// does not exceed 2^53 - 1 (kMaxSafeInteger).
6584 56 : TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<HeapNumber> number) {
6585 : // Load the actual value of {number}.
6586 56 : TNode<Float64T> number_value = LoadHeapNumberValue(number);
6587 : // Truncate the value of {number} to an integer (or an infinity).
6588 56 : TNode<Float64T> integer = Float64Trunc(number_value);
6589 :
6590 : return Select<BoolT>(
6591 : // Check if {number}s value matches the integer (ruling out the
6592 : // infinities).
6593 168 : Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0)),
6594 56 : [=] {
6595 : // Check if the {integer} value is in safe integer range.
6596 112 : return Float64LessThanOrEqual(Float64Abs(integer),
6597 280 : Float64Constant(kMaxSafeInteger));
6598 112 : },
6599 280 : [=] { return Int32FalseConstant(); });
6600 : }
6601 :
6601 :
// Number.isInteger semantics on an arbitrary tagged value: Smis qualify;
// HeapNumbers defer to the overload below; non-numbers are false.
6602 56 : TNode<BoolT> CodeStubAssembler::IsInteger(TNode<Object> number) {
6603 : return Select<BoolT>(
6604 112 : TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6605 56 : [=] {
6606 : return Select<BoolT>(
6607 : IsHeapNumber(CAST(number)),
6608 56 : [=] { return IsInteger(UncheckedCast<HeapNumber>(number)); },
6609 224 : [=] { return Int32FalseConstant(); });
6610 336 : });
6611 : }
6612 :
// HeapNumber overload: integral iff value - trunc(value) == 0.0 (false for
// NaN and the infinities, since inf - inf and NaN arithmetic yield NaN).
6613 56 : TNode<BoolT> CodeStubAssembler::IsInteger(TNode<HeapNumber> number) {
6614 56 : TNode<Float64T> number_value = LoadHeapNumberValue(number);
6615 : // Truncate the value of {number} to an integer (or an infinity).
6616 56 : TNode<Float64T> integer = Float64Trunc(number_value);
6617 : // Check if {number}s value matches the integer (ruling out the infinities).
6618 168 : return Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0));
6619 : }
6620 :
6620 :
// True iff the HeapNumber's value is exactly representable as a uint32:
// non-negative, and round-tripping through TruncateFloat64ToWord32 /
// ChangeUint32ToFloat64 reproduces the original double.
6621 5 : TNode<BoolT> CodeStubAssembler::IsHeapNumberUint32(TNode<HeapNumber> number) {
6622 : // Check that the HeapNumber is a valid uint32
6623 : return Select<BoolT>(
6624 : IsHeapNumberPositive(number),
6625 5 : [=] {
6626 5 : TNode<Float64T> value = LoadHeapNumberValue(number);
6627 5 : TNode<Uint32T> int_value = Unsigned(TruncateFloat64ToWord32(value));
6628 10 : return Float64Equal(value, ChangeUint32ToFloat64(int_value));
6629 : },
6630 15 : [=] { return Int32FalseConstant(); });
6631 : }
6632 :
// True iff |number| can serve as an array index: positive Smi, or a
// HeapNumber holding an exact uint32.
6633 5 : TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(TNode<Number> number) {
6634 5 : return Select<BoolT>(TaggedIsSmi(number),
6635 5 : [=] { return TaggedIsPositiveSmi(number); },
6636 25 : [=] { return IsHeapNumberUint32(CAST(number)); });
6637 : }
6638 :
6638 :
// True iff a fixed array of |element_count| tagged elements plus |base_size|
// header bytes would exceed the largest regular new-space object. The
// element limit is computed at stub-compile time; only the compare is
// emitted. |mode| selects Smi vs IntPtr representation of the count.
6639 2662 : Node* CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(Node* element_count,
6640 : int base_size,
6641 : ParameterMode mode) {
6642 : int max_newspace_elements =
6643 2662 : (kMaxRegularHeapObjectSize - base_size) / kTaggedSize;
6644 : return IntPtrOrSmiGreaterThan(
6645 2662 : element_count, IntPtrOrSmiConstant(max_newspace_elements, mode), mode);
6646 : }
6647 :
6647 :
// Returns the 16-bit code unit at |index| of |string|. Tries to flatten the
// string to a direct (sequential/external) representation and read the byte
// or 16-bit unit inline; falls back to Runtime::kStringCharCodeAt when the
// string cannot be made direct or its data pointer is unavailable.
// Precondition (asserted): 0 <= index < string.length.
6648 2744 : TNode<Int32T> CodeStubAssembler::StringCharCodeAt(SloppyTNode<String> string,
6649 : SloppyTNode<IntPtrT> index) {
6650 : CSA_ASSERT(this, IsString(string));
6651 :
6652 : CSA_ASSERT(this, IntPtrGreaterThanOrEqual(index, IntPtrConstant(0)));
6653 : CSA_ASSERT(this, IntPtrLessThan(index, LoadStringLengthAsWord(string)));
6654 :
6655 2744 : TVARIABLE(Int32T, var_result);
6656 :
6657 2744 : Label return_result(this), if_runtime(this, Label::kDeferred),
6658 2744 : if_stringistwobyte(this), if_stringisonebyte(this);
6659 :
6660 5488 : ToDirectStringAssembler to_direct(state(), string);
6661 2744 : to_direct.TryToDirect(&if_runtime);
// |offset| folds in any slice offset discovered while making the string
// direct, so it indexes the underlying character data.
6662 : Node* const offset = IntPtrAdd(index, to_direct.offset());
6663 : Node* const instance_type = to_direct.instance_type();
6664 :
6665 : Node* const string_data = to_direct.PointerToData(&if_runtime);
6666 :
6667 : // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
6668 2744 : Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
6669 5488 : &if_stringistwobyte);
6670 :
6671 : BIND(&if_stringisonebyte);
6672 : {
6673 2744 : var_result =
6674 : UncheckedCast<Int32T>(Load(MachineType::Uint8(), string_data, offset));
6675 2744 : Goto(&return_result);
6676 : }
6677 :
6678 : BIND(&if_stringistwobyte);
6679 : {
// Two-byte data: scale the offset by 2 (shift left by 1) before loading.
6680 8232 : var_result =
6681 : UncheckedCast<Int32T>(Load(MachineType::Uint16(), string_data,
6682 5488 : WordShl(offset, IntPtrConstant(1))));
6683 2744 : Goto(&return_result);
6684 : }
6685 :
6686 : BIND(&if_runtime);
6687 : {
6688 : Node* result = CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(),
6689 2744 : string, SmiTag(index));
6690 5488 : var_result = SmiToInt32(result);
6691 2744 : Goto(&return_result);
6692 : }
6693 :
6694 : BIND(&return_result);
6695 2744 : return var_result.value();
6696 : }
6697 :
6697 :
// Creates a one-character string for |code|. One-byte codes go through the
// isolate's single-character string cache (allocating and caching a new
// SeqOneByteString on a miss); codes above kMaxOneByteCharCode allocate a
// fresh SeqTwoByteString each time (deferred, assumed rare).
6698 784 : TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
6699 784 : VARIABLE(var_result, MachineRepresentation::kTagged);
6700 :
6701 : // Check if the {code} is a one-byte char code.
6702 784 : Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
6703 784 : if_done(this);
6704 1568 : Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
6705 1568 : &if_codeisonebyte, &if_codeistwobyte);
6706 : BIND(&if_codeisonebyte);
6707 : {
6708 : // Load the isolate wide single character string cache.
6709 : TNode<FixedArray> cache =
6710 784 : CAST(LoadRoot(RootIndex::kSingleCharacterStringCache));
6711 1568 : TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));
6712 :
6713 : // Check if we have an entry for the {code} in the single character string
6714 : // cache already.
6715 : Label if_entryisundefined(this, Label::kDeferred),
6716 784 : if_entryisnotundefined(this);
6717 1568 : Node* entry = LoadFixedArrayElement(cache, code_index);
6718 1568 : Branch(IsUndefined(entry), &if_entryisundefined, &if_entryisnotundefined);
6719 :
6720 : BIND(&if_entryisundefined);
6721 : {
6722 : // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
6723 784 : TNode<String> result = AllocateSeqOneByteString(1);
// Store the single byte directly past the header (untagged offset).
6724 : StoreNoWriteBarrier(
6725 : MachineRepresentation::kWord8, result,
6726 1568 : IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
6727 : StoreFixedArrayElement(cache, code_index, result);
6728 784 : var_result.Bind(result);
6729 784 : Goto(&if_done);
6730 : }
6731 :
6732 : BIND(&if_entryisnotundefined);
6733 : {
6734 : // Return the entry from the {cache}.
6735 784 : var_result.Bind(entry);
6736 784 : Goto(&if_done);
6737 784 : }
6738 : }
6739 :
6740 : BIND(&if_codeistwobyte);
6741 : {
6742 : // Allocate a new SeqTwoByteString for {code}.
6743 1568 : Node* result = AllocateSeqTwoByteString(1);
6744 : StoreNoWriteBarrier(
6745 : MachineRepresentation::kWord16, result,
6746 1568 : IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
6747 784 : var_result.Bind(result);
6748 784 : Goto(&if_done);
6749 : }
6750 :
6751 : BIND(&if_done);
6752 : CSA_ASSERT(this, IsString(var_result.value()));
6753 1568 : return CAST(var_result.value());
6754 : }
6755 :
6755 :
6756 : // A wrapper around CopyStringCharacters which determines the correct string
6757 : // encoding, allocates a corresponding sequential string, and then copies the
6758 : // given character range using CopyStringCharacters.
6759 : // |from_string| must be a sequential string.
6760 : // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
// Branches once on the source encoding, then allocates a sequential string
// of the matching encoding and copies |character_count| characters starting
// at |from_index|. Same-encoding copies only (one-byte->one-byte or
// two-byte->two-byte); no narrowing or widening happens here.
6761 896 : TNode<String> CodeStubAssembler::AllocAndCopyStringCharacters(
6762 : Node* from, Node* from_instance_type, TNode<IntPtrT> from_index,
6763 : TNode<IntPtrT> character_count) {
6764 1792 : Label end(this), one_byte_sequential(this), two_byte_sequential(this);
6765 : TVARIABLE(String, var_result);
6766 :
6767 896 : Branch(IsOneByteStringInstanceType(from_instance_type), &one_byte_sequential,
6768 1792 : &two_byte_sequential);
6769 :
6770 : // The subject string is a sequential one-byte string.
6771 : BIND(&one_byte_sequential);
6772 : {
6773 : TNode<String> result = AllocateSeqOneByteString(
6774 1792 : NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
6775 : CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6776 : character_count, String::ONE_BYTE_ENCODING,
6777 1792 : String::ONE_BYTE_ENCODING);
6778 : var_result = result;
6779 896 : Goto(&end);
6780 : }
6781 :
6782 : // The subject string is a sequential two-byte string.
6783 : BIND(&two_byte_sequential);
6784 : {
6785 : TNode<String> result = AllocateSeqTwoByteString(
6786 1792 : NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
6787 : CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6788 : character_count, String::TWO_BYTE_ENCODING,
6789 1792 : String::TWO_BYTE_ENCODING);
6790 : var_result = result;
6791 896 : Goto(&end);
6792 : }
6793 :
6794 : BIND(&end);
6795 896 : return var_result.value();
6796 : }
6797 :
6797 :
          : // Returns the substring of |string| over the half-open word-sized index
          : // range [from, to). Fast paths handled inline: empty result, length-1
          : // result (via StringCharCodeAt + StringFromSingleCharCode), returning
          : // |string| itself when the range covers the whole string, allocating a
          : // SlicedString for sufficiently long substrings, and a direct character
          : // copy for sequential/external strings. Everything else falls back to
          : // Runtime::kStringSubstring.
6798 448 : TNode<String> CodeStubAssembler::SubString(TNode<String> string,
6799 : TNode<IntPtrT> from,
6800 : TNode<IntPtrT> to) {
6801 448 : TVARIABLE(String, var_result);
6802 896 : ToDirectStringAssembler to_direct(state(), string);
6803 448 : Label end(this), runtime(this);
6804 :
6805 : TNode<IntPtrT> const substr_length = IntPtrSub(to, from);
6806 448 : TNode<IntPtrT> const string_length = LoadStringLengthAsWord(string);
6807 :
6808 : // Begin dispatching based on substring length.
6809 :
          : // Unsigned comparison also routes negative/overflowing ranges here.
6810 448 : Label original_string_or_invalid_length(this);
6811 448 : GotoIf(UintPtrGreaterThanOrEqual(substr_length, string_length),
6812 896 : &original_string_or_invalid_length);
6813 :
6814 : // A real substring (substr_length < string_length).
6815 448 : Label empty(this);
6816 1344 : GotoIf(IntPtrEqual(substr_length, IntPtrConstant(0)), &empty);
6817 :
6818 448 : Label single_char(this);
6819 1344 : GotoIf(IntPtrEqual(substr_length, IntPtrConstant(1)), &single_char);
6820 :
6821 : // Deal with different string types: update the index if necessary
6822 : // and extract the underlying string.
6823 :
6824 448 : TNode<String> direct_string = to_direct.TryToDirect(&runtime);
          : // Fold the caller's start index with any offset accumulated while
          : // unpacking (sliced strings carry a parent offset).
6825 : TNode<IntPtrT> offset = IntPtrAdd(from, to_direct.offset());
6826 : Node* const instance_type = to_direct.instance_type();
6827 :
6828 : // The subject string can only be external or sequential string of either
6829 : // encoding at this point.
6830 448 : Label external_string(this);
6831 : {
6832 : if (FLAG_string_slices) {
6833 : Label next(this);
6834 :
6835 : // Short slice. Copy instead of slicing.
6836 : GotoIf(IntPtrLessThan(substr_length,
6837 896 : IntPtrConstant(SlicedString::kMinLength)),
6838 896 : &next);
6839 :
6840 : // Allocate new sliced string.
6841 :
6842 448 : Counters* counters = isolate()->counters();
6843 448 : IncrementCounter(counters->sub_string_native(), 1);
6844 :
6845 448 : Label one_byte_slice(this), two_byte_slice(this);
6846 448 : Branch(IsOneByteStringInstanceType(to_direct.instance_type()),
6847 896 : &one_byte_slice, &two_byte_slice);
6848 :
6849 : BIND(&one_byte_slice);
6850 : {
6851 1792 : var_result = AllocateSlicedOneByteString(
6852 448 : Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6853 : SmiTag(offset));
6854 448 : Goto(&end);
6855 : }
6856 :
6857 : BIND(&two_byte_slice);
6858 : {
6859 1792 : var_result = AllocateSlicedTwoByteString(
6860 448 : Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6861 : SmiTag(offset));
6862 448 : Goto(&end);
6863 : }
6864 :
6865 448 : BIND(&next);
6866 : }
6867 :
6868 : // The subject string can only be external or sequential string of either
6869 : // encoding at this point.
6870 448 : GotoIf(to_direct.is_external(), &external_string);
6871 :
6872 448 : var_result = AllocAndCopyStringCharacters(direct_string, instance_type,
6873 : offset, substr_length);
6874 :
6875 448 : Counters* counters = isolate()->counters();
6876 448 : IncrementCounter(counters->sub_string_native(), 1);
6877 :
6878 448 : Goto(&end);
6879 : }
6880 :
6881 : // Handle external string.
6882 : BIND(&external_string);
6883 : {
          : // PointerToString bails to |runtime| when the character data cannot
          : // be addressed directly (see ToDirectStringAssembler).
6884 : Node* const fake_sequential_string = to_direct.PointerToString(&runtime);
6885 :
6886 448 : var_result = AllocAndCopyStringCharacters(
6887 : fake_sequential_string, instance_type, offset, substr_length);
6888 :
6889 448 : Counters* counters = isolate()->counters();
6890 448 : IncrementCounter(counters->sub_string_native(), 1);
6891 :
6892 448 : Goto(&end);
6893 : }
6894 :
6895 : BIND(&empty);
6896 : {
6897 : var_result = EmptyStringConstant();
6898 448 : Goto(&end);
6899 : }
6900 :
6901 : // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
6902 : BIND(&single_char);
6903 : {
6904 448 : TNode<Int32T> char_code = StringCharCodeAt(string, from);
6905 448 : var_result = StringFromSingleCharCode(char_code);
6906 448 : Goto(&end);
6907 : }
6908 :
6909 : BIND(&original_string_or_invalid_length);
6910 : {
6911 : CSA_ASSERT(this, IntPtrEqual(substr_length, string_length));
6912 :
6913 : // Equal length - check if {from, to} == {0, str.length}.
6914 1344 : GotoIf(UintPtrGreaterThan(from, IntPtrConstant(0)), &runtime);
6915 :
6916 : // Return the original string (substr_length == string_length).
6917 :
6918 448 : Counters* counters = isolate()->counters();
6919 448 : IncrementCounter(counters->sub_string_native(), 1);
6920 :
6921 : var_result = string;
6922 448 : Goto(&end);
6923 : }
6924 :
6925 : // Fall back to a runtime call.
6926 : BIND(&runtime);
6927 : {
6928 : var_result =
6929 1344 : CAST(CallRuntime(Runtime::kStringSubstring, NoContextConstant(), string,
6930 : SmiTag(from), SmiTag(to)));
6931 448 : Goto(&end);
6932 : }
6933 :
6934 : BIND(&end);
6935 448 : return var_result.value();
6936 : }
6937 :
          : // Sets up the unpacking state for |string|: the current string, its
          : // cached instance type, the accumulated character offset (starts at 0),
          : // and the is-external flag (starts false). TryToDirect() mutates these.
6938 4368 : ToDirectStringAssembler::ToDirectStringAssembler(
6939 : compiler::CodeAssemblerState* state, Node* string, Flags flags)
6940 : : CodeStubAssembler(state),
6941 : var_string_(this, MachineRepresentation::kTagged, string),
6942 : var_instance_type_(this, MachineRepresentation::kWord32),
6943 : var_offset_(this, MachineType::PointerRepresentation()),
6944 : var_is_external_(this, MachineRepresentation::kWord32),
6945 4368 : flags_(flags) {
6946 : CSA_ASSERT(this, TaggedIsNotSmi(string));
6947 : CSA_ASSERT(this, IsString(string));
6948 :
6949 4368 : var_string_.Bind(string);
6950 8736 : var_offset_.Bind(IntPtrConstant(0));
6951 8736 : var_instance_type_.Bind(LoadInstanceType(string));
6952 8736 : var_is_external_.Bind(Int32Constant(0));
6953 4368 : }
6954 :
          : // Iteratively unpacks var_string_ until it is sequential or external:
          : // flat cons strings (empty second part) are replaced by their first
          : // part, sliced strings by their parent (accumulating the slice offset
          : // into var_offset_), thin strings by their actual string. Jumps to
          : // |if_bailout| for non-flat cons strings and — with
          : // kDontUnpackSlicedStrings or !FLAG_string_slices — for sliced strings.
          : // Sets var_is_external_ when the final string is external.
6955 4368 : TNode<String> ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
6956 8736 : VariableList vars({&var_string_, &var_offset_, &var_instance_type_}, zone());
6957 4368 : Label dispatch(this, vars);
6958 4368 : Label if_iscons(this);
6959 4368 : Label if_isexternal(this);
6960 4368 : Label if_issliced(this);
6961 4368 : Label if_isthin(this);
6962 4368 : Label out(this);
6963 :
6964 13104 : Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
6965 8736 : &dispatch);
6966 :
6967 : // Dispatch based on string representation.
6968 : BIND(&dispatch);
6969 : {
6970 : int32_t values[] = {
6971 : kSeqStringTag, kConsStringTag, kExternalStringTag,
6972 : kSlicedStringTag, kThinStringTag,
6973 4368 : };
6974 : Label* labels[] = {
6975 : &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
6976 4368 : };
6977 : STATIC_ASSERT(arraysize(values) == arraysize(labels));
6978 :
6979 : Node* const representation = Word32And(
6980 17472 : var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
6981 4368 : Switch(representation, if_bailout, values, labels, arraysize(values));
6982 : }
6983 :
6984 : // Cons string. Check whether it is flat, then fetch first part.
6985 : // Flat cons strings have an empty second part.
6986 : BIND(&if_iscons);
6987 : {
6988 4368 : Node* const string = var_string_.value();
6989 4368 : GotoIfNot(IsEmptyString(LoadObjectField(string, ConsString::kSecondOffset)),
6990 8736 : if_bailout);
6991 :
6992 : Node* const lhs = LoadObjectField(string, ConsString::kFirstOffset);
6993 4368 : var_string_.Bind(lhs);
6994 8736 : var_instance_type_.Bind(LoadInstanceType(lhs));
6995 :
          : // The first part may itself be indirect; loop back to dispatch.
6996 4368 : Goto(&dispatch);
6997 : }
6998 :
6999 : // Sliced string. Fetch parent and correct start index by offset.
7000 : BIND(&if_issliced);
7001 : {
7002 4368 : if (!FLAG_string_slices || (flags_ & kDontUnpackSlicedStrings)) {
7003 56 : Goto(if_bailout);
7004 : } else {
7005 4312 : Node* const string = var_string_.value();
7006 : Node* const sliced_offset =
7007 8624 : LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
7008 12936 : var_offset_.Bind(IntPtrAdd(var_offset_.value(), sliced_offset));
7009 :
7010 : Node* const parent = LoadObjectField(string, SlicedString::kParentOffset);
7011 4312 : var_string_.Bind(parent);
7012 8624 : var_instance_type_.Bind(LoadInstanceType(parent));
7013 :
7014 4312 : Goto(&dispatch);
7015 : }
7016 : }
7017 :
7018 : // Thin string. Fetch the actual string.
7019 : BIND(&if_isthin);
7020 : {
7021 4368 : Node* const string = var_string_.value();
7022 : Node* const actual_string =
7023 : LoadObjectField(string, ThinString::kActualOffset);
7024 8736 : Node* const actual_instance_type = LoadInstanceType(actual_string);
7025 :
7026 4368 : var_string_.Bind(actual_string);
7027 4368 : var_instance_type_.Bind(actual_instance_type);
7028 :
7029 4368 : Goto(&dispatch);
7030 : }
7031 :
7032 : // External string.
7033 : BIND(&if_isexternal);
7034 8736 : var_is_external_.Bind(Int32Constant(1));
7035 4368 : Goto(&out);
7036 :
7037 : BIND(&out);
7038 8736 : return CAST(var_string_.value());
7039 : }
7040 :
          : // After TryToDirect, returns a raw pointer either to the character data
          : // (PTR_TO_DATA) or to a position such that the data sits at the usual
          : // sequential-string header offset (PTR_TO_STRING). Jumps to |if_bailout|
          : // for uncached external strings, whose resource data is not reachable
          : // via kResourceDataOffset.
7041 4368 : TNode<RawPtrT> ToDirectStringAssembler::TryToSequential(
7042 : StringPointerKind ptr_kind, Label* if_bailout) {
7043 4368 : CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
7044 :
7045 4368 : TVARIABLE(RawPtrT, var_result);
7046 4368 : Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
7047 4368 : Branch(is_external(), &if_isexternal, &if_issequential);
7048 :
7049 : BIND(&if_issequential);
7050 : {
          : // One-byte and two-byte headers share the same size, so one offset
          : // works for either encoding.
7051 : STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
7052 : SeqTwoByteString::kHeaderSize);
7053 8736 : TNode<IntPtrT> result = BitcastTaggedToWord(var_string_.value());
7054 4368 : if (ptr_kind == PTR_TO_DATA) {
7055 3920 : result = IntPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7056 : kHeapObjectTag));
7057 : }
7058 : var_result = ReinterpretCast<RawPtrT>(result);
7059 4368 : Goto(&out);
7060 : }
7061 :
7062 : BIND(&if_isexternal);
7063 : {
7064 4368 : GotoIf(IsUncachedExternalStringInstanceType(var_instance_type_.value()),
7065 4368 : if_bailout);
7066 :
7067 4368 : TNode<String> string = CAST(var_string_.value());
7068 : TNode<IntPtrT> result =
7069 : LoadObjectField<IntPtrT>(string, ExternalString::kResourceDataOffset);
          : // For PTR_TO_STRING, back up so the data lands at the sequential
          : // header offset, mirroring the adjustment in the sequential case.
7070 4368 : if (ptr_kind == PTR_TO_STRING) {
7071 448 : result = IntPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7072 : kHeapObjectTag));
7073 : }
7074 : var_result = ReinterpretCast<RawPtrT>(result);
7075 4368 : Goto(&out);
7076 : }
7077 :
7078 : BIND(&out);
7079 4368 : return var_result.value();
7080 : }
7081 :
          : // Branches to |can_deref| if |string| is an indirect string whose target
          : // can be fetched with a single load: a thin string, or a flat cons
          : // string (empty second part). Otherwise branches to |cannot_deref|.
7082 1008 : void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string,
7083 : Node* instance_type,
7084 : Label* can_deref,
7085 : Label* cannot_deref) {
7086 : CSA_ASSERT(this, IsString(string));
7087 : Node* representation =
7088 3024 : Word32And(instance_type, Int32Constant(kStringRepresentationMask));
7089 3024 : GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref);
7090 2016 : GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)),
7091 2016 : cannot_deref);
7092 : // Cons string.
7093 : Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
7094 2016 : GotoIf(IsEmptyString(rhs), can_deref);
7095 1008 : Goto(cannot_deref);
7096 1008 : }
7097 :
          : // Returns the target of an indirect string (thin or flat cons); jumps to
          : // |cannot_deref| if |string| is neither. A single load serves both cases
          : // because ThinString::kActualOffset == ConsString::kFirstOffset (checked
          : // by the STATIC_ASSERT below).
7098 0 : Node* CodeStubAssembler::DerefIndirectString(TNode<String> string,
7099 : TNode<Int32T> instance_type,
7100 : Label* cannot_deref) {
7101 0 : Label deref(this);
7102 0 : BranchIfCanDerefIndirectString(string, instance_type, &deref, cannot_deref);
7103 : BIND(&deref);
7104 : STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7105 : static_cast<int>(ConsString::kFirstOffset));
7106 0 : return LoadObjectField(string, ThinString::kActualOffset);
7107 : }
7108 :
          : // Unconditionally replaces |var_string| by the target of the indirect
          : // string it holds. The caller must have established dereferenceability
          : // (e.g. via BranchIfCanDerefIndirectString); DEBUG builds trap if not.
7109 1008 : void CodeStubAssembler::DerefIndirectString(Variable* var_string,
7110 : Node* instance_type) {
7111 : #ifdef DEBUG
7112 : Label can_deref(this), cannot_deref(this);
7113 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &can_deref,
7114 : &cannot_deref);
7115 : BIND(&cannot_deref);
7116 : DebugBreak(); // Should be able to dereference string.
7117 : Goto(&can_deref);
7118 : BIND(&can_deref);
7119 : #endif // DEBUG
7120 :
7121 : STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7122 : static_cast<int>(ConsString::kFirstOffset));
7123 : var_string->Bind(
7124 2016 : LoadObjectField(var_string->value(), ThinString::kActualOffset));
7125 1008 : }
7126 :
          : // If |var_string| holds a dereferenceable indirect string, replaces it
          : // by its target and jumps to |did_deref|; otherwise jumps to
          : // |cannot_deref| leaving |var_string| unchanged.
7127 1008 : void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
7128 : Node* instance_type,
7129 : Label* did_deref,
7130 : Label* cannot_deref) {
7131 1008 : Label deref(this);
7132 : BranchIfCanDerefIndirectString(var_string->value(), instance_type, &deref,
7133 1008 : cannot_deref);
7134 :
7135 : BIND(&deref);
7136 : {
7137 1008 : DerefIndirectString(var_string, instance_type);
7138 1008 : Goto(did_deref);
7139 1008 : }
7140 1008 : }
7141 :
          : // Tries to dereference both operand strings. Jumps to |did_something|
          : // if at least one of them was an indirect string that got unpacked;
          : // falls through when neither was.
7142 336 : void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
7143 : Node* left_instance_type,
7144 : Variable* var_right,
7145 : Node* right_instance_type,
7146 : Label* did_something) {
7147 672 : Label did_nothing_left(this), did_something_left(this),
7148 336 : didnt_do_anything(this);
7149 : MaybeDerefIndirectString(var_left, left_instance_type, &did_something_left,
7150 336 : &did_nothing_left);
7151 :
7152 : BIND(&did_something_left);
7153 : {
          : // Left was unpacked, so we report progress regardless of the right
          : // side — both labels here are |did_something|.
7154 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7155 336 : did_something);
7156 : }
7157 :
7158 : BIND(&did_nothing_left);
7159 : {
7160 : MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7161 336 : &didnt_do_anything);
7162 : }
7163 :
7164 336 : BIND(&didnt_do_anything);
7165 : // Fall through if neither string was an indirect string.
7166 336 : }
7167 :
          : // Concatenates |left| + |right|. Returns the non-empty operand when the
          : // other is empty; builds a ConsString when the combined length reaches
          : // ConsString::kMinLength; otherwise copies both operands into a fresh
          : // sequential string (after flattening indirect operands). Falls back to
          : // Runtime::kStringAdd on length overflow (which must also invalidate the
          : // string length protector) or mixed/exotic representations.
7168 56 : TNode<String> CodeStubAssembler::StringAdd(Node* context, TNode<String> left,
7169 : TNode<String> right,
7170 : AllocationFlags flags) {
7171 56 : TVARIABLE(String, result);
7172 56 : Label check_right(this), runtime(this, Label::kDeferred), cons(this),
7173 56 : done(this, &result), done_native(this, &result);
7174 56 : Counters* counters = isolate()->counters();
7175 :
7176 : TNode<Uint32T> left_length = LoadStringLengthAsWord32(left);
7177 112 : GotoIfNot(Word32Equal(left_length, Uint32Constant(0)), &check_right);
7178 : result = right;
7179 56 : Goto(&done_native);
7180 :
7181 : BIND(&check_right);
7182 : TNode<Uint32T> right_length = LoadStringLengthAsWord32(right);
7183 112 : GotoIfNot(Word32Equal(right_length, Uint32Constant(0)), &cons);
7184 : result = left;
7185 56 : Goto(&done_native);
7186 :
7187 : BIND(&cons);
7188 : {
7189 : TNode<Uint32T> new_length = Uint32Add(left_length, right_length);
7190 :
7191 : // If new length is greater than String::kMaxLength, goto runtime to
7192 : // throw. Note: we also need to invalidate the string length protector, so
7193 : // can't just throw here directly.
7194 56 : GotoIf(Uint32GreaterThan(new_length, Uint32Constant(String::kMaxLength)),
7195 112 : &runtime);
7196 :
7197 : TVARIABLE(String, var_left, left);
7198 : TVARIABLE(String, var_right, right);
7199 56 : Variable* input_vars[2] = {&var_left, &var_right};
7200 112 : Label non_cons(this, 2, input_vars);
7201 56 : Label slow(this, Label::kDeferred);
7202 56 : GotoIf(Uint32LessThan(new_length, Uint32Constant(ConsString::kMinLength)),
7203 112 : &non_cons);
7204 :
7205 56 : result =
7206 : NewConsString(new_length, var_left.value(), var_right.value(), flags);
7207 56 : Goto(&done_native);
7208 :
7209 : BIND(&non_cons);
7210 :
7211 56 : Comment("Full string concatenate");
7212 112 : Node* left_instance_type = LoadInstanceType(var_left.value());
7213 112 : Node* right_instance_type = LoadInstanceType(var_right.value());
7214 : // Compute intersection and difference of instance types.
7215 :
7216 : Node* ored_instance_types =
7217 112 : Word32Or(left_instance_type, right_instance_type);
7218 : Node* xored_instance_types =
7219 112 : Word32Xor(left_instance_type, right_instance_type);
7220 :
7221 : // Check if both strings have the same encoding and both are sequential.
          : // Differing encodings -> runtime; non-sequential operands -> slow path,
          : // which tries to unwrap indirect strings and retries.
7222 112 : GotoIf(IsSetWord32(xored_instance_types, kStringEncodingMask), &runtime);
7223 112 : GotoIf(IsSetWord32(ored_instance_types, kStringRepresentationMask), &slow);
7224 :
7225 112 : TNode<IntPtrT> word_left_length = Signed(ChangeUint32ToWord(left_length));
7226 112 : TNode<IntPtrT> word_right_length = Signed(ChangeUint32ToWord(right_length));
7227 :
7228 56 : Label two_byte(this);
7229 : GotoIf(Word32Equal(Word32And(ored_instance_types,
7230 112 : Int32Constant(kStringEncodingMask)),
7231 224 : Int32Constant(kTwoByteStringTag)),
7232 112 : &two_byte);
7233 : // One-byte sequential string case
7234 112 : result = AllocateSeqOneByteString(context, new_length);
7235 : CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7236 : IntPtrConstant(0), word_left_length,
7237 112 : String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7238 : CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7239 : word_left_length, word_right_length,
7240 112 : String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7241 56 : Goto(&done_native);
7242 :
7243 : BIND(&two_byte);
7244 : {
7245 : // Two-byte sequential string case
7246 112 : result = AllocateSeqTwoByteString(context, new_length);
7247 : CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7248 : IntPtrConstant(0), word_left_length,
7249 : String::TWO_BYTE_ENCODING,
7250 112 : String::TWO_BYTE_ENCODING);
7251 : CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7252 : word_left_length, word_right_length,
7253 : String::TWO_BYTE_ENCODING,
7254 112 : String::TWO_BYTE_ENCODING);
7255 56 : Goto(&done_native);
7256 : }
7257 :
7258 : BIND(&slow);
7259 : {
7260 : // Try to unwrap indirect strings, restart the above attempt on success.
7261 : MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
7262 56 : right_instance_type, &non_cons);
7263 56 : Goto(&runtime);
7264 : }
7265 : }
7266 : BIND(&runtime);
7267 : {
7268 : result = CAST(CallRuntime(Runtime::kStringAdd, context, left, right));
7269 56 : Goto(&done);
7270 : }
7271 :
7272 : BIND(&done_native);
7273 : {
7274 56 : IncrementCounter(counters->string_add_native(), 1);
7275 56 : Goto(&done);
7276 : }
7277 :
7278 : BIND(&done);
7279 56 : return result.value();
7280 : }
7281 :
          : // Builds a one-code-point string. BMP code points (< 0x10000) delegate to
          : // StringFromSingleCharCode; supplementary-plane code points are stored as
          : // a UTF-16 surrogate pair in a fresh 2-character two-byte string. For
          : // UTF32 input the lead/trail pair is computed here; for UTF16 input the
          : // 32-bit value is stored as-is (presumably already holding both code
          : // units — matches the word layout produced by the UTF32 branch).
7282 112 : TNode<String> CodeStubAssembler::StringFromSingleCodePoint(
7283 : TNode<Int32T> codepoint, UnicodeEncoding encoding) {
7284 112 : VARIABLE(var_result, MachineRepresentation::kTagged, EmptyStringConstant());
7285 :
7286 112 : Label if_isword16(this), if_isword32(this), return_result(this);
7287 :
7288 224 : Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
7289 224 : &if_isword32);
7290 :
7291 : BIND(&if_isword16);
7292 : {
7293 224 : var_result.Bind(StringFromSingleCharCode(codepoint));
7294 112 : Goto(&return_result);
7295 : }
7296 :
7297 : BIND(&if_isword32);
7298 : {
7299 112 : switch (encoding) {
7300 : case UnicodeEncoding::UTF16:
7301 : break;
7302 : case UnicodeEncoding::UTF32: {
7303 : // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
7304 0 : Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
7305 :
7306 : // lead = (codepoint >> 10) + LEAD_OFFSET
7307 : Node* lead =
7308 0 : Int32Add(Word32Shr(codepoint, Int32Constant(10)), lead_offset);
7309 :
7310 : // trail = (codepoint & 0x3FF) + 0xDC00;
7311 0 : Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
7312 0 : Int32Constant(0xDC00));
7313 :
7314 : // codepoint = (trail << 16) | lead;
7315 0 : codepoint = Signed(Word32Or(Word32Shl(trail, Int32Constant(16)), lead));
7316 0 : break;
7317 : }
7318 : }
7319 :
          : // Write both 16-bit code units with a single 32-bit store.
7320 224 : Node* value = AllocateSeqTwoByteString(2);
7321 : StoreNoWriteBarrier(
7322 : MachineRepresentation::kWord32, value,
7323 : IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
7324 224 : codepoint);
7325 112 : var_result.Bind(value);
7326 112 : Goto(&return_result);
7327 : }
7328 :
7329 : BIND(&return_result);
7330 224 : return CAST(var_result.value());
7331 : }
7332 :
          : // Converts a String to a Number. Fast path: strings whose hash field
          : // carries a cached array index are turned into a Smi directly; all other
          : // strings go to Runtime::kStringToNumber.
7333 901 : TNode<Number> CodeStubAssembler::StringToNumber(TNode<String> input) {
7334 901 : Label runtime(this, Label::kDeferred);
7335 901 : Label end(this);
7336 :
7337 : TVARIABLE(Number, var_result);
7338 :
7339 : // Check if string has a cached array index.
7340 : TNode<Uint32T> hash = LoadNameHashField(input);
7341 901 : GotoIf(IsSetWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
7342 1802 : &runtime);
7343 :
7344 1802 : var_result =
7345 1802 : SmiTag(Signed(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
7346 901 : Goto(&end);
7347 :
7348 : BIND(&runtime);
7349 : {
7350 : var_result =
7351 : CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input));
7352 901 : Goto(&end);
7353 : }
7354 :
7355 : BIND(&end);
7356 901 : return var_result.value();
7357 : }
7358 :
          : // Converts a Number to its String representation via the isolate's
          : // number-string cache (alternating number/string entries, indexed by a
          : // hash masked to the cache size). Smis and HeapNumbers that normalize to
          : // Smis use the Smi index path; other HeapNumbers hash their two 32-bit
          : // halves. Cache misses go to Runtime::kNumberToString.
7359 341 : TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
7360 341 : TVARIABLE(String, result);
7361 : TVARIABLE(Smi, smi_input);
7362 341 : Label runtime(this, Label::kDeferred), if_smi(this), if_heap_number(this),
7363 341 : done(this, &result);
7364 :
7365 : // Load the number string cache.
7366 682 : Node* number_string_cache = LoadRoot(RootIndex::kNumberStringCache);
7367 :
7368 : // Make the hash mask from the length of the number string cache. It
7369 : // contains two elements (number and string) for each cache entry.
7370 : // TODO(ishell): cleanup mask handling.
7371 : Node* mask =
7372 1023 : BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
7373 341 : TNode<IntPtrT> one = IntPtrConstant(1);
7374 682 : mask = IntPtrSub(mask, one);
7375 :
7376 682 : GotoIfNot(TaggedIsSmi(input), &if_heap_number);
7377 : smi_input = CAST(input);
7378 341 : Goto(&if_smi);
7379 :
7380 : BIND(&if_heap_number);
7381 : {
7382 : TNode<HeapNumber> heap_number_input = CAST(input);
7383 : // Try normalizing the HeapNumber.
7384 341 : TryHeapNumberToSmi(heap_number_input, smi_input, &if_smi);
7385 :
7386 : // Make a hash from the two 32-bit values of the double.
7387 : TNode<Int32T> low =
7388 : LoadObjectField<Int32T>(heap_number_input, HeapNumber::kValueOffset);
7389 : TNode<Int32T> high = LoadObjectField<Int32T>(
7390 : heap_number_input, HeapNumber::kValueOffset + kIntSize);
7391 341 : TNode<Word32T> hash = Word32Xor(low, high);
7392 682 : TNode<WordT> word_hash = WordShl(ChangeInt32ToIntPtr(hash), one);
7393 : TNode<WordT> index =
7394 682 : WordAnd(word_hash, WordSar(mask, SmiShiftBitsConstant()));
7395 :
7396 : // Cache entry's key must be a heap number
7397 : Node* number_key = LoadFixedArrayElement(CAST(number_string_cache), index);
7398 682 : GotoIf(TaggedIsSmi(number_key), &runtime);
7399 682 : GotoIfNot(IsHeapNumber(number_key), &runtime);
7400 :
7401 : // Cache entry's key must match the heap number value we're looking for.
          : // Bitwise comparison of both halves — no float semantics involved.
7402 : Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
7403 341 : MachineType::Int32());
7404 : Node* high_compare = LoadObjectField(
7405 341 : number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
7406 682 : GotoIfNot(Word32Equal(low, low_compare), &runtime);
7407 682 : GotoIfNot(Word32Equal(high, high_compare), &runtime);
7408 :
7409 : // Heap number match, return value from cache entry.
7410 : result = CAST(
7411 : LoadFixedArrayElement(CAST(number_string_cache), index, kTaggedSize));
7412 341 : Goto(&done);
7413 : }
7414 :
7415 : BIND(&if_smi);
7416 : {
7417 : // Load the smi key, make sure it matches the smi we're looking for.
7418 : Node* smi_index = BitcastWordToTagged(
7419 1705 : WordAnd(WordShl(BitcastTaggedToWord(smi_input.value()), one), mask));
7420 : Node* smi_key = LoadFixedArrayElement(CAST(number_string_cache), smi_index,
7421 : 0, SMI_PARAMETERS);
7422 341 : GotoIf(WordNotEqual(smi_key, smi_input.value()), &runtime);
7423 :
7424 : // Smi match, return value from cache entry.
7425 : result = CAST(LoadFixedArrayElement(CAST(number_string_cache), smi_index,
7426 : kTaggedSize, SMI_PARAMETERS));
7427 341 : Goto(&done);
7428 : }
7429 :
7430 : BIND(&runtime);
7431 : {
7432 : // No cache entry, go to the runtime.
7433 : result =
7434 : CAST(CallRuntime(Runtime::kNumberToString, NoContextConstant(), input));
7435 341 : Goto(&done);
7436 : }
7437 : BIND(&done);
7438 341 : return result.value();
7439 : }
7440 :
          : // Shared slow path of ToNumber/ToNumeric for inputs that are neither Smi
          : // nor HeapNumber. Dispatches on instance type: Strings use
          : // StringToNumber; BigInts are returned as-is in kToNumeric mode, and in
          : // kToNumber mode either throw (kThrow, via the runtime's "other" path)
          : // or convert (kConvertToNumber, via Runtime::kBigIntToNumber); Oddballs
          : // load their cached ToNumber value; JSReceivers run ToPrimitive and
          : // loop; anything else calls the runtime to raise the proper exception.
7441 845 : Node* CodeStubAssembler::NonNumberToNumberOrNumeric(
7442 : Node* context, Node* input, Object::Conversion mode,
7443 : BigIntHandling bigint_handling) {
7444 : CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
7445 : CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
7446 :
7447 : // We might need to loop once here due to ToPrimitive conversions.
7448 845 : VARIABLE(var_input, MachineRepresentation::kTagged, input);
7449 1690 : VARIABLE(var_result, MachineRepresentation::kTagged);
7450 845 : Label loop(this, &var_input);
7451 845 : Label end(this);
7452 845 : Goto(&loop);
7453 : BIND(&loop);
7454 : {
7455 : // Load the current {input} value (known to be a HeapObject).
7456 845 : Node* input = var_input.value();
7457 :
7458 : // Dispatch on the {input} instance type.
7459 1690 : Node* input_instance_type = LoadInstanceType(input);
7460 845 : Label if_inputisstring(this), if_inputisoddball(this),
7461 845 : if_inputisbigint(this), if_inputisreceiver(this, Label::kDeferred),
7462 845 : if_inputisother(this, Label::kDeferred);
7463 1690 : GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
7464 845 : GotoIf(IsBigIntInstanceType(input_instance_type), &if_inputisbigint);
7465 845 : GotoIf(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE),
7466 1690 : &if_inputisoddball);
7467 845 : Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
7468 1690 : &if_inputisother);
7469 :
7470 : BIND(&if_inputisstring);
7471 : {
7472 : // The {input} is a String, use the fast stub to convert it to a Number.
7473 845 : TNode<String> string_input = CAST(input);
7474 1690 : var_result.Bind(StringToNumber(string_input));
7475 845 : Goto(&end);
7476 : }
7477 :
7478 : BIND(&if_inputisbigint);
7479 845 : if (mode == Object::Conversion::kToNumeric) {
7480 112 : var_result.Bind(input);
7481 112 : Goto(&end);
7482 : } else {
7483 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
7484 733 : if (bigint_handling == BigIntHandling::kThrow) {
7485 621 : Goto(&if_inputisother);
7486 : } else {
7487 : DCHECK_EQ(bigint_handling, BigIntHandling::kConvertToNumber);
7488 112 : var_result.Bind(CallRuntime(Runtime::kBigIntToNumber, context, input));
7489 112 : Goto(&end);
7490 : }
7491 : }
7492 :
7493 : BIND(&if_inputisoddball);
7494 : {
7495 : // The {input} is an Oddball, we just need to load the Number value of it.
7496 845 : var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
7497 845 : Goto(&end);
7498 : }
7499 :
7500 : BIND(&if_inputisreceiver);
7501 : {
7502 : // The {input} is a JSReceiver, we need to convert it to a Primitive first
7503 : // using the ToPrimitive type conversion, preferably yielding a Number.
7504 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
7505 845 : isolate(), ToPrimitiveHint::kNumber);
7506 1690 : Node* result = CallStub(callable, context, input);
7507 :
7508 : // Check if the {result} is already a Number/Numeric.
7509 845 : Label if_done(this), if_notdone(this);
7510 1578 : Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
7511 845 : : IsNumeric(result),
7512 2647 : &if_done, &if_notdone);
7513 :
7514 : BIND(&if_done);
7515 : {
7516 : // The ToPrimitive conversion already gave us a Number/Numeric, so we're
7517 : // done.
7518 845 : var_result.Bind(result);
7519 845 : Goto(&end);
7520 : }
7521 :
7522 : BIND(&if_notdone);
7523 : {
7524 : // We now have a Primitive {result}, but it's not yet a Number/Numeric.
7525 845 : var_input.Bind(result);
7526 845 : Goto(&loop);
7527 : }
7528 : }
7529 :
7530 : BIND(&if_inputisother);
7531 : {
7532 : // The {input} is something else (e.g. Symbol), let the runtime figure
7533 : // out the correct exception.
7534 : // Note: We cannot tail call to the runtime here, as js-to-wasm
7535 : // trampolines also use this code currently, and they declare all
7536 : // outgoing parameters as untagged, while we would push a tagged
7537 : // object here.
7538 : auto function_id = mode == Object::Conversion::kToNumber
7539 : ? Runtime::kToNumber
7540 845 : : Runtime::kToNumeric;
7541 845 : var_result.Bind(CallRuntime(function_id, context, input));
7542 845 : Goto(&end);
7543 845 : }
7544 : }
7545 :
7546 : BIND(&end);
7547 : if (mode == Object::Conversion::kToNumeric) {
7548 : CSA_ASSERT(this, IsNumeric(var_result.value()));
7549 : } else {
7550 : DCHECK_EQ(mode, Object::Conversion::kToNumber);
7551 : CSA_ASSERT(this, IsNumber(var_result.value()));
7552 : }
7553 1690 : return var_result.value();
7554 : }
7555 :
          : // ToNumber for non-Smi, non-HeapNumber inputs; thin typed wrapper over
          : // NonNumberToNumberOrNumeric in kToNumber mode.
7556 56 : TNode<Number> CodeStubAssembler::NonNumberToNumber(
7557 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input,
7558 : BigIntHandling bigint_handling) {
7559 733 : return CAST(NonNumberToNumberOrNumeric(
7560 : context, input, Object::Conversion::kToNumber, bigint_handling));
7561 : }
7562 :
          : // ToNumeric for non-Smi, non-HeapNumber inputs; thin typed wrapper over
          : // NonNumberToNumberOrNumeric in kToNumeric mode (BigInts pass through).
7563 112 : TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(
7564 : SloppyTNode<Context> context, SloppyTNode<HeapObject> input) {
7565 : Node* result = NonNumberToNumberOrNumeric(context, input,
7566 112 : Object::Conversion::kToNumeric);
7567 : CSA_SLOW_ASSERT(this, IsNumeric(result));
7568 112 : return UncheckedCast<Numeric>(result);
7569 : }
7570 :
          : // Inline fast path of ToNumber: Smis and HeapNumbers are returned
          : // unchanged; everything else takes the deferred path and calls the
          : // NonNumberToNumber builtin.
7571 896 : TNode<Number> CodeStubAssembler::ToNumber_Inline(SloppyTNode<Context> context,
7572 : SloppyTNode<Object> input) {
7573 896 : TVARIABLE(Number, var_result);
7574 896 : Label end(this), not_smi(this, Label::kDeferred);
7575 :
7576 1792 : GotoIfNot(TaggedIsSmi(input), &not_smi);
7577 : var_result = CAST(input);
7578 896 : Goto(&end);
7579 :
7580 : BIND(&not_smi);
7581 : {
7582 2688 : var_result =
7583 : Select<Number>(IsHeapNumber(CAST(input)), [=] { return CAST(input); },
7584 896 : [=] {
7585 896 : return CAST(CallBuiltin(Builtins::kNonNumberToNumber,
7586 : context, input));
7587 896 : });
7588 896 : Goto(&end);
7589 : }
7590 :
7591 : BIND(&end);
7592 896 : return var_result.value();
7593 : }
7594 :
          : // Full ToNumber: Smi and HeapNumber inputs are returned unchanged
          : // (deferred checks keep the common path straight-line); everything else
          : // goes through NonNumberToNumber with the given |bigint_handling|.
7595 677 : TNode<Number> CodeStubAssembler::ToNumber(SloppyTNode<Context> context,
7596 : SloppyTNode<Object> input,
7597 : BigIntHandling bigint_handling) {
7598 677 : TVARIABLE(Number, var_result);
7599 677 : Label end(this);
7600 :
7601 677 : Label not_smi(this, Label::kDeferred);
7602 1354 : GotoIfNot(TaggedIsSmi(input), &not_smi);
7603 : TNode<Smi> input_smi = CAST(input);
7604 : var_result = input_smi;
7605 677 : Goto(&end);
7606 :
7607 : BIND(&not_smi);
7608 : {
7609 : Label not_heap_number(this, Label::kDeferred);
7610 : TNode<HeapObject> input_ho = CAST(input);
7611 1354 : GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
7612 :
7613 : TNode<HeapNumber> input_hn = CAST(input_ho);
7614 : var_result = input_hn;
7615 677 : Goto(&end);
7616 :
7617 : BIND(&not_heap_number);
7618 : {
7619 : var_result = NonNumberToNumber(context, input_ho, bigint_handling);
7620 677 : Goto(&end);
7621 677 : }
7622 : }
7623 :
7624 : BIND(&end);
7625 677 : return var_result.value();
7626 : }
7627 :
          : // ToBigInt: BigInts pass through; Smis throw a TypeError
          : // (kBigIntFromObject); everything else is converted via
          : // Runtime::kToBigInt (which throws for inconvertible inputs).
7628 1568 : TNode<BigInt> CodeStubAssembler::ToBigInt(SloppyTNode<Context> context,
7629 : SloppyTNode<Object> input) {
7630 1568 : TVARIABLE(BigInt, var_result);
7631 1568 : Label if_bigint(this), done(this), if_throw(this);
7632 :
7633 3136 : GotoIf(TaggedIsSmi(input), &if_throw);
7634 3136 : GotoIf(IsBigInt(CAST(input)), &if_bigint);
7635 : var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input));
7636 1568 : Goto(&done);
7637 :
7638 : BIND(&if_bigint);
7639 : var_result = CAST(input);
7640 1568 : Goto(&done);
7641 :
7642 : BIND(&if_throw);
7643 1568 : ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
7644 :
7645 : BIND(&done);
7646 1568 : return var_result.value();
7647 : }
7648 :
          : // Convenience overload without feedback collection.
7649 336 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7650 : Variable* var_numeric) {
7651 336 : TaggedToNumeric(context, value, done, var_numeric, nullptr);
7652 336 : }
7653 :
          : // Same as TaggedToNumeric, but |var_feedback| is mandatory and receives
          : // the BinaryOperationFeedback classification of |value|.
7654 1008 : void CodeStubAssembler::TaggedToNumericWithFeedback(Node* context, Node* value,
7655 : Label* done,
7656 : Variable* var_numeric,
7657 : Variable* var_feedback) {
7658 : DCHECK_NOT_NULL(var_feedback);
7659 1008 : TaggedToNumeric(context, value, done, var_numeric, var_feedback);
7660 1008 : }
7661 :
          : // Converts |value| to a Numeric into |var_numeric| and jumps to |done|.
          : // Smis, HeapNumbers and BigInts are kept as-is; Oddballs use their cached
          : // ToNumber value; everything else calls the NonNumberToNumeric builtin.
          : // When |var_feedback| is non-null, it is overwritten with the matching
          : // BinaryOperationFeedback constant on each path (kAny for the builtin
          : // call, since the exact result kind is unknown here).
7662 1344 : void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7663 : Variable* var_numeric,
7664 : Variable* var_feedback) {
7665 1344 : var_numeric->Bind(value);
7666 2688 : Label if_smi(this), if_heapnumber(this), if_bigint(this), if_oddball(this);
7667 2688 : GotoIf(TaggedIsSmi(value), &if_smi);
7668 2688 : Node* map = LoadMap(value);
7669 2688 : GotoIf(IsHeapNumberMap(map), &if_heapnumber);
7670 2688 : Node* instance_type = LoadMapInstanceType(map);
7671 1344 : GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
7672 :
7673 : // {value} is not a Numeric yet.
7674 4032 : GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
7675 2688 : var_numeric->Bind(CallBuiltin(Builtins::kNonNumberToNumeric, context, value));
7676 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
7677 1344 : Goto(done);
7678 :
7679 : BIND(&if_smi);
7680 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
7681 1344 : Goto(done);
7682 :
7683 : BIND(&if_heapnumber);
7684 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumber);
7685 1344 : Goto(done);
7686 :
7687 : BIND(&if_bigint);
7688 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
7689 1344 : Goto(done);
7690 :
7691 : BIND(&if_oddball);
7692 1344 : OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumberOrOddball);
7693 1344 : var_numeric->Bind(LoadObjectField(value, Oddball::kToNumberOffset));
7694 2688 : Goto(done);
7695 1344 : }
7696 :
// ES#sec-touint32
// Converts {input} to a Number in the uint32 range [0, 2^32). Positive smis
// pass through unchanged; all other inputs go through ToNumber and are then
// reduced modulo 2^32 (with +-0, NaN, and +-Infinity mapping to 0).
TNode<Number> CodeStubAssembler::ToUint32(SloppyTNode<Context> context,
                                          SloppyTNode<Object> input) {
  Node* const float_zero = Float64Constant(0.0);
  Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));

  Label out(this);

  VARIABLE(var_result, MachineRepresentation::kTagged, input);

  // Early exit for positive smis.
  {
    // TODO(jgruber): This branch and the recheck below can be removed once we
    // have a ToNumber with multiple exits.
    Label next(this, Label::kDeferred);
    Branch(TaggedIsPositiveSmi(input), &out, &next);
    BIND(&next);
  }

  Node* const number = ToNumber(context, input);
  var_result.Bind(number);

  // Perhaps we have a positive smi now.
  {
    Label next(this, Label::kDeferred);
    Branch(TaggedIsPositiveSmi(number), &out, &next);
    BIND(&next);
  }

  Label if_isnegativesmi(this), if_isheapnumber(this);
  Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);

  BIND(&if_isnegativesmi);
  {
    // Reinterpret the negative smi's bit pattern as unsigned; the result does
    // not fit in a smi, so box it in a HeapNumber.
    Node* const uint32_value = SmiToInt32(number);
    Node* float64_value = ChangeUint32ToFloat64(uint32_value);
    var_result.Bind(AllocateHeapNumberWithValue(float64_value));
    Goto(&out);
  }

  BIND(&if_isheapnumber);
  {
    Label return_zero(this);
    Node* const value = LoadHeapNumberValue(number);

    {
      // +-0.
      Label next(this);
      Branch(Float64Equal(value, float_zero), &return_zero, &next);
      BIND(&next);
    }

    {
      // NaN.
      Label next(this);
      Branch(Float64Equal(value, value), &next, &return_zero);
      BIND(&next);
    }

    {
      // +Infinity.
      Label next(this);
      Node* const positive_infinity =
          Float64Constant(std::numeric_limits<double>::infinity());
      Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
      BIND(&next);
    }

    {
      // -Infinity.
      Label next(this);
      Node* const negative_infinity =
          Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
      Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
      BIND(&next);
    }

    // * Let int be the mathematical value that is the same sign as number and
    //   whose magnitude is floor(abs(number)).
    // * Let int32bit be int modulo 2^32.
    // * Return int32bit.
    {
      // The double mod-add-mod sequence maps negative values into [0, 2^32).
      Node* x = Float64Trunc(value);
      x = Float64Mod(x, float_two_32);
      x = Float64Add(x, float_two_32);
      x = Float64Mod(x, float_two_32);

      Node* const result = ChangeFloat64ToTagged(x);
      var_result.Bind(result);
      Goto(&out);
    }

    BIND(&return_zero);
    {
      var_result.Bind(SmiConstant(0));
      Goto(&out);
    }
  }

  BIND(&out);
  return CAST(var_result.value());
}
7799 :
// ES#sec-tostring (partial inline): handles Smi/HeapNumber via NumberToString,
// Strings as identity, and Oddballs via their cached string; everything else
// falls back to the ToString runtime function.
// NOTE(review): the coverage dump garbled `&not_heap_number` into
// `¬_heap_number` (HTML `&not` entity); restored here.
TNode<String> CodeStubAssembler::ToString(SloppyTNode<Context> context,
                                          SloppyTNode<Object> input) {
  Label is_number(this);
  Label runtime(this, Label::kDeferred), done(this);
  VARIABLE(result, MachineRepresentation::kTagged);
  GotoIf(TaggedIsSmi(input), &is_number);

  TNode<Map> input_map = LoadMap(CAST(input));
  TNode<Int32T> input_instance_type = LoadMapInstanceType(input_map);

  // Strings convert to themselves.
  result.Bind(input);
  GotoIf(IsStringInstanceType(input_instance_type), &done);

  Label not_heap_number(this);
  Branch(IsHeapNumberMap(input_map), &is_number, &not_heap_number);

  BIND(&is_number);
  TNode<Number> number_input = CAST(input);
  result.Bind(NumberToString(number_input));
  Goto(&done);

  BIND(&not_heap_number);
  {
    // Oddballs carry a precomputed string representation.
    GotoIfNot(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE), &runtime);
    result.Bind(LoadObjectField(CAST(input), Oddball::kToStringOffset));
    Goto(&done);
  }

  BIND(&runtime);
  {
    result.Bind(CallRuntime(Runtime::kToString, context, input));
    Goto(&done);
  }

  BIND(&done);
  return CAST(result.value());
}
7837 :
// Fast inline ToString: strings pass through unchanged, everything else
// (including smis) is handled by the ToString builtin on a deferred path.
TNode<String> CodeStubAssembler::ToString_Inline(SloppyTNode<Context> context,
                                                 SloppyTNode<Object> input) {
  VARIABLE(var_result, MachineRepresentation::kTagged, input);
  Label stub_call(this, Label::kDeferred), out(this);

  GotoIf(TaggedIsSmi(input), &stub_call);
  Branch(IsString(CAST(input)), &out, &stub_call);

  BIND(&stub_call);
  var_result.Bind(CallBuiltin(Builtins::kToString, context, input));
  Goto(&out);

  BIND(&out);
  return CAST(var_result.value());
}
7853 :
// Converts a JSReceiver {input} to a primitive via NonPrimitiveToPrimitive
// (Number hint); non-receiver values are returned unchanged.
Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
  Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
  VARIABLE(result, MachineRepresentation::kTagged);
  Label done(this, &result);

  BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);

  BIND(&if_isreceiver);
  {
    // Convert {input} to a primitive first passing Number hint.
    Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
    result.Bind(CallStub(callable, context, input));
    Goto(&done);
  }

  BIND(&if_isnotreceiver);
  {
    result.Bind(input);
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}
7878 :
// ES#sec-toobject; defers entirely to the ToObject builtin.
TNode<JSReceiver> CodeStubAssembler::ToObject(SloppyTNode<Context> context,
                                              SloppyTNode<Object> input) {
  return CAST(CallBuiltin(Builtins::kToObject, context, input));
}
7883 :
// Inline fast path for ToObject: JSReceivers pass through unchanged, only
// non-receivers take the (deferred) ToObject builtin call.
TNode<JSReceiver> CodeStubAssembler::ToObject_Inline(TNode<Context> context,
                                                     TNode<Object> input) {
  TVARIABLE(JSReceiver, result);
  Label if_isreceiver(this), if_isnotreceiver(this, Label::kDeferred);
  Label done(this);

  BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);

  BIND(&if_isreceiver);
  {
    result = CAST(input);
    Goto(&done);
  }

  BIND(&if_isnotreceiver);
  {
    result = ToObject(context, input);
    Goto(&done);
  }

  BIND(&done);
  return result.value();
}
7907 :
// Converts {input} to a non-negative Smi index: smis are checked directly,
// undefined maps to 0, and anything else goes through ToInteger (truncating
// minus zero). Jumps to {range_error} if the result is negative or does not
// fit in a Smi.
TNode<Smi> CodeStubAssembler::ToSmiIndex(TNode<Object> input,
                                         TNode<Context> context,
                                         Label* range_error) {
  TVARIABLE(Smi, result);
  Label check_undefined(this), return_zero(this), defined(this),
      negative_check(this), done(this);

  GotoIfNot(TaggedIsSmi(input), &check_undefined);
  result = CAST(input);
  Goto(&negative_check);

  BIND(&check_undefined);
  Branch(IsUndefined(input), &return_zero, &defined);

  BIND(&defined);
  TNode<Number> integer_input =
      CAST(CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
  // A HeapNumber result means the index is out of Smi range.
  GotoIfNot(TaggedIsSmi(integer_input), range_error);
  result = CAST(integer_input);
  Goto(&negative_check);

  BIND(&negative_check);
  Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);

  BIND(&return_zero);
  result = SmiConstant(0);
  Goto(&done);

  BIND(&done);
  return result.value();
}
7939 :
7940 224 : TNode<Smi> CodeStubAssembler::ToSmiLength(TNode<Object> input,
7941 : TNode<Context> context,
7942 : Label* range_error) {
7943 224 : TVARIABLE(Smi, result);
7944 224 : Label to_integer(this), negative_check(this),
7945 224 : heap_number_negative_check(this), return_zero(this), done(this);
7946 :
7947 448 : GotoIfNot(TaggedIsSmi(input), &to_integer);
7948 : result = CAST(input);
7949 224 : Goto(&negative_check);
7950 :
7951 : BIND(&to_integer);
7952 : {
7953 224 : TNode<Number> integer_input = CAST(
7954 : CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
7955 448 : GotoIfNot(TaggedIsSmi(integer_input), &heap_number_negative_check);
7956 : result = CAST(integer_input);
7957 224 : Goto(&negative_check);
7958 :
7959 : // integer_input can still be a negative HeapNumber here.
7960 : BIND(&heap_number_negative_check);
7961 224 : TNode<HeapNumber> heap_number_input = CAST(integer_input);
7962 : Branch(IsTrue(CallBuiltin(Builtins::kLessThan, context, heap_number_input,
7963 672 : SmiConstant(0))),
7964 448 : &return_zero, range_error);
7965 : }
7966 :
7967 : BIND(&negative_check);
7968 672 : Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
7969 :
7970 : BIND(&return_zero);
7971 224 : result = SmiConstant(0);
7972 224 : Goto(&done);
7973 :
7974 : BIND(&done);
7975 224 : return result.value();
7976 : }
7977 :
// Inline ToLength: smis are clamped to >= 0 directly; everything else takes
// the ToLength builtin.
TNode<Number> CodeStubAssembler::ToLength_Inline(SloppyTNode<Context> context,
                                                 SloppyTNode<Object> input) {
  TNode<Smi> smi_zero = SmiConstant(0);
  return Select<Number>(
      TaggedIsSmi(input), [=] { return SmiMax(CAST(input), smi_zero); },
      [=] { return CAST(CallBuiltin(Builtins::kToLength, context, input)); });
}
7985 :
// Inline ToInteger: smis are already integers and pass through; otherwise the
// (optionally minus-zero-truncating, per {mode}) ToInteger builtin is called.
TNode<Number> CodeStubAssembler::ToInteger_Inline(
    SloppyTNode<Context> context, SloppyTNode<Object> input,
    ToIntegerTruncationMode mode) {
  Builtins::Name builtin = (mode == kNoTruncation)
                               ? Builtins::kToInteger
                               : Builtins::kToInteger_TruncateMinusZero;
  return Select<Number>(
      TaggedIsSmi(input), [=] { return CAST(input); },
      [=] { return CAST(CallBuiltin(builtin, context, input)); });
}
7996 :
// Full ToInteger implementation: loops until {input} becomes a Number (via
// NonNumberToNumber), then truncates HeapNumbers towards zero. NaN (and, when
// {mode} == kTruncateMinusZero, -0.0) yields Smi 0.
TNode<Number> CodeStubAssembler::ToInteger(SloppyTNode<Context> context,
                                           SloppyTNode<Object> input,
                                           ToIntegerTruncationMode mode) {
  // We might need to loop once for ToNumber conversion.
  TVARIABLE(Object, var_arg, input);
  Label loop(this, &var_arg), out(this);
  Goto(&loop);
  BIND(&loop);
  {
    // Shared entry points.
    Label return_zero(this, Label::kDeferred);

    // Load the current {arg} value.
    TNode<Object> arg = var_arg.value();

    // Check if {arg} is a Smi.
    GotoIf(TaggedIsSmi(arg), &out);

    // Check if {arg} is a HeapNumber.
    Label if_argisheapnumber(this),
        if_argisnotheapnumber(this, Label::kDeferred);
    Branch(IsHeapNumber(CAST(arg)), &if_argisheapnumber,
           &if_argisnotheapnumber);

    BIND(&if_argisheapnumber);
    {
      TNode<HeapNumber> arg_hn = CAST(arg);
      // Load the floating-point value of {arg}.
      Node* arg_value = LoadHeapNumberValue(arg_hn);

      // Check if {arg} is NaN.
      GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);

      // Truncate {arg} towards zero.
      TNode<Float64T> value = Float64Trunc(arg_value);

      if (mode == kTruncateMinusZero) {
        // Truncate -0.0 to 0.
        GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
      }

      var_arg = ChangeFloat64ToTagged(value);
      Goto(&out);
    }

    BIND(&if_argisnotheapnumber);
    {
      // Need to convert {arg} to a Number first.
      var_arg = UncheckedCast<Object>(
          CallBuiltin(Builtins::kNonNumberToNumber, context, arg));
      Goto(&loop);
    }

    BIND(&return_zero);
    var_arg = SmiConstant(0);
    Goto(&out);
  }

  BIND(&out);
  if (mode == kTruncateMinusZero) {
    CSA_ASSERT(this, IsNumberNormalized(CAST(var_arg.value())));
  }
  return CAST(var_arg.value());
}
8061 :
8062 34887 : TNode<Uint32T> CodeStubAssembler::DecodeWord32(SloppyTNode<Word32T> word32,
8063 : uint32_t shift, uint32_t mask) {
8064 : return UncheckedCast<Uint32T>(Word32Shr(
8065 104661 : Word32And(word32, Int32Constant(mask)), static_cast<int>(shift)));
8066 : }
8067 :
8068 19620 : TNode<UintPtrT> CodeStubAssembler::DecodeWord(SloppyTNode<WordT> word,
8069 : uint32_t shift, uint32_t mask) {
8070 : return Unsigned(
8071 58860 : WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift)));
8072 : }
8073 :
8074 392 : TNode<WordT> CodeStubAssembler::UpdateWord(TNode<WordT> word,
8075 : TNode<WordT> value, uint32_t shift,
8076 : uint32_t mask) {
8077 784 : TNode<WordT> encoded_value = WordShl(value, static_cast<int>(shift));
8078 392 : TNode<IntPtrT> inverted_mask = IntPtrConstant(~static_cast<intptr_t>(mask));
8079 : // Ensure the {value} fits fully in the mask.
8080 : CSA_ASSERT(this, WordEqual(WordAnd(encoded_value, inverted_mask),
8081 : IntPtrConstant(0)));
8082 784 : return WordOr(WordAnd(word, inverted_mask), encoded_value);
8083 : }
8084 :
// Stores {value} into the given stats counter, but only when native code
// counters are enabled and the counter is in use.
void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Node* counter_address =
        ExternalConstant(ExternalReference::Create(counter));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
                        Int32Constant(value));
  }
}
8093 :
// Adds {delta} (> 0) to the given stats counter via an unsynchronized
// read-modify-write, when native code counters are enabled.
void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
  DCHECK_GT(delta, 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Node* counter_address =
        ExternalConstant(ExternalReference::Create(counter));
    Node* value = Load(MachineType::Int32(), counter_address);
    value = Int32Add(value, Int32Constant(delta));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
  }
}
8104 :
// Subtracts {delta} (> 0) from the given stats counter; mirror image of
// IncrementCounter.
void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
  DCHECK_GT(delta, 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Node* counter_address =
        ExternalConstant(ExternalReference::Create(counter));
    Node* value = Load(MachineType::Int32(), counter_address);
    value = Int32Sub(value, Int32Constant(delta));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
  }
}
8115 :
// Adds {value} to {variable} in place, using Smi or IntPtr arithmetic as
// selected by {mode}; the DCHECKs ensure the variable's machine representation
// matches the chosen mode.
void CodeStubAssembler::Increment(Variable* variable, int value,
                                  ParameterMode mode) {
  DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
                 variable->rep() == MachineType::PointerRepresentation());
  DCHECK_IMPLIES(mode == SMI_PARAMETERS,
                 variable->rep() == MachineRepresentation::kTagged ||
                     variable->rep() == MachineRepresentation::kTaggedSigned);
  variable->Bind(IntPtrOrSmiAdd(variable->value(),
                                IntPtrOrSmiConstant(value, mode), mode));
}
8126 :
// Emits a never-taken (0 == 1) branch to {label}, marking it as used —
// presumably to appease the assembler's bound-but-unreachable label checks;
// confirm against CodeAssembler's label verification.
void CodeStubAssembler::Use(Label* label) {
  GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
}
8130 :
// Classifies a property {key} as either an array index (jump to
// {if_keyisindex} with the index in {var_index}) or a unique name (jump to
// {if_keyisunique} with the name in {var_unique}). Non-internalized strings go
// to {if_notinternalized} (or {if_bailout} if that is null); uncacheable
// array-index strings and unsupported keys go to {if_bailout}.
void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
                                  Variable* var_index, Label* if_keyisunique,
                                  Variable* var_unique, Label* if_bailout,
                                  Label* if_notinternalized) {
  DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
  DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
  Comment("TryToName");

  Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this),
      if_keyisother(this, Label::kDeferred);
  // Handle Smi and HeapNumber keys.
  var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
  Goto(if_keyisindex);

  BIND(&if_keyisnotindex);
  Node* key_map = LoadMap(key);
  var_unique->Bind(key);
  // Symbols are unique.
  GotoIf(IsSymbolMap(key_map), if_keyisunique);
  Node* key_instance_type = LoadMapInstanceType(key_map);
  // Miss if |key| is not a String.
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  GotoIfNot(IsStringInstanceType(key_instance_type), &if_keyisother);

  // |key| is a String. Check if it has a cached array index.
  Node* hash = LoadNameHashField(key);
  GotoIf(IsClearWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
         &if_hascachedindex);
  // No cached array index. If the string knows that it contains an index,
  // then it must be an uncacheable index. Handle this case in the runtime.
  GotoIf(IsClearWord32(hash, Name::kIsNotArrayIndexMask), if_bailout);
  // Check if we have a ThinString.
  GotoIf(InstanceTypeEqual(key_instance_type, THIN_STRING_TYPE),
         &if_thinstring);
  GotoIf(InstanceTypeEqual(key_instance_type, THIN_ONE_BYTE_STRING_TYPE),
         &if_thinstring);
  // Finally, check if |key| is internalized.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  GotoIf(IsSetWord32(key_instance_type, kIsNotInternalizedMask),
         if_notinternalized != nullptr ? if_notinternalized : if_bailout);
  Goto(if_keyisunique);

  BIND(&if_thinstring);
  // A ThinString forwards to its internalized actual string.
  var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
  Goto(if_keyisunique);

  BIND(&if_hascachedindex);
  var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
  Goto(if_keyisindex);

  BIND(&if_keyisother);
  // Oddball keys (undefined/null/...) are looked up by their string form.
  GotoIfNot(InstanceTypeEqual(key_instance_type, ODDBALL_TYPE), if_bailout);
  var_unique->Bind(LoadObjectField(key, Oddball::kToStringOffset));
  Goto(if_keyisunique);
}
8186 :
// Attempts to internalize {string} via a C call. On success jumps to
// {if_internalized} with the internalized string in {var_internalized}; a Smi
// result encodes either an array index (-> {if_index} via {var_index}) or one
// of the ResultSentinel values kNotFound/kUnsupported (-> {if_not_internalized}
// / {if_bailout}).
void CodeStubAssembler::TryInternalizeString(
    Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
    Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
  DCHECK(var_index->rep() == MachineType::PointerRepresentation());
  DCHECK_EQ(var_internalized->rep(), MachineRepresentation::kTagged);
  CSA_SLOW_ASSERT(this, IsString(string));
  Node* function =
      ExternalConstant(ExternalReference::try_internalize_string_function());
  Node* const isolate_ptr =
      ExternalConstant(ExternalReference::isolate_address(isolate()));
  Node* result =
      CallCFunction2(MachineType::AnyTagged(), MachineType::Pointer(),
                     MachineType::AnyTagged(), function, isolate_ptr, string);
  Label internalized(this);
  // Non-Smi result is the internalized string itself.
  GotoIf(TaggedIsNotSmi(result), &internalized);
  Node* word_result = SmiUntag(result);
  GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
         if_not_internalized);
  GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
         if_bailout);
  // Remaining Smi values are array indices.
  var_index->Bind(word_result);
  Goto(if_index);

  BIND(&internalized);
  var_internalized->Bind(result);
  Goto(if_internalized);
}
8214 :
8215 : template <typename Dictionary>
8216 32151 : TNode<IntPtrT> CodeStubAssembler::EntryToIndex(TNode<IntPtrT> entry,
8217 : int field_index) {
8218 : TNode<IntPtrT> entry_index =
8219 32151 : IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
8220 : return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
8221 64302 : field_index));
8222 : }
8223 :
// Loads the element at {index} (plus {additional_offset} bytes) from the
// given DescriptorArray, past its header.
TNode<MaybeObject> CodeStubAssembler::LoadDescriptorArrayElement(
    TNode<DescriptorArray> object, Node* index, int additional_offset) {
  return LoadArrayElement(object, DescriptorArray::kHeaderSize, index,
                          additional_offset);
}
8229 :
// Loads the property key stored at {key_index} in the descriptor array.
TNode<Name> CodeStubAssembler::LoadKeyByKeyIndex(
    TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
  return CAST(LoadDescriptorArrayElement(container, key_index, 0));
}
8234 :
// Loads the PropertyDetails (untagged) for the descriptor whose key lives at
// {key_index}, using the fixed key->details slot offset.
TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
    TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
  const int kKeyToDetails =
      DescriptorArray::ToDetailsIndex(0) - DescriptorArray::ToKeyIndex(0);
  return Unsigned(
      LoadAndUntagToWord32ArrayElement(container, DescriptorArray::kHeaderSize,
                                       key_index, kKeyToDetails * kTaggedSize));
}
8243 :
// Loads the value for the descriptor whose key lives at {key_index}, using
// the fixed key->value slot offset.
TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
    TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
  const int kKeyToValue =
      DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
  return CAST(LoadDescriptorArrayElement(container, key_index,
                                         kKeyToValue * kTaggedSize));
}
8251 :
// Loads the field type (value slot, possibly a weak reference, hence
// MaybeObject) for the descriptor whose key lives at {key_index}.
TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByKeyIndex(
    TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
  const int kKeyToValue =
      DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
  return LoadDescriptorArrayElement(container, key_index,
                                    kKeyToValue * kTaggedSize);
}
8259 :
// Converts a descriptor entry number into an element index within the
// descriptor array (entries are kEntrySize slots each).
TNode<IntPtrT> CodeStubAssembler::DescriptorEntryToIndex(
    TNode<IntPtrT> descriptor_entry) {
  return IntPtrMul(descriptor_entry,
                   IntPtrConstant(DescriptorArray::kEntrySize));
}
8265 :
// Loads the key of descriptor {descriptor_entry} (dynamic entry number).
TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
    TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
  return CAST(LoadDescriptorArrayElement(
      container, DescriptorEntryToIndex(descriptor_entry),
      DescriptorArray::ToKeyIndex(0) * kTaggedSize));
}
8272 :
// Loads the key of descriptor {descriptor_entry} (compile-time constant
// entry number, folded into the offset).
TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
    TNode<DescriptorArray> container, int descriptor_entry) {
  return CAST(LoadDescriptorArrayElement(
      container, IntPtrConstant(0),
      DescriptorArray::ToKeyIndex(descriptor_entry) * kTaggedSize));
}
8279 :
// Loads the PropertyDetails of descriptor {descriptor_entry} (dynamic entry
// number), untagged to a word32.
TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
    TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
  return Unsigned(LoadAndUntagToWord32ArrayElement(
      container, DescriptorArray::kHeaderSize,
      DescriptorEntryToIndex(descriptor_entry),
      DescriptorArray::ToDetailsIndex(0) * kTaggedSize));
}
8287 :
// Loads the PropertyDetails of descriptor {descriptor_entry} (compile-time
// constant entry number, folded into the offset).
TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
    TNode<DescriptorArray> container, int descriptor_entry) {
  return Unsigned(LoadAndUntagToWord32ArrayElement(
      container, DescriptorArray::kHeaderSize, IntPtrConstant(0),
      DescriptorArray::ToDetailsIndex(descriptor_entry) * kTaggedSize));
}
8294 :
// Loads the value of descriptor {descriptor_entry} (compile-time constant
// entry number, folded into the offset).
TNode<Object> CodeStubAssembler::LoadValueByDescriptorEntry(
    TNode<DescriptorArray> container, int descriptor_entry) {
  return CAST(LoadDescriptorArrayElement(
      container, IntPtrConstant(0),
      DescriptorArray::ToValueIndex(descriptor_entry) * kTaggedSize));
}
8301 :
// Loads the field type (value slot, possibly weak) of descriptor
// {descriptor_entry} (dynamic entry number).
TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByDescriptorEntry(
    TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
  return LoadDescriptorArrayElement(
      container, DescriptorEntryToIndex(descriptor_entry),
      DescriptorArray::ToValueIndex(0) * kTaggedSize);
}
8308 :
// Explicit instantiations of EntryToIndex for all dictionary types used
// elsewhere in this file and by other translation units.
template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NameDictionary>(
    TNode<IntPtrT>, int);
template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<GlobalDictionary>(
    TNode<IntPtrT>, int);
template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NumberDictionary>(
    TNode<IntPtrT>, int);
8315 :
8316 : // This must be kept in sync with HashTableBase::ComputeCapacity().
8317 1013 : TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
8318 : TNode<IntPtrT> at_least_space_for) {
8319 : TNode<IntPtrT> capacity = IntPtrRoundUpToPowerOfTwo32(
8320 2026 : IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
8321 2026 : return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
8322 : }
8323 :
8324 1630 : TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
8325 : SloppyTNode<IntPtrT> right) {
8326 : intptr_t left_constant;
8327 : intptr_t right_constant;
8328 2527 : if (ToIntPtrConstant(left, left_constant) &&
8329 897 : ToIntPtrConstant(right, right_constant)) {
8330 897 : return IntPtrConstant(std::max(left_constant, right_constant));
8331 : }
8332 : return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
8333 733 : right);
8334 : }
8335 :
8336 1009 : TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
8337 : SloppyTNode<IntPtrT> right) {
8338 : intptr_t left_constant;
8339 : intptr_t right_constant;
8340 1010 : if (ToIntPtrConstant(left, left_constant) &&
8341 1 : ToIntPtrConstant(right, right_constant)) {
8342 1 : return IntPtrConstant(std::min(left_constant, right_constant));
8343 : }
8344 : return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
8345 1008 : right);
8346 : }
8347 :
// NameDictionary stores keys directly (a Name or the hole), so loading the
// name is the identity.
template <>
TNode<HeapObject> CodeStubAssembler::LoadName<NameDictionary>(
    TNode<HeapObject> key) {
  CSA_ASSERT(this, Word32Or(IsTheHole(key), IsName(key)));
  return key;
}
8354 :
// GlobalDictionary stores PropertyCells as keys; the name lives in the
// cell's name field.
template <>
TNode<HeapObject> CodeStubAssembler::LoadName<GlobalDictionary>(
    TNode<HeapObject> key) {
  TNode<PropertyCell> property_cell = CAST(key);
  return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset));
}
8361 :
// Open-addressing probe over a Name/GlobalDictionary for {unique_name}.
// In kFindExisting mode: jumps to {if_found} with the key's backing-store
// index in {var_name_index}, or to {if_not_found} on hitting undefined.
// In kFindInsertionIndex mode: jumps to {if_not_found} at the first free slot
// (undefined or the hole), with its index in {var_name_index}. The first
// {inlined_probes} probes are fully unrolled before falling into the loop.
template <typename Dictionary>
void CodeStubAssembler::NameDictionaryLookup(
    TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
    TVariable<IntPtrT>* var_name_index, Label* if_not_found, int inlined_probes,
    LookupMode mode) {
  static_assert(std::is_same<Dictionary, NameDictionary>::value ||
                    std::is_same<Dictionary, GlobalDictionary>::value,
                "Unexpected NameDictionary");
  DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
  DCHECK_IMPLIES(mode == kFindInsertionIndex,
                 inlined_probes == 0 && if_found == nullptr);
  Comment("NameDictionaryLookup");

  // Capacity is a power of two, so (hash & mask) is the first probe slot.
  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
  TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
  TNode<WordT> hash = ChangeUint32ToWord(LoadNameHash(unique_name));

  // See Dictionary::FirstProbe().
  TNode<IntPtrT> count = IntPtrConstant(0);
  TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
  Node* undefined = UndefinedConstant();

  for (int i = 0; i < inlined_probes; i++) {
    TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
    *var_name_index = index;

    TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
    GotoIf(WordEqual(current, undefined), if_not_found);
    current = LoadName<Dictionary>(current);
    GotoIf(WordEqual(current, unique_name), if_found);

    // See Dictionary::NextProbe().
    count = IntPtrConstant(i + 1);
    entry = Signed(WordAnd(IntPtrAdd(entry, count), mask));
  }
  if (mode == kFindInsertionIndex) {
    // Appease the variable merging algorithm for "Goto(&loop)" below.
    *var_name_index = IntPtrConstant(0);
  }

  TVARIABLE(IntPtrT, var_count, count);
  TVARIABLE(IntPtrT, var_entry, entry);
  Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
  Label loop(this, 3, loop_vars);
  Goto(&loop);
  BIND(&loop);
  {
    TNode<IntPtrT> entry = var_entry.value();

    TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
    *var_name_index = index;

    TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
    GotoIf(WordEqual(current, undefined), if_not_found);
    if (mode == kFindExisting) {
      current = LoadName<Dictionary>(current);
      GotoIf(WordEqual(current, unique_name), if_found);
    } else {
      DCHECK_EQ(kFindInsertionIndex, mode);
      // Deleted entries (the hole) are reusable insertion slots.
      GotoIf(WordEqual(current, TheHoleConstant()), if_not_found);
    }

    // See Dictionary::NextProbe().
    Increment(&var_count);
    entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));

    var_entry = entry;
    Goto(&loop);
  }
}
8432 :
8433 : // Instantiate template methods to workaround GCC compilation issue.
8434 : template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
8435 : TNode<NameDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
8436 : int, LookupMode);
8437 : template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
8438 : TNode<GlobalDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
8439 : int, LookupMode);
8440 :
// CSA re-implementation of v8::internal::ComputeUnseededHash(): a fixed
// shift/xor/multiply mixing sequence over the low 32 bits of {key}, masked to
// 30 bits at the end. The constants must stay in sync with the C++ version.
Node* CodeStubAssembler::ComputeUnseededHash(Node* key) {
  // See v8::internal::ComputeUnseededHash()
  Node* hash = TruncateIntPtrToInt32(key);
  hash = Int32Add(Word32Xor(hash, Int32Constant(0xFFFFFFFF)),
                  Word32Shl(hash, Int32Constant(15)));
  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
  hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
  hash = Int32Mul(hash, Int32Constant(2057));
  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
  return Word32And(hash, Int32Constant(0x3FFFFFFF));
}
8453 :
// Computes the isolate-seeded integer hash of {key} by calling out to the
// C function ExternalReference::compute_integer_hash(). Returns a uint32 node.
Node* CodeStubAssembler::ComputeSeededHash(Node* key) {
  Node* const function_addr =
      ExternalConstant(ExternalReference::compute_integer_hash());
  Node* const isolate_ptr =
      ExternalConstant(ExternalReference::isolate_address(isolate()));

  MachineType type_ptr = MachineType::Pointer();
  MachineType type_uint32 = MachineType::Uint32();

  Node* const result =
      CallCFunction2(type_uint32, type_ptr, type_uint32, function_addr,
                     isolate_ptr, TruncateIntPtrToInt32(key));
  return result;
}
8468 :
// Open-addressing probe over a NumberDictionary for {intptr_index}. Keys may
// be stored as Smis or HeapNumbers, so each probe compares both forms. Jumps
// to {if_found} with the entry number in {var_entry}, or to {if_not_found}
// upon reaching an undefined slot; the-hole slots are skipped (deleted).
void CodeStubAssembler::NumberDictionaryLookup(
    TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
    Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
  CSA_ASSERT(this, IsNumberDictionary(dictionary));
  DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
  Comment("NumberDictionaryLookup");

  // Capacity is a power of two, so (hash & mask) is the first probe slot.
  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NumberDictionary>(dictionary));
  TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));

  TNode<WordT> hash = ChangeUint32ToWord(ComputeSeededHash(intptr_index));
  // Float form of the key, for comparing against HeapNumber-stored keys.
  Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);

  // See Dictionary::FirstProbe().
  TNode<IntPtrT> count = IntPtrConstant(0);
  TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));

  Node* undefined = UndefinedConstant();
  Node* the_hole = TheHoleConstant();

  TVARIABLE(IntPtrT, var_count, count);
  Variable* loop_vars[] = {&var_count, var_entry};
  Label loop(this, 2, loop_vars);
  *var_entry = entry;
  Goto(&loop);
  BIND(&loop);
  {
    TNode<IntPtrT> entry = var_entry->value();

    TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(entry);
    Node* current = LoadFixedArrayElement(dictionary, index);
    GotoIf(WordEqual(current, undefined), if_not_found);
    Label next_probe(this);
    {
      Label if_currentissmi(this), if_currentisnotsmi(this);
      Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
      BIND(&if_currentissmi);
      {
        Node* current_value = SmiUntag(current);
        Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
      }
      BIND(&if_currentisnotsmi);
      {
        // Skip deleted entries.
        GotoIf(WordEqual(current, the_hole), &next_probe);
        // Current must be the Number.
        Node* current_value = LoadHeapNumberValue(current);
        Branch(Float64Equal(current_value, key_as_float64), if_found,
               &next_probe);
      }
    }

    BIND(&next_probe);
    // See Dictionary::NextProbe().
    Increment(&var_count);
    entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));

    *var_entry = entry;
    Goto(&loop);
  }
}
8529 :
8530 280 : TNode<Object> CodeStubAssembler::BasicLoadNumberDictionaryElement(
8531 : TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
8532 : Label* not_data, Label* if_hole) {
8533 280 : TVARIABLE(IntPtrT, var_entry);
8534 280 : Label if_found(this);
8535 : NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
8536 280 : if_hole);
8537 : BIND(&if_found);
8538 :
8539 : // Check that the value is a data property.
8540 : TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
8541 : TNode<Uint32T> details =
8542 280 : LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
8543 : TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
8544 : // TODO(jkummerow): Support accessors without missing?
8545 840 : GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
8546 : // Finally, load the value.
8547 560 : return LoadValueByKeyIndex<NumberDictionary>(dictionary, index);
8548 : }
8549 :
8550 56 : void CodeStubAssembler::BasicStoreNumberDictionaryElement(
8551 : TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
8552 : TNode<Object> value, Label* not_data, Label* if_hole, Label* read_only) {
8553 56 : TVARIABLE(IntPtrT, var_entry);
8554 56 : Label if_found(this);
8555 : NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
8556 56 : if_hole);
8557 : BIND(&if_found);
8558 :
8559 : // Check that the value is a data property.
8560 : TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
8561 : TNode<Uint32T> details =
8562 56 : LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
8563 : TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
8564 : // TODO(jkummerow): Support accessors without missing?
8565 168 : GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
8566 :
8567 : // Check that the property is writeable.
8568 56 : GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
8569 112 : read_only);
8570 :
8571 : // Finally, store the value.
8572 : StoreValueByKeyIndex<NumberDictionary>(dictionary, index, value);
8573 56 : }
8574 :
8575 : template <class Dictionary>
8576 : void CodeStubAssembler::FindInsertionEntry(TNode<Dictionary> dictionary,
8577 : TNode<Name> key,
8578 : TVariable<IntPtrT>* var_key_index) {
8579 : UNREACHABLE();
8580 : }
8581 :
8582 : template <>
8583 784 : void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
8584 : TNode<NameDictionary> dictionary, TNode<Name> key,
8585 : TVariable<IntPtrT>* var_key_index) {
8586 784 : Label done(this);
8587 : NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
8588 784 : &done, 0, kFindInsertionIndex);
8589 784 : BIND(&done);
8590 784 : }
8591 :
8592 : template <class Dictionary>
8593 : void CodeStubAssembler::InsertEntry(TNode<Dictionary> dictionary,
8594 : TNode<Name> key, TNode<Object> value,
8595 : TNode<IntPtrT> index,
8596 : TNode<Smi> enum_index) {
8597 : UNREACHABLE(); // Use specializations instead.
8598 : }
8599 :
8600 : template <>
8601 784 : void CodeStubAssembler::InsertEntry<NameDictionary>(
8602 : TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
8603 : TNode<IntPtrT> index, TNode<Smi> enum_index) {
8604 : // Store name and value.
8605 : StoreFixedArrayElement(dictionary, index, name);
8606 : StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
8607 :
8608 : // Prepare details of the new property.
8609 : PropertyDetails d(kData, NONE, PropertyCellType::kNoCell);
8610 784 : enum_index =
8611 : SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
8612 : // We OR over the actual index below, so we expect the initial value to be 0.
8613 : DCHECK_EQ(0, d.dictionary_index());
8614 784 : TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index));
8615 :
8616 : // Private names must be marked non-enumerable.
8617 784 : Label not_private(this, &var_details);
8618 1568 : GotoIfNot(IsPrivateSymbol(name), ¬_private);
8619 : TNode<Smi> dont_enum =
8620 784 : SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
8621 784 : var_details = SmiOr(var_details.value(), dont_enum);
8622 784 : Goto(¬_private);
8623 : BIND(¬_private);
8624 :
8625 : // Finally, store the details.
8626 : StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
8627 : var_details.value());
8628 784 : }
8629 :
8630 : template <>
8631 0 : void CodeStubAssembler::InsertEntry<GlobalDictionary>(
8632 : TNode<GlobalDictionary> dictionary, TNode<Name> key, TNode<Object> value,
8633 : TNode<IntPtrT> index, TNode<Smi> enum_index) {
8634 0 : UNIMPLEMENTED();
8635 : }
8636 :
8637 : template <class Dictionary>
8638 784 : void CodeStubAssembler::Add(TNode<Dictionary> dictionary, TNode<Name> key,
8639 : TNode<Object> value, Label* bailout) {
8640 : CSA_ASSERT(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)));
8641 784 : TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
8642 784 : TNode<Smi> nof = GetNumberOfElements<Dictionary>(dictionary);
8643 784 : TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
8644 : // Require 33% to still be free after adding additional_elements.
8645 : // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
8646 : // But that's OK here because it's only used for a comparison.
8647 784 : TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
8648 1568 : GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
8649 : // Require rehashing if more than 50% of free elements are deleted elements.
8650 784 : TNode<Smi> deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
8651 : CSA_ASSERT(this, SmiAbove(capacity, new_nof));
8652 784 : TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
8653 1568 : GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
8654 :
8655 784 : TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
8656 784 : TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
8657 : TNode<Smi> max_enum_index =
8658 784 : SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
8659 1568 : GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
8660 :
8661 : // No more bailouts after this point.
8662 : // Operations from here on can have side effects.
8663 :
8664 : SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
8665 : SetNumberOfElements<Dictionary>(dictionary, new_nof);
8666 :
8667 : TVARIABLE(IntPtrT, var_key_index);
8668 784 : FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
8669 784 : InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
8670 : enum_index);
8671 784 : }
8672 :
8673 : template void CodeStubAssembler::Add<NameDictionary>(TNode<NameDictionary>,
8674 : TNode<Name>, TNode<Object>,
8675 : Label*);
8676 :
8677 : template <typename Array>
8678 2367 : void CodeStubAssembler::LookupLinear(TNode<Name> unique_name,
8679 : TNode<Array> array,
8680 : TNode<Uint32T> number_of_valid_entries,
8681 : Label* if_found,
8682 : TVariable<IntPtrT>* var_name_index,
8683 : Label* if_not_found) {
8684 : static_assert(std::is_base_of<FixedArray, Array>::value ||
8685 : std::is_base_of<WeakFixedArray, Array>::value ||
8686 : std::is_base_of<DescriptorArray, Array>::value,
8687 : "T must be a descendant of FixedArray or a WeakFixedArray");
8688 2367 : Comment("LookupLinear");
8689 2367 : TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
8690 2367 : TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
8691 : TNode<IntPtrT> last_exclusive = IntPtrAdd(
8692 : first_inclusive,
8693 4734 : IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));
8694 :
8695 7101 : BuildFastLoop(last_exclusive, first_inclusive,
8696 2367 : [=](SloppyTNode<IntPtrT> name_index) {
8697 : TNode<MaybeObject> element =
8698 2367 : LoadArrayElement(array, Array::kHeaderSize, name_index);
8699 : TNode<Name> candidate_name = CAST(element);
8700 2367 : *var_name_index = name_index;
8701 4734 : GotoIf(WordEqual(candidate_name, unique_name), if_found);
8702 2367 : },
8703 : -Array::kEntrySize, INTPTR_PARAMETERS, IndexAdvanceMode::kPre);
8704 2367 : Goto(if_not_found);
8705 2367 : }
8706 :
8707 : template <>
8708 0 : TNode<Uint32T> CodeStubAssembler::NumberOfEntries<DescriptorArray>(
8709 : TNode<DescriptorArray> descriptors) {
8710 4164 : return Unsigned(LoadNumberOfDescriptors(descriptors));
8711 : }
8712 :
8713 : template <>
8714 570 : TNode<Uint32T> CodeStubAssembler::NumberOfEntries<TransitionArray>(
8715 : TNode<TransitionArray> transitions) {
8716 : TNode<IntPtrT> length = LoadAndUntagWeakFixedArrayLength(transitions);
8717 : return Select<Uint32T>(
8718 1140 : UintPtrLessThan(length, IntPtrConstant(TransitionArray::kFirstIndex)),
8719 1140 : [=] { return Unsigned(Int32Constant(0)); },
8720 570 : [=] {
8721 : return Unsigned(LoadAndUntagToWord32ArrayElement(
8722 : transitions, WeakFixedArray::kHeaderSize,
8723 1710 : IntPtrConstant(TransitionArray::kTransitionLengthIndex)));
8724 2850 : });
8725 : }
8726 :
8727 : template <typename Array>
8728 11601 : TNode<IntPtrT> CodeStubAssembler::EntryIndexToIndex(
8729 : TNode<Uint32T> entry_index) {
8730 11601 : TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
8731 11601 : TNode<Word32T> index = Int32Mul(entry_index, entry_size);
8732 11601 : return ChangeInt32ToIntPtr(index);
8733 : }
8734 :
8735 : template <typename Array>
8736 2703 : TNode<IntPtrT> CodeStubAssembler::ToKeyIndex(TNode<Uint32T> entry_index) {
8737 : return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
8738 5406 : EntryIndexToIndex<Array>(entry_index));
8739 : }
8740 :
8741 : template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<DescriptorArray>(
8742 : TNode<Uint32T>);
8743 : template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<TransitionArray>(
8744 : TNode<Uint32T>);
8745 :
8746 : template <>
8747 4164 : TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<DescriptorArray>(
8748 : TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8749 : TNode<Uint32T> details =
8750 4164 : DescriptorArrayGetDetails(descriptors, descriptor_number);
8751 4164 : return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
8752 : }
8753 :
8754 : template <>
8755 0 : TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<TransitionArray>(
8756 : TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
8757 0 : return transition_number;
8758 : }
8759 :
8760 : template <typename Array>
8761 4734 : TNode<Name> CodeStubAssembler::GetKey(TNode<Array> array,
8762 : TNode<Uint32T> entry_index) {
8763 : static_assert(std::is_base_of<TransitionArray, Array>::value ||
8764 : std::is_base_of<DescriptorArray, Array>::value,
8765 : "T must be a descendant of DescriptorArray or TransitionArray");
8766 : const int key_offset = Array::ToKeyIndex(0) * kTaggedSize;
8767 : TNode<MaybeObject> element =
8768 : LoadArrayElement(array, Array::kHeaderSize,
8769 9468 : EntryIndexToIndex<Array>(entry_index), key_offset);
8770 4734 : return CAST(element);
8771 : }
8772 :
8773 : template TNode<Name> CodeStubAssembler::GetKey<DescriptorArray>(
8774 : TNode<DescriptorArray>, TNode<Uint32T>);
8775 : template TNode<Name> CodeStubAssembler::GetKey<TransitionArray>(
8776 : TNode<TransitionArray>, TNode<Uint32T>);
8777 :
8778 4164 : TNode<Uint32T> CodeStubAssembler::DescriptorArrayGetDetails(
8779 : TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8780 : const int details_offset = DescriptorArray::ToDetailsIndex(0) * kTaggedSize;
8781 : return Unsigned(LoadAndUntagToWord32ArrayElement(
8782 : descriptors, DescriptorArray::kHeaderSize,
8783 12492 : EntryIndexToIndex<DescriptorArray>(descriptor_number), details_offset));
8784 : }
8785 :
8786 : template <typename Array>
8787 2367 : void CodeStubAssembler::LookupBinary(TNode<Name> unique_name,
8788 : TNode<Array> array,
8789 : TNode<Uint32T> number_of_valid_entries,
8790 : Label* if_found,
8791 : TVariable<IntPtrT>* var_name_index,
8792 : Label* if_not_found) {
8793 2367 : Comment("LookupBinary");
8794 4734 : TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)));
8795 : TNode<Uint32T> limit =
8796 5019 : Unsigned(Int32Sub(NumberOfEntries<Array>(array), Int32Constant(1)));
8797 : TVARIABLE(Uint32T, var_high, limit);
8798 : TNode<Uint32T> hash = LoadNameHashField(unique_name);
8799 : CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));
8800 :
8801 : // Assume non-empty array.
8802 : CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));
8803 :
8804 7101 : Label binary_loop(this, {&var_high, &var_low});
8805 2367 : Goto(&binary_loop);
8806 : BIND(&binary_loop);
8807 : {
8808 : // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
8809 : TNode<Uint32T> mid = Unsigned(
8810 : Int32Add(var_low.value(),
8811 7101 : Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1)));
8812 : // mid_name = array->GetSortedKey(mid).
8813 2367 : TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
8814 2367 : TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);
8815 :
8816 : TNode<Uint32T> mid_hash = LoadNameHashField(mid_name);
8817 :
8818 2367 : Label mid_greater(this), mid_less(this), merge(this);
8819 4734 : Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
8820 : BIND(&mid_greater);
8821 : {
8822 : var_high = mid;
8823 2367 : Goto(&merge);
8824 : }
8825 : BIND(&mid_less);
8826 : {
8827 7101 : var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
8828 2367 : Goto(&merge);
8829 : }
8830 : BIND(&merge);
8831 7101 : GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
8832 : }
8833 :
8834 2367 : Label scan_loop(this, &var_low);
8835 2367 : Goto(&scan_loop);
8836 : BIND(&scan_loop);
8837 : {
8838 4734 : GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);
8839 :
8840 : TNode<Uint32T> sort_index =
8841 2082 : GetSortedKeyIndex<Array>(array, var_low.value());
8842 2367 : TNode<Name> current_name = GetKey<Array>(array, sort_index);
8843 : TNode<Uint32T> current_hash = LoadNameHashField(current_name);
8844 4734 : GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
8845 : Label next(this);
8846 2367 : GotoIf(WordNotEqual(current_name, unique_name), &next);
8847 4734 : GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
8848 4734 : if_not_found);
8849 2367 : *var_name_index = ToKeyIndex<Array>(sort_index);
8850 2367 : Goto(if_found);
8851 :
8852 : BIND(&next);
8853 7101 : var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
8854 2367 : Goto(&scan_loop);
8855 : }
8856 2367 : }
8857 :
8858 112 : void CodeStubAssembler::DescriptorArrayForEach(
8859 : VariableList& variable_list, TNode<Uint32T> start_descriptor,
8860 : TNode<Uint32T> end_descriptor, const ForEachDescriptorBodyFunction& body) {
8861 112 : TNode<IntPtrT> start_index = ToKeyIndex<DescriptorArray>(start_descriptor);
8862 112 : TNode<IntPtrT> end_index = ToKeyIndex<DescriptorArray>(end_descriptor);
8863 :
8864 : BuildFastLoop(variable_list, start_index, end_index,
8865 : [=](Node* index) {
8866 : TNode<IntPtrT> descriptor_key_index =
8867 112 : TNode<IntPtrT>::UncheckedCast(index);
8868 112 : body(descriptor_key_index);
8869 : },
8870 : DescriptorArray::kEntrySize, INTPTR_PARAMETERS,
8871 336 : IndexAdvanceMode::kPost);
8872 112 : }
8873 :
8874 112 : void CodeStubAssembler::ForEachEnumerableOwnProperty(
8875 : TNode<Context> context, TNode<Map> map, TNode<JSObject> object,
8876 : const ForEachKeyValueFunction& body, Label* bailout) {
8877 112 : TNode<Int32T> type = LoadMapInstanceType(map);
8878 112 : TNode<Uint32T> bit_field3 = EnsureOnlyHasSimpleProperties(map, type, bailout);
8879 :
8880 112 : TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
8881 : TNode<Uint32T> nof_descriptors =
8882 112 : DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3);
8883 :
8884 112 : TVARIABLE(BoolT, var_stable, Int32TrueConstant());
8885 224 : VariableList list({&var_stable}, zone());
8886 :
8887 : DescriptorArrayForEach(
8888 : list, Unsigned(Int32Constant(0)), nof_descriptors,
8889 784 : [=, &var_stable](TNode<IntPtrT> descriptor_key_index) {
8890 : TNode<Name> next_key =
8891 112 : LoadKeyByKeyIndex(descriptors, descriptor_key_index);
8892 :
8893 224 : TVARIABLE(Object, var_value, SmiConstant(0));
8894 336 : Label callback(this), next_iteration(this);
8895 :
8896 : {
8897 112 : TVARIABLE(Map, var_map);
8898 112 : TVARIABLE(HeapObject, var_meta_storage);
8899 112 : TVARIABLE(IntPtrT, var_entry);
8900 112 : TVARIABLE(Uint32T, var_details);
8901 224 : Label if_found(this);
8902 :
8903 336 : Label if_found_fast(this), if_found_dict(this);
8904 :
8905 336 : Label if_stable(this), if_not_stable(this);
8906 224 : Branch(var_stable.value(), &if_stable, &if_not_stable);
8907 112 : BIND(&if_stable);
8908 : {
8909 : // Directly decode from the descriptor array if |object| did not
8910 : // change shape.
8911 : var_map = map;
8912 : var_meta_storage = descriptors;
8913 : var_entry = Signed(descriptor_key_index);
8914 112 : Goto(&if_found_fast);
8915 : }
8916 112 : BIND(&if_not_stable);
8917 : {
8918 : // If the map did change, do a slower lookup. We are still
8919 : // guaranteed that the object has a simple shape, and that the key
8920 : // is a name.
8921 224 : var_map = LoadMap(object);
8922 : TryLookupPropertyInSimpleObject(
8923 : object, var_map.value(), next_key, &if_found_fast,
8924 112 : &if_found_dict, &var_meta_storage, &var_entry, &next_iteration);
8925 : }
8926 :
8927 112 : BIND(&if_found_fast);
8928 : {
8929 : TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
8930 : TNode<IntPtrT> name_index = var_entry.value();
8931 :
8932 : // Skip non-enumerable properties.
8933 112 : var_details = LoadDetailsByKeyIndex(descriptors, name_index);
8934 : GotoIf(IsSetWord32(var_details.value(),
8935 224 : PropertyDetails::kAttributesDontEnumMask),
8936 224 : &next_iteration);
8937 :
8938 : LoadPropertyFromFastObject(object, var_map.value(), descriptors,
8939 : name_index, var_details.value(),
8940 112 : &var_value);
8941 112 : Goto(&if_found);
8942 : }
8943 112 : BIND(&if_found_dict);
8944 : {
8945 : TNode<NameDictionary> dictionary = CAST(var_meta_storage.value());
8946 : TNode<IntPtrT> entry = var_entry.value();
8947 :
8948 : TNode<Uint32T> details =
8949 112 : LoadDetailsByKeyIndex<NameDictionary>(dictionary, entry);
8950 : // Skip non-enumerable properties.
8951 : GotoIf(
8952 224 : IsSetWord32(details, PropertyDetails::kAttributesDontEnumMask),
8953 224 : &next_iteration);
8954 :
8955 : var_details = details;
8956 112 : var_value = LoadValueByKeyIndex<NameDictionary>(dictionary, entry);
8957 112 : Goto(&if_found);
8958 : }
8959 :
8960 : // Here we have details and value which could be an accessor.
8961 112 : BIND(&if_found);
8962 : {
8963 112 : Label slow_load(this, Label::kDeferred);
8964 :
8965 112 : var_value = CallGetterIfAccessor(var_value.value(),
8966 : var_details.value(), context,
8967 : object, &slow_load, kCallJSGetter);
8968 112 : Goto(&callback);
8969 :
8970 112 : BIND(&slow_load);
8971 112 : var_value =
8972 : CallRuntime(Runtime::kGetProperty, context, object, next_key);
8973 112 : Goto(&callback);
8974 :
8975 112 : BIND(&callback);
8976 112 : body(next_key, var_value.value());
8977 :
8978 : // Check if |object| is still stable, i.e. we can proceed using
8979 : // property details from preloaded |descriptors|.
8980 336 : var_stable =
8981 112 : Select<BoolT>(var_stable.value(),
8982 336 : [=] { return WordEqual(LoadMap(object), map); },
8983 112 : [=] { return Int32FalseConstant(); });
8984 :
8985 112 : Goto(&next_iteration);
8986 : }
8987 : }
8988 :
8989 112 : BIND(&next_iteration);
8990 672 : });
8991 112 : }
8992 :
8993 2082 : void CodeStubAssembler::DescriptorLookup(
8994 : SloppyTNode<Name> unique_name, SloppyTNode<DescriptorArray> descriptors,
8995 : SloppyTNode<Uint32T> bitfield3, Label* if_found,
8996 : TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
8997 2082 : Comment("DescriptorArrayLookup");
8998 2082 : TNode<Uint32T> nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
8999 : Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
9000 2082 : var_name_index, if_not_found);
9001 2082 : }
9002 :
9003 285 : void CodeStubAssembler::TransitionLookup(
9004 : SloppyTNode<Name> unique_name, SloppyTNode<TransitionArray> transitions,
9005 : Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
9006 285 : Comment("TransitionArrayLookup");
9007 : TNode<Uint32T> number_of_valid_transitions =
9008 285 : NumberOfEntries<TransitionArray>(transitions);
9009 : Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
9010 285 : if_found, var_name_index, if_not_found);
9011 285 : }
9012 :
9013 : template <typename Array>
9014 2367 : void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
9015 : TNode<Uint32T> number_of_valid_entries,
9016 : Label* if_found,
9017 : TVariable<IntPtrT>* var_name_index,
9018 : Label* if_not_found) {
9019 2367 : Comment("ArrayLookup");
9020 2367 : if (!number_of_valid_entries) {
9021 0 : number_of_valid_entries = NumberOfEntries(array);
9022 : }
9023 7101 : GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
9024 2367 : Label linear_search(this), binary_search(this);
9025 : const int kMaxElementsForLinearSearch = 32;
9026 2367 : Branch(Uint32LessThanOrEqual(number_of_valid_entries,
9027 4734 : Int32Constant(kMaxElementsForLinearSearch)),
9028 4734 : &linear_search, &binary_search);
9029 : BIND(&linear_search);
9030 : {
9031 2367 : LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
9032 : var_name_index, if_not_found);
9033 : }
9034 : BIND(&binary_search);
9035 : {
9036 2367 : LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
9037 : var_name_index, if_not_found);
9038 2367 : }
9039 2367 : }
9040 :
9041 56 : TNode<BoolT> CodeStubAssembler::IsSimpleObjectMap(TNode<Map> map) {
9042 : uint32_t mask =
9043 : Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
9044 : // !IsSpecialReceiverType && !IsNamedInterceptor && !IsAccessCheckNeeded
9045 : return Select<BoolT>(
9046 112 : IsSpecialReceiverInstanceType(LoadMapInstanceType(map)),
9047 56 : [=] { return Int32FalseConstant(); },
9048 392 : [=] { return IsClearWord32(LoadMapBitField(map), mask); });
9049 : }
9050 :
9051 1634 : void CodeStubAssembler::TryLookupPropertyInSimpleObject(
9052 : TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
9053 : Label* if_found_fast, Label* if_found_dict,
9054 : TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
9055 : Label* if_not_found) {
9056 : CSA_ASSERT(this, IsSimpleObjectMap(map));
9057 :
9058 1634 : TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
9059 3268 : Label if_isfastmap(this), if_isslowmap(this);
9060 : Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
9061 1634 : &if_isfastmap);
9062 : BIND(&if_isfastmap);
9063 : {
9064 1634 : TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
9065 : *var_meta_storage = descriptors;
9066 :
9067 : DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
9068 1634 : var_name_index, if_not_found);
9069 : }
9070 : BIND(&if_isslowmap);
9071 : {
9072 1634 : TNode<NameDictionary> dictionary = CAST(LoadSlowProperties(object));
9073 : *var_meta_storage = dictionary;
9074 :
9075 : NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
9076 1634 : var_name_index, if_not_found);
9077 1634 : }
9078 1634 : }
9079 :
9080 1522 : void CodeStubAssembler::TryLookupProperty(
9081 : SloppyTNode<JSObject> object, SloppyTNode<Map> map,
9082 : SloppyTNode<Int32T> instance_type, SloppyTNode<Name> unique_name,
9083 : Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
9084 : TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
9085 : Label* if_not_found, Label* if_bailout) {
9086 1522 : Label if_objectisspecial(this);
9087 3044 : GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);
9088 :
9089 : TryLookupPropertyInSimpleObject(object, map, unique_name, if_found_fast,
9090 : if_found_dict, var_meta_storage,
9091 1522 : var_name_index, if_not_found);
9092 :
9093 : BIND(&if_objectisspecial);
9094 : {
9095 : // Handle global object here and bailout for other special objects.
9096 : GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
9097 3044 : if_bailout);
9098 :
9099 : // Handle interceptors and access checks in runtime.
9100 1522 : TNode<Int32T> bit_field = LoadMapBitField(map);
9101 : int mask =
9102 : Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
9103 3044 : GotoIf(IsSetWord32(bit_field, mask), if_bailout);
9104 :
9105 1522 : TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(object));
9106 : *var_meta_storage = dictionary;
9107 :
9108 : NameDictionaryLookup<GlobalDictionary>(
9109 1522 : dictionary, unique_name, if_found_global, var_name_index, if_not_found);
9110 1522 : }
9111 1522 : }
9112 :
9113 845 : void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
9114 : Node* instance_type,
9115 : Node* unique_name, Label* if_found,
9116 : Label* if_not_found,
9117 : Label* if_bailout) {
9118 845 : Comment("TryHasOwnProperty");
9119 : TVARIABLE(HeapObject, var_meta_storage);
9120 : TVARIABLE(IntPtrT, var_name_index);
9121 :
9122 845 : Label if_found_global(this);
9123 : TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
9124 : &if_found_global, &var_meta_storage, &var_name_index,
9125 845 : if_not_found, if_bailout);
9126 :
9127 : BIND(&if_found_global);
9128 : {
9129 845 : VARIABLE(var_value, MachineRepresentation::kTagged);
9130 1690 : VARIABLE(var_details, MachineRepresentation::kWord32);
9131 : // Check if the property cell is not deleted.
9132 : LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
9133 : var_name_index.value(), &var_value,
9134 845 : &var_details, if_not_found);
9135 1690 : Goto(if_found);
9136 : }
9137 845 : }
9138 :
9139 392 : Node* CodeStubAssembler::GetMethod(Node* context, Node* object,
9140 : Handle<Name> name,
9141 : Label* if_null_or_undefined) {
9142 784 : Node* method = GetProperty(context, object, name);
9143 :
9144 784 : GotoIf(IsUndefined(method), if_null_or_undefined);
9145 784 : GotoIf(IsNull(method), if_null_or_undefined);
9146 :
9147 392 : return method;
9148 : }
9149 :
9150 1069 : void CodeStubAssembler::LoadPropertyFromFastObject(
9151 : Node* object, Node* map, TNode<DescriptorArray> descriptors,
9152 : Node* name_index, Variable* var_details, Variable* var_value) {
9153 : DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
9154 : DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
9155 :
9156 : Node* details =
9157 : LoadDetailsByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index));
9158 1069 : var_details->Bind(details);
9159 :
9160 : LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
9161 1069 : var_value);
9162 1069 : }
9163 :
9164 1293 : void CodeStubAssembler::LoadPropertyFromFastObject(
9165 : Node* object, Node* map, TNode<DescriptorArray> descriptors,
9166 : Node* name_index, Node* details, Variable* var_value) {
9167 1293 : Comment("[ LoadPropertyFromFastObject");
9168 :
9169 : Node* location = DecodeWord32<PropertyDetails::LocationField>(details);
9170 :
9171 1293 : Label if_in_field(this), if_in_descriptor(this), done(this);
9172 2586 : Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
9173 2586 : &if_in_descriptor);
9174 : BIND(&if_in_field);
9175 : {
9176 : Node* field_index =
9177 2586 : DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
9178 : Node* representation =
9179 : DecodeWord32<PropertyDetails::RepresentationField>(details);
9180 :
9181 2586 : field_index =
9182 2586 : IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
9183 2586 : Node* instance_size_in_words = LoadMapInstanceSizeInWords(map);
9184 :
9185 1293 : Label if_inobject(this), if_backing_store(this);
9186 2586 : VARIABLE(var_double_value, MachineRepresentation::kFloat64);
9187 1293 : Label rebox_double(this, &var_double_value);
9188 1293 : Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
9189 2586 : &if_backing_store);
9190 : BIND(&if_inobject);
9191 : {
9192 1293 : Comment("if_inobject");
9193 : Node* field_offset = TimesTaggedSize(field_index);
9194 :
9195 1293 : Label if_double(this), if_tagged(this);
9196 : Branch(Word32NotEqual(representation,
9197 2586 : Int32Constant(Representation::kDouble)),
9198 2586 : &if_tagged, &if_double);
9199 : BIND(&if_tagged);
9200 : {
9201 1293 : var_value->Bind(LoadObjectField(object, field_offset));
9202 1293 : Goto(&done);
9203 : }
9204 : BIND(&if_double);
9205 : {
9206 : if (FLAG_unbox_double_fields) {
9207 : var_double_value.Bind(
9208 1293 : LoadObjectField(object, field_offset, MachineType::Float64()));
9209 : } else {
9210 : Node* mutable_heap_number = LoadObjectField(object, field_offset);
9211 : var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
9212 : }
9213 1293 : Goto(&rebox_double);
9214 1293 : }
9215 : }
9216 : BIND(&if_backing_store);
9217 : {
9218 1293 : Comment("if_backing_store");
9219 1293 : TNode<HeapObject> properties = LoadFastProperties(object);
9220 2586 : field_index = IntPtrSub(field_index, instance_size_in_words);
9221 2586 : Node* value = LoadPropertyArrayElement(CAST(properties), field_index);
9222 :
9223 1293 : Label if_double(this), if_tagged(this);
9224 : Branch(Word32NotEqual(representation,
9225 2586 : Int32Constant(Representation::kDouble)),
9226 2586 : &if_tagged, &if_double);
9227 : BIND(&if_tagged);
9228 : {
9229 1293 : var_value->Bind(value);
9230 1293 : Goto(&done);
9231 : }
9232 : BIND(&if_double);
9233 : {
9234 2586 : var_double_value.Bind(LoadHeapNumberValue(value));
9235 1293 : Goto(&rebox_double);
9236 1293 : }
9237 : }
9238 : BIND(&rebox_double);
9239 : {
9240 1293 : Comment("rebox_double");
9241 3879 : Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
9242 1293 : var_value->Bind(heap_number);
9243 1293 : Goto(&done);
9244 1293 : }
9245 : }
9246 : BIND(&if_in_descriptor);
9247 : {
9248 : var_value->Bind(
9249 2586 : LoadValueByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index)));
9250 1293 : Goto(&done);
9251 : }
9252 : BIND(&done);
9253 :
9254 2586 : Comment("] LoadPropertyFromFastObject");
9255 1293 : }
9256 :
9257 2413 : void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
9258 : Node* name_index,
9259 : Variable* var_details,
9260 : Variable* var_value) {
9261 2413 : Comment("LoadPropertyFromNameDictionary");
9262 : CSA_ASSERT(this, IsNameDictionary(dictionary));
9263 :
9264 : var_details->Bind(
9265 4826 : LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
9266 4826 : var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
9267 :
9268 2413 : Comment("] LoadPropertyFromNameDictionary");
9269 2413 : }
9270 :
      : // Loads the value and PropertyDetails for the GlobalDictionary entry at
      : // |name_index|. Global properties are stored indirectly through a
      : // PropertyCell; jumps to |if_deleted| when the cell holds the hole
      : // (i.e. the property has been deleted).
9271 1298 : void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
9272 : Node* name_index,
9273 : Variable* var_details,
9274 : Variable* var_value,
9275 : Label* if_deleted) {
9276 1298 : Comment("[ LoadPropertyFromGlobalDictionary");
9277 : CSA_ASSERT(this, IsGlobalDictionary(dictionary));
9278 :
      : // The dictionary slot holds a PropertyCell, not the value itself.
9279 : Node* property_cell = LoadFixedArrayElement(CAST(dictionary), name_index);
9280 : CSA_ASSERT(this, IsPropertyCell(property_cell));
9281 :
9282 : Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
      : // The hole in the cell's value slot marks a deleted property.
9283 1298 : GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);
9284 :
9285 1298 : var_value->Bind(value);
9286 :
9287 : Node* details = LoadAndUntagToWord32ObjectField(property_cell,
9288 2596 : PropertyCell::kDetailsOffset);
9289 1298 : var_details->Bind(details);
9290 :
9291 1298 : Comment("] LoadPropertyFromGlobalDictionary");
9292 1298 : }
9293 :
9294 : // |value| is the property backing store's contents, which is either a value
9295 : // or an accessor pair, as specified by |details|.
9296 : // Returns either the original value, or the result of the getter call.
      : //
      : // Three cases, selected from |details| and the dynamic type of |value|:
      : //  - data property: return |value| unchanged;
      : //  - AccessorPair: call the JS getter (when mode == kCallJSGetter);
      : //  - AccessorInfo: emulate the few C++ accessors handled inline
      : //    (JSArray "length", JSFunction "prototype", String-wrapper "length"),
      : //    otherwise jump to |if_bailout|.
9297 3813 : TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
9298 : Node* value, Node* details, Node* context, Node* receiver,
9299 : Label* if_bailout, GetOwnPropertyMode mode) {
9300 3813 : VARIABLE(var_value, MachineRepresentation::kTagged, value);
9301 3813 : Label done(this), if_accessor_info(this, Label::kDeferred);
9302 :
      : // Data properties are returned as-is.
9303 : Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
9304 11439 : GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);
9305 :
9306 : // Accessor case.
9307 7626 : GotoIfNot(IsAccessorPair(value), &if_accessor_info);
9308 :
9309 : // AccessorPair case.
9310 : {
9311 3813 : if (mode == kCallJSGetter) {
9312 : Node* accessor_pair = value;
9313 : Node* getter =
9314 : LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
9315 7178 : Node* getter_map = LoadMap(getter);
9316 7178 : Node* instance_type = LoadMapInstanceType(getter_map);
9317 : // FunctionTemplateInfo getters are not supported yet.
9318 3589 : GotoIf(InstanceTypeEqual(instance_type, FUNCTION_TEMPLATE_INFO_TYPE),
9319 7178 : if_bailout);
9320 :
9321 : // Return undefined if the {getter} is not callable.
9322 3589 : var_value.Bind(UndefinedConstant());
9323 7178 : GotoIfNot(IsCallableMap(getter_map), &done);
9324 :
9325 : // Call the accessor.
9326 3589 : Callable callable = CodeFactory::Call(isolate());
9327 3589 : Node* result = CallJS(callable, context, getter, receiver);
9328 3589 : var_value.Bind(result);
9329 : }
      : // When mode != kCallJSGetter the raw AccessorPair is returned unchanged.
9330 3813 : Goto(&done);
9331 : }
9332 :
9333 : // AccessorInfo case.
9334 : BIND(&if_accessor_info);
9335 : {
9336 : Node* accessor_info = value;
9337 : CSA_ASSERT(this, IsAccessorInfo(value));
9338 : CSA_ASSERT(this, TaggedIsNotSmi(receiver));
9339 3813 : Label if_array(this), if_function(this), if_value(this);
9340 :
9341 : // Dispatch based on {receiver} instance type.
9342 7626 : Node* receiver_map = LoadMap(receiver);
9343 7626 : Node* receiver_instance_type = LoadMapInstanceType(receiver_map);
9344 3813 : GotoIf(IsJSArrayInstanceType(receiver_instance_type), &if_array);
9345 3813 : GotoIf(IsJSFunctionInstanceType(receiver_instance_type), &if_function);
9346 : Branch(IsJSValueInstanceType(receiver_instance_type), &if_value,
9347 3813 : if_bailout);
9348 :
9349 : // JSArray AccessorInfo case.
9350 : BIND(&if_array);
9351 : {
9352 : // We only deal with the "length" accessor on JSArray.
9353 : GotoIfNot(IsLengthString(
9354 3813 : LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
9355 7626 : if_bailout);
9356 7626 : var_value.Bind(LoadJSArrayLength(receiver));
9357 3813 : Goto(&done);
9358 : }
9359 :
9360 : // JSFunction AccessorInfo case.
9361 : BIND(&if_function);
9362 : {
9363 : // We only deal with the "prototype" accessor on JSFunction here.
9364 : GotoIfNot(IsPrototypeString(
9365 3813 : LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
9366 7626 : if_bailout);
9367 :
9368 : GotoIfPrototypeRequiresRuntimeLookup(CAST(receiver), CAST(receiver_map),
9369 3813 : if_bailout);
9370 3813 : var_value.Bind(LoadJSFunctionPrototype(receiver, if_bailout));
9371 3813 : Goto(&done);
9372 : }
9373 :
9374 : // JSValue AccessorInfo case.
9375 : BIND(&if_value);
9376 : {
9377 : // We only deal with the "length" accessor on JSValue string wrappers.
9378 : GotoIfNot(IsLengthString(
9379 3813 : LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
9380 7626 : if_bailout);
9381 : Node* receiver_value = LoadJSValueValue(receiver);
9382 7626 : GotoIfNot(TaggedIsNotSmi(receiver_value), if_bailout);
9383 7626 : GotoIfNot(IsString(receiver_value), if_bailout);
9384 7626 : var_value.Bind(LoadStringLengthAsSmi(receiver_value));
9385 3813 : Goto(&done);
9386 3813 : }
9387 : }
9388 :
9389 : BIND(&done);
9390 7626 : return UncheckedCast<Object>(var_value.value());
9391 : }
9392 :
      : // Convenience overload: forwards to the full TryGetOwnProperty with no
      : // details/raw-value out-variables and JS getters invoked (kCallJSGetter).
9393 229 : void CodeStubAssembler::TryGetOwnProperty(
9394 : Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
9395 : Node* unique_name, Label* if_found_value, Variable* var_value,
9396 : Label* if_not_found, Label* if_bailout) {
9397 : TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
9398 : if_found_value, var_value, nullptr, nullptr, if_not_found,
9399 229 : if_bailout, kCallJSGetter);
9400 229 : }
9401 :
      : // Looks up |unique_name| as an own property of |object| (fast-properties,
      : // NameDictionary, or GlobalDictionary storage), loads its value, and if it
      : // is an accessor dispatches through CallGetterIfAccessor per |mode|.
      : // |var_details| / |var_raw_value| are optional out-variables (may be null).
      : // Jumps to |if_not_found| when absent, |if_bailout| when the lookup must
      : // be handled in the runtime.
9402 453 : void CodeStubAssembler::TryGetOwnProperty(
9403 : Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
9404 : Node* unique_name, Label* if_found_value, Variable* var_value,
9405 : Variable* var_details, Variable* var_raw_value, Label* if_not_found,
9406 : Label* if_bailout, GetOwnPropertyMode mode) {
9407 : DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
9408 453 : Comment("TryGetOwnProperty");
9409 :
9410 : TVARIABLE(HeapObject, var_meta_storage);
9411 : TVARIABLE(IntPtrT, var_entry);
9412 :
9413 453 : Label if_found_fast(this), if_found_dict(this), if_found_global(this);
9414 :
      : // When the caller does not want details, use a local scratch variable so
      : // the load helpers always have somewhere to write.
9415 906 : VARIABLE(local_var_details, MachineRepresentation::kWord32);
9416 453 : if (!var_details) {
9417 : var_details = &local_var_details;
9418 : }
9419 453 : Label if_found(this);
9420 :
9421 : TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
9422 : &if_found_dict, &if_found_global, &var_meta_storage,
9423 453 : &var_entry, if_not_found, if_bailout);
9424 : BIND(&if_found_fast);
9425 : {
9426 453 : TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
9427 : Node* name_index = var_entry.value();
9428 :
9429 : LoadPropertyFromFastObject(object, map, descriptors, name_index,
9430 453 : var_details, var_value);
9431 453 : Goto(&if_found);
9432 : }
9433 : BIND(&if_found_dict);
9434 : {
9435 : Node* dictionary = var_meta_storage.value();
9436 : Node* entry = var_entry.value();
9437 453 : LoadPropertyFromNameDictionary(dictionary, entry, var_details, var_value);
9438 453 : Goto(&if_found);
9439 : }
9440 : BIND(&if_found_global);
9441 : {
9442 : Node* dictionary = var_meta_storage.value();
9443 : Node* entry = var_entry.value();
9444 :
      : // A deleted global property (hole in its PropertyCell) counts as absent.
9445 : LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
9446 453 : if_not_found);
9447 453 : Goto(&if_found);
9448 : }
9449 : // Here we have details and value which could be an accessor.
9450 : BIND(&if_found);
9451 : {
9452 : // TODO(ishell): Execute C++ accessor in case of accessor info
9453 453 : if (var_raw_value) {
9454 224 : var_raw_value->Bind(var_value->value());
9455 : }
9456 : Node* value = CallGetterIfAccessor(var_value->value(), var_details->value(),
9457 906 : context, receiver, if_bailout, mode);
9458 453 : var_value->Bind(value);
9459 453 : Goto(if_found_value);
9460 : }
9461 453 : }
9462 :
      : // Checks for the existence (not the value) of the element at
      : // |intptr_index| in |object|, dispatching on the map's ElementsKind.
      : // Outcomes: |if_found| (present), |if_absent| (typed-array miss/detached),
      : // |if_not_found| (hole or out of bounds), |if_bailout| (needs runtime,
      : // e.g. special receivers or negative indices).
9463 845 : void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
9464 : SloppyTNode<Int32T> instance_type,
9465 : SloppyTNode<IntPtrT> intptr_index,
9466 : Label* if_found, Label* if_absent,
9467 : Label* if_not_found,
9468 : Label* if_bailout) {
9469 : // Handle special objects in runtime.
9470 1690 : GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);
9471 :
9472 1690 : Node* elements_kind = LoadMapElementsKind(map);
9473 :
9474 : // TODO(verwaest): Support other elements kinds as well.
9475 845 : Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
9476 845 : if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
9477 845 : if_typedarray(this);
9478 : // clang-format off
      : // NOTE: values[] and labels[] below are parallel arrays; entries must
      : // stay in the same order (checked by the STATIC_ASSERT after them).
9479 : int32_t values[] = {
9480 : // Handled by {if_isobjectorsmi}.
9481 : PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS,
9482 : HOLEY_ELEMENTS,
9483 : // Handled by {if_isdouble}.
9484 : PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
9485 : // Handled by {if_isdictionary}.
9486 : DICTIONARY_ELEMENTS,
9487 : // Handled by {if_isfaststringwrapper}.
9488 : FAST_STRING_WRAPPER_ELEMENTS,
9489 : // Handled by {if_isslowstringwrapper}.
9490 : SLOW_STRING_WRAPPER_ELEMENTS,
9491 : // Handled by {if_not_found}.
9492 : NO_ELEMENTS,
9493 : // Handled by {if_typed_array}.
9494 : UINT8_ELEMENTS,
9495 : INT8_ELEMENTS,
9496 : UINT16_ELEMENTS,
9497 : INT16_ELEMENTS,
9498 : UINT32_ELEMENTS,
9499 : INT32_ELEMENTS,
9500 : FLOAT32_ELEMENTS,
9501 : FLOAT64_ELEMENTS,
9502 : UINT8_CLAMPED_ELEMENTS,
9503 : BIGUINT64_ELEMENTS,
9504 : BIGINT64_ELEMENTS,
9505 845 : };
9506 : Label* labels[] = {
9507 : &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
9508 : &if_isobjectorsmi,
9509 : &if_isdouble, &if_isdouble,
9510 : &if_isdictionary,
9511 : &if_isfaststringwrapper,
9512 : &if_isslowstringwrapper,
9513 : if_not_found,
9514 : &if_typedarray,
9515 : &if_typedarray,
9516 : &if_typedarray,
9517 : &if_typedarray,
9518 : &if_typedarray,
9519 : &if_typedarray,
9520 : &if_typedarray,
9521 : &if_typedarray,
9522 : &if_typedarray,
9523 : &if_typedarray,
9524 : &if_typedarray,
9525 845 : };
9526 : // clang-format on
9527 : STATIC_ASSERT(arraysize(values) == arraysize(labels));
9528 845 : Switch(elements_kind, if_bailout, values, labels, arraysize(values));
9529 :
9530 : BIND(&if_isobjectorsmi);
9531 : {
9532 : TNode<FixedArray> elements = CAST(LoadElements(object));
9533 : TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
9534 :
      : // Unsigned compare also routes negative indices to the OOB path.
9535 1690 : GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
9536 :
9537 845 : TNode<Object> element = LoadFixedArrayElement(elements, intptr_index);
9538 : TNode<Oddball> the_hole = TheHoleConstant();
9539 845 : Branch(WordEqual(element, the_hole), if_not_found, if_found);
9540 : }
9541 : BIND(&if_isdouble);
9542 : {
9543 : TNode<FixedArrayBase> elements = LoadElements(object);
9544 : TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
9545 :
9546 1690 : GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
9547 :
9548 : // Check if the element is a double hole, but don't load it.
9549 : LoadFixedDoubleArrayElement(CAST(elements), intptr_index,
9550 : MachineType::None(), 0, INTPTR_PARAMETERS,
9551 845 : if_not_found);
9552 845 : Goto(if_found);
9553 : }
9554 : BIND(&if_isdictionary);
9555 : {
9556 : // Negative keys must be converted to property names.
9557 2535 : GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
9558 :
9559 : TVARIABLE(IntPtrT, var_entry);
9560 845 : TNode<NumberDictionary> elements = CAST(LoadElements(object));
9561 : NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
9562 845 : if_not_found);
9563 : }
9564 : BIND(&if_isfaststringwrapper);
9565 : {
9566 : CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
9567 : Node* string = LoadJSValueValue(object);
9568 : CSA_ASSERT(this, IsString(string));
9569 1690 : Node* length = LoadStringLengthAsWord(string);
      : // In-range indices hit the wrapped string's characters; otherwise fall
      : // through to the wrapper's own (fast) elements.
9570 1690 : GotoIf(UintPtrLessThan(intptr_index, length), if_found);
9571 845 : Goto(&if_isobjectorsmi);
9572 : }
9573 : BIND(&if_isslowstringwrapper);
9574 : {
9575 : CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
9576 : Node* string = LoadJSValueValue(object);
9577 : CSA_ASSERT(this, IsString(string));
9578 1690 : Node* length = LoadStringLengthAsWord(string);
9579 1690 : GotoIf(UintPtrLessThan(intptr_index, length), if_found);
9580 845 : Goto(&if_isdictionary);
9581 : }
9582 : BIND(&if_typedarray);
9583 : {
9584 : Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
9585 1690 : GotoIf(IsDetachedBuffer(buffer), if_absent);
9586 :
9587 1690 : Node* length = SmiUntag(LoadJSTypedArrayLength(CAST(object)));
      : // Typed arrays never consult the prototype chain: OOB is absent, not
      : // not-found.
9588 1690 : Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
9589 : }
9590 : BIND(&if_oob);
9591 : {
9592 : // Positive OOB indices mean "not found", negative indices must be
9593 : // converted to property names.
9594 2535 : GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
9595 845 : Goto(if_not_found);
9596 845 : }
9597 845 : }
9598 :
      : // Conservatively classifies |name_string| as possibly being a "special
      : // index" (canonical numeric string, "Infinity", "NaN", or a negative
      : // number) using only length and first-character checks. May report false
      : // positives via |if_maybe_special_index|; never false negatives.
9599 840 : void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
9600 : Label* if_maybe_special_index,
9601 : Label* if_not_special_index) {
9602 : // TODO(cwhan.tunz): Implement fast cases more.
9603 :
9604 : // If a name is empty or too long, it's not a special index
9605 : // Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
9606 : const int kBufferSize = 24;
9607 840 : TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
9608 1680 : GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
9609 840 : GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
9610 1680 : if_not_special_index);
9611 :
9612 : // If the first character of name is not a digit or '-', or we can't match it
9613 : // to Infinity or NaN, then this is not a special index.
9614 1680 : TNode<Int32T> first_char = StringCharCodeAt(name_string, IntPtrConstant(0));
9615 : // If the name starts with '-', it can be a negative index.
9616 2520 : GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
9617 : // If the name starts with 'I', it can be "Infinity".
9618 2520 : GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
9619 : // If the name starts with 'N', it can be "NaN".
9620 2520 : GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
9621 : // Finally, if the first character is not a digit either, then we are sure
9622 : // that the name is not a special index.
      : // Unsigned comparisons implement the ['0'..'9'] range check.
9623 2520 : GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
9624 2520 : GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
9625 840 : Goto(if_maybe_special_index);
9626 840 : }
9627 :
      : // Walks |receiver|'s prototype chain, invoking the caller-supplied
      : // |lookup_property_in_holder| (unique-name keys) or
      : // |lookup_element_in_holder| (integer keys) on each holder until one of
      : // them jumps away. Reaching a null prototype goes to |if_end|; Smis,
      : // non-receivers, and integer-indexed-exotic hazards go to |if_bailout|;
      : // proxies go to |if_proxy| when provided.
9628 840 : void CodeStubAssembler::TryPrototypeChainLookup(
9629 : Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
9630 : const LookupInHolder& lookup_element_in_holder, Label* if_end,
9631 : Label* if_bailout, Label* if_proxy) {
9632 : // Ensure receiver is JSReceiver, otherwise bailout.
9633 840 : Label if_objectisnotsmi(this);
9634 1680 : Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
9635 : BIND(&if_objectisnotsmi);
9636 :
9637 1680 : Node* map = LoadMap(receiver);
9638 1680 : Node* instance_type = LoadMapInstanceType(map);
9639 : {
9640 : Label if_objectisreceiver(this);
9641 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
9642 : STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
9643 840 : Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
9644 1680 : if_bailout);
9645 : BIND(&if_objectisreceiver);
9646 :
9647 840 : if (if_proxy) {
9648 1680 : GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
9649 840 : }
9650 : }
9651 :
9652 1680 : VARIABLE(var_index, MachineType::PointerRepresentation());
9653 1680 : VARIABLE(var_unique, MachineRepresentation::kTagged);
9654 :
      : // Split on key type: unique names use the property path, integer indices
      : // the element path.
9655 840 : Label if_keyisindex(this), if_iskeyunique(this);
9656 : TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
9657 840 : if_bailout);
9658 :
9659 : BIND(&if_iskeyunique);
9660 : {
      : // Loop state (holder, its map, its instance type) is merged across
      : // iterations via the variable list passed to the loop label.
9661 840 : VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
9662 1680 : VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
9663 1680 : VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
9664 : instance_type);
9665 :
9666 : Variable* merged_variables[] = {&var_holder, &var_holder_map,
9667 840 : &var_holder_instance_type};
9668 1680 : Label loop(this, arraysize(merged_variables), merged_variables);
9669 840 : Goto(&loop);
9670 : BIND(&loop);
9671 : {
9672 840 : Node* holder_map = var_holder_map.value();
9673 840 : Node* holder_instance_type = var_holder_instance_type.value();
9674 :
9675 840 : Label next_proto(this), check_integer_indexed_exotic(this);
9676 : lookup_property_in_holder(receiver, var_holder.value(), holder_map,
9677 : holder_instance_type, var_unique.value(),
9678 840 : &check_integer_indexed_exotic, if_bailout);
9679 :
9680 : BIND(&check_integer_indexed_exotic);
9681 : {
9682 : // Bailout if it can be an integer indexed exotic case.
9683 840 : GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
9684 1680 : &next_proto);
9685 2520 : GotoIfNot(IsString(var_unique.value()), &next_proto);
9686 840 : BranchIfMaybeSpecialIndex(CAST(var_unique.value()), if_bailout,
9687 840 : &next_proto);
9688 : }
9689 :
9690 : BIND(&next_proto);
9691 :
9692 1680 : Node* proto = LoadMapPrototype(holder_map);
9693 :
9694 1680 : GotoIf(IsNull(proto), if_end);
9695 :
9696 1680 : Node* map = LoadMap(proto);
9697 1680 : Node* instance_type = LoadMapInstanceType(map);
9698 :
9699 840 : var_holder.Bind(proto);
9700 840 : var_holder_map.Bind(map);
9701 840 : var_holder_instance_type.Bind(instance_type);
9702 1680 : Goto(&loop);
9703 840 : }
9704 : }
9705 : BIND(&if_keyisindex);
9706 : {
9707 840 : VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
9708 1680 : VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
9709 1680 : VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
9710 : instance_type);
9711 :
9712 : Variable* merged_variables[] = {&var_holder, &var_holder_map,
9713 840 : &var_holder_instance_type};
9714 1680 : Label loop(this, arraysize(merged_variables), merged_variables);
9715 840 : Goto(&loop);
9716 : BIND(&loop);
9717 : {
9718 : Label next_proto(this);
9719 : lookup_element_in_holder(receiver, var_holder.value(),
9720 : var_holder_map.value(),
9721 : var_holder_instance_type.value(),
9722 840 : var_index.value(), &next_proto, if_bailout);
9723 : BIND(&next_proto);
9724 :
9725 2520 : Node* proto = LoadMapPrototype(var_holder_map.value());
9726 :
9727 1680 : GotoIf(IsNull(proto), if_end);
9728 :
9729 1680 : Node* map = LoadMap(proto);
9730 1680 : Node* instance_type = LoadMapInstanceType(map);
9731 :
9732 840 : var_holder.Bind(proto);
9733 840 : var_holder_map.Bind(map);
9734 840 : var_holder_instance_type.Bind(instance_type);
9735 840 : Goto(&loop);
9736 840 : }
9737 840 : }
9738 840 : }
9739 :
      : // Returns true/false (as a tagged boolean) depending on whether
      : // |prototype| occurs anywhere in |object|'s prototype chain. Falls back
      : // to Runtime::kHasInPrototypeChain for proxies and for maps with named
      : // interceptors or access checks.
9740 168 : Node* CodeStubAssembler::HasInPrototypeChain(Node* context, Node* object,
9741 : Node* prototype) {
9742 : CSA_ASSERT(this, TaggedIsNotSmi(object));
9743 168 : VARIABLE(var_result, MachineRepresentation::kTagged);
9744 168 : Label return_false(this), return_true(this),
9745 168 : return_runtime(this, Label::kDeferred), return_result(this);
9746 :
9747 : // Loop through the prototype chain looking for the {prototype}.
9748 504 : VARIABLE(var_object_map, MachineRepresentation::kTagged, LoadMap(object));
9749 168 : Label loop(this, &var_object_map);
9750 168 : Goto(&loop);
9751 : BIND(&loop);
9752 : {
9753 : // Check if we can determine the prototype directly from the {object_map}.
9754 168 : Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
9755 168 : Node* object_map = var_object_map.value();
9756 168 : TNode<Int32T> object_instance_type = LoadMapInstanceType(object_map);
9757 : Branch(IsSpecialReceiverInstanceType(object_instance_type),
9758 336 : &if_objectisspecial, &if_objectisdirect);
9759 : BIND(&if_objectisspecial);
9760 : {
9761 : // The {object_map} is a special receiver map or a primitive map, check
9762 : // if we need to use the if_objectisspecial path in the runtime.
9763 168 : GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
9764 336 : &return_runtime);
9765 336 : Node* object_bitfield = LoadMapBitField(object_map);
9766 : int mask = Map::HasNamedInterceptorBit::kMask |
9767 : Map::IsAccessCheckNeededBit::kMask;
9768 168 : Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
9769 336 : &if_objectisdirect);
9770 : }
9771 : BIND(&if_objectisdirect);
9772 :
9773 : // Check the current {object} prototype.
9774 336 : Node* object_prototype = LoadMapPrototype(object_map);
      : // Null prototype terminates the chain without a match.
9775 336 : GotoIf(IsNull(object_prototype), &return_false);
9776 336 : GotoIf(WordEqual(object_prototype, prototype), &return_true);
9777 :
9778 : // Continue with the prototype.
9779 : CSA_ASSERT(this, TaggedIsNotSmi(object_prototype));
9780 336 : var_object_map.Bind(LoadMap(object_prototype));
9781 336 : Goto(&loop);
9782 : }
9783 :
9784 : BIND(&return_true);
9785 168 : var_result.Bind(TrueConstant());
9786 168 : Goto(&return_result);
9787 :
9788 : BIND(&return_false);
9789 168 : var_result.Bind(FalseConstant());
9790 168 : Goto(&return_result);
9791 :
9792 : BIND(&return_runtime);
9793 : {
9794 : // Fallback to the runtime implementation.
9795 : var_result.Bind(
9796 168 : CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
9797 : }
9798 168 : Goto(&return_result);
9799 :
9800 : BIND(&return_result);
9801 336 : return var_result.value();
9802 : }
9803 :
      : // Implements the fast path of the OrdinaryHasInstance abstract operation
      : // (the default `instanceof` semantics): resolves {callable}'s "prototype"
      : // and checks whether it appears in {object}'s prototype chain. Any case
      : // the fast path cannot prove (Smis, non-JSFunctions, bound functions,
      : // prototype requiring runtime lookup) defers to
      : // Runtime::kOrdinaryHasInstance.
9804 112 : Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
9805 : Node* object) {
9806 112 : VARIABLE(var_result, MachineRepresentation::kTagged);
9807 112 : Label return_runtime(this, Label::kDeferred), return_result(this);
9808 :
9809 : // Goto runtime if {object} is a Smi.
9810 224 : GotoIf(TaggedIsSmi(object), &return_runtime);
9811 :
9812 : // Goto runtime if {callable} is a Smi.
9813 224 : GotoIf(TaggedIsSmi(callable), &return_runtime);
9814 :
9815 : // Load map of {callable}.
9816 224 : Node* callable_map = LoadMap(callable);
9817 :
9818 : // Goto runtime if {callable} is not a JSFunction.
9819 224 : Node* callable_instance_type = LoadMapInstanceType(callable_map);
9820 112 : GotoIfNot(InstanceTypeEqual(callable_instance_type, JS_FUNCTION_TYPE),
9821 224 : &return_runtime);
9822 :
9823 : GotoIfPrototypeRequiresRuntimeLookup(CAST(callable), CAST(callable_map),
9824 112 : &return_runtime);
9825 :
9826 : // Get the "prototype" (or initial map) of the {callable}.
9827 : Node* callable_prototype =
9828 : LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
9829 : {
9830 : Label callable_prototype_valid(this);
9831 224 : VARIABLE(var_callable_prototype, MachineRepresentation::kTagged,
9832 : callable_prototype);
9833 :
9834 : // Resolve the "prototype" if the {callable} has an initial map. Afterwards
9835 : // the {callable_prototype} will be either the JSReceiver prototype object
9836 : // or the hole value, which means that no instances of the {callable} were
9837 : // created so far and hence we should return false.
9838 : Node* callable_prototype_instance_type =
9839 224 : LoadInstanceType(callable_prototype);
9840 112 : GotoIfNot(InstanceTypeEqual(callable_prototype_instance_type, MAP_TYPE),
9841 224 : &callable_prototype_valid);
9842 : var_callable_prototype.Bind(
9843 112 : LoadObjectField(callable_prototype, Map::kPrototypeOffset));
9844 112 : Goto(&callable_prototype_valid);
9845 : BIND(&callable_prototype_valid);
9846 224 : callable_prototype = var_callable_prototype.value();
9847 : }
9848 :
9849 : // Loop through the prototype chain looking for the {callable} prototype.
9850 112 : var_result.Bind(HasInPrototypeChain(context, object, callable_prototype));
9851 112 : Goto(&return_result);
9852 :
9853 : BIND(&return_runtime);
9854 : {
9855 : // Fallback to the runtime implementation.
9856 : var_result.Bind(
9857 112 : CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
9858 : }
9859 112 : Goto(&return_result);
9860 :
9861 : BIND(&return_result);
9862 224 : return var_result.value();
9863 : }
9864 :
      : // Computes base_size + index * element_size for an element of |kind|,
      : // where |index_node| is either an IntPtr or a Smi per |mode|. Smi inputs
      : // are handled without untagging by folding the Smi shift into the element
      : // size shift. Constant indices are folded into an IntPtr constant.
9865 363610 : TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
9866 : ElementsKind kind,
9867 : ParameterMode mode,
9868 : int base_size) {
9869 : CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, mode));
9870 363610 : int element_size_shift = ElementsKindToShiftSize(kind);
9871 363610 : int element_size = 1 << element_size_shift;
9872 : int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
9873 363610 : intptr_t index = 0;
9874 : bool constant_index = false;
9875 363610 : if (mode == SMI_PARAMETERS) {
      : // Compensate for the Smi tag/shift instead of untagging the index; the
      : // resulting shift may become negative (handled with WordSar below).
9876 23115 : element_size_shift -= kSmiShiftBits;
9877 23115 : Smi smi_index;
9878 23115 : constant_index = ToSmiConstant(index_node, &smi_index);
9879 24677 : if (constant_index) index = smi_index->value();
9880 46230 : index_node = BitcastTaggedToWord(index_node);
9881 : } else {
9882 : DCHECK(mode == INTPTR_PARAMETERS);
9883 340495 : constant_index = ToIntPtrConstant(index_node, index);
9884 : }
9885 363610 : if (constant_index) {
      : // Fully constant: fold the whole offset at compile time.
9886 116328 : return IntPtrConstant(base_size + element_size * index);
9887 : }
9888 :
9889 : TNode<WordT> shifted_index =
9890 : (element_size_shift == 0)
9891 : ? UncheckedCast<WordT>(index_node)
9892 : : ((element_size_shift > 0)
9893 685748 : ? WordShl(index_node, IntPtrConstant(element_size_shift))
9894 516117 : : WordSar(index_node, IntPtrConstant(-element_size_shift)));
9895 247282 : return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
9896 : }
9897 :
      : // Returns true if |offset| points at or before the last element of a
      : // container with |length| elements of |kind| behind a |header_size|
      : // header (kHeapObjectTag-adjusted).
9898 0 : TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
9899 : SloppyTNode<IntPtrT> length,
9900 : int header_size,
9901 : ElementsKind kind) {
9902 : // Make sure we point to the last field.
9903 0 : int element_size = 1 << ElementsKindToShiftSize(kind);
9904 0 : int correction = header_size - kHeapObjectTag - element_size;
9905 : TNode<IntPtrT> last_offset =
9906 0 : ElementOffsetFromIndex(length, kind, INTPTR_PARAMETERS, correction);
9907 0 : return IntPtrLessThanOrEqual(offset, last_offset);
9908 : }
9909 :
      : // Loads |closure|'s FeedbackVector. When |if_undefined| is supplied, an
      : // undefined vector (not yet allocated) jumps there; otherwise the caller
      : // asserts, via the CAST, that the vector exists.
9910 908 : TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVector(
9911 : SloppyTNode<JSFunction> closure, Label* if_undefined) {
9912 908 : TNode<Object> maybe_vector = LoadFeedbackVectorUnchecked(closure);
9913 908 : if (if_undefined) {
9914 112 : GotoIf(IsUndefined(maybe_vector), if_undefined);
9915 : }
9916 908 : return CAST(maybe_vector);
9917 : }
9918 :
      : // Loads the raw contents of |closure|'s FeedbackCell value slot without
      : // type-checking it: callers must handle the case where it is not (yet) a
      : // FeedbackVector.
9919 12668 : TNode<Object> CodeStubAssembler::LoadFeedbackVectorUnchecked(
9920 : SloppyTNode<JSFunction> closure) {
9921 : TNode<FeedbackCell> feedback_cell =
9922 : CAST(LoadObjectField(closure, JSFunction::kFeedbackCellOffset));
9923 : TNode<Object> maybe_vector =
9924 : LoadObjectField(feedback_cell, FeedbackCell::kValueOffset);
9925 12668 : return maybe_vector;
9926 : }
9927 :
      : // Loads the FeedbackVector of the JSFunction found in the parent
      : // (JavaScript) frame — used by stubs that are called from JS code.
9928 504 : TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorForStub() {
9929 : TNode<JSFunction> function =
9930 504 : CAST(LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset));
9931 504 : return LoadFeedbackVector(function);
9932 : }
9933 :
      : // ORs |feedback| into the feedback-vector slot |slot_id|, storing (and
      : // reporting) only when the combined value actually differs from what is
      : // already recorded. A no-op when |maybe_vector| is undefined.
9934 8736 : void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* maybe_vector,
9935 : Node* slot_id) {
9936 8736 : Label end(this);
9937 : // If feedback_vector is not valid, then nothing to do.
9938 17472 : GotoIf(IsUndefined(maybe_vector), &end);
9939 :
9940 : // This method is used for binary op and compare feedback. These
9941 : // vector nodes are initialized with a smi 0, so we can simply OR
9942 : // our new feedback in place.
9943 : TNode<FeedbackVector> feedback_vector = CAST(maybe_vector);
9944 : TNode<MaybeObject> feedback_element =
9945 8736 : LoadFeedbackVectorSlot(feedback_vector, slot_id);
9946 8736 : TNode<Smi> previous_feedback = CAST(feedback_element);
9947 8736 : TNode<Smi> combined_feedback = SmiOr(previous_feedback, CAST(feedback));
9948 :
      : // Skip the store when no new bits were added.
9949 17472 : GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
9950 : {
      : // Smis need no write barrier.
9951 : StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
9952 8736 : SKIP_WRITE_BARRIER);
9953 8736 : ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
9954 8736 : Goto(&end);
9955 : }
9956 :
9957 8736 : BIND(&end);
9958 8736 : }
9959 :
      : // Determines the effective language mode (as a Smi-tagged LanguageMode):
      : // strict if either the SharedFunctionInfo's IsStrictBit is set or the
      : // context's ScopeInfo declares strict mode; sloppy otherwise (including
      : // when the ScopeInfo has no flags field).
9960 672 : Node* CodeStubAssembler::GetLanguageMode(
9961 : TNode<SharedFunctionInfo> shared_function_info, Node* context) {
      : // Default to strict; only the sloppy paths below overwrite it.
9962 1344 : VARIABLE(var_language_mode, MachineRepresentation::kTaggedSigned,
9963 : SmiConstant(LanguageMode::kStrict));
9964 672 : Label language_mode_determined(this), language_mode_sloppy(this);
9965 :
9966 : // Get the language mode from SFI
9967 : TNode<Uint32T> closure_is_strict =
9968 : DecodeWord32<SharedFunctionInfo::IsStrictBit>(LoadObjectField(
9969 : shared_function_info, SharedFunctionInfo::kFlagsOffset,
9970 1344 : MachineType::Uint32()));
9971 : // It is already strict, we need not check context's language mode.
9972 672 : GotoIf(closure_is_strict, &language_mode_determined);
9973 :
9974 : // SFI::LanguageMode is sloppy, check if context has a stricter mode.
9975 : TNode<ScopeInfo> scope_info =
9976 : CAST(LoadObjectField(context, Context::kScopeInfoOffset));
9977 : // If no flags field assume sloppy
9978 : GotoIf(SmiLessThanOrEqual(LoadFixedArrayBaseLength(scope_info),
9979 1344 : SmiConstant(ScopeInfo::Fields::kFlags)),
9980 1344 : &language_mode_sloppy);
9981 672 : TNode<Smi> flags = CAST(LoadFixedArrayElement(
9982 : scope_info, SmiConstant(ScopeInfo::Fields::kFlags)));
9983 : TNode<Uint32T> context_is_strict =
9984 1344 : DecodeWord32<ScopeInfo::LanguageModeField>(SmiToInt32(flags));
9985 672 : GotoIf(context_is_strict, &language_mode_determined);
9986 672 : Goto(&language_mode_sloppy);
9987 :
9988 : // Both Context::ScopeInfo::LanguageMode and SFI::LanguageMode are sloppy.
9989 : BIND(&language_mode_sloppy);
9990 672 : var_language_mode.Bind(SmiConstant(LanguageMode::kSloppy));
9991 672 : Goto(&language_mode_determined);
9992 :
9993 : BIND(&language_mode_determined);
9994 1344 : return var_language_mode.value();
9995 : }
9996 :
      : // Overload: derives the language mode from a JSFunction by loading its
      : // SharedFunctionInfo and delegating to the SFI-based overload.
9997 672 : Node* CodeStubAssembler::GetLanguageMode(TNode<JSFunction> closure,
9998 : Node* context) {
9999 : TNode<SharedFunctionInfo> sfi =
10000 672 : CAST(LoadObjectField(closure, JSFunction::kSharedFunctionInfoOffset));
10001 672 : return GetLanguageMode(sfi, context);
10002 : }
10003 :
      : // Overload: derives the language mode from a FeedbackVector by loading
      : // its SharedFunctionInfo and delegating to the SFI-based overload.
10004 0 : Node* CodeStubAssembler::GetLanguageMode(TNode<FeedbackVector> vector,
10005 : Node* context) {
10006 : TNode<SharedFunctionInfo> sfi =
10007 0 : CAST(LoadObjectField(vector, FeedbackVector::kSharedFunctionInfoOffset));
10008 0 : return GetLanguageMode(sfi, context);
10009 : }
10010 :
      : // Bookkeeping after a feedback slot was written: resets the vector's
      : // profiler tick count and, in V8_TRACE_FEEDBACK_UPDATES builds, emits a
      : // trace entry tagged with |reason|.
10011 13272 : void CodeStubAssembler::ReportFeedbackUpdate(
10012 : SloppyTNode<FeedbackVector> feedback_vector, SloppyTNode<IntPtrT> slot_id,
10013 : const char* reason) {
10014 : // Reset profiler ticks.
10015 : StoreObjectFieldNoWriteBarrier(
10016 13272 : feedback_vector, FeedbackVector::kProfilerTicksOffset, Int32Constant(0),
10017 26544 : MachineRepresentation::kWord32);
10018 :
10019 : #ifdef V8_TRACE_FEEDBACK_UPDATES
10020 : // Trace the update.
10021 : CallRuntime(Runtime::kInterpreterTraceUpdateFeedback, NoContextConstant(),
10022 : LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset),
10023 : SmiTag(slot_id), StringConstant(reason));
10024 : #endif // V8_TRACE_FEEDBACK_UPDATES
10025 13272 : }
10026 :
      : // Replaces the accumulated feedback with |new_feedback| (discarding old
      : // bits). A no-op when the caller is not collecting feedback (nullptr).
10027 33320 : void CodeStubAssembler::OverwriteFeedback(Variable* existing_feedback,
10028 : int new_feedback) {
10029 66640 : if (existing_feedback == nullptr) return;
10030 46368 : existing_feedback->Bind(SmiConstant(new_feedback));
10031 : }
10032 :
      : // ORs the constant |feedback| bits into the accumulated feedback.
      : // A no-op when the caller is not collecting feedback (nullptr).
10033 26320 : void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
10034 : int feedback) {
10035 52640 : if (existing_feedback == nullptr) return;
10036 : existing_feedback->Bind(
10037 38640 : SmiOr(CAST(existing_feedback->value()), SmiConstant(feedback)));
10038 : }
10039 :
10040 560 : void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
10041 : Node* feedback) {
10042 1120 : if (existing_feedback == nullptr) return;
10043 : existing_feedback->Bind(
10044 1008 : SmiOr(CAST(existing_feedback->value()), CAST(feedback)));
10045 : }
10046 :
10047 896 : void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
10048 : Label* if_protector) {
10049 : // This list must be kept in sync with LookupIterator::UpdateProtector!
10050 : // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
10051 896 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kconstructor_string)),
10052 896 : if_protector);
10053 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kiterator_symbol)), if_protector);
10054 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::knext_string)), if_protector);
10055 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kspecies_symbol)), if_protector);
10056 896 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kis_concat_spreadable_symbol)),
10057 896 : if_protector);
10058 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kresolve_string)), if_protector);
10059 1792 : GotoIf(WordEqual(name, LoadRoot(RootIndex::kthen_string)), if_protector);
10060 : // Fall through if no case matched.
10061 896 : }
10062 :
10063 616 : TNode<Map> CodeStubAssembler::LoadReceiverMap(SloppyTNode<Object> receiver) {
10064 : return Select<Map>(
10065 : TaggedIsSmi(receiver),
10066 616 : [=] { return CAST(LoadRoot(RootIndex::kHeapNumberMap)); },
10067 1848 : [=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
10068 : }
10069 :
10070 8741 : TNode<IntPtrT> CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
10071 8741 : TVARIABLE(IntPtrT, var_intptr_key);
10072 8741 : Label done(this, &var_intptr_key), key_is_smi(this);
10073 17482 : GotoIf(TaggedIsSmi(key), &key_is_smi);
10074 : // Try to convert a heap number to a Smi.
10075 17482 : GotoIfNot(IsHeapNumber(key), miss);
10076 : {
10077 8741 : TNode<Float64T> value = LoadHeapNumberValue(key);
10078 8741 : TNode<Int32T> int_value = RoundFloat64ToInt32(value);
10079 26223 : GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
10080 17482 : var_intptr_key = ChangeInt32ToIntPtr(int_value);
10081 8741 : Goto(&done);
10082 : }
10083 :
10084 : BIND(&key_is_smi);
10085 : {
10086 17482 : var_intptr_key = SmiUntag(key);
10087 8741 : Goto(&done);
10088 : }
10089 :
10090 : BIND(&done);
10091 8741 : return var_intptr_key.value();
10092 : }
10093 :
10094 280 : Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
10095 : Node* value, Label* bailout) {
10096 : // Mapped arguments are actual arguments. Unmapped arguments are values added
10097 : // to the arguments object after it was created for the call. Mapped arguments
10098 : // are stored in the context at indexes given by elements[key + 2]. Unmapped
10099 : // arguments are stored as regular indexed properties in the arguments array,
10100 : // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
10101 : // look at argument object construction.
10102 : //
10103 : // The sloppy arguments elements array has a special format:
10104 : //
10105 : // 0: context
10106 : // 1: unmapped arguments array
10107 : // 2: mapped_index0,
10108 : // 3: mapped_index1,
10109 : // ...
10110 : //
10111 : // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
10112 : // If key + 2 >= elements.length then attempt to look in the unmapped
10113 : // arguments array (given by elements[1]) and return the value at key, missing
10114 : // to the runtime if the unmapped arguments array is not a fixed array or if
10115 : // key >= unmapped_arguments_array.length.
10116 : //
10117 : // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
10118 : // in the unmapped arguments array, as described above. Otherwise, t is a Smi
10119 : // index into the context array given at elements[0]. Return the value at
10120 : // context[t].
10121 :
10122 280 : bool is_load = value == nullptr;
10123 :
10124 560 : GotoIfNot(TaggedIsSmi(key), bailout);
10125 560 : key = SmiUntag(key);
10126 840 : GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
10127 :
10128 : TNode<FixedArray> elements = CAST(LoadElements(receiver));
10129 : TNode<IntPtrT> elements_length = LoadAndUntagFixedArrayBaseLength(elements);
10130 :
10131 280 : VARIABLE(var_result, MachineRepresentation::kTagged);
10132 280 : if (!is_load) {
10133 224 : var_result.Bind(value);
10134 : }
10135 280 : Label if_mapped(this), if_unmapped(this), end(this, &var_result);
10136 560 : Node* intptr_two = IntPtrConstant(2);
10137 560 : Node* adjusted_length = IntPtrSub(elements_length, intptr_two);
10138 :
10139 560 : GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);
10140 :
10141 : TNode<Object> mapped_index =
10142 560 : LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
10143 280 : Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);
10144 :
10145 : BIND(&if_mapped);
10146 : {
10147 280 : TNode<IntPtrT> mapped_index_intptr = SmiUntag(CAST(mapped_index));
10148 280 : TNode<Context> the_context = CAST(LoadFixedArrayElement(elements, 0));
10149 280 : if (is_load) {
10150 112 : Node* result = LoadContextElement(the_context, mapped_index_intptr);
10151 : CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
10152 56 : var_result.Bind(result);
10153 : } else {
10154 224 : StoreContextElement(the_context, mapped_index_intptr, value);
10155 : }
10156 280 : Goto(&end);
10157 : }
10158 :
10159 : BIND(&if_unmapped);
10160 : {
10161 : TNode<HeapObject> backing_store_ho =
10162 280 : CAST(LoadFixedArrayElement(elements, 1));
10163 560 : GotoIf(WordNotEqual(LoadMap(backing_store_ho), FixedArrayMapConstant()),
10164 280 : bailout);
10165 : TNode<FixedArray> backing_store = CAST(backing_store_ho);
10166 :
10167 : TNode<IntPtrT> backing_store_length =
10168 : LoadAndUntagFixedArrayBaseLength(backing_store);
10169 560 : GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);
10170 :
10171 : // The key falls into unmapped range.
10172 280 : if (is_load) {
10173 : Node* result = LoadFixedArrayElement(backing_store, key);
10174 56 : GotoIf(WordEqual(result, TheHoleConstant()), bailout);
10175 56 : var_result.Bind(result);
10176 : } else {
10177 : StoreFixedArrayElement(backing_store, key, value);
10178 : }
10179 280 : Goto(&end);
10180 : }
10181 :
10182 : BIND(&end);
10183 560 : return var_result.value();
10184 : }
10185 :
10186 840 : TNode<Context> CodeStubAssembler::LoadScriptContext(
10187 : TNode<Context> context, TNode<IntPtrT> context_index) {
10188 : TNode<Context> native_context = LoadNativeContext(context);
10189 840 : TNode<ScriptContextTable> script_context_table = CAST(
10190 : LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX));
10191 :
10192 840 : TNode<Context> script_context = CAST(LoadFixedArrayElement(
10193 : script_context_table, context_index,
10194 : ScriptContextTable::kFirstContextSlotIndex * kTaggedSize));
10195 840 : return script_context;
10196 : }
10197 :
10198 : namespace {
10199 :
10200 : // Converts typed array elements kind to a machine representations.
10201 4032 : MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
10202 4032 : switch (kind) {
10203 : case UINT8_CLAMPED_ELEMENTS:
10204 : case UINT8_ELEMENTS:
10205 : case INT8_ELEMENTS:
10206 : return MachineRepresentation::kWord8;
10207 : case UINT16_ELEMENTS:
10208 : case INT16_ELEMENTS:
10209 896 : return MachineRepresentation::kWord16;
10210 : case UINT32_ELEMENTS:
10211 : case INT32_ELEMENTS:
10212 896 : return MachineRepresentation::kWord32;
10213 : case FLOAT32_ELEMENTS:
10214 448 : return MachineRepresentation::kFloat32;
10215 : case FLOAT64_ELEMENTS:
10216 448 : return MachineRepresentation::kFloat64;
10217 : default:
10218 0 : UNREACHABLE();
10219 : }
10220 : }
10221 :
10222 : } // namespace
10223 :
10224 8781 : void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
10225 : Node* index, Node* value,
10226 : ParameterMode mode) {
10227 8781 : if (IsFixedTypedArrayElementsKind(kind)) {
10228 : if (kind == UINT8_CLAMPED_ELEMENTS) {
10229 : CSA_ASSERT(this,
10230 : Word32Equal(value, Word32And(Int32Constant(0xFF), value)));
10231 : }
10232 8064 : Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
10233 : // TODO(cbruni): Add OOB check once typed.
10234 4032 : MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
10235 4032 : StoreNoWriteBarrier(rep, elements, offset, value);
10236 12813 : return;
10237 4749 : } else if (IsDoubleElementsKind(kind)) {
10238 : // Make sure we do not store signalling NaNs into double arrays.
10239 1583 : TNode<Float64T> value_silenced = Float64SilenceNaN(value);
10240 1583 : StoreFixedDoubleArrayElement(CAST(elements), index, value_silenced, mode);
10241 : } else {
10242 : WriteBarrierMode barrier_mode =
10243 3166 : IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
10244 : StoreFixedArrayElement(CAST(elements), index, value, barrier_mode, 0, mode);
10245 : }
10246 : }
10247 :
10248 392 : Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
10249 392 : Label done(this);
10250 784 : Node* int32_zero = Int32Constant(0);
10251 784 : Node* int32_255 = Int32Constant(255);
10252 784 : VARIABLE(var_value, MachineRepresentation::kWord32, int32_value);
10253 784 : GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
10254 392 : var_value.Bind(int32_zero);
10255 784 : GotoIf(Int32LessThan(int32_value, int32_zero), &done);
10256 392 : var_value.Bind(int32_255);
10257 392 : Goto(&done);
10258 : BIND(&done);
10259 784 : return var_value.value();
10260 : }
10261 :
10262 392 : Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
10263 392 : Label done(this);
10264 1176 : VARIABLE(var_value, MachineRepresentation::kWord32, Int32Constant(0));
10265 1176 : GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
10266 784 : var_value.Bind(Int32Constant(255));
10267 1176 : GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
10268 : {
10269 784 : Node* rounded_value = Float64RoundToEven(float64_value);
10270 784 : var_value.Bind(TruncateFloat64ToWord32(rounded_value));
10271 392 : Goto(&done);
10272 : }
10273 : BIND(&done);
10274 784 : return var_value.value();
10275 : }
10276 :
10277 4088 : Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
10278 : TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
10279 : DCHECK(IsFixedTypedArrayElementsKind(elements_kind));
10280 :
10281 : MachineRepresentation rep;
10282 4088 : switch (elements_kind) {
10283 : case UINT8_ELEMENTS:
10284 : case INT8_ELEMENTS:
10285 : case UINT16_ELEMENTS:
10286 : case INT16_ELEMENTS:
10287 : case UINT32_ELEMENTS:
10288 : case INT32_ELEMENTS:
10289 : case UINT8_CLAMPED_ELEMENTS:
10290 : rep = MachineRepresentation::kWord32;
10291 : break;
10292 : case FLOAT32_ELEMENTS:
10293 : rep = MachineRepresentation::kFloat32;
10294 392 : break;
10295 : case FLOAT64_ELEMENTS:
10296 : rep = MachineRepresentation::kFloat64;
10297 392 : break;
10298 : case BIGINT64_ELEMENTS:
10299 : case BIGUINT64_ELEMENTS:
10300 1120 : return ToBigInt(context, input);
10301 : default:
10302 0 : UNREACHABLE();
10303 : }
10304 :
10305 3528 : VARIABLE(var_result, rep);
10306 7056 : VARIABLE(var_input, MachineRepresentation::kTagged, input);
10307 3528 : Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
10308 3528 : convert(this), loop(this, &var_input);
10309 3528 : Goto(&loop);
10310 : BIND(&loop);
10311 10584 : GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
10312 : // We can handle both HeapNumber and Oddball here, since Oddball has the
10313 : // same layout as the HeapNumber for the HeapNumber::value field. This
10314 : // way we can also properly optimize stores of oddballs to typed arrays.
10315 10584 : GotoIf(IsHeapNumber(var_input.value()), &if_heapnumber_or_oddball);
10316 : STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
10317 7056 : Branch(HasInstanceType(var_input.value(), ODDBALL_TYPE),
10318 7056 : &if_heapnumber_or_oddball, &convert);
10319 :
10320 : BIND(&if_heapnumber_or_oddball);
10321 : {
10322 : Node* value = UncheckedCast<Float64T>(LoadObjectField(
10323 7056 : var_input.value(), HeapNumber::kValueOffset, MachineType::Float64()));
10324 3528 : if (rep == MachineRepresentation::kWord32) {
10325 2744 : if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
10326 392 : value = Float64ToUint8Clamped(value);
10327 : } else {
10328 4704 : value = TruncateFloat64ToWord32(value);
10329 : }
10330 784 : } else if (rep == MachineRepresentation::kFloat32) {
10331 784 : value = TruncateFloat64ToFloat32(value);
10332 : } else {
10333 : DCHECK_EQ(MachineRepresentation::kFloat64, rep);
10334 : }
10335 3528 : var_result.Bind(value);
10336 3528 : Goto(&done);
10337 : }
10338 :
10339 : BIND(&if_smi);
10340 : {
10341 10584 : Node* value = SmiToInt32(var_input.value());
10342 3528 : if (rep == MachineRepresentation::kFloat32) {
10343 784 : value = RoundInt32ToFloat32(value);
10344 3136 : } else if (rep == MachineRepresentation::kFloat64) {
10345 784 : value = ChangeInt32ToFloat64(value);
10346 : } else {
10347 : DCHECK_EQ(MachineRepresentation::kWord32, rep);
10348 2744 : if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
10349 392 : value = Int32ToUint8Clamped(value);
10350 : }
10351 : }
10352 3528 : var_result.Bind(value);
10353 3528 : Goto(&done);
10354 : }
10355 :
10356 : BIND(&convert);
10357 : {
10358 7056 : var_input.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, input));
10359 3528 : Goto(&loop);
10360 : }
10361 :
10362 : BIND(&done);
10363 7056 : return var_result.value();
10364 : }
10365 :
10366 224 : void CodeStubAssembler::EmitBigTypedArrayElementStore(
10367 : TNode<JSTypedArray> object, TNode<FixedTypedArrayBase> elements,
10368 : TNode<IntPtrT> intptr_key, TNode<Object> value, TNode<Context> context,
10369 : Label* opt_if_detached) {
10370 224 : TNode<BigInt> bigint_value = ToBigInt(context, value);
10371 :
10372 224 : if (opt_if_detached != nullptr) {
10373 : // Check if buffer has been detached. Must happen after {ToBigInt}!
10374 : Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
10375 448 : GotoIf(IsDetachedBuffer(buffer), opt_if_detached);
10376 : }
10377 :
10378 224 : TNode<RawPtrT> backing_store = LoadFixedTypedArrayBackingStore(elements);
10379 : TNode<IntPtrT> offset = ElementOffsetFromIndex(intptr_key, BIGINT64_ELEMENTS,
10380 224 : INTPTR_PARAMETERS, 0);
10381 224 : EmitBigTypedArrayElementStore(elements, backing_store, offset, bigint_value);
10382 224 : }
10383 :
10384 1456 : void CodeStubAssembler::BigIntToRawBytes(TNode<BigInt> bigint,
10385 : TVariable<UintPtrT>* var_low,
10386 : TVariable<UintPtrT>* var_high) {
10387 1456 : Label done(this);
10388 2912 : *var_low = Unsigned(IntPtrConstant(0));
10389 2912 : *var_high = Unsigned(IntPtrConstant(0));
10390 1456 : TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
10391 : TNode<Uint32T> length = DecodeWord32<BigIntBase::LengthBits>(bitfield);
10392 : TNode<Uint32T> sign = DecodeWord32<BigIntBase::SignBits>(bitfield);
10393 4368 : GotoIf(Word32Equal(length, Int32Constant(0)), &done);
10394 1456 : *var_low = LoadBigIntDigit(bigint, 0);
10395 1456 : if (!Is64()) {
10396 : Label load_done(this);
10397 0 : GotoIf(Word32Equal(length, Int32Constant(1)), &load_done);
10398 0 : *var_high = LoadBigIntDigit(bigint, 1);
10399 0 : Goto(&load_done);
10400 0 : BIND(&load_done);
10401 : }
10402 4368 : GotoIf(Word32Equal(sign, Int32Constant(0)), &done);
10403 : // Negative value. Simulate two's complement.
10404 1456 : if (!Is64()) {
10405 0 : *var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high->value()));
10406 : Label no_carry(this);
10407 0 : GotoIf(WordEqual(var_low->value(), IntPtrConstant(0)), &no_carry);
10408 0 : *var_high = Unsigned(IntPtrSub(var_high->value(), IntPtrConstant(1)));
10409 0 : Goto(&no_carry);
10410 0 : BIND(&no_carry);
10411 : }
10412 4368 : *var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low->value()));
10413 1456 : Goto(&done);
10414 1456 : BIND(&done);
10415 1456 : }
10416 :
10417 896 : void CodeStubAssembler::EmitBigTypedArrayElementStore(
10418 : TNode<FixedTypedArrayBase> elements, TNode<RawPtrT> backing_store,
10419 : TNode<IntPtrT> offset, TNode<BigInt> bigint_value) {
10420 896 : TVARIABLE(UintPtrT, var_low);
10421 : // Only used on 32-bit platforms.
10422 : TVARIABLE(UintPtrT, var_high);
10423 896 : BigIntToRawBytes(bigint_value, &var_low, &var_high);
10424 :
10425 : // Assert that offset < elements.length. Given that it's an offset for a raw
10426 : // pointer we correct it by the usual kHeapObjectTag offset.
10427 : CSA_ASSERT(
10428 : this, IsOffsetInBounds(offset, LoadAndUntagFixedArrayBaseLength(elements),
10429 : kHeapObjectTag, BIGINT64_ELEMENTS));
10430 :
10431 : MachineRepresentation rep = WordT::kMachineRepresentation;
10432 : #if defined(V8_TARGET_BIG_ENDIAN)
10433 : if (!Is64()) {
10434 : StoreNoWriteBarrier(rep, backing_store, offset, var_high.value());
10435 : StoreNoWriteBarrier(rep, backing_store,
10436 : IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
10437 : var_low.value());
10438 : } else {
10439 : StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
10440 : }
10441 : #else
10442 896 : StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
10443 896 : if (!Is64()) {
10444 : StoreNoWriteBarrier(rep, backing_store,
10445 0 : IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
10446 0 : var_high.value());
10447 : }
10448 : #endif
10449 896 : }
10450 :
10451 7112 : void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
10452 : ElementsKind elements_kind,
10453 : KeyedAccessStoreMode store_mode,
10454 : Label* bailout, Node* context) {
10455 : CSA_ASSERT(this, Word32BinaryNot(IsJSProxy(object)));
10456 :
10457 : Node* elements = LoadElements(object);
10458 7112 : if (!IsSmiOrObjectElementsKind(elements_kind)) {
10459 : CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10460 2688 : } else if (!IsCOWHandlingStoreMode(store_mode)) {
10461 4032 : GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
10462 : }
10463 :
10464 : // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
10465 : ParameterMode parameter_mode = INTPTR_PARAMETERS;
10466 7112 : TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);
10467 :
10468 7112 : if (IsFixedTypedArrayElementsKind(elements_kind)) {
10469 3080 : Label done(this);
10470 :
10471 : // IntegerIndexedElementSet converts value to a Number/BigInt prior to the
10472 : // bounds check.
10473 : value = PrepareValueForWriteToTypedArray(CAST(value), elements_kind,
10474 3080 : CAST(context));
10475 :
10476 : // There must be no allocations between the buffer load and
10477 : // and the actual store to backing store, because GC may decide that
10478 : // the buffer is not alive or move the elements.
10479 : // TODO(ishell): introduce DisallowHeapAllocationCode scope here.
10480 :
10481 : // Check if buffer has been detached.
10482 : Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
10483 6160 : GotoIf(IsDetachedBuffer(buffer), bailout);
10484 :
10485 : // Bounds check.
10486 : Node* length =
10487 : TaggedToParameter(LoadJSTypedArrayLength(CAST(object)), parameter_mode);
10488 :
10489 3080 : if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
10490 : // Skip the store if we write beyond the length or
10491 : // to a property with a negative integer index.
10492 1232 : GotoIfNot(UintPtrLessThan(intptr_key, length), &done);
10493 2464 : } else if (store_mode == STANDARD_STORE) {
10494 2464 : GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
10495 : } else {
10496 : // This case is produced due to the dispatched call in
10497 : // ElementsTransitionAndStore and StoreFastElement.
10498 : // TODO(jgruber): Avoid generating unsupported combinations to save code
10499 : // size.
10500 1232 : DebugBreak();
10501 : }
10502 :
10503 3080 : if (elements_kind == BIGINT64_ELEMENTS ||
10504 : elements_kind == BIGUINT64_ELEMENTS) {
10505 560 : TNode<BigInt> bigint_value = UncheckedCast<BigInt>(value);
10506 :
10507 : TNode<RawPtrT> backing_store =
10508 560 : LoadFixedTypedArrayBackingStore(CAST(elements));
10509 : TNode<IntPtrT> offset = ElementOffsetFromIndex(
10510 560 : intptr_key, BIGINT64_ELEMENTS, INTPTR_PARAMETERS, 0);
10511 : EmitBigTypedArrayElementStore(CAST(elements), backing_store, offset,
10512 560 : bigint_value);
10513 : } else {
10514 5040 : Node* backing_store = LoadFixedTypedArrayBackingStore(CAST(elements));
10515 : StoreElement(backing_store, elements_kind, intptr_key, value,
10516 2520 : parameter_mode);
10517 : }
10518 3080 : Goto(&done);
10519 :
10520 : BIND(&done);
10521 10192 : return;
10522 : }
10523 : DCHECK(IsFastElementsKind(elements_kind));
10524 :
10525 : Node* length =
10526 8064 : SelectImpl(IsJSArray(object), [=]() { return LoadJSArrayLength(object); },
10527 8064 : [=]() { return LoadFixedArrayBaseLength(elements); },
10528 12096 : MachineRepresentation::kTagged);
10529 : length = TaggedToParameter(length, parameter_mode);
10530 :
10531 : // In case value is stored into a fast smi array, assure that the value is
10532 : // a smi before manipulating the backing store. Otherwise the backing store
10533 : // may be left in an invalid state.
10534 4032 : if (IsSmiElementsKind(elements_kind)) {
10535 1344 : GotoIfNot(TaggedIsSmi(value), bailout);
10536 3360 : } else if (IsDoubleElementsKind(elements_kind)) {
10537 1344 : value = TryTaggedToFloat64(value, bailout);
10538 : }
10539 :
10540 4032 : if (IsGrowStoreMode(store_mode)) {
10541 : elements = CheckForCapacityGrow(object, elements, elements_kind, length,
10542 1008 : intptr_key, parameter_mode, bailout);
10543 : } else {
10544 6048 : GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
10545 : }
10546 :
10547 : // If we didn't grow {elements}, it might still be COW, in which case we
10548 : // copy it now.
10549 4032 : if (!IsSmiOrObjectElementsKind(elements_kind)) {
10550 : CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10551 2688 : } else if (IsCOWHandlingStoreMode(store_mode)) {
10552 : elements = CopyElementsOnWrite(object, elements, elements_kind, length,
10553 1344 : parameter_mode, bailout);
10554 : }
10555 :
10556 : CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10557 4032 : StoreElement(elements, elements_kind, intptr_key, value, parameter_mode);
10558 : }
10559 :
10560 1008 : Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
10561 : ElementsKind kind, Node* length,
10562 : Node* key, ParameterMode mode,
10563 : Label* bailout) {
10564 : DCHECK(IsFastElementsKind(kind));
10565 1008 : VARIABLE(checked_elements, MachineRepresentation::kTagged);
10566 1008 : Label grow_case(this), no_grow_case(this), done(this),
10567 1008 : grow_bailout(this, Label::kDeferred);
10568 :
10569 : Node* condition;
10570 1008 : if (IsHoleyElementsKind(kind)) {
10571 1344 : condition = UintPtrGreaterThanOrEqual(key, length);
10572 : } else {
10573 : // We don't support growing here unless the value is being appended.
10574 672 : condition = WordEqual(key, length);
10575 : }
10576 1008 : Branch(condition, &grow_case, &no_grow_case);
10577 :
10578 : BIND(&grow_case);
10579 : {
10580 : Node* current_capacity =
10581 2016 : TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
10582 1008 : checked_elements.Bind(elements);
10583 : Label fits_capacity(this);
10584 : // If key is negative, we will notice in Runtime::kGrowArrayElements.
10585 2016 : GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
10586 :
10587 : {
10588 : Node* new_elements = TryGrowElementsCapacity(
10589 1008 : object, elements, kind, key, current_capacity, mode, &grow_bailout);
10590 1008 : checked_elements.Bind(new_elements);
10591 1008 : Goto(&fits_capacity);
10592 : }
10593 :
10594 : BIND(&grow_bailout);
10595 : {
10596 : Node* tagged_key = mode == SMI_PARAMETERS
10597 : ? key
10598 4032 : : ChangeInt32ToTagged(TruncateIntPtrToInt32(key));
10599 : Node* maybe_elements = CallRuntime(
10600 : Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
10601 2016 : GotoIf(TaggedIsSmi(maybe_elements), bailout);
10602 : CSA_ASSERT(this, IsFixedArrayWithKind(maybe_elements, kind));
10603 1008 : checked_elements.Bind(maybe_elements);
10604 1008 : Goto(&fits_capacity);
10605 : }
10606 :
10607 : BIND(&fits_capacity);
10608 2016 : GotoIfNot(IsJSArray(object), &done);
10609 :
10610 3024 : Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
10611 : StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
10612 : ParameterToTagged(new_length, mode));
10613 1008 : Goto(&done);
10614 : }
10615 :
10616 : BIND(&no_grow_case);
10617 : {
10618 2016 : GotoIfNot(UintPtrLessThan(key, length), bailout);
10619 1008 : checked_elements.Bind(elements);
10620 1008 : Goto(&done);
10621 : }
10622 :
10623 : BIND(&done);
10624 2016 : return checked_elements.value();
10625 : }
10626 :
10627 1344 : Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
10628 : ElementsKind kind, Node* length,
10629 : ParameterMode mode,
10630 : Label* bailout) {
10631 1344 : VARIABLE(new_elements_var, MachineRepresentation::kTagged, elements);
10632 1344 : Label done(this);
10633 :
10634 4032 : GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
10635 : {
10636 : Node* capacity =
10637 2688 : TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
10638 : Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
10639 1344 : length, capacity, mode, bailout);
10640 1344 : new_elements_var.Bind(new_elements);
10641 1344 : Goto(&done);
10642 : }
10643 :
10644 : BIND(&done);
10645 2688 : return new_elements_var.value();
10646 : }
10647 :
10648 2800 : void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
10649 : ElementsKind from_kind,
10650 : ElementsKind to_kind,
10651 : Label* bailout) {
10652 : DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
10653 2800 : if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
10654 1680 : TrapAllocationMemento(object, bailout);
10655 : }
10656 :
10657 2800 : if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
10658 1456 : Comment("Non-simple map transition");
10659 : Node* elements = LoadElements(object);
10660 :
10661 : Label done(this);
10662 1456 : GotoIf(WordEqual(elements, EmptyFixedArrayConstant()), &done);
10663 :
10664 : // TODO(ishell): Use OptimalParameterMode().
10665 : ParameterMode mode = INTPTR_PARAMETERS;
10666 4368 : Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
10667 : Node* array_length = SelectImpl(
10668 : IsJSArray(object),
10669 1456 : [=]() {
10670 : CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(object)));
10671 2912 : return SmiUntag(LoadFastJSArrayLength(object));
10672 : },
10673 1456 : [=]() { return elements_length; },
10674 4368 : MachineType::PointerRepresentation());
10675 :
10676 : CSA_ASSERT(this, WordNotEqual(elements_length, IntPtrConstant(0)));
10677 :
10678 : GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
10679 1456 : elements_length, mode, bailout);
10680 1456 : Goto(&done);
10681 1456 : BIND(&done);
10682 : }
10683 :
10684 : StoreMap(object, map);
10685 2800 : }
10686 :
10687 2856 : void CodeStubAssembler::TrapAllocationMemento(Node* object,
10688 : Label* memento_found) {
10689 2856 : Comment("[ TrapAllocationMemento");
10690 : Label no_memento_found(this);
10691 2856 : Label top_check(this), map_check(this);
10692 :
10693 : TNode<ExternalReference> new_space_top_address = ExternalConstant(
10694 2856 : ExternalReference::new_space_allocation_top_address(isolate()));
10695 : const int kMementoMapOffset = JSArray::kSize;
10696 : const int kMementoLastWordOffset =
10697 : kMementoMapOffset + AllocationMemento::kSize - kTaggedSize;
10698 :
10699 : // Bail out if the object is not in new space.
10700 2856 : TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
10701 2856 : TNode<IntPtrT> object_page = PageFromAddress(object_word);
10702 : {
10703 : TNode<IntPtrT> page_flags =
10704 : UncheckedCast<IntPtrT>(Load(MachineType::IntPtr(), object_page,
10705 5712 : IntPtrConstant(Page::kFlagsOffset)));
10706 : GotoIf(WordEqual(WordAnd(page_flags,
10707 2856 : IntPtrConstant(MemoryChunk::kIsInNewSpaceMask)),
10708 5712 : IntPtrConstant(0)),
10709 5712 : &no_memento_found);
10710 : }
10711 :
10712 : TNode<IntPtrT> memento_last_word = IntPtrAdd(
10713 2856 : object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
10714 2856 : TNode<IntPtrT> memento_last_word_page = PageFromAddress(memento_last_word);
10715 :
10716 : TNode<IntPtrT> new_space_top = UncheckedCast<IntPtrT>(
10717 2856 : Load(MachineType::Pointer(), new_space_top_address));
10718 2856 : TNode<IntPtrT> new_space_top_page = PageFromAddress(new_space_top);
10719 :
10720 : // If the object is in new space, we need to check whether respective
10721 : // potential memento object is on the same page as the current top.
10722 5712 : GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);
10723 :
10724 : // The object is on a different page than allocation top. Bail out if the
10725 : // object sits on the page boundary as no memento can follow and we cannot
10726 : // touch the memory following it.
10727 2856 : Branch(WordEqual(object_page, memento_last_word_page), &map_check,
10728 5712 : &no_memento_found);
10729 :
10730 : // If top is on the same page as the current object, we need to check whether
10731 : // we are below top.
10732 : BIND(&top_check);
10733 : {
10734 2856 : Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
10735 5712 : &no_memento_found, &map_check);
10736 : }
10737 :
10738 : // Memento map check.
10739 : BIND(&map_check);
10740 : {
10741 : TNode<Object> memento_map = LoadObjectField(object, kMementoMapOffset);
10742 2856 : Branch(WordEqual(memento_map, LoadRoot(RootIndex::kAllocationMementoMap)),
10743 2856 : memento_found, &no_memento_found);
10744 : }
10745 : BIND(&no_memento_found);
10746 5712 : Comment("] TrapAllocationMemento");
10747 2856 : }
10748 :
10749 11917 : TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) {
10750 23834 : return WordAnd(address, IntPtrConstant(~kPageAlignmentMask));
10751 : }
10752 :
10753 392 : TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
10754 : SloppyTNode<FeedbackVector> feedback_vector, TNode<Smi> slot) {
 : // Allocates a pretenured AllocationSite (with weak-next map), initializes
 : // its fields to match AllocationSite::Initialize, links it into the
 : // isolate's allocation-sites list, and stores it into |feedback_vector|
 : // at the Smi-indexed |slot|. Returns the newly created site.
10755 392 : TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
10756 784 : Node* site = Allocate(size, CodeStubAssembler::kPretenured);
10757 392 : StoreMapNoWriteBarrier(site, RootIndex::kAllocationSiteWithWeakNextMap);
10758 : // Should match AllocationSite::Initialize.
 : // Transition info starts as a Smi encoding the initial fast elements
 : // kind; this slot later may hold a boilerplate object instead.
10759 : TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
10760 1176 : IntPtrConstant(0), IntPtrConstant(GetInitialFastElementsKind()));
10761 : StoreObjectFieldNoWriteBarrier(
10762 : site, AllocationSite::kTransitionInfoOrBoilerplateOffset,
10763 784 : SmiTag(Signed(field)));
10764 :
10765 : // Unlike literals, constructed arrays don't have nested sites
10766 392 : TNode<Smi> zero = SmiConstant(0);
10767 : StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
10768 :
10769 : // Pretenuring calculation field.
10770 : StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
10771 : Int32Constant(0),
10772 784 : MachineRepresentation::kWord32);
10773 :
10774 : // Pretenuring memento creation count field.
10775 : StoreObjectFieldNoWriteBarrier(
10776 : site, AllocationSite::kPretenureCreateCountOffset, Int32Constant(0),
10777 784 : MachineRepresentation::kWord32);
10778 :
10779 : // Store an empty fixed array for the code dependency.
10780 : StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
10781 392 : RootIndex::kEmptyWeakFixedArray);
10782 :
10783 : // Link the object to the allocation site list
10784 : TNode<ExternalReference> site_list = ExternalConstant(
10785 392 : ExternalReference::allocation_sites_list_address(isolate()));
10786 392 : TNode<Object> next_site = CAST(LoadBufferObject(site_list, 0));
10787 :
10788 : // TODO(mvstanton): This is a store to a weak pointer, which we may want to
10789 : // mark as such in order to skip the write barrier, once we have a unified
10790 : // system for weakness. For now we decided to keep it like this because having
10791 : // an initial write barrier backed store makes this pointer strong until the
10792 : // next GC, and allocation sites are designed to survive several GCs anyway.
10793 : StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
 : // The site-list head is off-heap, so no write barrier is needed here.
10794 392 : StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);
10795 :
10796 : StoreFeedbackVectorSlot(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
10797 392 : SMI_PARAMETERS);
10798 392 : return CAST(site);
10799 : }
10800 :
10801 2240 : TNode<MaybeObject> CodeStubAssembler::StoreWeakReferenceInFeedbackVector(
10802 : SloppyTNode<FeedbackVector> feedback_vector, Node* slot,
10803 : SloppyTNode<HeapObject> value, int additional_offset,
10804 : ParameterMode parameter_mode) {
 : // Wraps |value| as a weak reference and stores it (with write barrier)
 : // into the feedback vector slot. Returns the weak reference so callers
 : // can reuse it without re-wrapping.
10805 2240 : TNode<MaybeObject> weak_value = MakeWeak(value);
10806 : StoreFeedbackVectorSlot(feedback_vector, slot, weak_value,
10807 : UPDATE_WRITE_BARRIER, additional_offset,
10808 2240 : parameter_mode);
10809 2240 : return weak_value;
10810 : }
10811 :
10812 672 : TNode<BoolT> CodeStubAssembler::NotHasBoilerplate(
10813 : TNode<Object> maybe_literal_site) {
 : // A Smi in the literal-site field means the slot still holds transition
 : // info, i.e. no boilerplate object has been created yet.
10814 672 : return TaggedIsSmi(maybe_literal_site);
10815 : }
10816 :
10817 336 : TNode<Smi> CodeStubAssembler::LoadTransitionInfo(
10818 : TNode<AllocationSite> allocation_site) {
 : // Reads the combined transition-info/boilerplate field; the CAST asserts
 : // that the field currently holds transition info (a Smi), not a
 : // boilerplate object.
10819 : TNode<Smi> transition_info = CAST(LoadObjectField(
10820 : allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10821 336 : return transition_info;
10822 : }
10823 :
10824 448 : TNode<JSObject> CodeStubAssembler::LoadBoilerplate(
10825 : TNode<AllocationSite> allocation_site) {
 : // Reads the combined transition-info/boilerplate field; the CAST asserts
 : // that the field currently holds a boilerplate JSObject (not a Smi of
 : // transition info).
10826 : TNode<JSObject> boilerplate = CAST(LoadObjectField(
10827 : allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10828 448 : return boilerplate;
10829 : }
10830 :
10831 280 : TNode<Int32T> CodeStubAssembler::LoadElementsKind(
10832 : TNode<AllocationSite> allocation_site) {
10833 280 : TNode<Smi> transition_info = LoadTransitionInfo(allocation_site);
 : // The elements kind is packed into the transition-info Smi via the
 : // AllocationSite::ElementsKindBits bit field.
10834 : TNode<Int32T> elements_kind =
10835 : Signed(DecodeWord32<AllocationSite::ElementsKindBits>(
10836 560 : SmiToInt32(transition_info)));
10837 : CSA_ASSERT(this, IsFastElementsKind(elements_kind));
10838 280 : return elements_kind;
10839 : }
10840 :
10841 36268 : Node* CodeStubAssembler::BuildFastLoop(
10842 : const CodeStubAssembler::VariableList& vars, Node* start_index,
10843 : Node* end_index, const FastLoopBody& body, int increment,
10844 : ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
 : // Emits a counted loop from |start_index| to |end_index| (exclusive),
 : // invoking |body| with the current index each iteration and advancing by
 : // |increment| either before (kPre) or after (kPost) the body. |vars| are
 : // the variables live across loop iterations. Returns the final index
 : // value after the loop exits.
10845 : CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
10846 : CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
10847 : MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
10848 : ? MachineType::PointerRepresentation()
10849 36268 : : MachineRepresentation::kTaggedSigned;
10850 36268 : VARIABLE(var, index_rep, start_index);
10851 36268 : VariableList vars_copy(vars.begin(), vars.end(), zone());
10852 72536 : vars_copy.push_back(&var);
10853 36268 : Label loop(this, vars_copy);
10854 36268 : Label after_loop(this);
10855 : // Introduce an explicit second check of the termination condition before the
10856 : // loop that helps turbofan generate better code. If there's only a single
10857 : // check, then the CodeStubAssembler forces it to be at the beginning of the
10858 : // loop requiring a backwards branch at the end of the loop (it's not possible
10859 : // to force the loop header check at the end of the loop and branch forward to
10860 : // it from the pre-header). The extra branch is slower in the case that the
10861 : // loop actually iterates.
10862 108804 : Node* first_check = WordEqual(var.value(), end_index);
10863 : int32_t first_check_val;
 : // If the pre-header check folds to a constant, skip emitting the dead
 : // branch entirely (and skip the loop when it is statically empty).
10864 36268 : if (ToInt32Constant(first_check, first_check_val)) {
10865 86 : if (first_check_val) return var.value();
10866 76 : Goto(&loop);
10867 : } else {
10868 36182 : Branch(first_check, &after_loop, &loop);
10869 : }
10870 :
10871 : BIND(&loop);
10872 : {
10873 36258 : if (advance_mode == IndexAdvanceMode::kPre) {
10874 20621 : Increment(&var, increment, parameter_mode);
10875 : }
10876 36258 : body(var.value());
10877 36258 : if (advance_mode == IndexAdvanceMode::kPost) {
10878 15637 : Increment(&var, increment, parameter_mode);
10879 : }
10880 108774 : Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
10881 : }
10882 : BIND(&after_loop);
10883 72526 : return var.value();
10884 : }
10885 :
10886 17815 : void CodeStubAssembler::BuildFastFixedArrayForEach(
10887 : const CodeStubAssembler::VariableList& vars, Node* fixed_array,
10888 : ElementsKind kind, Node* first_element_inclusive,
10889 : Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
10890 : ParameterMode mode, ForEachDirection direction) {
 : // Iterates over the elements [first_element_inclusive,
 : // last_element_exclusive) of |fixed_array|, calling |body| with the array
 : // and the byte offset of each element, in the given |direction|. When
 : // both bounds are compile-time constants and the range is small, the loop
 : // is fully unrolled; otherwise a BuildFastLoop over byte offsets is
 : // emitted.
10891 : STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
10892 : CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
10893 : CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
10894 : CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(fixed_array, kind),
10895 : IsPropertyArray(fixed_array)));
10896 : int32_t first_val;
10897 17815 : bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
10898 : int32_t last_val;
 : // Renamed from the misspelled "constent_last".
10899 17815 : bool constant_last = ToInt32Constant(last_element_exclusive, last_val);
10900 17815 : if (constant_first && constant_last) {
10901 702 : int delta = last_val - first_val;
10902 : DCHECK_GE(delta, 0);
 : // Small constant ranges are unrolled into straight-line code.
10903 702 : if (delta <= kElementLoopUnrollThreshold) {
10904 646 : if (direction == ForEachDirection::kForward) {
10905 30 : for (int i = first_val; i < last_val; ++i) {
10906 60 : Node* index = IntPtrConstant(i);
10907 : Node* offset =
10908 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
10909 60 : FixedArray::kHeaderSize - kHeapObjectTag);
10910 30 : body(fixed_array, offset);
10911 : }
10912 : } else {
10913 2759 : for (int i = last_val - 1; i >= first_val; --i) {
10914 4256 : Node* index = IntPtrConstant(i);
10915 : Node* offset =
10916 : ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
10917 4256 : FixedArray::kHeaderSize - kHeapObjectTag);
10918 2128 : body(fixed_array, offset);
10919 : }
10920 : }
10921 646 : return;
10922 : }
10923 : }
10924 :
 : // General case: loop over byte offsets into the array's element area.
10925 : Node* start =
10926 : ElementOffsetFromIndex(first_element_inclusive, kind, mode,
10927 34338 : FixedArray::kHeaderSize - kHeapObjectTag);
10928 : Node* limit =
10929 : ElementOffsetFromIndex(last_element_exclusive, kind, mode,
10930 34338 : FixedArray::kHeaderSize - kHeapObjectTag);
10931 17169 : if (direction == ForEachDirection::kReverse) std::swap(start, limit);
10932 :
10933 : int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kTaggedSize;
10934 : BuildFastLoop(
10935 : vars, start, limit,
10936 17159 : [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
10937 : direction == ForEachDirection::kReverse ? -increment : increment,
10938 : INTPTR_PARAMETERS,
10939 : direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
10940 51507 : : IndexAdvanceMode::kPost);
10941 : }
10942 :
10943 2662 : void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
10944 : Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
 : // Jumps to |doesnt_fit| when a fixed array of |element_count| elements
 : // (plus |base_size| bytes of header) would exceed the new-space
 : // allocation limit; otherwise falls through.
10945 : GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size, mode),
10946 5324 : doesnt_fit);
10947 2662 : }
10948 :
10949 3589 : void CodeStubAssembler::InitializeFieldsWithRoot(Node* object,
10950 : Node* start_offset,
10951 : Node* end_offset,
10952 : RootIndex root_index) {
 : // Fills the tagged fields of |object| in [start_offset, end_offset) with
 : // the root value at |root_index|, iterating backwards from end to start.
 : // Roots are immortal, so the stores need no write barrier.
10953 : CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
 : // Convert tagged-object offsets to raw addresses-relative offsets.
10954 10767 : start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
10955 10767 : end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
10956 7178 : Node* root_value = LoadRoot(root_index);
10957 : BuildFastLoop(
10958 : end_offset, start_offset,
10959 : [this, object, root_value](Node* current) {
10960 : StoreNoWriteBarrier(MachineRepresentation::kTagged, object, current,
10961 3589 : root_value);
10962 : },
10963 : -kTaggedSize, INTPTR_PARAMETERS,
10964 7178 : CodeStubAssembler::IndexAdvanceMode::kPre);
10965 3589 : }
10966 :
10967 8369 : void CodeStubAssembler::BranchIfNumberRelationalComparison(
10968 : Operation op, Node* left, Node* right, Label* if_true, Label* if_false) {
 : // Compares two Numbers (Smi or HeapNumber; asserted below) with |op| and
 : // branches to |if_true| / |if_false|. Smi-vs-Smi pairs take a fast
 : // integer comparison; any HeapNumber operand routes both values through
 : // the shared float64 comparison at the end.
10969 : CSA_SLOW_ASSERT(this, IsNumber(left));
10970 : CSA_SLOW_ASSERT(this, IsNumber(right));
10971 :
10972 8369 : Label do_float_comparison(this);
10973 : TVARIABLE(Float64T, var_left_float);
10974 : TVARIABLE(Float64T, var_right_float);
10975 :
10976 : Branch(TaggedIsSmi(left),
10977 8369 : [&] {
10978 8369 : TNode<Smi> smi_left = CAST(left);
10979 :
10980 : Branch(TaggedIsSmi(right),
10981 8369 : [&] {
10982 8369 : TNode<Smi> smi_right = CAST(right);
10983 :
10984 : // Both {left} and {right} are Smi, so just perform a fast
10985 : // Smi comparison.
10986 8369 : switch (op) {
10987 : case Operation::kEqual:
10988 : BranchIfSmiEqual(smi_left, smi_right, if_true,
10989 280 : if_false);
10990 280 : break;
10991 : case Operation::kLessThan:
10992 : BranchIfSmiLessThan(smi_left, smi_right, if_true,
10993 2800 : if_false);
10994 2800 : break;
10995 : case Operation::kLessThanOrEqual:
10996 : BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true,
10997 0 : if_false);
10998 0 : break;
10999 : case Operation::kGreaterThan:
 : // a > b is implemented as b < a (operands swapped).
11000 : BranchIfSmiLessThan(smi_right, smi_left, if_true,
11001 2128 : if_false);
11002 2128 : break;
11003 : case Operation::kGreaterThanOrEqual:
11004 : BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true,
11005 3161 : if_false);
11006 3161 : break;
11007 : default:
11008 0 : UNREACHABLE();
11009 : }
11010 8369 : },
11011 8369 : [&] {
11012 : CSA_ASSERT(this, IsHeapNumber(right));
11013 25107 : var_left_float = SmiToFloat64(smi_left);
11014 25107 : var_right_float = LoadHeapNumberValue(right);
11015 8369 : Goto(&do_float_comparison);
11016 58583 : });
11017 8369 : },
11018 8369 : [&] {
11019 : CSA_ASSERT(this, IsHeapNumber(left));
11020 25107 : var_left_float = LoadHeapNumberValue(left);
11021 :
11022 : Branch(TaggedIsSmi(right),
11023 8369 : [&] {
11024 25107 : var_right_float = SmiToFloat64(right);
11025 8369 : Goto(&do_float_comparison);
11026 8369 : },
11027 8369 : [&] {
11028 : CSA_ASSERT(this, IsHeapNumber(right));
11029 25107 : var_right_float = LoadHeapNumberValue(right);
11030 8369 : Goto(&do_float_comparison);
11031 58583 : });
11032 41845 : });
11033 :
 : // Shared tail: both operands have been converted to float64.
11034 : BIND(&do_float_comparison);
11035 : {
11036 8369 : switch (op) {
11037 : case Operation::kEqual:
11038 280 : Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
11039 840 : if_true, if_false);
11040 280 : break;
11041 : case Operation::kLessThan:
11042 2800 : Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
11043 8400 : if_true, if_false);
11044 2800 : break;
11045 : case Operation::kLessThanOrEqual:
11046 : Branch(Float64LessThanOrEqual(var_left_float.value(),
11047 0 : var_right_float.value()),
11048 0 : if_true, if_false);
11049 0 : break;
11050 : case Operation::kGreaterThan:
11051 : Branch(
11052 2128 : Float64GreaterThan(var_left_float.value(), var_right_float.value()),
11053 6384 : if_true, if_false);
11054 2128 : break;
11055 : case Operation::kGreaterThanOrEqual:
11056 : Branch(Float64GreaterThanOrEqual(var_left_float.value(),
11057 3161 : var_right_float.value()),
11058 9483 : if_true, if_false);
11059 3161 : break;
11060 : default:
11061 0 : UNREACHABLE();
11062 : }
11063 8369 : }
11064 8369 : }
11065 :
11066 2708 : void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(Node* left, Node* right,
11067 : Label* if_true) {
 : // Jumps to |if_true| when left >= right (Number comparison); otherwise
 : // falls through via the local |if_false| label.
11068 2708 : Label if_false(this);
11069 : BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
11070 2708 : right, if_true, &if_false);
11071 2708 : BIND(&if_false);
11072 2708 : }
11073 :
11074 : namespace {
 : // Returns the comparison that is equivalent to |op| with its operands
 : // swapped (e.g. a < b == b > a). Used when a runtime helper takes its
 : // arguments in the opposite order. Only valid for relational ops.
11075 2688 : Operation Reverse(Operation op) {
11076 2688 : switch (op) {
11077 : case Operation::kLessThan:
11078 : return Operation::kGreaterThan;
11079 : case Operation::kLessThanOrEqual:
11080 672 : return Operation::kGreaterThanOrEqual;
11081 : case Operation::kGreaterThan:
11082 672 : return Operation::kLessThan;
11083 : case Operation::kGreaterThanOrEqual:
11084 672 : return Operation::kLessThanOrEqual;
11085 : default:
11086 : break;
11087 : }
11088 0 : UNREACHABLE();
11089 : }
11090 : } // anonymous namespace
11091 :
11092 896 : Node* CodeStubAssembler::RelationalComparison(Operation op, Node* left,
11093 : Node* right, Node* context,
11094 : Variable* var_type_feedback) {
 : // Implements the ECMAScript abstract relational comparison for <, <=, >,
 : // >=, returning the "true" or "false" Oddball. Dispatches on the operand
 : // types (Smi, HeapNumber, BigInt, String, other), looping after
 : // ToPrimitive/ToNumeric conversions until both operands are comparable.
 : // When |var_type_feedback| is non-null it accumulates
 : // CompareOperationFeedback for the interpreter/optimizer.
11095 1792 : Label return_true(this), return_false(this), do_float_comparison(this),
11096 896 : end(this);
11097 : TVARIABLE(Oddball, var_result); // Actually only "true" or "false".
11098 : TVARIABLE(Float64T, var_left_float);
11099 : TVARIABLE(Float64T, var_right_float);
11100 :
11101 : // We might need to loop several times due to ToPrimitive and/or ToNumeric
11102 : // conversions.
11103 1792 : VARIABLE(var_left, MachineRepresentation::kTagged, left);
11104 1792 : VARIABLE(var_right, MachineRepresentation::kTagged, right);
11105 1792 : VariableList loop_variable_list({&var_left, &var_right}, zone());
11106 896 : if (var_type_feedback != nullptr) {
11107 : // Initialize the type feedback to None. The current feedback is combined
11108 : // with the previous feedback.
11109 672 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
11110 672 : loop_variable_list.push_back(var_type_feedback);
11111 : }
11112 896 : Label loop(this, loop_variable_list);
11113 896 : Goto(&loop);
11114 : BIND(&loop);
11115 : {
11116 896 : left = var_left.value();
11117 896 : right = var_right.value();
11118 :
11119 896 : Label if_left_smi(this), if_left_not_smi(this);
11120 1792 : Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11121 :
11122 : BIND(&if_left_smi);
11123 : {
11124 : TNode<Smi> smi_left = CAST(left);
11125 896 : Label if_right_smi(this), if_right_heapnumber(this),
11126 896 : if_right_bigint(this, Label::kDeferred),
11127 896 : if_right_not_numeric(this, Label::kDeferred);
11128 1792 : GotoIf(TaggedIsSmi(right), &if_right_smi);
11129 1792 : Node* right_map = LoadMap(right);
11130 1792 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11131 1792 : Node* right_instance_type = LoadMapInstanceType(right_map);
11132 : Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11133 896 : &if_right_not_numeric);
11134 :
11135 : BIND(&if_right_smi);
11136 : {
11137 896 : TNode<Smi> smi_right = CAST(right);
11138 : CombineFeedback(var_type_feedback,
11139 896 : CompareOperationFeedback::kSignedSmall);
 : // Smi vs Smi: direct integer comparison; > and >= swap operands.
11140 896 : switch (op) {
11141 : case Operation::kLessThan:
11142 : BranchIfSmiLessThan(smi_left, smi_right, &return_true,
11143 224 : &return_false);
11144 224 : break;
11145 : case Operation::kLessThanOrEqual:
11146 : BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
11147 224 : &return_false);
11148 224 : break;
11149 : case Operation::kGreaterThan:
11150 : BranchIfSmiLessThan(smi_right, smi_left, &return_true,
11151 224 : &return_false);
11152 224 : break;
11153 : case Operation::kGreaterThanOrEqual:
11154 : BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
11155 224 : &return_false);
11156 224 : break;
11157 : default:
11158 0 : UNREACHABLE();
11159 : }
11160 : }
11161 :
11162 : BIND(&if_right_heapnumber);
11163 : {
11164 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11165 1792 : var_left_float = SmiToFloat64(smi_left);
11166 1792 : var_right_float = LoadHeapNumberValue(right);
11167 896 : Goto(&do_float_comparison);
11168 : }
11169 :
11170 : BIND(&if_right_bigint);
11171 : {
11172 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
 : // Runtime helper takes (BigInt, Number), so pass Reverse(op) with
 : // the operands swapped.
11173 896 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11174 : NoContextConstant(),
11175 : SmiConstant(Reverse(op)), right, left));
11176 896 : Goto(&end);
11177 : }
11178 :
11179 : BIND(&if_right_not_numeric);
11180 : {
11181 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11182 : // Convert {right} to a Numeric; we don't need to perform the
11183 : // dedicated ToPrimitive(right, hint Number) operation, as the
11184 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11185 : // a Number hint.
11186 : var_right.Bind(
11187 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11188 896 : Goto(&loop);
11189 896 : }
11190 : }
11191 :
11192 : BIND(&if_left_not_smi);
11193 : {
11194 1792 : Node* left_map = LoadMap(left);
11195 :
11196 896 : Label if_right_smi(this), if_right_not_smi(this);
11197 1792 : Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11198 :
11199 : BIND(&if_right_smi);
11200 : {
11201 896 : Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11202 896 : if_left_not_numeric(this, Label::kDeferred);
11203 1792 : GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11204 1792 : Node* left_instance_type = LoadMapInstanceType(left_map);
11205 : Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
11206 896 : &if_left_not_numeric);
11207 :
11208 : BIND(&if_left_heapnumber);
11209 : {
11210 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11211 1792 : var_left_float = LoadHeapNumberValue(left);
11212 1792 : var_right_float = SmiToFloat64(right);
11213 896 : Goto(&do_float_comparison);
11214 : }
11215 :
11216 : BIND(&if_left_bigint);
11217 : {
11218 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11219 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11220 : NoContextConstant(), SmiConstant(op),
11221 : left, right));
11222 896 : Goto(&end);
11223 : }
11224 :
11225 : BIND(&if_left_not_numeric);
11226 : {
11227 896 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11228 : // Convert {left} to a Numeric; we don't need to perform the
11229 : // dedicated ToPrimitive(left, hint Number) operation, as the
11230 : // ToNumeric(left) will by itself already invoke ToPrimitive with
11231 : // a Number hint.
11232 : var_left.Bind(
11233 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11234 896 : Goto(&loop);
11235 896 : }
11236 : }
11237 :
11238 : BIND(&if_right_not_smi);
11239 : {
11240 1792 : Node* right_map = LoadMap(right);
11241 :
11242 896 : Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11243 896 : if_left_string(this), if_left_other(this, Label::kDeferred);
11244 1792 : GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11245 1792 : Node* left_instance_type = LoadMapInstanceType(left_map);
11246 896 : GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
11247 896 : Branch(IsStringInstanceType(left_instance_type), &if_left_string,
11248 1792 : &if_left_other);
11249 :
11250 : BIND(&if_left_heapnumber);
11251 : {
11252 : Label if_right_heapnumber(this),
11253 896 : if_right_bigint(this, Label::kDeferred),
11254 896 : if_right_not_numeric(this, Label::kDeferred);
11255 1792 : GotoIf(WordEqual(right_map, left_map), &if_right_heapnumber);
11256 1792 : Node* right_instance_type = LoadMapInstanceType(right_map);
11257 : Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11258 896 : &if_right_not_numeric);
11259 :
11260 : BIND(&if_right_heapnumber);
11261 : {
11262 : CombineFeedback(var_type_feedback,
11263 896 : CompareOperationFeedback::kNumber);
11264 1792 : var_left_float = LoadHeapNumberValue(left);
11265 1792 : var_right_float = LoadHeapNumberValue(right);
11266 896 : Goto(&do_float_comparison);
11267 : }
11268 :
11269 : BIND(&if_right_bigint);
11270 : {
11271 : OverwriteFeedback(var_type_feedback,
11272 896 : CompareOperationFeedback::kAny);
11273 896 : var_result = CAST(CallRuntime(
11274 : Runtime::kBigIntCompareToNumber, NoContextConstant(),
11275 : SmiConstant(Reverse(op)), right, left));
11276 896 : Goto(&end);
11277 : }
11278 :
11279 : BIND(&if_right_not_numeric);
11280 : {
11281 : OverwriteFeedback(var_type_feedback,
11282 896 : CompareOperationFeedback::kAny);
11283 : // Convert {right} to a Numeric; we don't need to perform
11284 : // dedicated ToPrimitive(right, hint Number) operation, as the
11285 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11286 : // a Number hint.
11287 : var_right.Bind(
11288 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11289 896 : Goto(&loop);
11290 896 : }
11291 : }
11292 :
11293 : BIND(&if_left_bigint);
11294 : {
11295 896 : Label if_right_heapnumber(this), if_right_bigint(this),
11296 896 : if_right_string(this), if_right_other(this);
11297 1792 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11298 1792 : Node* right_instance_type = LoadMapInstanceType(right_map);
11299 896 : GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11300 896 : Branch(IsStringInstanceType(right_instance_type), &if_right_string,
11301 1792 : &if_right_other);
11302 :
11303 : BIND(&if_right_heapnumber);
11304 : {
11305 : OverwriteFeedback(var_type_feedback,
11306 896 : CompareOperationFeedback::kAny);
11307 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11308 : NoContextConstant(), SmiConstant(op),
11309 : left, right));
11310 896 : Goto(&end);
11311 : }
11312 :
11313 : BIND(&if_right_bigint);
11314 : {
11315 : CombineFeedback(var_type_feedback,
11316 896 : CompareOperationFeedback::kBigInt);
11317 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToBigInt,
11318 : NoContextConstant(), SmiConstant(op),
11319 : left, right));
11320 896 : Goto(&end);
11321 : }
11322 :
11323 : BIND(&if_right_string);
11324 : {
11325 : OverwriteFeedback(var_type_feedback,
11326 896 : CompareOperationFeedback::kAny);
11327 : var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,
11328 : NoContextConstant(), SmiConstant(op),
11329 : left, right));
11330 896 : Goto(&end);
11331 : }
11332 :
11333 : // {right} is not a Number, BigInt, or String.
11334 : BIND(&if_right_other);
11335 : {
11336 : OverwriteFeedback(var_type_feedback,
11337 896 : CompareOperationFeedback::kAny);
11338 : // Convert {right} to a Numeric; we don't need to perform
11339 : // dedicated ToPrimitive(right, hint Number) operation, as the
11340 : // ToNumeric(right) will by itself already invoke ToPrimitive with
11341 : // a Number hint.
11342 : var_right.Bind(
11343 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11344 896 : Goto(&loop);
11345 896 : }
11346 : }
11347 :
11348 : BIND(&if_left_string);
11349 : {
11350 1792 : Node* right_instance_type = LoadMapInstanceType(right_map);
11351 :
11352 : Label if_right_not_string(this, Label::kDeferred);
11353 896 : GotoIfNot(IsStringInstanceType(right_instance_type),
11354 1792 : &if_right_not_string);
11355 :
11356 : // Both {left} and {right} are strings.
11357 896 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
11358 : Builtins::Name builtin;
11359 896 : switch (op) {
11360 : case Operation::kLessThan:
11361 : builtin = Builtins::kStringLessThan;
11362 : break;
11363 : case Operation::kLessThanOrEqual:
11364 : builtin = Builtins::kStringLessThanOrEqual;
11365 224 : break;
11366 : case Operation::kGreaterThan:
11367 : builtin = Builtins::kStringGreaterThan;
11368 224 : break;
11369 : case Operation::kGreaterThanOrEqual:
11370 : builtin = Builtins::kStringGreaterThanOrEqual;
11371 224 : break;
11372 : default:
11373 0 : UNREACHABLE();
11374 : }
11375 1792 : var_result = CAST(CallBuiltin(builtin, context, left, right));
11376 896 : Goto(&end);
11377 :
11378 : BIND(&if_right_not_string);
11379 : {
11380 : OverwriteFeedback(var_type_feedback,
11381 896 : CompareOperationFeedback::kAny);
11382 : // {left} is a String, while {right} isn't. Check if {right} is
11383 : // a BigInt, otherwise call ToPrimitive(right, hint Number) if
11384 : // {right} is a receiver, or ToNumeric(left) and then
11385 : // ToNumeric(right) in the other cases.
11386 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11387 : Label if_right_bigint(this),
11388 896 : if_right_receiver(this, Label::kDeferred);
11389 896 : GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11390 896 : GotoIf(IsJSReceiverInstanceType(right_instance_type),
11391 1792 : &if_right_receiver);
11392 :
11393 : var_left.Bind(
11394 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11395 1792 : var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11396 896 : Goto(&loop);
11397 :
11398 : BIND(&if_right_bigint);
11399 : {
11400 896 : var_result = CAST(CallRuntime(
11401 : Runtime::kBigIntCompareToString, NoContextConstant(),
11402 : SmiConstant(Reverse(op)), right, left));
11403 896 : Goto(&end);
11404 : }
11405 :
11406 : BIND(&if_right_receiver);
11407 : {
11408 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11409 896 : isolate(), ToPrimitiveHint::kNumber);
11410 1792 : var_right.Bind(CallStub(callable, context, right));
11411 896 : Goto(&loop);
11412 896 : }
11413 896 : }
11414 : }
11415 :
11416 : BIND(&if_left_other);
11417 : {
11418 : // {left} is neither a Numeric nor a String, and {right} is not a Smi.
11419 896 : if (var_type_feedback != nullptr) {
11420 : // Collect NumberOrOddball feedback if {left} is an Oddball
11421 : // and {right} is either a HeapNumber or Oddball. Otherwise collect
11422 : // Any feedback.
11423 672 : Label collect_any_feedback(this), collect_oddball_feedback(this),
11424 672 : collect_feedback_done(this);
11425 672 : GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
11426 1344 : &collect_any_feedback);
11427 :
11428 1344 : GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
11429 1344 : Node* right_instance_type = LoadMapInstanceType(right_map);
11430 672 : Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
11431 1344 : &collect_oddball_feedback, &collect_any_feedback);
11432 :
11433 : BIND(&collect_oddball_feedback);
11434 : {
11435 : CombineFeedback(var_type_feedback,
11436 672 : CompareOperationFeedback::kNumberOrOddball);
11437 672 : Goto(&collect_feedback_done);
11438 : }
11439 :
11440 : BIND(&collect_any_feedback);
11441 : {
11442 : OverwriteFeedback(var_type_feedback,
11443 672 : CompareOperationFeedback::kAny);
11444 672 : Goto(&collect_feedback_done);
11445 : }
11446 :
11447 672 : BIND(&collect_feedback_done);
11448 : }
11449 :
11450 : // If {left} is a receiver, call ToPrimitive(left, hint Number).
11451 : // Otherwise call ToNumeric(right) and then ToNumeric(left), the
11452 : // order here is important as it's observable by user code.
11453 : STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11454 : Label if_left_receiver(this, Label::kDeferred);
11455 896 : GotoIf(IsJSReceiverInstanceType(left_instance_type),
11456 1792 : &if_left_receiver);
11457 :
11458 1792 : var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11459 : var_left.Bind(
11460 1792 : CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11461 896 : Goto(&loop);
11462 :
11463 : BIND(&if_left_receiver);
11464 : {
11465 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11466 896 : isolate(), ToPrimitiveHint::kNumber);
11467 1792 : var_left.Bind(CallStub(callable, context, left));
11468 896 : Goto(&loop);
11469 896 : }
11470 896 : }
11471 896 : }
11472 896 : }
11473 : }
11474 :
 : // Shared float64 tail for all Number-vs-Number paths above.
11475 : BIND(&do_float_comparison);
11476 : {
11477 896 : switch (op) {
11478 : case Operation::kLessThan:
11479 224 : Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
11480 448 : &return_true, &return_false);
11481 224 : break;
11482 : case Operation::kLessThanOrEqual:
11483 : Branch(Float64LessThanOrEqual(var_left_float.value(),
11484 224 : var_right_float.value()),
11485 448 : &return_true, &return_false);
11486 224 : break;
11487 : case Operation::kGreaterThan:
11488 : Branch(
11489 224 : Float64GreaterThan(var_left_float.value(), var_right_float.value()),
11490 448 : &return_true, &return_false);
11491 224 : break;
11492 : case Operation::kGreaterThanOrEqual:
11493 : Branch(Float64GreaterThanOrEqual(var_left_float.value(),
11494 224 : var_right_float.value()),
11495 448 : &return_true, &return_false);
11496 224 : break;
11497 : default:
11498 0 : UNREACHABLE();
11499 : }
11500 : }
11501 :
11502 : BIND(&return_true);
11503 : {
11504 : var_result = TrueConstant();
11505 896 : Goto(&end);
11506 : }
11507 :
11508 : BIND(&return_false);
11509 : {
11510 : var_result = FalseConstant();
11511 896 : Goto(&end);
11512 : }
11513 :
11514 : BIND(&end);
11515 896 : return var_result.value();
11516 : }
11517 :
11518 1120 : TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
11519 : SloppyTNode<Int32T> instance_type) {
 : // Returns kInternalizedString feedback when the string's instance type
 : // carries the internalized tag, otherwise the generic kString feedback.
11520 : TNode<Smi> feedback = SelectSmiConstant(
11521 : Word32Equal(
11522 2240 : Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
11523 4480 : Int32Constant(kInternalizedTag)),
11524 : CompareOperationFeedback::kInternalizedString,
11525 2240 : CompareOperationFeedback::kString);
11526 1120 : return feedback;
11527 : }
11528 :
11529 616 : void CodeStubAssembler::GenerateEqual_Same(Node* value, Label* if_equal,
11530 : Label* if_notequal,
11531 : Variable* var_type_feedback) {
 : // Handles the identical-reference case of (strict) equality: almost all
 : // values compare equal to themselves, except a HeapNumber holding NaN.
 : // When |var_type_feedback| is non-null, also classifies |value| to
 : // record CompareOperationFeedback before jumping.
11532 : // In case of abstract or strict equality checks, we need additional checks
11533 : // for NaN values because they are not considered equal, even if both the
11534 : // left and the right hand side reference exactly the same value.
11535 :
11536 1232 : Label if_smi(this), if_heapnumber(this);
11537 1232 : GotoIf(TaggedIsSmi(value), &if_smi);
11538 :
11539 1232 : Node* value_map = LoadMap(value);
11540 1232 : GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
11541 :
11542 : // For non-HeapNumbers, all we do is collect type feedback.
11543 616 : if (var_type_feedback != nullptr) {
11544 672 : Node* instance_type = LoadMapInstanceType(value_map);
11545 :
11546 336 : Label if_string(this), if_receiver(this), if_oddball(this), if_symbol(this),
11547 336 : if_bigint(this);
11548 672 : GotoIf(IsStringInstanceType(instance_type), &if_string);
11549 672 : GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
11550 336 : GotoIf(IsOddballInstanceType(instance_type), &if_oddball);
11551 336 : Branch(IsBigIntInstanceType(instance_type), &if_bigint, &if_symbol);
11552 :
11553 : BIND(&if_string);
11554 : {
11555 : CSA_ASSERT(this, IsString(value));
11556 : CombineFeedback(var_type_feedback,
11557 672 : CollectFeedbackForString(instance_type));
11558 336 : Goto(if_equal);
11559 : }
11560 :
11561 : BIND(&if_symbol);
11562 : {
11563 : CSA_ASSERT(this, IsSymbol(value));
11564 336 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
11565 336 : Goto(if_equal);
11566 : }
11567 :
11568 : BIND(&if_receiver);
11569 : {
11570 : CSA_ASSERT(this, IsJSReceiver(value));
11571 336 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
11572 336 : Goto(if_equal);
11573 : }
11574 :
11575 : BIND(&if_bigint);
11576 : {
11577 : CSA_ASSERT(this, IsBigInt(value));
11578 336 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11579 336 : Goto(if_equal);
11580 : }
11581 :
11582 : BIND(&if_oddball);
11583 : {
11584 : CSA_ASSERT(this, IsOddball(value));
11585 336 : Label if_boolean(this), if_not_boolean(this);
11586 672 : Branch(IsBooleanMap(value_map), &if_boolean, &if_not_boolean);
11587 :
11588 : BIND(&if_boolean);
11589 : {
11590 336 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11591 336 : Goto(if_equal);
11592 : }
11593 :
11594 : BIND(&if_not_boolean);
11595 : {
11596 : CSA_ASSERT(this, IsNullOrUndefined(value));
11597 : CombineFeedback(var_type_feedback,
11598 336 : CompareOperationFeedback::kReceiverOrNullOrUndefined);
11599 336 : Goto(if_equal);
11600 336 : }
11601 336 : }
11602 : } else {
11603 280 : Goto(if_equal);
11604 : }
11605 :
11606 : BIND(&if_heapnumber);
11607 : {
 : // A HeapNumber equals itself unless its value is NaN.
11608 616 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11609 1232 : Node* number_value = LoadHeapNumberValue(value);
11610 616 : BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
11611 : }
11612 :
11613 : BIND(&if_smi);
11614 : {
11615 616 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
11616 616 : Goto(if_equal);
11617 616 : }
11618 616 : }
11619 :
11620 : // ES6 section 7.2.12 Abstract Equality Comparison
            : //
            : // Returns True/False as a tagged boolean value.
            : //
            : // left, right       - tagged operands.
            : // context           - needed for the ToPrimitive / StringToNumber stubs.
            : // var_type_feedback - optional; when non-null, receives the combined
            : //                     CompareOperationFeedback for the comparison.
11621 224 : Node* CodeStubAssembler::Equal(Node* left, Node* right, Node* context,
11622 : Variable* var_type_feedback) {
11623 : // This is a slightly optimized version of Object::Equals. Whenever you
11624 : // change something functionality wise in here, remember to update the
11625 : // Object::Equals method as well.
11626 :
11627 448 : Label if_equal(this), if_notequal(this), do_float_comparison(this),
11628 224 : do_right_stringtonumber(this, Label::kDeferred), end(this);
11629 448 : VARIABLE(result, MachineRepresentation::kTagged);
11630 : TVARIABLE(Float64T, var_left_float);
11631 : TVARIABLE(Float64T, var_right_float);
11632 :
11633 : // We can avoid code duplication by exploiting the fact that abstract equality
11634 : // is symmetric.
11635 224 : Label use_symmetry(this);
11636 :
11637 : // We might need to loop several times due to ToPrimitive and/or ToNumber
11638 : // conversions.
11639 448 : VARIABLE(var_left, MachineRepresentation::kTagged, left);
11640 448 : VARIABLE(var_right, MachineRepresentation::kTagged, right);
11641 448 : VariableList loop_variable_list({&var_left, &var_right}, zone());
11642 224 : if (var_type_feedback != nullptr) {
11643 : // Initialize the type feedback to None. The current feedback will be
11644 : // combined with the previous feedback.
11645 168 : OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
11646 168 : loop_variable_list.push_back(var_type_feedback);
11647 : }
11648 224 : Label loop(this, loop_variable_list);
11649 224 : Goto(&loop);
11650 : BIND(&loop);
11651 : {
11652 224 : left = var_left.value();
11653 224 : right = var_right.value();
11654 :
11655 : Label if_notsame(this);
11656 448 : GotoIf(WordNotEqual(left, right), &if_notsame);
11657 : {
11658 : // {left} and {right} reference the exact same value, yet we need special
11659 : // treatment for HeapNumber, as NaN is not equal to NaN.
11660 224 : GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
11661 : }
11662 :
11663 : BIND(&if_notsame);
11664 224 : Label if_left_smi(this), if_left_not_smi(this);
11665 448 : Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11666 :
11667 : BIND(&if_left_smi);
11668 : {
11669 224 : Label if_right_smi(this), if_right_not_smi(this);
11670 448 : Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11671 :
11672 : BIND(&if_right_smi);
11673 : {
11674 : // We have already checked for {left} and {right} being the same value,
11675 : // so when we get here they must be different Smis.
11676 : CombineFeedback(var_type_feedback,
11677 224 : CompareOperationFeedback::kSignedSmall);
11678 224 : Goto(&if_notequal);
11679 : }
11680 :
11681 : BIND(&if_right_not_smi);
11682 448 : Node* right_map = LoadMap(right);
11683 224 : Label if_right_heapnumber(this), if_right_boolean(this),
11684 224 : if_right_bigint(this, Label::kDeferred),
11685 224 : if_right_receiver(this, Label::kDeferred);
11686 448 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11687 : // {left} is Smi and {right} is not HeapNumber or Smi.
11688 224 : if (var_type_feedback != nullptr) {
11689 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
11690 : }
11691 448 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11692 448 : Node* right_type = LoadMapInstanceType(right_map);
11693 448 : GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11694 224 : GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11695 224 : Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
11696 448 : &if_notequal);
11697 :
11698 : BIND(&if_right_heapnumber);
11699 : {
11700 448 : var_left_float = SmiToFloat64(left);
11701 448 : var_right_float = LoadHeapNumberValue(right);
11702 224 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11703 224 : Goto(&do_float_comparison);
11704 : }
11705 :
11706 : BIND(&if_right_boolean);
11707 : {
            : // Convert the Boolean {right} to its number value and retry.
11708 224 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11709 224 : Goto(&loop);
11710 : }
11711 :
11712 : BIND(&if_right_bigint);
11713 : {
11714 : result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
11715 224 : NoContextConstant(), right, left));
11716 224 : Goto(&end);
11717 : }
11718 :
11719 : BIND(&if_right_receiver);
11720 : {
11721 224 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
11722 448 : var_right.Bind(CallStub(callable, context, right));
11723 224 : Goto(&loop);
11724 224 : }
11725 : }
11726 :
11727 : BIND(&if_left_not_smi);
11728 : {
11729 448 : GotoIf(TaggedIsSmi(right), &use_symmetry);
11730 :
11731 224 : Label if_left_symbol(this), if_left_number(this), if_left_string(this),
11732 224 : if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
11733 224 : if_left_receiver(this);
11734 :
11735 448 : Node* left_map = LoadMap(left);
11736 448 : Node* right_map = LoadMap(right);
11737 448 : Node* left_type = LoadMapInstanceType(left_map);
11738 448 : Node* right_type = LoadMapInstanceType(right_map);
11739 :
11740 448 : GotoIf(IsStringInstanceType(left_type), &if_left_string);
11741 224 : GotoIf(IsSymbolInstanceType(left_type), &if_left_symbol);
11742 224 : GotoIf(IsHeapNumberInstanceType(left_type), &if_left_number);
11743 224 : GotoIf(IsOddballInstanceType(left_type), &if_left_oddball);
11744 : Branch(IsBigIntInstanceType(left_type), &if_left_bigint,
11745 224 : &if_left_receiver);
11746 :
11747 : BIND(&if_left_string);
11748 : {
11749 448 : GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
11750 448 : result.Bind(CallBuiltin(Builtins::kStringEqual, context, left, right));
11751 : CombineFeedback(var_type_feedback,
11752 : SmiOr(CollectFeedbackForString(left_type),
11753 672 : CollectFeedbackForString(right_type)));
11754 224 : Goto(&end);
11755 : }
11756 :
11757 : BIND(&if_left_number);
11758 : {
11759 : Label if_right_not_number(this);
11760 448 : GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
11761 :
11762 448 : var_left_float = LoadHeapNumberValue(left);
11763 448 : var_right_float = LoadHeapNumberValue(right);
11764 224 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11765 224 : Goto(&do_float_comparison);
11766 :
11767 : BIND(&if_right_not_number);
11768 : {
11769 : Label if_right_boolean(this);
11770 224 : if (var_type_feedback != nullptr) {
11771 : var_type_feedback->Bind(
11772 168 : SmiConstant(CompareOperationFeedback::kAny));
11773 : }
11774 448 : GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11775 448 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11776 224 : GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
11777 224 : Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
11778 448 : &if_notequal);
11779 :
11780 : BIND(&if_right_boolean);
11781 : {
11782 224 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11783 224 : Goto(&loop);
11784 224 : }
11785 224 : }
11786 : }
11787 :
11788 : BIND(&if_left_bigint);
11789 : {
11790 224 : Label if_right_heapnumber(this), if_right_bigint(this),
11791 224 : if_right_string(this), if_right_boolean(this);
11792 448 : GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11793 224 : GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11794 448 : GotoIf(IsStringInstanceType(right_type), &if_right_string);
11795 448 : GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11796 224 : Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
11797 448 : &if_notequal);
11798 :
11799 : BIND(&if_right_heapnumber);
11800 : {
11801 224 : if (var_type_feedback != nullptr) {
11802 : var_type_feedback->Bind(
11803 168 : SmiConstant(CompareOperationFeedback::kAny));
11804 : }
11805 : result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
11806 224 : NoContextConstant(), left, right));
11807 224 : Goto(&end);
11808 : }
11809 :
11810 : BIND(&if_right_bigint);
11811 : {
11812 224 : CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11813 : result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
11814 224 : NoContextConstant(), left, right));
11815 224 : Goto(&end);
11816 : }
11817 :
11818 : BIND(&if_right_string);
11819 : {
11820 224 : if (var_type_feedback != nullptr) {
11821 : var_type_feedback->Bind(
11822 168 : SmiConstant(CompareOperationFeedback::kAny));
11823 : }
11824 : result.Bind(CallRuntime(Runtime::kBigIntEqualToString,
11825 224 : NoContextConstant(), left, right));
11826 224 : Goto(&end);
11827 : }
11828 :
11829 : BIND(&if_right_boolean);
11830 : {
11831 224 : if (var_type_feedback != nullptr) {
11832 : var_type_feedback->Bind(
11833 168 : SmiConstant(CompareOperationFeedback::kAny));
11834 : }
11835 224 : var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11836 224 : Goto(&loop);
11837 224 : }
11838 : }
11839 :
11840 : BIND(&if_left_oddball);
11841 : {
11842 224 : Label if_left_boolean(this), if_left_not_boolean(this);
11843 448 : Branch(IsBooleanMap(left_map), &if_left_boolean, &if_left_not_boolean);
11844 :
11845 : BIND(&if_left_not_boolean);
11846 : {
11847 : // {left} is either Null or Undefined. Check if {right} is
11848 : // undetectable (which includes Null and Undefined).
11849 224 : Label if_right_undetectable(this), if_right_not_undetectable(this);
11850 224 : Branch(IsUndetectableMap(right_map), &if_right_undetectable,
11851 448 : &if_right_not_undetectable);
11852 :
11853 : BIND(&if_right_undetectable);
11854 : {
11855 224 : if (var_type_feedback != nullptr) {
11856 : // If {right} is undetectable, it must be either also
11857 : // Null or Undefined, or a Receiver (aka document.all).
11858 : var_type_feedback->Bind(SmiConstant(
11859 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
11860 : }
11861 224 : Goto(&if_equal);
11862 : }
11863 :
11864 : BIND(&if_right_not_undetectable);
11865 : {
11866 224 : if (var_type_feedback != nullptr) {
11867 : // Track whether {right} is Null, Undefined or Receiver.
11868 : var_type_feedback->Bind(SmiConstant(
11869 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
11870 336 : GotoIf(IsJSReceiverInstanceType(right_type), &if_notequal);
11871 336 : GotoIfNot(IsBooleanMap(right_map), &if_notequal);
11872 : var_type_feedback->Bind(
11873 168 : SmiConstant(CompareOperationFeedback::kAny));
11874 : }
11875 224 : Goto(&if_notequal);
11876 224 : }
11877 : }
11878 :
11879 : BIND(&if_left_boolean);
11880 : {
11881 224 : if (var_type_feedback != nullptr) {
11882 : var_type_feedback->Bind(
11883 168 : SmiConstant(CompareOperationFeedback::kAny));
11884 : }
11885 :
11886 : // If {right} is a Boolean too, it must be a different Boolean.
11887 448 : GotoIf(WordEqual(right_map, left_map), &if_notequal);
11888 :
11889 : // Otherwise, convert {left} to number and try again.
11890 224 : var_left.Bind(LoadObjectField(left, Oddball::kToNumberOffset));
11891 224 : Goto(&loop);
11892 224 : }
11893 : }
11894 :
11895 : BIND(&if_left_symbol);
11896 : {
11897 : Label if_right_receiver(this);
11898 448 : GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
11899 : // {right} is not a JSReceiver and also not the same Symbol as {left},
11900 : // so the result is "not equal".
11901 224 : if (var_type_feedback != nullptr) {
11902 : Label if_right_symbol(this);
11903 168 : GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
11904 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
11905 168 : Goto(&if_notequal);
11906 :
11907 : BIND(&if_right_symbol);
11908 : {
11909 : CombineFeedback(var_type_feedback,
11910 168 : CompareOperationFeedback::kSymbol);
11911 168 : Goto(&if_notequal);
11912 168 : }
11913 : } else {
11914 56 : Goto(&if_notequal);
11915 : }
11916 :
11917 : BIND(&if_right_receiver);
11918 : {
11919 : // {left} is a Primitive and {right} is a JSReceiver, so swapping
11920 : // the order is not observable.
11921 224 : if (var_type_feedback != nullptr) {
11922 : var_type_feedback->Bind(
11923 168 : SmiConstant(CompareOperationFeedback::kAny));
11924 : }
11925 224 : Goto(&use_symmetry);
11926 224 : }
11927 : }
11928 :
11929 : BIND(&if_left_receiver);
11930 : {
11931 : CSA_ASSERT(this, IsJSReceiverInstanceType(left_type));
11932 224 : Label if_right_receiver(this), if_right_not_receiver(this);
11933 224 : Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
11934 448 : &if_right_not_receiver);
11935 :
11936 : BIND(&if_right_receiver);
11937 : {
11938 : // {left} and {right} are different JSReceiver references.
11939 : CombineFeedback(var_type_feedback,
11940 224 : CompareOperationFeedback::kReceiver);
11941 224 : Goto(&if_notequal);
11942 : }
11943 :
11944 : BIND(&if_right_not_receiver);
11945 : {
11946 : // Check if {right} is undetectable, which means it must be Null
11947 : // or Undefined, since we already ruled out Receiver for {right}.
11948 : Label if_right_undetectable(this),
11949 224 : if_right_not_undetectable(this, Label::kDeferred);
11950 224 : Branch(IsUndetectableMap(right_map), &if_right_undetectable,
11951 448 : &if_right_not_undetectable);
11952 :
11953 : BIND(&if_right_undetectable);
11954 : {
11955 : // When we get here, {right} must be either Null or Undefined.
11956 : CSA_ASSERT(this, IsNullOrUndefined(right));
11957 224 : if (var_type_feedback != nullptr) {
11958 : var_type_feedback->Bind(SmiConstant(
11959 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
11960 : }
11961 448 : Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
11962 : }
11963 :
11964 : BIND(&if_right_not_undetectable);
11965 : {
11966 : // {right} is a Primitive, and neither Null or Undefined;
11967 : // convert {left} to Primitive too.
11968 224 : if (var_type_feedback != nullptr) {
11969 : var_type_feedback->Bind(
11970 168 : SmiConstant(CompareOperationFeedback::kAny));
11971 : }
11972 224 : Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
11973 448 : var_left.Bind(CallStub(callable, context, left));
11974 224 : Goto(&loop);
11975 224 : }
11976 224 : }
11977 224 : }
11978 : }
11979 :
            : // {right} is a String: convert it to a Number and compare again.
11980 : BIND(&do_right_stringtonumber);
11981 : {
11982 448 : var_right.Bind(CallBuiltin(Builtins::kStringToNumber, context, right));
11983 224 : Goto(&loop);
11984 : }
11985 :
            : // Swap the operands and restart the loop; abstract equality is
            : // symmetric, so this is not observable.
11986 : BIND(&use_symmetry);
11987 : {
11988 224 : var_left.Bind(right);
11989 224 : var_right.Bind(left);
11990 224 : Goto(&loop);
11991 224 : }
11992 : }
11993 :
11994 : BIND(&do_float_comparison);
11995 : {
11996 224 : Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
11997 448 : &if_equal, &if_notequal);
11998 : }
11999 :
12000 : BIND(&if_equal);
12001 : {
12002 224 : result.Bind(TrueConstant());
12003 224 : Goto(&end);
12004 : }
12005 :
12006 : BIND(&if_notequal);
12007 : {
12008 224 : result.Bind(FalseConstant());
12009 224 : Goto(&end);
12010 : }
12011 :
12012 : BIND(&end);
12013 448 : return result.value();
12014 : }
12015 :
            : // Implements strict equality (===). No conversions are performed: only
            : // Smi/HeapNumber, String, and BigInt values can compare equal without
            : // being the very same object (see pseudo-code below). Returns True/False
            : // as a tagged boolean value.
            : //
            : // lhs, rhs          - tagged operands.
            : // var_type_feedback - optional; when non-null, receives
            : //                     CompareOperationFeedback for the comparison.
12016 392 : Node* CodeStubAssembler::StrictEqual(Node* lhs, Node* rhs,
12017 : Variable* var_type_feedback) {
12018 : // Pseudo-code for the algorithm below:
12019 : //
12020 : // if (lhs == rhs) {
12021 : // if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
12022 : // return true;
12023 : // }
12024 : // if (!lhs->IsSmi()) {
12025 : // if (lhs->IsHeapNumber()) {
12026 : // if (rhs->IsSmi()) {
12027 : // return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
12028 : // } else if (rhs->IsHeapNumber()) {
12029 : // return HeapNumber::cast(rhs)->value() ==
12030 : // HeapNumber::cast(lhs)->value();
12031 : // } else {
12032 : // return false;
12033 : // }
12034 : // } else {
12035 : // if (rhs->IsSmi()) {
12036 : // return false;
12037 : // } else {
12038 : // if (lhs->IsString()) {
12039 : // if (rhs->IsString()) {
12040 : // return %StringEqual(lhs, rhs);
12041 : // } else {
12042 : // return false;
12043 : // }
12044 : // } else if (lhs->IsBigInt()) {
12045 : // if (rhs->IsBigInt()) {
12046 : // return %BigIntEqualToBigInt(lhs, rhs);
12047 : // } else {
12048 : // return false;
12049 : // }
12050 : // } else {
12051 : // return false;
12052 : // }
12053 : // }
12054 : // }
12055 : // } else {
12056 : // if (rhs->IsSmi()) {
12057 : // return false;
12058 : // } else {
12059 : // if (rhs->IsHeapNumber()) {
12060 : // return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
12061 : // } else {
12062 : // return false;
12063 : // }
12064 : // }
12065 : // }
12066 :
12067 784 : Label if_equal(this), if_notequal(this), end(this);
12068 784 : VARIABLE(result, MachineRepresentation::kTagged);
12069 :
12070 : // Check if {lhs} and {rhs} refer to the same object.
12071 392 : Label if_same(this), if_notsame(this);
12072 784 : Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
12073 :
12074 : BIND(&if_same);
12075 : {
12076 : // The {lhs} and {rhs} reference the exact same value, yet we need special
12077 : // treatment for HeapNumber, as NaN is not equal to NaN.
12078 392 : if (var_type_feedback != nullptr) {
12079 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
12080 : }
12081 392 : GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
12082 : }
12083 :
12084 : BIND(&if_notsame);
12085 : {
12086 : // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
12087 : // BigInt and String they can still be considered equal.
12088 :
12089 392 : if (var_type_feedback != nullptr) {
12090 168 : var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
12091 : }
12092 :
12093 : // Check if {lhs} is a Smi or a HeapObject.
12094 392 : Label if_lhsissmi(this), if_lhsisnotsmi(this);
12095 784 : Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
12096 :
12097 : BIND(&if_lhsisnotsmi);
12098 : {
12099 : // Load the map of {lhs}.
12100 784 : Node* lhs_map = LoadMap(lhs);
12101 :
12102 : // Check if {lhs} is a HeapNumber.
12103 392 : Label if_lhsisnumber(this), if_lhsisnotnumber(this);
12104 784 : Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
12105 :
12106 : BIND(&if_lhsisnumber);
12107 : {
12108 : // Check if {rhs} is a Smi or a HeapObject.
12109 392 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
12110 784 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
12111 :
12112 : BIND(&if_rhsissmi);
12113 : {
12114 : // Convert {lhs} and {rhs} to floating point values.
12115 784 : Node* lhs_value = LoadHeapNumberValue(lhs);
12116 784 : Node* rhs_value = SmiToFloat64(rhs);
12117 :
12118 392 : if (var_type_feedback != nullptr) {
12119 : var_type_feedback->Bind(
12120 168 : SmiConstant(CompareOperationFeedback::kNumber));
12121 : }
12122 :
12123 : // Perform a floating point comparison of {lhs} and {rhs}.
12124 784 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12125 : }
12126 :
12127 : BIND(&if_rhsisnotsmi);
12128 : {
12129 : // Load the map of {rhs}.
12130 784 : Node* rhs_map = LoadMap(rhs);
12131 :
12132 : // Check if {rhs} is also a HeapNumber.
12133 392 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
12134 784 : Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
12135 :
12136 : BIND(&if_rhsisnumber);
12137 : {
12138 : // Convert {lhs} and {rhs} to floating point values.
12139 784 : Node* lhs_value = LoadHeapNumberValue(lhs);
12140 784 : Node* rhs_value = LoadHeapNumberValue(rhs);
12141 :
12142 392 : if (var_type_feedback != nullptr) {
12143 : var_type_feedback->Bind(
12144 168 : SmiConstant(CompareOperationFeedback::kNumber));
12145 : }
12146 :
12147 : // Perform a floating point comparison of {lhs} and {rhs}.
12148 784 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12149 : }
12150 :
12151 : BIND(&if_rhsisnotnumber);
12152 784 : Goto(&if_notequal);
12153 392 : }
12154 : }
12155 :
12156 : BIND(&if_lhsisnotnumber);
12157 : {
12158 : // Check if {rhs} is a Smi or a HeapObject.
12159 392 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
12160 784 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
12161 :
12162 : BIND(&if_rhsissmi);
12163 392 : Goto(&if_notequal);
12164 :
12165 : BIND(&if_rhsisnotsmi);
12166 : {
12167 : // Load the instance type of {lhs}.
12168 784 : Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
12169 :
12170 : // Check if {lhs} is a String.
12171 392 : Label if_lhsisstring(this), if_lhsisnotstring(this);
12172 392 : Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
12173 784 : &if_lhsisnotstring);
12174 :
12175 : BIND(&if_lhsisstring);
12176 : {
12177 : // Load the instance type of {rhs}.
12178 784 : Node* rhs_instance_type = LoadInstanceType(rhs);
12179 :
12180 : // Check if {rhs} is also a String.
12181 : Label if_rhsisstring(this, Label::kDeferred),
12182 392 : if_rhsisnotstring(this);
12183 392 : Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
12184 784 : &if_rhsisnotstring);
12185 :
12186 : BIND(&if_rhsisstring);
12187 : {
12188 392 : if (var_type_feedback != nullptr) {
12189 : TNode<Smi> lhs_feedback =
12190 168 : CollectFeedbackForString(lhs_instance_type);
12191 : TNode<Smi> rhs_feedback =
12192 168 : CollectFeedbackForString(rhs_instance_type);
12193 336 : var_type_feedback->Bind(SmiOr(lhs_feedback, rhs_feedback));
12194 : }
12195 : result.Bind(CallBuiltin(Builtins::kStringEqual,
12196 784 : NoContextConstant(), lhs, rhs));
12197 392 : Goto(&end);
12198 : }
12199 :
12200 : BIND(&if_rhsisnotstring);
12201 784 : Goto(&if_notequal);
12202 : }
12203 :
12204 : BIND(&if_lhsisnotstring);
12205 :
12206 : // Check if {lhs} is a BigInt.
12207 392 : Label if_lhsisbigint(this), if_lhsisnotbigint(this);
12208 : Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
12209 392 : &if_lhsisnotbigint);
12210 :
12211 : BIND(&if_lhsisbigint);
12212 : {
12213 : // Load the instance type of {rhs}.
12214 784 : Node* rhs_instance_type = LoadInstanceType(rhs);
12215 :
12216 : // Check if {rhs} is also a BigInt.
12217 : Label if_rhsisbigint(this, Label::kDeferred),
12218 392 : if_rhsisnotbigint(this);
12219 : Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
12220 392 : &if_rhsisnotbigint);
12221 :
12222 : BIND(&if_rhsisbigint);
12223 : {
12224 392 : if (var_type_feedback != nullptr) {
12225 : var_type_feedback->Bind(
12226 168 : SmiConstant(CompareOperationFeedback::kBigInt));
12227 : }
12228 : result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
12229 392 : NoContextConstant(), lhs, rhs));
12230 392 : Goto(&end);
12231 : }
12232 :
12233 : BIND(&if_rhsisnotbigint);
12234 784 : Goto(&if_notequal);
12235 : }
12236 :
12237 : BIND(&if_lhsisnotbigint);
12238 392 : if (var_type_feedback != nullptr) {
            : // Remaining cases can never be equal; this branch only refines
            : // the type feedback before jumping to {if_notequal}.
12239 : // Load the instance type of {rhs}.
12240 336 : Node* rhs_map = LoadMap(rhs);
12241 336 : Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
12242 :
12243 168 : Label if_lhsissymbol(this), if_lhsisreceiver(this),
12244 168 : if_lhsisoddball(this);
12245 168 : GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
12246 336 : &if_lhsisreceiver);
12247 336 : GotoIf(IsBooleanMap(lhs_map), &if_notequal);
12248 168 : GotoIf(IsOddballInstanceType(lhs_instance_type), &if_lhsisoddball);
12249 : Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
12250 168 : &if_notequal);
12251 :
12252 : BIND(&if_lhsisreceiver);
12253 : {
12254 336 : GotoIf(IsBooleanMap(rhs_map), &if_notequal);
12255 : var_type_feedback->Bind(
12256 168 : SmiConstant(CompareOperationFeedback::kReceiver));
12257 336 : GotoIf(IsJSReceiverInstanceType(rhs_instance_type), &if_notequal);
12258 : var_type_feedback->Bind(SmiConstant(
12259 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
12260 168 : GotoIf(IsOddballInstanceType(rhs_instance_type), &if_notequal);
12261 : var_type_feedback->Bind(
12262 168 : SmiConstant(CompareOperationFeedback::kAny));
12263 168 : Goto(&if_notequal);
12264 : }
12265 :
12266 : BIND(&if_lhsisoddball);
12267 : {
12268 : STATIC_ASSERT(LAST_PRIMITIVE_TYPE == ODDBALL_TYPE);
12269 336 : GotoIf(IsBooleanMap(rhs_map), &if_notequal);
12270 : GotoIf(
12271 336 : Int32LessThan(rhs_instance_type, Int32Constant(ODDBALL_TYPE)),
12272 336 : &if_notequal);
12273 : var_type_feedback->Bind(SmiConstant(
12274 168 : CompareOperationFeedback::kReceiverOrNullOrUndefined));
12275 168 : Goto(&if_notequal);
12276 : }
12277 :
12278 : BIND(&if_lhsissymbol);
12279 : {
12280 168 : GotoIfNot(IsSymbolInstanceType(rhs_instance_type), &if_notequal);
12281 : var_type_feedback->Bind(
12282 168 : SmiConstant(CompareOperationFeedback::kSymbol));
12283 168 : Goto(&if_notequal);
12284 168 : }
12285 : } else {
12286 224 : Goto(&if_notequal);
12287 392 : }
12288 392 : }
12289 392 : }
12290 : }
12291 :
12292 : BIND(&if_lhsissmi);
12293 : {
12294 : // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
12295 : // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
12296 : // HeapNumber with an equal floating point value.
12297 :
12298 : // Check if {rhs} is a Smi or a HeapObject.
12299 392 : Label if_rhsissmi(this), if_rhsisnotsmi(this);
12300 784 : Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
12301 :
12302 : BIND(&if_rhsissmi);
12303 392 : if (var_type_feedback != nullptr) {
12304 : var_type_feedback->Bind(
12305 168 : SmiConstant(CompareOperationFeedback::kSignedSmall));
12306 : }
12307 392 : Goto(&if_notequal);
12308 :
12309 : BIND(&if_rhsisnotsmi);
12310 : {
12311 : // Load the map of the {rhs}.
12312 784 : Node* rhs_map = LoadMap(rhs);
12313 :
12314 : // The {rhs} could be a HeapNumber with the same value as {lhs}.
12315 392 : Label if_rhsisnumber(this), if_rhsisnotnumber(this);
12316 784 : Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
12317 :
12318 : BIND(&if_rhsisnumber);
12319 : {
12320 : // Convert {lhs} and {rhs} to floating point values.
12321 784 : Node* lhs_value = SmiToFloat64(lhs);
12322 784 : Node* rhs_value = LoadHeapNumberValue(rhs);
12323 :
12324 392 : if (var_type_feedback != nullptr) {
12325 : var_type_feedback->Bind(
12326 168 : SmiConstant(CompareOperationFeedback::kNumber));
12327 : }
12328 :
12329 : // Perform a floating point comparison of {lhs} and {rhs}.
12330 784 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12331 : }
12332 :
12333 : BIND(&if_rhsisnotnumber);
12334 784 : Goto(&if_notequal);
12335 392 : }
12336 392 : }
12337 : }
12338 :
12339 : BIND(&if_equal);
12340 : {
12341 392 : result.Bind(TrueConstant());
12342 392 : Goto(&end);
12343 : }
12344 :
12345 : BIND(&if_notequal);
12346 : {
12347 392 : result.Bind(FalseConstant());
12348 392 : Goto(&end);
12349 : }
12350 :
12351 : BIND(&end);
12352 784 : return result.value();
12353 : }
12354 :
12355 : // ECMA#sec-samevalue
12356 : // This algorithm differs from the Strict Equality Comparison Algorithm in its
12357 : // treatment of signed zeroes and NaNs.
            : //
            : // lhs, rhs - tagged values to compare.
            : // if_true  - taken when SameValue(lhs, rhs) holds (NaN equals NaN,
            : //            but +0 and -0 are considered different).
            : // if_false - taken otherwise. Never falls through.
12358 336 : void CodeStubAssembler::BranchIfSameValue(Node* lhs, Node* rhs, Label* if_true,
12359 : Label* if_false) {
12360 336 : VARIABLE(var_lhs_value, MachineRepresentation::kFloat64);
12361 672 : VARIABLE(var_rhs_value, MachineRepresentation::kFloat64);
12362 336 : Label do_fcmp(this);
12363 :
12364 : // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
12365 : // StrictEqual - SameValue considers two NaNs to be equal.
12366 1344 : GotoIf(WordEqual(lhs, rhs), if_true);
12367 :
12368 : // Check if the {lhs} is a Smi.
12369 336 : Label if_lhsissmi(this), if_lhsisheapobject(this);
12370 1008 : Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);
12371 :
12372 : BIND(&if_lhsissmi);
12373 : {
12374 : // Since {lhs} is a Smi, the comparison can only yield true
12375 : // iff the {rhs} is a HeapNumber with the same float64 value.
12376 336 : Branch(TaggedIsSmi(rhs), if_false, [&] {
12377 1008 : GotoIfNot(IsHeapNumber(rhs), if_false);
12378 1008 : var_lhs_value.Bind(SmiToFloat64(lhs));
12379 1008 : var_rhs_value.Bind(LoadHeapNumberValue(rhs));
12380 336 : Goto(&do_fcmp);
12381 1344 : });
12382 : }
12383 :
12384 : BIND(&if_lhsisheapobject);
12385 : {
12386 : // Check if the {rhs} is a Smi.
12387 : Branch(TaggedIsSmi(rhs),
12388 336 : [&] {
12389 : // Since {rhs} is a Smi, the comparison can only yield true
12390 : // iff the {lhs} is a HeapNumber with the same float64 value.
12391 1008 : GotoIfNot(IsHeapNumber(lhs), if_false);
12392 1008 : var_lhs_value.Bind(LoadHeapNumberValue(lhs));
12393 1008 : var_rhs_value.Bind(SmiToFloat64(rhs));
12394 336 : Goto(&do_fcmp);
12395 336 : },
12396 336 : [&] {
12397 : // Now this can only yield true if either both {lhs} and {rhs} are
12398 : // HeapNumbers with the same value, or both are Strings with the
12399 : // same character sequence, or both are BigInts with the same
12400 : // value.
12401 1008 : Label if_lhsisheapnumber(this), if_lhsisstring(this),
12402 672 : if_lhsisbigint(this);
12403 1008 : Node* const lhs_map = LoadMap(lhs);
12404 672 : GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
12405 672 : Node* const lhs_instance_type = LoadMapInstanceType(lhs_map);
12406 672 : GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
12407 336 : Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
12408 672 : if_false);
12409 :
12410 336 : BIND(&if_lhsisheapnumber);
12411 : {
12412 1008 : GotoIfNot(IsHeapNumber(rhs), if_false);
12413 1008 : var_lhs_value.Bind(LoadHeapNumberValue(lhs));
12414 1008 : var_rhs_value.Bind(LoadHeapNumberValue(rhs));
12415 336 : Goto(&do_fcmp);
12416 : }
12417 :
12418 336 : BIND(&if_lhsisstring);
12419 : {
12420 : // Now we can only yield true if {rhs} is also a String
12421 : // with the same sequence of characters.
12422 1008 : GotoIfNot(IsString(rhs), if_false);
12423 : Node* const result = CallBuiltin(Builtins::kStringEqual,
12424 1008 : NoContextConstant(), lhs, rhs);
12425 1008 : Branch(IsTrue(result), if_true, if_false);
12426 : }
12427 :
12428 336 : BIND(&if_lhsisbigint);
12429 : {
12430 1008 : GotoIfNot(IsBigInt(rhs), if_false);
12431 : Node* const result = CallRuntime(Runtime::kBigIntEqualToBigInt,
12432 672 : NoContextConstant(), lhs, rhs);
12433 1008 : Branch(IsTrue(result), if_true, if_false);
12434 : }
12435 1680 : });
12436 : }
12437 :
12438 : BIND(&do_fcmp);
12439 : {
12440 336 : Node* const lhs_value = var_lhs_value.value();
12441 336 : Node* const rhs_value = var_rhs_value.value();
12442 :
12443 336 : Label if_equal(this), if_notequal(this);
12444 672 : Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12445 :
12446 : BIND(&if_equal);
12447 : {
12448 : // We still need to handle the case when {lhs} and {rhs} are -0.0 and
12449 : // 0.0 (or vice versa). Compare the high word to
12450 : // distinguish between the two.
12451 672 : Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_value);
12452 672 : Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_value);
12453 :
12454 : // If x is +0 and y is -0, return false.
12455 : // If x is -0 and y is +0, return false.
12456 1008 : Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
12457 : }
12458 :
12459 : BIND(&if_notequal);
12460 : {
12461 : // Return true iff both {rhs} and {lhs} are NaN.
12462 1008 : GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
12463 1008 : Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
12464 336 : }
12465 336 : }
12466 336 : }
12467 :
// Implements the HasProperty abstract operation (used by the 'in' operator
// and for-in): walks the prototype chain of {object} looking for {key}.
// Proxies dispatch to the ProxyHasProperty builtin in kHasProperty mode
// (private symbols answer false) and to the runtime in kForInHasProperty
// mode; anything the stub cannot handle bails out to the runtime.
TNode<Oddball> CodeStubAssembler::HasProperty(SloppyTNode<Context> context,
                                              SloppyTNode<Object> object,
                                              SloppyTNode<Object> key,
                                              HasPropertyLookupMode mode) {
  Label call_runtime(this, Label::kDeferred), return_true(this),
      return_false(this), end(this), if_proxy(this, Label::kDeferred);

  // Named-property probe for one holder on the chain: jumps to
  // {return_true} on a hit, {next_holder} to keep walking, or
  // {if_bailout} for cases the stub cannot decide.
  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
      [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
                           Node* holder_instance_type, Node* unique_name,
                           Label* next_holder, Label* if_bailout) {
        TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
                          &return_true, next_holder, if_bailout);
      };

  // Indexed-element probe for one holder; unlike the named case it can
  // also prove definite absence and jump straight to {return_false}.
  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
      [this, &return_true, &return_false](
          Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* index, Label* next_holder,
          Label* if_bailout) {
        TryLookupElement(holder, holder_map, holder_instance_type, index,
                         &return_true, &return_false, next_holder, if_bailout);
      };

  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &return_false,
                          &call_runtime, &if_proxy);

  TVARIABLE(Oddball, result);

  BIND(&if_proxy);
  {
    TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
    switch (mode) {
      case kHasProperty:
        // Private symbols are never observable through a proxy.
        GotoIf(IsPrivateSymbol(name), &return_false);

        result = CAST(
            CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
        Goto(&end);
        break;
      case kForInHasProperty:
        Goto(&call_runtime);
        break;
    }
  }

  BIND(&return_true);
  {
    result = TrueConstant();
    Goto(&end);
  }

  BIND(&return_false);
  {
    result = FalseConstant();
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    // Slow path: pick the runtime function matching the lookup mode.
    Runtime::FunctionId fallback_runtime_function_id;
    switch (mode) {
      case kHasProperty:
        fallback_runtime_function_id = Runtime::kHasProperty;
        break;
      case kForInHasProperty:
        fallback_runtime_function_id = Runtime::kForInHasProperty;
        break;
    }

    result =
        CAST(CallRuntime(fallback_runtime_function_id, context, object, key));
    Goto(&end);
  }

  BIND(&end);
  CSA_ASSERT(this, IsBoolean(result.value()));
  return result.value();
}
12548 :
// Implements the 'typeof' operator: maps {value} to one of the interned
// type-name strings. Oddballs (true/false/null/undefined) carry their
// result in a dedicated field; undetectable objects report "undefined";
// callable, non-undetectable objects report "function".
Node* CodeStubAssembler::Typeof(Node* value) {
  VARIABLE(result_var, MachineRepresentation::kTagged);

  Label return_number(this, Label::kDeferred), if_oddball(this),
      return_function(this), return_undefined(this), return_object(this),
      return_string(this), return_bigint(this), return_result(this);

  // Smis are numbers; check before any map load (Smis have no map).
  GotoIf(TaggedIsSmi(value), &return_number);

  Node* map = LoadMap(value);

  GotoIf(IsHeapNumberMap(map), &return_number);

  Node* instance_type = LoadMapInstanceType(map);

  GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);

  // Decide "function" vs "undefined" from the map's bit field:
  // callable and not undetectable => "function";
  // any undetectable/callable bit set otherwise => "undefined".
  Node* callable_or_undetectable_mask = Word32And(
      LoadMapBitField(map),
      Int32Constant(Map::IsCallableBit::kMask | Map::IsUndetectableBit::kMask));

  GotoIf(Word32Equal(callable_or_undetectable_mask,
                     Int32Constant(Map::IsCallableBit::kMask)),
         &return_function);

  GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
            &return_undefined);

  GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);

  GotoIf(IsStringInstanceType(instance_type), &return_string);

  GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);

  // Only Symbols remain at this point.
  CSA_ASSERT(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE));
  result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
  Goto(&return_result);

  BIND(&return_number);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
    Goto(&return_result);
  }

  BIND(&if_oddball);
  {
    // Oddballs store their typeof answer directly on the object.
    Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
    result_var.Bind(type);
    Goto(&return_result);
  }

  BIND(&return_function);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
    Goto(&return_result);
  }

  BIND(&return_undefined);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
    Goto(&return_result);
  }

  BIND(&return_object);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
    Goto(&return_result);
  }

  BIND(&return_string);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
    Goto(&return_result);
  }

  BIND(&return_bigint);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->bigint_string()));
    Goto(&return_result);
  }

  BIND(&return_result);
  return result_var.value();
}
12633 :
12634 224 : TNode<Object> CodeStubAssembler::GetSuperConstructor(
12635 : SloppyTNode<Context> context, SloppyTNode<JSFunction> active_function) {
12636 448 : Label is_not_constructor(this, Label::kDeferred), out(this);
12637 : TVARIABLE(Object, result);
12638 :
12639 224 : TNode<Map> map = LoadMap(active_function);
12640 448 : TNode<Object> prototype = LoadMapPrototype(map);
12641 224 : TNode<Map> prototype_map = LoadMap(CAST(prototype));
12642 448 : GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);
12643 :
12644 : result = prototype;
12645 224 : Goto(&out);
12646 :
12647 : BIND(&is_not_constructor);
12648 : {
12649 : CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
12650 : active_function);
12651 224 : Unreachable();
12652 : }
12653 :
12654 : BIND(&out);
12655 224 : return result.value();
12656 : }
12657 :
// ES #sec-speciesconstructor: reads {object}.constructor and then its
// @@species, falling back to {default_constructor} when either is
// undefined (or @@species is null), and throwing a TypeError when the
// constructor is not a receiver or @@species is not a constructor.
TNode<JSReceiver> CodeStubAssembler::SpeciesConstructor(
    SloppyTNode<Context> context, SloppyTNode<Object> object,
    SloppyTNode<JSReceiver> default_constructor) {
  Isolate* isolate = this->isolate();
  TVARIABLE(JSReceiver, var_result, default_constructor);

  // 2. Let C be ? Get(O, "constructor").
  TNode<Object> constructor =
      GetProperty(context, object, isolate->factory()->constructor_string());

  // 3. If C is undefined, return defaultConstructor.
  Label out(this);
  GotoIf(IsUndefined(constructor), &out);

  // 4. If Type(C) is not Object, throw a TypeError exception.
  ThrowIfNotJSReceiver(context, constructor,
                       MessageTemplate::kConstructorNotReceiver);

  // 5. Let S be ? Get(C, @@species).
  TNode<Object> species =
      GetProperty(context, constructor, isolate->factory()->species_symbol());

  // 6. If S is either undefined or null, return defaultConstructor.
  GotoIf(IsNullOrUndefined(species), &out);

  // 7. If IsConstructor(S) is true, return S.
  Label throw_error(this);
  GotoIf(TaggedIsSmi(species), &throw_error);
  GotoIfNot(IsConstructorMap(LoadMap(CAST(species))), &throw_error);
  var_result = CAST(species);
  Goto(&out);

  // 8. Throw a TypeError exception.
  BIND(&throw_error);
  ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);

  BIND(&out);
  return var_result.value();
}
12697 :
// Implements the 'instanceof' operator: looks up @@hasInstance on
// {callable}, fast-paths the builtin Function.prototype[@@hasInstance],
// calls a custom handler when one is installed, and otherwise falls back
// to OrdinaryHasInstance. Throws a TypeError for non-receiver or
// non-callable right-hand sides.
Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
                                    Node* context) {
  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label if_notcallable(this, Label::kDeferred),
      if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
      if_nohandler(this, Label::kDeferred), return_true(this),
      return_false(this), return_result(this, &var_result);

  // Ensure that the {callable} is actually a JSReceiver.
  GotoIf(TaggedIsSmi(callable), &if_notreceiver);
  GotoIfNot(IsJSReceiver(callable), &if_notreceiver);

  // Load the @@hasInstance property from {callable}.
  Node* inst_of_handler =
      GetProperty(context, callable, HasInstanceSymbolConstant());

  // Optimize for the likely case where {inst_of_handler} is the builtin
  // Function.prototype[@@hasInstance] method, and emit a direct call in
  // that case without any additional checking.
  Node* native_context = LoadNativeContext(context);
  Node* function_has_instance =
      LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
  GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
            &if_otherhandler);
  {
    // Call to Function.prototype[@@hasInstance] directly.
    Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance),
                     CallTrampolineDescriptor{});
    Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_otherhandler);
  {
    // Check if there's actually an {inst_of_handler}.
    GotoIf(IsNull(inst_of_handler), &if_nohandler);
    GotoIf(IsUndefined(inst_of_handler), &if_nohandler);

    // Call the {inst_of_handler} for {callable} and {object}.
    Node* result = CallJS(
        CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
        context, inst_of_handler, callable, object);

    // Convert the {result} to a Boolean.
    BranchIfToBooleanIsTrue(result, &return_true, &return_false);
  }

  BIND(&if_nohandler);
  {
    // Ensure that the {callable} is actually Callable.
    GotoIfNot(IsCallable(callable), &if_notcallable);

    // Use the OrdinaryHasInstance algorithm.
    Node* result =
        CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_notcallable);
  { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }

  BIND(&if_notreceiver);
  { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }

  BIND(&return_true);
  var_result.Bind(TrueConstant());
  Goto(&return_result);

  BIND(&return_false);
  var_result.Bind(FalseConstant());
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
12775 :
12776 1568 : TNode<Number> CodeStubAssembler::NumberInc(SloppyTNode<Number> value) {
12777 1568 : TVARIABLE(Number, var_result);
12778 : TVARIABLE(Float64T, var_finc_value);
12779 1568 : Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
12780 3136 : Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12781 :
12782 : BIND(&if_issmi);
12783 : {
12784 : Label if_overflow(this);
12785 : TNode<Smi> smi_value = CAST(value);
12786 1568 : TNode<Smi> one = SmiConstant(1);
12787 3136 : var_result = TrySmiAdd(smi_value, one, &if_overflow);
12788 1568 : Goto(&end);
12789 :
12790 : BIND(&if_overflow);
12791 : {
12792 3136 : var_finc_value = SmiToFloat64(smi_value);
12793 1568 : Goto(&do_finc);
12794 1568 : }
12795 : }
12796 :
12797 : BIND(&if_isnotsmi);
12798 : {
12799 : TNode<HeapNumber> heap_number_value = CAST(value);
12800 :
12801 : // Load the HeapNumber value.
12802 3136 : var_finc_value = LoadHeapNumberValue(heap_number_value);
12803 1568 : Goto(&do_finc);
12804 : }
12805 :
12806 : BIND(&do_finc);
12807 : {
12808 : TNode<Float64T> finc_value = var_finc_value.value();
12809 1568 : TNode<Float64T> one = Float64Constant(1.0);
12810 1568 : TNode<Float64T> finc_result = Float64Add(finc_value, one);
12811 3136 : var_result = AllocateHeapNumberWithValue(finc_result);
12812 1568 : Goto(&end);
12813 : }
12814 :
12815 : BIND(&end);
12816 1568 : return var_result.value();
12817 : }
12818 :
12819 392 : TNode<Number> CodeStubAssembler::NumberDec(SloppyTNode<Number> value) {
12820 392 : TVARIABLE(Number, var_result);
12821 : TVARIABLE(Float64T, var_fdec_value);
12822 392 : Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
12823 784 : Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12824 :
12825 : BIND(&if_issmi);
12826 : {
12827 : TNode<Smi> smi_value = CAST(value);
12828 392 : TNode<Smi> one = SmiConstant(1);
12829 : Label if_overflow(this);
12830 784 : var_result = TrySmiSub(smi_value, one, &if_overflow);
12831 392 : Goto(&end);
12832 :
12833 : BIND(&if_overflow);
12834 : {
12835 784 : var_fdec_value = SmiToFloat64(smi_value);
12836 392 : Goto(&do_fdec);
12837 392 : }
12838 : }
12839 :
12840 : BIND(&if_isnotsmi);
12841 : {
12842 : TNode<HeapNumber> heap_number_value = CAST(value);
12843 :
12844 : // Load the HeapNumber value.
12845 784 : var_fdec_value = LoadHeapNumberValue(heap_number_value);
12846 392 : Goto(&do_fdec);
12847 : }
12848 :
12849 : BIND(&do_fdec);
12850 : {
12851 : TNode<Float64T> fdec_value = var_fdec_value.value();
12852 392 : TNode<Float64T> minus_one = Float64Constant(-1.0);
12853 392 : TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
12854 784 : var_result = AllocateHeapNumberWithValue(fdec_result);
12855 392 : Goto(&end);
12856 : }
12857 :
12858 : BIND(&end);
12859 392 : return var_result.value();
12860 : }
12861 :
12862 1965 : TNode<Number> CodeStubAssembler::NumberAdd(SloppyTNode<Number> a,
12863 : SloppyTNode<Number> b) {
12864 1965 : TVARIABLE(Number, var_result);
12865 1965 : Label float_add(this, Label::kDeferred), end(this);
12866 3930 : GotoIf(TaggedIsNotSmi(a), &float_add);
12867 3930 : GotoIf(TaggedIsNotSmi(b), &float_add);
12868 :
12869 : // Try fast Smi addition first.
12870 3930 : var_result = TrySmiAdd(CAST(a), CAST(b), &float_add);
12871 1965 : Goto(&end);
12872 :
12873 : BIND(&float_add);
12874 : {
12875 5895 : var_result = ChangeFloat64ToTagged(
12876 5895 : Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
12877 1965 : Goto(&end);
12878 : }
12879 :
12880 : BIND(&end);
12881 1965 : return var_result.value();
12882 : }
12883 :
12884 1685 : TNode<Number> CodeStubAssembler::NumberSub(SloppyTNode<Number> a,
12885 : SloppyTNode<Number> b) {
12886 1685 : TVARIABLE(Number, var_result);
12887 1685 : Label float_sub(this, Label::kDeferred), end(this);
12888 3370 : GotoIf(TaggedIsNotSmi(a), &float_sub);
12889 3370 : GotoIf(TaggedIsNotSmi(b), &float_sub);
12890 :
12891 : // Try fast Smi subtraction first.
12892 3370 : var_result = TrySmiSub(CAST(a), CAST(b), &float_sub);
12893 1685 : Goto(&end);
12894 :
12895 : BIND(&float_sub);
12896 : {
12897 5055 : var_result = ChangeFloat64ToTagged(
12898 5055 : Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
12899 1685 : Goto(&end);
12900 : }
12901 :
12902 : BIND(&end);
12903 1685 : return var_result.value();
12904 : }
12905 :
// Falls through when {input} is a Number (Smi or HeapNumber); jumps to
// {is_not_number} otherwise. The Smi check must precede the HeapNumber
// check, since IsHeapNumber loads the map and a Smi has none.
void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
  Label is_number(this);
  GotoIf(TaggedIsSmi(input), &is_number);
  Branch(IsHeapNumber(input), &is_number, is_not_number);
  BIND(&is_number);
}
12912 :
// Jumps to {is_number} when {input} is a Smi or a HeapNumber; falls
// through otherwise. The Smi check must come first, since IsHeapNumber
// loads the map and is therefore not valid on a Smi.
void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
  GotoIf(TaggedIsSmi(input), is_number);
  GotoIf(IsHeapNumber(input), is_number);
}
12917 :
12918 2352 : TNode<Number> CodeStubAssembler::BitwiseOp(Node* left32, Node* right32,
12919 : Operation bitwise_op) {
12920 2352 : switch (bitwise_op) {
12921 : case Operation::kBitwiseAnd:
12922 784 : return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
12923 : case Operation::kBitwiseOr:
12924 784 : return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
12925 : case Operation::kBitwiseXor:
12926 784 : return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
12927 : case Operation::kShiftLeft:
12928 392 : if (!Word32ShiftIsSafe()) {
12929 0 : right32 = Word32And(right32, Int32Constant(0x1F));
12930 : }
12931 784 : return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
12932 : case Operation::kShiftRight:
12933 392 : if (!Word32ShiftIsSafe()) {
12934 0 : right32 = Word32And(right32, Int32Constant(0x1F));
12935 : }
12936 784 : return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
12937 : case Operation::kShiftRightLogical:
12938 392 : if (!Word32ShiftIsSafe()) {
12939 0 : right32 = Word32And(right32, Int32Constant(0x1F));
12940 : }
12941 784 : return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
12942 : default:
12943 : break;
12944 : }
12945 0 : UNREACHABLE();
12946 : }
12947 :
// ES #sec-createarrayiterator
// Allocates a fresh JSArrayIterator over {object} using the initial array
// iterator map from the native context; {kind} (keys/values/entries) is
// stored as a Smi, and the next-index slot starts at 0.
TNode<JSArrayIterator> CodeStubAssembler::CreateArrayIterator(
    TNode<Context> context, TNode<Object> object, IterationKind kind) {
  TNode<Context> native_context = LoadNativeContext(context);
  TNode<Map> iterator_map = CAST(LoadContextElement(
      native_context, Context::INITIAL_ARRAY_ITERATOR_MAP_INDEX));
  Node* iterator = Allocate(JSArrayIterator::kSize);
  StoreMapNoWriteBarrier(iterator, iterator_map);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kIteratedObjectOffset, object);
  StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
                                 SmiConstant(0));
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kKindOffset,
      SmiConstant(Smi::FromInt(static_cast<int>(kind))));
  return CAST(iterator);
}
12969 :
// Allocates a fresh JSIteratorResult {value, done} using the iterator
// result map from the native context. {done} must be a Boolean (asserted).
Node* CodeStubAssembler::AllocateJSIteratorResult(Node* context, Node* value,
                                                  Node* done) {
  CSA_ASSERT(this, IsBoolean(done));
  Node* native_context = LoadNativeContext(context);
  Node* map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  Node* result = Allocate(JSIteratorResult::kSize);
  StoreMapNoWriteBarrier(result, map);
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
  return result;
}
12986 :
// Builds the {value: [key, value], done: false} object yielded by entry
// iterators. One contiguous chunk holds three sub-objects, carved out via
// InnerAllocate: a FixedArray of length 2, a packed JSArray wrapping it,
// and the JSIteratorResult itself.
Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
                                                          Node* key,
                                                          Node* value) {
  Node* native_context = LoadNativeContext(context);
  Node* length = SmiConstant(2);
  int const elements_size = FixedArray::SizeFor(2);
  TNode<FixedArray> elements = UncheckedCast<FixedArray>(
      Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize));
  // Sub-object 1: the FixedArray [key, value].
  StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
                       RootIndex::kFixedArrayMap);
  StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
  StoreFixedArrayElement(elements, 0, key);
  StoreFixedArrayElement(elements, 1, value);
  // Sub-object 2: the JSArray backed by that FixedArray.
  Node* array_map = LoadContextElement(
      native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX);
  TNode<HeapObject> array = InnerAllocate(elements, elements_size);
  StoreMapNoWriteBarrier(array, array_map);
  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
  // Sub-object 3: the iterator result with done == false.
  Node* iterator_map =
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
  TNode<HeapObject> result = InnerAllocate(array, JSArray::kSize);
  StoreMapNoWriteBarrier(result, iterator_map);
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
  StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
                       RootIndex::kFalseValue);
  return result;
}
13021 :
13022 168 : TNode<JSReceiver> CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
13023 : TNode<Object> o,
13024 : TNode<Number> len) {
13025 : TNode<JSReceiver> constructor =
13026 336 : CAST(CallRuntime(Runtime::kArraySpeciesConstructor, context, o));
13027 168 : return Construct(context, constructor, len);
13028 : }
13029 :
13030 14173 : Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
13031 : CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
13032 : TNode<Uint32T> buffer_bit_field = LoadJSArrayBufferBitField(CAST(buffer));
13033 14173 : return IsSetWord32<JSArrayBuffer::WasDetachedBit>(buffer_bit_field);
13034 : }
13035 :
13036 1736 : void CodeStubAssembler::ThrowIfArrayBufferIsDetached(
13037 : SloppyTNode<Context> context, TNode<JSArrayBuffer> array_buffer,
13038 : const char* method_name) {
13039 3472 : Label if_detached(this, Label::kDeferred), if_not_detached(this);
13040 3472 : Branch(IsDetachedBuffer(array_buffer), &if_detached, &if_not_detached);
13041 : BIND(&if_detached);
13042 1736 : ThrowTypeError(context, MessageTemplate::kDetachedOperation, method_name);
13043 1736 : BIND(&if_not_detached);
13044 1736 : }
13045 :
13046 1120 : void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
13047 : SloppyTNode<Context> context, TNode<JSArrayBufferView> array_buffer_view,
13048 : const char* method_name) {
13049 1120 : TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
13050 1120 : ThrowIfArrayBufferIsDetached(context, buffer, method_name);
13051 1120 : }
13052 :
// Loads the raw bit field of a JSArrayBuffer (carries flags such as the
// WasDetached bit).
TNode<Uint32T> CodeStubAssembler::LoadJSArrayBufferBitField(
    TNode<JSArrayBuffer> array_buffer) {
  return LoadObjectField<Uint32T>(array_buffer, JSArrayBuffer::kBitFieldOffset);
}
13057 :
// Loads the raw backing-store pointer of a JSArrayBuffer.
TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStore(
    TNode<JSArrayBuffer> array_buffer) {
  return LoadObjectField<RawPtrT>(array_buffer,
                                  JSArrayBuffer::kBackingStoreOffset);
}
13063 :
// Loads the JSArrayBuffer that backs the given typed array / DataView.
TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(
    TNode<JSArrayBufferView> array_buffer_view) {
  return LoadObjectField<JSArrayBuffer>(array_buffer_view,
                                        JSArrayBufferView::kBufferOffset);
}
13069 :
// Loads the byte length of the given typed array / DataView.
TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteLength(
    TNode<JSArrayBufferView> array_buffer_view) {
  return LoadObjectField<UintPtrT>(array_buffer_view,
                                   JSArrayBufferView::kByteLengthOffset);
}
13075 :
// Loads the byte offset of the given typed array / DataView into its
// backing buffer.
TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteOffset(
    TNode<JSArrayBufferView> array_buffer_view) {
  return LoadObjectField<UintPtrT>(array_buffer_view,
                                   JSArrayBufferView::kByteOffsetOffset);
}
13081 :
// Loads the element count of a JSTypedArray (stored as a Smi).
TNode<Smi> CodeStubAssembler::LoadJSTypedArrayLength(
    TNode<JSTypedArray> typed_array) {
  return LoadObjectField<Smi>(typed_array, JSTypedArray::kLengthOffset);
}
13086 :
// Computes the base pointer of the stub's stack arguments from the frame
// pointer (or an explicitly supplied {fp}) and the argument count {argc},
// interpreted according to {param_mode}.
CodeStubArguments::CodeStubArguments(
    CodeStubAssembler* assembler, Node* argc, Node* fp,
    CodeStubAssembler::ParameterMode param_mode, ReceiverMode receiver_mode)
    : assembler_(assembler),
      argc_mode_(param_mode),
      receiver_mode_(receiver_mode),
      argc_(argc),
      arguments_(),
      fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
  // arguments_ = fp + fixed-frame-slot offset + argc slots; individual
  // arguments are then addressed at negative offsets from it (see
  // AtIndexPtr, which negates the index).
  Node* offset = assembler_->ElementOffsetFromIndex(
      argc_, PACKED_ELEMENTS, param_mode,
      (StandardFrameConstants::kFixedSlotCountAboveFp - 1) *
          kSystemPointerSize);
  arguments_ = assembler_->UncheckedCast<RawPtr<Object>>(
      assembler_->IntPtrAdd(fp_, offset));
}
13103 :
// Loads the receiver, which sits one slot beyond the arguments_ base
// pointer (at arguments_ + kSystemPointerSize).
TNode<Object> CodeStubArguments::GetReceiver() const {
  DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
  return assembler_->UncheckedCast<Object>(
      assembler_->Load(MachineType::AnyTagged(), arguments_,
                       assembler_->IntPtrConstant(kSystemPointerSize)));
}
13110 :
// Overwrites the receiver slot (arguments_ + kSystemPointerSize). Uses a
// bare store; no write barrier is emitted for this stack slot.
void CodeStubArguments::SetReceiver(TNode<Object> object) const {
  DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
  assembler_->StoreNoWriteBarrier(
      MachineRepresentation::kTagged, arguments_,
      assembler_->IntPtrConstant(kSystemPointerSize), object);
}
13117 :
// Computes the address of argument {index}. Arguments are laid out at
// descending addresses from arguments_, hence the negated index before
// the element-offset computation.
TNode<RawPtr<Object>> CodeStubArguments::AtIndexPtr(
    Node* index, CodeStubAssembler::ParameterMode mode) const {
  typedef compiler::Node Node;
  Node* negated_index = assembler_->IntPtrOrSmiSub(
      assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
  Node* offset = assembler_->ElementOffsetFromIndex(negated_index,
                                                    PACKED_ELEMENTS, mode, 0);
  return assembler_->UncheckedCast<RawPtr<Object>>(assembler_->IntPtrAdd(
      assembler_->UncheckedCast<IntPtrT>(arguments_), offset));
}
13128 :
// Loads argument {index} as a tagged value. {index} must be below the
// argument count (CSA-asserted) and {mode} must match argc_mode_.
TNode<Object> CodeStubArguments::AtIndex(
    Node* index, CodeStubAssembler::ParameterMode mode) const {
  DCHECK_EQ(argc_mode_, mode);
  CSA_ASSERT(assembler_,
             assembler_->UintPtrOrSmiLessThan(index, GetLength(mode), mode));
  return assembler_->UncheckedCast<Object>(
      assembler_->Load(MachineType::AnyTagged(), AtIndexPtr(index, mode)));
}
13137 :
// Convenience overload: loads argument at a compile-time-constant index.
TNode<Object> CodeStubArguments::AtIndex(int index) const {
  return AtIndex(assembler_->IntPtrConstant(index));
}
13141 :
// Returns argument {index}, or {default_value} when the caller passed
// fewer than {index} + 1 arguments.
TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
    int index, TNode<Object> default_value) {
  CodeStubAssembler::TVariable<Object> result(assembler_);
  CodeStubAssembler::Label argument_missing(assembler_),
      argument_done(assembler_, &result);

  // index >= argc_ means the argument was not provided.
  assembler_->GotoIf(assembler_->UintPtrOrSmiGreaterThanOrEqual(
                         assembler_->IntPtrOrSmiConstant(index, argc_mode_),
                         argc_, argc_mode_),
                     &argument_missing);
  result = AtIndex(index);
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_missing);
  result = default_value;
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_done);
  return result.value();
}
13162 :
// Dynamic-index variant: returns argument {index}, or {default_value}
// when fewer than {index} + 1 arguments were passed. {index} is converted
// to argc_mode_ for the bounds comparison.
TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
    TNode<IntPtrT> index, TNode<Object> default_value) {
  CodeStubAssembler::TVariable<Object> result(assembler_);
  CodeStubAssembler::Label argument_missing(assembler_),
      argument_done(assembler_, &result);

  assembler_->GotoIf(
      assembler_->UintPtrOrSmiGreaterThanOrEqual(
          assembler_->IntPtrToParameter(index, argc_mode_), argc_, argc_mode_),
      &argument_missing);
  result = AtIndex(index);
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_missing);
  result = default_value;
  assembler_->Goto(&argument_done);

  assembler_->BIND(&argument_done);
  return result.value();
}
13183 :
// Invokes {body} on each argument in [first, last), defaulting to the full
// range [0, argc_). Arguments sit at descending addresses, hence the
// IntPtrSub address computation and the negative per-iteration advance.
void CodeStubArguments::ForEach(
    const CodeStubAssembler::VariableList& vars,
    const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
    CodeStubAssembler::ParameterMode mode) {
  assembler_->Comment("CodeStubArguments::ForEach");
  if (first == nullptr) {
    first = assembler_->IntPtrOrSmiConstant(0, mode);
  }
  if (last == nullptr) {
    // Iterate through the whole argument list by default.
    DCHECK_EQ(mode, argc_mode_);
    last = argc_;
  }
  Node* start = assembler_->IntPtrSub(
      assembler_->UncheckedCast<IntPtrT>(arguments_),
      assembler_->ElementOffsetFromIndex(first, SYSTEM_POINTER_ELEMENTS, mode));
  Node* end = assembler_->IntPtrSub(
      assembler_->UncheckedCast<IntPtrT>(arguments_),
      assembler_->ElementOffsetFromIndex(last, SYSTEM_POINTER_ELEMENTS, mode));
  assembler_->BuildFastLoop(
      vars, start, end,
      [this, &body](Node* current) {
        Node* arg = assembler_->Load(MachineType::AnyTagged(), current);
        body(arg);
      },
      -kSystemPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
      CodeStubAssembler::IndexAdvanceMode::kPost);
}
13211 :
// Pops all arguments (plus the receiver slot, when present) off the stack
// and returns {value} to the caller.
void CodeStubArguments::PopAndReturn(Node* value) {
  Node* pop_count;
  if (receiver_mode_ == ReceiverMode::kHasReceiver) {
    // One extra slot for the implicit receiver.
    pop_count = assembler_->IntPtrOrSmiAdd(
        argc_, assembler_->IntPtrOrSmiConstant(1, argc_mode_), argc_mode_);
  } else {
    pop_count = argc_;
  }

  assembler_->PopAndReturn(assembler_->ParameterToIntPtr(pop_count, argc_mode_),
                           value);
}
13224 :
// Returns true iff {elements_kind} is one of the fast kinds, which occupy
// the lowest kind numbers (<= LAST_FAST_ELEMENTS_KIND).
Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
  STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
  return Uint32LessThanOrEqual(elements_kind,
                               Int32Constant(LAST_FAST_ELEMENTS_KIND));
}
13230 :
// Returns true iff {elements_kind} is PACKED_DOUBLE_ELEMENTS or
// HOLEY_DOUBLE_ELEMENTS. The two double kinds are an adjacent even/odd
// pair, so dropping the low bit (shift right by one) identifies the pair.
TNode<BoolT> CodeStubAssembler::IsDoubleElementsKind(
    TNode<Int32T> elements_kind) {
  STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
  STATIC_ASSERT((PACKED_DOUBLE_ELEMENTS & 1) == 0);
  STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS);
  return Word32Equal(Word32Shr(elements_kind, Int32Constant(1)),
                     Int32Constant(PACKED_DOUBLE_ELEMENTS / 2));
}
13239 :
13240 336 : Node* CodeStubAssembler::IsFastSmiOrTaggedElementsKind(Node* elements_kind) {
13241 : STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
13242 : STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
13243 : STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
13244 : return Uint32LessThanOrEqual(elements_kind,
13245 1008 : Int32Constant(TERMINAL_FAST_ELEMENTS_KIND));
13246 : }
13247 :
13248 448 : Node* CodeStubAssembler::IsFastSmiElementsKind(Node* elements_kind) {
13249 : return Uint32LessThanOrEqual(elements_kind,
13250 1344 : Int32Constant(HOLEY_SMI_ELEMENTS));
13251 : }
13252 :
// Returns true iff |elements_kind| is one of the holey fast kinds. Requires
// the input to already be a fast kind (see the CSA_ASSERT).
Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
  CSA_ASSERT(this, IsFastElementsKind(elements_kind));

  // Within the fast range, bit 0 of the kind distinguishes holey (1) from
  // packed (0), so a single bit test suffices.
  STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
  STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
  return IsSetWord32(elements_kind, 1);
}
13261 :
13262 1344 : Node* CodeStubAssembler::IsElementsKindGreaterThan(
13263 : Node* target_kind, ElementsKind reference_kind) {
13264 4032 : return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
13265 : }
13266 :
13267 336 : TNode<BoolT> CodeStubAssembler::IsElementsKindLessThanOrEqual(
13268 : TNode<Int32T> target_kind, ElementsKind reference_kind) {
13269 672 : return Int32LessThanOrEqual(target_kind, Int32Constant(reference_kind));
13270 : }
13271 :
13272 397 : Node* CodeStubAssembler::IsDebugActive() {
13273 : Node* is_debug_active = Load(
13274 : MachineType::Uint8(),
13275 794 : ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
13276 1191 : return Word32NotEqual(is_debug_active, Int32Constant(0));
13277 : }
13278 :
13279 2576 : TNode<BoolT> CodeStubAssembler::IsRuntimeCallStatsEnabled() {
13280 : TNode<Word32T> flag_value = UncheckedCast<Word32T>(Load(
13281 : MachineType::Int32(),
13282 5152 : ExternalConstant(ExternalReference::address_of_runtime_stats_flag())));
13283 5152 : return Word32NotEqual(flag_value, Int32Constant(0));
13284 : }
13285 :
13286 56 : Node* CodeStubAssembler::IsPromiseHookEnabled() {
13287 : Node* const promise_hook = Load(
13288 : MachineType::Pointer(),
13289 112 : ExternalConstant(ExternalReference::promise_hook_address(isolate())));
13290 168 : return WordNotEqual(promise_hook, IntPtrConstant(0));
13291 : }
13292 :
13293 224 : Node* CodeStubAssembler::HasAsyncEventDelegate() {
13294 : Node* const async_event_delegate =
13295 : Load(MachineType::Pointer(),
13296 : ExternalConstant(
13297 448 : ExternalReference::async_event_delegate_address(isolate())));
13298 672 : return WordNotEqual(async_event_delegate, IntPtrConstant(0));
13299 : }
13300 :
13301 931 : Node* CodeStubAssembler::IsPromiseHookEnabledOrHasAsyncEventDelegate() {
13302 : Node* const promise_hook_or_async_event_delegate =
13303 : Load(MachineType::Uint8(),
13304 : ExternalConstant(
13305 : ExternalReference::promise_hook_or_async_event_delegate_address(
13306 1862 : isolate())));
13307 2793 : return Word32NotEqual(promise_hook_or_async_event_delegate, Int32Constant(0));
13308 : }
13309 :
13310 1176 : Node* CodeStubAssembler::
13311 : IsPromiseHookEnabledOrDebugIsActiveOrHasAsyncEventDelegate() {
13312 : Node* const promise_hook_or_debug_is_active_or_async_event_delegate = Load(
13313 : MachineType::Uint8(),
13314 : ExternalConstant(
13315 : ExternalReference::
13316 : promise_hook_or_debug_is_active_or_async_event_delegate_address(
13317 2352 : isolate())));
13318 : return Word32NotEqual(promise_hook_or_debug_is_active_or_async_event_delegate,
13319 3528 : Int32Constant(0));
13320 : }
13321 :
// Fetches the Code object for the builtin identified by |builtin_id| (a Smi
// in [0, builtin_count)) from the isolate's builtins table.
TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
  CSA_ASSERT(this, SmiGreaterThanOrEqual(builtin_id, SmiConstant(0)));
  CSA_ASSERT(this,
             SmiLessThan(builtin_id, SmiConstant(Builtins::builtin_count)));

  // Turn the Smi-encoded id into a byte offset into the table of pointers.
  // Depending on how the Smi encoding's shift compares to the pointer-size
  // log2, this is either a left or a right shift of the raw tagged word.
  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  int index_shift = kSystemPointerSizeLog2 - kSmiShiftBits;
  TNode<WordT> table_index =
      index_shift >= 0 ? WordShl(BitcastTaggedToWord(builtin_id), index_shift)
                       : WordSar(BitcastTaggedToWord(builtin_id), -index_shift);

  return CAST(
      Load(MachineType::TaggedPointer(),
           ExternalConstant(ExternalReference::builtins_address(isolate())),
           table_index));
}
13338 :
// Returns the Code object to use when calling a function with the given
// SharedFunctionInfo, dispatching on the SFI's function-data field: a Smi
// means a builtin id; otherwise the data object's instance type selects
// between bytecode, wasm wrapper code, asm.js instantiation, lazy
// compilation, API calls and interpreter data. When |if_compile_lazy| is
// non-null, jumps there (instead of returning the CompileLazy builtin) for
// functions that still need compilation.
TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
    SloppyTNode<SharedFunctionInfo> shared_info, Label* if_compile_lazy) {
  TNode<Object> sfi_data =
      LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);

  TVARIABLE(Code, sfi_code);

  Label done(this);
  Label check_instance_type(this);

  // IsSmi: Is builtin
  GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
  if (if_compile_lazy) {
    GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtins::kCompileLazy)),
           if_compile_lazy);
  }
  sfi_code = LoadBuiltin(CAST(sfi_data));
  Goto(&done);

  // Switch on data's instance type.
  BIND(&check_instance_type);
  TNode<Int32T> data_type = LoadInstanceType(CAST(sfi_data));

  // case_values and case_labels must stay in one-to-one correspondence
  // (checked by the STATIC_ASSERT below).
  int32_t case_values[] = {BYTECODE_ARRAY_TYPE,
                           WASM_EXPORTED_FUNCTION_DATA_TYPE,
                           ASM_WASM_DATA_TYPE,
                           UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE,
                           UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE,
                           FUNCTION_TEMPLATE_INFO_TYPE};
  Label check_is_bytecode_array(this);
  Label check_is_exported_function_data(this);
  Label check_is_asm_wasm_data(this);
  Label check_is_uncompiled_data_without_preparse_data(this);
  Label check_is_uncompiled_data_with_preparse_data(this);
  Label check_is_function_template_info(this);
  Label check_is_interpreter_data(this);
  Label* case_labels[] = {&check_is_bytecode_array,
                          &check_is_exported_function_data,
                          &check_is_asm_wasm_data,
                          &check_is_uncompiled_data_without_preparse_data,
                          &check_is_uncompiled_data_with_preparse_data,
                          &check_is_function_template_info};
  STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels));
  // INTERPRETER_DATA_TYPE is handled by the default (fall-through) label.
  Switch(data_type, &check_is_interpreter_data, case_values, case_labels,
         arraysize(case_labels));

  // IsBytecodeArray: Interpret bytecode
  BIND(&check_is_bytecode_array);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
  Goto(&done);

  // IsWasmExportedFunctionData: Use the wrapper code
  BIND(&check_is_exported_function_data);
  sfi_code = CAST(LoadObjectField(
      CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset));
  Goto(&done);

  // IsAsmWasmData: Instantiate using AsmWasmData
  BIND(&check_is_asm_wasm_data);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs));
  Goto(&done);

  // IsUncompiledDataWithPreparseData | IsUncompiledDataWithoutPreparseData:
  // Compile lazy
  BIND(&check_is_uncompiled_data_with_preparse_data);
  Goto(&check_is_uncompiled_data_without_preparse_data);
  BIND(&check_is_uncompiled_data_without_preparse_data);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  Goto(if_compile_lazy ? if_compile_lazy : &done);

  // IsFunctionTemplateInfo: API call
  BIND(&check_is_function_template_info);
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall));
  Goto(&done);

  // IsInterpreterData: Interpret bytecode
  BIND(&check_is_interpreter_data);
  // This is the default branch, so assert that we have the expected data type.
  CSA_ASSERT(this,
             Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE)));
  sfi_code = CAST(LoadObjectField(
      CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset));
  Goto(&done);

  BIND(&done);
  return sfi_code.value();
}
13426 :
// Allocates and fully initializes a JSFunction without a prototype slot,
// using the given |map|, |shared_info| and |context|. The function's code is
// derived from the SharedFunctionInfo via GetSharedFunctionInfoCode.
Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
                                                           Node* shared_info,
                                                           Node* context) {
  CSA_SLOW_ASSERT(this, IsMap(map));

  Node* const code = GetSharedFunctionInfoCode(shared_info);

  // TODO(ishell): All the callers of this function pass map loaded from
  // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove
  // map parameter.
  CSA_ASSERT(this, Word32BinaryNot(IsConstructorMap(map)));
  CSA_ASSERT(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)));
  Node* const fun = Allocate(JSFunction::kSizeWithoutPrototype);
  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
  // Every field of the new object is initialized below; the NoWriteBarrier
  // stores are presumably safe because |fun| was just allocated — standard
  // CSA pattern for fresh allocations.
  StoreMapNoWriteBarrier(fun, map);
  StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
                       RootIndex::kManyClosuresCell);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
  return fun;
}
13454 :
13455 0 : Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
13456 : StackFrame::Type frame_type) {
13457 : return WordEqual(marker_or_function,
13458 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13459 : }
13460 :
13461 0 : Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
13462 : StackFrame::Type frame_type) {
13463 : return WordNotEqual(marker_or_function,
13464 0 : IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13465 : }
13466 :
// Walks the prototype chain starting at |receiver|: jumps to |if_fast| when
// every object on the chain has no elements and every prototype (but not the
// receiver itself) has an empty enum cache; jumps to |if_slow| as soon as
// either condition fails.
void CodeStubAssembler::CheckPrototypeEnumCache(Node* receiver,
                                                Node* receiver_map,
                                                Label* if_fast,
                                                Label* if_slow) {
  VARIABLE(var_object, MachineRepresentation::kTagged, receiver);
  VARIABLE(var_object_map, MachineRepresentation::kTagged, receiver_map);

  Label loop(this, {&var_object, &var_object_map}), done_loop(this);
  Goto(&loop);
  BIND(&loop);
  {
    // Check that there are no elements on the current {object}.
    Label if_no_elements(this);
    Node* object = var_object.value();
    Node* object_map = var_object_map.value();

    // The following relies on the elements only aliasing with JSProxy::target,
    // which is a Javascript value and hence cannot be confused with an elements
    // backing store.
    STATIC_ASSERT(static_cast<int>(JSObject::kElementsOffset) ==
                  static_cast<int>(JSProxy::kTargetOffset));
    Node* object_elements = LoadObjectField(object, JSObject::kElementsOffset);
    GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
    GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);

    // It might still be an empty JSArray.
    GotoIfNot(IsJSArrayMap(object_map), if_slow);
    Node* object_length = LoadJSArrayLength(object);
    Branch(WordEqual(object_length, SmiConstant(0)), &if_no_elements, if_slow);

    // Continue with the {object}s prototype.
    BIND(&if_no_elements);
    object = LoadMapPrototype(object_map);
    // A null prototype terminates the chain successfully.
    GotoIf(IsNull(object), if_fast);

    // For all {object}s but the {receiver}, check that the cache is empty.
    var_object.Bind(object);
    object_map = LoadMap(object);
    var_object_map.Bind(object_map);
    Node* object_enum_length = LoadMapEnumLength(object_map);
    Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
  }
}
13510 :
// Decides whether for-in style enumeration of |receiver| can take a fast
// path. Jumps to |if_empty| when the (dictionary-mode) receiver provably has
// no enumerable properties or elements anywhere on its chain; jumps to
// |if_runtime| when the runtime must be consulted; otherwise falls through
// and returns the receiver's map.
Node* CodeStubAssembler::CheckEnumCache(Node* receiver, Label* if_empty,
                                        Label* if_runtime) {
  Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
  Node* receiver_map = LoadMap(receiver);

  // Check if the enum length field of the {receiver} is properly initialized,
  // indicating that there is an enum cache.
  Node* receiver_enum_length = LoadMapEnumLength(receiver_map);
  Branch(WordEqual(receiver_enum_length,
                   IntPtrConstant(kInvalidEnumCacheSentinel)),
         &if_no_cache, &if_cache);

  BIND(&if_no_cache);
  {
    // Avoid runtime-call for empty dictionary receivers.
    GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
    TNode<NameDictionary> properties = CAST(LoadSlowProperties(receiver));
    TNode<Smi> length = GetNumberOfElements(properties);
    GotoIfNot(WordEqual(length, SmiConstant(0)), if_runtime);
    // Check that there are no elements on the {receiver} and its prototype
    // chain. Given that we do not create an EnumCache for dict-mode objects,
    // directly jump to {if_empty} if there are no elements and no properties
    // on the {receiver}.
    CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
  }

  // Check that there are no elements on the fast {receiver} and its
  // prototype chain.
  BIND(&if_cache);
  CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);

  BIND(&if_fast);
  return receiver_map;
}
13545 :
13546 3584 : TNode<IntPtrT> CodeStubAssembler::GetArgumentsLength(CodeStubArguments* args) {
13547 3584 : return args->GetLength();
13548 : }
13549 :
13550 4480 : TNode<Object> CodeStubAssembler::GetArgumentValue(CodeStubArguments* args,
13551 : TNode<IntPtrT> index) {
13552 4480 : return args->GetOptionalArgumentValue(index);
13553 : }
13554 :
13555 0 : void CodeStubAssembler::Print(const char* s) {
13556 0 : std::string formatted(s);
13557 : formatted += "\n";
13558 : CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13559 0 : StringConstant(formatted.c_str()));
13560 0 : }
13561 :
13562 0 : void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
13563 0 : if (prefix != nullptr) {
13564 0 : std::string formatted(prefix);
13565 : formatted += ": ";
13566 : Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
13567 0 : formatted.c_str(), TENURED);
13568 : CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13569 : HeapConstant(string));
13570 : }
13571 : CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
13572 0 : }
13573 :
// Compares the stack pointer against the isolate's stack limit and calls
// Runtime::kStackGuard (interrupt / stack-overflow handling) when the limit
// has been crossed. Do not restructure the load/compare/branch sequence: it
// is pattern-matched during instruction selection (see below).
void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
  Label ok(this), stack_check_interrupt(this, Label::kDeferred);

  // The instruction sequence below is carefully crafted to hit our pattern
  // matcher for stack checks within instruction selection.
  // See StackCheckMatcher::Matched and JSGenericLowering::LowerJSStackCheck.

  TNode<UintPtrT> sp = UncheckedCast<UintPtrT>(LoadStackPointer());
  TNode<UintPtrT> stack_limit = UncheckedCast<UintPtrT>(Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate()))));
  TNode<BoolT> sp_within_limit = UintPtrLessThan(stack_limit, sp);

  Branch(sp_within_limit, &ok, &stack_check_interrupt);

  BIND(&stack_check_interrupt);
  CallRuntime(Runtime::kStackGuard, context);
  Goto(&ok);

  BIND(&ok);
}
13595 :
// Initializes a freshly allocated function Context of |slots| total slots:
// sets its map and length, and fills the standard header slots (scope info,
// previous context, extension, native context).
void CodeStubAssembler::InitializeFunctionContext(Node* native_context,
                                                  Node* context, int slots) {
  DCHECK_GE(slots, Context::MIN_CONTEXT_SLOTS);
  StoreMapNoWriteBarrier(context, RootIndex::kFunctionContextMap);
  // Contexts reuse the FixedArray length field.
  StoreObjectFieldNoWriteBarrier(context, FixedArray::kLengthOffset,
                                 SmiConstant(slots));

  Node* const empty_scope_info =
      LoadContextElement(native_context, Context::SCOPE_INFO_INDEX);
  StoreContextElementNoWriteBarrier(context, Context::SCOPE_INFO_INDEX,
                                    empty_scope_info);
  StoreContextElementNoWriteBarrier(context, Context::PREVIOUS_INDEX,
                                    UndefinedConstant());
  // NOTE(review): extension starts out as the hole; presumably filled in
  // later by callers that need an extension object — confirm against callers.
  StoreContextElementNoWriteBarrier(context, Context::EXTENSION_INDEX,
                                    TheHoleConstant());
  StoreContextElementNoWriteBarrier(context, Context::NATIVE_CONTEXT_INDEX,
                                    native_context);
}
13614 :
// Creates a new JSArray of the given non-negative |length|. Lengths below
// JSArray::kInitialMaxFastElementArray are allocated inline as
// PACKED_SMI_ELEMENTS arrays; larger (or non-Smi) lengths fall back to
// Runtime::kNewArray.
TNode<JSArray> CodeStubAssembler::ArrayCreate(TNode<Context> context,
                                              TNode<Number> length) {
  TVARIABLE(JSArray, array);
  Label allocate_js_array(this);

  Label done(this), next(this), runtime(this, Label::kDeferred);
  TNode<Smi> limit = SmiConstant(JSArray::kInitialMaxFastElementArray);
  CSA_ASSERT_BRANCH(this, [=](Label* ok, Label* not_ok) {
    BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
                                       SmiConstant(0), ok, not_ok);
  });
  // This check also transitively covers the case where length is too big
  // to be representable by a SMI and so is not usable with
  // AllocateJSArray.
  BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
                                     limit, &runtime, &next);

  BIND(&runtime);
  {
    // Slow path: let the runtime construct the array via the Array function.
    TNode<Context> native_context = LoadNativeContext(context);
    TNode<JSFunction> array_function =
        CAST(LoadContextElement(native_context, Context::ARRAY_FUNCTION_INDEX));
    array = CAST(CallRuntime(Runtime::kNewArray, context, array_function,
                             length, array_function, UndefinedConstant()));
    Goto(&done);
  }

  BIND(&next);
  // Guaranteed by the limit check above.
  CSA_ASSERT(this, TaggedIsSmi(length));

  TNode<Map> array_map = CAST(LoadContextElement(
      context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX));

  // TODO(delphick): Consider using
  // AllocateUninitializedJSArrayWithElements to avoid initializing an
  // array and then writing over it.
  array =
      AllocateJSArray(PACKED_SMI_ELEMENTS, array_map, length, SmiConstant(0),
                      nullptr, ParameterMode::SMI_PARAMETERS);
  Goto(&done);

  BIND(&done);
  return array.value();
}
13659 :
// Sets |array|.length to |length|, skipping the (unobservable) store when
// |array| is a fast JSArray whose length already equals |length|; all other
// cases go through the generic strict-mode property store.
void CodeStubAssembler::SetPropertyLength(TNode<Context> context,
                                          TNode<Object> array,
                                          TNode<Number> length) {
  Label fast(this), runtime(this), done(this);
  // There's no need to set the length, if
  // 1) the array is a fast JS array and
  // 2) the new length is equal to the old length.
  // as the set is not observable. Otherwise fall back to the run-time.

  // 1) Check that the array has fast elements.
  // TODO(delphick): Consider changing this since it does an an unnecessary
  // check for SMIs.
  // TODO(delphick): Also we could hoist this to after the array construction
  // and copy the args into array in the same way as the Array constructor.
  BranchIfFastJSArray(array, context, &fast, &runtime);

  BIND(&fast);
  {
    TNode<JSArray> fast_array = CAST(array);

    TNode<Smi> length_smi = CAST(length);
    TNode<Smi> old_length = LoadFastJSArrayLength(fast_array);
    CSA_ASSERT(this, TaggedIsPositiveSmi(old_length));

    // 2) If the created array's length matches the required length, then
    //    there's nothing else to do. Otherwise use the runtime to set the
    //    property as that will insert holes into excess elements or shrink
    //    the backing store as appropriate.
    Branch(SmiNotEqual(length_smi, old_length), &runtime, &done);
  }

  BIND(&runtime);
  {
    SetPropertyStrict(context, array, CodeStubAssembler::LengthStringConstant(),
                      length);
    Goto(&done);
  }

  BIND(&done);
}
13700 :
13701 224 : void CodeStubAssembler::GotoIfInitialPrototypePropertyModified(
13702 : TNode<Map> object_map, TNode<Map> initial_prototype_map, int descriptor,
13703 : RootIndex field_name_root_index, Label* if_modified) {
13704 : DescriptorIndexAndName index_name{descriptor, field_name_root_index};
13705 : GotoIfInitialPrototypePropertiesModified(
13706 : object_map, initial_prototype_map,
13707 224 : Vector<DescriptorIndexAndName>(&index_name, 1), if_modified);
13708 224 : }
13709 :
// Jumps to |if_modified| unless |object_map|'s prototype still has the
// unmodified |initial_prototype_map|. With constant field tracking enabled,
// additionally verifies that each descriptor listed in |properties| is still
// marked PropertyConstness::kConst on the prototype (i.e. the property value
// has not been overwritten since bootstrapping).
void CodeStubAssembler::GotoIfInitialPrototypePropertiesModified(
    TNode<Map> object_map, TNode<Map> initial_prototype_map,
    Vector<DescriptorIndexAndName> properties, Label* if_modified) {
  TNode<Map> prototype_map = LoadMap(LoadMapPrototype(object_map));
  GotoIfNot(WordEqual(prototype_map, initial_prototype_map), if_modified);

  if (FLAG_track_constant_fields) {
    // With constant field tracking, we need to make sure that important
    // properties in the prototype has not been tampered with. We do this by
    // checking that their slots in the prototype's descriptor array are still
    // marked as const.
    TNode<DescriptorArray> descriptors = LoadMapDescriptors(prototype_map);

    // AND the constness bits of all listed descriptors together, so a single
    // comparison at the end detects any non-const entry.
    TNode<Uint32T> combined_details;
    for (int i = 0; i < properties.length(); i++) {
      // Assert the descriptor index is in-bounds.
      int descriptor = properties[i].descriptor_index;
      CSA_ASSERT(this, Int32LessThan(Int32Constant(descriptor),
                                     LoadNumberOfDescriptors(descriptors)));
      // Assert that the name is correct. This essentially checks that
      // the descriptor index corresponds to the insertion order in
      // the bootstrapper.
      CSA_ASSERT(this,
                 WordEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
                           LoadRoot(properties[i].name_root_index)));

      TNode<Uint32T> details =
          DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
      if (i == 0) {
        combined_details = details;
      } else {
        combined_details = Unsigned(Word32And(combined_details, details));
      }
    }

    TNode<Uint32T> constness =
        DecodeWord32<PropertyDetails::ConstnessField>(combined_details);

    GotoIfNot(
        Word32Equal(constness,
                    Int32Constant(static_cast<int>(PropertyConstness::kConst))),
        if_modified);
  }
}
13754 :
13755 : } // namespace internal
13756 94089 : } // namespace v8
|