Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include <algorithm>
6 :
7 : #include "src/base/adapters.h"
8 : #include "src/compiler/instruction-selector-impl.h"
9 : #include "src/compiler/node-matchers.h"
10 : #include "src/compiler/node-properties.h"
11 :
12 : namespace v8 {
13 : namespace internal {
14 : namespace compiler {
15 :
16 : // Adds X64-specific methods for generating operands.
17 : class X64OperandGenerator final : public OperandGenerator {
18 : public:
19 : explicit X64OperandGenerator(InstructionSelector* selector)
20 : : OperandGenerator(selector) {}
21 :
22 17844801 : bool CanBeImmediate(Node* node) {
23 17844801 : switch (node->opcode()) {
24 : case IrOpcode::kInt32Constant:
25 : case IrOpcode::kRelocatableInt32Constant:
26 : return true;
27 : case IrOpcode::kInt64Constant: {
28 7805454 : const int64_t value = OpParameter<int64_t>(node);
29 7805454 : return std::numeric_limits<int32_t>::min() < value &&
30 : value <= std::numeric_limits<int32_t>::max();
31 : }
32 : case IrOpcode::kNumberConstant: {
33 1069157 : const double value = OpParameter<double>(node);
34 1069157 : return bit_cast<int64_t>(value) == 0;
35 : }
36 : default:
37 : return false;
38 : }
39 : }
40 :
41 84997 : int32_t GetImmediateIntegerValue(Node* node) {
42 : DCHECK(CanBeImmediate(node));
43 84997 : if (node->opcode() == IrOpcode::kInt32Constant) {
44 0 : return OpParameter<int32_t>(node);
45 : }
46 : DCHECK_EQ(IrOpcode::kInt64Constant, node->opcode());
47 84997 : return static_cast<int32_t>(OpParameter<int64_t>(node));
48 : }
49 :
50 8554990 : bool CanBeMemoryOperand(InstructionCode opcode, Node* node, Node* input,
51 : int effect_level) {
52 8968862 : if (input->opcode() != IrOpcode::kLoad ||
53 1382010 : !selector()->CanCover(node, input)) {
54 : return false;
55 : }
56 968147 : if (effect_level != selector()->GetEffectLevel(input)) {
57 : return false;
58 : }
59 : MachineRepresentation rep =
60 968141 : LoadRepresentationOf(input->op()).representation();
61 968140 : switch (opcode) {
62 : case kX64And:
63 : case kX64Or:
64 : case kX64Xor:
65 : case kX64Add:
66 : case kX64Sub:
67 : case kX64Push:
68 : case kX64Cmp:
69 : case kX64Test:
70 1188107 : return rep == MachineRepresentation::kWord64 || IsAnyTagged(rep);
71 : case kX64And32:
72 : case kX64Or32:
73 : case kX64Xor32:
74 : case kX64Add32:
75 : case kX64Sub32:
76 : case kX64Cmp32:
77 : case kX64Test32:
78 32159 : return rep == MachineRepresentation::kWord32;
79 : case kX64Cmp16:
80 : case kX64Test16:
81 1616 : return rep == MachineRepresentation::kWord16;
82 : case kX64Cmp8:
83 : case kX64Test8:
84 186308 : return rep == MachineRepresentation::kWord8;
85 : default:
86 : break;
87 : }
88 : return false;
89 : }
90 :
91 6839555 : AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent,
92 6325992 : Node* base, Node* displacement,
93 : DisplacementMode displacement_mode,
94 : InstructionOperand inputs[],
95 : size_t* input_count) {
96 : AddressingMode mode = kMode_MRI;
97 6839555 : if (base != nullptr && (index != nullptr || displacement != nullptr)) {
98 6327482 : if (base->opcode() == IrOpcode::kInt32Constant &&
99 1490 : OpParameter<int32_t>(base) == 0) {
100 : base = nullptr;
101 6562900 : } else if (base->opcode() == IrOpcode::kInt64Constant &&
102 237255 : OpParameter<int64_t>(base) == 0) {
103 : base = nullptr;
104 : }
105 : }
106 6839555 : if (base != nullptr) {
107 6694322 : inputs[(*input_count)++] = UseRegister(base);
108 6694342 : if (index != nullptr) {
109 : DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
110 616921 : inputs[(*input_count)++] = UseRegister(index);
111 616903 : if (displacement != nullptr) {
112 : inputs[(*input_count)++] = displacement_mode
113 : ? UseNegatedImmediate(displacement)
114 305278 : : UseImmediate(displacement);
115 : static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
116 : kMode_MR4I, kMode_MR8I};
117 305278 : mode = kMRnI_modes[scale_exponent];
118 : } else {
119 : static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
120 : kMode_MR4, kMode_MR8};
121 311625 : mode = kMRn_modes[scale_exponent];
122 : }
123 : } else {
124 6077421 : if (displacement == nullptr) {
125 : mode = kMode_MR;
126 : } else {
127 : inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
128 : ? UseNegatedImmediate(displacement)
129 5640641 : : UseImmediate(displacement);
130 : mode = kMode_MRI;
131 : }
132 : }
133 : } else {
134 : DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
135 145233 : if (displacement != nullptr) {
136 129837 : if (index == nullptr) {
137 641 : inputs[(*input_count)++] = UseRegister(displacement);
138 : mode = kMode_MR;
139 : } else {
140 129196 : inputs[(*input_count)++] = UseRegister(index);
141 : inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
142 : ? UseNegatedImmediate(displacement)
143 129196 : : UseImmediate(displacement);
144 : static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
145 : kMode_M4I, kMode_M8I};
146 129197 : mode = kMnI_modes[scale_exponent];
147 : }
148 : } else {
149 15396 : inputs[(*input_count)++] = UseRegister(index);
150 : static const AddressingMode kMn_modes[] = {kMode_MR, kMode_MR1,
151 : kMode_M4, kMode_M8};
152 15396 : mode = kMn_modes[scale_exponent];
153 15396 : if (mode == kMode_MR1) {
154 : // [%r1 + %r1*1] has a smaller encoding than [%r1*2+0]
155 4312 : inputs[(*input_count)++] = UseRegister(index);
156 : }
157 : }
158 : }
159 6839569 : return mode;
160 : }
161 :
162 6035149 : AddressingMode GetEffectiveAddressMemoryOperand(Node* operand,
163 : InstructionOperand inputs[],
164 : size_t* input_count) {
165 6035149 : if (selector()->CanAddressRelativeToRootsRegister()) {
166 3547155 : LoadMatcher<ExternalReferenceMatcher> m(operand);
167 3547156 : if (m.index().HasValue() && m.object().HasValue()) {
168 : Address const kRootsRegisterValue =
169 : kRootRegisterBias +
170 : reinterpret_cast<Address>(
171 405694 : selector()->isolate()->heap()->roots_array_start());
172 : ptrdiff_t const delta =
173 405694 : m.index().Value() +
174 405694 : (m.object().Value().address() - kRootsRegisterValue);
175 405694 : if (is_int32(delta)) {
176 405694 : inputs[(*input_count)++] = TempImmediate(static_cast<int32_t>(delta));
177 405695 : return kMode_Root;
178 : }
179 : }
180 : }
181 : BaseWithIndexAndDisplacement64Matcher m(operand, AddressOption::kAllowAll);
182 : DCHECK(m.matches());
183 5629465 : if (m.displacement() == nullptr || CanBeImmediate(m.displacement())) {
184 : return GenerateMemoryOperandInputs(
185 : m.index(), m.scale(), m.base(), m.displacement(),
186 5625668 : m.displacement_mode(), inputs, input_count);
187 6639 : } else if (m.base() == nullptr &&
188 2840 : m.displacement_mode() == kPositiveDisplacement) {
189 : // The displacement cannot be an immediate, but we can use the
190 : // displacement as base instead and still benefit from addressing
191 : // modes for the scale.
192 : return GenerateMemoryOperandInputs(m.index(), m.scale(), m.displacement(),
193 : nullptr, m.displacement_mode(), inputs,
194 2840 : input_count);
195 : } else {
196 1918 : inputs[(*input_count)++] = UseRegister(operand->InputAt(0));
197 1918 : inputs[(*input_count)++] = UseRegister(operand->InputAt(1));
198 959 : return kMode_MR1;
199 : }
200 : }
201 :
202 : bool CanBeBetterLeftOperand(Node* node) const {
203 570753 : return !selector()->IsLive(node);
204 : }
205 : };
206 :
207 : namespace {
208 3370342 : ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
209 : ArchOpcode opcode = kArchNop;
210 3370342 : switch (load_rep.representation()) {
211 : case MachineRepresentation::kFloat32:
212 : opcode = kX64Movss;
213 11254 : break;
214 : case MachineRepresentation::kFloat64:
215 : opcode = kX64Movsd;
216 288676 : break;
217 : case MachineRepresentation::kBit: // Fall through.
218 : case MachineRepresentation::kWord8:
219 149366 : opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
220 149366 : break;
221 : case MachineRepresentation::kWord16:
222 19575 : opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
223 19575 : break;
224 : case MachineRepresentation::kWord32:
225 : opcode = kX64Movl;
226 261959 : break;
227 : case MachineRepresentation::kTaggedSigned: // Fall through.
228 : case MachineRepresentation::kTaggedPointer: // Fall through.
229 : case MachineRepresentation::kTagged: // Fall through.
230 : case MachineRepresentation::kWord64:
231 : opcode = kX64Movq;
232 2639413 : break;
233 : case MachineRepresentation::kSimd128: // Fall through.
234 : opcode = kX64Movdqu;
235 78 : break;
236 : case MachineRepresentation::kNone:
237 0 : UNREACHABLE();
238 : break;
239 : }
240 3370342 : return opcode;
241 : }
242 :
243 1874009 : ArchOpcode GetStoreOpcode(StoreRepresentation store_rep) {
244 1874009 : switch (store_rep.representation()) {
245 : case MachineRepresentation::kFloat32:
246 : return kX64Movss;
247 : break;
248 : case MachineRepresentation::kFloat64:
249 : return kX64Movsd;
250 : break;
251 : case MachineRepresentation::kBit: // Fall through.
252 : case MachineRepresentation::kWord8:
253 : return kX64Movb;
254 : break;
255 : case MachineRepresentation::kWord16:
256 : return kX64Movw;
257 : break;
258 : case MachineRepresentation::kWord32:
259 : return kX64Movl;
260 : break;
261 : case MachineRepresentation::kTaggedSigned: // Fall through.
262 : case MachineRepresentation::kTaggedPointer: // Fall through.
263 : case MachineRepresentation::kTagged: // Fall through.
264 : case MachineRepresentation::kWord64:
265 : return kX64Movq;
266 : break;
267 : case MachineRepresentation::kSimd128: // Fall through.
268 : return kX64Movdqu;
269 : break;
270 : case MachineRepresentation::kNone:
271 0 : UNREACHABLE();
272 : }
273 0 : UNREACHABLE();
274 : }
275 :
276 : } // namespace
277 :
278 2354 : void InstructionSelector::VisitStackSlot(Node* node) {
279 1177 : StackSlotRepresentation rep = StackSlotRepresentationOf(node->op());
280 1177 : int slot = frame_->AllocateSpillSlot(rep.size());
281 : OperandGenerator g(this);
282 :
283 : Emit(kArchStackSlot, g.DefineAsRegister(node),
284 2354 : sequence()->AddImmediate(Constant(slot)), 0, nullptr);
285 1177 : }
286 :
287 0 : void InstructionSelector::VisitDebugAbort(Node* node) {
288 : X64OperandGenerator g(this);
289 0 : Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), rdx));
290 0 : }
291 :
// Lowers a (Protected)Load node to the matching x64 load instruction, using
// the best addressing mode for the load's address computation.
void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  X64OperandGenerator g(this);

  ArchOpcode opcode = GetLoadOpcode(load_rep);
  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(node);
  InstructionOperand inputs[3];
  size_t input_count = 0;
  AddressingMode mode =
      g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
  InstructionCode code = opcode | AddressingModeField::encode(mode);
  // VisitProtectedLoad delegates here; tag the instruction as protected so
  // the memory access is treated accordingly downstream.
  if (node->opcode() == IrOpcode::kProtectedLoad) {
    code |= MiscField::encode(X64MemoryProtection::kProtected);
  }
  Emit(code, 1, outputs, input_count, inputs);
}
309 :
310 3764 : void InstructionSelector::VisitProtectedLoad(Node* node) { VisitLoad(node); }
311 :
// Lowers a Store node. Stores that need a write barrier go through the
// kArchStoreWithWriteBarrier pseudo-instruction; all others become a plain
// x64 store with a folded addressing mode.
void InstructionSelector::VisitStore(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();

  if (write_barrier_kind != kNoWriteBarrier) {
    DCHECK(CanBeTaggedPointer(store_rep.representation()));
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    // UseUniqueRegister keeps each operand in its own register — presumably
    // because the write-barrier code may clobber them; confirm against the
    // kArchStoreWithWriteBarrier implementation.
    inputs[input_count++] = g.UseUniqueRegister(base);
    if (g.CanBeImmediate(index)) {
      inputs[input_count++] = g.UseImmediate(index);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(index);
      addressing_mode = kMode_MR1;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    ArchOpcode opcode = GetStoreOpcode(store_rep);
    InstructionOperand inputs[4];
    size_t input_count = 0;
    AddressingMode addressing_mode =
        g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
    InstructionCode code =
        opcode | AddressingModeField::encode(addressing_mode);
    // A store narrower than a pointer ignores the upper bits of the value,
    // so an explicit TruncateInt64ToInt32 on the value is redundant and the
    // untruncated input can be stored directly (when this node covers it).
    if ((ElementSizeLog2Of(store_rep.representation()) < kPointerSizeLog2) &&
        (value->opcode() == IrOpcode::kTruncateInt64ToInt32) &&
        CanCover(node, value)) {
      value = value->InputAt(0);
    }
    InstructionOperand value_operand =
        g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
    inputs[input_count++] = value_operand;
    Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
         inputs);
  }
}
376 :
377 3050 : void InstructionSelector::VisitProtectedStore(Node* node) {
378 : X64OperandGenerator g(this);
379 : Node* value = node->InputAt(2);
380 :
381 1525 : StoreRepresentation store_rep = StoreRepresentationOf(node->op());
382 :
383 1526 : ArchOpcode opcode = GetStoreOpcode(store_rep);
384 7625 : InstructionOperand inputs[4];
385 1525 : size_t input_count = 0;
386 : AddressingMode addressing_mode =
387 1525 : g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
388 1525 : InstructionCode code = opcode | AddressingModeField::encode(addressing_mode) |
389 1525 : MiscField::encode(X64MemoryProtection::kProtected);
390 : InstructionOperand value_operand =
391 1525 : g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
392 1526 : inputs[input_count++] = value_operand;
393 1526 : Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs);
394 1526 : }
395 :
// The x64 architecture supports unaligned access, therefore VisitLoad is used
// instead; an UnalignedLoad node should never reach this selector.
void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
398 :
// The x64 architecture supports unaligned access, therefore VisitStore is used
// instead; an UnalignedStore node should never reach this selector.
void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
401 :
// Lowers a CheckedLoad (bounds-checked buffer load) to the matching
// kCheckedLoad* instruction: buffer, offset, a zero displacement and the
// length to check against.
void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:            // Fall through.
    case MachineRepresentation::kSimd128:        // Fall through.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:         // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // If the offset is `left + constant` with 0 <= constant <= length, the
  // constant part can be folded into the instruction as an immediate
  // displacement while keeping the bounds check valid.
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length));
      return;
    }
  }
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
       g.UseRegister(offset), g.TempImmediate(0), length_operand);
}
454 :
455 :
// Lowers a CheckedStore (bounds-checked buffer store); mirrors
// VisitCheckedLoad with an extra value operand and no output.
void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:            // Fall through.
    case MachineRepresentation::kSimd128:        // Fall through.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:         // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  InstructionOperand value_operand =
      g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
  // Same constant-offset folding as in VisitCheckedLoad: fold `left + c`
  // into an immediate displacement when 0 <= c <= length.
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.NoOutput(), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length),
           value_operand);
      return;
    }
  }
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.NoOutput(), g.UseRegister(buffer), g.UseRegister(offset),
       g.TempImmediate(0), length_operand, value_operand);
}
512 :
513 :
514 : // Shared routine for multiple binary operations.
515 838490 : static void VisitBinop(InstructionSelector* selector, Node* node,
516 2269986 : InstructionCode opcode, FlagsContinuation* cont) {
517 : X64OperandGenerator g(selector);
518 609564 : Int32BinopMatcher m(node);
519 : Node* left = m.left().node();
520 : Node* right = m.right().node();
521 4266973 : InstructionOperand inputs[6];
522 609571 : size_t input_count = 0;
523 1828707 : InstructionOperand outputs[2];
524 : size_t output_count = 0;
525 :
526 : // TODO(turbofan): match complex addressing modes.
527 609571 : if (left == right) {
528 : // If both inputs refer to the same operand, enforce allocating a register
529 : // for both of them to ensure that we don't end up generating code like
530 : // this:
531 : //
532 : // mov rax, [rbp-0x10]
533 : // add rax, [rbp-0x10]
534 : // jo label
535 428 : InstructionOperand const input = g.UseRegister(left);
536 428 : inputs[input_count++] = input;
537 428 : inputs[input_count++] = input;
538 609143 : } else if (g.CanBeImmediate(right)) {
539 380214 : inputs[input_count++] = g.UseRegister(left);
540 380215 : inputs[input_count++] = g.UseImmediate(right);
541 : } else {
542 228924 : int effect_level = selector->GetEffectLevel(node);
543 228922 : if (cont->IsBranch()) {
544 : effect_level = selector->GetEffectLevel(
545 3323 : cont->true_block()->PredecessorAt(0)->control_input());
546 : }
547 406052 : if (node->op()->HasProperty(Operator::kCommutative) &&
548 353383 : g.CanBeBetterLeftOperand(right) &&
549 82443 : (!g.CanBeBetterLeftOperand(left) ||
550 82443 : !g.CanBeMemoryOperand(opcode, node, right, effect_level))) {
551 : std::swap(left, right);
552 : }
553 228927 : if (g.CanBeMemoryOperand(opcode, node, right, effect_level)) {
554 10603 : inputs[input_count++] = g.UseRegister(left);
555 : AddressingMode addressing_mode =
556 10603 : g.GetEffectiveAddressMemoryOperand(right, inputs, &input_count);
557 10603 : opcode |= AddressingModeField::encode(addressing_mode);
558 : } else {
559 218322 : inputs[input_count++] = g.UseRegister(left);
560 218320 : inputs[input_count++] = g.Use(right);
561 : }
562 : }
563 :
564 609570 : if (cont->IsBranch()) {
565 6646 : inputs[input_count++] = g.Label(cont->true_block());
566 6646 : inputs[input_count++] = g.Label(cont->false_block());
567 : }
568 :
569 609570 : outputs[output_count++] = g.DefineSameAsFirst(node);
570 609569 : if (cont->IsSet()) {
571 119455 : outputs[output_count++] = g.DefineAsRegister(cont->result());
572 : }
573 :
574 : DCHECK_NE(0u, input_count);
575 : DCHECK_NE(0u, output_count);
576 : DCHECK_GE(arraysize(inputs), input_count);
577 : DCHECK_GE(arraysize(outputs), output_count);
578 :
579 : opcode = cont->Encode(opcode);
580 609568 : if (cont->IsDeoptimize()) {
581 : selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
582 82933 : cont->kind(), cont->reason(), cont->frame_state());
583 : } else {
584 526635 : selector->Emit(opcode, output_count, outputs, input_count, inputs);
585 : }
586 609566 : }
587 :
588 :
589 : // Shared routine for multiple binary operations.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode) {
  FlagsContinuation cont;  // default continuation: no branch/deopt/set
  VisitBinop(selector, node, opcode, &cont);
}
595 :
596 :
597 115834 : void InstructionSelector::VisitWord32And(Node* node) {
598 : X64OperandGenerator g(this);
599 115834 : Uint32BinopMatcher m(node);
600 115839 : if (m.right().Is(0xff)) {
601 2031 : Emit(kX64Movzxbl, g.DefineAsRegister(node), g.Use(m.left().node()));
602 113808 : } else if (m.right().Is(0xffff)) {
603 4878 : Emit(kX64Movzxwl, g.DefineAsRegister(node), g.Use(m.left().node()));
604 : } else {
605 : VisitBinop(this, node, kX64And32);
606 : }
607 115838 : }
608 :
609 :
void InstructionSelector::VisitWord64And(Node* node) {
  // No special patterns; generic 64-bit AND lowering.
  VisitBinop(this, node, kX64And);
}
613 :
614 :
void InstructionSelector::VisitWord32Or(Node* node) {
  // No special patterns; generic 32-bit OR lowering.
  VisitBinop(this, node, kX64Or32);
}
618 :
619 :
void InstructionSelector::VisitWord64Or(Node* node) {
  // No special patterns; generic 64-bit OR lowering.
  VisitBinop(this, node, kX64Or);
}
623 :
624 :
625 13127 : void InstructionSelector::VisitWord32Xor(Node* node) {
626 : X64OperandGenerator g(this);
627 13127 : Uint32BinopMatcher m(node);
628 13127 : if (m.right().Is(-1)) {
629 3469 : Emit(kX64Not32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
630 : } else {
631 : VisitBinop(this, node, kX64Xor32);
632 : }
633 13127 : }
634 :
635 :
636 444 : void InstructionSelector::VisitWord64Xor(Node* node) {
637 : X64OperandGenerator g(this);
638 444 : Uint64BinopMatcher m(node);
639 444 : if (m.right().Is(-1)) {
640 55 : Emit(kX64Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
641 : } else {
642 : VisitBinop(this, node, kX64Xor);
643 : }
644 444 : }
645 :
646 :
647 : namespace {
648 :
649 : // Shared routine for multiple 32-bit shift operations.
650 : // TODO(bmeurer): Merge this with VisitWord64Shift using template magic?
651 67357 : void VisitWord32Shift(InstructionSelector* selector, Node* node,
652 : ArchOpcode opcode) {
653 : X64OperandGenerator g(selector);
654 67357 : Int32BinopMatcher m(node);
655 : Node* left = m.left().node();
656 : Node* right = m.right().node();
657 :
658 67359 : if (g.CanBeImmediate(right)) {
659 : selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
660 59266 : g.UseImmediate(right));
661 : } else {
662 : selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
663 8094 : g.UseFixed(right, rcx));
664 : }
665 67361 : }
666 :
667 :
668 : // Shared routine for multiple 64-bit shift operations.
669 : // TODO(bmeurer): Merge this with VisitWord32Shift using template magic?
void VisitWord64Shift(InstructionSelector* selector, Node* node,
                      ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();

  if (g.CanBeImmediate(right)) {
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseImmediate(right));
  } else {
    // The hardware masks 64-bit shift counts to 6 bits, so an explicit
    // `count & 0x3F` is redundant and can be skipped.
    if (m.right().IsWord64And()) {
      Int64BinopMatcher mright(right);
      if (mright.right().Is(0x3F)) {
        right = mright.left().node();
      }
    }
    // Variable shift counts must live in CL on x64.
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseFixed(right, rcx));
  }
}
691 :
692 1211048 : void EmitLea(InstructionSelector* selector, InstructionCode opcode,
693 : Node* result, Node* index, int scale, Node* base,
694 : Node* displacement, DisplacementMode displacement_mode) {
695 : X64OperandGenerator g(selector);
696 :
697 6055240 : InstructionOperand inputs[4];
698 1211048 : size_t input_count = 0;
699 : AddressingMode mode =
700 : g.GenerateMemoryOperandInputs(index, scale, base, displacement,
701 1211048 : displacement_mode, inputs, &input_count);
702 :
703 : DCHECK_NE(0u, input_count);
704 : DCHECK_GE(arraysize(inputs), input_count);
705 :
706 2422118 : InstructionOperand outputs[1];
707 1211060 : outputs[0] = g.DefineAsRegister(result);
708 :
709 1211059 : opcode = AddressingModeField::encode(mode) | opcode;
710 :
711 1211059 : selector->Emit(opcode, 1, outputs, input_count, inputs);
712 1211061 : }
713 :
714 : } // namespace
715 :
716 :
717 13664 : void InstructionSelector::VisitWord32Shl(Node* node) {
718 13664 : Int32ScaleMatcher m(node, true);
719 13664 : if (m.matches()) {
720 : Node* index = node->InputAt(0);
721 2344 : Node* base = m.power_of_two_plus_one() ? index : nullptr;
722 : EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr,
723 2344 : kPositiveDisplacement);
724 16008 : return;
725 : }
726 11320 : VisitWord32Shift(this, node, kX64Shl32);
727 : }
728 :
729 :
void InstructionSelector::VisitWord64Shl(Node* node) {
  X64OperandGenerator g(this);
  // Shifts by small constants that form scale patterns become a 64-bit lea.
  Int64ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, kX64Lea, node, index, m.scale(), base, nullptr,
            kPositiveDisplacement);
    return;
  } else {
    Int64BinopMatcher m(node);
    if ((m.left().IsChangeInt32ToInt64() ||
         m.left().IsChangeUint32ToUint64()) &&
        m.right().IsInRange(32, 63)) {
      // There's no need to sign/zero-extend to 64-bit if we shift out the upper
      // 32 bits anyway.
      Emit(kX64Shl, g.DefineSameAsFirst(node),
           g.UseRegister(m.left().node()->InputAt(0)),
           g.UseImmediate(m.right().node()));
      return;
    }
  }
  VisitWord64Shift(this, node, kX64Shl);
}
754 :
755 :
void InstructionSelector::VisitWord32Shr(Node* node) {
  // No special patterns; common 32-bit shift lowering.
  VisitWord32Shift(this, node, kX64Shr32);
}
759 :
760 : namespace {
// If |node| is a 64-bit right shift by 32 of a load that it covers, replaces
// the pair with a single 4-byte load of the upper half (displacement + 4)
// using |opcode| (e.g. movl for Shr, movsxlq for Sar). Returns true if the
// replacement instruction was emitted.
bool TryMatchLoadWord64AndShiftRight(InstructionSelector* selector, Node* node,
                                     InstructionCode opcode) {
  DCHECK(IrOpcode::kWord64Sar == node->opcode() ||
         IrOpcode::kWord64Shr == node->opcode());
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  if (selector->CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
      m.right().Is(32)) {
    // Just load and sign-extend the interesting 4 bytes instead. This happens,
    // for example, when we're loading and untagging SMIs.
    BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(),
                                                AddressOption::kAllowAll);
    if (mleft.matches() && (mleft.displacement() == nullptr ||
                            g.CanBeImmediate(mleft.displacement()))) {
      size_t input_count = 0;
      InstructionOperand inputs[3];
      AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
          m.left().node(), inputs, &input_count);
      if (mleft.displacement() == nullptr) {
        // Make sure that the addressing mode indicates the presence of an
        // immediate displacement. It seems that we never use M1 and M2, but we
        // handle them here anyways.
        switch (mode) {
          case kMode_MR:
            mode = kMode_MRI;
            break;
          case kMode_MR1:
            mode = kMode_MR1I;
            break;
          case kMode_MR2:
            mode = kMode_MR2I;
            break;
          case kMode_MR4:
            mode = kMode_MR4I;
            break;
          case kMode_MR8:
            mode = kMode_MR8I;
            break;
          case kMode_M1:
            mode = kMode_M1I;
            break;
          case kMode_M2:
            mode = kMode_M2I;
            break;
          case kMode_M4:
            mode = kMode_M4I;
            break;
          case kMode_M8:
            mode = kMode_M8I;
            break;
          case kMode_None:
          case kMode_MRI:
          case kMode_MR1I:
          case kMode_MR2I:
          case kMode_MR4I:
          case kMode_MR8I:
          case kMode_M1I:
          case kMode_M2I:
          case kMode_M4I:
          case kMode_M8I:
          case kMode_Root:
            UNREACHABLE();
        }
        // No displacement on the original load: add the +4 upper-half offset
        // as a fresh inline immediate.
        inputs[input_count++] = ImmediateOperand(ImmediateOperand::INLINE, 4);
      } else {
        // Fold the +4 upper-half offset into the existing displacement
        // immediate (the last input generated above).
        int32_t displacement = g.GetImmediateIntegerValue(mleft.displacement());
        inputs[input_count - 1] =
            ImmediateOperand(ImmediateOperand::INLINE, displacement + 4);
      }
      InstructionOperand outputs[] = {g.DefineAsRegister(node)};
      InstructionCode code = opcode | AddressingModeField::encode(mode);
      selector->Emit(code, 1, outputs, input_count, inputs);
      return true;
    }
  }
  return false;
}
838 : } // namespace
839 :
void InstructionSelector::VisitWord64Shr(Node* node) {
  // `(load x) >> 32` can be a plain 32-bit load of the upper half instead.
  if (TryMatchLoadWord64AndShiftRight(this, node, kX64Movl)) return;
  VisitWord64Shift(this, node, kX64Shr);
}
844 :
// Visitor for 32-bit arithmetic right shift. Recognizes the classic
// sign-extension idiom "(x shl K) sar K" for K == 16 / K == 24 and emits a
// dedicated movsxwl / movsxbl instead of the shift pair.
void InstructionSelector::VisitWord32Sar(Node* node) {
  X64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (CanCover(m.node(), m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      // (x << 16) >> 16: sign-extend the low 16 bits of x.
      Emit(kX64Movsxwl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
      return;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      // (x << 24) >> 24: sign-extend the low 8 bits of x.
      Emit(kX64Movsxbl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
      return;
    }
  }
  // No pattern matched: emit a plain 32-bit sar.
  VisitWord32Shift(this, node, kX64Sar32);
}
860 :
861 67677 : void InstructionSelector::VisitWord64Sar(Node* node) {
862 135354 : if (TryMatchLoadWord64AndShiftRight(this, node, kX64Movsxlq)) return;
863 31960 : VisitWord64Shift(this, node, kX64Sar);
864 : }
865 :
866 :
// Visitor for 32-bit rotate-right.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitWord32Shift(this, node, kX64Ror32);
}


// Visitor for 64-bit rotate-right.
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitWord64Shift(this, node, kX64Ror);
}

// Bit- and byte-reversal machine operators are not enabled for this
// selector, so these visitors must never be reached.
void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitWord64ReverseBytes(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitWord32ReverseBytes(Node* node) { UNREACHABLE(); }
884 :
// Visitor for 32-bit integer addition. Prefers a "leal" over "addl": lea
// combines base, scaled index and an immediate displacement in a single
// instruction, writes to a fresh register and does not clobber the flags.
void InstructionSelector::VisitInt32Add(Node* node) {
  X64OperandGenerator g(this);

  // Try to match the Add to a leal pattern
  BaseWithIndexAndDisplacement32Matcher m(node);
  if (m.matches() &&
      (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
    EmitLea(this, kX64Lea32, node, m.index(), m.scale(), m.base(),
            m.displacement(), m.displacement_mode());
    return;
  }

  // No leal pattern match, use addl
  VisitBinop(this, node, kX64Add32);
}
900 :
901 :
// Visitor for 64-bit integer addition; same leaq strength reduction as
// VisitInt32Add performs with leal.
void InstructionSelector::VisitInt64Add(Node* node) {
  X64OperandGenerator g(this);

  // Try to match the Add to a leaq pattern
  BaseWithIndexAndDisplacement64Matcher m(node);
  if (m.matches() &&
      (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
    EmitLea(this, kX64Lea, node, m.index(), m.scale(), m.base(),
            m.displacement(), m.displacement_mode());
    return;
  }

  // No leaq pattern match, use addq
  VisitBinop(this, node, kX64Add);
}
917 :
918 :
919 33620 : void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
920 33620 : if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
921 : FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
922 67240 : return VisitBinop(this, node, kX64Add, &cont);
923 : }
924 : FlagsContinuation cont;
925 0 : VisitBinop(this, node, kX64Add, &cont);
926 : }
927 :
928 :
929 41018 : void InstructionSelector::VisitInt32Sub(Node* node) {
930 : X64OperandGenerator g(this);
931 41018 : Int32BinopMatcher m(node);
932 41025 : if (m.left().Is(0)) {
933 5169 : Emit(kX64Neg32, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
934 : } else {
935 53307 : if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
936 : // Turn subtractions of constant values into immediate "leal" instructions
937 : // by negating the value.
938 : Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI),
939 : g.DefineAsRegister(node), g.UseRegister(m.left().node()),
940 34903 : g.TempImmediate(-m.right().Value()));
941 58477 : return;
942 : }
943 : VisitBinop(this, node, kX64Sub32);
944 : }
945 : }
946 :
947 :
// Visitor for 64-bit integer subtraction; mirrors VisitInt32Sub (negq /
// leaq / subq). The negation below cannot overflow: for 64-bit constants
// CanBeImmediate only accepts values strictly greater than INT32_MIN (see
// X64OperandGenerator::CanBeImmediate), so the int32 cast and unary minus
// are both safe — assuming HasValue() here only matches Int64Constant
// (TODO confirm against Int64BinopMatcher).
void InstructionSelector::VisitInt64Sub(Node* node) {
  X64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().Is(0)) {
    // 0 - x => negq x.
    Emit(kX64Neg, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
  } else {
    if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
      // Turn subtractions of constant values into immediate "leaq" instructions
      // by negating the value.
      Emit(kX64Lea | AddressingModeField::encode(kMode_MRI),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(-static_cast<int32_t>(m.right().Value())));
      return;
    }
    VisitBinop(this, node, kX64Sub);
  }
}
965 :
966 :
967 33620 : void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
968 33620 : if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
969 : FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
970 67240 : return VisitBinop(this, node, kX64Sub, &cont);
971 : }
972 : FlagsContinuation cont;
973 0 : VisitBinop(this, node, kX64Sub, &cont);
974 : }
975 :
976 :
977 : namespace {
978 :
// Emits a multiply. A constant factor uses the three-operand
// imul-with-immediate form (fresh destination register); otherwise the
// two-address form is used, swapping the operands when the right one is a
// better candidate for the overwritten register.
void VisitMul(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  if (g.CanBeImmediate(right)) {
    selector->Emit(opcode, g.DefineAsRegister(node), g.Use(left),
                   g.UseImmediate(right));
  } else {
    if (g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.Use(right));
  }
}
996 5482 : void VisitMulHigh(InstructionSelector* selector, Node* node,
997 : ArchOpcode opcode) {
998 : X64OperandGenerator g(selector);
999 : Node* left = node->InputAt(0);
1000 : Node* right = node->InputAt(1);
1001 5482 : if (selector->IsLive(left) && !selector->IsLive(right)) {
1002 : std::swap(left, right);
1003 : }
1004 5482 : InstructionOperand temps[] = {g.TempRegister(rax)};
1005 : // TODO(turbofan): We use UseUniqueRegister here to improve register
1006 : // allocation.
1007 : selector->Emit(opcode, g.DefineAsFixed(node, rdx), g.UseFixed(left, rax),
1008 5482 : g.UseUniqueRegister(right), arraysize(temps), temps);
1009 5482 : }
1010 :
1011 :
// Emits a division. x64 (i)div reads the dividend from rdx:rax and writes
// the quotient to rax; rdx is clobbered, so it is reserved as a temp. The
// divisor gets a unique register so it cannot alias rax/rdx.
void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(rdx)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, rax), g.UseFixed(node->InputAt(0), rax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}
1019 :
1020 :
// Emits a modulus. Same register convention as VisitDiv, except the result
// of interest is the remainder, which (i)div leaves in rdx; here rax is the
// clobbered temp instead.
void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(rax)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, rdx), g.UseFixed(node->InputAt(0), rax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}
1028 :
1029 : } // namespace
1030 :
1031 :
// Visitor for 32-bit multiply. Multiplications by a small power of two
// (optionally plus one, per Int32ScaleMatcher) are strength-reduced into a
// leal with a scaled index; everything else goes through imull.
void InstructionSelector::VisitInt32Mul(Node* node) {
  Int32ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    // For the "power of two plus one" form the index is also added once as
    // the base (e.g. x*5 => lea [x + x*4]).
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr,
            kPositiveDisplacement);
    return;
  }
  VisitMul(this, node, kX64Imul32);
}
1043 :
1044 17405 : void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
1045 : // TODO(mvstanton): Use Int32ScaleMatcher somehow.
1046 17405 : if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1047 : FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1048 34810 : return VisitBinop(this, node, kX64Imul32, &cont);
1049 : }
1050 : FlagsContinuation cont;
1051 0 : VisitBinop(this, node, kX64Imul32, &cont);
1052 : }
1053 :
// Trivial visitors: each forwards to the shared VisitMul / VisitMulHigh /
// VisitDiv / VisitMod helper with the matching x64 opcode (32- or 64-bit,
// signed or unsigned).
void InstructionSelector::VisitInt64Mul(Node* node) {
  VisitMul(this, node, kX64Imul);
}

void InstructionSelector::VisitInt32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX64ImulHigh32);
}


void InstructionSelector::VisitInt32Div(Node* node) {
  VisitDiv(this, node, kX64Idiv32);
}


void InstructionSelector::VisitInt64Div(Node* node) {
  VisitDiv(this, node, kX64Idiv);
}


void InstructionSelector::VisitUint32Div(Node* node) {
  VisitDiv(this, node, kX64Udiv32);
}


void InstructionSelector::VisitUint64Div(Node* node) {
  VisitDiv(this, node, kX64Udiv);
}


void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitMod(this, node, kX64Idiv32);
}


void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitMod(this, node, kX64Idiv);
}


void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitMod(this, node, kX64Udiv32);
}


void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitMod(this, node, kX64Udiv);
}


void InstructionSelector::VisitUint32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX64UmulHigh32);
}
1106 :
1107 74 : void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1108 : X64OperandGenerator g(this);
1109 74 : InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1110 222 : InstructionOperand outputs[2];
1111 : size_t output_count = 0;
1112 74 : outputs[output_count++] = g.DefineAsRegister(node);
1113 :
1114 74 : Node* success_output = NodeProperties::FindProjection(node, 1);
1115 74 : if (success_output) {
1116 69 : outputs[output_count++] = g.DefineAsRegister(success_output);
1117 : }
1118 :
1119 74 : Emit(kSSEFloat32ToInt64, output_count, outputs, 1, inputs);
1120 74 : }
1121 :
1122 :
1123 126 : void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1124 : X64OperandGenerator g(this);
1125 126 : InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1126 378 : InstructionOperand outputs[2];
1127 : size_t output_count = 0;
1128 126 : outputs[output_count++] = g.DefineAsRegister(node);
1129 :
1130 126 : Node* success_output = NodeProperties::FindProjection(node, 1);
1131 126 : if (success_output) {
1132 121 : outputs[output_count++] = g.DefineAsRegister(success_output);
1133 : }
1134 :
1135 126 : Emit(kSSEFloat64ToInt64, output_count, outputs, 1, inputs);
1136 126 : }
1137 :
1138 :
1139 74 : void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1140 : X64OperandGenerator g(this);
1141 74 : InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1142 222 : InstructionOperand outputs[2];
1143 : size_t output_count = 0;
1144 74 : outputs[output_count++] = g.DefineAsRegister(node);
1145 :
1146 74 : Node* success_output = NodeProperties::FindProjection(node, 1);
1147 74 : if (success_output) {
1148 69 : outputs[output_count++] = g.DefineAsRegister(success_output);
1149 : }
1150 :
1151 74 : Emit(kSSEFloat32ToUint64, output_count, outputs, 1, inputs);
1152 74 : }
1153 :
1154 :
1155 84 : void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1156 : X64OperandGenerator g(this);
1157 84 : InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1158 252 : InstructionOperand outputs[2];
1159 : size_t output_count = 0;
1160 84 : outputs[output_count++] = g.DefineAsRegister(node);
1161 :
1162 84 : Node* success_output = NodeProperties::FindProjection(node, 1);
1163 84 : if (success_output) {
1164 79 : outputs[output_count++] = g.DefineAsRegister(success_output);
1165 : }
1166 :
1167 84 : Emit(kSSEFloat64ToUint64, output_count, outputs, 1, inputs);
1168 84 : }
1169 :
1170 :
// Visitor for int32 -> int64 sign extension. When the input is a load that
// this node covers, the extension is folded into the load itself by picking
// a sign/zero-extending load opcode; otherwise a movsxlq does the job.
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  X64OperandGenerator g(this);
  Node* const value = node->InputAt(0);
  if (value->opcode() == IrOpcode::kLoad && CanCover(node, value)) {
    LoadRepresentation load_rep = LoadRepresentationOf(value->op());
    MachineRepresentation rep = load_rep.representation();
    InstructionCode opcode = kArchNop;
    switch (rep) {
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = load_rep.IsSigned() ? kX64Movsxbq : kX64Movzxbq;
        break;
      case MachineRepresentation::kWord16:
        opcode = load_rep.IsSigned() ? kX64Movsxwq : kX64Movzxwq;
        break;
      case MachineRepresentation::kWord32:
        // For unsigned word32 a plain movl suffices (it clears the upper
        // half); signed word32 needs movsxlq.
        opcode = load_rep.IsSigned() ? kX64Movsxlq : kX64Movl;
        break;
      default:
        UNREACHABLE();
        return;
    }
    InstructionOperand outputs[] = {g.DefineAsRegister(node)};
    size_t input_count = 0;
    InstructionOperand inputs[3];
    AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
        node->InputAt(0), inputs, &input_count);
    opcode |= AddressingModeField::encode(mode);
    Emit(opcode, 1, outputs, input_count, inputs);
  } else {
    Emit(kX64Movsxlq, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
  }
}
1204 :
1205 : namespace {
1206 :
// Returns true when |node| is a 32-bit operation whose x64 lowering already
// leaves the upper 32 bits of the destination register cleared, making an
// explicit zero-extension of its result redundant.
bool ZeroExtendsWord32ToWord64(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord32Equal:
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32MulHigh:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kInt32Mod:
    case IrOpcode::kUint32Div:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
    case IrOpcode::kUint32Mod:
    case IrOpcode::kUint32MulHigh:
      // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
      // zero-extension is a no-op.
      return true;
    case IrOpcode::kProjection: {
      // Projections of checked 32-bit arithmetic (value or overflow bit) are
      // likewise treated as already zero-extended.
      Node* const value = node->InputAt(0);
      switch (value->opcode()) {
        case IrOpcode::kInt32AddWithOverflow:
        case IrOpcode::kInt32SubWithOverflow:
        case IrOpcode::kInt32MulWithOverflow:
          return true;
        default:
          return false;
      }
    }
    case IrOpcode::kLoad: {
      // The movzxbl/movsxbl/movzxwl/movsxwl/movl operations implicitly
      // zero-extend to 64-bit on x64, so the zero-extension is a no-op.
      LoadRepresentation load_rep = LoadRepresentationOf(node->op());
      switch (load_rep.representation()) {
        case MachineRepresentation::kWord8:
        case MachineRepresentation::kWord16:
        case MachineRepresentation::kWord32:
          return true;
        default:
          return false;
      }
    }
    default:
      return false;
  }
}
1261 :
1262 : } // namespace
1263 :
// Visitor for uint32 -> uint64 zero extension. If the producing operation
// already cleared the upper 32 bits (see ZeroExtendsWord32ToWord64), the
// node is a pure renaming; otherwise an explicit movl zero-extends.
void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  if (ZeroExtendsWord32ToWord64(value)) {
    // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
    // zero-extension is a no-op.
    return EmitIdentity(node);
  }
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}
1274 :
1275 :
1276 : namespace {
1277 :
// Emits |opcode| with the result in a fresh register and the single input
// as any operand (register, immediate or stack slot).
void VisitRO(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


// Like VisitRO, but the single input must be in a register.
void VisitRR(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)));
}

// Two-input instruction in the classic x64 two-address form: the result
// overwrites the first operand's register.
void VisitRRO(InstructionSelector* selector, Node* node,
              InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineSameAsFirst(node),
                 g.UseRegister(node->InputAt(0)), g.Use(node->InputAt(1)));
}

// Binary floating-point operation: prefer the AVX (three-address) encoding
// when available; the SSE form requires destination == first operand.
void VisitFloatBinop(InstructionSelector* selector, Node* node,
                     ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand operand0 = g.UseRegister(node->InputAt(0));
  InstructionOperand operand1 = g.Use(node->InputAt(1));
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1);
  } else {
    selector->Emit(sse_opcode, g.DefineSameAsFirst(node), operand0, operand1);
  }
}


// Unary floating-point operation; same AVX/SSE split as VisitFloatBinop.
void VisitFloatUnop(InstructionSelector* selector, Node* node, Node* input,
                    ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), g.Use(input));
  } else {
    selector->Emit(sse_opcode, g.DefineSameAsFirst(node), g.UseRegister(input));
  }
}
1321 :
1322 : } // namespace
1323 :
// Machine operators that lower to a single instruction taking one general
// operand; expanded through VisitRO below.
#define RO_OP_LIST(V)                                                    \
  V(Word64Clz, kX64Lzcnt)                                                \
  V(Word32Clz, kX64Lzcnt32)                                              \
  V(Word64Ctz, kX64Tzcnt)                                                \
  V(Word32Ctz, kX64Tzcnt32)                                              \
  V(Word64Popcnt, kX64Popcnt)                                            \
  V(Word32Popcnt, kX64Popcnt32)                                          \
  V(Float64Sqrt, kSSEFloat64Sqrt)                                        \
  V(Float32Sqrt, kSSEFloat32Sqrt)                                        \
  V(ChangeFloat64ToInt32, kSSEFloat64ToInt32)                            \
  V(ChangeFloat64ToUint32, kSSEFloat64ToUint32 | MiscField::encode(1))   \
  V(TruncateFloat64ToUint32, kSSEFloat64ToUint32 | MiscField::encode(0)) \
  V(ChangeFloat64ToUint64, kSSEFloat64ToUint64)                          \
  V(TruncateFloat64ToFloat32, kSSEFloat64ToFloat32)                      \
  V(ChangeFloat32ToFloat64, kSSEFloat32ToFloat64)                        \
  V(TruncateFloat32ToInt32, kSSEFloat32ToInt32)                          \
  V(TruncateFloat32ToUint32, kSSEFloat32ToUint32)                        \
  V(ChangeInt32ToFloat64, kSSEInt32ToFloat64)                            \
  V(ChangeUint32ToFloat64, kSSEUint32ToFloat64)                          \
  V(RoundFloat64ToInt32, kSSEFloat64ToInt32)                             \
  V(RoundInt32ToFloat32, kSSEInt32ToFloat32)                             \
  V(RoundInt64ToFloat32, kSSEInt64ToFloat32)                             \
  V(RoundInt64ToFloat64, kSSEInt64ToFloat64)                             \
  V(RoundUint32ToFloat32, kSSEUint32ToFloat32)                           \
  V(BitcastFloat32ToInt32, kX64BitcastFI)                                \
  V(BitcastFloat64ToInt64, kX64BitcastDL)                                \
  V(BitcastInt32ToFloat32, kX64BitcastIF)                                \
  V(BitcastInt64ToFloat64, kX64BitcastLD)                                \
  V(Float64ExtractLowWord32, kSSEFloat64ExtractLowWord32)                \
  V(Float64ExtractHighWord32, kSSEFloat64ExtractHighWord32)

// Machine operators that lower to a single instruction requiring a register
// operand; expanded through VisitRR below. These are the float rounding
// operations, with the rounding mode encoded in the MiscField.
#define RR_OP_LIST(V)                                                         \
  V(Float32RoundDown, kSSEFloat32Round | MiscField::encode(kRoundDown))       \
  V(Float64RoundDown, kSSEFloat64Round | MiscField::encode(kRoundDown))       \
  V(Float32RoundUp, kSSEFloat32Round | MiscField::encode(kRoundUp))           \
  V(Float64RoundUp, kSSEFloat64Round | MiscField::encode(kRoundUp))           \
  V(Float32RoundTruncate, kSSEFloat32Round | MiscField::encode(kRoundToZero)) \
  V(Float64RoundTruncate, kSSEFloat64Round | MiscField::encode(kRoundToZero)) \
  V(Float32RoundTiesEven,                                                     \
    kSSEFloat32Round | MiscField::encode(kRoundToNearest))                    \
  V(Float64RoundTiesEven, kSSEFloat64Round | MiscField::encode(kRoundToNearest))

// Expand one InstructionSelector::Visit* method per RO_OP_LIST entry.
#define RO_VISITOR(Name, opcode)                      \
  void InstructionSelector::Visit##Name(Node* node) { \
    VisitRO(this, node, opcode);                      \
  }
RO_OP_LIST(RO_VISITOR)
#undef RO_VISITOR

// Expand one InstructionSelector::Visit* method per RR_OP_LIST entry.
#define RR_VISITOR(Name, opcode)                      \
  void InstructionSelector::Visit##Name(Node* node) { \
    VisitRR(this, node, opcode);                      \
  }
RR_OP_LIST(RR_VISITOR)
#undef RR_VISITOR
1379 :
// Visitor for float64 -> word32 truncation, handled by the generic
// (architecture-independent) kArchTruncateDoubleToI instruction.
void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  VisitRR(this, node, kArchTruncateDoubleToI);
}
1384 302619 : void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1385 : X64OperandGenerator g(this);
1386 294172 : Node* value = node->InputAt(0);
1387 302619 : if (CanCover(node, value)) {
1388 294172 : switch (value->opcode()) {
1389 : case IrOpcode::kWord64Sar:
1390 : case IrOpcode::kWord64Shr: {
1391 292156 : Int64BinopMatcher m(value);
1392 292160 : if (m.right().Is(32)) {
1393 292074 : if (TryMatchLoadWord64AndShiftRight(this, value, kX64Movl)) {
1394 341356 : return EmitIdentity(node);
1395 : }
1396 : Emit(kX64Shr, g.DefineSameAsFirst(node),
1397 485582 : g.UseRegister(m.left().node()), g.TempImmediate(32));
1398 242792 : return;
1399 : }
1400 86 : break;
1401 : }
1402 : default:
1403 : break;
1404 : }
1405 : }
1406 10556 : Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
1407 : }
1408 :
// Visitor for uint64 -> float32 conversion; the lowering needs an extra
// scratch register.
void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister()};
  Emit(kSSEUint64ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
       arraysize(temps), temps);
}


// Visitor for uint64 -> float64 conversion; same shape as the float32
// variant above.
void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister()};
  Emit(kSSEUint64ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
       arraysize(temps), temps);
}
1423 :
// Floating-point visitors: each forwards to the shared VisitFloatBinop /
// VisitFloatUnop / VisitRRO helper with the AVX and SSE opcode pair (or the
// single SSE opcode for min/max).
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Add, kSSEFloat32Add);
}


void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub);
}

void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Mul, kSSEFloat32Mul);
}


void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Div, kSSEFloat32Div);
}


void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Abs, kSSEFloat32Abs);
}


void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitRRO(this, node, kSSEFloat32Max);
}

void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitRRO(this, node, kSSEFloat32Min);
}

void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Add, kSSEFloat64Add);
}


void InstructionSelector::VisitFloat64Sub(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub);
}

void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Mul, kSSEFloat64Mul);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Div, kSSEFloat64Div);
}


// Float64 modulus: the kSSEFloat64Mod lowering needs rax as a scratch
// register (presumably for the FP status word — confirm in the code
// generator), and both inputs in registers.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister(rax)};
  Emit(kSSEFloat64Mod, g.DefineSameAsFirst(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)), 1,
       temps);
}


void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitRRO(this, node, kSSEFloat64Max);
}


void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitRRO(this, node, kSSEFloat64Min);
}


void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Abs, kSSEFloat64Abs);
}


// Round-ties-away is not a supported machine operation for this selector.
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}


void InstructionSelector::VisitFloat32Neg(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Neg, kSSEFloat32Neg);
}

void InstructionSelector::VisitFloat64Neg(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Neg, kSSEFloat64Neg);
}
1511 :
// Binary ieee754 math operation, implemented as a call (MarkAsCall) with
// both arguments and the result in fixed XMM registers.
void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                   InstructionCode opcode) {
  X64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0),
       g.UseFixed(node->InputAt(1), xmm1))
      ->MarkAsCall();
}

// Unary ieee754 math operation; same call-based convention with a single
// argument in xmm0.
void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  X64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0))
      ->MarkAsCall();
}
1526 :
// Lowers the argument-passing part of a call. C calls pre-allocate the
// outgoing area (kArchPrepareCallCFunction) and poke each argument into its
// slot; all other calls push arguments onto the stack in reverse order.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  X64OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->ParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      if (input.node()) {
        int slot = static_cast<int>(n);
        InstructionOperand value = g.CanBeImmediate(input.node())
                                       ? g.UseImmediate(input.node())
                                       : g.UseRegister(input.node());
        Emit(kX64Poke | MiscField::encode(slot), g.NoOutput(), value);
      }
    }
  } else {
    // Push any stack arguments.
    int effect_level = GetEffectLevel(node);
    for (PushParameter input : base::Reversed(*arguments)) {
      Node* input_node = input.node();
      // NOTE(review): unlike the C-call path above, this path does not guard
      // against a null input node — presumably push parameters are always
      // present for non-C calls; confirm with the callers.
      if (g.CanBeImmediate(input_node)) {
        Emit(kX64Push, g.NoOutput(), g.UseImmediate(input_node));
      } else if (IsSupported(ATOM) ||
                 sequence()->IsFP(GetVirtualRegister(input_node))) {
        // TODO(titzer): X64Push cannot handle stack->stack double moves
        // because there is no way to encode fixed double slots.
        Emit(kX64Push, g.NoOutput(), g.UseRegister(input_node));
      } else if (g.CanBeMemoryOperand(kX64Push, node, input_node,
                                      effect_level)) {
        // Push directly from memory, folding the load into the push.
        InstructionOperand outputs[1];
        InstructionOperand inputs[4];
        size_t input_count = 0;
        InstructionCode opcode = kX64Push;
        AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
            input_node, inputs, &input_count);
        opcode |= AddressingModeField::encode(mode);
        Emit(opcode, 0, outputs, input_count, inputs);
      } else {
        Emit(kX64Push, g.NoOutput(), g.Use(input_node));
      }
    }
  }
}
1577 :
1578 :
// Tail-call target addresses can be encoded as immediates on x64.
bool InstructionSelector::IsTailCallAddressImmediate() { return true; }

// Number of temporary registers reserved for a tail call out of a
// JSFunction frame.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
1582 :
1583 : namespace {
1584 :
// Emits a comparison whose left operand stays in memory (|left| must be a
// load that is folded into the compare). Inputs are laid out as: memory
// operands, then |right|, then any continuation-specific inputs (branch
// labels or trap id).
void VisitCompareWithMemoryOperand(InstructionSelector* selector,
                                   InstructionCode opcode, Node* left,
                                   InstructionOperand right,
                                   FlagsContinuation* cont) {
  DCHECK_EQ(IrOpcode::kLoad, left->opcode());
  X64OperandGenerator g(selector);
  size_t input_count = 0;
  InstructionOperand inputs[6];
  AddressingMode addressing_mode =
      g.GetEffectiveAddressMemoryOperand(left, inputs, &input_count);
  opcode |= AddressingModeField::encode(addressing_mode);
  opcode = cont->Encode(opcode);
  inputs[input_count++] = right;

  if (cont->IsBranch()) {
    // Consume the flags and branch to one of the two successor blocks.
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, 0, nullptr, input_count, inputs,
                             cont->kind(), cont->reason(), cont->frame_state());
  } else if (cont->IsSet()) {
    // Materialize the flag into the continuation's result register.
    InstructionOperand output = g.DefineAsRegister(cont->result());
    selector->Emit(opcode, 1, &output, input_count, inputs);
  } else {
    DCHECK(cont->IsTrap());
    inputs[input_count++] = g.UseImmediate(cont->trap_id());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  }
}
1615 :
1616 : // Shared routine for multiple compare operations.
1617 2913317 : void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
1618 : InstructionOperand left, InstructionOperand right,
1619 8336424 : FlagsContinuation* cont) {
1620 : X64OperandGenerator g(selector);
1621 : opcode = cont->Encode(opcode);
1622 2913317 : if (cont->IsBranch()) {
1623 : selector->Emit(opcode, g.NoOutput(), left, right,
1624 5019572 : g.Label(cont->true_block()), g.Label(cont->false_block()));
1625 403535 : } else if (cont->IsDeoptimize()) {
1626 : selector->EmitDeoptimize(opcode, g.NoOutput(), left, right, cont->kind(),
1627 141031 : cont->reason(), cont->frame_state());
1628 262504 : } else if (cont->IsSet()) {
1629 68084 : selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
1630 : } else {
1631 : DCHECK(cont->IsTrap());
1632 : selector->Emit(opcode, g.NoOutput(), left, right,
1633 194420 : g.UseImmediate(cont->trap_id()));
1634 : }
1635 2913345 : }
1636 :
1637 :
// Shared routine for multiple compare operations.
// Node-level wrapper: for commutative compares the operands may be swapped
// when the right one is the better left operand (register pressure
// heuristic, see CanBeBetterLeftOperand).
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  Node* left, Node* right, FlagsContinuation* cont,
                  bool commutative) {
  X64OperandGenerator g(selector);
  if (commutative && g.CanBeBetterLeftOperand(right)) {
    std::swap(left, right);
  }
  VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
}
1648 :
// Returns the machine type |node| can be narrowed to for a comparison
// against |hint_node|. If |hint_node| is a narrow load and |node| is an
// integer constant that fits into the load's type, the load's type is
// returned; otherwise the result is |node|'s own load representation, or
// None when |node| is not a load.
MachineType MachineTypeForNarrow(Node* node, Node* hint_node) {
  if (hint_node->opcode() == IrOpcode::kLoad) {
    MachineType hint = LoadRepresentationOf(hint_node->op());
    if (node->opcode() == IrOpcode::kInt32Constant ||
        node->opcode() == IrOpcode::kInt64Constant) {
      int64_t constant = node->opcode() == IrOpcode::kInt32Constant
                             ? OpParameter<int32_t>(node)
                             : OpParameter<int64_t>(node);
      // Accept the hint only when the constant is representable in it.
      if (hint == MachineType::Int8()) {
        if (constant >= std::numeric_limits<int8_t>::min() &&
            constant <= std::numeric_limits<int8_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Uint8()) {
        if (constant >= std::numeric_limits<uint8_t>::min() &&
            constant <= std::numeric_limits<uint8_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Int16()) {
        if (constant >= std::numeric_limits<int16_t>::min() &&
            constant <= std::numeric_limits<int16_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Uint16()) {
        if (constant >= std::numeric_limits<uint16_t>::min() &&
            constant <= std::numeric_limits<uint16_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Int32()) {
        return hint;
      } else if (hint == MachineType::Uint32()) {
        if (constant >= 0) return hint;
      }
    }
  }
  return node->opcode() == IrOpcode::kLoad ? LoadRepresentationOf(node->op())
                                           : MachineType::None();
}
1687 :
// Tries to match the size of the given opcode to that of the operands, if
// possible. Returns a narrowed 8/16-bit cmp/test opcode when both operand
// types agree, otherwise returns {opcode} unchanged.
InstructionCode TryNarrowOpcodeSize(InstructionCode opcode, Node* left,
                                    Node* right, FlagsContinuation* cont) {
  // TODO(epertoso): we can probably get some size information out phi nodes.
  // If the load representations don't match, both operands will be
  // zero/sign-extended to 32bit.
  MachineType left_type = MachineTypeForNarrow(left, right);
  MachineType right_type = MachineTypeForNarrow(right, left);
  if (left_type == right_type) {
    switch (left_type.representation()) {
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8: {
        if (opcode == kX64Test32) return kX64Test8;
        if (opcode == kX64Cmp32) {
          if (left_type.semantic() == MachineSemantic::kUint32) {
            // For unsigned operands, rewrite any signed condition to its
            // unsigned counterpart before narrowing.
            cont->OverwriteUnsignedIfSigned();
          } else {
            CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
          }
          return kX64Cmp8;
        }
        break;
      }
      case MachineRepresentation::kWord16:
        if (opcode == kX64Test32) return kX64Test16;
        if (opcode == kX64Cmp32) {
          if (left_type.semantic() == MachineSemantic::kUint32) {
            cont->OverwriteUnsignedIfSigned();
          } else {
            CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
          }
          return kX64Cmp16;
        }
        break;
      default:
        break;
    }
  }
  return opcode;
}
1729 :
// Shared routine for multiple word compare operations. Canonicalizes the
// operand order, then folds immediates and memory operands into the compare
// where the addressing modes allow it.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // Narrow the compare to 8/16 bits when both operand types allow it.
  opcode = TryNarrowOpcodeSize(opcode, left, right, cont);

  // If one of the two inputs is an immediate, make sure it's on the right, or
  // if one of the two inputs is a memory operand, make sure it's on the left.
  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    // For branches the compare is emitted at the branch's position, so use
    // the effect level there when deciding whether a load can be folded.
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }

  if ((!g.CanBeImmediate(right) && g.CanBeImmediate(left)) ||
      (g.CanBeMemoryOperand(opcode, node, right, effect_level) &&
       !g.CanBeMemoryOperand(opcode, node, left, effect_level))) {
    // Swapping a non-commutative compare requires commuting the condition.
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  // Match immediates on right side of comparison.
  if (g.CanBeImmediate(right)) {
    if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
      return VisitCompareWithMemoryOperand(selector, opcode, left,
                                           g.UseImmediate(right), cont);
    }
    return VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right),
                        cont);
  }

  // Match memory operands on left side of comparison.
  if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
    return VisitCompareWithMemoryOperand(selector, opcode, left,
                                         g.UseRegister(right), cont);
  }

  return VisitCompare(selector, opcode, left, right, cont,
                      node->op()->HasProperty(Operator::kCommutative));
}
1773 :
// Shared routine for 64-bit word comparison operations. Recognizes two
// special patterns before falling back to a generic 64-bit compare:
// comparisons against roots-table heap constants (folded into a
// root-register-relative memory operand) and the JS stack-limit check
// (lowered to the dedicated kX64StackCheck instruction).
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  if (selector->CanUseRootsRegister()) {
    Heap* const heap = selector->isolate()->heap();
    Heap::RootListIndex root_index;
    HeapObjectBinopMatcher m(node);
    if (m.right().HasValue() &&
        heap->IsRootHandle(m.right().Value(), &root_index)) {
      // Root constant on the right: put it on the left as a root-relative
      // memory operand, commuting the condition if necessary.
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode =
          kX64Cmp | AddressingModeField::encode(kMode_Root);
      return VisitCompare(
          selector, opcode,
          g.TempImmediate((root_index * kPointerSize) - kRootRegisterBias),
          g.UseRegister(m.left().node()), cont);
    } else if (m.left().HasValue() &&
               heap->IsRootHandle(m.left().Value(), &root_index)) {
      InstructionCode opcode =
          kX64Cmp | AddressingModeField::encode(kMode_Root);
      return VisitCompare(
          selector, opcode,
          g.TempImmediate((root_index * kPointerSize) - kRootRegisterBias),
          g.UseRegister(m.right().node()), cont);
    }
  }
  Int64BinopMatcher m(node);
  if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
    LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
    ExternalReference js_stack_limit =
        ExternalReference::address_of_stack_limit(selector->isolate());
    if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
      // Compare(Load(js_stack_limit), LoadStackPointer)
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode = cont->Encode(kX64StackCheck);
      if (cont->IsBranch()) {
        selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                       g.Label(cont->false_block()));
      } else if (cont->IsDeoptimize()) {
        selector->EmitDeoptimize(opcode, 0, nullptr, 0, nullptr, cont->kind(),
                                 cont->reason(), cont->frame_state());
      } else if (cont->IsSet()) {
        selector->Emit(opcode, g.DefineAsRegister(cont->result()));
      } else {
        DCHECK(cont->IsTrap());
        selector->Emit(opcode, g.NoOutput(), g.UseImmediate(cont->trap_id()));
      }
      return;
    }
  }
  VisitWordCompare(selector, node, kX64Cmp, cont);
}
1827 :
1828 :
1829 : // Shared routine for comparison with zero.
1830 558424 : void VisitCompareZero(InstructionSelector* selector, Node* node,
1831 : InstructionCode opcode, FlagsContinuation* cont) {
1832 : X64OperandGenerator g(selector);
1833 558424 : VisitCompare(selector, opcode, g.Use(node), g.TempImmediate(0), cont);
1834 558427 : }
1835 :
1836 :
1837 : // Shared routine for multiple float32 compare operations (inputs commuted).
1838 1952 : void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1839 : FlagsContinuation* cont) {
1840 : Node* const left = node->InputAt(0);
1841 : Node* const right = node->InputAt(1);
1842 : InstructionCode const opcode =
1843 1952 : selector->IsSupported(AVX) ? kAVXFloat32Cmp : kSSEFloat32Cmp;
1844 1952 : VisitCompare(selector, opcode, right, left, cont, false);
1845 1953 : }
1846 :
1847 :
1848 : // Shared routine for multiple float64 compare operations (inputs commuted).
1849 153796 : void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1850 : FlagsContinuation* cont) {
1851 : Node* const left = node->InputAt(0);
1852 : Node* const right = node->InputAt(1);
1853 : InstructionCode const opcode =
1854 153796 : selector->IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
1855 153796 : VisitCompare(selector, opcode, right, left, cont, false);
1856 153797 : }
1857 :
// Shared routine for word comparison against zero. Tries to fuse the
// zero-compare with the node that produced {value}: comparisons are turned
// into the corresponding flag-setting compare, <op>WithOverflow projections
// into flag-setting binops, and sub/and into cmp/test. Falls back to an
// explicit compare against zero when no fusion applies.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  // Try to combine with comparisons against 0 by simply inverting the branch.
  while (value->opcode() == IrOpcode::kWord32Equal &&
         selector->CanCover(user, value)) {
    Int32BinopMatcher m(value);
    if (!m.right().Is(0)) break;

    // (x == 0) is !x, so descend into x and negate the continuation.
    user = value;
    value = m.left().node();
    cont->Negate();
  }

  if (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kWord64Equal: {
        cont->OverwriteAndNegateIfEqual(kEqual);
        Int64BinopMatcher m(value);
        if (m.right().Is(0)) {
          // Try to combine the branch with a comparison.
          // Note: these locals intentionally shadow the outer user/value.
          Node* const user = m.node();
          Node* const value = m.left().node();
          if (selector->CanCover(user, value)) {
            switch (value->opcode()) {
              case IrOpcode::kInt64Sub:
                return VisitWord64Compare(selector, value, cont);
              case IrOpcode::kWord64And:
                return VisitWordCompare(selector, value, kX64Test, cont);
              default:
                break;
            }
          }
          return VisitCompareZero(selector, value, kX64Cmp, cont);
        }
        return VisitWord64Compare(selector, value, cont);
      }
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan: {
        Float64BinopMatcher m(value);
        if (m.left().Is(0.0) && m.right().IsFloat64Abs()) {
          // This matches the pattern
          //
          //   Float64LessThan(#0.0, Float64Abs(x))
          //
          // which TurboFan generates for NumberToBoolean in the general case,
          // and which evaluates to false if x is 0, -0 or NaN. We can compile
          // this to a simple (v)ucomisd using not_equal flags condition, which
          // avoids the costly Float64Abs.
          cont->OverwriteAndNegateIfEqual(kNotEqual);
          InstructionCode const opcode =
              selector->IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
          return VisitCompare(selector, opcode, m.left().node(),
                              m.right().InputAt(0), cont, false);
        }
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat64Compare(selector, value, cont);
      }
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Add32, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Sub32, cont);
              case IrOpcode::kInt32MulWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Imul32, cont);
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Add, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Sub, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kWord32And:
        return VisitWordCompare(selector, value, kX64Test32, cont);
      default:
        break;
    }
  }

  // Branch could not be combined with a compare, emit compare against 0.
  VisitCompareZero(selector, value, kX64Cmp32, cont);
}
2003 :
2004 : } // namespace
2005 :
2006 3610801 : void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
2007 : BasicBlock* fbranch) {
2008 : FlagsContinuation cont(kNotEqual, tbranch, fbranch);
2009 3610801 : VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
2010 3610794 : }
2011 :
2012 121560 : void InstructionSelector::VisitDeoptimizeIf(Node* node) {
2013 121560 : DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
2014 : FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
2015 : kNotEqual, p.kind(), p.reason(), node->InputAt(1));
2016 121560 : VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2017 121559 : }
2018 :
2019 180752 : void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
2020 180752 : DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
2021 : FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
2022 : kEqual, p.kind(), p.reason(), node->InputAt(1));
2023 180760 : VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2024 180756 : }
2025 :
2026 1894 : void InstructionSelector::VisitTrapIf(Node* node, Runtime::FunctionId func_id) {
2027 : FlagsContinuation cont =
2028 : FlagsContinuation::ForTrap(kNotEqual, func_id, node->InputAt(1));
2029 1894 : VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2030 1895 : }
2031 :
2032 195773 : void InstructionSelector::VisitTrapUnless(Node* node,
2033 : Runtime::FunctionId func_id) {
2034 : FlagsContinuation cont =
2035 : FlagsContinuation::ForTrap(kEqual, func_id, node->InputAt(1));
2036 195773 : VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2037 195778 : }
2038 :
// Lowers a Switch node to either a jump table (ArchTableSwitch) or a chain
// of conditional jumps (ArchLookupSwitch), chosen by a simple size/time
// cost heuristic over the case count and value range.
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  X64OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  static const size_t kMaxTableSwitchValueRange = 2 << 16;
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  if (sw.case_count > 4 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      // Excludes kMinInt so that -sw.min_value below cannot overflow.
      sw.min_value > std::numeric_limits<int32_t>::min() &&
      sw.value_range <= kMaxTableSwitchValueRange) {
    InstructionOperand index_operand = g.TempRegister();
    if (sw.min_value) {
      // The leal automatically zero extends, so result is a valid 64-bit index.
      Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI), index_operand,
           value_operand, g.TempImmediate(-sw.min_value));
    } else {
      // Zero extend, because we use it as 64-bit index into the jump table.
      Emit(kX64Movl, index_operand, value_operand);
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}
2070 :
2071 :
// Materializes a 32-bit equality comparison as a boolean. Equality against
// zero is routed through the shared zero-comparison logic, which can fuse
// the compare with the producer of the other operand.
void InstructionSelector::VisitWord32Equal(Node* const node) {
  Node* user = node;
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(user);
  if (m.right().Is(0)) {
    return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}
2081 :
2082 :
2083 1736 : void InstructionSelector::VisitInt32LessThan(Node* node) {
2084 : FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
2085 1736 : VisitWordCompare(this, node, kX64Cmp32, &cont);
2086 1736 : }
2087 :
2088 :
2089 1139 : void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
2090 : FlagsContinuation cont =
2091 : FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
2092 1139 : VisitWordCompare(this, node, kX64Cmp32, &cont);
2093 1139 : }
2094 :
2095 :
2096 3737 : void InstructionSelector::VisitUint32LessThan(Node* node) {
2097 : FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
2098 3737 : VisitWordCompare(this, node, kX64Cmp32, &cont);
2099 3736 : }
2100 :
2101 :
2102 2130 : void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
2103 : FlagsContinuation cont =
2104 : FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
2105 2130 : VisitWordCompare(this, node, kX64Cmp32, &cont);
2106 2130 : }
2107 :
2108 :
// Materializes a 64-bit equality comparison as a boolean. Equality against
// zero is fused with a covered Int64Sub or Word64And producer when possible
// (sub becomes a cmp, and becomes a test).
void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    // Try to combine the equality check with a comparison.
    Node* const user = m.node();
    Node* const value = m.left().node();
    if (CanCover(user, value)) {
      switch (value->opcode()) {
        case IrOpcode::kInt64Sub:
          return VisitWord64Compare(this, value, &cont);
        case IrOpcode::kWord64And:
          return VisitWordCompare(this, value, kX64Test, &cont);
        default:
          break;
      }
    }
  }
  VisitWord64Compare(this, node, &cont);
}
2129 :
2130 :
2131 17405 : void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
2132 17405 : if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
2133 : FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
2134 34810 : return VisitBinop(this, node, kX64Add32, &cont);
2135 : }
2136 : FlagsContinuation cont;
2137 0 : VisitBinop(this, node, kX64Add32, &cont);
2138 : }
2139 :
2140 :
2141 17405 : void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
2142 17405 : if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
2143 : FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
2144 34810 : return VisitBinop(this, node, kX64Sub32, &cont);
2145 : }
2146 : FlagsContinuation cont;
2147 0 : VisitBinop(this, node, kX64Sub32, &cont);
2148 : }
2149 :
2150 :
2151 1195 : void InstructionSelector::VisitInt64LessThan(Node* node) {
2152 : FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
2153 1195 : VisitWord64Compare(this, node, &cont);
2154 1195 : }
2155 :
2156 :
2157 650 : void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
2158 : FlagsContinuation cont =
2159 : FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
2160 650 : VisitWord64Compare(this, node, &cont);
2161 650 : }
2162 :
2163 :
2164 428 : void InstructionSelector::VisitUint64LessThan(Node* node) {
2165 : FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
2166 428 : VisitWord64Compare(this, node, &cont);
2167 428 : }
2168 :
2169 :
2170 412 : void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
2171 : FlagsContinuation cont =
2172 : FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
2173 412 : VisitWord64Compare(this, node, &cont);
2174 412 : }
2175 :
2176 :
2177 144 : void InstructionSelector::VisitFloat32Equal(Node* node) {
2178 : FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
2179 144 : VisitFloat32Compare(this, node, &cont);
2180 144 : }
2181 :
2182 :
2183 162 : void InstructionSelector::VisitFloat32LessThan(Node* node) {
2184 : FlagsContinuation cont =
2185 : FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
2186 162 : VisitFloat32Compare(this, node, &cont);
2187 162 : }
2188 :
2189 :
2190 159 : void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
2191 : FlagsContinuation cont =
2192 : FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
2193 159 : VisitFloat32Compare(this, node, &cont);
2194 159 : }
2195 :
2196 :
2197 3997 : void InstructionSelector::VisitFloat64Equal(Node* node) {
2198 : FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
2199 3997 : VisitFloat64Compare(this, node, &cont);
2200 3997 : }
2201 :
// Materializes a float64 "<" comparison as a boolean, with a fast path for
// the NumberToBoolean pattern Float64LessThan(#0.0, Float64Abs(x)).
void InstructionSelector::VisitFloat64LessThan(Node* node) {
  Float64BinopMatcher m(node);
  if (m.left().Is(0.0) && m.right().IsFloat64Abs()) {
    // This matches the pattern
    //
    //   Float64LessThan(#0.0, Float64Abs(x))
    //
    // which TurboFan generates for NumberToBoolean in the general case,
    // and which evaluates to false if x is 0, -0 or NaN. We can compile
    // this to a simple (v)ucomisd using not_equal flags condition, which
    // avoids the costly Float64Abs.
    FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, node);
    InstructionCode const opcode =
        IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
    return VisitCompare(this, opcode, m.left().node(), m.right().InputAt(0),
                        &cont, false);
  }
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
  VisitFloat64Compare(this, node, &cont);
}
2223 :
2224 1322 : void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
2225 : FlagsContinuation cont =
2226 : FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
2227 1322 : VisitFloat64Compare(this, node, &cont);
2228 1322 : }
2229 :
// Replaces the low 32 bits of a float64 with a word32 value.
void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  X64OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Float64Matcher mleft(left);
  // If the float constant's upper 32 bits are zero, loading just the low
  // word produces the identical bit pattern without needing the float input.
  if (mleft.HasValue() && (bit_cast<uint64_t>(mleft.Value()) >> 32) == 0u) {
    Emit(kSSEFloat64LoadLowWord32, g.DefineAsRegister(node), g.Use(right));
    return;
  }
  Emit(kSSEFloat64InsertLowWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.Use(right));
}
2242 :
2243 :
2244 5 : void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
2245 : X64OperandGenerator g(this);
2246 : Node* left = node->InputAt(0);
2247 : Node* right = node->InputAt(1);
2248 : Emit(kSSEFloat64InsertHighWord32, g.DefineSameAsFirst(node),
2249 5 : g.UseRegister(left), g.Use(right));
2250 5 : }
2251 :
2252 1825 : void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
2253 : X64OperandGenerator g(this);
2254 : Emit(kSSEFloat64SilenceNaN, g.DefineSameAsFirst(node),
2255 1825 : g.UseRegister(node->InputAt(0)));
2256 1825 : }
2257 :
// Lowers an atomic load to a regular load. NOTE(review): presumably valid
// because x64 loads of these sizes are naturally atomic — confirm against
// the architecture manual.
void InstructionSelector::VisitAtomicLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  DCHECK(load_rep.representation() == MachineRepresentation::kWord8 ||
         load_rep.representation() == MachineRepresentation::kWord16 ||
         load_rep.representation() == MachineRepresentation::kWord32);
  USE(load_rep);
  VisitLoad(node);
}
2266 :
// Lowers an atomic store. It is implemented as an atomic exchange whose
// result is discarded (the instruction is emitted with zero outputs).
void InstructionSelector::VisitAtomicStore(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kAtomicExchangeInt8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kAtomicExchangeInt16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kAtomicExchangeWord32;
      break;
    default:
      UNREACHABLE();
      return;
  }
  AddressingMode addressing_mode;
  InstructionOperand inputs[4];
  size_t input_count = 0;
  // Unique registers keep value/base/index from aliasing in codegen.
  inputs[input_count++] = g.UseUniqueRegister(value);
  inputs[input_count++] = g.UseUniqueRegister(base);
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs);
}
2304 :
// Lowers an atomic exchange: atomically stores {value} and returns the old
// memory contents. The output is defined same-as-first since the exchange
// overwrites the value register with the previous memory value.
void InstructionSelector::VisitAtomicExchange(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  MachineType type = AtomicOpRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Int8()) {
    opcode = kAtomicExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kAtomicExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kAtomicExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kAtomicExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kAtomicExchangeWord32;
  } else {
    UNREACHABLE();
    return;
  }
  InstructionOperand outputs[1];
  AddressingMode addressing_mode;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  // Unique registers keep value/base/index from aliasing in codegen.
  inputs[input_count++] = g.UseUniqueRegister(value);
  inputs[input_count++] = g.UseUniqueRegister(base);
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  outputs[0] = g.DefineSameAsFirst(node);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 1, outputs, input_count, inputs);
}
2344 :
// Lowers an atomic compare-and-exchange. The expected old value is fixed to
// rax and the result is defined in rax, matching the register contract of
// the x64 cmpxchg instruction.
void InstructionSelector::VisitAtomicCompareExchange(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* old_value = node->InputAt(2);
  Node* new_value = node->InputAt(3);

  MachineType type = AtomicOpRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Int8()) {
    opcode = kAtomicCompareExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kAtomicCompareExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kAtomicCompareExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kAtomicCompareExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kAtomicCompareExchangeWord32;
  } else {
    UNREACHABLE();
    return;
  }
  InstructionOperand outputs[1];
  AddressingMode addressing_mode;
  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseFixed(old_value, rax);
  inputs[input_count++] = g.UseUniqueRegister(new_value);
  inputs[input_count++] = g.UseUniqueRegister(base);
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  outputs[0] = g.DefineAsFixed(node, rax);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 1, outputs, input_count, inputs);
}
2386 :
// Shared lowering for atomic read-modify-write binops (add/sub/and/or/xor).
// Selects the width-specific opcode from the node's representation; the
// result is fixed to rax and one temp register is reserved for codegen.
void InstructionSelector::VisitAtomicBinaryOperation(
    Node* node, ArchOpcode int8_op, ArchOpcode uint8_op, ArchOpcode int16_op,
    ArchOpcode uint16_op, ArchOpcode word32_op) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  MachineType type = AtomicOpRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Int8()) {
    opcode = int8_op;
  } else if (type == MachineType::Uint8()) {
    opcode = uint8_op;
  } else if (type == MachineType::Int16()) {
    opcode = int16_op;
  } else if (type == MachineType::Uint16()) {
    opcode = uint16_op;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = word32_op;
  } else {
    UNREACHABLE();
    return;
  }
  InstructionOperand outputs[1];
  AddressingMode addressing_mode;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  // Unique registers keep value/base/index from aliasing in codegen.
  inputs[input_count++] = g.UseUniqueRegister(value);
  inputs[input_count++] = g.UseUniqueRegister(base);
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  outputs[0] = g.DefineAsFixed(node, rax);
  InstructionOperand temp[1];
  temp[0] = g.TempRegister();
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 1, outputs, input_count, inputs, 1, temp);
}
2430 :
// Generates one InstructionSelector::VisitAtomic<op> method per binary atomic
// operation below; each forwards to VisitAtomicBinaryOperation with the five
// width-specific opcodes for that operation.
#define VISIT_ATOMIC_BINOP(op)                                           \
  void InstructionSelector::VisitAtomic##op(Node* node) {                \
    VisitAtomicBinaryOperation(node, kAtomic##op##Int8, kAtomic##op##Uint8, \
                               kAtomic##op##Int16, kAtomic##op##Uint16,  \
                               kAtomic##op##Word32);                     \
  }
VISIT_ATOMIC_BINOP(Add)
VISIT_ATOMIC_BINOP(Sub)
VISIT_ATOMIC_BINOP(And)
VISIT_ATOMIC_BINOP(Or)
VISIT_ATOMIC_BINOP(Xor)
#undef VISIT_ATOMIC_BINOP
2443 :
// Integer SIMD types used to stamp out splat/extract/replace visitors below.
#define SIMD_TYPES(V) \
  V(I32x4)            \
  V(I16x8)            \
  V(I8x16)

// SIMD lane formats (lane width x lane count). Not referenced in this part
// of the file; presumably consumed by visitors defined elsewhere.
#define SIMD_FORMAT_LIST(V) \
  V(32x4)                   \
  V(16x8)                   \
  V(8x16)

// All SIMD binary operations that share the generic two-register,
// define-same-as-first emission pattern (see VISIT_SIMD_BINOP below).
#define SIMD_BINOP_LIST(V) \
  V(I32x4Add)              \
  V(I32x4AddHoriz)         \
  V(I32x4Sub)              \
  V(I32x4Mul)              \
  V(I32x4MinS)             \
  V(I32x4MaxS)             \
  V(I32x4Eq)               \
  V(I32x4Ne)               \
  V(I32x4GtS)              \
  V(I32x4GeS)              \
  V(I32x4MinU)             \
  V(I32x4MaxU)             \
  V(I32x4GtU)              \
  V(I32x4GeU)              \
  V(I16x8Add)              \
  V(I16x8AddSaturateS)     \
  V(I16x8AddHoriz)         \
  V(I16x8Sub)              \
  V(I16x8SubSaturateS)     \
  V(I16x8Mul)              \
  V(I16x8MinS)             \
  V(I16x8MaxS)             \
  V(I16x8Eq)               \
  V(I16x8Ne)               \
  V(I16x8GtS)              \
  V(I16x8GeS)              \
  V(I16x8AddSaturateU)     \
  V(I16x8SubSaturateU)     \
  V(I16x8MinU)             \
  V(I16x8MaxU)             \
  V(I16x8GtU)              \
  V(I16x8GeU)              \
  V(I8x16Add)              \
  V(I8x16AddSaturateS)     \
  V(I8x16Sub)              \
  V(I8x16SubSaturateS)     \
  V(I8x16MinS)             \
  V(I8x16MaxS)             \
  V(I8x16Eq)               \
  V(I8x16Ne)               \
  V(I8x16GtS)              \
  V(I8x16GeS)              \
  V(I8x16AddSaturateU)     \
  V(I8x16SubSaturateU)     \
  V(I8x16MinU)             \
  V(I8x16MaxU)             \
  V(I8x16GtU)              \
  V(I8x16GeU)              \
  V(S128And)               \
  V(S128Or)                \
  V(S128Xor)

// SIMD unary operations sharing the one-register emission pattern
// (see VISIT_SIMD_UNOP below).
#define SIMD_UNOP_LIST(V) \
  V(I32x4Neg)             \
  V(I16x8Neg)             \
  V(I8x16Neg)             \
  V(S128Not)

// SIMD shifts by an immediate count (see VISIT_SIMD_SHIFT below).
#define SIMD_SHIFT_OPCODES(V) \
  V(I32x4Shl)                 \
  V(I32x4ShrS)                \
  V(I32x4ShrU)                \
  V(I16x8Shl)                 \
  V(I16x8ShrS)                \
  V(I16x8ShrU)
2520 :
2521 0 : void InstructionSelector::VisitS128Zero(Node* node) {
2522 : X64OperandGenerator g(this);
2523 0 : Emit(kX64S128Zero, g.DefineAsRegister(node), g.DefineAsRegister(node));
2524 0 : }
2525 :
// Generates Visit<Type>Splat for each SIMD type: broadcasts the scalar
// input into every lane of a fresh vector register.
#define VISIT_SIMD_SPLAT(Type)                               \
  void InstructionSelector::Visit##Type##Splat(Node* node) { \
    X64OperandGenerator g(this);                             \
    Emit(kX64##Type##Splat, g.DefineAsRegister(node),        \
         g.Use(node->InputAt(0)));                           \
  }
SIMD_TYPES(VISIT_SIMD_SPLAT)
#undef VISIT_SIMD_SPLAT
2534 :
// Generates Visit<Type>ExtractLane for each SIMD type: reads the scalar in
// the lane selected by the node's int32 operator parameter, which is encoded
// as an immediate.
#define VISIT_SIMD_EXTRACT_LANE(Type)                              \
  void InstructionSelector::Visit##Type##ExtractLane(Node* node) { \
    X64OperandGenerator g(this);                                   \
    int32_t lane = OpParameter<int32_t>(node);                     \
    Emit(kX64##Type##ExtractLane, g.DefineAsRegister(node),        \
         g.UseRegister(node->InputAt(0)), g.UseImmediate(lane));   \
  }
SIMD_TYPES(VISIT_SIMD_EXTRACT_LANE)
#undef VISIT_SIMD_EXTRACT_LANE
2544 :
// Generates Visit<Type>ReplaceLane for each SIMD type: writes input 1 into
// the lane (immediate from the operator parameter) of input 0. The output is
// constrained to the same register as input 0, so the instruction can update
// the vector in place.
#define VISIT_SIMD_REPLACE_LANE(Type)                              \
  void InstructionSelector::Visit##Type##ReplaceLane(Node* node) { \
    X64OperandGenerator g(this);                                   \
    int32_t lane = OpParameter<int32_t>(node);                     \
    Emit(kX64##Type##ReplaceLane, g.DefineSameAsFirst(node),       \
         g.UseRegister(node->InputAt(0)), g.UseImmediate(lane),    \
         g.Use(node->InputAt(1)));                                 \
  }
SIMD_TYPES(VISIT_SIMD_REPLACE_LANE)
#undef VISIT_SIMD_REPLACE_LANE
2555 :
// Generates a visitor for each SIMD shift: the shift amount comes from the
// operator parameter and is always an immediate; the destination shares the
// register of the shifted input.
#define VISIT_SIMD_SHIFT(Opcode)                                  \
  void InstructionSelector::Visit##Opcode(Node* node) {           \
    X64OperandGenerator g(this);                                  \
    int32_t value = OpParameter<int32_t>(node);                   \
    Emit(kX64##Opcode, g.DefineSameAsFirst(node),                 \
         g.UseRegister(node->InputAt(0)), g.UseImmediate(value)); \
  }
SIMD_SHIFT_OPCODES(VISIT_SIMD_SHIFT)
#undef VISIT_SIMD_SHIFT
2565 :
// Generates a visitor for each unary SIMD operation: one register input,
// result in a freshly allocated register.
#define VISIT_SIMD_UNOP(Opcode)                         \
  void InstructionSelector::Visit##Opcode(Node* node) { \
    X64OperandGenerator g(this);                        \
    Emit(kX64##Opcode, g.DefineAsRegister(node),        \
         g.UseRegister(node->InputAt(0)));              \
  }
SIMD_UNOP_LIST(VISIT_SIMD_UNOP)
#undef VISIT_SIMD_UNOP
2574 :
// Generates a visitor for each binary SIMD operation: both operands in
// registers, destination constrained to the first operand's register
// (matching the two-operand x64 SSE instruction forms).
#define VISIT_SIMD_BINOP(Opcode)                                            \
  void InstructionSelector::Visit##Opcode(Node* node) {                     \
    X64OperandGenerator g(this);                                            \
    Emit(kX64##Opcode, g.DefineSameAsFirst(node),                           \
         g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1))); \
  }
SIMD_BINOP_LIST(VISIT_SIMD_BINOP)
#undef VISIT_SIMD_BINOP
2583 :
2584 36 : void InstructionSelector::VisitS128Select(Node* node) {
2585 : X64OperandGenerator g(this);
2586 : Emit(kX64S128Select, g.DefineSameAsFirst(node),
2587 : g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)),
2588 108 : g.UseRegister(node->InputAt(2)));
2589 36 : }
2590 :
// Never reached on x64: SupportedMachineOperatorFlags() below does not
// advertise Int32AbsWithOverflow, so the middle end never emits this
// operator for this backend.
void InstructionSelector::VisitInt32AbsWithOverflow(Node* node) {
  UNREACHABLE();
}
2594 :
// Never reached on x64: SupportedMachineOperatorFlags() below does not
// advertise Int64AbsWithOverflow, so the middle end never emits this
// operator for this backend.
void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
  UNREACHABLE();
}
2598 :
2599 : // static
2600 : MachineOperatorBuilder::Flags
2601 1346329 : InstructionSelector::SupportedMachineOperatorFlags() {
2602 : MachineOperatorBuilder::Flags flags =
2603 : MachineOperatorBuilder::kWord32ShiftIsSafe |
2604 : MachineOperatorBuilder::kWord32Ctz | MachineOperatorBuilder::kWord64Ctz;
2605 1346329 : if (CpuFeatures::IsSupported(POPCNT)) {
2606 : flags |= MachineOperatorBuilder::kWord32Popcnt |
2607 : MachineOperatorBuilder::kWord64Popcnt;
2608 : }
2609 1346329 : if (CpuFeatures::IsSupported(SSE4_1)) {
2610 : flags |= MachineOperatorBuilder::kFloat32RoundDown |
2611 : MachineOperatorBuilder::kFloat64RoundDown |
2612 : MachineOperatorBuilder::kFloat32RoundUp |
2613 : MachineOperatorBuilder::kFloat64RoundUp |
2614 : MachineOperatorBuilder::kFloat32RoundTruncate |
2615 : MachineOperatorBuilder::kFloat64RoundTruncate |
2616 : MachineOperatorBuilder::kFloat32RoundTiesEven |
2617 : MachineOperatorBuilder::kFloat64RoundTiesEven;
2618 : }
2619 1346329 : return flags;
2620 : }
2621 :
// static
// x64 handles unaligned loads and stores natively, so the backend reports
// full unaligned access support and needs no alignment fix-ups.
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}
2628 :
2629 : } // namespace compiler
2630 : } // namespace internal
2631 : } // namespace v8
|