Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include <algorithm>
6 :
7 : #include "src/base/adapters.h"
8 : #include "src/compiler/instruction-selector-impl.h"
9 : #include "src/compiler/node-matchers.h"
10 : #include "src/compiler/node-properties.h"
11 :
12 : namespace v8 {
13 : namespace internal {
14 : namespace compiler {
15 :
16 : // Adds X64-specific methods for generating operands.
17 : class X64OperandGenerator final : public OperandGenerator {
18 : public:
19 : explicit X64OperandGenerator(InstructionSelector* selector)
20 : : OperandGenerator(selector) {}
21 :
22 16975583 : bool CanBeImmediate(Node* node) {
23 16975583 : switch (node->opcode()) {
24 : case IrOpcode::kInt32Constant:
25 : case IrOpcode::kRelocatableInt32Constant:
26 : return true;
27 : case IrOpcode::kInt64Constant: {
28 7426517 : const int64_t value = OpParameter<int64_t>(node);
29 7426517 : return std::numeric_limits<int32_t>::min() < value &&
30 : value <= std::numeric_limits<int32_t>::max();
31 : }
32 : case IrOpcode::kNumberConstant: {
33 1348428 : const double value = OpParameter<double>(node);
34 1348428 : return bit_cast<int64_t>(value) == 0;
35 : }
36 : default:
37 : return false;
38 : }
39 : }
40 :
41 70124 : int32_t GetImmediateIntegerValue(Node* node) {
42 : DCHECK(CanBeImmediate(node));
43 70124 : if (node->opcode() == IrOpcode::kInt32Constant) {
44 0 : return OpParameter<int32_t>(node);
45 : }
46 : DCHECK_EQ(IrOpcode::kInt64Constant, node->opcode());
47 70124 : return static_cast<int32_t>(OpParameter<int64_t>(node));
48 : }
49 :
50 8373439 : bool CanBeMemoryOperand(InstructionCode opcode, Node* node, Node* input,
51 : int effect_level) {
52 8601079 : if (input->opcode() != IrOpcode::kLoad ||
53 1446764 : !selector()->CanCover(node, input)) {
54 : return false;
55 : }
56 1219136 : if (effect_level != selector()->GetEffectLevel(input)) {
57 : return false;
58 : }
59 : MachineRepresentation rep =
60 1219129 : LoadRepresentationOf(input->op()).representation();
61 1219129 : switch (opcode) {
62 : case kX64And:
63 : case kX64Or:
64 : case kX64Xor:
65 : case kX64Add:
66 : case kX64Sub:
67 : case kX64Push:
68 : case kX64Cmp:
69 : case kX64Test:
70 1598007 : return rep == MachineRepresentation::kWord64 || IsAnyTagged(rep);
71 : case kX64And32:
72 : case kX64Or32:
73 : case kX64Xor32:
74 : case kX64Add32:
75 : case kX64Sub32:
76 : case kX64Cmp32:
77 : case kX64Test32:
78 27172 : return rep == MachineRepresentation::kWord32;
79 : case kX64Cmp16:
80 : case kX64Test16:
81 1902 : return rep == MachineRepresentation::kWord16;
82 : case kX64Cmp8:
83 : case kX64Test8:
84 224566 : return rep == MachineRepresentation::kWord8;
85 : default:
86 : break;
87 : }
88 : return false;
89 : }
90 :
91 6301054 : AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent,
92 5892289 : Node* base, Node* displacement,
93 : DisplacementMode displacement_mode,
94 : InstructionOperand inputs[],
95 : size_t* input_count) {
96 : AddressingMode mode = kMode_MRI;
97 6301054 : if (base != nullptr && (index != nullptr || displacement != nullptr)) {
98 5893029 : if (base->opcode() == IrOpcode::kInt32Constant &&
99 740 : OpParameter<int32_t>(base) == 0) {
100 : base = nullptr;
101 6184137 : } else if (base->opcode() == IrOpcode::kInt64Constant &&
102 291872 : OpParameter<int64_t>(base) == 0) {
103 : base = nullptr;
104 : }
105 : }
106 6301054 : if (base != nullptr) {
107 6144637 : inputs[(*input_count)++] = UseRegister(base);
108 6144603 : if (index != nullptr) {
109 : DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
110 526884 : inputs[(*input_count)++] = UseRegister(index);
111 526879 : if (displacement != nullptr) {
112 : inputs[(*input_count)++] = displacement_mode
113 : ? UseNegatedImmediate(displacement)
114 314596 : : UseImmediate(displacement);
115 : static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
116 : kMode_MR4I, kMode_MR8I};
117 314596 : mode = kMRnI_modes[scale_exponent];
118 : } else {
119 : static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
120 : kMode_MR4, kMode_MR8};
121 212283 : mode = kMRn_modes[scale_exponent];
122 : }
123 : } else {
124 5617719 : if (displacement == nullptr) {
125 : mode = kMode_MR;
126 : } else {
127 : inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
128 : ? UseNegatedImmediate(displacement)
129 5267654 : : UseImmediate(displacement);
130 : mode = kMode_MRI;
131 : }
132 : }
133 : } else {
134 : DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
135 156417 : if (displacement != nullptr) {
136 147191 : if (index == nullptr) {
137 398 : inputs[(*input_count)++] = UseRegister(displacement);
138 : mode = kMode_MR;
139 : } else {
140 146793 : inputs[(*input_count)++] = UseRegister(index);
141 : inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
142 : ? UseNegatedImmediate(displacement)
143 146793 : : UseImmediate(displacement);
144 : static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
145 : kMode_M4I, kMode_M8I};
146 146793 : mode = kMnI_modes[scale_exponent];
147 : }
148 : } else {
149 9226 : inputs[(*input_count)++] = UseRegister(index);
150 : static const AddressingMode kMn_modes[] = {kMode_MR, kMode_MR1,
151 : kMode_M4, kMode_M8};
152 9226 : mode = kMn_modes[scale_exponent];
153 9226 : if (mode == kMode_MR1) {
154 : // [%r1 + %r1*1] has a smaller encoding than [%r1*2+0]
155 4877 : inputs[(*input_count)++] = UseRegister(index);
156 : }
157 : }
158 : }
159 6301062 : return mode;
160 : }
161 :
162 5688918 : AddressingMode GetEffectiveAddressMemoryOperand(Node* operand,
163 : InstructionOperand inputs[],
164 : size_t* input_count) {
165 5688918 : if (selector()->CanAddressRelativeToRootsRegister()) {
166 4081839 : LoadMatcher<ExternalReferenceMatcher> m(operand);
167 4081847 : if (m.index().HasValue() && m.object().HasValue()) {
168 : Address const kRootsRegisterValue =
169 : kRootRegisterBias +
170 : reinterpret_cast<Address>(
171 585662 : selector()->isolate()->heap()->roots_array_start());
172 : ptrdiff_t const delta =
173 585662 : m.index().Value() +
174 585662 : (m.object().Value().address() - kRootsRegisterValue);
175 585662 : if (is_int32(delta)) {
176 585662 : inputs[(*input_count)++] = TempImmediate(static_cast<int32_t>(delta));
177 585663 : return kMode_Root;
178 : }
179 : }
180 : }
181 : BaseWithIndexAndDisplacement64Matcher m(operand, AddressOption::kAllowAll);
182 : DCHECK(m.matches());
183 5103277 : if (m.displacement() == nullptr || CanBeImmediate(m.displacement())) {
184 : return GenerateMemoryOperandInputs(
185 : m.index(), m.scale(), m.base(), m.displacement(),
186 5102907 : m.displacement_mode(), inputs, input_count);
187 471 : } else if (m.base() == nullptr &&
188 100 : m.displacement_mode() == kPositiveDisplacement) {
189 : // The displacement cannot be an immediate, but we can use the
190 : // displacement as base instead and still benefit from addressing
191 : // modes for the scale.
192 : return GenerateMemoryOperandInputs(m.index(), m.scale(), m.displacement(),
193 : nullptr, m.displacement_mode(), inputs,
194 100 : input_count);
195 : } else {
196 542 : inputs[(*input_count)++] = UseRegister(operand->InputAt(0));
197 542 : inputs[(*input_count)++] = UseRegister(operand->InputAt(1));
198 271 : return kMode_MR1;
199 : }
200 : }
201 :
202 : bool CanBeBetterLeftOperand(Node* node) const {
203 402117 : return !selector()->IsLive(node);
204 : }
205 : };
206 :
207 : namespace {
208 2779058 : ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
209 : ArchOpcode opcode = kArchNop;
210 2779058 : switch (load_rep.representation()) {
211 : case MachineRepresentation::kFloat32:
212 : opcode = kX64Movss;
213 10508 : break;
214 : case MachineRepresentation::kFloat64:
215 : opcode = kX64Movsd;
216 289497 : break;
217 : case MachineRepresentation::kBit: // Fall through.
218 : case MachineRepresentation::kWord8:
219 97158 : opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
220 97158 : break;
221 : case MachineRepresentation::kWord16:
222 13071 : opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
223 13071 : break;
224 : case MachineRepresentation::kWord32:
225 : opcode = kX64Movl;
226 234926 : break;
227 : case MachineRepresentation::kTaggedSigned: // Fall through.
228 : case MachineRepresentation::kTaggedPointer: // Fall through.
229 : case MachineRepresentation::kTagged: // Fall through.
230 : case MachineRepresentation::kWord64:
231 : opcode = kX64Movq;
232 2133891 : break;
233 : case MachineRepresentation::kSimd128: // Fall through.
234 : case MachineRepresentation::kSimd1x4: // Fall through.
235 : case MachineRepresentation::kSimd1x8: // Fall through.
236 : case MachineRepresentation::kSimd1x16: // Fall through.
237 : case MachineRepresentation::kNone:
238 0 : UNREACHABLE();
239 : break;
240 : }
241 2779058 : return opcode;
242 : }
243 :
244 1985400 : ArchOpcode GetStoreOpcode(StoreRepresentation store_rep) {
245 1985400 : switch (store_rep.representation()) {
246 : case MachineRepresentation::kFloat32:
247 : return kX64Movss;
248 : break;
249 : case MachineRepresentation::kFloat64:
250 : return kX64Movsd;
251 : break;
252 : case MachineRepresentation::kBit: // Fall through.
253 : case MachineRepresentation::kWord8:
254 : return kX64Movb;
255 : break;
256 : case MachineRepresentation::kWord16:
257 : return kX64Movw;
258 : break;
259 : case MachineRepresentation::kWord32:
260 : return kX64Movl;
261 : break;
262 : case MachineRepresentation::kTaggedSigned: // Fall through.
263 : case MachineRepresentation::kTaggedPointer: // Fall through.
264 : case MachineRepresentation::kTagged: // Fall through.
265 : case MachineRepresentation::kWord64:
266 : return kX64Movq;
267 : break;
268 : case MachineRepresentation::kSimd128: // Fall through.
269 : case MachineRepresentation::kSimd1x4: // Fall through.
270 : case MachineRepresentation::kSimd1x8: // Fall through.
271 : case MachineRepresentation::kSimd1x16: // Fall through.
272 : case MachineRepresentation::kNone:
273 0 : UNREACHABLE();
274 : return kArchNop;
275 : }
276 0 : UNREACHABLE();
277 : return kArchNop;
278 : }
279 :
280 : } // namespace
281 :
282 5558121 : void InstructionSelector::VisitLoad(Node* node) {
283 2779055 : LoadRepresentation load_rep = LoadRepresentationOf(node->op());
284 : X64OperandGenerator g(this);
285 :
286 2779058 : ArchOpcode opcode = GetLoadOpcode(load_rep);
287 5558084 : InstructionOperand outputs[1];
288 2779042 : outputs[0] = g.DefineAsRegister(node);
289 13895266 : InstructionOperand inputs[4];
290 2779054 : size_t input_count = 0;
291 : AddressingMode mode =
292 2779054 : g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
293 2779066 : InstructionCode code = opcode | AddressingModeField::encode(mode);
294 2779066 : if (node->opcode() == IrOpcode::kProtectedLoad) {
295 1470 : code |= MiscField::encode(X64MemoryProtection::kProtected);
296 : // Add the source position as an input
297 2940 : inputs[input_count++] = g.UseImmediate(node->InputAt(2));
298 : }
299 2779066 : Emit(code, 1, outputs, input_count, inputs);
300 2779065 : }
301 :
302 1470 : void InstructionSelector::VisitProtectedLoad(Node* node) { VisitLoad(node); }
303 :
// Lowers a store. Stores that need a write barrier become the generic
// kArchStoreWithWriteBarrier instruction; all others become a plain x64 mov
// with a folded effective-address operand.
void InstructionSelector::VisitStore(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();

  if (write_barrier_kind != kNoWriteBarrier) {
    DCHECK(CanBeTaggedPointer(store_rep.representation()));
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    // Unique registers: the operands must not alias, since the write-barrier
    // code stub reads them after the store.
    inputs[input_count++] = g.UseUniqueRegister(base);
    if (g.CanBeImmediate(index)) {
      inputs[input_count++] = g.UseImmediate(index);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(index);
      addressing_mode = kMode_MR1;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    // Narrow the barrier's work based on what kind of value is stored.
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    ArchOpcode opcode = GetStoreOpcode(store_rep);
    InstructionOperand inputs[4];
    size_t input_count = 0;
    AddressingMode addressing_mode =
        g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
    InstructionCode code =
        opcode | AddressingModeField::encode(addressing_mode);
    // A 64->32 truncation is a no-op for a store narrower than a pointer:
    // store the untruncated input directly when this node covers it.
    if ((ElementSizeLog2Of(store_rep.representation()) < kPointerSizeLog2) &&
        (value->opcode() == IrOpcode::kTruncateInt64ToInt32) &&
        CanCover(node, value)) {
      value = value->InputAt(0);
    }
    InstructionOperand value_operand =
        g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
    inputs[input_count++] = value_operand;
    Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
         inputs);
  }
}
368 :
369 360 : void InstructionSelector::VisitProtectedStore(Node* node) {
370 : X64OperandGenerator g(this);
371 : Node* value = node->InputAt(2);
372 : Node* position = node->InputAt(3);
373 :
374 180 : StoreRepresentation store_rep = StoreRepresentationOf(node->op());
375 :
376 180 : ArchOpcode opcode = GetStoreOpcode(store_rep);
377 1080 : InstructionOperand inputs[5];
378 180 : size_t input_count = 0;
379 : AddressingMode addressing_mode =
380 180 : g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
381 180 : InstructionCode code = opcode | AddressingModeField::encode(addressing_mode) |
382 180 : MiscField::encode(X64MemoryProtection::kProtected);
383 : InstructionOperand value_operand =
384 180 : g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
385 180 : inputs[input_count++] = value_operand;
386 180 : inputs[input_count++] = g.UseImmediate(position);
387 180 : Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs);
388 180 : }
389 :
// Architecture supports unaligned access, therefore VisitLoad is used instead
// and this visitor must never be reached.
void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
392 :
// Architecture supports unaligned access, therefore VisitStore is used instead
// and this visitor must never be reached.
void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
395 :
// Lowers a bounds-checked load: buffer, offset and length become inputs of a
// kCheckedLoad* instruction that performs the bounds check at runtime.
void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:        // Fall through.
    case MachineRepresentation::kSimd128:    // Fall through.
    case MachineRepresentation::kSimd1x4:    // Fall through.
    case MachineRepresentation::kSimd1x8:    // Fall through.
    case MachineRepresentation::kSimd1x16:   // Fall through.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:     // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // If the offset is (base + constant) and the constant is non-negative and
  // within a constant length, fold the constant into the instruction as an
  // immediate displacement instead of materializing the addition.
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length));
      return;
    }
  }
  // Generic form: zero immediate displacement, offset in a register.
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
       g.UseRegister(offset), g.TempImmediate(0), length_operand);
}
451 :
452 :
// Lowers a bounds-checked store; mirrors VisitCheckedLoad with the stored
// value appended as the final input.
void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:        // Fall through.
    case MachineRepresentation::kSimd128:    // Fall through.
    case MachineRepresentation::kSimd1x4:    // Fall through.
    case MachineRepresentation::kSimd1x8:    // Fall through.
    case MachineRepresentation::kSimd1x16:   // Fall through.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:     // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  InstructionOperand value_operand =
      g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
  // Same (base + constant) offset folding as in VisitCheckedLoad.
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.NoOutput(), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length),
           value_operand);
      return;
    }
  }
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.NoOutput(), g.UseRegister(buffer), g.UseRegister(offset),
       g.TempImmediate(0), length_operand, value_operand);
}
512 :
513 :
// Shared routine for multiple binary operations. The flags continuation
// |cont| determines whether the instruction additionally branches, sets a
// boolean result, or deoptimizes based on the resulting flags.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  InstructionOperand inputs[6];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  // TODO(turbofan): match complex addressing modes.
  if (left == right) {
    // If both inputs refer to the same operand, enforce allocating a register
    // for both of them to ensure that we don't end up generating code like
    // this:
    //
    //   mov rax, [rbp-0x10]
    //   add rax, [rbp-0x10]
    //   jo label
    InstructionOperand const input = g.UseRegister(left);
    inputs[input_count++] = input;
    inputs[input_count++] = input;
  } else if (g.CanBeImmediate(right)) {
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.UseImmediate(right);
  } else {
    int effect_level = selector->GetEffectLevel(node);
    if (cont->IsBranch()) {
      // For a branch, load folding must be judged at the effect level of the
      // branch itself (the control input of the true block's predecessor).
      effect_level = selector->GetEffectLevel(
          cont->true_block()->PredecessorAt(0)->control_input());
    }
    // For commutative operations, swap the operands when that lets the dead
    // right operand's register be reused for the result, unless doing so
    // would give up folding a load on the right.
    if (node->op()->HasProperty(Operator::kCommutative) &&
        g.CanBeBetterLeftOperand(right) &&
        (!g.CanBeBetterLeftOperand(left) ||
         !g.CanBeMemoryOperand(opcode, node, right, effect_level))) {
      std::swap(left, right);
    }
    if (g.CanBeMemoryOperand(opcode, node, right, effect_level)) {
      // Fold the load on the right directly into the instruction.
      inputs[input_count++] = g.UseRegister(left);
      AddressingMode addressing_mode =
          g.GetEffectiveAddressMemoryOperand(right, inputs, &input_count);
      opcode |= AddressingModeField::encode(addressing_mode);
    } else {
      inputs[input_count++] = g.UseRegister(left);
      inputs[input_count++] = g.Use(right);
    }
  }

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  // x64 binops are two-address: the output reuses the first input register.
  outputs[output_count++] = g.DefineSameAsFirst(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->kind(), cont->reason(), cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}
587 :
588 :
// Shared routine for multiple binary operations.
// Convenience overload using an empty flags continuation (no branch, flag
// materialization, or deoptimization attached to the instruction).
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, &cont);
}
595 :
596 :
597 84560 : void InstructionSelector::VisitWord32And(Node* node) {
598 : X64OperandGenerator g(this);
599 84560 : Uint32BinopMatcher m(node);
600 84560 : if (m.right().Is(0xff)) {
601 1514 : Emit(kX64Movzxbl, g.DefineAsRegister(node), g.Use(m.left().node()));
602 83046 : } else if (m.right().Is(0xffff)) {
603 4432 : Emit(kX64Movzxwl, g.DefineAsRegister(node), g.Use(m.left().node()));
604 : } else {
605 : VisitBinop(this, node, kX64And32);
606 : }
607 84560 : }
608 :
609 :
610 139328 : void InstructionSelector::VisitWord64And(Node* node) {
611 : VisitBinop(this, node, kX64And);
612 139328 : }
613 :
614 :
615 13949 : void InstructionSelector::VisitWord32Or(Node* node) {
616 : VisitBinop(this, node, kX64Or32);
617 13949 : }
618 :
619 :
620 17920 : void InstructionSelector::VisitWord64Or(Node* node) {
621 : VisitBinop(this, node, kX64Or);
622 17920 : }
623 :
624 :
625 13814 : void InstructionSelector::VisitWord32Xor(Node* node) {
626 : X64OperandGenerator g(this);
627 13814 : Uint32BinopMatcher m(node);
628 13814 : if (m.right().Is(-1)) {
629 3655 : Emit(kX64Not32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
630 : } else {
631 : VisitBinop(this, node, kX64Xor32);
632 : }
633 13815 : }
634 :
635 :
636 454 : void InstructionSelector::VisitWord64Xor(Node* node) {
637 : X64OperandGenerator g(this);
638 454 : Uint64BinopMatcher m(node);
639 454 : if (m.right().Is(-1)) {
640 66 : Emit(kX64Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
641 : } else {
642 : VisitBinop(this, node, kX64Xor);
643 : }
644 454 : }
645 :
646 :
647 : namespace {
648 :
649 : // Shared routine for multiple 32-bit shift operations.
650 : // TODO(bmeurer): Merge this with VisitWord64Shift using template magic?
651 59485 : void VisitWord32Shift(InstructionSelector* selector, Node* node,
652 : ArchOpcode opcode) {
653 : X64OperandGenerator g(selector);
654 59485 : Int32BinopMatcher m(node);
655 : Node* left = m.left().node();
656 : Node* right = m.right().node();
657 :
658 59490 : if (g.CanBeImmediate(right)) {
659 : selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
660 52509 : g.UseImmediate(right));
661 : } else {
662 : selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
663 6984 : g.UseFixed(right, rcx));
664 : }
665 59494 : }
666 :
667 :
// Shared routine for multiple 64-bit shift operations.
// TODO(bmeurer): Merge this with VisitWord32Shift using template magic?
void VisitWord64Shift(InstructionSelector* selector, Node* node,
                      ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();

  if (g.CanBeImmediate(right)) {
    // Constant shift count encoded as an immediate.
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseImmediate(right));
  } else {
    // An explicit (count & 0x3F) mask is redundant: x64 shift instructions
    // mask the count themselves, so skip straight to the unmasked count.
    if (m.right().IsWord64And()) {
      Int64BinopMatcher mright(right);
      if (mright.right().Is(0x3F)) {
        right = mright.left().node();
      }
    }
    // Variable shift counts must live in rcx.
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseFixed(right, rcx));
  }
}
691 :
692 1198014 : void EmitLea(InstructionSelector* selector, InstructionCode opcode,
693 : Node* result, Node* index, int scale, Node* base,
694 : Node* displacement, DisplacementMode displacement_mode) {
695 : X64OperandGenerator g(selector);
696 :
697 5990066 : InstructionOperand inputs[4];
698 1198014 : size_t input_count = 0;
699 : AddressingMode mode =
700 : g.GenerateMemoryOperandInputs(index, scale, base, displacement,
701 1198014 : displacement_mode, inputs, &input_count);
702 :
703 : DCHECK_NE(0u, input_count);
704 : DCHECK_GE(arraysize(inputs), input_count);
705 :
706 2396105 : InstructionOperand outputs[1];
707 1198053 : outputs[0] = g.DefineAsRegister(result);
708 :
709 1198048 : opcode = AddressingModeField::encode(mode) | opcode;
710 :
711 1198048 : selector->Emit(opcode, 1, outputs, input_count, inputs);
712 1198052 : }
713 :
714 : } // namespace
715 :
716 :
717 12367 : void InstructionSelector::VisitWord32Shl(Node* node) {
718 12367 : Int32ScaleMatcher m(node, true);
719 12367 : if (m.matches()) {
720 : Node* index = node->InputAt(0);
721 2487 : Node* base = m.power_of_two_plus_one() ? index : nullptr;
722 : EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr,
723 2487 : kPositiveDisplacement);
724 14854 : return;
725 : }
726 9880 : VisitWord32Shift(this, node, kX64Shl32);
727 : }
728 :
729 :
// Lowers 64-bit left shift: small constant shifts become a scaled lea, and
// a widening conversion feeding a shift by >= 32 skips the extension.
void InstructionSelector::VisitWord64Shl(Node* node) {
  X64OperandGenerator g(this);
  Int64ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, kX64Lea, node, index, m.scale(), base, nullptr,
            kPositiveDisplacement);
    return;
  } else {
    Int64BinopMatcher m(node);
    if ((m.left().IsChangeInt32ToInt64() ||
         m.left().IsChangeUint32ToUint64()) &&
        m.right().IsInRange(32, 63)) {
      // There's no need to sign/zero-extend to 64-bit if we shift out the upper
      // 32 bits anyway.
      Emit(kX64Shl, g.DefineSameAsFirst(node),
           g.UseRegister(m.left().node()->InputAt(0)),
           g.UseImmediate(m.right().node()));
      return;
    }
  }
  VisitWord64Shift(this, node, kX64Shl);
}
754 :
755 :
756 35860 : void InstructionSelector::VisitWord32Shr(Node* node) {
757 35860 : VisitWord32Shift(this, node, kX64Shr32);
758 35863 : }
759 :
760 : namespace {
// Tries to replace (Load64 >> 32) — where the load is covered by the shift —
// with a single 32-bit load of the upper half (displacement + 4). Returns
// true and emits |opcode| if the pattern matched.
bool TryMatchLoadWord64AndShiftRight(InstructionSelector* selector, Node* node,
                                     InstructionCode opcode) {
  DCHECK(IrOpcode::kWord64Sar == node->opcode() ||
         IrOpcode::kWord64Shr == node->opcode());
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  if (selector->CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
      m.right().Is(32)) {
    // Just load and sign-extend the interesting 4 bytes instead. This happens,
    // for example, when we're loading and untagging SMIs.
    BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(),
                                                AddressOption::kAllowAll);
    if (mleft.matches() && (mleft.displacement() == nullptr ||
                            g.CanBeImmediate(mleft.displacement()))) {
      size_t input_count = 0;
      InstructionOperand inputs[3];
      AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
          m.left().node(), inputs, &input_count);
      if (mleft.displacement() == nullptr) {
        // Make sure that the addressing mode indicates the presence of an
        // immediate displacement. It seems that we never use M1 and M2, but we
        // handle them here anyways.
        switch (mode) {
          case kMode_MR:
            mode = kMode_MRI;
            break;
          case kMode_MR1:
            mode = kMode_MR1I;
            break;
          case kMode_MR2:
            mode = kMode_MR2I;
            break;
          case kMode_MR4:
            mode = kMode_MR4I;
            break;
          case kMode_MR8:
            mode = kMode_MR8I;
            break;
          case kMode_M1:
            mode = kMode_M1I;
            break;
          case kMode_M2:
            mode = kMode_M2I;
            break;
          case kMode_M4:
            mode = kMode_M4I;
            break;
          case kMode_M8:
            mode = kMode_M8I;
            break;
          case kMode_None:
          case kMode_MRI:
          case kMode_MR1I:
          case kMode_MR2I:
          case kMode_MR4I:
          case kMode_MR8I:
          case kMode_M1I:
          case kMode_M2I:
          case kMode_M4I:
          case kMode_M8I:
          case kMode_Root:
            UNREACHABLE();
        }
        // No prior displacement: append a fresh immediate of 4.
        inputs[input_count++] = ImmediateOperand(ImmediateOperand::INLINE, 4);
      } else {
        // Bump the existing displacement by 4 to address the upper word.
        int32_t displacement = g.GetImmediateIntegerValue(mleft.displacement());
        inputs[input_count - 1] =
            ImmediateOperand(ImmediateOperand::INLINE, displacement + 4);
      }
      InstructionOperand outputs[] = {g.DefineAsRegister(node)};
      InstructionCode code = opcode | AddressingModeField::encode(mode);
      selector->Emit(code, 1, outputs, input_count, inputs);
      return true;
    }
  }
  return false;
}
838 : } // namespace
839 :
840 98915 : void InstructionSelector::VisitWord64Shr(Node* node) {
841 197830 : if (TryMatchLoadWord64AndShiftRight(this, node, kX64Movl)) return;
842 98913 : VisitWord64Shift(this, node, kX64Shr);
843 : }
844 :
// Word32Sar: recognize the sign-extension idioms (x << 16) >> 16 and
// (x << 24) >> 24 and lower them to a single movsxwl/movsxbl instead of a
// shift pair. Falls back to a plain 32-bit arithmetic shift otherwise.
void InstructionSelector::VisitWord32Sar(Node* node) {
  X64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (CanCover(m.node(), m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      // (x << 16) >> 16: sign-extend the low 16 bits.
      Emit(kX64Movsxwl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
      return;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      // (x << 24) >> 24: sign-extend the low 8 bits.
      Emit(kX64Movsxbl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
      return;
    }
  }
  VisitWord32Shift(this, node, kX64Sar32);
}
860 :
861 46646 : void InstructionSelector::VisitWord64Sar(Node* node) {
862 93292 : if (TryMatchLoadWord64AndShiftRight(this, node, kX64Movsxlq)) return;
863 22487 : VisitWord64Shift(this, node, kX64Sar);
864 : }
865 :
866 :
// 32-bit rotate right.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitWord32Shift(this, node, kX64Ror32);
}


// 64-bit rotate right.
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitWord64Shift(this, node, kX64Ror);
}

// Bit/byte reversal operations are not expected to reach this backend
// (hence UNREACHABLE below) — presumably the machine-operator builder does
// not enable them for x64; TODO confirm.
void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitWord64ReverseBytes(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitWord32ReverseBytes(Node* node) { UNREACHABLE(); }
884 :
// Int32Add: prefer a single leal when the add matches a
// base+index*scale+displacement pattern (also avoids clobbering an input,
// since lea is non-destructive); otherwise emit addl.
void InstructionSelector::VisitInt32Add(Node* node) {
  X64OperandGenerator g(this);

  // Try to match the Add to a leal pattern
  BaseWithIndexAndDisplacement32Matcher m(node);
  if (m.matches() &&
      (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
    EmitLea(this, kX64Lea32, node, m.index(), m.scale(), m.base(),
            m.displacement(), m.displacement_mode());
    return;
  }

  // No leal pattern match, use addl
  VisitBinop(this, node, kX64Add32);
}
900 :
901 :
// Int64Add: same strategy as VisitInt32Add, but 64-bit — prefer a single
// leaq for base+index*scale+displacement patterns, else emit addq.
void InstructionSelector::VisitInt64Add(Node* node) {
  X64OperandGenerator g(this);

  // Try to match the Add to a leaq pattern
  BaseWithIndexAndDisplacement64Matcher m(node);
  if (m.matches() &&
      (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
    EmitLea(this, kX64Lea, node, m.index(), m.scale(), m.base(),
            m.displacement(), m.displacement_mode());
    return;
  }

  // No leaq pattern match, use addq
  VisitBinop(this, node, kX64Add);
}
917 :
918 :
// Int64AddWithOverflow: if the overflow projection is used, materialize it
// via a kOverflow flags continuation; otherwise just emit the add.
void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX64Add, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX64Add, &cont);
}
927 :
928 :
929 28280 : void InstructionSelector::VisitInt32Sub(Node* node) {
930 : X64OperandGenerator g(this);
931 28280 : Int32BinopMatcher m(node);
932 28279 : if (m.left().Is(0)) {
933 5377 : Emit(kX64Neg32, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
934 : } else {
935 27304 : if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
936 : // Turn subtractions of constant values into immediate "leal" instructions
937 : // by negating the value.
938 : Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI),
939 : g.DefineAsRegister(node), g.UseRegister(m.left().node()),
940 8804 : g.TempImmediate(-m.right().Value()));
941 32682 : return;
942 : }
943 : VisitBinop(this, node, kX64Sub32);
944 : }
945 : }
946 :
947 :
// Int64Sub: lower 0 - x to negq; lower x - constant to a non-destructive
// leaq with the negated constant; otherwise emit subq.
void InstructionSelector::VisitInt64Sub(Node* node) {
  X64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().Is(0)) {
    // 0 - x is just negation.
    Emit(kX64Neg, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
  } else {
    if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
      // Turn subtractions of constant values into immediate "leaq" instructions
      // by negating the value. The negation cannot overflow: CanBeImmediate
      // only accepts 64-bit constants strictly greater than INT32_MIN.
      Emit(kX64Lea | AddressingModeField::encode(kMode_MRI),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(-static_cast<int32_t>(m.right().Value())));
      return;
    }
    VisitBinop(this, node, kX64Sub);
  }
}
965 :
966 :
// Int64SubWithOverflow: mirror of VisitInt64AddWithOverflow — attach a
// kOverflow flags continuation when the overflow projection is used.
void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX64Sub, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX64Sub, &cont);
}
975 :
976 :
namespace {

// Multiply: use the three-operand imul form when the right operand is an
// immediate (non-destructive); otherwise the two-operand form, preferring
// the "better" operand on the left.
void VisitMul(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  if (g.CanBeImmediate(right)) {
    selector->Emit(opcode, g.DefineAsRegister(node), g.Use(left),
                   g.UseImmediate(right));
  } else {
    if (g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.Use(right));
  }
}

// High half of a widening multiply: one input is fixed in rax, the result is
// taken from rdx; rax is reserved as a temp since the instruction clobbers it.
void VisitMulHigh(InstructionSelector* selector, Node* node,
                  ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  // Prefer putting the dead operand into the fixed rax slot.
  if (selector->IsLive(left) && !selector->IsLive(right)) {
    std::swap(left, right);
  }
  InstructionOperand temps[] = {g.TempRegister(rax)};
  // TODO(turbofan): We use UseUniqueRegister here to improve register
  // allocation.
  selector->Emit(opcode, g.DefineAsFixed(node, rdx), g.UseFixed(left, rax),
                 g.UseUniqueRegister(right), arraysize(temps), temps);
}


// Division: dividend fixed in rax, quotient produced in rax; rdx is reserved
// as a temp since the instruction clobbers it.
void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(rdx)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, rax), g.UseFixed(node->InputAt(0), rax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}


// Modulus: dividend fixed in rax, remainder produced in rdx; rax is reserved
// as a temp since the instruction clobbers it.
void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(rax)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, rdx), g.UseFixed(node->InputAt(0), rax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}

}  // namespace
1030 :
1031 :
// Int32Mul: multiplications by 1/2/4/8 (optionally plus the input itself)
// become a single leal with a scaled index; everything else goes to imul.
void InstructionSelector::VisitInt32Mul(Node* node) {
  Int32ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    // For x * (2^k + 1) the input is used as both base and scaled index.
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr,
            kPositiveDisplacement);
    return;
  }
  VisitMul(this, node, kX64Imul32);
}
1043 :
// Int32MulWithOverflow: emit imul with a kOverflow flags continuation when
// the overflow projection is used.
void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
  // TODO(mvstanton): Use Int32ScaleMatcher somehow.
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX64Imul32, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX64Imul32, &cont);
}
1053 :
// 64-bit signed multiply.
void InstructionSelector::VisitInt64Mul(Node* node) {
  VisitMul(this, node, kX64Imul);
}

// High 32 bits of a 32x32 signed multiply.
void InstructionSelector::VisitInt32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX64ImulHigh32);
}
1061 :
1062 :
// Signed/unsigned division, modulus and unsigned mul-high: thin wrappers
// that pick the machine opcode and delegate to the shared helpers above.
void InstructionSelector::VisitInt32Div(Node* node) {
  VisitDiv(this, node, kX64Idiv32);
}


void InstructionSelector::VisitInt64Div(Node* node) {
  VisitDiv(this, node, kX64Idiv);
}


void InstructionSelector::VisitUint32Div(Node* node) {
  VisitDiv(this, node, kX64Udiv32);
}


void InstructionSelector::VisitUint64Div(Node* node) {
  VisitDiv(this, node, kX64Udiv);
}


void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitMod(this, node, kX64Idiv32);
}


void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitMod(this, node, kX64Idiv);
}


void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitMod(this, node, kX64Udiv32);
}


void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitMod(this, node, kX64Udiv);
}


void InstructionSelector::VisitUint32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX64UmulHigh32);
}
1106 :
1107 26 : void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1108 : X64OperandGenerator g(this);
1109 26 : InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1110 78 : InstructionOperand outputs[2];
1111 : size_t output_count = 0;
1112 26 : outputs[output_count++] = g.DefineAsRegister(node);
1113 :
1114 26 : Node* success_output = NodeProperties::FindProjection(node, 1);
1115 26 : if (success_output) {
1116 20 : outputs[output_count++] = g.DefineAsRegister(success_output);
1117 : }
1118 :
1119 26 : Emit(kSSEFloat32ToInt64, output_count, outputs, 1, inputs);
1120 26 : }
1121 :
1122 :
1123 49 : void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1124 : X64OperandGenerator g(this);
1125 49 : InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1126 147 : InstructionOperand outputs[2];
1127 : size_t output_count = 0;
1128 49 : outputs[output_count++] = g.DefineAsRegister(node);
1129 :
1130 49 : Node* success_output = NodeProperties::FindProjection(node, 1);
1131 49 : if (success_output) {
1132 43 : outputs[output_count++] = g.DefineAsRegister(success_output);
1133 : }
1134 :
1135 49 : Emit(kSSEFloat64ToInt64, output_count, outputs, 1, inputs);
1136 49 : }
1137 :
1138 :
1139 26 : void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1140 : X64OperandGenerator g(this);
1141 26 : InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1142 78 : InstructionOperand outputs[2];
1143 : size_t output_count = 0;
1144 26 : outputs[output_count++] = g.DefineAsRegister(node);
1145 :
1146 26 : Node* success_output = NodeProperties::FindProjection(node, 1);
1147 26 : if (success_output) {
1148 20 : outputs[output_count++] = g.DefineAsRegister(success_output);
1149 : }
1150 :
1151 26 : Emit(kSSEFloat32ToUint64, output_count, outputs, 1, inputs);
1152 26 : }
1153 :
1154 :
1155 26 : void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1156 : X64OperandGenerator g(this);
1157 26 : InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1158 78 : InstructionOperand outputs[2];
1159 : size_t output_count = 0;
1160 26 : outputs[output_count++] = g.DefineAsRegister(node);
1161 :
1162 26 : Node* success_output = NodeProperties::FindProjection(node, 1);
1163 26 : if (success_output) {
1164 20 : outputs[output_count++] = g.DefineAsRegister(success_output);
1165 : }
1166 :
1167 26 : Emit(kSSEFloat64ToUint64, output_count, outputs, 1, inputs);
1168 26 : }
1169 :
1170 :
// ChangeInt32ToInt64: when the input is a coverable load, fold the
// sign-extension into the load itself (movsx*/movzx*, chosen by the load's
// representation and signedness); otherwise emit an explicit movsxlq.
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  X64OperandGenerator g(this);
  Node* const value = node->InputAt(0);
  if (value->opcode() == IrOpcode::kLoad && CanCover(node, value)) {
    LoadRepresentation load_rep = LoadRepresentationOf(value->op());
    MachineRepresentation rep = load_rep.representation();
    InstructionCode opcode = kArchNop;
    switch (rep) {
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = load_rep.IsSigned() ? kX64Movsxbq : kX64Movzxbq;
        break;
      case MachineRepresentation::kWord16:
        opcode = load_rep.IsSigned() ? kX64Movsxwq : kX64Movzxwq;
        break;
      case MachineRepresentation::kWord32:
        opcode = load_rep.IsSigned() ? kX64Movsxlq : kX64Movl;
        break;
      default:
        UNREACHABLE();
        return;
    }
    InstructionOperand outputs[] = {g.DefineAsRegister(node)};
    size_t input_count = 0;
    InstructionOperand inputs[3];
    AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
        node->InputAt(0), inputs, &input_count);
    opcode |= AddressingModeField::encode(mode);
    Emit(opcode, 1, outputs, input_count, inputs);
  } else {
    Emit(kX64Movsxlq, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
  }
}
1204 :
namespace {

// Returns true when the given 32-bit producer already leaves the upper
// 32 bits of its 64-bit register cleared, making an explicit
// zero-extension redundant.
bool ZeroExtendsWord32ToWord64(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord32Equal:
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32MulHigh:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kInt32Mod:
    case IrOpcode::kUint32Div:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
    case IrOpcode::kUint32Mod:
    case IrOpcode::kUint32MulHigh:
      // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
      // zero-extension is a no-op.
      return true;
    case IrOpcode::kProjection: {
      // The value projection of an overflow-checked 32-bit op behaves like
      // the plain 32-bit op above.
      Node* const value = node->InputAt(0);
      switch (value->opcode()) {
        case IrOpcode::kInt32AddWithOverflow:
        case IrOpcode::kInt32SubWithOverflow:
        case IrOpcode::kInt32MulWithOverflow:
          return true;
        default:
          return false;
      }
    }
    case IrOpcode::kLoad: {
      // The movzxbl/movsxbl/movzxwl/movsxwl operations implicitly zero-extend
      // to 64-bit on x64,
      // so the zero-extension is a no-op.
      LoadRepresentation load_rep = LoadRepresentationOf(node->op());
      switch (load_rep.representation()) {
        case MachineRepresentation::kWord8:
        case MachineRepresentation::kWord16:
          return true;
        default:
          return false;
      }
    }
    default:
      return false;
  }
}

}  // namespace
1263 :
1264 78216 : void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
1265 : X64OperandGenerator g(this);
1266 : Node* value = node->InputAt(0);
1267 78216 : if (ZeroExtendsWord32ToWord64(value)) {
1268 : // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
1269 : // zero-extension is a no-op.
1270 129469 : return EmitIdentity(node);
1271 : }
1272 26963 : Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
1273 : }
1274 :
1275 :
namespace {

// Result register, operand register/operand/immediate.
void VisitRO(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


// Result register, operand register.
void VisitRR(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)));
}

// Destructive two-operand form: result shares the first input's register.
void VisitRRO(InstructionSelector* selector, Node* node,
              InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineSameAsFirst(node),
                 g.UseRegister(node->InputAt(0)), g.Use(node->InputAt(1)));
}

// Floating-point binop: the AVX encoding is non-destructive (three-operand),
// the SSE encoding overwrites its first operand.
void VisitFloatBinop(InstructionSelector* selector, Node* node,
                     ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand operand0 = g.UseRegister(node->InputAt(0));
  InstructionOperand operand1 = g.Use(node->InputAt(1));
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1);
  } else {
    selector->Emit(sse_opcode, g.DefineSameAsFirst(node), operand0, operand1);
  }
}


// Floating-point unop: same AVX/SSE destructiveness split as VisitFloatBinop.
void VisitFloatUnop(InstructionSelector* selector, Node* node, Node* input,
                    ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), g.Use(input));
  } else {
    selector->Emit(sse_opcode, g.DefineSameAsFirst(node), g.UseRegister(input));
  }
}

}  // namespace
1323 :
// Table of operations lowered uniformly via VisitRO (register result,
// register/memory/immediate operand).
#define RO_OP_LIST(V)                                                    \
  V(Word64Clz, kX64Lzcnt)                                                \
  V(Word32Clz, kX64Lzcnt32)                                              \
  V(Word64Ctz, kX64Tzcnt)                                                \
  V(Word32Ctz, kX64Tzcnt32)                                              \
  V(Word64Popcnt, kX64Popcnt)                                            \
  V(Word32Popcnt, kX64Popcnt32)                                          \
  V(Float64Sqrt, kSSEFloat64Sqrt)                                        \
  V(Float32Sqrt, kSSEFloat32Sqrt)                                        \
  V(ChangeFloat64ToInt32, kSSEFloat64ToInt32)                            \
  V(ChangeFloat64ToUint32, kSSEFloat64ToUint32 | MiscField::encode(1))   \
  V(TruncateFloat64ToUint32, kSSEFloat64ToUint32 | MiscField::encode(0)) \
  V(ChangeFloat64ToUint64, kSSEFloat64ToUint64)                          \
  V(TruncateFloat64ToFloat32, kSSEFloat64ToFloat32)                      \
  V(ChangeFloat32ToFloat64, kSSEFloat32ToFloat64)                        \
  V(TruncateFloat32ToInt32, kSSEFloat32ToInt32)                          \
  V(TruncateFloat32ToUint32, kSSEFloat32ToUint32)                        \
  V(ChangeInt32ToFloat64, kSSEInt32ToFloat64)                            \
  V(ChangeUint32ToFloat64, kSSEUint32ToFloat64)                          \
  V(RoundFloat64ToInt32, kSSEFloat64ToInt32)                             \
  V(RoundInt32ToFloat32, kSSEInt32ToFloat32)                             \
  V(RoundInt64ToFloat32, kSSEInt64ToFloat32)                             \
  V(RoundInt64ToFloat64, kSSEInt64ToFloat64)                             \
  V(RoundUint32ToFloat32, kSSEUint32ToFloat32)                           \
  V(BitcastFloat32ToInt32, kX64BitcastFI)                                \
  V(BitcastFloat64ToInt64, kX64BitcastDL)                                \
  V(BitcastInt32ToFloat32, kX64BitcastIF)                                \
  V(BitcastInt64ToFloat64, kX64BitcastLD)                                \
  V(Float64ExtractLowWord32, kSSEFloat64ExtractLowWord32)                \
  V(Float64ExtractHighWord32, kSSEFloat64ExtractHighWord32)

// Table of operations lowered uniformly via VisitRR (register result,
// register operand); the MiscField selects the rounding mode.
#define RR_OP_LIST(V)                                                         \
  V(Float32RoundDown, kSSEFloat32Round | MiscField::encode(kRoundDown))       \
  V(Float64RoundDown, kSSEFloat64Round | MiscField::encode(kRoundDown))       \
  V(Float32RoundUp, kSSEFloat32Round | MiscField::encode(kRoundUp))           \
  V(Float64RoundUp, kSSEFloat64Round | MiscField::encode(kRoundUp))           \
  V(Float32RoundTruncate, kSSEFloat32Round | MiscField::encode(kRoundToZero)) \
  V(Float64RoundTruncate, kSSEFloat64Round | MiscField::encode(kRoundToZero)) \
  V(Float32RoundTiesEven,                                                     \
    kSSEFloat32Round | MiscField::encode(kRoundToNearest))                    \
  V(Float64RoundTiesEven, kSSEFloat64Round | MiscField::encode(kRoundToNearest))

// Expand one InstructionSelector::VisitXxx per RO_OP_LIST entry.
#define RO_VISITOR(Name, opcode)                      \
  void InstructionSelector::Visit##Name(Node* node) { \
    VisitRO(this, node, opcode);                      \
  }
RO_OP_LIST(RO_VISITOR)
#undef RO_VISITOR

// Expand one InstructionSelector::VisitXxx per RR_OP_LIST entry.
#define RR_VISITOR(Name, opcode)                      \
  void InstructionSelector::Visit##Name(Node* node) { \
    VisitRR(this, node, opcode);                      \
  }
RR_OP_LIST(RR_VISITOR)
#undef RR_VISITOR
1379 :
// TruncateFloat64ToWord32: JS-semantics double-to-int32 truncation.
void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  VisitRR(this, node, kArchTruncateDoubleToI);
}

// TruncateInt64ToInt32: when the input is (x >> 32) or (x >>> 32), take the
// high half directly — either folded into a 32-bit load or as a 64-bit shr.
// Otherwise a movl truncates by writing the low 32 bits.
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  if (CanCover(node, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord64Sar:
      case IrOpcode::kWord64Shr: {
        Int64BinopMatcher m(value);
        if (m.right().Is(32)) {
          if (TryMatchLoadWord64AndShiftRight(this, value, kX64Movl)) {
            return EmitIdentity(node);
          }
          Emit(kX64Shr, g.DefineSameAsFirst(node),
               g.UseRegister(m.left().node()), g.TempImmediate(32));
          return;
        }
        break;
      }
      default:
        break;
    }
  }
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}
1408 :
// RoundUint64ToFloat32: unsigned conversion needs a scratch register.
void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister()};
  Emit(kSSEUint64ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
       arraysize(temps), temps);
}


// RoundUint64ToFloat64: unsigned conversion needs a scratch register.
void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister()};
  Emit(kSSEUint64ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
       arraysize(temps), temps);
}
1423 :
// Float32 arithmetic wrappers: binops pick the AVX or SSE encoding via
// VisitFloatBinop/VisitFloatUnop; min/max use the destructive RRO form.
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Add, kSSEFloat32Add);
}


void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub);
}

void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Mul, kSSEFloat32Mul);
}


void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Div, kSSEFloat32Div);
}


void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Abs, kSSEFloat32Abs);
}


void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitRRO(this, node, kSSEFloat32Max);
}

void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitRRO(this, node, kSSEFloat32Min);
}
1455 :
// Float64 arithmetic wrappers: binops pick the AVX or SSE encoding; Mod is
// special-cased below because its lowering needs rax as a scratch register.
void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Add, kSSEFloat64Add);
}


void InstructionSelector::VisitFloat64Sub(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub);
}

void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Mul, kSSEFloat64Mul);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Div, kSSEFloat64Div);
}


// Float64Mod: destructive, with rax reserved as a temp for the lowering.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister(rax)};
  Emit(kSSEFloat64Mod, g.DefineSameAsFirst(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)), 1,
       temps);
}


void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitRRO(this, node, kSSEFloat64Max);
}


void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitRRO(this, node, kSSEFloat64Min);
}


void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Abs, kSSEFloat64Abs);
}
1497 :
1498 :
// Round-ties-away is not expected to reach this backend (hence UNREACHABLE).
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}


// Floating-point negation, AVX or SSE encoding.
void InstructionSelector::VisitFloat32Neg(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Neg, kSSEFloat32Neg);
}

void InstructionSelector::VisitFloat64Neg(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Neg, kSSEFloat64Neg);
}
1511 :
// IEEE-754 binary math function (e.g. pow): lowered as a call with inputs
// fixed in xmm0/xmm1 and the result in xmm0, and marked as a call so the
// register allocator treats it as clobbering.
void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                   InstructionCode opcode) {
  X64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0),
       g.UseFixed(node->InputAt(1), xmm1))
      ->MarkAsCall();
}

// IEEE-754 unary math function (e.g. sin): input and result fixed in xmm0.
void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  X64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0))
      ->MarkAsCall();
}
1526 :
// Emits the argument set-up preceding a call. C calls write arguments into
// pre-reserved slots (kX64Poke); JS/stub calls push arguments right-to-left,
// choosing for each the cheapest encoding: immediate push, memory-operand
// push, or a generic push of the operand.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  X64OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->ParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      if (input.node()) {
        int slot = static_cast<int>(n);
        InstructionOperand value = g.CanBeImmediate(input.node())
                                       ? g.UseImmediate(input.node())
                                       : g.UseRegister(input.node());
        Emit(kX64Poke | MiscField::encode(slot), g.NoOutput(), value);
      }
    }
  } else {
    // Push any stack arguments.
    int effect_level = GetEffectLevel(node);
    for (PushParameter input : base::Reversed(*arguments)) {
      Node* input_node = input.node();
      if (g.CanBeImmediate(input_node)) {
        Emit(kX64Push, g.NoOutput(), g.UseImmediate(input_node));
      } else if (IsSupported(ATOM) ||
                 sequence()->IsFP(GetVirtualRegister(input_node))) {
        // TODO(titzer): X64Push cannot handle stack->stack double moves
        // because there is no way to encode fixed double slots.
        Emit(kX64Push, g.NoOutput(), g.UseRegister(input_node));
      } else if (g.CanBeMemoryOperand(kX64Push, node, input_node,
                                      effect_level)) {
        // Push straight from memory, folding the load into the push.
        InstructionOperand outputs[1];
        InstructionOperand inputs[4];
        size_t input_count = 0;
        InstructionCode opcode = kX64Push;
        AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
            input_node, inputs, &input_count);
        opcode |= AddressingModeField::encode(mode);
        Emit(opcode, 0, outputs, input_count, inputs);
      } else {
        Emit(kX64Push, g.NoOutput(), g.Use(input_node));
      }
    }
  }
}
1577 :
1578 :
// On x64 a tail-call target address can always be encoded as an immediate.
bool InstructionSelector::IsTailCallAddressImmediate() { return true; }

// Number of temporaries needed for a tail call out of a JSFunction frame.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
1582 :
1583 : namespace {
1584 :
// Emits a comparison whose left operand is a load folded into a memory
// addressing mode, dispatching on the continuation kind (branch, deopt,
// materialized boolean, or trap).
void VisitCompareWithMemoryOperand(InstructionSelector* selector,
                                   InstructionCode opcode, Node* left,
                                   InstructionOperand right,
                                   FlagsContinuation* cont) {
  DCHECK(left->opcode() == IrOpcode::kLoad);
  X64OperandGenerator g(selector);
  size_t input_count = 0;
  InstructionOperand inputs[6];
  AddressingMode addressing_mode =
      g.GetEffectiveAddressMemoryOperand(left, inputs, &input_count);
  opcode |= AddressingModeField::encode(addressing_mode);
  opcode = cont->Encode(opcode);
  inputs[input_count++] = right;

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, 0, nullptr, input_count, inputs,
                             cont->kind(), cont->reason(), cont->frame_state());
  } else if (cont->IsSet()) {
    // Materialize the flag into a register.
    InstructionOperand output = g.DefineAsRegister(cont->result());
    selector->Emit(opcode, 1, &output, input_count, inputs);
  } else {
    DCHECK(cont->IsTrap());
    inputs[input_count++] = g.UseImmediate(cont->trap_id());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  }
}
1615 :
// Shared routine for multiple compare operations. Takes already-built
// operands and dispatches on the continuation kind (branch, deopt,
// materialized boolean, or trap).
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right, cont->kind(),
                             cont->reason(), cont->frame_state());
  } else if (cont->IsSet()) {
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  } else {
    DCHECK(cont->IsTrap());
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.UseImmediate(cont->trap_id()));
  }
}
1636 :
1637 :
// Shared routine for multiple compare operations. Node-based overload:
// optionally swaps commutative operands for better operand placement, then
// delegates to the operand-based VisitCompare above.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  Node* left, Node* right, FlagsContinuation* cont,
                  bool commutative) {
  X64OperandGenerator g(selector);
  if (commutative && g.CanBeBetterLeftOperand(right)) {
    std::swap(left, right);
  }
  VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
}
1648 :
// Determines the machine type to use when narrowing a comparison operand.
// If the other operand (hint_node) is a load and this node is a constant
// that fits the load's representation, the comparison can be done at the
// load's width; otherwise returns the node's own load type, or None.
MachineType MachineTypeForNarrow(Node* node, Node* hint_node) {
  if (hint_node->opcode() == IrOpcode::kLoad) {
    MachineType hint = LoadRepresentationOf(hint_node->op());
    if (node->opcode() == IrOpcode::kInt32Constant ||
        node->opcode() == IrOpcode::kInt64Constant) {
      int64_t constant = node->opcode() == IrOpcode::kInt32Constant
                             ? OpParameter<int32_t>(node)
                             : OpParameter<int64_t>(node);
      if (hint == MachineType::Int8()) {
        if (constant >= std::numeric_limits<int8_t>::min() &&
            constant <= std::numeric_limits<int8_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Uint8()) {
        if (constant >= std::numeric_limits<uint8_t>::min() &&
            constant <= std::numeric_limits<uint8_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Int16()) {
        if (constant >= std::numeric_limits<int16_t>::min() &&
            constant <= std::numeric_limits<int16_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Uint16()) {
        if (constant >= std::numeric_limits<uint16_t>::min() &&
            constant <= std::numeric_limits<uint16_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Int32()) {
        return hint;
      } else if (hint == MachineType::Uint32()) {
        if (constant >= 0) return hint;
      }
    }
  }
  // No usable hint: fall back to this node's own load representation.
  return node->opcode() == IrOpcode::kLoad ? LoadRepresentationOf(node->op())
                                           : MachineType::None();
}
1687 :
// Tries to match the size of the given opcode to that of the operands, if
// possible. Only kX64Test32/kX64Cmp32 are narrowed, and only when both
// operands agree on a narrower machine type; otherwise the opcode is
// returned unchanged.
InstructionCode TryNarrowOpcodeSize(InstructionCode opcode, Node* left,
                                    Node* right, FlagsContinuation* cont) {
  // TODO(epertoso): we can probably get some size information out phi nodes.
  // If the load representations don't match, both operands will be
  // zero/sign-extended to 32bit.
  MachineType left_type = MachineTypeForNarrow(left, right);
  MachineType right_type = MachineTypeForNarrow(right, left);
  if (left_type == right_type) {
    switch (left_type.representation()) {
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8: {
        if (opcode == kX64Test32) return kX64Test8;
        if (opcode == kX64Cmp32) {
          if (left_type.semantic() == MachineSemantic::kUint32) {
            // Narrowed unsigned compares must use unsigned flag conditions.
            cont->OverwriteUnsignedIfSigned();
          } else {
            CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
          }
          return kX64Cmp8;
        }
        break;
      }
      case MachineRepresentation::kWord16:
        if (opcode == kX64Test32) return kX64Test16;
        if (opcode == kX64Cmp32) {
          if (left_type.semantic() == MachineSemantic::kUint32) {
            cont->OverwriteUnsignedIfSigned();
          } else {
            CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
          }
          return kX64Cmp16;
        }
        break;
      default:
        break;
    }
  }
  // No narrowing opportunity: keep the original opcode.
  return opcode;
}
1729 :
// Shared routine for multiple word compare operations. Narrows the opcode
// when both operands allow it, canonicalizes operand order (immediates on
// the right, memory operands on the left), and emits the compare with the
// best available addressing form.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  opcode = TryNarrowOpcodeSize(opcode, left, right, cont);

  // If one of the two inputs is an immediate, make sure it's on the right, or
  // if one of the two inputs is a memory operand, make sure it's on the left.
  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    // For branches, loads may be folded only if they are still valid at the
    // branch itself, so use the branch point's effect level.
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }

  if ((!g.CanBeImmediate(right) && g.CanBeImmediate(left)) ||
      (g.CanBeMemoryOperand(opcode, node, right, effect_level) &&
       !g.CanBeMemoryOperand(opcode, node, left, effect_level))) {
    // Swapping a non-commutative comparison requires commuting the
    // continuation's condition as well.
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  // Match immediates on right side of comparison.
  if (g.CanBeImmediate(right)) {
    if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
      return VisitCompareWithMemoryOperand(selector, opcode, left,
                                           g.UseImmediate(right), cont);
    }
    return VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right),
                        cont);
  }

  // Match memory operands on left side of comparison.
  if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
    return VisitCompareWithMemoryOperand(selector, opcode, left,
                                         g.UseRegister(right), cont);
  }

  // Fall back to a register/operand compare.
  return VisitCompare(selector, opcode, left, right, cont,
                      node->op()->HasProperty(Operator::kCommutative));
}
1773 :
// Shared routine for 64-bit word comparison operations. Recognizes two
// special patterns before falling back to the generic word compare:
// (1) comparison against a heap root, emitted root-register-relative, and
// (2) the stack-limit check Compare(Load(js_stack_limit), LoadStackPointer),
// emitted as a dedicated kX64StackCheck instruction.
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  if (selector->CanUseRootsRegister()) {
    Heap* const heap = selector->isolate()->heap();
    Heap::RootListIndex root_index;
    HeapObjectBinopMatcher m(node);
    if (m.right().HasValue() &&
        heap->IsRootHandle(m.right().Value(), &root_index)) {
      // Root on the right: commute so the root becomes the (immediate) left
      // operand of the emitted compare.
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode =
          kX64Cmp | AddressingModeField::encode(kMode_Root);
      return VisitCompare(
          selector, opcode,
          g.TempImmediate((root_index * kPointerSize) - kRootRegisterBias),
          g.UseRegister(m.left().node()), cont);
    } else if (m.left().HasValue() &&
               heap->IsRootHandle(m.left().Value(), &root_index)) {
      InstructionCode opcode =
          kX64Cmp | AddressingModeField::encode(kMode_Root);
      return VisitCompare(
          selector, opcode,
          g.TempImmediate((root_index * kPointerSize) - kRootRegisterBias),
          g.UseRegister(m.right().node()), cont);
    }
  }
  Int64BinopMatcher m(node);
  if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
    LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
    ExternalReference js_stack_limit =
        ExternalReference::address_of_stack_limit(selector->isolate());
    if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
      // Compare(Load(js_stack_limit), LoadStackPointer)
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode = cont->Encode(kX64StackCheck);
      // kX64StackCheck takes no explicit operands, so the continuation is
      // consumed here directly rather than via VisitCompare.
      if (cont->IsBranch()) {
        selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                       g.Label(cont->false_block()));
      } else if (cont->IsDeoptimize()) {
        selector->EmitDeoptimize(opcode, 0, nullptr, 0, nullptr, cont->kind(),
                                 cont->reason(), cont->frame_state());
      } else if (cont->IsSet()) {
        selector->Emit(opcode, g.DefineAsRegister(cont->result()));
      } else {
        DCHECK(cont->IsTrap());
        selector->Emit(opcode, g.NoOutput(), g.UseImmediate(cont->trap_id()));
      }
      return;
    }
  }
  // Generic 64-bit compare.
  VisitWordCompare(selector, node, kX64Cmp, cont);
}
1827 :
1828 :
1829 : // Shared routine for comparison with zero.
1830 361899 : void VisitCompareZero(InstructionSelector* selector, Node* node,
1831 : InstructionCode opcode, FlagsContinuation* cont) {
1832 : X64OperandGenerator g(selector);
1833 361899 : VisitCompare(selector, opcode, g.Use(node), g.TempImmediate(0), cont);
1834 361899 : }
1835 :
1836 :
1837 : // Shared routine for multiple float32 compare operations (inputs commuted).
1838 1258 : void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1839 : FlagsContinuation* cont) {
1840 : Node* const left = node->InputAt(0);
1841 : Node* const right = node->InputAt(1);
1842 : InstructionCode const opcode =
1843 1258 : selector->IsSupported(AVX) ? kAVXFloat32Cmp : kSSEFloat32Cmp;
1844 1258 : VisitCompare(selector, opcode, right, left, cont, false);
1845 1258 : }
1846 :
1847 :
1848 : // Shared routine for multiple float64 compare operations (inputs commuted).
1849 121158 : void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1850 : FlagsContinuation* cont) {
1851 : Node* const left = node->InputAt(0);
1852 : Node* const right = node->InputAt(1);
1853 : InstructionCode const opcode =
1854 121158 : selector->IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
1855 121158 : VisitCompare(selector, opcode, right, left, cont, false);
1856 121159 : }
1857 :
// Shared routine for word comparison against zero. This is the central
// flags-fusion entry point: it tries to fold the flag-producing |value|
// (compares, overflow projections, float compares, sub/and against zero)
// directly into the continuation instead of materializing a boolean first.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  // Try to combine with comparisons against 0 by simply inverting the branch.
  while (value->opcode() == IrOpcode::kWord32Equal &&
         selector->CanCover(user, value)) {
    Int32BinopMatcher m(value);
    if (!m.right().Is(0)) break;

    // Peel off the (x == 0) wrapper and negate the continuation instead.
    user = value;
    value = m.left().node();
    cont->Negate();
  }

  // Only fuse when |value| is exclusively consumed here.
  if (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kWord64Equal: {
        cont->OverwriteAndNegateIfEqual(kEqual);
        Int64BinopMatcher m(value);
        if (m.right().Is(0)) {
          // Try to combine the branch with a comparison.
          Node* const user = m.node();
          Node* const value = m.left().node();
          if (selector->CanCover(user, value)) {
            switch (value->opcode()) {
              case IrOpcode::kInt64Sub:
                return VisitWord64Compare(selector, value, cont);
              case IrOpcode::kWord64And:
                return VisitWordCompare(selector, value, kX64Test, cont);
              default:
                break;
            }
          }
          return VisitCompareZero(selector, value, kX64Cmp, cont);
        }
        return VisitWord64Compare(selector, value, cont);
      }
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        // Float compares use unsigned conditions; inputs are commuted by
        // VisitFloat32Compare.
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan: {
        Float64BinopMatcher m(value);
        if (m.left().Is(0.0) && m.right().IsFloat64Abs()) {
          // This matches the pattern
          //
          // Float64LessThan(#0.0, Float64Abs(x))
          //
          // which TurboFan generates for NumberToBoolean in the general case,
          // and which evaluates to false if x is 0, -0 or NaN. We can compile
          // this to a simple (v)ucomisd using not_equal flags condition, which
          // avoids the costly Float64Abs.
          cont->OverwriteAndNegateIfEqual(kNotEqual);
          InstructionCode const opcode =
              selector->IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
          return VisitCompare(selector, opcode, m.left().node(),
                              m.right().InputAt(0), cont, false);
        }
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat64Compare(selector, value, cont);
      }
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Add32, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Sub32, cont);
              case IrOpcode::kInt32MulWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Imul32, cont);
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Add, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Sub, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kWord32And:
        return VisitWordCompare(selector, value, kX64Test32, cont);
      default:
        break;
    }
  }

  // Branch could not be combined with a compare, emit compare against 0.
  VisitCompareZero(selector, value, kX64Cmp32, cont);
}
2003 :
2004 : } // namespace
2005 :
2006 3308740 : void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
2007 : BasicBlock* fbranch) {
2008 : FlagsContinuation cont(kNotEqual, tbranch, fbranch);
2009 3308740 : VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
2010 3308628 : }
2011 :
2012 95921 : void InstructionSelector::VisitDeoptimizeIf(Node* node) {
2013 95921 : DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
2014 : FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
2015 : kNotEqual, p.kind(), p.reason(), node->InputAt(1));
2016 95923 : VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2017 95923 : }
2018 :
2019 170751 : void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
2020 170751 : DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
2021 : FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
2022 : kEqual, p.kind(), p.reason(), node->InputAt(1));
2023 170756 : VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2024 170756 : }
2025 :
2026 657 : void InstructionSelector::VisitTrapIf(Node* node, Runtime::FunctionId func_id) {
2027 : FlagsContinuation cont =
2028 : FlagsContinuation::ForTrap(kNotEqual, func_id, node->InputAt(1));
2029 657 : VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2030 657 : }
2031 :
2032 15642 : void InstructionSelector::VisitTrapUnless(Node* node,
2033 : Runtime::FunctionId func_id) {
2034 : FlagsContinuation cont =
2035 : FlagsContinuation::ForTrap(kEqual, func_id, node->InputAt(1));
2036 15642 : VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2037 15643 : }
2038 :
// Lowers a switch either to a jump table (ArchTableSwitch) or to a chain of
// conditional jumps (ArchLookupSwitch), choosing by a simple space/time cost
// model over the case count and value range.
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  X64OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  // The min_value guard avoids overflow when negating it below.
  if (sw.case_count > 4 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = g.TempRegister();
    if (sw.min_value) {
      // The leal automatically zero extends, so result is a valid 64-bit index.
      Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI), index_operand,
           value_operand, g.TempImmediate(-sw.min_value));
    } else {
      // Zero extend, because we use it as 64-bit index into the jump table.
      Emit(kX64Movl, index_operand, value_operand);
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}
2068 :
2069 :
2070 36265 : void InstructionSelector::VisitWord32Equal(Node* const node) {
2071 : Node* user = node;
2072 : FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
2073 36265 : Int32BinopMatcher m(user);
2074 36265 : if (m.right().Is(0)) {
2075 67186 : return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
2076 : }
2077 5344 : VisitWordCompare(this, node, kX64Cmp32, &cont);
2078 : }
2079 :
2080 :
2081 1827 : void InstructionSelector::VisitInt32LessThan(Node* node) {
2082 : FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
2083 1827 : VisitWordCompare(this, node, kX64Cmp32, &cont);
2084 1827 : }
2085 :
2086 :
2087 1199 : void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
2088 : FlagsContinuation cont =
2089 : FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
2090 1199 : VisitWordCompare(this, node, kX64Cmp32, &cont);
2091 1199 : }
2092 :
2093 :
2094 2506 : void InstructionSelector::VisitUint32LessThan(Node* node) {
2095 : FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
2096 2506 : VisitWordCompare(this, node, kX64Cmp32, &cont);
2097 2506 : }
2098 :
2099 :
2100 3734 : void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
2101 : FlagsContinuation cont =
2102 : FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
2103 3734 : VisitWordCompare(this, node, kX64Cmp32, &cont);
2104 3734 : }
2105 :
2106 :
// Materializes a 64-bit equality comparison as a boolean, fusing a
// flag-producing Int64Sub or Word64And input when comparing against zero.
void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    // Try to combine the equality check with a comparison.
    Node* const user = m.node();
    Node* const value = m.left().node();
    if (CanCover(user, value)) {
      switch (value->opcode()) {
        case IrOpcode::kInt64Sub:
          return VisitWord64Compare(this, value, &cont);
        case IrOpcode::kWord64And:
          // (x & mask) == 0 becomes a test instruction.
          return VisitWordCompare(this, value, kX64Test, &cont);
        default:
          break;
      }
    }
  }
  VisitWord64Compare(this, node, &cont);
}
2127 :
2128 :
2129 18150 : void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
2130 18150 : if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
2131 : FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
2132 36300 : return VisitBinop(this, node, kX64Add32, &cont);
2133 : }
2134 : FlagsContinuation cont;
2135 0 : VisitBinop(this, node, kX64Add32, &cont);
2136 : }
2137 :
2138 :
2139 18150 : void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
2140 18150 : if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
2141 : FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
2142 36300 : return VisitBinop(this, node, kX64Sub32, &cont);
2143 : }
2144 : FlagsContinuation cont;
2145 0 : VisitBinop(this, node, kX64Sub32, &cont);
2146 : }
2147 :
2148 :
2149 577 : void InstructionSelector::VisitInt64LessThan(Node* node) {
2150 : FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
2151 577 : VisitWord64Compare(this, node, &cont);
2152 577 : }
2153 :
2154 :
2155 533 : void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
2156 : FlagsContinuation cont =
2157 : FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
2158 533 : VisitWord64Compare(this, node, &cont);
2159 533 : }
2160 :
2161 :
2162 427 : void InstructionSelector::VisitUint64LessThan(Node* node) {
2163 : FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
2164 427 : VisitWord64Compare(this, node, &cont);
2165 427 : }
2166 :
2167 :
2168 426 : void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
2169 : FlagsContinuation cont =
2170 : FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
2171 426 : VisitWord64Compare(this, node, &cont);
2172 426 : }
2173 :
2174 :
2175 121 : void InstructionSelector::VisitFloat32Equal(Node* node) {
2176 : FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
2177 121 : VisitFloat32Compare(this, node, &cont);
2178 121 : }
2179 :
2180 :
2181 59 : void InstructionSelector::VisitFloat32LessThan(Node* node) {
2182 : FlagsContinuation cont =
2183 : FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
2184 59 : VisitFloat32Compare(this, node, &cont);
2185 59 : }
2186 :
2187 :
2188 49 : void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
2189 : FlagsContinuation cont =
2190 : FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
2191 49 : VisitFloat32Compare(this, node, &cont);
2192 49 : }
2193 :
2194 :
2195 5913 : void InstructionSelector::VisitFloat64Equal(Node* node) {
2196 : FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
2197 5913 : VisitFloat64Compare(this, node, &cont);
2198 5913 : }
2199 :
2200 7359 : void InstructionSelector::VisitFloat64LessThan(Node* node) {
2201 7359 : Float64BinopMatcher m(node);
2202 13022 : if (m.left().Is(0.0) && m.right().IsFloat64Abs()) {
2203 : // This matches the pattern
2204 : //
2205 : // Float64LessThan(#0.0, Float64Abs(x))
2206 : //
2207 : // which TurboFan generates for NumberToBoolean in the general case,
2208 : // and which evaluates to false if x is 0, -0 or NaN. We can compile
2209 : // this to a simple (v)ucomisd using not_equal flags condition, which
2210 : // avoids the costly Float64Abs.
2211 : FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, node);
2212 : InstructionCode const opcode =
2213 5522 : IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
2214 : return VisitCompare(this, opcode, m.left().node(), m.right().InputAt(0),
2215 12881 : &cont, false);
2216 : }
2217 : FlagsContinuation cont =
2218 : FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
2219 1837 : VisitFloat64Compare(this, node, &cont);
2220 : }
2221 :
2222 1445 : void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
2223 : FlagsContinuation cont =
2224 : FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
2225 1445 : VisitFloat64Compare(this, node, &cont);
2226 1445 : }
2227 :
2228 6 : void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
2229 : X64OperandGenerator g(this);
2230 : Node* left = node->InputAt(0);
2231 : Node* right = node->InputAt(1);
2232 : Float64Matcher mleft(left);
2233 6 : if (mleft.HasValue() && (bit_cast<uint64_t>(mleft.Value()) >> 32) == 0u) {
2234 0 : Emit(kSSEFloat64LoadLowWord32, g.DefineAsRegister(node), g.Use(right));
2235 6 : return;
2236 : }
2237 : Emit(kSSEFloat64InsertLowWord32, g.DefineSameAsFirst(node),
2238 6 : g.UseRegister(left), g.Use(right));
2239 : }
2240 :
2241 :
2242 6 : void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
2243 : X64OperandGenerator g(this);
2244 : Node* left = node->InputAt(0);
2245 : Node* right = node->InputAt(1);
2246 : Emit(kSSEFloat64InsertHighWord32, g.DefineSameAsFirst(node),
2247 6 : g.UseRegister(left), g.Use(right));
2248 6 : }
2249 :
2250 1765 : void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
2251 : X64OperandGenerator g(this);
2252 : Emit(kSSEFloat64SilenceNaN, g.DefineSameAsFirst(node),
2253 1765 : g.UseRegister(node->InputAt(0)));
2254 1765 : }
2255 :
2256 258 : void InstructionSelector::VisitAtomicLoad(Node* node) {
2257 258 : LoadRepresentation load_rep = LoadRepresentationOf(node->op());
2258 : DCHECK(load_rep.representation() == MachineRepresentation::kWord8 ||
2259 : load_rep.representation() == MachineRepresentation::kWord16 ||
2260 : load_rep.representation() == MachineRepresentation::kWord32);
2261 : USE(load_rep);
2262 258 : VisitLoad(node);
2263 258 : }
2264 :
// Lowers an atomic store to an atomic-exchange instruction whose result is
// discarded (zero outputs).
void InstructionSelector::VisitAtomicStore(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  // Pick the exchange opcode matching the store representation.
  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kAtomicExchangeInt8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kAtomicExchangeInt16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kAtomicExchangeWord32;
      break;
    default:
      UNREACHABLE();
      return;
  }
  AddressingMode addressing_mode;
  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(value);
  inputs[input_count++] = g.UseUniqueRegister(base);
  // Immediate index uses base+displacement; otherwise base+index registers.
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  // Zero outputs: the exchanged-out value is ignored for a store.
  Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs);
}
2302 :
// Lowers an atomic exchange; the result is defined same-as-first so the
// exchanged-out value comes back in the value operand's register.
void InstructionSelector::VisitAtomicExchange(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  // Pick the opcode matching the operation's machine type.
  MachineType type = AtomicOpRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Int8()) {
    opcode = kAtomicExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kAtomicExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kAtomicExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kAtomicExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kAtomicExchangeWord32;
  } else {
    UNREACHABLE();
    return;
  }
  InstructionOperand outputs[1];
  AddressingMode addressing_mode;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(value);
  inputs[input_count++] = g.UseUniqueRegister(base);
  // Immediate index uses base+displacement; otherwise base+index registers.
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  outputs[0] = g.DefineSameAsFirst(node);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 1, outputs, input_count, inputs);
}
2342 :
// Lowers an atomic compare-and-exchange. The expected (old) value and the
// result are both fixed to rax, matching the cmpxchg register convention
// used by the code generator.
void InstructionSelector::VisitAtomicCompareExchange(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* old_value = node->InputAt(2);
  Node* new_value = node->InputAt(3);

  // Pick the opcode matching the operation's machine type.
  MachineType type = AtomicOpRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Int8()) {
    opcode = kAtomicCompareExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kAtomicCompareExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kAtomicCompareExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kAtomicCompareExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kAtomicCompareExchangeWord32;
  } else {
    UNREACHABLE();
    return;
  }
  InstructionOperand outputs[1];
  AddressingMode addressing_mode;
  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseFixed(old_value, rax);
  inputs[input_count++] = g.UseUniqueRegister(new_value);
  inputs[input_count++] = g.UseUniqueRegister(base);
  // Immediate index uses base+displacement; otherwise base+index registers.
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  outputs[0] = g.DefineAsFixed(node, rax);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 1, outputs, input_count, inputs);
}
2384 :
// Shared lowering for atomic read-modify-write binops (Add/Sub/And/Or/Xor).
// Selects the size-specific opcode from the operation's machine type, fixes
// the result to rax, and reserves one temp register for the code generator.
void InstructionSelector::VisitAtomicBinaryOperation(
    Node* node, ArchOpcode int8_op, ArchOpcode uint8_op, ArchOpcode int16_op,
    ArchOpcode uint16_op, ArchOpcode word32_op) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  MachineType type = AtomicOpRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Int8()) {
    opcode = int8_op;
  } else if (type == MachineType::Uint8()) {
    opcode = uint8_op;
  } else if (type == MachineType::Int16()) {
    opcode = int16_op;
  } else if (type == MachineType::Uint16()) {
    opcode = uint16_op;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = word32_op;
  } else {
    UNREACHABLE();
    return;
  }
  InstructionOperand outputs[1];
  AddressingMode addressing_mode;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(value);
  inputs[input_count++] = g.UseUniqueRegister(base);
  // Immediate index uses base+displacement; otherwise base+index registers.
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  outputs[0] = g.DefineAsFixed(node, rax);
  InstructionOperand temp[1];
  temp[0] = g.TempRegister();
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 1, outputs, input_count, inputs, 1, temp);
}
2428 :
// Stamps out InstructionSelector::VisitAtomicAdd, ...Sub, ...And, ...Or and
// ...Xor. Each forwards to VisitAtomicBinaryOperation with the five
// per-representation opcodes for that operation.
#define VISIT_ATOMIC_BINOP(op)                                              \
  void InstructionSelector::VisitAtomic##op(Node* node) {                   \
    VisitAtomicBinaryOperation(node, kAtomic##op##Int8, kAtomic##op##Uint8, \
                               kAtomic##op##Int16, kAtomic##op##Uint16,     \
                               kAtomic##op##Word32);                        \
  }
VISIT_ATOMIC_BINOP(Add)
VISIT_ATOMIC_BINOP(Sub)
VISIT_ATOMIC_BINOP(And)
VISIT_ATOMIC_BINOP(Or)
VISIT_ATOMIC_BINOP(Xor)
#undef VISIT_ATOMIC_BINOP
2441 :
// Integer SIMD lane configurations that get generic Visit* implementations
// via the X-macros below.
#define SIMD_TYPES(V) \
  V(I32x4)            \
  V(I16x8)            \
  V(I8x16)

// Boolean-vector formats used to stamp out the S<format>Select visitors.
#define SIMD_FORMAT_LIST(V) \
  V(32x4)                   \
  V(16x8)                   \
  V(8x16)

// Operations that produce an all-zero 128-bit value (including the
// all-false boolean vectors); all of them share one lowering.
#define SIMD_ZERO_OP_LIST(V) \
  V(S128Zero)                \
  V(S1x4Zero)                \
  V(S1x8Zero)                \
  V(S1x16Zero)

// Two-operand SIMD arithmetic/comparison operations handled by the common
// VISIT_SIMD_BINOP expansion below.
#define SIMD_BINOP_LIST(V) \
  V(I32x4Add)              \
  V(I32x4Sub)              \
  V(I32x4Mul)              \
  V(I32x4MinS)             \
  V(I32x4MaxS)             \
  V(I32x4Eq)               \
  V(I32x4Ne)               \
  V(I32x4MinU)             \
  V(I32x4MaxU)             \
  V(I16x8Add)              \
  V(I16x8AddSaturateS)     \
  V(I16x8Sub)              \
  V(I16x8SubSaturateS)     \
  V(I16x8Mul)              \
  V(I16x8MinS)             \
  V(I16x8MaxS)             \
  V(I16x8Eq)               \
  V(I16x8Ne)               \
  V(I16x8AddSaturateU)     \
  V(I16x8SubSaturateU)     \
  V(I16x8MinU)             \
  V(I16x8MaxU)             \
  V(I8x16Add)              \
  V(I8x16AddSaturateS)     \
  V(I8x16Sub)              \
  V(I8x16SubSaturateS)     \
  V(I8x16MinS)             \
  V(I8x16MaxS)             \
  V(I8x16Eq)               \
  V(I8x16Ne)               \
  V(I8x16AddSaturateU)     \
  V(I8x16SubSaturateU)     \
  V(I8x16MinU)             \
  V(I8x16MaxU)

// Immediate-shift operations handled by VISIT_SIMD_SHIFT below. Note there
// are no I8x16 shifts in this list.
#define SIMD_SHIFT_OPCODES(V) \
  V(I32x4Shl)                 \
  V(I32x4ShrS)                \
  V(I32x4ShrU)                \
  V(I16x8Shl)                 \
  V(I16x8ShrS)                \
  V(I16x8ShrU)
2501 :
// Stamps out Visit{I32x4,I16x8,I8x16}Splat: broadcast the scalar input into
// every lane of a freshly defined SIMD register. g.Use lets the register
// allocator pick any operand form for the scalar.
#define VISIT_SIMD_SPLAT(Type)                               \
  void InstructionSelector::Visit##Type##Splat(Node* node) { \
    X64OperandGenerator g(this);                             \
    Emit(kX64##Type##Splat, g.DefineAsRegister(node),        \
         g.Use(node->InputAt(0)));                           \
  }
SIMD_TYPES(VISIT_SIMD_SPLAT)
#undef VISIT_SIMD_SPLAT
2510 :
// Stamps out Visit{I32x4,I16x8,I8x16}ExtractLane: read one lane of the SIMD
// input into the result register. The lane index is a static parameter of
// the operator, so it is emitted as an immediate operand.
#define VISIT_SIMD_EXTRACT_LANE(Type)                              \
  void InstructionSelector::Visit##Type##ExtractLane(Node* node) { \
    X64OperandGenerator g(this);                                   \
    int32_t lane = OpParameter<int32_t>(node);                     \
    Emit(kX64##Type##ExtractLane, g.DefineAsRegister(node),        \
         g.UseRegister(node->InputAt(0)), g.UseImmediate(lane));   \
  }
SIMD_TYPES(VISIT_SIMD_EXTRACT_LANE)
#undef VISIT_SIMD_EXTRACT_LANE
2520 :
// Stamps out Visit{I32x4,I16x8,I8x16}ReplaceLane: produce the vector input
// with the lane selected by the operator's static lane parameter replaced
// by the scalar second input. DefineSameAsFirst constrains the output to
// reuse the first input's register (two-operand destructive form).
#define VISIT_SIMD_REPLACE_LANE(Type)                              \
  void InstructionSelector::Visit##Type##ReplaceLane(Node* node) { \
    X64OperandGenerator g(this);                                   \
    int32_t lane = OpParameter<int32_t>(node);                     \
    Emit(kX64##Type##ReplaceLane, g.DefineSameAsFirst(node),       \
         g.UseRegister(node->InputAt(0)), g.UseImmediate(lane),    \
         g.Use(node->InputAt(1)));                                 \
  }
SIMD_TYPES(VISIT_SIMD_REPLACE_LANE)
#undef VISIT_SIMD_REPLACE_LANE
2531 :
2532 : #define SIMD_VISIT_ZERO_OP(Name) \
2533 : void InstructionSelector::Visit##Name(Node* node) { \
2534 : X64OperandGenerator g(this); \
2535 : Emit(kX64S128Zero, g.DefineAsRegister(node), g.DefineAsRegister(node)); \
2536 : }
2537 0 : SIMD_ZERO_OP_LIST(SIMD_VISIT_ZERO_OP)
2538 : #undef SIMD_VISIT_ZERO_OP
2539 :
// Stamps out the SIMD shift visitors. The shift amount is a static
// parameter of the operator and is emitted as an immediate; the output
// reuses the input vector's register (DefineSameAsFirst).
#define VISIT_SIMD_SHIFT(Opcode)                                  \
  void InstructionSelector::Visit##Opcode(Node* node) {           \
    X64OperandGenerator g(this);                                  \
    int32_t value = OpParameter<int32_t>(node);                   \
    Emit(kX64##Opcode, g.DefineSameAsFirst(node),                 \
         g.UseRegister(node->InputAt(0)), g.UseImmediate(value)); \
  }
SIMD_SHIFT_OPCODES(VISIT_SIMD_SHIFT)
#undef VISIT_SIMD_SHIFT
2549 :
// Stamps out the two-operand SIMD arithmetic/comparison visitors listed in
// SIMD_BINOP_LIST. The output register is constrained to the first input
// (DefineSameAsFirst), matching a destructive two-operand encoding.
#define VISIT_SIMD_BINOP(Opcode)                                            \
  void InstructionSelector::Visit##Opcode(Node* node) {                     \
    X64OperandGenerator g(this);                                            \
    Emit(kX64##Opcode, g.DefineSameAsFirst(node),                           \
         g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1))); \
  }
SIMD_BINOP_LIST(VISIT_SIMD_BINOP)
#undef VISIT_SIMD_BINOP
2558 :
// Stamps out VisitS{32x4,16x8,8x16}Select: a three-input lane select, all
// formats sharing the single kX64S128Select opcode. The result reuses the
// first input's register (DefineSameAsFirst).
#define SIMD_VISIT_SELECT_OP(format)                             \
  void InstructionSelector::VisitS##format##Select(Node* node) { \
    X64OperandGenerator g(this);                                 \
    Emit(kX64S128Select, g.DefineSameAsFirst(node),              \
         g.UseRegister(node->InputAt(0)),                        \
         g.UseRegister(node->InputAt(1)),                        \
         g.UseRegister(node->InputAt(2)));                       \
  }
SIMD_FORMAT_LIST(SIMD_VISIT_SELECT_OP)
#undef SIMD_VISIT_SELECT_OP
2568 :
// Int32AbsWithOverflow has no x64 lowering; SupportedMachineOperatorFlags()
// below does not advertise it, so the selector should never route here.
void InstructionSelector::VisitInt32AbsWithOverflow(Node* node) {
  UNREACHABLE();
}
2572 :
// Int64AbsWithOverflow has no x64 lowering; SupportedMachineOperatorFlags()
// below does not advertise it, so the selector should never route here.
void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
  UNREACHABLE();
}
2576 :
2577 : // static
2578 : MachineOperatorBuilder::Flags
2579 896536 : InstructionSelector::SupportedMachineOperatorFlags() {
2580 : MachineOperatorBuilder::Flags flags =
2581 : MachineOperatorBuilder::kWord32ShiftIsSafe |
2582 : MachineOperatorBuilder::kWord32Ctz | MachineOperatorBuilder::kWord64Ctz;
2583 896536 : if (CpuFeatures::IsSupported(POPCNT)) {
2584 : flags |= MachineOperatorBuilder::kWord32Popcnt |
2585 : MachineOperatorBuilder::kWord64Popcnt;
2586 : }
2587 896536 : if (CpuFeatures::IsSupported(SSE4_1)) {
2588 : flags |= MachineOperatorBuilder::kFloat32RoundDown |
2589 : MachineOperatorBuilder::kFloat64RoundDown |
2590 : MachineOperatorBuilder::kFloat32RoundUp |
2591 : MachineOperatorBuilder::kFloat64RoundUp |
2592 : MachineOperatorBuilder::kFloat32RoundTruncate |
2593 : MachineOperatorBuilder::kFloat64RoundTruncate |
2594 : MachineOperatorBuilder::kFloat32RoundTiesEven |
2595 : MachineOperatorBuilder::kFloat64RoundTiesEven;
2596 : }
2597 896536 : return flags;
2598 : }
2599 :
// static
// x64 handles unaligned loads and stores natively, so no special alignment
// handling is requested from the machine-operator builder.
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}
2606 :
2607 : } // namespace compiler
2608 : } // namespace internal
2609 : } // namespace v8
|