Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/assembler-inl.h"
6 : #include "src/base/utils/random-number-generator.h"
7 : #include "src/code-stub-assembler.h"
8 : #include "src/codegen.h"
9 : #include "src/compilation-info.h"
10 : #include "src/compiler/code-generator.h"
11 : #include "src/compiler/instruction.h"
12 : #include "src/compiler/linkage.h"
13 : #include "src/isolate.h"
14 : #include "src/objects-inl.h"
15 :
16 : #include "test/cctest/cctest.h"
17 : #include "test/cctest/compiler/code-assembler-tester.h"
18 : #include "test/cctest/compiler/function-tester.h"
19 :
20 : namespace v8 {
21 : namespace internal {
22 : namespace compiler {
23 :
24 : #define __ assembler.
25 :
26 : namespace {
27 :
28 4608 : int GetSlotSizeInBytes(MachineRepresentation rep) {  // Byte width of one test stack slot holding `rep`.
29 4608 : switch (rep) {
30 : case MachineRepresentation::kTagged:
31 : case MachineRepresentation::kFloat32:
32 : return kPointerSize;  // Note: Float32 values still occupy a full pointer-sized slot here.
33 : case MachineRepresentation::kFloat64:
34 : return kDoubleSize;
35 : case MachineRepresentation::kSimd128:
36 0 : return kSimd128Size;
37 : default:
38 : break;
39 : }
40 0 : UNREACHABLE();  // Only the representations listed above are used by this test environment.
41 : }
42 :
43 : // Forward declaration.
44 : Handle<Code> BuildTeardownFunction(Isolate* isolate, CallDescriptor* descriptor,
45 : std::vector<AllocatedOperand> parameters);
46 :
47 : // Build the `setup` function. It takes a code object and a FixedArray as
48 : // parameters and calls the former while passing it each element of the array as
49 : // arguments:
50 : // ~~~
51 : // FixedArray setup(CodeObject* test, FixedArray state_in) {
52 : // // `test` will tail-call to its first parameter which will be `teardown`.
53 : // return test(teardown, state_in[0], state_in[1], state_in[2], ...);
54 : // }
55 : // ~~~
56 : //
57 : // This function needs to convert each element of the FixedArray to raw unboxed
58 : // values to pass to the `test` function. The array will have been created using
59 : // `GenerateInitialState()` and needs to be converted in the following way:
60 : //
61 : // | Parameter type | FixedArray element | Conversion |
62 : // |----------------+--------------------+------------------------------------|
63 : // | kTagged | Smi | None. |
64 : // | kFloat32 | HeapNumber | Load value and convert to Float32. |
65 : // | kFloat64 | HeapNumber | Load value. |
66 : //
67 18 : Handle<Code> BuildSetupFunction(Isolate* isolate, CallDescriptor* descriptor,
68 7290 : std::vector<AllocatedOperand> parameters) {
69 18 : CodeAssemblerTester tester(isolate, 2);  // 2 parameters: the `test` code object and the state FixedArray.
70 18 : CodeStubAssembler assembler(tester.state());
71 : std::vector<Node*> params;
72 : // The first parameter is always the callee.
73 36 : params.push_back(__ Parameter(0));
74 : params.push_back(
75 72 : __ HeapConstant(BuildTeardownFunction(isolate, descriptor, parameters)));  // `teardown` is passed as `test`'s first argument.
76 18 : Node* state_in = __ Parameter(1);
77 7308 : for (int i = 0; i < static_cast<int>(parameters.size()); i++) {
78 7272 : Node* element = __ LoadFixedArrayElement(state_in, __ IntPtrConstant(i));
79 : // Unbox all elements before passing them as arguments.
80 7272 : switch (parameters[i].representation()) {
81 : // Tagged parameters are Smis, they do not need unboxing.
82 : case MachineRepresentation::kTagged:
83 : break;
84 : case MachineRepresentation::kFloat32:
85 3618 : element = __ TruncateFloat64ToFloat32(__ LoadHeapNumberValue(element));
86 1206 : break;
87 : case MachineRepresentation::kFloat64:
88 2412 : element = __ LoadHeapNumberValue(element);
89 1206 : break;
90 : default:
91 0 : UNREACHABLE();
92 : break;
93 : }
94 3636 : params.push_back(element);
95 : }
96 : __ Return(tester.raw_assembler_for_testing()->AddNode(  // Call `test` with `teardown` plus the unboxed state as arguments.
97 : tester.raw_assembler_for_testing()->common()->Call(descriptor),
98 90 : static_cast<int>(params.size()), params.data()));
99 36 : return tester.GenerateCodeCloseAndEscape();
100 : }
101 :
102 : // Build the `teardown` function. It allocates and fills a FixedArray with all
103 : // its parameters. The parameters need to be consistent with `parameters`.
104 : // ~~~
105 : // FixedArray teardown(CodeObject* /* unused */,
106 : // // Tagged registers.
107 : // Object* r0, Object* r1, ...,
108 : // // FP registers.
109 : // Float32 s0, Float64 d1, ...,
110 : // // Mixed stack slots.
111 : // Float64 mem0, Object* mem1, Float32 mem2, ...) {
112 : // return new FixedArray(r0, r1, ..., s0, d1, ..., mem0, mem1, mem2, ...);
113 : // }
114 : // ~~~
115 : //
116 : // This function needs to convert its parameters into values fit for a
117 : // FixedArray, essentially reverting what the `setup` function did:
118 : //
119 : // | Parameter type | Parameter value | Conversion |
120 : // |----------------+-------------------+----------------------------|
121 : // | kTagged | Smi or HeapNumber | None. |
122 : // | kFloat32 | Raw Float32 | Convert to Float64 and |
123 : // | | | allocate a new HeapNumber. |
124 : // | kFloat64 | Raw Float64 | Allocate a new HeapNumber. |
125 : //
126 : // Note that it is possible for a `kTagged` value to go from a Smi to a
127 : // HeapNumber. This is because `AssembleMove` will allocate a new HeapNumber if
128 : // it is asked to move a FP constant to a tagged register or slot.
129 : //
130 18 : Handle<Code> BuildTeardownFunction(Isolate* isolate, CallDescriptor* descriptor,
131 7308 : std::vector<AllocatedOperand> parameters) {
132 18 : CodeAssemblerTester tester(isolate, descriptor);  // Shares the custom CallDescriptor with the generated `test` code.
133 18 : CodeStubAssembler assembler(tester.state());
134 : Node* result_array = __ AllocateFixedArray(
135 36 : PACKED_ELEMENTS, __ IntPtrConstant(parameters.size()));
136 3672 : for (int i = 0; i < static_cast<int>(parameters.size()); i++) {
137 : // The first argument is not used.
138 3636 : Node* param = __ Parameter(i + 1);
139 7272 : switch (parameters[i].representation()) {
140 : case MachineRepresentation::kTagged:
141 : break;
142 : // Box FP values into HeapNumbers.
143 : case MachineRepresentation::kFloat32:
144 : param =
145 1206 : tester.raw_assembler_for_testing()->ChangeFloat32ToFloat64(param);
146 : // Fallthrough: once widened to Float64 the value is boxed like one.
147 : case MachineRepresentation::kFloat64:
148 4824 : param = __ AllocateHeapNumberWithValue(param);
149 2412 : break;
150 : default:
151 0 : UNREACHABLE();
152 : break;
153 : }
154 3636 : __ StoreFixedArrayElement(result_array, i, param);
155 : }
156 18 : __ Return(result_array);
157 36 : return tester.GenerateCodeCloseAndEscape();
158 : }
159 :
160 : // Print the content of `value`, representing the register or stack slot
161 : // described by `operand`.
162 0 : void PrintStateValue(std::ostream& os, Handle<Object> value,
163 : AllocatedOperand operand) {
164 0 : switch (operand.representation()) {
165 : case MachineRepresentation::kTagged:
166 0 : if (value->IsSmi()) {
167 0 : os << Smi::cast(*value)->value();
168 : } else {
169 : os << value->Number();  // Tagged slots may also hold a HeapNumber produced by moving an FP constant.
170 : }
171 : break;
172 : case MachineRepresentation::kFloat32:
173 : case MachineRepresentation::kFloat64:
174 : os << value->Number();
175 : break;
176 : default:
177 0 : UNREACHABLE();  // Only kTagged/kFloat32/kFloat64 states are printable here.
178 : break;
179 : }
180 0 : os << " (" << operand.representation() << " ";
181 0 : if (operand.location_kind() == AllocatedOperand::REGISTER) {
182 0 : os << "register";
183 : } else {
184 : DCHECK_EQ(operand.location_kind(), AllocatedOperand::STACK_SLOT);
185 0 : os << "stack slot";
186 : }
187 0 : os << ")";
188 0 : }
189 :
190 : } // namespace
191 :
192 : #undef __
193 :
194 : // Representation of a test environment. It describes a set of registers, stack
195 : // slots and constants available to the CodeGeneratorTester to perform moves
196 : // with. It has the ability to randomly generate lists of moves and run the code
197 : // generated by the CodeGeneratorTester.
198 : //
199 : // At the moment, only the following representations are tested:
200 : // - kTagged
201 : // - kFloat32
202 : // - kFloat64
203 : // - TODO(planglois): Add support for kSimd128.
204 : // There is no need to test using Word32 or Word64 as they are the same as
205 : // Tagged as far as the code generator is concerned.
206 : //
207 : // Testing the generated code is achieved by wrapping it around `setup` and
208 : // `teardown` functions, written using the CodeStubAssembler. The key idea here
209 : // is that `teardown` and the generated code share the same custom
210 : // CallDescriptor. This descriptor assigns parameters to either registers or
211 : // stack slot of a given representation and therefore essentially describes the
212 : // environment.
213 : //
214 : // What happens is the following:
215 : //
216 : // - The `setup` function receives a FixedArray as the initial state. It
217 : // unpacks it and passes each element as arguments to the generated code
218 : // `test`. We also pass the `teardown` function as a first argument. Thanks
219 : // to the custom CallDescriptor, registers and stack slots get initialised
220 : // according to the content of the FixedArray.
221 : //
222 : // - The `test` function performs the list of moves on its parameters and
223 : // eventually tail-calls to its first parameter, which is the `teardown`
224 : // function.
225 : //
226 : // - The `teardown` function allocates a new FixedArray and fills it with all
227 : // its parameters. Thanks to the tail-call, this is as if the `setup`
228 : // function called `teardown` directly, except now moves were performed!
229 : //
230 : // .----------------setup--------------------------.
231 : // | Take a FixedArray as parameters with |
232 : // | all the initial values of registers |
233 : // | and stack slots. | <- CodeStubAssembler
234 : // | |
235 : // | Call test(teardown, state[0], state[1], ...); |
236 : // '-----------------------------------------------'
237 : // |
238 : // V
239 : // .----------------test-----------------------------.
240 : // | - Move(param3, param42); |
241 : // | - Swap(param64, param1); |
242 : // | - Move(param2, param6); | <- CodeGeneratorTester
243 : // | ... |
244 : // | |
245 : // | // "teardown" is the first parameter as well as |
246 : // | // the callee. |
247 : // | TailCall param0(param0, param1, param2, ...); |
248 : // '-------------------------------------------------'
249 : // |
250 : // V
251 : // .----------------teardown--------------.
252 : // | Create a FixedArray with all |
253 : // | parameters and return it. | <- CodeStubAssembler
254 : // '--------------------------------------'
255 :
256 48 : class TestEnvironment : public HandleAndZoneScope {
257 : public:
258 : // These constants may be tuned to experiment with different environments.
259 :
260 : static const int kGeneralRegisterCount = 4;  // Tagged registers made available to moves.
261 : static const int kDoubleRegisterCount = 6;  // FP registers, allocated as Float32/Float64 pairs below.
262 :
263 : static const int kTaggedSlotCount = 64;
264 : static const int kFloat32SlotCount = 64;
265 : static const int kFloat64SlotCount = 64;
266 : static const int kStackParameterCount =
267 : kTaggedSlotCount + kFloat32SlotCount + kFloat64SlotCount;
268 :
269 : // TODO(all): Test all types of constants (e.g. ExternalReference and
270 : // HeapObject).
271 : static const int kSmiConstantCount = 4;
272 : static const int kFloatConstantCount = 4;
273 : static const int kDoubleConstantCount = 4;
274 :
275 24 : TestEnvironment()
276 : : blocks_(main_zone()),
277 : code_(main_isolate(), main_zone(), &blocks_),
278 24 : rng_(CcTest::random_number_generator()),
279 : // TODO(planglois): Support kSimd128.
280 : supported_reps_({MachineRepresentation::kTagged,
281 : MachineRepresentation::kFloat32,
282 96 : MachineRepresentation::kFloat64}) {
283 : // The "teardown" and "test" functions share the same descriptor with the
284 : // following signature:
285 : // ~~~
286 : // FixedArray f(CodeObject* teardown,
287 : // // Tagged registers.
288 : // Object*, Object*, ...,
289 : // // FP registers.
290 : // Float32, Float64, ...,
291 : // // Mixed stack slots.
292 : // Float64, Object*, Float32, ...);
293 : // ~~~
294 : LocationSignature::Builder test_signature(main_zone(), 1,
295 : 1 + kGeneralRegisterCount +
296 : kDoubleRegisterCount +
297 : kStackParameterCount);
298 :
299 : // The first parameter will be the code object of the "teardown"
300 : // function. This way, the "test" function can tail-call to it.
301 : test_signature.AddParam(LinkageLocation::ForRegister(
302 : kReturnRegister0.code(), MachineType::AnyTagged()));
303 :
304 : // Initialise registers.
305 :
306 : int32_t general_mask =
307 24 : RegisterConfiguration::Default()->allocatable_general_codes_mask();
308 : // kReturnRegister0 is used to hold the "teardown" code object, do not
309 : // generate moves using it.
310 : std::unique_ptr<const RegisterConfiguration> registers(
311 : RegisterConfiguration::RestrictGeneralRegisters(
312 24 : general_mask & ~(1 << kReturnRegister0.code())));
313 :
314 120 : for (int i = 0; i < kGeneralRegisterCount; i++) {
315 96 : int code = registers->GetAllocatableGeneralCode(i);
316 96 : AddRegister(&test_signature, MachineRepresentation::kTagged, code);
317 : }
318 : // We assume that Double and Float registers alias, depending on
319 : // kSimpleFPAliasing. For this reason, we allocate a Float and a Double in
320 : // pairs.
321 : static_assert((kDoubleRegisterCount % 2) == 0,
322 : "kDoubleRegisterCount should be a multiple of two.");
323 72 : for (int i = 0; i < kDoubleRegisterCount; i += 2) {
324 : // Make sure we do not allocate FP registers which alias. We double the
325 : // index for Float registers if the aliasing is not "Simple":
326 : // Simple -> s0, d1, s2, d3, s4, d5, ...
327 : // Arm32-style -> s0, d1, s4, d3, s8, d5, ...
328 : // This isn't space-efficient at all but suits our need.
329 : static_assert(kDoubleRegisterCount < 16,
330 : "Arm has a d16 register but no overlapping s32 register.");
331 : int float_code =
332 : registers->GetAllocatableFloatCode(kSimpleFPAliasing ? i : i * 2);
333 72 : int double_code = registers->GetAllocatableDoubleCode(i + 1);
334 72 : AddRegister(&test_signature, MachineRepresentation::kFloat32, float_code);
335 : AddRegister(&test_signature, MachineRepresentation::kFloat64,
336 72 : double_code);
337 : }
338 :
339 : // Initialise stack slots.
340 :
341 : // Stack parameters start at -1.
342 : int slot_parameter_n = -1;
343 :
344 : // TODO(planglois): Support kSimd128 stack slots.
345 : std::map<MachineRepresentation, int> slots = {
346 : {MachineRepresentation::kTagged, kTaggedSlotCount},
347 : {MachineRepresentation::kFloat32, kFloat32SlotCount},
348 24 : {MachineRepresentation::kFloat64, kFloat64SlotCount}};
349 :
350 : // Allocate new slots until we run out of them.
351 5112 : while (std::any_of(slots.cbegin(), slots.cend(),
352 : [](const std::pair<MachineRepresentation, int>& entry) {
353 : // True if there are slots left to allocate for this
354 : // representation.
355 : return entry.second > 0;
356 : })) {
357 : // Pick a random MachineRepresentation from supported_reps_.
358 5064 : MachineRepresentation rep = CreateRandomMachineRepresentation();
359 : auto entry = slots.find(rep);
360 : DCHECK(entry != slots.end());
361 : // We may have picked a representation for which all slots have already
362 : // been allocated.
363 5064 : if (entry->second > 0) {
364 : // Keep a map of (MachineRepresentation . std::vector<int>) with
365 : // allocated slots to pick from for each representation.
366 : int slot = slot_parameter_n;
367 4608 : slot_parameter_n -= (GetSlotSizeInBytes(rep) / kPointerSize);  // Slots grow downwards by their size in pointer units.
368 4608 : AddStackSlot(&test_signature, rep, slot);
369 4608 : entry->second--;
370 : }
371 : }
372 :
373 : // Initialise random constants.
374 :
375 : // While constants do not know about Smis, we need to be able to
376 : // differentiate between a pointer to a HeapNumber and a integer. For this
377 : // reason, we make sure all integers are Smis, including constants.
378 96 : for (int i = 0; i < kSmiConstantCount; i++) {
379 : intptr_t smi_value = reinterpret_cast<intptr_t>(
380 192 : Smi::FromInt(rng_->NextInt(Smi::kMaxValue)));
381 : Constant constant = kPointerSize == 8
382 : ? Constant(static_cast<int64_t>(smi_value))
383 : : Constant(static_cast<int32_t>(smi_value));
384 96 : AddConstant(MachineRepresentation::kTagged, AllocateConstant(constant));
385 : }
386 : // Float and Double constants can be moved to both Tagged and FP registers
387 : // or slots. Register them as compatible with both FP and Tagged
388 : // destinations.
389 96 : for (int i = 0; i < kFloatConstantCount; i++) {
390 : int virtual_register =
391 192 : AllocateConstant(Constant(DoubleToFloat32(rng_->NextDouble())));
392 96 : AddConstant(MachineRepresentation::kTagged, virtual_register);
393 96 : AddConstant(MachineRepresentation::kFloat32, virtual_register);
394 : }
395 96 : for (int i = 0; i < kDoubleConstantCount; i++) {
396 192 : int virtual_register = AllocateConstant(Constant(rng_->NextDouble()));
397 96 : AddConstant(MachineRepresentation::kTagged, virtual_register);
398 96 : AddConstant(MachineRepresentation::kFloat64, virtual_register);
399 : }
400 :
401 : // The "teardown" function returns a FixedArray with the resulting state.
402 : test_signature.AddReturn(LinkageLocation::ForRegister(
403 24 : kReturnRegister0.code(), MachineType::AnyTagged()));
404 :
405 : test_descriptor_ = new (main_zone())
406 : CallDescriptor(CallDescriptor::kCallCodeObject, // kind
407 : MachineType::AnyTagged(), // target MachineType
408 : LinkageLocation::ForAnyRegister(
409 : MachineType::AnyTagged()), // target location
410 : test_signature.Build(), // location_sig
411 : kStackParameterCount, // stack_parameter_count
412 : Operator::kNoProperties, // properties
413 : kNoCalleeSaved, // callee-saved registers
414 : kNoCalleeSaved, // callee-saved fp
415 48 : CallDescriptor::kNoFlags); // flags
416 24 : }
417 :
418 288 : int AllocateConstant(Constant constant) {  // Registers `constant` with the InstructionSequence; returns its virtual register id.
419 288 : int virtual_register = code_.NextVirtualRegister();
420 : code_.AddConstant(virtual_register, constant);
421 288 : return virtual_register;
422 : }
423 :
424 : // Register a constant referenced by `virtual_register` as compatible with
425 : // `rep`.
426 480 : void AddConstant(MachineRepresentation rep, int virtual_register) {
427 : auto entry = allocated_constants_.find(rep);
428 480 : if (entry == allocated_constants_.end()) {
429 : allocated_constants_.emplace(  // First constant for this representation creates the vector.
430 144 : rep, std::vector<ConstantOperand>{ConstantOperand(virtual_register)});
431 : } else {
432 408 : entry->second.emplace_back(virtual_register);
433 : }
434 480 : }
435 :
436 : // Register a new register or stack slot as compatible with `rep`. As opposed
437 : // to constants, registers and stack slots are written to on `setup` and read
438 : // from on `teardown`. Therefore they are part of the environment's layout,
439 : // and are parameters of the `test` function.
440 :
441 240 : void AddRegister(LocationSignature::Builder* test_signature,
442 : MachineRepresentation rep, int code) {
443 240 : AllocatedOperand operand(AllocatedOperand::REGISTER, rep, code);
444 240 : layout_.push_back(operand);  // The order of layout_ defines the FixedArray state layout.
445 : test_signature->AddParam(LinkageLocation::ForRegister(
446 240 : code, MachineType::TypeForRepresentation(rep)));
447 : auto entry = allocated_registers_.find(rep);
448 240 : if (entry == allocated_registers_.end()) {
449 144 : allocated_registers_.emplace(rep, std::vector<AllocatedOperand>{operand});
450 : } else {
451 168 : entry->second.push_back(operand);
452 : }
453 240 : }
454 :
455 4608 : void AddStackSlot(LocationSignature::Builder* test_signature,
456 : MachineRepresentation rep, int slot) {
457 4608 : AllocatedOperand operand(AllocatedOperand::STACK_SLOT, rep, slot);
458 4608 : layout_.push_back(operand);  // Stack slots follow registers in the state layout.
459 : test_signature->AddParam(LinkageLocation::ForCallerFrameSlot(
460 4608 : slot, MachineType::TypeForRepresentation(rep)));
461 : auto entry = allocated_slots_.find(rep);
462 4608 : if (entry == allocated_slots_.end()) {
463 144 : allocated_slots_.emplace(rep, std::vector<AllocatedOperand>{operand});
464 : } else {
465 4536 : entry->second.push_back(operand);
466 : }
467 4608 : }
469 : // Generate a random initial state to test moves against. A "state" is a
470 : // packed FixedArray with Smis and HeapNumbers, according to the layout of the
471 : // environment.
472 18 : Handle<FixedArray> GenerateInitialState() {
473 : Handle<FixedArray> state = main_isolate()->factory()->NewFixedArray(
474 6084 : static_cast<int>(layout_.size()));
475 7308 : for (int i = 0; i < state->length(); i++) {
476 7272 : switch (layout_[i].representation()) {
477 : case MachineRepresentation::kTagged:
478 1224 : state->set(i, Smi::FromInt(rng_->NextInt(Smi::kMaxValue)));  // Tagged elements always start out as Smis.
479 : break;
480 : case MachineRepresentation::kFloat32:
481 : // HeapNumbers are Float64 values. However, we will convert it to a
482 : // Float32 and back inside `setup` and `teardown`. Make sure the value
483 : // we pick fits in a Float32.
484 : state->set(
485 : i, *main_isolate()->factory()->NewHeapNumber(
486 3618 : static_cast<double>(DoubleToFloat32(rng_->NextDouble()))));
487 1206 : break;
488 : case MachineRepresentation::kFloat64:
489 : state->set(
490 2412 : i, *main_isolate()->factory()->NewHeapNumber(rng_->NextDouble()));
491 1206 : break;
492 : default:
493 0 : UNREACHABLE();
494 : break;
495 : }
496 : }
497 18 : return state;
498 : }
499 :
500 : // Run the code generated by a CodeGeneratorTester against `state_in` and
501 : // return a new resulting state.
502 18 : Handle<FixedArray> Run(Handle<Code> test, Handle<FixedArray> state_in) {
503 : Handle<FixedArray> state_out = main_isolate()->factory()->NewFixedArray(
504 54 : static_cast<int>(layout_.size()));
505 : {
506 : Handle<Code> setup =
507 54 : BuildSetupFunction(main_isolate(), test_descriptor_, layout_);
508 : // FunctionTester maintains its own HandleScope which means that its
509 : // return value will be freed along with it. Copy the result into
510 : // state_out.
511 18 : FunctionTester ft(setup, 2);  // 2 arguments: `test` and the input state.
512 18 : Handle<FixedArray> result = ft.CallChecked<FixedArray>(test, state_in);
513 18 : CHECK_EQ(result->length(), state_in->length());
514 18 : result->CopyTo(0, *state_out, 0, result->length());
515 : }
516 18 : return state_out;
517 : }
518 :
519 : // For a given operand representing either a register or a stack slot, return
520 : // what position it should live in inside a FixedArray state.
521 32910 : int OperandToStatePosition(const AllocatedOperand& operand) const {
522 : // Search `layout_` for `operand`.
523 : auto it = std::find_if(layout_.cbegin(), layout_.cend(),
524 : [operand](const AllocatedOperand& this_operand) {
525 : return this_operand.Equals(operand);
526 65820 : });
527 : DCHECK_NE(it, layout_.cend());  // Every generated operand must come from layout_.
528 32910 : return static_cast<int>(std::distance(layout_.cbegin(), it));
529 : }
530 :
531 : // Perform the given list of moves on `state_in` and return a newly allocated
532 : // state with the results.
533 3198 : Handle<FixedArray> SimulateMoves(ParallelMove* moves,
534 : Handle<FixedArray> state_in) {
535 : Handle<FixedArray> state_out = main_isolate()->factory()->NewFixedArray(
536 15588 : static_cast<int>(layout_.size()));
537 : // We do not want to modify `state_in` in place so perform the moves on a
538 : // copy.
539 3198 : state_in->CopyTo(0, *state_out, 0, state_in->length());
540 15588 : for (auto move : *moves) {
541 : int to_index =
542 9192 : OperandToStatePosition(AllocatedOperand::cast(move->destination()));
543 9192 : InstructionOperand from = move->source();
544 9192 : if (from.IsConstant()) {
545 : Constant constant =
546 : code_.GetConstant(ConstantOperand::cast(from).virtual_register());
547 : Handle<Object> constant_value;
548 3090 : switch (constant.type()) {
549 : case Constant::kInt32:  // Smi constants on 32-bit targets (see constructor).
550 : constant_value =
551 : Handle<Smi>(reinterpret_cast<Smi*>(
552 : static_cast<intptr_t>(constant.ToInt32())),
553 0 : main_isolate());
554 0 : break;
555 : case Constant::kInt64:  // Smi constants on 64-bit targets.
556 : constant_value =
557 : Handle<Smi>(reinterpret_cast<Smi*>(
558 : static_cast<intptr_t>(constant.ToInt64())),
559 312 : main_isolate());
560 312 : break;
561 : case Constant::kFloat32:
562 : constant_value = main_isolate()->factory()->NewHeapNumber(
563 1338 : static_cast<double>(constant.ToFloat32()));
564 1338 : break;
565 : case Constant::kFloat64:
566 : constant_value = main_isolate()->factory()->NewHeapNumber(
567 : constant.ToFloat64().value());
568 1440 : break;
569 : default:
570 0 : UNREACHABLE();
571 : break;
572 : }
573 3090 : state_out->set(to_index, *constant_value);
574 : } else {
575 6102 : int from_index = OperandToStatePosition(AllocatedOperand::cast(from));
576 : state_out->set(to_index, *state_out->GetValueChecked<Object>(
577 12204 : main_isolate(), from_index));
578 : }
579 : }
580 3198 : return state_out;
581 : }
582 :
583 : // Perform the given list of swaps on `state_in` and return a newly allocated
584 : // state with the results.
585 2814 : Handle<FixedArray> SimulateSwaps(ParallelMove* swaps,
586 : Handle<FixedArray> state_in) {
587 : Handle<FixedArray> state_out = main_isolate()->factory()->NewFixedArray(
588 23244 : static_cast<int>(layout_.size()));
589 : // We do not want to modify `state_in` in place so perform the swaps on a
590 : // copy.
591 2814 : state_in->CopyTo(0, *state_out, 0, state_in->length());
592 14436 : for (auto swap : *swaps) {
593 : int lhs_index =
594 8808 : OperandToStatePosition(AllocatedOperand::cast(swap->destination()));
595 : int rhs_index =
596 8808 : OperandToStatePosition(AllocatedOperand::cast(swap->source()));
597 : Handle<Object> lhs =
598 8808 : state_out->GetValueChecked<Object>(main_isolate(), lhs_index);
599 : Handle<Object> rhs =
600 8808 : state_out->GetValueChecked<Object>(main_isolate(), rhs_index);
601 8808 : state_out->set(lhs_index, *rhs);  // Exchange the two state positions.
602 8808 : state_out->set(rhs_index, *lhs);
603 : }
604 2814 : return state_out;
605 : }
606 :
607 : // Compare the given state with a reference.
608 18 : void CheckState(Handle<FixedArray> actual, Handle<FixedArray> expected) {
609 7308 : for (int i = 0; i < static_cast<int>(layout_.size()); i++) {
610 : Handle<Object> actual_value =
611 7272 : actual->GetValueChecked<Object>(main_isolate(), i);
612 : Handle<Object> expected_value =
613 3636 : expected->GetValueChecked<Object>(main_isolate(), i);
614 3636 : if (!actual_value->StrictEquals(*expected_value)) {  // On mismatch, fail with a readable dump of the slot.
615 0 : std::ostringstream expected_str;
616 3654 : PrintStateValue(expected_str, expected_value, layout_[i]);
617 0 : std::ostringstream actual_str;
618 0 : PrintStateValue(actual_str, actual_value, layout_[i]);
619 : V8_Fatal(__FILE__, __LINE__, "Expected: '%s' but got '%s'",
620 0 : expected_str.str().c_str(), actual_str.str().c_str());
621 : }
622 : }
623 18 : }
624 :
625 : enum OperandConstraint {
626 : kNone,
627 : // Restrict operands to non-constants. This is useful when generating a
628 : // destination.
629 : kCannotBeConstant
630 : };
631 :
632 : // Generate parallel moves at random. Note that they may not be compatible
633 : // between each other as this doesn't matter to the code generator.
634 3198 : ParallelMove* GenerateRandomMoves(int size) {
635 : ParallelMove* parallel_move = new (main_zone()) ParallelMove(main_zone());
636 :
637 16122 : for (int i = 0; i < size;) {
638 9726 : MachineRepresentation rep = CreateRandomMachineRepresentation();
639 : MoveOperands mo(CreateRandomOperand(kNone, rep),  // Source may be a constant; destination may not.
640 19452 : CreateRandomOperand(kCannotBeConstant, rep));
641 : // It isn't valid to call `AssembleMove` and `AssembleSwap` with redundant
642 : // moves.
643 10260 : if (mo.IsRedundant()) continue;
644 : parallel_move->AddMove(mo.source(), mo.destination());
645 : // Iterate only when a move was created.
646 9192 : i++;
647 : }
648 :
649 3198 : return parallel_move;
650 : }
651 :
652 2814 : ParallelMove* GenerateRandomSwaps(int size) {  // Like GenerateRandomMoves, but both sides are non-constant operands of the same representation.
653 : ParallelMove* parallel_move = new (main_zone()) ParallelMove(main_zone());
654 :
655 15126 : for (int i = 0; i < size;) {
656 9498 : MachineRepresentation rep = CreateRandomMachineRepresentation();
657 9498 : InstructionOperand lhs = CreateRandomOperand(kCannotBeConstant, rep);
658 9498 : InstructionOperand rhs = CreateRandomOperand(kCannotBeConstant, rep);
659 : MoveOperands mo(lhs, rhs);
660 : // It isn't valid to call `AssembleMove` and `AssembleSwap` with redundant
661 : // moves.
662 10188 : if (mo.IsRedundant()) continue;
663 : // Canonicalize the swap: the register operand has to be the left hand
664 : // side.
665 15990 : if (lhs.IsStackSlot() || lhs.IsFPStackSlot()) {
666 : std::swap(lhs, rhs);
667 : }
668 : parallel_move->AddMove(lhs, rhs);
669 : // Iterate only when a swap was created.
670 8808 : i++;
671 : }
672 :
673 2814 : return parallel_move;
674 : }
675 :
676 24288 : MachineRepresentation CreateRandomMachineRepresentation() {  // Uniformly picks one of supported_reps_.
677 72864 : int index = rng_->NextInt(static_cast<int>(supported_reps_.size()));
678 48576 : return supported_reps_[index];
679 : }
680 :
681 38448 : InstructionOperand CreateRandomOperand(OperandConstraint constraint,
682 : MachineRepresentation rep) {
683 : // Only generate a Constant if the operand is a source and we have a
684 : // constant with a compatible representation in stock.
685 : bool generate_constant =
686 48174 : (constraint != kCannotBeConstant) &&
687 : (allocated_constants_.find(rep) != allocated_constants_.end());
688 38448 : switch (rng_->NextInt(generate_constant ? 3 : 2)) {  // 0: stack slot, 1: register, 2: constant (sources only).
689 : case 0:
690 17814 : return CreateRandomStackSlotOperand(rep);
691 : case 1:
692 17544 : return CreateRandomRegisterOperand(rep);
693 : case 2:
694 3090 : return CreateRandomConstant(rep);
695 : }
696 0 : UNREACHABLE();
697 : }
698 :
699 17544 : AllocatedOperand CreateRandomRegisterOperand(MachineRepresentation rep) {  // Picks one of the registers allocated for `rep`.
700 : int index =
701 35088 : rng_->NextInt(static_cast<int>(allocated_registers_[rep].size()));
702 35088 : return allocated_registers_[rep][index];
703 : }
704 :
705 17814 : AllocatedOperand CreateRandomStackSlotOperand(MachineRepresentation rep) {  // Picks one of the stack slots allocated for `rep`.
706 35628 : int index = rng_->NextInt(static_cast<int>(allocated_slots_[rep].size()));
707 35628 : return allocated_slots_[rep][index];
708 : }
709 :
710 3090 : ConstantOperand CreateRandomConstant(MachineRepresentation rep) {  // Picks one of the constants registered as compatible with `rep`.
711 : int index =
712 6180 : rng_->NextInt(static_cast<int>(allocated_constants_[rep].size()));
713 6180 : return allocated_constants_[rep][index];
714 : }
715 :
716 : v8::base::RandomNumberGenerator* rng() const { return rng_; }
717 : InstructionSequence* code() { return &code_; }
718 : CallDescriptor* test_descriptor() { return test_descriptor_; }
719 :
720 : private:
721 : ZoneVector<InstructionBlock*> blocks_;
722 : InstructionSequence code_;  // Owns the allocated constants and virtual registers.
723 : v8::base::RandomNumberGenerator* rng_;
724 : // The layout describes the type of each element in the environment, in order.
725 : std::vector<AllocatedOperand> layout_;
726 : CallDescriptor* test_descriptor_;  // Shared by the generated "test" code and "teardown".
727 : // Allocated constants, registers and stack slots that we can generate moves
728 : // with. Each per compatible representation.
729 : std::vector<MachineRepresentation> supported_reps_;
730 : std::map<MachineRepresentation, std::vector<ConstantOperand>>
731 : allocated_constants_;
732 : std::map<MachineRepresentation, std::vector<AllocatedOperand>>
733 : allocated_registers_;
734 : std::map<MachineRepresentation, std::vector<AllocatedOperand>>
735 : allocated_slots_;
736 : };
737 :
738 : // Wrapper around the CodeGenerator. Code generated by this can only be called
739 : // using the given `TestEnvironment`.
740 : //
741 : // TODO(planglois): We execute moves on stack parameters only which restricts
742 : // ourselves to small positive offsets relative to the frame pointer. We should
743 : // test large and negative offsets too. A way to do this would be to move some
744 : // stack parameters to local spill slots and create artificial stack space
745 : // between them.
746 36 : class CodeGeneratorTester {  // Drives the CodeGenerator directly; the emitted code only runs inside a TestEnvironment.
747 : public:
748 108 : explicit CodeGeneratorTester(TestEnvironment* environment)
749 : : zone_(environment->main_zone()),
750 : info_(ArrayVector("test"), environment->main_isolate(),
751 : environment->main_zone(), Code::STUB),
752 : linkage_(environment->test_descriptor()),
753 : frame_(environment->test_descriptor()->CalculateFixedFrameSize()),
754 : generator_(environment->main_zone(), &frame_, &linkage_,
755 : environment->code(), &info_, base::Optional<OsrHelper>(),
756 180 : kNoSourcePosition, nullptr) {
757 : // Force a frame to be created.
758 36 : generator_.frame_access_state()->MarkHasFrame(true);
759 36 : generator_.AssembleConstructFrame();
760 : // TODO(all): Generate a stack check here so that we fail gracefully if the
761 : // frame is too big.
762 36 : }
763 :
764 : enum PushTypeFlag {
765 : kRegisterPush = CodeGenerator::kRegisterPush,
766 : kStackSlotPush = CodeGenerator::kStackSlotPush,
767 : kScalarPush = CodeGenerator::kScalarPush
768 : };
769 :
770 18 : void CheckAssembleTailCallGaps(Instruction* instr,
771 : int first_unused_stack_slot,
772 : CodeGeneratorTester::PushTypeFlag push_type) {
773 18 : generator_.AssembleTailCallBeforeGap(instr, first_unused_stack_slot);
774 : #if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_S390) || \
775 : defined(V8_TARGET_ARCH_PPC)
776 : // Only folding register pushes is supported on ARM.
777 : bool supported = ((push_type & CodeGenerator::kRegisterPush) == push_type);
778 : #elif defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_IA32) || \
779 : defined(V8_TARGET_ARCH_X87)
780 18 : bool supported = ((push_type & CodeGenerator::kScalarPush) == push_type);
781 : #else
782 : bool supported = false;
783 : #endif
784 18 : if (supported) {
785 : // Architectures supporting folding adjacent pushes should now have
786 : // resolved all moves.
787 108 : for (const auto& move :
788 18 : *instr->parallel_moves()[Instruction::FIRST_GAP_POSITION]) {
789 144 : CHECK(move->IsEliminated());
790 : }
791 : }
792 18 : generator_.AssembleGaps(instr);
793 18 : generator_.AssembleTailCallAfterGap(instr, first_unused_stack_slot);
794 18 : }
795 :
796 9192 : void CheckAssembleMove(InstructionOperand* source,
797 : InstructionOperand* destination) {
798 9192 : int start = generator_.tasm()->pc_offset();
799 9192 : generator_.AssembleMove(source, destination);
800 18384 : CHECK(generator_.tasm()->pc_offset() > start);
801 9192 : }
802 :
803 8808 : void CheckAssembleSwap(InstructionOperand* source,
804 : InstructionOperand* destination) {
805 8808 : int start = generator_.tasm()->pc_offset();
806 8808 : generator_.AssembleSwap(source, destination);
807 17616 : CHECK(generator_.tasm()->pc_offset() > start);
808 8808 : }
809 :
810 36 : Handle<Code> Finalize() {
811 : // The environment expects this code to tail-call to it's first parameter
812 : // placed in `kReturnRegister0`.
813 : generator_.AssembleArchInstruction(
814 36 : Instruction::New(zone_, kArchPrepareTailCall));
815 :
816 : InstructionOperand callee[] = {
817 : AllocatedOperand(LocationOperand::REGISTER,
818 : MachineRepresentation::kTagged,
819 : kReturnRegister0.code()),
820 : ImmediateOperand(
821 : ImmediateOperand::INLINE,
822 36 : V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0)};
823 : Instruction* tail_call = Instruction::New(zone_, kArchTailCallCodeObject, 0,
824 36 : nullptr, 2, callee, 0, nullptr);
825 : int first_unused_stack_slot;
826 36 : if (generator_.GetSlotAboveSPBeforeTailCall(tail_call,
827 : &first_unused_stack_slot)) {
828 36 : generator_.AssembleTailCallBeforeGap(tail_call, first_unused_stack_slot);
829 36 : generator_.AssembleTailCallAfterGap(tail_call, first_unused_stack_slot);
830 : }
831 36 : generator_.AssembleArchInstruction(tail_call);
832 :
833 36 : generator_.FinishCode();
834 36 : generator_.safepoints()->Emit(generator_.tasm(),
835 72 : frame_.GetTotalFrameSlotCount());
836 36 : return generator_.FinalizeCode();
837 : }
838 :
839 : private:
840 : Zone* zone_;
841 : CompilationInfo info_;
842 : Linkage linkage_;
843 : Frame frame_;
844 : CodeGenerator generator_;
845 : };
846 :
847 : // The following fuzz tests will assemble a lot of moves, wrap them in
848 : // executable native code and run them. In order to check that moves were
// performed correctly, we need to set up an environment with an initial state
// and get it back after the list of moves was performed.
851 : //
852 : // We have two components to do this: TestEnvironment and CodeGeneratorTester.
853 : //
854 : // The TestEnvironment is in charge of bringing up an environment consisting of
855 : // a set of registers, stack slots and constants, with initial values in
856 : // them. The CodeGeneratorTester is a wrapper around the CodeGenerator and its
857 : // only purpose is to generate code for a list of moves. The TestEnvironment is
858 : // then able to run this code against the environment and return a resulting
859 : // state.
860 : //
861 : // A "state" here is a packed FixedArray with tagged values which can either be
862 : // Smis or HeapNumbers. When calling TestEnvironment::Run(...), registers and
863 : // stack slots will be initialised according to this FixedArray. A new
864 : // FixedArray is returned containing values that were moved by the generated
865 : // code.
866 : //
867 : // And finally, we are able to compare the resulting FixedArray against a
868 : // reference, computed with a simulation of AssembleMove and AssembleSwap. See
869 : // SimulateMoves and SimulateSwaps.
870 :
871 23724 : TEST(FuzzAssembleMove) {
872 6 : TestEnvironment env;
873 6 : CodeGeneratorTester c(&env);
874 :
875 6 : Handle<FixedArray> state_in = env.GenerateInitialState();
876 6 : ParallelMove* moves = env.GenerateRandomMoves(1000);
877 :
878 6012 : for (auto m : *moves) {
879 6000 : c.CheckAssembleMove(&m->source(), &m->destination());
880 : }
881 :
882 6 : Handle<Code> test = c.Finalize();
883 : if (FLAG_print_code) {
884 : test->Print();
885 : }
886 :
887 6 : Handle<FixedArray> actual = env.Run(test, state_in);
888 6 : Handle<FixedArray> expected = env.SimulateMoves(moves, state_in);
889 12 : env.CheckState(actual, expected);
890 6 : }
891 :
892 23724 : TEST(FuzzAssembleSwap) {
893 6 : TestEnvironment env;
894 6 : CodeGeneratorTester c(&env);
895 :
896 6 : Handle<FixedArray> state_in = env.GenerateInitialState();
897 6 : ParallelMove* swaps = env.GenerateRandomSwaps(1000);
898 :
899 6012 : for (auto s : *swaps) {
900 6000 : c.CheckAssembleSwap(&s->source(), &s->destination());
901 : }
902 :
903 6 : Handle<Code> test = c.Finalize();
904 : if (FLAG_print_code) {
905 : test->Print();
906 : }
907 :
908 6 : Handle<FixedArray> actual = env.Run(test, state_in);
909 6 : Handle<FixedArray> expected = env.SimulateSwaps(swaps, state_in);
910 12 : env.CheckState(actual, expected);
911 6 : }
912 :
913 23724 : TEST(FuzzAssembleMoveAndSwap) {
914 6 : TestEnvironment env;
915 6 : CodeGeneratorTester c(&env);
916 :
917 6 : Handle<FixedArray> state_in = env.GenerateInitialState();
918 : Handle<FixedArray> expected =
919 6 : env.main_isolate()->factory()->NewFixedArray(state_in->length());
920 6 : state_in->CopyTo(0, *expected, 0, state_in->length());
921 :
922 6006 : for (int i = 0; i < 1000; i++) {
923 : // Randomly alternate between swaps and moves.
924 6000 : if (env.rng()->NextInt(2) == 0) {
925 3192 : ParallelMove* move = env.GenerateRandomMoves(1);
926 3192 : expected = env.SimulateMoves(move, expected);
927 6384 : c.CheckAssembleMove(&move->at(0)->source(), &move->at(0)->destination());
928 : } else {
929 2808 : ParallelMove* swap = env.GenerateRandomSwaps(1);
930 2808 : expected = env.SimulateSwaps(swap, expected);
931 5616 : c.CheckAssembleSwap(&swap->at(0)->source(), &swap->at(0)->destination());
932 : }
933 : }
934 :
935 6 : Handle<Code> test = c.Finalize();
936 : if (FLAG_print_code) {
937 : test->Print();
938 : }
939 :
940 6 : Handle<FixedArray> actual = env.Run(test, state_in);
941 12 : env.CheckState(actual, expected);
942 6 : }
943 :
944 23724 : TEST(AssembleTailCallGap) {
945 18 : const RegisterConfiguration* conf = RegisterConfiguration::Default();
946 6 : TestEnvironment env;
947 :
948 : // This test assumes at least 4 registers are allocatable.
949 6 : CHECK_LE(4, conf->num_allocatable_general_registers());
950 :
951 : auto r0 = AllocatedOperand(LocationOperand::REGISTER,
952 : MachineRepresentation::kTagged,
953 : conf->GetAllocatableGeneralCode(0));
954 : auto r1 = AllocatedOperand(LocationOperand::REGISTER,
955 : MachineRepresentation::kTagged,
956 : conf->GetAllocatableGeneralCode(1));
957 : auto r2 = AllocatedOperand(LocationOperand::REGISTER,
958 : MachineRepresentation::kTagged,
959 : conf->GetAllocatableGeneralCode(2));
960 : auto r3 = AllocatedOperand(LocationOperand::REGISTER,
961 : MachineRepresentation::kTagged,
962 : conf->GetAllocatableGeneralCode(3));
963 :
964 : auto slot_minus_4 = AllocatedOperand(LocationOperand::STACK_SLOT,
965 : MachineRepresentation::kTagged, -4);
966 : auto slot_minus_3 = AllocatedOperand(LocationOperand::STACK_SLOT,
967 : MachineRepresentation::kTagged, -3);
968 : auto slot_minus_2 = AllocatedOperand(LocationOperand::STACK_SLOT,
969 : MachineRepresentation::kTagged, -2);
970 : auto slot_minus_1 = AllocatedOperand(LocationOperand::STACK_SLOT,
971 : MachineRepresentation::kTagged, -1);
972 :
973 : // Avoid slot 0 for architectures which use it store the return address.
974 : int first_slot = V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
975 : auto slot_0 = AllocatedOperand(LocationOperand::STACK_SLOT,
976 : MachineRepresentation::kTagged, first_slot);
977 : auto slot_1 =
978 : AllocatedOperand(LocationOperand::STACK_SLOT,
979 : MachineRepresentation::kTagged, first_slot + 1);
980 : auto slot_2 =
981 : AllocatedOperand(LocationOperand::STACK_SLOT,
982 : MachineRepresentation::kTagged, first_slot + 2);
983 : auto slot_3 =
984 : AllocatedOperand(LocationOperand::STACK_SLOT,
985 : MachineRepresentation::kTagged, first_slot + 3);
986 :
987 : // These tests all generate series of moves that the code generator should
988 : // detect as adjacent pushes. Depending on the architecture, we make sure
989 : // these moves get eliminated.
990 : // Also, disassembling with `--print-code` is useful when debugging.
991 :
992 : {
993 : // Generate a series of register pushes only.
994 6 : CodeGeneratorTester c(&env);
995 6 : Instruction* instr = Instruction::New(env.main_zone(), kArchNop);
996 : instr
997 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
998 : env.main_zone())
999 6 : ->AddMove(r3, slot_0);
1000 : instr
1001 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1002 : env.main_zone())
1003 6 : ->AddMove(r2, slot_1);
1004 : instr
1005 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1006 : env.main_zone())
1007 6 : ->AddMove(r1, slot_2);
1008 : instr
1009 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1010 : env.main_zone())
1011 6 : ->AddMove(r0, slot_3);
1012 :
1013 : c.CheckAssembleTailCallGaps(instr, first_slot + 4,
1014 6 : CodeGeneratorTester::kRegisterPush);
1015 6 : Handle<Code> code = c.Finalize();
1016 : if (FLAG_print_code) {
1017 : code->Print();
1018 : }
1019 : }
1020 :
1021 : {
1022 : // Generate a series of stack pushes only.
1023 6 : CodeGeneratorTester c(&env);
1024 6 : Instruction* instr = Instruction::New(env.main_zone(), kArchNop);
1025 : instr
1026 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1027 : env.main_zone())
1028 6 : ->AddMove(slot_minus_4, slot_0);
1029 : instr
1030 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1031 : env.main_zone())
1032 6 : ->AddMove(slot_minus_3, slot_1);
1033 : instr
1034 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1035 : env.main_zone())
1036 6 : ->AddMove(slot_minus_2, slot_2);
1037 : instr
1038 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1039 : env.main_zone())
1040 6 : ->AddMove(slot_minus_1, slot_3);
1041 :
1042 : c.CheckAssembleTailCallGaps(instr, first_slot + 4,
1043 6 : CodeGeneratorTester::kStackSlotPush);
1044 6 : Handle<Code> code = c.Finalize();
1045 : if (FLAG_print_code) {
1046 : code->Print();
1047 : }
1048 : }
1049 :
1050 : {
1051 : // Generate a mix of stack and register pushes.
1052 6 : CodeGeneratorTester c(&env);
1053 6 : Instruction* instr = Instruction::New(env.main_zone(), kArchNop);
1054 : instr
1055 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1056 : env.main_zone())
1057 6 : ->AddMove(slot_minus_2, slot_0);
1058 : instr
1059 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1060 : env.main_zone())
1061 6 : ->AddMove(r1, slot_1);
1062 : instr
1063 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1064 : env.main_zone())
1065 6 : ->AddMove(slot_minus_1, slot_2);
1066 : instr
1067 : ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
1068 : env.main_zone())
1069 6 : ->AddMove(r0, slot_3);
1070 :
1071 : c.CheckAssembleTailCallGaps(instr, first_slot + 4,
1072 6 : CodeGeneratorTester::kScalarPush);
1073 6 : Handle<Code> code = c.Finalize();
1074 : if (FLAG_print_code) {
1075 : code->Print();
1076 : }
1077 6 : }
1078 6 : }
1079 :
1080 : } // namespace compiler
1081 : } // namespace internal
1082 71154 : } // namespace v8
|