LCOV - code coverage report
Current view: top level - test/cctest/compiler - test-code-generator.cc
Test: app.info
Date: 2019-01-20
Line coverage: 377 of 414 lines hit (91.1 %)
Function coverage: 39 of 40 functions hit (97.5 %)

          Line data    Source code
       1             : // Copyright 2017 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/assembler-inl.h"
       6             : #include "src/base/utils/random-number-generator.h"
       7             : #include "src/code-stub-assembler.h"
       8             : #include "src/compiler/backend/code-generator.h"
       9             : #include "src/compiler/backend/instruction.h"
      10             : #include "src/compiler/linkage.h"
      11             : #include "src/isolate.h"
      12             : #include "src/objects-inl.h"
      13             : #include "src/objects/heap-number-inl.h"
      14             : #include "src/objects/smi.h"
      15             : #include "src/optimized-compilation-info.h"
      16             : 
      17             : #include "test/cctest/cctest.h"
      18             : #include "test/cctest/compiler/code-assembler-tester.h"
      19             : #include "test/cctest/compiler/function-tester.h"
      20             : 
      21             : namespace v8 {
      22             : namespace internal {
      23             : namespace compiler {
      24             : 
      25             : #define __ assembler.
      26             : 
      27             : namespace {
      28             : 
      29        8840 : int GetSlotSizeInBytes(MachineRepresentation rep) {
      30        8840 :   switch (rep) {
      31             :     case MachineRepresentation::kTagged:
      32             :     case MachineRepresentation::kFloat32:
      33             :       return kPointerSize;
      34             :     case MachineRepresentation::kFloat64:
      35             :       return kDoubleSize;
      36             :     case MachineRepresentation::kSimd128:
      37        1010 :       return kSimd128Size;
      38             :     default:
      39             :       break;
      40             :   }
      41           0 :   UNREACHABLE();
      42             : }
      43             : 
      44             : // Forward declaration.
      45             : Handle<Code> BuildTeardownFunction(Isolate* isolate,
      46             :                                    CallDescriptor* call_descriptor,
      47             :                                    std::vector<AllocatedOperand> parameters);
      48             : 
      49             : // Build the `setup` function. It takes a code object and a FixedArray as
      50             : // parameters and calls the former, passing it each element of the array as an
      51             : // argument:
      52             : // ~~~
      53             : // FixedArray setup(CodeObject* test, FixedArray state_in) {
      54             : //   FixedArray state_out = AllocateZeroedFixedArray(state_in.length());
      55             : //   // `test` will tail-call to its first parameter which will be `teardown`.
      56             : //   return test(teardown, state_out, state_in[0], state_in[1],
      57             : //               state_in[2], ...);
      58             : // }
      59             : // ~~~
      60             : //
      61             : // This function needs to convert each element of the FixedArray to raw unboxed
      62             : // values to pass to the `test` function. The array will have been created using
      63             : // `GenerateInitialState()` and needs to be converted in the following way:
      64             : //
      65             : // | Parameter type | FixedArray element  | Conversion                         |
      66             : // |----------------+---------------------+------------------------------------|
      67             : // | kTagged        | Smi                 | None.                              |
      68             : // | kFloat32       | HeapNumber          | Load value and convert to Float32. |
      69             : // | kFloat64       | HeapNumber          | Load value.                        |
      70             : // | kSimd128       | FixedArray<Smi>[4]  | Untag each Smi and write the       |
      71             : // |                |                     | results into lanes of a new        |
      72             : // |                |                     | 128-bit vector.                    |
      73             : //
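// As an illustrative sketch (not part of the original source): assume the
// `parameters` layout is (kTagged, kFloat32, kSimd128) and the initial state
// is
// ~~~
// state_in = [Smi(42), HeapNumber(1.5), FixedArray<Smi>[1, 2, 3, 4]]
// ~~~
// Then `setup` ends up making a call roughly equivalent to
// ~~~
// test(teardown, state_out, 42, 1.5f, I32x4(1, 2, 3, 4));
// ~~~
// where `1.5f` stands for a raw Float32 and `I32x4(...)` for a raw Simd128
// value built by splatting 0 and replacing each lane with the untagged Smi.
//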
      74          30 : Handle<Code> BuildSetupFunction(Isolate* isolate,
      75             :                                 CallDescriptor* call_descriptor,
      76       19710 :                                 std::vector<AllocatedOperand> parameters) {
      77          30 :   CodeAssemblerTester tester(isolate, 2, Code::BUILTIN, "setup");
      78          30 :   CodeStubAssembler assembler(tester.state());
      79             :   std::vector<Node*> params;
      80             :   // The first parameter is always the callee.
      81          60 :   params.push_back(__ Parameter(0));
      82             :   params.push_back(__ HeapConstant(
      83         120 :       BuildTeardownFunction(isolate, call_descriptor, parameters)));
      84             :   // First allocate the FixedArray which will hold the final results. Here we
      85             :   // should take care of all allocations, meaning we allocate HeapNumbers and
      86             :   // FixedArrays representing Simd128 values.
      87             :   TNode<FixedArray> state_out =
      88          30 :       __ AllocateZeroedFixedArray(__ IntPtrConstant(parameters.size()));
      89       13140 :   for (int i = 0; i < static_cast<int>(parameters.size()); i++) {
      90       13080 :     switch (parameters[i].representation()) {
      91             :       case MachineRepresentation::kTagged:
      92             :         break;
      93             :       case MachineRepresentation::kFloat32:
      94             :       case MachineRepresentation::kFloat64:
      95        7920 :         __ StoreFixedArrayElement(state_out, i, __ AllocateHeapNumber());
      96        3960 :         break;
      97             :       case MachineRepresentation::kSimd128: {
      98             :         TNode<FixedArray> vector =
      99         540 :             __ AllocateZeroedFixedArray(__ IntPtrConstant(4));
     100        2700 :         for (int lane = 0; lane < 4; lane++) {
     101        2160 :           __ StoreFixedArrayElement(vector, lane, __ SmiConstant(0));
     102             :         }
     103         540 :         __ StoreFixedArrayElement(state_out, i, vector);
     104             :         break;
     105             :       }
     106             :       default:
     107           0 :         UNREACHABLE();
     108             :         break;
     109             :     }
     110             :   }
     111          60 :   params.push_back(state_out);
     112             :   // Then take each element of the initial state and pass it as an argument.
     113          30 :   TNode<FixedArray> state_in = __ Cast(__ Parameter(1));
     114       13140 :   for (int i = 0; i < static_cast<int>(parameters.size()); i++) {
     115       13080 :     Node* element = __ LoadFixedArrayElement(state_in, __ IntPtrConstant(i));
     116             :     // Unbox all elements before passing them as arguments.
     117       13080 :     switch (parameters[i].representation()) {
     118             :       // Tagged parameters are Smis, they do not need unboxing.
     119             :       case MachineRepresentation::kTagged:
     120             :         break;
     121             :       case MachineRepresentation::kFloat32:
     122        5940 :         element = __ TruncateFloat64ToFloat32(__ LoadHeapNumberValue(element));
     123        1980 :         break;
     124             :       case MachineRepresentation::kFloat64:
     125        3960 :         element = __ LoadHeapNumberValue(element);
     126        1980 :         break;
     127             :       case MachineRepresentation::kSimd128: {
     128             :         Node* vector = tester.raw_assembler_for_testing()->AddNode(
     129             :             tester.raw_assembler_for_testing()->machine()->I32x4Splat(),
     130        1620 :             __ Int32Constant(0));
     131        2700 :         for (int lane = 0; lane < 4; lane++) {
     132             :           TNode<Int32T> lane_value = __ LoadAndUntagToWord32FixedArrayElement(
     133        4320 :               __ CAST(element), __ IntPtrConstant(lane));
     134             :           vector = tester.raw_assembler_for_testing()->AddNode(
     135             :               tester.raw_assembler_for_testing()->machine()->I32x4ReplaceLane(
     136             :                   lane),
     137        2160 :               vector, lane_value);
     138             :         }
     139         540 :         element = vector;
     140         540 :         break;
     141             :       }
     142             :       default:
     143           0 :         UNREACHABLE();
     144             :         break;
     145             :     }
     146        6540 :     params.push_back(element);
     147             :   }
     148             :   __ Return(tester.raw_assembler_for_testing()->AddNode(
     149             :       tester.raw_assembler_for_testing()->common()->Call(call_descriptor),
     150         150 :       static_cast<int>(params.size()), params.data()));
     151          60 :   return tester.GenerateCodeCloseAndEscape();
     152             : }
     153             : 
     154             : // Build the `teardown` function. It takes a FixedArray as an argument, fills it
     155             : // with the rest of its parameters and returns it. The parameters need to be
     156             : // consistent with `parameters`.
     157             : // ~~~
     158             : // FixedArray teardown(CodeObject* /* unused  */, FixedArray result,
     159             : //                     // Tagged registers.
     160             : //                     Object r0, Object r1, ...,
     161             : //                     // FP registers.
     162             : //                     Float32 s0, Float64 d1, ...,
     163             : //                     // Mixed stack slots.
     164             : //                     Float64 mem0, Object mem1, Float32 mem2, ...) {
     165             : //   result[0] = r0;
     166             : //   result[1] = r1;
     167             : //   ...
     168             : //   result[..] = s0;
     169             : //   ...
     170             : //   result[..] = mem0;
     171             : //   ...
     172             : //   return result;
     173             : // }
     174             : // ~~~
     175             : //
     176             : // This function needs to convert its parameters into values suitable for a
     177             : // FixedArray, essentially reversing what the `setup` function did:
     178             : //
     179             : // | Parameter type | Parameter value   | Conversion                           |
     180             : // |----------------+-------------------+--------------------------------------|
     181             : // | kTagged        | Smi or HeapNumber | None.                                |
     182             : // | kFloat32       | Raw Float32       | Convert to Float64.                  |
     183             : // | kFloat64       | Raw Float64       | None.                                |
     184             : // | kSimd128       | Raw Simd128       | Split into 4 Word32 values and tag   |
     185             : // |                |                   | them.                                |
     186             : //
     187             : // Note that it is possible for a `kTagged` value to go from a Smi to a
     188             : // HeapNumber. This is because `AssembleMove` will allocate a new HeapNumber if
     189             : // it is asked to move a FP constant to a tagged register or slot.
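// As an illustrative example (not from the original source): a generated move
// such as
// ~~~
// Move(Constant(1.5) -> some kTagged register or slot)
// ~~~
// leaves a freshly allocated HeapNumber holding 1.5 in a location that started
// out as a Smi, which is why kTagged values are stored back into the result
// FixedArray without any conversion.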
     190             : //
     191             : // Finally, it is important that this function does not call `RecordWrite`, which
     192             : // is why `setup` is in charge of all allocations and why we are using
     193             : // SKIP_WRITE_BARRIER. The reason for this is that `RecordWrite` may clobber the
     194             : // top 64 bits of Simd128 registers. This is the case on x64, ia32 and Arm64 for
     195             : // example.
     196          30 : Handle<Code> BuildTeardownFunction(Isolate* isolate,
     197             :                                    CallDescriptor* call_descriptor,
     198       13110 :                                    std::vector<AllocatedOperand> parameters) {
     199          30 :   CodeAssemblerTester tester(isolate, call_descriptor, "teardown");
     200          30 :   CodeStubAssembler assembler(tester.state());
     201          30 :   TNode<FixedArray> result_array = __ Cast(__ Parameter(1));
     202       13140 :   for (int i = 0; i < static_cast<int>(parameters.size()); i++) {
     203             :     // The first argument is not used and the second is "result_array".
     204        6540 :     Node* param = __ Parameter(i + 2);
     205       13080 :     switch (parameters[i].representation()) {
     206             :       case MachineRepresentation::kTagged:
     207        2040 :         __ StoreFixedArrayElement(result_array, i, param, SKIP_WRITE_BARRIER);
     208        2040 :         break;
     209             :       // Box FP values into HeapNumbers.
     210             :       case MachineRepresentation::kFloat32:
     211             :         param =
     212        1980 :             tester.raw_assembler_for_testing()->ChangeFloat32ToFloat64(param);
     213             :         V8_FALLTHROUGH;
     214             :       case MachineRepresentation::kFloat64:
     215             :         __ StoreObjectFieldNoWriteBarrier(
     216             :             __ LoadFixedArrayElement(result_array, i), HeapNumber::kValueOffset,
     217        7920 :             param, MachineRepresentation::kFloat64);
     218        3960 :         break;
     219             :       case MachineRepresentation::kSimd128: {
     220             :         TNode<FixedArray> vector =
     221         540 :             __ Cast(__ LoadFixedArrayElement(result_array, i));
     222        2700 :         for (int lane = 0; lane < 4; lane++) {
     223             :           Node* lane_value =
     224             :               __ SmiFromInt32(tester.raw_assembler_for_testing()->AddNode(
     225             :                   tester.raw_assembler_for_testing()
     226             :                       ->machine()
     227             :                       ->I32x4ExtractLane(lane),
     228        6480 :                   param));
     229             :           __ StoreFixedArrayElement(vector, lane, lane_value,
     230        2160 :                                     SKIP_WRITE_BARRIER);
     231             :         }
     232             :         break;
     233             :       }
     234             :       default:
     235           0 :         UNREACHABLE();
     236             :         break;
     237             :     }
     238             :   }
     239          30 :   __ Return(result_array);
     240          60 :   return tester.GenerateCodeCloseAndEscape();
     241             : }
     242             : 
     243             : // Print the content of `value`, representing the register or stack slot
     244             : // described by `operand`.
     245           0 : void PrintStateValue(std::ostream& os, Isolate* isolate, Handle<Object> value,
     246             :                      AllocatedOperand operand) {
     247           0 :   switch (operand.representation()) {
     248             :     case MachineRepresentation::kTagged:
     249           0 :       if (value->IsSmi()) {
     250           0 :         os << Smi::cast(*value)->value();
     251             :       } else {
     252           0 :         os << value->Number();
     253             :       }
     254             :       break;
     255             :     case MachineRepresentation::kFloat32:
     256             :     case MachineRepresentation::kFloat64:
     257           0 :       os << value->Number();
     258           0 :       break;
     259             :     case MachineRepresentation::kSimd128: {
     260           0 :       FixedArray vector = FixedArray::cast(*value);
     261           0 :       os << "[";
     262           0 :       for (int lane = 0; lane < 4; lane++) {
     263           0 :         os << Smi::cast(*vector->GetValueChecked<Smi>(isolate, lane))->value();
     264           0 :         if (lane < 3) {
     265           0 :           os << ", ";
     266             :         }
     267             :       }
     268           0 :       os << "]";
     269             :       break;
     270             :     }
     271             :     default:
     272           0 :       UNREACHABLE();
     273             :       break;
     274             :   }
     275           0 :   os << " (" << operand.representation() << " ";
     276           0 :   if (operand.location_kind() == AllocatedOperand::REGISTER) {
     277           0 :     os << "register";
     278             :   } else {
     279             :     DCHECK_EQ(operand.location_kind(), AllocatedOperand::STACK_SLOT);
     280           0 :     os << "stack slot";
     281             :   }
     282           0 :   os << ")";
     283           0 : }
     284             : 
     285             : bool TestSimd128Moves() {
     286             :   return CpuFeatures::SupportsWasmSimd128();
     287             : }
     288             : 
     289             : }  // namespace
     290             : 
     291             : #undef __
     292             : 
     293             : // Representation of a test environment. It describes a set of registers, stack
     294             : // slots and constants available to the CodeGeneratorTester to perform moves
     295             : // with. It has the ability to randomly generate lists of moves and run the code
     296             : // generated by the CodeGeneratorTester.
     297             : //
     298             : // The following representations are tested:
     299             : //   - kTagged
     300             : //   - kFloat32
     301             : //   - kFloat64
     302             : //   - kSimd128 (if supported)
     303             : // There is no need to test using Word32 or Word64 as they are the same as
     304             : // Tagged as far as the code generator is concerned.
     305             : //
     306             : // Testing the generated code is achieved by wrapping it between `setup` and
     307             : // `teardown` functions, written using the CodeStubAssembler. The key idea here
     308             : // is that `teardown` and the generated code share the same custom
     309             : // CallDescriptor. This descriptor assigns parameters to either registers or
     310             : // stack slots of a given representation and therefore essentially describes the
     311             : // environment.
     312             : //
     313             : // What happens is the following:
     314             : //
     315             : //   - The `setup` function receives a FixedArray as the initial state. It
     316             : //     unpacks it and passes each element as an argument to the generated code
     317             : //     `test`. We also pass the `teardown` function as the first argument, as
     318             : //     well as a newly allocated FixedArray as the second argument to hold the
     319             : //     final results. Thanks to the custom CallDescriptor, registers and stack
     320             : //     slots get initialised according to the content of the initial FixedArray.
     321             : //
     322             : //   - The `test` function performs the list of moves on its parameters and
     323             : //     eventually tail-calls to its first parameter, which is the `teardown`
     324             : //     function.
     325             : //
     326             : //   - The `teardown` function receives the final results as a FixedArray, fills
     327             : //     it with the rest of its arguments and returns it. Thanks to the
     328             : //     tail-call, this is as if the `setup` function called `teardown` directly,
     329             : //     except that the moves have now been performed!
     330             : //
     331             : // .----------------setup--------------------------.
     332             : // | Take a FixedArray as parameters with          |
     333             : // | all the initial values of registers           |
     334             : // | and stack slots.                              | <- CodeStubAssembler
     335             : // |                                               |
     336             : // | Allocate a new FixedArray `result` with       |
     337             : // | initial values.                               |
     338             : // |                                               |
     339             : // | Call test(teardown, result, state[0],         |
     340             : // |           state[1], state[2], ...);           |
     341             : // '-----------------------------------------------'
     342             : //   |
     343             : //   V
     344             : // .----------------test-------------------------------.
     345             : // | - Move(param3, param42);                          |
     346             : // | - Swap(param64, param4);                          |
     347             : // | - Move(param2, param6);                           | <- CodeGeneratorTester
     348             : // | ...                                               |
     349             : // |                                                   |
     350             : // | // "teardown" is the first parameter as well as   |
     351             : // | // the callee.                                    |
     352             : // | TailCall teardown(teardown, result, param2, ...); |
     353             : // '---------------------------------------------------'
     354             : //   |
     355             : //   V
     356             : // .----------------teardown---------------------------.
     357             : // | Fill in the incoming `result` FixedArray with all |
     358             : // | parameters and return it.                         | <- CodeStubAssembler
     359             : // '---------------------------------------------------'
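//
// As a rough usage sketch (illustrative only, not part of the original
// source), a test built on top of this environment follows a pattern along
// these lines:
// ~~~
// TestEnvironment env;
// ParallelMove* moves = env.GenerateRandomMoves(1000);
// // Assemble `moves` with the code generator under test (see
// // CodeGeneratorTester further down in this file) into a `test` code object.
// Handle<Code> test = ...;
// Handle<FixedArray> state_in = env.GenerateInitialState();
// Handle<FixedArray> expected = env.SimulateMoves(moves, state_in);
// Handle<FixedArray> actual = env.Run(test, state_in);
// env.CheckState(actual, expected);
// ~~~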
     360             : 
     361          40 : class TestEnvironment : public HandleAndZoneScope {
     362             :  public:
     363             :   // These constants may be tuned to experiment with different environments.
     364             : 
     365             : #ifdef V8_TARGET_ARCH_IA32
     366             :   static constexpr int kGeneralRegisterCount = 3;
     367             : #else
     368             :   static constexpr int kGeneralRegisterCount = 4;
     369             : #endif
     370             :   static constexpr int kDoubleRegisterCount = 6;
     371             : 
     372             :   static constexpr int kTaggedSlotCount = 64;
     373             :   static constexpr int kFloat32SlotCount = 64;
     374             :   static constexpr int kFloat64SlotCount = 64;
     375             :   static constexpr int kSimd128SlotCount = 16;
     376             : 
     377             :   // TODO(all): Test all types of constants (e.g. ExternalReference and
     378             :   // HeapObject).
     379             :   static constexpr int kSmiConstantCount = 4;
     380             :   static constexpr int kFloatConstantCount = 4;
     381             :   static constexpr int kDoubleConstantCount = 4;
     382             : 
     383          20 :   TestEnvironment()
     384             :       : blocks_(1, NewBlock(main_zone(), RpoNumber::FromInt(0)), main_zone()),
     385             :         code_(main_isolate(), main_zone(), &blocks_),
     386          20 :         rng_(CcTest::random_number_generator()),
     387             :         supported_reps_({MachineRepresentation::kTagged,
     388             :                          MachineRepresentation::kFloat32,
     389         120 :                          MachineRepresentation::kFloat64}) {
     390             :     stack_slot_count_ =
     391          20 :         kTaggedSlotCount + kFloat32SlotCount + kFloat64SlotCount;
     392          20 :     if (TestSimd128Moves()) {
     393          20 :       stack_slot_count_ += kSimd128SlotCount;
     394          40 :       supported_reps_.push_back(MachineRepresentation::kSimd128);
     395             :     }
     396             :     // The "teardown" and "test" functions share the same descriptor with the
     397             :     // following signature:
     398             :     // ~~~
     399             :     // FixedArray f(CodeObject* teardown, FixedArray preallocated_result,
     400             :     //              // Tagged registers.
     401             :     //              Object, Object, ...,
     402             :     //              // FP registers.
     403             :     //              Float32, Float64, Simd128, ...,
     404             :     //              // Mixed stack slots.
     405             :     //              Float64, Object, Float32, Simd128, ...);
     406             :     // ~~~
     407             :     LocationSignature::Builder test_signature(
     408             :         main_zone(), 1,
     409          20 :         2 + kGeneralRegisterCount + kDoubleRegisterCount + stack_slot_count_);
     410             : 
     411             :     // The first parameter will be the code object of the "teardown"
     412             :     // function. This way, the "test" function can tail-call to it.
     413             :     test_signature.AddParam(LinkageLocation::ForRegister(
     414             :         kReturnRegister0.code(), MachineType::AnyTagged()));
     415             : 
     416             :     // The second parameter will be a pre-allocated FixedArray that the
     417             :     // "teardown" function will fill with the result and then return. We place
     418             :     // this parameter in the first stack argument slot, which is always -1, so
     419             :     // the slots available for moves start at -2.
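    // As a worked example (illustrative, assuming a 64-bit target where
    // kPointerSize == 8): a kFloat64 operand allocated first lives at slot -2
    // and the next operand starts at -3, whereas a kSimd128 operand allocated
    // first spans slots -2 and -3 (GetSlotSizeInBytes(kSimd128) / kPointerSize
    // is 2), so the next operand starts at -4.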
     420             :     test_signature.AddParam(
     421             :         LinkageLocation::ForCallerFrameSlot(-1, MachineType::AnyTagged()));
     422             :     int slot_parameter_n = -2;
     423          20 :     const int kTotalStackParameterCount = stack_slot_count_ + 1;
     424             : 
     425             :     // Initialise registers.
     426             : 
     427             :     // Make sure that the target has enough general purpose registers to
     428             :     // generate a call to a CodeObject using this descriptor. We have reserved
     429             :     // kReturnRegister0 as the first parameter, and the call will need a
     430             :     // register to hold the CodeObject address. So the maximum number of
     431             :     // registers left to test with is the number of available registers minus 2.
     432             :     DCHECK_LE(kGeneralRegisterCount,
     433             :               GetRegConfig()->num_allocatable_general_registers() - 2);
     434             : 
     435          20 :     int32_t general_mask = GetRegConfig()->allocatable_general_codes_mask();
     436             :     // kReturnRegister0 is used to hold the "teardown" code object; do not
     437             :     // generate moves using it.
     438             :     std::unique_ptr<const RegisterConfiguration> registers(
     439             :         RegisterConfiguration::RestrictGeneralRegisters(
     440          20 :             general_mask & ~kReturnRegister0.bit()));
     441             : 
     442         100 :     for (int i = 0; i < kGeneralRegisterCount; i++) {
     443          80 :       int code = registers->GetAllocatableGeneralCode(i);
     444          80 :       AddRegister(&test_signature, MachineRepresentation::kTagged, code);
     445             :     }
     446             :     // We assume that Double, Float and Simd128 registers may alias, depending
     447             :     // on kSimpleFPAliasing. For this reason, we allocate a Float, a Double and
     448             :     // a Simd128 register together, which is why `kDoubleRegisterCount` should
     449             :     // be a multiple of 3, and of 2 in case Simd128 is not supported.
     450             :     static_assert(
     451             :         ((kDoubleRegisterCount % 2) == 0) && ((kDoubleRegisterCount % 3) == 0),
     452             :         "kDoubleRegisterCount should be a multiple of two and three.");
     453          40 :     for (int i = 0; i < kDoubleRegisterCount; i += 2) {
     454             :       if (kSimpleFPAliasing) {
     455             :         // Allocate three registers at once if kSimd128 is supported, else
     456             :         // allocate in pairs.
     457             :         AddRegister(&test_signature, MachineRepresentation::kFloat32,
     458          40 :                     registers->GetAllocatableFloatCode(i));
     459             :         AddRegister(&test_signature, MachineRepresentation::kFloat64,
     460          80 :                     registers->GetAllocatableDoubleCode(i + 1));
     461          40 :         if (TestSimd128Moves()) {
     462             :           AddRegister(&test_signature, MachineRepresentation::kSimd128,
     463          80 :                       registers->GetAllocatableSimd128Code(i + 2));
     464             :           i++;
     465             :         }
     466             :       } else {
     467             :         // Make sure we do not allocate FP registers which alias. To do this, we
     468             :         // allocate three 128-bit registers and then convert two of them to a
     469             :         // float and a double. With this aliasing scheme, a Simd128 register
     470             :         // aliases two Double registers and four Float registers, so we need to
     471             :         // scale indexes accordingly:
     472             :         //
     473             :         //   Simd128 register: q0, q1, q2, q3,  q4, q5
     474             :         //                      |   |       |    |
     475             :         //                      V   V       V    V
     476             :         //   Aliases:          s0, d2, q2, s12, d8, q5
     477             :         //
     478             :         // This isn't space efficient at all but suits our needs.
     479             :         static_assert(
     480             :             kDoubleRegisterCount < 8,
     481             :             "Arm has a q8 and a d16 register but no overlapping s32 register.");
     482             :         int first_simd128 = registers->GetAllocatableSimd128Code(i);
     483             :         int second_simd128 = registers->GetAllocatableSimd128Code(i + 1);
     484             :         AddRegister(&test_signature, MachineRepresentation::kFloat32,
     485             :                     first_simd128 * 4);
     486             :         AddRegister(&test_signature, MachineRepresentation::kFloat64,
     487             :                     second_simd128 * 2);
     488             :         if (TestSimd128Moves()) {
     489             :           int third_simd128 = registers->GetAllocatableSimd128Code(i + 2);
     490             :           AddRegister(&test_signature, MachineRepresentation::kSimd128,
     491             :                       third_simd128);
     492             :           i++;
     493             :         }
     494             :       }
     495             :     }
     496             : 
     497             :     // Initialise stack slots.
     498             : 
     499             :     std::map<MachineRepresentation, int> slots = {
     500             :         {MachineRepresentation::kTagged, kTaggedSlotCount},
     501             :         {MachineRepresentation::kFloat32, kFloat32SlotCount},
     502          20 :         {MachineRepresentation::kFloat64, kFloat64SlotCount}};
     503          20 :     if (TestSimd128Moves()) {
     504          40 :       slots.emplace(MachineRepresentation::kSimd128, kSimd128SlotCount);
     505             :     }
     506             : 
     507             :     // Allocate new slots until we run out of them.
     508        5380 :     while (std::any_of(slots.cbegin(), slots.cend(),
     509             :                        [](const std::pair<MachineRepresentation, int>& entry) {
     510             :                          // True if there are slots left to allocate for this
     511             :                          // representation.
     512             :                          return entry.second > 0;
     513             :                        })) {
     514             :       // Pick a random MachineRepresentation from supported_reps_.
     515        5360 :       MachineRepresentation rep = CreateRandomMachineRepresentation();
     516             :       auto entry = slots.find(rep);
     517             :       DCHECK(entry != slots.end());
     518             :       // We may have picked a representation for which all slots have already
     519             :       // been allocated.
     520        5360 :       if (entry->second > 0) {
     521             :         // Keep a map of (MachineRepresentation . std::vector<int>) with
     522             :         // allocated slots to pick from for each representation.
     523             :         int slot = slot_parameter_n;
     524        4160 :         slot_parameter_n -= (GetSlotSizeInBytes(rep) / kPointerSize);
     525        4160 :         AddStackSlot(&test_signature, rep, slot);
     526        4160 :         entry->second--;
     527             :       }
     528             :     }
     529             : 
     530             :     // Initialise random constants.
     531             : 
     532             :     // While constants do not know about Smis, we need to be able to
     533             :     // differentiate between a pointer to a HeapNumber and a integer. For this
     534             :     // reason, we make sure all integers are Smis, including constants.
     535          80 :     for (int i = 0; i < kSmiConstantCount; i++) {
     536             :       intptr_t smi_value = static_cast<intptr_t>(
     537         160 :           Smi::FromInt(rng_->NextInt(Smi::kMaxValue)).ptr());
     538             :       Constant constant = kPointerSize == 8
     539             :                               ? Constant(static_cast<int64_t>(smi_value))
     540             :                               : Constant(static_cast<int32_t>(smi_value));
     541          80 :       AddConstant(MachineRepresentation::kTagged, AllocateConstant(constant));
     542             :     }
     543             :     // Float and Double constants can be moved to both Tagged and FP registers
     544             :     // or slots. Register them as compatible with both FP and Tagged
     545             :     // destinations.
     546          80 :     for (int i = 0; i < kFloatConstantCount; i++) {
     547             :       int virtual_register =
     548         160 :           AllocateConstant(Constant(DoubleToFloat32(rng_->NextDouble())));
     549          80 :       AddConstant(MachineRepresentation::kTagged, virtual_register);
     550          80 :       AddConstant(MachineRepresentation::kFloat32, virtual_register);
     551             :     }
     552          80 :     for (int i = 0; i < kDoubleConstantCount; i++) {
     553         160 :       int virtual_register = AllocateConstant(Constant(rng_->NextDouble()));
     554          80 :       AddConstant(MachineRepresentation::kTagged, virtual_register);
     555          80 :       AddConstant(MachineRepresentation::kFloat64, virtual_register);
     556             :     }
     557             : 
     558             :     // The "teardown" function returns a FixedArray with the resulting state.
     559             :     test_signature.AddReturn(LinkageLocation::ForRegister(
     560          20 :         kReturnRegister0.code(), MachineType::AnyTagged()));
     561             : 
     562             :     test_descriptor_ = new (main_zone())
     563             :         CallDescriptor(CallDescriptor::kCallCodeObject,  // kind
     564             :                        MachineType::AnyTagged(),         // target MachineType
     565             :                        LinkageLocation::ForAnyRegister(
     566             :                            MachineType::AnyTagged()),  // target location
     567             :                        test_signature.Build(),         // location_sig
     568             :                        kTotalStackParameterCount,      // stack_parameter_count
     569             :                        Operator::kNoProperties,        // properties
     570             :                        kNoCalleeSaved,                 // callee-saved registers
     571             :                        kNoCalleeSaved,                 // callee-saved fp
     572          60 :                        CallDescriptor::kNoFlags);      // flags
     573          20 :   }
     574             : 
     575         240 :   int AllocateConstant(Constant constant) {
     576         240 :     int virtual_register = code_.NextVirtualRegister();
     577             :     code_.AddConstant(virtual_register, constant);
     578         240 :     return virtual_register;
     579             :   }
     580             : 
     581             :   // Register a constant referenced by `virtual_register` as compatible with
     582             :   // `rep`.
     583         400 :   void AddConstant(MachineRepresentation rep, int virtual_register) {
     584             :     auto entry = allocated_constants_.find(rep);
     585         400 :     if (entry == allocated_constants_.end()) {
     586             :       allocated_constants_.emplace(
     587         120 :           rep, std::vector<ConstantOperand>{ConstantOperand(virtual_register)});
     588             :     } else {
     589         340 :       entry->second.emplace_back(virtual_register);
     590             :     }
     591         400 :   }
     592             : 
     593             :   // Register a new register or stack slot as compatible with `rep`. As opposed
     594             :   // to constants, registers and stack slots are written to on `setup` and read
     595             :   // from on `teardown`. Therefore they are part of the environment's layout,
     596             :   // and are parameters of the `test` function.
     597             : 
     598         200 :   void AddRegister(LocationSignature::Builder* test_signature,
     599             :                    MachineRepresentation rep, int code) {
     600         200 :     AllocatedOperand operand(AllocatedOperand::REGISTER, rep, code);
     601         200 :     layout_.push_back(operand);
     602             :     test_signature->AddParam(LinkageLocation::ForRegister(
     603         200 :         code, MachineType::TypeForRepresentation(rep)));
     604             :     auto entry = allocated_registers_.find(rep);
     605         200 :     if (entry == allocated_registers_.end()) {
     606         160 :       allocated_registers_.emplace(rep, std::vector<AllocatedOperand>{operand});
     607             :     } else {
     608         120 :       entry->second.push_back(operand);
     609             :     }
     610         200 :   }
     611             : 
     612        4160 :   void AddStackSlot(LocationSignature::Builder* test_signature,
     613             :                     MachineRepresentation rep, int slot) {
     614        4160 :     AllocatedOperand operand(AllocatedOperand::STACK_SLOT, rep, slot);
     615        4160 :     layout_.push_back(operand);
     616             :     test_signature->AddParam(LinkageLocation::ForCallerFrameSlot(
     617        4160 :         slot, MachineType::TypeForRepresentation(rep)));
     618             :     auto entry = allocated_slots_.find(rep);
     619        4160 :     if (entry == allocated_slots_.end()) {
     620         160 :       allocated_slots_.emplace(rep, std::vector<AllocatedOperand>{operand});
     621             :     } else {
     622        4080 :       entry->second.push_back(operand);
     623             :     }
     624        4160 :   }
     625             : 
     626             :   // Generate a random initial state to test moves against. A "state" is a
     627             :   // packed FixedArray with Smis, HeapNumbers and, for Simd128 values,
     628             :   // FixedArrays of Smis, according to the layout of the environment.
     629          15 :   Handle<FixedArray> GenerateInitialState() {
     630             :     Handle<FixedArray> state = main_isolate()->factory()->NewFixedArray(
     631        5550 :         static_cast<int>(layout_.size()));
     632        6570 :     for (int i = 0; i < state->length(); i++) {
     633        6540 :       switch (layout_[i].representation()) {
     634             :         case MachineRepresentation::kTagged:
     635        1020 :           state->set(i, Smi::FromInt(rng_->NextInt(Smi::kMaxValue)));
     636        1020 :           break;
     637             :         case MachineRepresentation::kFloat32: {
     638             :           // HeapNumbers are Float64 values. However, the value will be converted
     639             :           // to a Float32 and back inside `setup` and `teardown`, so make sure the
     640             :           // value we pick fits in a Float32.
     641             :           Handle<HeapNumber> num = main_isolate()->factory()->NewHeapNumber(
     642        2970 :               static_cast<double>(DoubleToFloat32(rng_->NextDouble())));
     643        1980 :           state->set(i, *num);
     644             :           break;
     645             :         }
     646             :         case MachineRepresentation::kFloat64: {
     647             :           Handle<HeapNumber> num =
     648        1980 :               main_isolate()->factory()->NewHeapNumber(rng_->NextDouble());
     649        1980 :           state->set(i, *num);
     650             :           break;
     651             :         }
     652             :         case MachineRepresentation::kSimd128: {
     653             :           Handle<FixedArray> vector =
     654         270 :               main_isolate()->factory()->NewFixedArray(4);
     655        1350 :           for (int lane = 0; lane < 4; lane++) {
     656        1080 :             vector->set(lane, Smi::FromInt(rng_->NextInt(Smi::kMaxValue)));
     657             :           }
     658         540 :           state->set(i, *vector);
     659             :           break;
     660             :         }
     661             :         default:
     662           0 :           UNREACHABLE();
     663             :           break;
     664             :       }
     665             :     }
     666          15 :     return state;
     667             :   }
     668             : 
     669             :   // Run the code generated by a CodeGeneratorTester against `state_in` and
     670             :   // return a new resulting state.
     671          30 :   Handle<FixedArray> Run(Handle<Code> test, Handle<FixedArray> state_in) {
     672             :     Handle<FixedArray> state_out = main_isolate()->factory()->NewFixedArray(
     673          90 :         static_cast<int>(layout_.size()));
     674             :     {
     675             : #ifdef ENABLE_SLOW_DCHECKS
     676             :       // The "setup" and "teardown" functions are relatively big, and with
     677             :       // runtime assertions enabled they get so big that memory during register
     678             :       // allocation becomes a problem. Temporarily disable such assertions.
     679             :       bool old_enable_slow_asserts = FLAG_enable_slow_asserts;
     680             :       FLAG_enable_slow_asserts = false;
     681             : #endif
     682             :       Handle<Code> setup =
     683          90 :           BuildSetupFunction(main_isolate(), test_descriptor_, layout_);
     684             : #ifdef ENABLE_SLOW_DCHECKS
     685             :       FLAG_enable_slow_asserts = old_enable_slow_asserts;
     686             : #endif
     687             :       // FunctionTester maintains its own HandleScope which means that its
     688             :       // return value will be freed along with it. Copy the result into
     689             :       // state_out.
     690          30 :       FunctionTester ft(setup, 2);
     691          30 :       Handle<FixedArray> result = ft.CallChecked<FixedArray>(test, state_in);
     692          30 :       CHECK_EQ(result->length(), state_in->length());
     693          30 :       result->CopyTo(0, *state_out, 0, result->length());
     694             :     }
     695          30 :     return state_out;
     696             :   }
     697             : 
     698             :   // For a given operand representing either a register or a stack slot, return
     699             :   // the position at which it should live inside a FixedArray state.
     700       37260 :   int OperandToStatePosition(const AllocatedOperand& operand) const {
     701             :     // Search `layout_` for `operand`.
     702             :     auto it = std::find_if(layout_.cbegin(), layout_.cend(),
     703             :                            [operand](const AllocatedOperand& this_operand) {
     704             :                              return this_operand.Equals(operand);
     705       74520 :                            });
     706             :     DCHECK_NE(it, layout_.cend());
     707       37260 :     return static_cast<int>(std::distance(layout_.cbegin(), it));
     708             :   }
     709             : 
     710             :   // Perform the given list of moves on `state_in` and return a newly allocated
     711             :   // state with the results.
     712        4960 :   Handle<FixedArray> SimulateMoves(ParallelMove* moves,
     713             :                                    Handle<FixedArray> state_in) {
     714             :     Handle<FixedArray> state_out = main_isolate()->factory()->NewFixedArray(
     715       19875 :         static_cast<int>(layout_.size()));
     716             :     // We do not want to modify `state_in` in place so perform the moves on a
     717             :     // copy.
     718        4960 :     state_in->CopyTo(0, *state_out, 0, state_in->length());
     719       19875 :     for (auto move : *moves) {
     720             :       int to_index =
     721        9955 :           OperandToStatePosition(AllocatedOperand::cast(move->destination()));
     722        9955 :       InstructionOperand from = move->source();
     723        9955 :       if (from.IsConstant()) {
     724             :         Constant constant =
     725             :             code_.GetConstant(ConstantOperand::cast(from).virtual_register());
     726             :         Handle<Object> constant_value;
     727        2740 :         switch (constant.type()) {
     728             :           case Constant::kInt32:
     729             :             constant_value =
     730             :                 Handle<Smi>(Smi(static_cast<Address>(
     731             :                                 static_cast<intptr_t>(constant.ToInt32()))),
     732           0 :                             main_isolate());
     733           0 :             break;
     734             :           case Constant::kInt64:
     735             :             constant_value = Handle<Smi>(
     736         285 :                 Smi(static_cast<Address>(constant.ToInt64())), main_isolate());
     737         285 :             break;
     738             :           case Constant::kFloat32:
     739             :             constant_value = main_isolate()->factory()->NewHeapNumber(
     740        2480 :                 static_cast<double>(constant.ToFloat32()));
     741        1240 :             break;
     742             :           case Constant::kFloat64:
     743             :             constant_value = main_isolate()->factory()->NewHeapNumber(
     744        1215 :                 constant.ToFloat64().value());
     745        1215 :             break;
     746             :           default:
     747           0 :             UNREACHABLE();
     748             :             break;
     749             :         }
     750        2740 :         state_out->set(to_index, *constant_value);
     751             :       } else {
     752        7215 :         int from_index = OperandToStatePosition(AllocatedOperand::cast(from));
     753             :         state_out->set(to_index, *state_out->GetValueChecked<Object>(
     754       14430 :                                      main_isolate(), from_index));
     755             :       }
     756             :     }
     757        4960 :     return state_out;
     758             :   }
     759             : 
     760             :   // Perform the given list of swaps on `state_in` and return a newly allocated
     761             :   // state with the results.
     762        5050 :   Handle<FixedArray> SimulateSwaps(ParallelMove* swaps,
     763             :                                    Handle<FixedArray> state_in) {
     764             :     Handle<FixedArray> state_out = main_isolate()->factory()->NewFixedArray(
     765       30190 :         static_cast<int>(layout_.size()));
     766             :     // We do not want to modify `state_in` in place so perform the swaps on a
     767             :     // copy.
     768        5050 :     state_in->CopyTo(0, *state_out, 0, state_in->length());
     769       20145 :     for (auto swap : *swaps) {
     770             :       int lhs_index =
     771       10045 :           OperandToStatePosition(AllocatedOperand::cast(swap->destination()));
     772             :       int rhs_index =
     773       10045 :           OperandToStatePosition(AllocatedOperand::cast(swap->source()));
     774             :       Handle<Object> lhs =
     775       10045 :           state_out->GetValueChecked<Object>(main_isolate(), lhs_index);
     776             :       Handle<Object> rhs =
     777       10045 :           state_out->GetValueChecked<Object>(main_isolate(), rhs_index);
     778       10045 :       state_out->set(lhs_index, *rhs);
     779       10045 :       state_out->set(rhs_index, *lhs);
     780             :     }
     781        5050 :     return state_out;
     782             :   }
     783             : 
     784             :   // Compare the given state with a reference.
     785          30 :   void CheckState(Handle<FixedArray> actual, Handle<FixedArray> expected) {
     786       13140 :     for (int i = 0; i < static_cast<int>(layout_.size()); i++) {
     787             :       Handle<Object> actual_value =
     788       13080 :           actual->GetValueChecked<Object>(main_isolate(), i);
     789             :       Handle<Object> expected_value =
     790        6540 :           expected->GetValueChecked<Object>(main_isolate(), i);
     791        6540 :       if (!CompareValues(actual_value, expected_value,
     792       19650 :                          layout_[i].representation())) {
     793           0 :         std::ostringstream expected_str;
     794             :         PrintStateValue(expected_str, main_isolate(), expected_value,
     795           0 :                         layout_[i]);
     796           0 :         std::ostringstream actual_str;
     797           0 :         PrintStateValue(actual_str, main_isolate(), actual_value, layout_[i]);
     798             :         V8_Fatal(__FILE__, __LINE__, "Expected: '%s' but got '%s'",
     799           0 :                  expected_str.str().c_str(), actual_str.str().c_str());
     800             :       }
     801             :     }
     802          30 :   }
     803             : 
     804        6540 :   bool CompareValues(Handle<Object> actual, Handle<Object> expected,
     805             :                      MachineRepresentation rep) {
     806        6540 :     switch (rep) {
     807             :       case MachineRepresentation::kTagged:
     808             :       case MachineRepresentation::kFloat32:
     809             :       case MachineRepresentation::kFloat64:
     810        6000 :         return actual->StrictEquals(*expected);
     811             :       case MachineRepresentation::kSimd128:
     812        2160 :         for (int lane = 0; lane < 4; lane++) {
     813             :           Handle<Smi> actual_lane =
     814             :               FixedArray::cast(*actual)->GetValueChecked<Smi>(main_isolate(),
     815        4320 :                                                               lane);
     816             :           Handle<Smi> expected_lane =
     817             :               FixedArray::cast(*expected)->GetValueChecked<Smi>(main_isolate(),
     818        2160 :                                                                 lane);
     819        2160 :           if (*actual_lane != *expected_lane) {
     820           0 :             return false;
     821             :           }
     822             :         }
     823             :         return true;
     824             :       default:
     825           0 :         UNREACHABLE();
     826             :         break;
     827             :     }
     828             :   }
     829             : 
     830             :   enum OperandConstraint {
     831             :     kNone,
     832             :     // Restrict operands to non-constants. This is useful when generating a
     833             :     // destination.
     834             :     kCannotBeConstant
     835             :   };
     836             : 
     837             :   // Generate parallel moves at random. Note that they may not be compatible
     838             :   // with each other, as this does not matter to the code generator.
     839        4960 :   ParallelMove* GenerateRandomMoves(int size) {
     840             :     ParallelMove* parallel_move = new (main_zone()) ParallelMove(main_zone());
     841             : 
     842       20845 :     for (int i = 0; i < size;) {
     843       10925 :       MachineRepresentation rep = CreateRandomMachineRepresentation();
     844             :       MoveOperands mo(CreateRandomOperand(kNone, rep),
     845       21850 :                       CreateRandomOperand(kCannotBeConstant, rep));
     846             :       // It isn't valid to call `AssembleMove` and `AssembleSwap` with redundant
     847             :       // moves.
     848       11895 :       if (mo.IsRedundant()) continue;
     849             :       parallel_move->AddMove(mo.source(), mo.destination());
     850             :       // Iterate only when a move was created.
     851        9955 :       i++;
     852             :     }
     853             : 
     854        4960 :     return parallel_move;
     855             :   }
     856             : 
     857        5050 :   ParallelMove* GenerateRandomSwaps(int size) {
     858             :     ParallelMove* parallel_move = new (main_zone()) ParallelMove(main_zone());
     859             : 
     860       21430 :     for (int i = 0; i < size;) {
     861       11330 :       MachineRepresentation rep = CreateRandomMachineRepresentation();
     862       11330 :       InstructionOperand lhs = CreateRandomOperand(kCannotBeConstant, rep);
     863       11330 :       InstructionOperand rhs = CreateRandomOperand(kCannotBeConstant, rep);
     864             :       MoveOperands mo(lhs, rhs);
     865             :       // It isn't valid to call `AssembleMove` and `AssembleSwap` with redundant
     866             :       // moves.
     867       12615 :       if (mo.IsRedundant()) continue;
     868             :       // Canonicalize the swap: the register operand has to be the left hand
     869             :       // side.
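                      :       // For instance, a (stack slot, register) pair becomes a
                      :       // (register, stack slot) pair.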
     870       18600 :       if (lhs.IsStackSlot() || lhs.IsFPStackSlot()) {
     871             :         std::swap(lhs, rhs);
     872             :       }
     873             :       parallel_move->AddMove(lhs, rhs);
     874             :       // Iterate only when a swap was created.
     875       10045 :       i++;
     876             :     }
     877             : 
     878        5050 :     return parallel_move;
     879             :   }
     880             : 
     881       34950 :   MachineRepresentation CreateRandomMachineRepresentation() {
     882      104850 :     int index = rng_->NextInt(static_cast<int>(supported_reps_.size()));
     883       69900 :     return supported_reps_[index];
     884             :   }
     885             : 
     886       44510 :   InstructionOperand CreateRandomOperand(OperandConstraint constraint,
     887             :                                          MachineRepresentation rep) {
     888             :     // Only generate a Constant if the operand is a source and we have a
     889             :     // constant with a compatible representation in stock.
     890             :     bool generate_constant =
     891       55435 :         (constraint != kCannotBeConstant) &&
     892             :         (allocated_constants_.find(rep) != allocated_constants_.end());
     893       44510 :     switch (rng_->NextInt(generate_constant ? 3 : 2)) {
     894             :       case 0:
     895       21340 :         return CreateRandomStackSlotOperand(rep);
     896             :       case 1:
     897       20430 :         return CreateRandomRegisterOperand(rep);
     898             :       case 2:
     899        2740 :         return CreateRandomConstant(rep);
     900             :     }
     901           0 :     UNREACHABLE();
     902             :   }
     903             : 
     904       20430 :   AllocatedOperand CreateRandomRegisterOperand(MachineRepresentation rep) {
     905             :     int index =
     906       40860 :         rng_->NextInt(static_cast<int>(allocated_registers_[rep].size()));
     907       40860 :     return allocated_registers_[rep][index];
     908             :   }
     909             : 
     910       28675 :   AllocatedOperand CreateRandomStackSlotOperand(MachineRepresentation rep) {
     911       57350 :     int index = rng_->NextInt(static_cast<int>(allocated_slots_[rep].size()));
     912       57350 :     return allocated_slots_[rep][index];
     913             :   }
     914             : 
     915        2740 :   ConstantOperand CreateRandomConstant(MachineRepresentation rep) {
     916             :     int index =
     917        5480 :         rng_->NextInt(static_cast<int>(allocated_constants_[rep].size()));
     918        5480 :     return allocated_constants_[rep][index];
     919             :   }
     920             : 
     921          20 :   static InstructionBlock* NewBlock(Zone* zone, RpoNumber rpo) {
     922             :     return new (zone) InstructionBlock(zone, rpo, RpoNumber::Invalid(),
     923          40 :                                        RpoNumber::Invalid(), false, false);
     924             :   }
     925             : 
     926             :   v8::base::RandomNumberGenerator* rng() const { return rng_; }
     927             :   InstructionSequence* code() { return &code_; }
     928             :   CallDescriptor* test_descriptor() { return test_descriptor_; }
     929             :   int stack_slot_count() const { return stack_slot_count_; }
     930             : 
     931             :  private:
     932             :   ZoneVector<InstructionBlock*> blocks_;
     933             :   InstructionSequence code_;
     934             :   v8::base::RandomNumberGenerator* rng_;
     935             :   // The layout describes the type of each element in the environment, in order.
     936             :   std::vector<AllocatedOperand> layout_;
     937             :   CallDescriptor* test_descriptor_;
     938             :   // Allocated constants, registers and stack slots that we can generate moves
     939             :   // with. Each per compatible representation.
     940             :   std::vector<MachineRepresentation> supported_reps_;
     941             :   std::map<MachineRepresentation, std::vector<ConstantOperand>>
     942             :       allocated_constants_;
     943             :   std::map<MachineRepresentation, std::vector<AllocatedOperand>>
     944             :       allocated_registers_;
      945             :   // with. Each is keyed by its machine representation.
     946             :       allocated_slots_;
     947             :   int stack_slot_count_;
     948             : };
     949             : 
     950             : // static
     951             : constexpr int TestEnvironment::kGeneralRegisterCount;
     952             : constexpr int TestEnvironment::kDoubleRegisterCount;
     953             : constexpr int TestEnvironment::kTaggedSlotCount;
     954             : constexpr int TestEnvironment::kFloat32SlotCount;
     955             : constexpr int TestEnvironment::kFloat64SlotCount;
     956             : constexpr int TestEnvironment::kSimd128SlotCount;
     957             : constexpr int TestEnvironment::kSmiConstantCount;
     958             : constexpr int TestEnvironment::kFloatConstantCount;
     959             : constexpr int TestEnvironment::kDoubleConstantCount;
     960             : 
     961             : // Wrapper around the CodeGenerator. Code generated by this can only be called
     962             : // using the given `TestEnvironment`.
     963             : class CodeGeneratorTester {
     964             :  public:
     965        7515 :   explicit CodeGeneratorTester(TestEnvironment* environment,
     966             :                                int extra_stack_space = 0)
     967             :       : zone_(environment->main_zone()),
     968             :         info_(ArrayVector("test"), environment->main_zone(), Code::STUB),
     969             :         linkage_(environment->test_descriptor()),
     970         135 :         frame_(environment->test_descriptor()->CalculateFixedFrameSize()) {
     971             :     // Pick half of the stack parameters at random and move them into spill
     972             :     // slots, separated by `extra_stack_space` bytes.
     973             :     // When testing a move with stack slots using CheckAssembleMove or
     974             :     // CheckAssembleSwap, we'll transparently make use of local spill slots
     975             :     // instead of stack parameters for those that were picked. This allows us to
     976             :     // test negative, positive, far and near ranges.
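                      :     // For example, with `extra_stack_space` set to 1 * KB (see kExtraSpace
                      :     // below), consecutive spill slots end up at least ~1KB apart, which
                      :     // exercises larger frame offsets in the generated moves.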
     977        7425 :     for (int i = 0; i < (environment->stack_slot_count() / 2);) {
     978             :       MachineRepresentation rep =
     979        7335 :           environment->CreateRandomMachineRepresentation();
     980             :       LocationOperand old_slot =
     981       14670 :           LocationOperand::cast(environment->CreateRandomStackSlotOperand(rep));
     982             :       // Do not pick the same slot twice.
     983       14670 :       if (GetSpillSlot(&old_slot) != spill_slots_.end()) {
     984        2655 :         continue;
     985             :       }
     986             :       LocationOperand new_slot =
     987             :           AllocatedOperand(LocationOperand::STACK_SLOT, rep,
     988        9360 :                            frame_.AllocateSpillSlot(GetSlotSizeInBytes(rep)));
     989             :       // Artificially create space on the stack by allocating a new slot.
     990        4680 :       if (extra_stack_space > 0) {
     991        1560 :         frame_.AllocateSpillSlot(extra_stack_space);
     992             :       }
     993        4680 :       spill_slots_.emplace_back(old_slot, new_slot);
     994        4680 :       i++;
     995             :     }
     996             : 
     997             :     generator_ = new CodeGenerator(
     998             :         environment->main_zone(), &frame_, &linkage_, environment->code(),
     999          45 :         &info_, environment->main_isolate(), base::Optional<OsrHelper>(),
    1000             :         kNoSourcePosition, nullptr, PoisoningMitigationLevel::kDontPoison,
    1001             :         AssemblerOptions::Default(environment->main_isolate()),
    1002         135 :         Builtins::kNoBuiltinId);
    1003             : 
    1004             :     // Force a frame to be created.
    1005          45 :     generator_->frame_access_state()->MarkHasFrame(true);
    1006          45 :     generator_->AssembleConstructFrame();
    1007             :     // TODO(all): Generate a stack check here so that we fail gracefully if the
    1008             :     // frame is too big.
    1009             : 
    1010             :     // Move chosen stack parameters into spill slots.
    1011        4770 :     for (auto move : spill_slots_) {
    1012        4680 :       generator_->AssembleMove(&move.first, &move.second);
    1013             :     }
    1014          45 :   }
    1015             : 
    1016          90 :   ~CodeGeneratorTester() { delete generator_; }
    1017             : 
    1018             :   std::vector<std::pair<LocationOperand, LocationOperand>>::iterator
    1019       67335 :   GetSpillSlot(InstructionOperand* op) {
    1020       67335 :     if (op->IsAnyStackSlot()) {
    1021             :       LocationOperand slot = LocationOperand::cast(*op);
    1022             :       return std::find_if(
    1023             :           spill_slots_.begin(), spill_slots_.end(),
    1024             :           [slot](
    1025             :               const std::pair<LocationOperand, LocationOperand>& moved_pair) {
    1026             :             return moved_pair.first.index() == slot.index();
    1027             :           });
    1028             :     } else {
    1029             :       return spill_slots_.end();
    1030             :     }
    1031             :   }
    1032             : 
    1033             :   // If the operand corresponds to a spill slot, return it. Else just pass it
    1034             :   // through.
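                      :   // For example, a stack parameter that was picked in the constructor is
                      :   // replaced by its local spill slot, so that CheckAssembleMove and
                      :   // CheckAssembleSwap operate on the spilled copy.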
    1035             :   InstructionOperand* MaybeTranslateSlot(InstructionOperand* op) {
    1036       60000 :     auto it = GetSpillSlot(op);
    1037       60000 :     if (it != spill_slots_.end()) {
    1038             :       // The second element is the spill slot associated with op.
    1039       19015 :       return &it->second;
    1040             :     } else {
    1041             :       return op;
    1042             :     }
    1043             :   }
    1044             : 
    1045          15 :   Instruction* CreateTailCall(int stack_slot_delta) {
    1046             :     int optional_padding_slot = stack_slot_delta;
    1047             :     InstructionOperand callee[] = {
    1048             :         AllocatedOperand(LocationOperand::REGISTER,
    1049             :                          MachineRepresentation::kTagged,
    1050             :                          kReturnRegister0.code()),
    1051             :         ImmediateOperand(ImmediateOperand::INLINE, -1),  // poison index.
    1052             :         ImmediateOperand(ImmediateOperand::INLINE, optional_padding_slot),
    1053          30 :         ImmediateOperand(ImmediateOperand::INLINE, stack_slot_delta)};
    1054             :     Instruction* tail_call =
    1055             :         Instruction::New(zone_, kArchTailCallCodeObject, 0, nullptr,
    1056          15 :                          arraysize(callee), callee, 0, nullptr);
    1057          15 :     return tail_call;
    1058             :   }
    1059             : 
    1060             :   enum PushTypeFlag {
    1061             :     kRegisterPush = CodeGenerator::kRegisterPush,
    1062             :     kStackSlotPush = CodeGenerator::kStackSlotPush,
    1063             :     kScalarPush = CodeGenerator::kScalarPush
    1064             :   };
    1065             : 
    1066          15 :   void CheckAssembleTailCallGaps(Instruction* instr,
    1067             :                                  int first_unused_stack_slot,
    1068             :                                  CodeGeneratorTester::PushTypeFlag push_type) {
    1069          15 :     generator_->AssembleTailCallBeforeGap(instr, first_unused_stack_slot);
    1070             : #if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_S390) || \
    1071             :     defined(V8_TARGET_ARCH_PPC)
     1072             :     // Only folding register pushes is supported on ARM, S390 and PPC.
    1073             :     bool supported = ((push_type & CodeGenerator::kRegisterPush) == push_type);
    1074             : #elif defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_IA32) || \
    1075             :     defined(V8_TARGET_ARCH_X87)
    1076          15 :     bool supported = ((push_type & CodeGenerator::kScalarPush) == push_type);
    1077             : #else
    1078             :     bool supported = false;
    1079             : #endif
    1080          15 :     if (supported) {
    1081             :       // Architectures supporting folding adjacent pushes should now have
    1082             :       // resolved all moves.
    1083          90 :       for (const auto& move :
    1084          15 :            *instr->parallel_moves()[Instruction::FIRST_GAP_POSITION]) {
    1085         120 :         CHECK(move->IsEliminated());
    1086             :       }
    1087             :     }
    1088          15 :     generator_->AssembleGaps(instr);
    1089          15 :     generator_->AssembleTailCallAfterGap(instr, first_unused_stack_slot);
    1090          15 :   }
    1091             : 
    1092       14955 :   void CheckAssembleMove(InstructionOperand* source,
    1093             :                          InstructionOperand* destination) {
    1094       14955 :     int start = generator_->tasm()->pc_offset();
    1095             :     generator_->AssembleMove(MaybeTranslateSlot(source),
    1096       14955 :                              MaybeTranslateSlot(destination));
    1097       29910 :     CHECK(generator_->tasm()->pc_offset() > start);
    1098       14955 :   }
    1099             : 
    1100       15045 :   void CheckAssembleSwap(InstructionOperand* source,
    1101             :                          InstructionOperand* destination) {
    1102       15045 :     int start = generator_->tasm()->pc_offset();
    1103             :     generator_->AssembleSwap(MaybeTranslateSlot(source),
    1104       15045 :                              MaybeTranslateSlot(destination));
    1105       30090 :     CHECK(generator_->tasm()->pc_offset() > start);
    1106       15045 :   }
    1107             : 
    1108          45 :   Handle<Code> Finalize() {
    1109          45 :     generator_->FinishCode();
    1110          45 :     generator_->safepoints()->Emit(generator_->tasm(),
    1111          90 :                                    frame_.GetTotalFrameSlotCount());
    1112          90 :     return generator_->FinalizeCode().ToHandleChecked();
    1113             :   }
    1114             : 
    1115          30 :   Handle<Code> FinalizeForExecuting() {
    1116             :     // The test environment expects us to have performed moves on stack
    1117             :     // parameters. However, some of them are mapped to local spill slots. They
    1118             :     // should be moved back into stack parameters so their values are passed
    1119             :     // along to the `teardown` function.
    1120        3180 :     for (auto move : spill_slots_) {
    1121        3150 :       generator_->AssembleMove(&move.second, &move.first);
    1122             :     }
    1123             : 
    1124          60 :     InstructionSequence* sequence = generator_->code();
    1125             : 
    1126          30 :     sequence->StartBlock(RpoNumber::FromInt(0));
     1127             :     // The environment expects this code to tail-call to its first parameter
    1128             :     // placed in `kReturnRegister0`.
    1129          30 :     sequence->AddInstruction(Instruction::New(zone_, kArchPrepareTailCall));
    1130             : 
     1131             :     // We use either zero or one slot, depending on the architecture.
    1132             :     int first_unused_stack_slot =
    1133             :         V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
    1134             :     int optional_padding_slot = first_unused_stack_slot;
    1135             :     InstructionOperand callee[] = {
    1136             :         AllocatedOperand(LocationOperand::REGISTER,
    1137             :                          MachineRepresentation::kTagged,
    1138             :                          kReturnRegister0.code()),
    1139             :         ImmediateOperand(ImmediateOperand::INLINE, -1),  // poison index.
    1140             :         ImmediateOperand(ImmediateOperand::INLINE, optional_padding_slot),
    1141          30 :         ImmediateOperand(ImmediateOperand::INLINE, first_unused_stack_slot)};
    1142             :     Instruction* tail_call =
    1143             :         Instruction::New(zone_, kArchTailCallCodeObject, 0, nullptr,
    1144          30 :                          arraysize(callee), callee, 0, nullptr);
    1145          30 :     sequence->AddInstruction(tail_call);
    1146          30 :     sequence->EndBlock(RpoNumber::FromInt(0));
    1147             : 
    1148             :     generator_->AssembleBlock(
    1149          30 :         sequence->InstructionBlockAt(RpoNumber::FromInt(0)));
    1150             : 
    1151          30 :     return Finalize();
    1152             :   }
    1153             : 
    1154             :  private:
    1155             :   Zone* zone_;
    1156             :   OptimizedCompilationInfo info_;
    1157             :   Linkage linkage_;
    1158             :   Frame frame_;
    1159             :   CodeGenerator* generator_;
    1160             :   // List of operands to be moved from stack parameters to spill slots.
    1161             :   std::vector<std::pair<LocationOperand, LocationOperand>> spill_slots_;
    1162             : };
    1163             : 
    1164             : // The following fuzz tests will assemble a lot of moves, wrap them in
    1165             : // executable native code and run them. In order to check that moves were
     1166             : // performed correctly, we need to set up an environment with an initial state
     1167             : // and get it back after the list of moves was performed.
    1168             : //
    1169             : // We have two components to do this: TestEnvironment and CodeGeneratorTester.
    1170             : //
    1171             : // The TestEnvironment is in charge of bringing up an environment consisting of
    1172             : // a set of registers, stack slots and constants, with initial values in
    1173             : // them. The CodeGeneratorTester is a wrapper around the CodeGenerator and its
    1174             : // only purpose is to generate code for a list of moves. The TestEnvironment is
    1175             : // then able to run this code against the environment and return a resulting
    1176             : // state.
    1177             : //
    1178             : // A "state" here is a packed FixedArray with tagged values which can either be
    1179             : // Smis or HeapNumbers. When calling TestEnvironment::Run(...), registers and
    1180             : // stack slots will be initialised according to this FixedArray. A new
    1181             : // FixedArray is returned containing values that were moved by the generated
    1182             : // code.
    1183             : //
    1184             : // And finally, we are able to compare the resulting FixedArray against a
    1185             : // reference, computed with a simulation of AssembleMove and AssembleSwap. See
    1186             : // SimulateMoves and SimulateSwaps.
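                      : //
                      : // Roughly, each fuzz test below follows this pattern (a sketch; see the
                      : // actual tests for the details):
                      : // ~~~
                      : // Handle<FixedArray> state_in = env.GenerateInitialState();
                      : // ParallelMove* moves = env.GenerateRandomMoves(1000);
                      : // Handle<FixedArray> expected = env.SimulateMoves(moves, state_in);
                      : // CodeGeneratorTester c(&env);
                      : // for (auto m : *moves) {
                      : //   c.CheckAssembleMove(&m->source(), &m->destination());
                      : // }
                      : // Handle<Code> test = c.FinalizeForExecuting();
                      : // Handle<FixedArray> actual = env.Run(test, state_in);
                      : // env.CheckState(actual, expected);
                      : // ~~~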
    1187             : 
    1188             : // Allocate space between slots to increase coverage of moves with larger
    1189             : // ranges. Note that this affects how much stack is allocated when running the
    1190             : // generated code. It means we have to be careful not to exceed the stack limit,
    1191             : // which is lower on Windows.
    1192             : #ifdef V8_OS_WIN
    1193             : constexpr int kExtraSpace = 0;
    1194             : #else
    1195             : constexpr int kExtraSpace = 1 * KB;
    1196             : #endif
    1197             : 
    1198       28342 : TEST(FuzzAssembleMove) {
    1199           5 :   TestEnvironment env;
    1200             : 
    1201           5 :   Handle<FixedArray> state_in = env.GenerateInitialState();
    1202           5 :   ParallelMove* moves = env.GenerateRandomMoves(1000);
    1203             : 
    1204           5 :   Handle<FixedArray> expected = env.SimulateMoves(moves, state_in);
    1205             : 
    1206             :   // Test small and potentially large ranges separately.
    1207          15 :   for (int extra_space : {0, kExtraSpace}) {
    1208          10 :     CodeGeneratorTester c(&env, extra_space);
    1209             : 
    1210       10020 :     for (auto m : *moves) {
    1211       10000 :       c.CheckAssembleMove(&m->source(), &m->destination());
    1212             :     }
    1213             : 
    1214          10 :     Handle<Code> test = c.FinalizeForExecuting();
    1215             :     if (FLAG_print_code) {
    1216             :       test->Print();
    1217             :     }
    1218             : 
    1219          10 :     Handle<FixedArray> actual = env.Run(test, state_in);
    1220          10 :     env.CheckState(actual, expected);
    1221          15 :   }
    1222           5 : }
    1223             : 
    1224       28342 : TEST(FuzzAssembleSwap) {
    1225           5 :   TestEnvironment env;
    1226             : 
    1227           5 :   Handle<FixedArray> state_in = env.GenerateInitialState();
    1228           5 :   ParallelMove* swaps = env.GenerateRandomSwaps(1000);
    1229             : 
    1230           5 :   Handle<FixedArray> expected = env.SimulateSwaps(swaps, state_in);
    1231             : 
    1232             :   // Test small and potentially large ranges separately.
    1233          15 :   for (int extra_space : {0, kExtraSpace}) {
    1234          10 :     CodeGeneratorTester c(&env, extra_space);
    1235             : 
    1236       10020 :     for (auto s : *swaps) {
    1237       10000 :       c.CheckAssembleSwap(&s->source(), &s->destination());
    1238             :     }
    1239             : 
    1240          10 :     Handle<Code> test = c.FinalizeForExecuting();
    1241             :     if (FLAG_print_code) {
    1242             :       test->Print();
    1243             :     }
    1244             : 
    1245          10 :     Handle<FixedArray> actual = env.Run(test, state_in);
    1246          10 :     env.CheckState(actual, expected);
    1247          15 :   }
    1248           5 : }
    1249             : 
    1250       28342 : TEST(FuzzAssembleMoveAndSwap) {
    1251           5 :   TestEnvironment env;
    1252             : 
    1253           5 :   Handle<FixedArray> state_in = env.GenerateInitialState();
    1254             :   Handle<FixedArray> expected =
    1255           5 :       env.main_isolate()->factory()->NewFixedArray(state_in->length());
    1256             : 
    1257             :   // Test small and potentially large ranges separately.
    1258          15 :   for (int extra_space : {0, kExtraSpace}) {
    1259          10 :     CodeGeneratorTester c(&env, extra_space);
    1260             : 
    1261          10 :     state_in->CopyTo(0, *expected, 0, state_in->length());
    1262             : 
    1263       10010 :     for (int i = 0; i < 1000; i++) {
    1264             :       // Randomly alternate between swaps and moves.
    1265       10000 :       if (env.rng()->NextInt(2) == 0) {
    1266        4955 :         ParallelMove* move = env.GenerateRandomMoves(1);
    1267        4955 :         expected = env.SimulateMoves(move, expected);
    1268        4955 :         c.CheckAssembleMove(&move->at(0)->source(),
    1269        9910 :                             &move->at(0)->destination());
    1270             :       } else {
    1271        5045 :         ParallelMove* swap = env.GenerateRandomSwaps(1);
    1272        5045 :         expected = env.SimulateSwaps(swap, expected);
    1273        5045 :         c.CheckAssembleSwap(&swap->at(0)->source(),
    1274       10090 :                             &swap->at(0)->destination());
    1275             :       }
    1276             :     }
    1277             : 
    1278          10 :     Handle<Code> test = c.FinalizeForExecuting();
    1279             :     if (FLAG_print_code) {
    1280             :       test->Print();
    1281             :     }
    1282             : 
    1283          10 :     Handle<FixedArray> actual = env.Run(test, state_in);
    1284          10 :     env.CheckState(actual, expected);
    1285          15 :   }
    1286           5 : }
    1287             : 
    1288       28342 : TEST(AssembleTailCallGap) {
    1289          15 :   const RegisterConfiguration* conf = GetRegConfig();
    1290           5 :   TestEnvironment env;
    1291             : 
    1292             :   // This test assumes at least 4 registers are allocatable.
    1293           5 :   CHECK_LE(4, conf->num_allocatable_general_registers());
    1294             : 
    1295             :   auto r0 = AllocatedOperand(LocationOperand::REGISTER,
    1296             :                              MachineRepresentation::kTagged,
    1297             :                              conf->GetAllocatableGeneralCode(0));
    1298             :   auto r1 = AllocatedOperand(LocationOperand::REGISTER,
    1299             :                              MachineRepresentation::kTagged,
    1300             :                              conf->GetAllocatableGeneralCode(1));
    1301             :   auto r2 = AllocatedOperand(LocationOperand::REGISTER,
    1302             :                              MachineRepresentation::kTagged,
    1303             :                              conf->GetAllocatableGeneralCode(2));
    1304             :   auto r3 = AllocatedOperand(LocationOperand::REGISTER,
    1305             :                              MachineRepresentation::kTagged,
    1306             :                              conf->GetAllocatableGeneralCode(3));
    1307             : 
    1308             :   auto slot_minus_4 = AllocatedOperand(LocationOperand::STACK_SLOT,
    1309             :                                        MachineRepresentation::kTagged, -4);
    1310             :   auto slot_minus_3 = AllocatedOperand(LocationOperand::STACK_SLOT,
    1311             :                                        MachineRepresentation::kTagged, -3);
    1312             :   auto slot_minus_2 = AllocatedOperand(LocationOperand::STACK_SLOT,
    1313             :                                        MachineRepresentation::kTagged, -2);
    1314             :   auto slot_minus_1 = AllocatedOperand(LocationOperand::STACK_SLOT,
    1315             :                                        MachineRepresentation::kTagged, -1);
    1316             : 
     1317             :   // Avoid slot 0 for architectures which use it to store the return address.
    1318             :   int first_slot = V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
    1319             :   auto slot_0 = AllocatedOperand(LocationOperand::STACK_SLOT,
    1320             :                                  MachineRepresentation::kTagged, first_slot);
    1321             :   auto slot_1 =
    1322             :       AllocatedOperand(LocationOperand::STACK_SLOT,
    1323             :                        MachineRepresentation::kTagged, first_slot + 1);
    1324             :   auto slot_2 =
    1325             :       AllocatedOperand(LocationOperand::STACK_SLOT,
    1326             :                        MachineRepresentation::kTagged, first_slot + 2);
    1327             :   auto slot_3 =
    1328             :       AllocatedOperand(LocationOperand::STACK_SLOT,
    1329             :                        MachineRepresentation::kTagged, first_slot + 3);
    1330             : 
     1331             :   // These tests each generate a series of moves that the code generator should
    1332             :   // detect as adjacent pushes. Depending on the architecture, we make sure
    1333             :   // these moves get eliminated.
    1334             :   // Also, disassembling with `--print-code` is useful when debugging.
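                      :   //
                      :   // For instance, the first block below emits the gap moves
                      :   //   r3 -> slot_0, r2 -> slot_1, r1 -> slot_2, r0 -> slot_3
                      :   // which CheckAssembleTailCallGaps expects to be folded into a sequence of
                      :   // register pushes on architectures that support it.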
    1335             : 
    1336             :   {
    1337             :     // Generate a series of register pushes only.
    1338           5 :     CodeGeneratorTester c(&env);
    1339           5 :     Instruction* instr = c.CreateTailCall(first_slot + 4);
    1340             :     instr
    1341             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1342             :                                   env.main_zone())
    1343           5 :         ->AddMove(r3, slot_0);
    1344             :     instr
    1345             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1346             :                                   env.main_zone())
    1347           5 :         ->AddMove(r2, slot_1);
    1348             :     instr
    1349             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1350             :                                   env.main_zone())
    1351           5 :         ->AddMove(r1, slot_2);
    1352             :     instr
    1353             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1354             :                                   env.main_zone())
    1355           5 :         ->AddMove(r0, slot_3);
    1356             : 
    1357             :     c.CheckAssembleTailCallGaps(instr, first_slot + 4,
    1358           5 :                                 CodeGeneratorTester::kRegisterPush);
    1359           5 :     Handle<Code> code = c.Finalize();
    1360             :     if (FLAG_print_code) {
    1361             :       code->Print();
    1362           5 :     }
    1363             :   }
    1364             : 
    1365             :   {
    1366             :     // Generate a series of stack pushes only.
    1367           5 :     CodeGeneratorTester c(&env);
    1368           5 :     Instruction* instr = c.CreateTailCall(first_slot + 4);
    1369             :     instr
    1370             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1371             :                                   env.main_zone())
    1372           5 :         ->AddMove(slot_minus_4, slot_0);
    1373             :     instr
    1374             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1375             :                                   env.main_zone())
    1376           5 :         ->AddMove(slot_minus_3, slot_1);
    1377             :     instr
    1378             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1379             :                                   env.main_zone())
    1380           5 :         ->AddMove(slot_minus_2, slot_2);
    1381             :     instr
    1382             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1383             :                                   env.main_zone())
    1384           5 :         ->AddMove(slot_minus_1, slot_3);
    1385             : 
    1386             :     c.CheckAssembleTailCallGaps(instr, first_slot + 4,
    1387           5 :                                 CodeGeneratorTester::kStackSlotPush);
    1388           5 :     Handle<Code> code = c.Finalize();
    1389             :     if (FLAG_print_code) {
    1390             :       code->Print();
    1391           5 :     }
    1392             :   }
    1393             : 
    1394             :   {
    1395             :     // Generate a mix of stack and register pushes.
    1396           5 :     CodeGeneratorTester c(&env);
    1397           5 :     Instruction* instr = c.CreateTailCall(first_slot + 4);
    1398             :     instr
    1399             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1400             :                                   env.main_zone())
    1401           5 :         ->AddMove(slot_minus_2, slot_0);
    1402             :     instr
    1403             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1404             :                                   env.main_zone())
    1405           5 :         ->AddMove(r1, slot_1);
    1406             :     instr
    1407             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1408             :                                   env.main_zone())
    1409           5 :         ->AddMove(slot_minus_1, slot_2);
    1410             :     instr
    1411             :         ->GetOrCreateParallelMove(Instruction::FIRST_GAP_POSITION,
    1412             :                                   env.main_zone())
    1413           5 :         ->AddMove(r0, slot_3);
    1414             : 
    1415             :     c.CheckAssembleTailCallGaps(instr, first_slot + 4,
    1416           5 :                                 CodeGeneratorTester::kScalarPush);
    1417           5 :     Handle<Code> code = c.Finalize();
    1418             :     if (FLAG_print_code) {
    1419             :       code->Print();
    1420           5 :     }
    1421           5 :   }
    1422           5 : }
    1423             : 
    1424             : }  // namespace compiler
    1425             : }  // namespace internal
    1426       85011 : }  // namespace v8

Generated by: LCOV version 1.10