/src/WasmEdge/lib/llvm/compiler.cpp
Line | Count | Source |
1 | | // SPDX-License-Identifier: Apache-2.0 |
2 | | // SPDX-FileCopyrightText: 2019-2024 Second State INC |
3 | | |
4 | | #include "llvm/compiler.h" |
5 | | |
6 | | #include "aot/version.h" |
7 | | #include "common/defines.h" |
8 | | #include "common/filesystem.h" |
9 | | #include "common/spdlog.h" |
10 | | #include "data.h" |
11 | | #include "llvm.h" |
12 | | #include "system/allocator.h" |
13 | | |
14 | | #include <algorithm> |
15 | | #include <array> |
16 | | #include <cinttypes> |
17 | | #include <cstdint> |
18 | | #include <cstdlib> |
19 | | #include <limits> |
20 | | #include <memory> |
21 | | #include <numeric> |
22 | | #include <string> |
23 | | #include <string_view> |
24 | | #include <system_error> |
25 | | |
26 | | namespace LLVM = WasmEdge::LLVM; |
27 | | using namespace std::literals; |
28 | | |
29 | | namespace { |
30 | | |
31 | | static bool |
32 | | isVoidReturn(WasmEdge::Span<const WasmEdge::ValType> ValTypes) noexcept; |
33 | | static LLVM::Type toLLVMType(LLVM::Context LLContext, |
34 | | const WasmEdge::ValType &ValType) noexcept; |
35 | | static std::vector<LLVM::Type> |
36 | | toLLVMArgsType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
37 | | WasmEdge::Span<const WasmEdge::ValType> ValTypes) noexcept; |
38 | | static LLVM::Type |
39 | | toLLVMRetsType(LLVM::Context LLContext, |
40 | | WasmEdge::Span<const WasmEdge::ValType> ValTypes) noexcept; |
41 | | static LLVM::Type |
42 | | toLLVMType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
43 | | const WasmEdge::AST::FunctionType &FuncType) noexcept; |
44 | | static LLVM::Value |
45 | | toLLVMConstantZero(LLVM::Context LLContext, |
46 | | const WasmEdge::ValType &ValType) noexcept; |
47 | | static std::vector<LLVM::Value> unpackStruct(LLVM::Builder &Builder, |
48 | | LLVM::Value Struct) noexcept; |
49 | | class FunctionCompiler; |
50 | | |
51 | | // XXX: Misalignment handler not implemented yet; forcing unaligned |
52 | | // load/store. |
53 | | static inline constexpr const bool kForceUnalignment = true; |
54 | | |
55 | | // force checking div/rem for a zero divisor |
56 | | static inline constexpr const bool kForceDivCheck = true; |
57 | | |
58 | | // Size of a ValVariant |
59 | | static inline constexpr const uint32_t kValSize = sizeof(WasmEdge::ValVariant); |
60 | | |
61 | | // Translate Compiler::OptimizationLevel to llvm::PassBuilder version |
62 | | #if LLVM_VERSION_MAJOR >= 13 |
63 | | static inline const char * |
64 | | toLLVMLevel(WasmEdge::CompilerConfigure::OptimizationLevel Level) noexcept { |
65 | | using OL = WasmEdge::CompilerConfigure::OptimizationLevel; |
66 | | switch (Level) { |
67 | | case OL::O0: |
68 | | return "default<O0>,function(tailcallelim)"; |
69 | | case OL::O1: |
70 | | return "default<O1>,function(tailcallelim)"; |
71 | | case OL::O2: |
72 | | return "default<O2>"; |
73 | | case OL::O3: |
74 | | return "default<O3>"; |
75 | | case OL::Os: |
76 | | return "default<Os>"; |
77 | | case OL::Oz: |
78 | | return "default<Oz>"; |
79 | | default: |
80 | | assumingUnreachable(); |
81 | | } |
82 | | } |
83 | | #else |
84 | | static inline std::pair<unsigned int, unsigned int> |
85 | 2.15k | toLLVMLevel(WasmEdge::CompilerConfigure::OptimizationLevel Level) noexcept { |
86 | 2.15k | using OL = WasmEdge::CompilerConfigure::OptimizationLevel; |
87 | 2.15k | switch (Level) { |
88 | 0 | case OL::O0: |
89 | 0 | return {0, 0}; |
90 | 0 | case OL::O1: |
91 | 0 | return {1, 0}; |
92 | 0 | case OL::O2: |
93 | 0 | return {2, 0}; |
94 | 2.15k | case OL::O3: |
95 | 2.15k | return {3, 0}; |
96 | 0 | case OL::Os: |
97 | 0 | return {2, 1}; |
98 | 0 | case OL::Oz: |
99 | 0 | return {2, 2}; |
100 | 0 | default: |
101 | 0 | assumingUnreachable(); |
102 | 2.15k | } |
103 | 2.15k | } |
104 | | #endif |
105 | | |
106 | | static inline LLVMCodeGenOptLevel toLLVMCodeGenLevel( |
107 | 2.15k | WasmEdge::CompilerConfigure::OptimizationLevel Level) noexcept { |
108 | 2.15k | using OL = WasmEdge::CompilerConfigure::OptimizationLevel; |
109 | 2.15k | switch (Level) { |
110 | 0 | case OL::O0: |
111 | 0 | return LLVMCodeGenLevelNone; |
112 | 0 | case OL::O1: |
113 | 0 | return LLVMCodeGenLevelLess; |
114 | 0 | case OL::O2: |
115 | 0 | return LLVMCodeGenLevelDefault; |
116 | 2.15k | case OL::O3: |
117 | 2.15k | return LLVMCodeGenLevelAggressive; |
118 | 0 | case OL::Os: |
119 | 0 | return LLVMCodeGenLevelDefault; |
120 | 0 | case OL::Oz: |
121 | 0 | return LLVMCodeGenLevelDefault; |
122 | 0 | default: |
123 | 0 | assumingUnreachable(); |
124 | 2.15k | } |
125 | 2.15k | } |
126 | | } // namespace |
127 | | |
128 | | struct LLVM::Compiler::CompileContext { |
129 | | LLVM::Context LLContext; |
130 | | LLVM::Module &LLModule; |
131 | | LLVM::Attribute Cold; |
132 | | LLVM::Attribute NoAlias; |
133 | | LLVM::Attribute NoInline; |
134 | | LLVM::Attribute NoReturn; |
135 | | LLVM::Attribute ReadOnly; |
136 | | LLVM::Attribute StrictFP; |
137 | | LLVM::Attribute UWTable; |
138 | | LLVM::Attribute NoStackArgProbe; |
139 | | LLVM::Type VoidTy; |
140 | | LLVM::Type Int8Ty; |
141 | | LLVM::Type Int16Ty; |
142 | | LLVM::Type Int32Ty; |
143 | | LLVM::Type Int64Ty; |
144 | | LLVM::Type Int128Ty; |
145 | | LLVM::Type FloatTy; |
146 | | LLVM::Type DoubleTy; |
147 | | LLVM::Type Int8x16Ty; |
148 | | LLVM::Type Int16x8Ty; |
149 | | LLVM::Type Int32x4Ty; |
150 | | LLVM::Type Floatx4Ty; |
151 | | LLVM::Type Int64x2Ty; |
152 | | LLVM::Type Doublex2Ty; |
153 | | LLVM::Type Int128x1Ty; |
154 | | LLVM::Type Int8PtrTy; |
155 | | LLVM::Type Int32PtrTy; |
156 | | LLVM::Type Int64PtrTy; |
157 | | LLVM::Type Int128PtrTy; |
158 | | LLVM::Type Int8PtrPtrTy; |
159 | | LLVM::Type ExecCtxTy; |
160 | | LLVM::Type ExecCtxPtrTy; |
161 | | LLVM::Type IntrinsicsTableTy; |
162 | | LLVM::Type IntrinsicsTablePtrTy; |
163 | | LLVM::Message SubtargetFeatures; |
164 | | |
165 | | #if defined(__x86_64__) |
166 | | #if defined(__XOP__) |
167 | | bool SupportXOP = true; |
168 | | #else |
169 | | bool SupportXOP = false; |
170 | | #endif |
171 | | |
172 | | #if defined(__SSE4_1__) |
173 | | bool SupportSSE4_1 = true; |
174 | | #else |
175 | | bool SupportSSE4_1 = false; |
176 | | #endif |
177 | | |
178 | | #if defined(__SSSE3__) |
179 | | bool SupportSSSE3 = true; |
180 | | #else |
181 | | bool SupportSSSE3 = false; |
182 | | #endif |
183 | | |
184 | | #if defined(__SSE2__) |
185 | | bool SupportSSE2 = true; |
186 | | #else |
187 | | bool SupportSSE2 = false; |
188 | | #endif |
189 | | #endif |
190 | | |
191 | | #if defined(__aarch64__) |
192 | | #if defined(__ARM_NEON__) || defined(__ARM_NEON) || defined(__ARM_NEON_FP) |
193 | | bool SupportNEON = true; |
194 | | #else |
195 | | bool SupportNEON = false; |
196 | | #endif |
197 | | #endif |
198 | | |
199 | | std::vector<const AST::CompositeType *> CompositeTypes; |
200 | | std::vector<LLVM::Value> FunctionWrappers; |
201 | | std::vector<std::tuple<uint32_t, LLVM::FunctionCallee, |
202 | | const WasmEdge::AST::CodeSegment *>> |
203 | | Functions; |
204 | | std::vector<LLVM::Type> Globals; |
205 | | LLVM::Value IntrinsicsTable; |
206 | | LLVM::FunctionCallee Trap; |
207 | | CompileContext(LLVM::Context C, LLVM::Module &M, |
208 | | bool IsGenericBinary) noexcept |
209 | 2.15k | : LLContext(C), LLModule(M), |
210 | 2.15k | Cold(LLVM::Attribute::createEnum(C, LLVM::Core::Cold, 0)), |
211 | 2.15k | NoAlias(LLVM::Attribute::createEnum(C, LLVM::Core::NoAlias, 0)), |
212 | 2.15k | NoInline(LLVM::Attribute::createEnum(C, LLVM::Core::NoInline, 0)), |
213 | 2.15k | NoReturn(LLVM::Attribute::createEnum(C, LLVM::Core::NoReturn, 0)), |
214 | 2.15k | ReadOnly(LLVM::Attribute::createEnum(C, LLVM::Core::ReadOnly, 0)), |
215 | 2.15k | StrictFP(LLVM::Attribute::createEnum(C, LLVM::Core::StrictFP, 0)), |
216 | 2.15k | UWTable(LLVM::Attribute::createEnum(C, LLVM::Core::UWTable, |
217 | 2.15k | LLVM::Core::UWTableDefault)), |
218 | | NoStackArgProbe( |
219 | 2.15k | LLVM::Attribute::createString(C, "no-stack-arg-probe"sv, {})), |
220 | 2.15k | VoidTy(LLContext.getVoidTy()), Int8Ty(LLContext.getInt8Ty()), |
221 | 2.15k | Int16Ty(LLContext.getInt16Ty()), Int32Ty(LLContext.getInt32Ty()), |
222 | 2.15k | Int64Ty(LLContext.getInt64Ty()), Int128Ty(LLContext.getInt128Ty()), |
223 | 2.15k | FloatTy(LLContext.getFloatTy()), DoubleTy(LLContext.getDoubleTy()), |
224 | 2.15k | Int8x16Ty(LLVM::Type::getVectorType(Int8Ty, 16)), |
225 | 2.15k | Int16x8Ty(LLVM::Type::getVectorType(Int16Ty, 8)), |
226 | 2.15k | Int32x4Ty(LLVM::Type::getVectorType(Int32Ty, 4)), |
227 | 2.15k | Floatx4Ty(LLVM::Type::getVectorType(FloatTy, 4)), |
228 | 2.15k | Int64x2Ty(LLVM::Type::getVectorType(Int64Ty, 2)), |
229 | 2.15k | Doublex2Ty(LLVM::Type::getVectorType(DoubleTy, 2)), |
230 | 2.15k | Int128x1Ty(LLVM::Type::getVectorType(Int128Ty, 1)), |
231 | 2.15k | Int8PtrTy(Int8Ty.getPointerTo()), Int32PtrTy(Int32Ty.getPointerTo()), |
232 | 2.15k | Int64PtrTy(Int64Ty.getPointerTo()), |
233 | 2.15k | Int128PtrTy(Int128Ty.getPointerTo()), |
234 | 2.15k | Int8PtrPtrTy(Int8PtrTy.getPointerTo()), |
235 | 2.15k | ExecCtxTy(LLVM::Type::getStructType( |
236 | 2.15k | "ExecCtx", |
237 | 2.15k | std::initializer_list<LLVM::Type>{ |
238 | | // Memory |
239 | 2.15k | Int8PtrTy.getPointerTo(), |
240 | | // Globals |
241 | 2.15k | Int128PtrTy.getPointerTo(), |
242 | | // InstrCount |
243 | 2.15k | Int64PtrTy, |
244 | | // CostTable |
245 | 2.15k | LLVM::Type::getArrayType(Int64Ty, UINT16_MAX + 1) |
246 | 2.15k | .getPointerTo(), |
247 | | // Gas |
248 | 2.15k | Int64PtrTy, |
249 | | // GasLimit |
250 | 2.15k | Int64Ty, |
251 | | // StopToken |
252 | 2.15k | Int32PtrTy, |
253 | 2.15k | })), |
254 | 2.15k | ExecCtxPtrTy(ExecCtxTy.getPointerTo()), |
255 | 2.15k | IntrinsicsTableTy(LLVM::Type::getArrayType( |
256 | 2.15k | Int8PtrTy, |
257 | 2.15k | static_cast<uint32_t>(Executable::Intrinsics::kIntrinsicMax))), |
258 | 2.15k | IntrinsicsTablePtrTy(IntrinsicsTableTy.getPointerTo()), |
259 | 2.15k | IntrinsicsTable(LLModule.addGlobal(IntrinsicsTablePtrTy, true, |
260 | 2.15k | LLVMExternalLinkage, LLVM::Value(), |
261 | 2.15k | "intrinsics")) { |
262 | 2.15k | Trap.Ty = LLVM::Type::getFunctionType(VoidTy, {Int32Ty}); |
263 | 2.15k | Trap.Fn = LLModule.addFunction(Trap.Ty, LLVMPrivateLinkage, "trap"); |
264 | 2.15k | Trap.Fn.setDSOLocal(true); |
265 | 2.15k | Trap.Fn.addFnAttr(NoStackArgProbe); |
266 | 2.15k | Trap.Fn.addFnAttr(StrictFP); |
267 | 2.15k | Trap.Fn.addFnAttr(UWTable); |
268 | 2.15k | Trap.Fn.addFnAttr(NoReturn); |
269 | 2.15k | Trap.Fn.addFnAttr(Cold); |
270 | 2.15k | Trap.Fn.addFnAttr(NoInline); |
271 | | |
272 | 2.15k | LLModule.addGlobal(Int32Ty, true, LLVMExternalLinkage, |
273 | 2.15k | LLVM::Value::getConstInt(Int32Ty, AOT::kBinaryVersion), |
274 | 2.15k | "version"); |
275 | | |
276 | 2.15k | if (!IsGenericBinary) { |
277 | 2.15k | SubtargetFeatures = LLVM::getHostCPUFeatures(); |
278 | 2.15k | auto Features = SubtargetFeatures.string_view(); |
279 | 187k | while (!Features.empty()) { |
280 | 185k | std::string_view Feature; |
281 | 185k | if (auto Pos = Features.find(','); Pos != std::string_view::npos) { |
282 | 183k | Feature = Features.substr(0, Pos); |
283 | 183k | Features = Features.substr(Pos + 1); |
284 | 183k | } else { |
285 | 2.15k | Feature = std::exchange(Features, std::string_view()); |
286 | 2.15k | } |
287 | 185k | if (Feature[0] != '+') { |
288 | 103k | continue; |
289 | 103k | } |
290 | 82.0k | Feature = Feature.substr(1); |
291 | | |
292 | 82.0k | #if defined(__x86_64__) |
293 | 82.0k | if (!SupportXOP && Feature == "xop"sv) { |
294 | 0 | SupportXOP = true; |
295 | 0 | } |
296 | 82.0k | if (!SupportSSE4_1 && Feature == "sse4.1"sv) { |
297 | 2.15k | SupportSSE4_1 = true; |
298 | 2.15k | } |
299 | 82.0k | if (!SupportSSSE3 && Feature == "ssse3"sv) { |
300 | 2.15k | SupportSSSE3 = true; |
301 | 2.15k | } |
302 | 82.0k | if (!SupportSSE2 && Feature == "sse2"sv) { |
303 | 0 | SupportSSE2 = true; |
304 | 0 | } |
305 | | #elif defined(__aarch64__) |
306 | | if (!SupportNEON && Feature == "neon"sv) { |
307 | | SupportNEON = true; |
308 | | } |
309 | | #endif |
310 | 82.0k | } |
311 | 2.15k | } |
312 | | |
313 | 2.15k | { |
314 | | // create trap |
315 | 2.15k | LLVM::Builder Builder(LLContext); |
316 | 2.15k | Builder.positionAtEnd( |
317 | 2.15k | LLVM::BasicBlock::create(LLContext, Trap.Fn, "entry")); |
318 | 2.15k | auto FnTy = LLVM::Type::getFunctionType(VoidTy, {Int32Ty}); |
319 | 2.15k | auto CallTrap = Builder.createCall( |
320 | 2.15k | getIntrinsic(Builder, Executable::Intrinsics::kTrap, FnTy), |
321 | 2.15k | {Trap.Fn.getFirstParam()}); |
322 | 2.15k | CallTrap.addCallSiteAttribute(NoReturn); |
323 | 2.15k | Builder.createUnreachable(); |
324 | 2.15k | } |
325 | 2.15k | } |
326 | | LLVM::Value getMemory(LLVM::Builder &Builder, LLVM::Value ExecCtx, |
327 | 21.5k | uint32_t Index) noexcept { |
328 | 21.5k | auto Array = Builder.createExtractValue(ExecCtx, 0); |
329 | | #if WASMEDGE_ALLOCATOR_IS_STABLE |
330 | | auto VPtr = Builder.createLoad( |
331 | | Int8PtrTy, Builder.createInBoundsGEP1(Int8PtrTy, Array, |
332 | | LLContext.getInt64(Index))); |
333 | | VPtr.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
334 | | LLVM::Metadata(LLContext, {})); |
335 | | #else |
336 | 21.5k | auto VPtrPtr = Builder.createLoad( |
337 | 21.5k | Int8PtrPtrTy, Builder.createInBoundsGEP1(Int8PtrPtrTy, Array, |
338 | 21.5k | LLContext.getInt64(Index))); |
339 | 21.5k | VPtrPtr.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
340 | 21.5k | LLVM::Metadata(LLContext, {})); |
341 | 21.5k | auto VPtr = Builder.createLoad( |
342 | 21.5k | Int8PtrTy, |
343 | 21.5k | Builder.createInBoundsGEP1(Int8PtrTy, VPtrPtr, LLContext.getInt64(0))); |
344 | 21.5k | #endif |
345 | 21.5k | return Builder.createBitCast(VPtr, Int8PtrTy); |
346 | 21.5k | } |
347 | | std::pair<LLVM::Type, LLVM::Value> getGlobal(LLVM::Builder &Builder, |
348 | | LLVM::Value ExecCtx, |
349 | 388 | uint32_t Index) noexcept { |
350 | 388 | auto Ty = Globals[Index]; |
351 | 388 | auto Array = Builder.createExtractValue(ExecCtx, 1); |
352 | 388 | auto VPtr = Builder.createLoad( |
353 | 388 | Int128PtrTy, Builder.createInBoundsGEP1(Int8PtrTy, Array, |
354 | 388 | LLContext.getInt64(Index))); |
355 | 388 | VPtr.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
356 | 388 | LLVM::Metadata(LLContext, {})); |
357 | 388 | auto Ptr = Builder.createBitCast(VPtr, Ty.getPointerTo()); |
358 | 388 | return {Ty, Ptr}; |
359 | 388 | } |
360 | | LLVM::Value getInstrCount(LLVM::Builder &Builder, |
361 | 0 | LLVM::Value ExecCtx) noexcept { |
362 | 0 | return Builder.createExtractValue(ExecCtx, 2); |
363 | 0 | } |
364 | | LLVM::Value getCostTable(LLVM::Builder &Builder, |
365 | 0 | LLVM::Value ExecCtx) noexcept { |
366 | 0 | return Builder.createExtractValue(ExecCtx, 3); |
367 | 0 | } |
368 | 0 | LLVM::Value getGas(LLVM::Builder &Builder, LLVM::Value ExecCtx) noexcept { |
369 | 0 | return Builder.createExtractValue(ExecCtx, 4); |
370 | 0 | } |
371 | | LLVM::Value getGasLimit(LLVM::Builder &Builder, |
372 | 0 | LLVM::Value ExecCtx) noexcept { |
373 | 0 | return Builder.createExtractValue(ExecCtx, 5); |
374 | 0 | } |
375 | | LLVM::Value getStopToken(LLVM::Builder &Builder, |
376 | 0 | LLVM::Value ExecCtx) noexcept { |
377 | 0 | return Builder.createExtractValue(ExecCtx, 6); |
378 | 0 | } |
379 | | LLVM::FunctionCallee getIntrinsic(LLVM::Builder &Builder, |
380 | | Executable::Intrinsics Index, |
381 | 5.93k | LLVM::Type Ty) noexcept { |
382 | 5.93k | const auto Value = static_cast<uint32_t>(Index); |
383 | 5.93k | auto PtrTy = Ty.getPointerTo(); |
384 | 5.93k | auto PtrPtrTy = PtrTy.getPointerTo(); |
385 | 5.93k | auto IT = Builder.createLoad(IntrinsicsTablePtrTy, IntrinsicsTable); |
386 | 5.93k | IT.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
387 | 5.93k | LLVM::Metadata(LLContext, {})); |
388 | 5.93k | auto VPtr = |
389 | 5.93k | Builder.createInBoundsGEP2(IntrinsicsTableTy, IT, LLContext.getInt64(0), |
390 | 5.93k | LLContext.getInt64(Value)); |
391 | 5.93k | auto Ptr = Builder.createBitCast(VPtr, PtrPtrTy); |
392 | 5.93k | return {Ty, Builder.createLoad(PtrTy, Ptr)}; |
393 | 5.93k | } |
394 | | std::pair<std::vector<ValType>, std::vector<ValType>> |
395 | 18.6k | resolveBlockType(const BlockType &BType) const noexcept { |
396 | 18.6k | using VecT = std::vector<ValType>; |
397 | 18.6k | using RetT = std::pair<VecT, VecT>; |
398 | 18.6k | if (BType.isEmpty()) { |
399 | 2.38k | return RetT{}; |
400 | 2.38k | } |
401 | 16.2k | if (BType.isValType()) { |
402 | 2.72k | return RetT{{}, {BType.getValType()}}; |
403 | 13.5k | } else { |
404 | | // Type index case. t2* = type[index].returns |
405 | 13.5k | const uint32_t TypeIdx = BType.getTypeIndex(); |
406 | 13.5k | const auto &FType = CompositeTypes[TypeIdx]->getFuncType(); |
407 | 13.5k | return RetT{ |
408 | 13.5k | VecT(FType.getParamTypes().begin(), FType.getParamTypes().end()), |
409 | 13.5k | VecT(FType.getReturnTypes().begin(), FType.getReturnTypes().end())}; |
410 | 13.5k | } |
411 | 16.2k | } |
412 | | }; |
413 | | |
414 | | namespace { |
415 | | |
416 | | using namespace WasmEdge; |
417 | | |
418 | 34.2k | static bool isVoidReturn(Span<const ValType> ValTypes) noexcept { |
419 | 34.2k | return ValTypes.empty(); |
420 | 34.2k | } |
421 | | |
422 | | static LLVM::Type toLLVMType(LLVM::Context LLContext, |
423 | 2.49M | const ValType &ValType) noexcept { |
424 | 2.49M | switch (ValType.getCode()) { |
425 | 275k | case TypeCode::I32: |
426 | 275k | return LLContext.getInt32Ty(); |
427 | 206k | case TypeCode::I64: |
428 | 206k | return LLContext.getInt64Ty(); |
429 | 0 | case TypeCode::Ref: |
430 | 36.6k | case TypeCode::RefNull: |
431 | 1.71M | case TypeCode::V128: |
432 | 1.71M | return LLVM::Type::getVectorType(LLContext.getInt64Ty(), 2); |
433 | 270k | case TypeCode::F32: |
434 | 270k | return LLContext.getFloatTy(); |
435 | 20.2k | case TypeCode::F64: |
436 | 20.2k | return LLContext.getDoubleTy(); |
437 | 0 | default: |
438 | 0 | assumingUnreachable(); |
439 | 2.49M | } |
440 | 2.49M | } |
441 | | |
442 | | static std::vector<LLVM::Type> |
443 | | toLLVMTypeVector(LLVM::Context LLContext, |
444 | 19.0k | Span<const ValType> ValTypes) noexcept { |
445 | 19.0k | std::vector<LLVM::Type> Result; |
446 | 19.0k | Result.reserve(ValTypes.size()); |
447 | 19.0k | for (const auto &Type : ValTypes) { |
448 | 18.7k | Result.push_back(toLLVMType(LLContext, Type)); |
449 | 18.7k | } |
450 | 19.0k | return Result; |
451 | 19.0k | } |
452 | | |
453 | | static std::vector<LLVM::Type> |
454 | | toLLVMArgsType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
455 | 15.5k | Span<const ValType> ValTypes) noexcept { |
456 | 15.5k | auto Result = toLLVMTypeVector(LLContext, ValTypes); |
457 | 15.5k | Result.insert(Result.begin(), ExecCtxPtrTy); |
458 | 15.5k | return Result; |
459 | 15.5k | } |
460 | | |
461 | | static LLVM::Type toLLVMRetsType(LLVM::Context LLContext, |
462 | 15.5k | Span<const ValType> ValTypes) noexcept { |
463 | 15.5k | if (isVoidReturn(ValTypes)) { |
464 | 3.59k | return LLContext.getVoidTy(); |
465 | 3.59k | } |
466 | 11.9k | if (ValTypes.size() == 1) { |
467 | 11.3k | return toLLVMType(LLContext, ValTypes.front()); |
468 | 11.3k | } |
469 | 592 | std::vector<LLVM::Type> Result; |
470 | 592 | Result.reserve(ValTypes.size()); |
471 | 1.56k | for (const auto &Type : ValTypes) { |
472 | 1.56k | Result.push_back(toLLVMType(LLContext, Type)); |
473 | 1.56k | } |
474 | 592 | return LLVM::Type::getStructType(Result); |
475 | 11.9k | } |
476 | | |
477 | | static LLVM::Type toLLVMType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
478 | 15.5k | const AST::FunctionType &FuncType) noexcept { |
479 | 15.5k | auto ArgsTy = |
480 | 15.5k | toLLVMArgsType(LLContext, ExecCtxPtrTy, FuncType.getParamTypes()); |
481 | 15.5k | auto RetTy = toLLVMRetsType(LLContext, FuncType.getReturnTypes()); |
482 | 15.5k | return LLVM::Type::getFunctionType(RetTy, ArgsTy); |
483 | 15.5k | } |
484 | | |
485 | | static LLVM::Value toLLVMConstantZero(LLVM::Context LLContext, |
486 | 2.45M | const ValType &ValType) noexcept { |
487 | 2.45M | switch (ValType.getCode()) { |
488 | 259k | case TypeCode::I32: |
489 | 259k | return LLVM::Value::getConstNull(LLContext.getInt32Ty()); |
490 | 203k | case TypeCode::I64: |
491 | 203k | return LLVM::Value::getConstNull(LLContext.getInt64Ty()); |
492 | 0 | case TypeCode::Ref: |
493 | 36.1k | case TypeCode::RefNull: |
494 | 1.70M | case TypeCode::V128: |
495 | 1.70M | return LLVM::Value::getConstNull( |
496 | 1.70M | LLVM::Type::getVectorType(LLContext.getInt64Ty(), 2)); |
497 | 268k | case TypeCode::F32: |
498 | 268k | return LLVM::Value::getConstNull(LLContext.getFloatTy()); |
499 | 17.8k | case TypeCode::F64: |
500 | 17.8k | return LLVM::Value::getConstNull(LLContext.getDoubleTy()); |
501 | 0 | default: |
502 | 0 | assumingUnreachable(); |
503 | 2.45M | } |
504 | 2.45M | } |
505 | | |
506 | | class FunctionCompiler { |
507 | | struct Control; |
508 | | |
509 | | public: |
510 | | FunctionCompiler(LLVM::Compiler::CompileContext &Context, |
511 | | LLVM::FunctionCallee F, Span<const ValType> Locals, |
512 | | bool Interruptible, bool InstructionCounting, |
513 | | bool GasMeasuring) noexcept |
514 | 10.7k | : Context(Context), LLContext(Context.LLContext), |
515 | 10.7k | Interruptible(Interruptible), F(F), Builder(LLContext) { |
516 | 10.7k | if (F.Fn) { |
517 | 10.7k | Builder.positionAtEnd(LLVM::BasicBlock::create(LLContext, F.Fn, "entry")); |
518 | 10.7k | ExecCtx = Builder.createLoad(Context.ExecCtxTy, F.Fn.getFirstParam()); |
519 | | |
520 | 10.7k | if (InstructionCounting) { |
521 | 0 | LocalInstrCount = Builder.createAlloca(Context.Int64Ty); |
522 | 0 | Builder.createStore(LLContext.getInt64(0), LocalInstrCount); |
523 | 0 | } |
524 | | |
525 | 10.7k | if (GasMeasuring) { |
526 | 0 | LocalGas = Builder.createAlloca(Context.Int64Ty); |
527 | 0 | Builder.createStore(LLContext.getInt64(0), LocalGas); |
528 | 0 | } |
529 | | |
530 | 20.6k | for (LLVM::Value Arg = F.Fn.getFirstParam().getNextParam(); Arg; |
531 | 10.7k | Arg = Arg.getNextParam()) { |
532 | 9.87k | LLVM::Type Ty = Arg.getType(); |
533 | 9.87k | LLVM::Value ArgPtr = Builder.createAlloca(Ty); |
534 | 9.87k | Builder.createStore(Arg, ArgPtr); |
535 | 9.87k | Local.emplace_back(Ty, ArgPtr); |
536 | 9.87k | } |
537 | | |
538 | 2.45M | for (const auto &Type : Locals) { |
539 | 2.45M | LLVM::Type Ty = toLLVMType(LLContext, Type); |
540 | 2.45M | LLVM::Value ArgPtr = Builder.createAlloca(Ty); |
541 | 2.45M | Builder.createStore(toLLVMConstantZero(LLContext, Type), ArgPtr); |
542 | 2.45M | Local.emplace_back(Ty, ArgPtr); |
543 | 2.45M | } |
544 | 10.7k | } |
545 | 10.7k | } |
546 | | |
547 | 28.8k | LLVM::BasicBlock getTrapBB(ErrCode::Value Error) noexcept { |
548 | 28.8k | if (auto Iter = TrapBB.find(Error); Iter != TrapBB.end()) { |
549 | 25.9k | return Iter->second; |
550 | 25.9k | } |
551 | 2.92k | auto BB = LLVM::BasicBlock::create(LLContext, F.Fn, "trap"); |
552 | 2.92k | TrapBB.emplace(Error, BB); |
553 | 2.92k | return BB; |
554 | 28.8k | } |
555 | | |
556 | | void |
557 | | compile(const AST::CodeSegment &Code, |
558 | 10.7k | std::pair<std::vector<ValType>, std::vector<ValType>> Type) noexcept { |
559 | 10.7k | auto RetBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ret"); |
560 | 10.7k | Type.first.clear(); |
561 | 10.7k | enterBlock(RetBB, {}, {}, {}, std::move(Type)); |
562 | 10.7k | compile(Code.getExpr().getInstrs()); |
563 | 10.7k | assuming(ControlStack.empty()); |
564 | 10.7k | compileReturn(); |
565 | | |
566 | 10.7k | for (auto &[Error, BB] : TrapBB) { |
567 | 2.92k | Builder.positionAtEnd(BB); |
568 | 2.92k | updateInstrCount(); |
569 | 2.92k | updateGasAtTrap(); |
570 | 2.92k | auto CallTrap = Builder.createCall( |
571 | 2.92k | Context.Trap, {LLContext.getInt32(static_cast<uint32_t>(Error))}); |
572 | 2.92k | CallTrap.addCallSiteAttribute(Context.NoReturn); |
573 | 2.92k | Builder.createUnreachable(); |
574 | 2.92k | } |
575 | 10.7k | } |
576 | | |
577 | 10.7k | void compile(AST::InstrView Instrs) noexcept { |
578 | 1.39M | auto Dispatch = [this](const AST::Instruction &Instr) -> void { |
579 | 1.39M | switch (Instr.getOpCode()) { |
580 | | // Control instructions (for blocks) |
581 | 3.80k | case OpCode::Block: { |
582 | 3.80k | auto Block = LLVM::BasicBlock::create(LLContext, F.Fn, "block"); |
583 | 3.80k | auto EndBlock = LLVM::BasicBlock::create(LLContext, F.Fn, "block.end"); |
584 | 3.80k | Builder.createBr(Block); |
585 | | |
586 | 3.80k | Builder.positionAtEnd(Block); |
587 | 3.80k | auto Type = Context.resolveBlockType(Instr.getBlockType()); |
588 | 3.80k | const auto Arity = Type.first.size(); |
589 | 3.80k | std::vector<LLVM::Value> Args(Arity); |
590 | 3.80k | if (isUnreachable()) { |
591 | 847 | for (size_t I = 0; I < Arity; ++I) { |
592 | 264 | auto Ty = toLLVMType(LLContext, Type.first[I]); |
593 | 264 | Args[I] = LLVM::Value::getUndef(Ty); |
594 | 264 | } |
595 | 3.21k | } else { |
596 | 3.66k | for (size_t I = 0; I < Arity; ++I) { |
597 | 443 | const size_t J = Arity - 1 - I; |
598 | 443 | Args[J] = stackPop(); |
599 | 443 | } |
600 | 3.21k | } |
601 | 3.80k | enterBlock(EndBlock, {}, {}, std::move(Args), std::move(Type)); |
602 | 3.80k | checkStop(); |
603 | 3.80k | updateGas(); |
604 | 3.80k | return; |
605 | 0 | } |
606 | 1.61k | case OpCode::Loop: { |
607 | 1.61k | auto Curr = Builder.getInsertBlock(); |
608 | 1.61k | auto Loop = LLVM::BasicBlock::create(LLContext, F.Fn, "loop"); |
609 | 1.61k | auto EndLoop = LLVM::BasicBlock::create(LLContext, F.Fn, "loop.end"); |
610 | 1.61k | Builder.createBr(Loop); |
611 | | |
612 | 1.61k | Builder.positionAtEnd(Loop); |
613 | 1.61k | auto Type = Context.resolveBlockType(Instr.getBlockType()); |
614 | 1.61k | const auto Arity = Type.first.size(); |
615 | 1.61k | std::vector<LLVM::Value> Args(Arity); |
616 | 1.61k | if (isUnreachable()) { |
617 | 757 | for (size_t I = 0; I < Arity; ++I) { |
618 | 324 | auto Ty = toLLVMType(LLContext, Type.first[I]); |
619 | 324 | auto Value = LLVM::Value::getUndef(Ty); |
620 | 324 | auto PHINode = Builder.createPHI(Ty); |
621 | 324 | PHINode.addIncoming(Value, Curr); |
622 | 324 | Args[I] = PHINode; |
623 | 324 | } |
624 | 1.18k | } else { |
625 | 1.81k | for (size_t I = 0; I < Arity; ++I) { |
626 | 631 | const size_t J = Arity - 1 - I; |
627 | 631 | auto Value = stackPop(); |
628 | 631 | auto PHINode = Builder.createPHI(Value.getType()); |
629 | 631 | PHINode.addIncoming(Value, Curr); |
630 | 631 | Args[J] = PHINode; |
631 | 631 | } |
632 | 1.18k | } |
633 | 1.61k | enterBlock(Loop, EndLoop, {}, std::move(Args), std::move(Type)); |
634 | 1.61k | checkStop(); |
635 | 1.61k | updateGas(); |
636 | 1.61k | return; |
637 | 0 | } |
638 | 2.48k | case OpCode::If: { |
639 | 2.48k | auto Then = LLVM::BasicBlock::create(LLContext, F.Fn, "then"); |
640 | 2.48k | auto Else = LLVM::BasicBlock::create(LLContext, F.Fn, "else"); |
641 | 2.48k | auto EndIf = LLVM::BasicBlock::create(LLContext, F.Fn, "if.end"); |
642 | 2.48k | LLVM::Value Cond; |
643 | 2.48k | if (isUnreachable()) { |
644 | 490 | Cond = LLVM::Value::getUndef(LLContext.getInt1Ty()); |
645 | 1.99k | } else { |
646 | 1.99k | Cond = Builder.createICmpNE(stackPop(), LLContext.getInt32(0)); |
647 | 1.99k | } |
648 | 2.48k | Builder.createCondBr(Cond, Then, Else); |
649 | | |
650 | 2.48k | Builder.positionAtEnd(Then); |
651 | 2.48k | auto Type = Context.resolveBlockType(Instr.getBlockType()); |
652 | 2.48k | const auto Arity = Type.first.size(); |
653 | 2.48k | std::vector<LLVM::Value> Args(Arity); |
654 | 2.48k | if (isUnreachable()) { |
655 | 991 | for (size_t I = 0; I < Arity; ++I) { |
656 | 501 | auto Ty = toLLVMType(LLContext, Type.first[I]); |
657 | 501 | Args[I] = LLVM::Value::getUndef(Ty); |
658 | 501 | } |
659 | 1.99k | } else { |
660 | 2.80k | for (size_t I = 0; I < Arity; ++I) { |
661 | 807 | const size_t J = Arity - 1 - I; |
662 | 807 | Args[J] = stackPop(); |
663 | 807 | } |
664 | 1.99k | } |
665 | 2.48k | enterBlock(EndIf, {}, Else, std::move(Args), std::move(Type)); |
666 | 2.48k | return; |
667 | 0 | } |
668 | 18.6k | case OpCode::End: { |
669 | 18.6k | auto Entry = leaveBlock(); |
670 | 18.6k | if (Entry.ElseBlock) { |
671 | 1.10k | auto Block = Builder.getInsertBlock(); |
672 | 1.10k | Builder.positionAtEnd(Entry.ElseBlock); |
673 | 1.10k | enterBlock(Block, {}, {}, std::move(Entry.Args), |
674 | 1.10k | std::move(Entry.Type), std::move(Entry.ReturnPHI)); |
675 | 1.10k | Entry = leaveBlock(); |
676 | 1.10k | } |
677 | 18.6k | buildPHI(Entry.Type.second, Entry.ReturnPHI); |
678 | 18.6k | return; |
679 | 0 | } |
680 | 1.38k | case OpCode::Else: { |
681 | 1.38k | auto Entry = leaveBlock(); |
682 | 1.38k | Builder.positionAtEnd(Entry.ElseBlock); |
683 | 1.38k | enterBlock(Entry.JumpBlock, {}, {}, std::move(Entry.Args), |
684 | 1.38k | std::move(Entry.Type), std::move(Entry.ReturnPHI)); |
685 | 1.38k | return; |
686 | 0 | } |
687 | 1.37M | default: |
688 | 1.37M | break; |
689 | 1.39M | } |
690 | | |
691 | 1.37M | if (isUnreachable()) { |
692 | 411k | return; |
693 | 411k | } |
694 | | |
695 | 959k | switch (Instr.getOpCode()) { |
696 | | // Control instructions |
697 | 2.93k | case OpCode::Unreachable: |
698 | 2.93k | Builder.createBr(getTrapBB(ErrCode::Value::Unreachable)); |
699 | 2.93k | setUnreachable(); |
700 | 2.93k | Builder.positionAtEnd( |
701 | 2.93k | LLVM::BasicBlock::create(LLContext, F.Fn, "unreachable.end")); |
702 | 2.93k | break; |
703 | 37.8k | case OpCode::Nop: |
704 | 37.8k | break; |
705 | | // LEGACY-EH: remove the `Try` cases after deprecating legacy EH. |
706 | | // case OpCode::Try: |
707 | | // case OpCode::Throw: |
708 | | // case OpCode::Throw_ref: |
709 | 857 | case OpCode::Br: { |
710 | 857 | const auto Label = Instr.getJump().TargetIndex; |
711 | 857 | setLableJumpPHI(Label); |
712 | 857 | Builder.createBr(getLabel(Label)); |
713 | 857 | setUnreachable(); |
714 | 857 | Builder.positionAtEnd( |
715 | 857 | LLVM::BasicBlock::create(LLContext, F.Fn, "br.end")); |
716 | 857 | break; |
717 | 0 | } |
718 | 356 | case OpCode::Br_if: { |
719 | 356 | const auto Label = Instr.getJump().TargetIndex; |
720 | 356 | auto Cond = Builder.createICmpNE(stackPop(), LLContext.getInt32(0)); |
721 | 356 | setLableJumpPHI(Label); |
722 | 356 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "br_if.end"); |
723 | 356 | Builder.createCondBr(Cond, getLabel(Label), Next); |
724 | 356 | Builder.positionAtEnd(Next); |
725 | 356 | break; |
726 | 0 | } |
727 | 988 | case OpCode::Br_table: { |
728 | 988 | auto LabelTable = Instr.getLabelList(); |
729 | 988 | assuming(LabelTable.size() <= std::numeric_limits<uint32_t>::max()); |
730 | 988 | const auto LabelTableSize = |
731 | 988 | static_cast<uint32_t>(LabelTable.size() - 1); |
732 | 988 | auto Value = stackPop(); |
733 | 988 | setLableJumpPHI(LabelTable[LabelTableSize].TargetIndex); |
734 | 988 | auto Switch = Builder.createSwitch( |
735 | 988 | Value, getLabel(LabelTable[LabelTableSize].TargetIndex), |
736 | 988 | LabelTableSize); |
737 | 36.8k | for (uint32_t I = 0; I < LabelTableSize; ++I) { |
738 | 35.8k | setLableJumpPHI(LabelTable[I].TargetIndex); |
739 | 35.8k | Switch.addCase(LLContext.getInt32(I), |
740 | 35.8k | getLabel(LabelTable[I].TargetIndex)); |
741 | 35.8k | } |
742 | 988 | setUnreachable(); |
743 | 988 | Builder.positionAtEnd( |
744 | 988 | LLVM::BasicBlock::create(LLContext, F.Fn, "br_table.end")); |
745 | 988 | break; |
746 | 988 | } |
747 | 0 | case OpCode::Br_on_null: { |
748 | 0 | const auto Label = Instr.getJump().TargetIndex; |
749 | 0 | auto Value = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
750 | 0 | auto Cond = Builder.createICmpEQ( |
751 | 0 | Builder.createExtractElement(Value, LLContext.getInt64(1)), |
752 | 0 | LLContext.getInt64(0)); |
753 | 0 | setLableJumpPHI(Label); |
754 | 0 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "br_on_null.end"); |
755 | 0 | Builder.createCondBr(Cond, getLabel(Label), Next); |
756 | 0 | Builder.positionAtEnd(Next); |
757 | 0 | stackPush(Value); |
758 | 0 | break; |
759 | 988 | } |
760 | 0 | case OpCode::Br_on_non_null: { |
761 | 0 | const auto Label = Instr.getJump().TargetIndex; |
762 | 0 | auto Cond = Builder.createICmpNE( |
763 | 0 | Builder.createExtractElement( |
764 | 0 | Builder.createBitCast(Stack.back(), Context.Int64x2Ty), |
765 | 0 | LLContext.getInt64(1)), |
766 | 0 | LLContext.getInt64(0)); |
767 | 0 | setLableJumpPHI(Label); |
768 | 0 | auto Next = |
769 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "br_on_non_null.end"); |
770 | 0 | Builder.createCondBr(Cond, getLabel(Label), Next); |
771 | 0 | Builder.positionAtEnd(Next); |
772 | 0 | stackPop(); |
773 | 0 | break; |
774 | 988 | } |
775 | 0 | case OpCode::Br_on_cast: |
776 | 0 | case OpCode::Br_on_cast_fail: { |
777 | 0 | auto Ref = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
778 | 0 | const auto Label = Instr.getBrCast().Jump.TargetIndex; |
779 | 0 | std::array<uint8_t, 16> Buf = {0}; |
780 | 0 | std::copy_n(Instr.getBrCast().RType2.getRawData().cbegin(), 8, |
781 | 0 | Buf.begin()); |
782 | 0 | auto VType = Builder.createExtractElement( |
783 | 0 | Builder.createBitCast(LLVM::Value::getConstVector8(LLContext, Buf), |
784 | 0 | Context.Int64x2Ty), |
785 | 0 | LLContext.getInt64(0)); |
786 | 0 | auto IsRefTest = Builder.createCall( |
787 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefTest, |
788 | 0 | LLVM::Type::getFunctionType( |
789 | 0 | Context.Int32Ty, |
790 | 0 | {Context.Int64x2Ty, Context.Int64Ty}, |
791 | 0 | false)), |
792 | 0 | {Ref, VType}); |
793 | 0 | auto Cond = |
794 | 0 | (Instr.getOpCode() == OpCode::Br_on_cast) |
795 | 0 | ? Builder.createICmpNE(IsRefTest, LLContext.getInt32(0)) |
796 | 0 | : Builder.createICmpEQ(IsRefTest, LLContext.getInt32(0)); |
797 | 0 | setLableJumpPHI(Label); |
798 | 0 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "br_on_cast.end"); |
799 | 0 | Builder.createCondBr(Cond, getLabel(Label), Next); |
800 | 0 | Builder.positionAtEnd(Next); |
801 | 0 | break; |
802 | 0 | } |
803 | 664 | case OpCode::Return: |
804 | 664 | compileReturn(); |
805 | 664 | setUnreachable(); |
806 | 664 | Builder.positionAtEnd( |
807 | 664 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret.end")); |
808 | 664 | break; |
809 | 2.86k | case OpCode::Call: |
810 | 2.86k | updateInstrCount(); |
811 | 2.86k | updateGas(); |
812 | 2.86k | compileCallOp(Instr.getTargetIndex()); |
813 | 2.86k | break; |
814 | 592 | case OpCode::Call_indirect: |
815 | 592 | updateInstrCount(); |
816 | 592 | updateGas(); |
817 | 592 | compileIndirectCallOp(Instr.getSourceIndex(), Instr.getTargetIndex()); |
818 | 592 | break; |
819 | 0 | case OpCode::Return_call: |
820 | 0 | updateInstrCount(); |
821 | 0 | updateGas(); |
822 | 0 | compileReturnCallOp(Instr.getTargetIndex()); |
823 | 0 | setUnreachable(); |
824 | 0 | Builder.positionAtEnd( |
825 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret_call.end")); |
826 | 0 | break; |
827 | 0 | case OpCode::Return_call_indirect: |
828 | 0 | updateInstrCount(); |
829 | 0 | updateGas(); |
830 | 0 | compileReturnIndirectCallOp(Instr.getSourceIndex(), |
831 | 0 | Instr.getTargetIndex()); |
832 | 0 | setUnreachable(); |
833 | 0 | Builder.positionAtEnd( |
834 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret_call_indir.end")); |
835 | 0 | break; |
836 | 0 | case OpCode::Call_ref: |
837 | 0 | updateInstrCount(); |
838 | 0 | updateGas(); |
839 | 0 | compileCallRefOp(Instr.getTargetIndex()); |
840 | 0 | break; |
841 | 0 | case OpCode::Return_call_ref: |
842 | 0 | updateInstrCount(); |
843 | 0 | updateGas(); |
844 | 0 | compileReturnCallRefOp(Instr.getTargetIndex()); |
845 | 0 | setUnreachable(); |
846 | 0 | Builder.positionAtEnd( |
847 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret_call_ref.end")); |
848 | 0 | break; |
849 | | // LEGACY-EH: remove the `Catch` cases after deprecating legacy EH. |
850 | | // case OpCode::Catch: |
851 | | // case OpCode::Catch_all: |
852 | | // case OpCode::Try_table: |
853 | | |
854 | | // Reference Instructions |
855 | 879 | case OpCode::Ref__null: { |
856 | 879 | std::array<uint8_t, 16> Buf = {0}; |
857 | | // For null references, the dynamic type needs to be scaled down. |
858 | 879 | ValType VType; |
859 | 879 | if (Instr.getValType().isAbsHeapType()) { |
860 | 879 | switch (Instr.getValType().getHeapTypeCode()) { |
861 | 0 | case TypeCode::NullFuncRef: |
862 | 386 | case TypeCode::FuncRef: |
863 | 386 | VType = TypeCode::NullFuncRef; |
864 | 386 | break; |
865 | 0 | case TypeCode::NullExternRef: |
866 | 493 | case TypeCode::ExternRef: |
867 | 493 | VType = TypeCode::NullExternRef; |
868 | 493 | break; |
869 | 0 | case TypeCode::NullRef: |
870 | 0 | case TypeCode::AnyRef: |
871 | 0 | case TypeCode::EqRef: |
872 | 0 | case TypeCode::I31Ref: |
873 | 0 | case TypeCode::StructRef: |
874 | 0 | case TypeCode::ArrayRef: |
875 | 0 | VType = TypeCode::NullRef; |
876 | 0 | break; |
877 | 0 | default: |
878 | 0 | assumingUnreachable(); |
879 | 879 | } |
880 | 879 | } else { |
881 | 0 | assuming(Instr.getValType().getTypeIndex() < |
882 | 0 | Context.CompositeTypes.size()); |
883 | 0 | const auto *CompType = |
884 | 0 | Context.CompositeTypes[Instr.getValType().getTypeIndex()]; |
885 | 0 | assuming(CompType != nullptr); |
886 | 0 | if (CompType->isFunc()) { |
887 | 0 | VType = TypeCode::NullFuncRef; |
888 | 0 | } else { |
889 | 0 | VType = TypeCode::NullRef; |
890 | 0 | } |
891 | 0 | } |
892 | 879 | std::copy_n(VType.getRawData().cbegin(), 8, Buf.begin()); |
893 | 879 | stackPush(Builder.createBitCast( |
894 | 879 | LLVM::Value::getConstVector8(LLContext, Buf), Context.Int64x2Ty)); |
895 | 879 | break; |
896 | 879 | } |
897 | 454 | case OpCode::Ref__is_null: |
898 | 454 | stackPush(Builder.createZExt( |
899 | 454 | Builder.createICmpEQ( |
900 | 454 | Builder.createExtractElement( |
901 | 454 | Builder.createBitCast(stackPop(), Context.Int64x2Ty), |
902 | 454 | LLContext.getInt64(1)), |
903 | 454 | LLContext.getInt64(0)), |
904 | 454 | Context.Int32Ty)); |
905 | 454 | break; |
906 | 29 | case OpCode::Ref__func: |
907 | 29 | stackPush(Builder.createCall( |
908 | 29 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefFunc, |
909 | 29 | LLVM::Type::getFunctionType(Context.Int64x2Ty, |
910 | 29 | {Context.Int32Ty}, |
911 | 29 | false)), |
912 | 29 | {LLContext.getInt32(Instr.getTargetIndex())})); |
913 | 29 | break; |
914 | 0 | case OpCode::Ref__eq: { |
915 | 0 | LLVM::Value RHS = stackPop(); |
916 | 0 | LLVM::Value LHS = stackPop(); |
917 | 0 | stackPush(Builder.createZExt( |
918 | 0 | Builder.createICmpEQ( |
919 | 0 | Builder.createExtractElement(LHS, LLContext.getInt64(1)), |
920 | 0 | Builder.createExtractElement(RHS, LLContext.getInt64(1))), |
921 | 0 | Context.Int32Ty)); |
922 | 0 | break; |
923 | 879 | } |
924 | 0 | case OpCode::Ref__as_non_null: { |
925 | 0 | auto Next = |
926 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "ref_as_non_null.ok"); |
927 | 0 | Stack.back() = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
928 | 0 | auto IsNotNull = Builder.createLikely(Builder.createICmpNE( |
929 | 0 | Builder.createExtractElement(Stack.back(), LLContext.getInt64(1)), |
930 | 0 | LLContext.getInt64(0))); |
931 | 0 | Builder.createCondBr(IsNotNull, Next, |
932 | 0 | getTrapBB(ErrCode::Value::CastNullToNonNull)); |
933 | 0 | Builder.positionAtEnd(Next); |
934 | 0 | break; |
935 | 879 | } |
936 | | |
937 | | // Reference Instructions (GC proposal) |
938 | 0 | case OpCode::Struct__new: |
939 | 0 | case OpCode::Struct__new_default: { |
940 | 0 | LLVM::Value Args = LLVM::Value::getConstPointerNull(Context.Int8PtrTy); |
941 | 0 | assuming(Instr.getTargetIndex() < Context.CompositeTypes.size()); |
942 | 0 | const auto *CompType = Context.CompositeTypes[Instr.getTargetIndex()]; |
943 | 0 | assuming(CompType != nullptr && !CompType->isFunc()); |
944 | 0 | auto ArgSize = CompType->getFieldTypes().size(); |
945 | 0 | if (Instr.getOpCode() == OpCode::Struct__new) { |
946 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize, nullptr); |
947 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
948 | 0 | ArgsVec[ArgSize - I - 1] = stackPop(); |
949 | 0 | } |
950 | 0 | Args = Builder.createArray(ArgSize, kValSize); |
951 | 0 | Builder.createArrayPtrStore(ArgsVec, Args, Context.Int8Ty, kValSize); |
952 | 0 | } else { |
953 | 0 | ArgSize = 0; |
954 | 0 | } |
955 | 0 | stackPush(Builder.createCall( |
956 | 0 | Context.getIntrinsic( |
957 | 0 | Builder, Executable::Intrinsics::kStructNew, |
958 | 0 | LLVM::Type::getFunctionType( |
959 | 0 | Context.Int64x2Ty, |
960 | 0 | {Context.Int32Ty, Context.Int8PtrTy, Context.Int32Ty}, |
961 | 0 | false)), |
962 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), Args, |
963 | 0 | LLContext.getInt32(static_cast<uint32_t>(ArgSize))})); |
964 | 0 | break; |
965 | 0 | } |
966 | 0 | case OpCode::Struct__get: |
967 | 0 | case OpCode::Struct__get_u: |
968 | 0 | case OpCode::Struct__get_s: { |
969 | 0 | assuming(static_cast<size_t>(Instr.getTargetIndex()) < |
970 | 0 | Context.CompositeTypes.size()); |
971 | 0 | const auto *CompType = Context.CompositeTypes[Instr.getTargetIndex()]; |
972 | 0 | assuming(CompType != nullptr && !CompType->isFunc()); |
973 | 0 | assuming(static_cast<size_t>(Instr.getSourceIndex()) < |
974 | 0 | CompType->getFieldTypes().size()); |
975 | 0 | const auto &StorageType = |
976 | 0 | CompType->getFieldTypes()[Instr.getSourceIndex()].getStorageType(); |
977 | 0 | auto Ref = stackPop(); |
978 | 0 | auto IsSigned = (Instr.getOpCode() == OpCode::Struct__get_s) |
979 | 0 | ? LLContext.getInt8(1) |
980 | 0 | : LLContext.getInt8(0); |
981 | 0 | LLVM::Value Ret = Builder.createAlloca(Context.Int64x2Ty); |
982 | 0 | Builder.createCall( |
983 | 0 | Context.getIntrinsic( |
984 | 0 | Builder, Executable::Intrinsics::kStructGet, |
985 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
986 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
987 | 0 | Context.Int32Ty, Context.Int8Ty, |
988 | 0 | Context.Int8PtrTy}, |
989 | 0 | false)), |
990 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), |
991 | 0 | LLContext.getInt32(Instr.getSourceIndex()), IsSigned, Ret}); |
992 | |
993 | 0 | switch (StorageType.getCode()) { |
994 | 0 | case TypeCode::I8: |
995 | 0 | case TypeCode::I16: |
996 | 0 | case TypeCode::I32: { |
997 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int32Ty, Ret, |
998 | 0 | Context.Int64x2Ty)); |
999 | 0 | break; |
1000 | 0 | } |
1001 | 0 | case TypeCode::I64: { |
1002 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64Ty, Ret, |
1003 | 0 | Context.Int64x2Ty)); |
1004 | 0 | break; |
1005 | 0 | } |
1006 | 0 | case TypeCode::F32: { |
1007 | 0 | stackPush(Builder.createValuePtrLoad(Context.FloatTy, Ret, |
1008 | 0 | Context.Int64x2Ty)); |
1009 | 0 | break; |
1010 | 0 | } |
1011 | 0 | case TypeCode::F64: { |
1012 | 0 | stackPush(Builder.createValuePtrLoad(Context.DoubleTy, Ret, |
1013 | 0 | Context.Int64x2Ty)); |
1014 | 0 | break; |
1015 | 0 | } |
1016 | 0 | case TypeCode::V128: |
1017 | 0 | case TypeCode::Ref: |
1018 | 0 | case TypeCode::RefNull: { |
1019 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64x2Ty, Ret, |
1020 | 0 | Context.Int64x2Ty)); |
1021 | 0 | break; |
1022 | 0 | } |
1023 | 0 | default: |
1024 | 0 | assumingUnreachable(); |
1025 | 0 | } |
1026 | 0 | break; |
1027 | 0 | } |
1028 | 0 | case OpCode::Struct__set: { |
1029 | 0 | auto Val = stackPop(); |
1030 | 0 | auto Ref = stackPop(); |
1031 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1032 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1033 | 0 | Builder.createCall( |
1034 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kStructSet, |
1035 | 0 | LLVM::Type::getFunctionType( |
1036 | 0 | Context.VoidTy, |
1037 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1038 | 0 | Context.Int32Ty, Context.Int8PtrTy}, |
1039 | 0 | false)), |
1040 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), |
1041 | 0 | LLContext.getInt32(Instr.getSourceIndex()), Arg}); |
1042 | 0 | break; |
1043 | 0 | } |
1044 | 0 | case OpCode::Array__new: { |
1045 | 0 | auto Length = stackPop(); |
1046 | 0 | auto Val = stackPop(); |
1047 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1048 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1049 | 0 | stackPush(Builder.createCall( |
1050 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArrayNew, |
1051 | 0 | LLVM::Type::getFunctionType( |
1052 | 0 | Context.Int64x2Ty, |
1053 | 0 | {Context.Int32Ty, Context.Int32Ty, |
1054 | 0 | Context.Int8PtrTy, Context.Int32Ty}, |
1055 | 0 | false)), |
1056 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), Length, Arg, |
1057 | 0 | LLContext.getInt32(1)})); |
1058 | 0 | break; |
1059 | 0 | } |
1060 | 0 | case OpCode::Array__new_default: { |
1061 | 0 | auto Length = stackPop(); |
1062 | 0 | LLVM::Value Arg = LLVM::Value::getConstPointerNull(Context.Int8PtrTy); |
1063 | 0 | stackPush(Builder.createCall( |
1064 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArrayNew, |
1065 | 0 | LLVM::Type::getFunctionType( |
1066 | 0 | Context.Int64x2Ty, |
1067 | 0 | {Context.Int32Ty, Context.Int32Ty, |
1068 | 0 | Context.Int8PtrTy, Context.Int32Ty}, |
1069 | 0 | false)), |
1070 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), Length, Arg, |
1071 | 0 | LLContext.getInt32(0)})); |
1072 | 0 | break; |
1073 | 0 | } |
1074 | 0 | case OpCode::Array__new_fixed: { |
1075 | 0 | const auto ArgSize = Instr.getSourceIndex(); |
1076 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize, nullptr); |
1077 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
1078 | 0 | ArgsVec[ArgSize - I - 1] = stackPop(); |
1079 | 0 | } |
1080 | 0 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
1081 | 0 | Builder.createArrayPtrStore(ArgsVec, Args, Context.Int8Ty, kValSize); |
1082 | 0 | stackPush(Builder.createCall( |
1083 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArrayNew, |
1084 | 0 | LLVM::Type::getFunctionType( |
1085 | 0 | Context.Int64x2Ty, |
1086 | 0 | {Context.Int32Ty, Context.Int32Ty, |
1087 | 0 | Context.Int8PtrTy, Context.Int32Ty}, |
1088 | 0 | false)), |
1089 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), |
1090 | 0 | LLContext.getInt32(ArgSize), Args, LLContext.getInt32(ArgSize)})); |
1091 | 0 | break; |
1092 | 0 | } |
1093 | 0 | case OpCode::Array__new_data: |
1094 | 0 | case OpCode::Array__new_elem: { |
1095 | 0 | auto Length = stackPop(); |
1096 | 0 | auto Start = stackPop(); |
1097 | 0 | stackPush(Builder.createCall( |
1098 | 0 | Context.getIntrinsic( |
1099 | 0 | Builder, |
1100 | 0 | ((Instr.getOpCode() == OpCode::Array__new_data) |
1101 | 0 | ? Executable::Intrinsics::kArrayNewData |
1102 | 0 | : Executable::Intrinsics::kArrayNewElem), |
1103 | 0 | LLVM::Type::getFunctionType(Context.Int64x2Ty, |
1104 | 0 | {Context.Int32Ty, Context.Int32Ty, |
1105 | 0 | Context.Int32Ty, Context.Int32Ty}, |
1106 | 0 | false)), |
1107 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), |
1108 | 0 | LLContext.getInt32(Instr.getSourceIndex()), Start, Length})); |
1109 | 0 | break; |
1110 | 0 | } |
1111 | 0 | case OpCode::Array__get: |
1112 | 0 | case OpCode::Array__get_u: |
1113 | 0 | case OpCode::Array__get_s: { |
1114 | 0 | assuming(static_cast<size_t>(Instr.getTargetIndex()) < |
1115 | 0 | Context.CompositeTypes.size()); |
1116 | 0 | const auto *CompType = Context.CompositeTypes[Instr.getTargetIndex()]; |
1117 | 0 | assuming(CompType != nullptr && !CompType->isFunc()); |
1118 | 0 | assuming(static_cast<size_t>(1) == CompType->getFieldTypes().size()); |
1119 | 0 | const auto &StorageType = CompType->getFieldTypes()[0].getStorageType(); |
1120 | 0 | auto Idx = stackPop(); |
1121 | 0 | auto Ref = stackPop(); |
1122 | 0 | auto IsSigned = (Instr.getOpCode() == OpCode::Array__get_s) |
1123 | 0 | ? LLContext.getInt8(1) |
1124 | 0 | : LLContext.getInt8(0); |
1125 | 0 | LLVM::Value Ret = Builder.createAlloca(Context.Int64x2Ty); |
1126 | 0 | Builder.createCall( |
1127 | 0 | Context.getIntrinsic( |
1128 | 0 | Builder, Executable::Intrinsics::kArrayGet, |
1129 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1130 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1131 | 0 | Context.Int32Ty, Context.Int8Ty, |
1132 | 0 | Context.Int8PtrTy}, |
1133 | 0 | false)), |
1134 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), Idx, IsSigned, |
1135 | 0 | Ret}); |
1136 | |
1137 | 0 | switch (StorageType.getCode()) { |
1138 | 0 | case TypeCode::I8: |
1139 | 0 | case TypeCode::I16: |
1140 | 0 | case TypeCode::I32: { |
1141 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int32Ty, Ret, |
1142 | 0 | Context.Int64x2Ty)); |
1143 | 0 | break; |
1144 | 0 | } |
1145 | 0 | case TypeCode::I64: { |
1146 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64Ty, Ret, |
1147 | 0 | Context.Int64x2Ty)); |
1148 | 0 | break; |
1149 | 0 | } |
1150 | 0 | case TypeCode::F32: { |
1151 | 0 | stackPush(Builder.createValuePtrLoad(Context.FloatTy, Ret, |
1152 | 0 | Context.Int64x2Ty)); |
1153 | 0 | break; |
1154 | 0 | } |
1155 | 0 | case TypeCode::F64: { |
1156 | 0 | stackPush(Builder.createValuePtrLoad(Context.DoubleTy, Ret, |
1157 | 0 | Context.Int64x2Ty)); |
1158 | 0 | break; |
1159 | 0 | } |
1160 | 0 | case TypeCode::V128: |
1161 | 0 | case TypeCode::Ref: |
1162 | 0 | case TypeCode::RefNull: { |
1163 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64x2Ty, Ret, |
1164 | 0 | Context.Int64x2Ty)); |
1165 | 0 | break; |
1166 | 0 | } |
1167 | 0 | default: |
1168 | 0 | assumingUnreachable(); |
1169 | 0 | } |
1170 | 0 | break; |
1171 | 0 | } |
1172 | 0 | case OpCode::Array__set: { |
1173 | 0 | auto Val = stackPop(); |
1174 | 0 | auto Idx = stackPop(); |
1175 | 0 | auto Ref = stackPop(); |
1176 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1177 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1178 | 0 | Builder.createCall( |
1179 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArraySet, |
1180 | 0 | LLVM::Type::getFunctionType( |
1181 | 0 | Context.VoidTy, |
1182 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1183 | 0 | Context.Int32Ty, Context.Int8PtrTy}, |
1184 | 0 | false)), |
1185 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), Idx, Arg}); |
1186 | 0 | break; |
1187 | 0 | } |
1188 | 0 | case OpCode::Array__len: { |
1189 | 0 | auto Ref = stackPop(); |
1190 | 0 | stackPush(Builder.createCall( |
1191 | 0 | Context.getIntrinsic( |
1192 | 0 | Builder, Executable::Intrinsics::kArrayLen, |
1193 | 0 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1194 | 0 | {Context.Int64x2Ty}, false)), |
1195 | 0 | {Ref})); |
1196 | 0 | break; |
1197 | 0 | } |
1198 | 0 | case OpCode::Array__fill: { |
1199 | 0 | auto Cnt = stackPop(); |
1200 | 0 | auto Val = stackPop(); |
1201 | 0 | auto Off = stackPop(); |
1202 | 0 | auto Ref = stackPop(); |
1203 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1204 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1205 | 0 | Builder.createCall( |
1206 | 0 | Context.getIntrinsic( |
1207 | 0 | Builder, Executable::Intrinsics::kArrayFill, |
1208 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1209 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1210 | 0 | Context.Int32Ty, Context.Int32Ty, |
1211 | 0 | Context.Int8PtrTy}, |
1212 | 0 | false)), |
1213 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), Off, Cnt, Arg}); |
1214 | 0 | break; |
1215 | 0 | } |
1216 | 0 | case OpCode::Array__copy: { |
1217 | 0 | auto Cnt = stackPop(); |
1218 | 0 | auto SrcOff = stackPop(); |
1219 | 0 | auto SrcRef = stackPop(); |
1220 | 0 | auto DstOff = stackPop(); |
1221 | 0 | auto DstRef = stackPop(); |
1222 | 0 | Builder.createCall( |
1223 | 0 | Context.getIntrinsic( |
1224 | 0 | Builder, Executable::Intrinsics::kArrayCopy, |
1225 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1226 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1227 | 0 | Context.Int32Ty, Context.Int64x2Ty, |
1228 | 0 | Context.Int32Ty, Context.Int32Ty, |
1229 | 0 | Context.Int32Ty}, |
1230 | 0 | false)), |
1231 | 0 | {DstRef, LLContext.getInt32(Instr.getTargetIndex()), DstOff, SrcRef, |
1232 | 0 | LLContext.getInt32(Instr.getSourceIndex()), SrcOff, Cnt}); |
1233 | 0 | break; |
1234 | 0 | } |
1235 | 0 | case OpCode::Array__init_data: |
1236 | 0 | case OpCode::Array__init_elem: { |
1237 | 0 | auto Cnt = stackPop(); |
1238 | 0 | auto SrcOff = stackPop(); |
1239 | 0 | auto DstOff = stackPop(); |
1240 | 0 | auto Ref = stackPop(); |
1241 | 0 | Builder.createCall( |
1242 | 0 | Context.getIntrinsic( |
1243 | 0 | Builder, |
1244 | 0 | ((Instr.getOpCode() == OpCode::Array__init_data) |
1245 | 0 | ? Executable::Intrinsics::kArrayInitData |
1246 | 0 | : Executable::Intrinsics::kArrayInitElem), |
1247 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1248 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1249 | 0 | Context.Int32Ty, Context.Int32Ty, |
1250 | 0 | Context.Int32Ty, Context.Int32Ty}, |
1251 | 0 | false)), |
1252 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), |
1253 | 0 | LLContext.getInt32(Instr.getSourceIndex()), DstOff, SrcOff, Cnt}); |
1254 | 0 | break; |
1255 | 0 | } |
1256 | 0 | case OpCode::Ref__test: |
1257 | 0 | case OpCode::Ref__test_null: { |
1258 | 0 | auto Ref = stackPop(); |
1259 | 0 | std::array<uint8_t, 16> Buf = {0}; |
1260 | 0 | std::copy_n(Instr.getValType().getRawData().cbegin(), 8, Buf.begin()); |
1261 | 0 | auto VType = Builder.createExtractElement( |
1262 | 0 | Builder.createBitCast(LLVM::Value::getConstVector8(LLContext, Buf), |
1263 | 0 | Context.Int64x2Ty), |
1264 | 0 | LLContext.getInt64(0)); |
1265 | 0 | stackPush(Builder.createCall( |
1266 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefTest, |
1267 | 0 | LLVM::Type::getFunctionType( |
1268 | 0 | Context.Int32Ty, |
1269 | 0 | {Context.Int64x2Ty, Context.Int64Ty}, |
1270 | 0 | false)), |
1271 | 0 | {Ref, VType})); |
1272 | 0 | break; |
1273 | 0 | } |
1274 | 0 | case OpCode::Ref__cast: |
1275 | 0 | case OpCode::Ref__cast_null: { |
1276 | 0 | auto Ref = stackPop(); |
1277 | 0 | std::array<uint8_t, 16> Buf = {0}; |
1278 | 0 | std::copy_n(Instr.getValType().getRawData().cbegin(), 8, Buf.begin()); |
1279 | 0 | auto VType = Builder.createExtractElement( |
1280 | 0 | Builder.createBitCast(LLVM::Value::getConstVector8(LLContext, Buf), |
1281 | 0 | Context.Int64x2Ty), |
1282 | 0 | LLContext.getInt64(0)); |
1283 | 0 | stackPush(Builder.createCall( |
1284 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefCast, |
1285 | 0 | LLVM::Type::getFunctionType( |
1286 | 0 | Context.Int64x2Ty, |
1287 | 0 | {Context.Int64x2Ty, Context.Int64Ty}, |
1288 | 0 | false)), |
1289 | 0 | {Ref, VType})); |
1290 | 0 | break; |
1291 | 0 | } |
1292 | 0 | case OpCode::Any__convert_extern: { |
1293 | 0 | std::array<uint8_t, 16> RawRef = {0}; |
1294 | 0 | auto Ref = stackPop(); |
1295 | 0 | auto PtrVal = Builder.createExtractElement(Ref, LLContext.getInt64(1)); |
1296 | 0 | auto IsNullBB = |
1297 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "any_conv_extern.null"); |
1298 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, |
1299 | 0 | "any_conv_extern.not_null"); |
1300 | 0 | auto IsExtrefBB = LLVM::BasicBlock::create(LLContext, F.Fn, |
1301 | 0 | "any_conv_extern.is_extref"); |
1302 | 0 | auto EndBB = |
1303 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "any_conv_extern.end"); |
1304 | 0 | auto CondIsNull = Builder.createICmpEQ(PtrVal, LLContext.getInt64(0)); |
1305 | 0 | Builder.createCondBr(CondIsNull, IsNullBB, NotNullBB); |
1306 | |
1307 | 0 | Builder.positionAtEnd(IsNullBB); |
1308 | 0 | auto VT = ValType(TypeCode::RefNull, TypeCode::NullRef); |
1309 | 0 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1310 | 0 | auto Ret1 = Builder.createBitCast( |
1311 | 0 | LLVM::Value::getConstVector8(LLContext, RawRef), Context.Int64x2Ty); |
1312 | 0 | Builder.createBr(EndBB); |
1313 | |
1314 | 0 | Builder.positionAtEnd(NotNullBB); |
1315 | 0 | auto Ret2 = Builder.createBitCast( |
1316 | 0 | Builder.createInsertElement( |
1317 | 0 | Builder.createBitCast(Ref, Context.Int8x16Ty), |
1318 | 0 | LLContext.getInt8(0), LLContext.getInt64(1)), |
1319 | 0 | Context.Int64x2Ty); |
1320 | 0 | auto HType = Builder.createExtractElement( |
1321 | 0 | Builder.createBitCast(Ret2, Context.Int8x16Ty), |
1322 | 0 | LLContext.getInt64(3)); |
1323 | 0 | auto CondIsExtref = Builder.createOr( |
1324 | 0 | Builder.createICmpEQ(HType, LLContext.getInt8(static_cast<uint8_t>( |
1325 | 0 | TypeCode::ExternRef))), |
1326 | 0 | Builder.createICmpEQ(HType, LLContext.getInt8(static_cast<uint8_t>( |
1327 | 0 | TypeCode::NullExternRef)))); |
1328 | 0 | Builder.createCondBr(CondIsExtref, IsExtrefBB, EndBB); |
1329 | |
1330 | 0 | Builder.positionAtEnd(IsExtrefBB); |
1331 | 0 | VT = ValType(TypeCode::Ref, TypeCode::AnyRef); |
1332 | 0 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1333 | 0 | auto Ret3 = Builder.createInsertElement( |
1334 | 0 | Builder.createBitCast( |
1335 | 0 | LLVM::Value::getConstVector8(LLContext, RawRef), |
1336 | 0 | Context.Int64x2Ty), |
1337 | 0 | PtrVal, LLContext.getInt64(1)); |
1338 | 0 | Builder.createBr(EndBB); |
1339 | |
1340 | 0 | Builder.positionAtEnd(EndBB); |
1341 | 0 | auto Ret = Builder.createPHI(Context.Int64x2Ty); |
1342 | 0 | Ret.addIncoming(Ret1, IsNullBB); |
1343 | 0 | Ret.addIncoming(Ret2, NotNullBB); |
1344 | 0 | Ret.addIncoming(Ret3, IsExtrefBB); |
1345 | 0 | stackPush(Ret); |
1346 | 0 | break; |
1347 | 0 | } |
1348 | 0 | case OpCode::Extern__convert_any: { |
1349 | 0 | std::array<uint8_t, 16> RawRef = {0}; |
1350 | 0 | auto Ref = stackPop(); |
1351 | 0 | auto IsNullBB = |
1352 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "extern_conv_any.null"); |
1353 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, |
1354 | 0 | "extern_conv_any.not_null"); |
1355 | 0 | auto EndBB = |
1356 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "extern_conv_any.end"); |
1357 | 0 | auto CondIsNull = Builder.createICmpEQ( |
1358 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
1359 | 0 | LLContext.getInt64(0)); |
1360 | 0 | Builder.createCondBr(CondIsNull, IsNullBB, NotNullBB); |
1361 | |
1362 | 0 | Builder.positionAtEnd(IsNullBB); |
1363 | 0 | auto VT = ValType(TypeCode::RefNull, TypeCode::NullExternRef); |
1364 | 0 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1365 | 0 | auto Ret1 = Builder.createBitCast( |
1366 | 0 | LLVM::Value::getConstVector8(LLContext, RawRef), Context.Int64x2Ty); |
1367 | 0 | Builder.createBr(EndBB); |
1368 | |
1369 | 0 | Builder.positionAtEnd(NotNullBB); |
1370 | 0 | auto Ret2 = Builder.createBitCast( |
1371 | 0 | Builder.createInsertElement( |
1372 | 0 | Builder.createBitCast(Ref, Context.Int8x16Ty), |
1373 | 0 | LLContext.getInt8(1), LLContext.getInt64(1)), |
1374 | 0 | Context.Int64x2Ty); |
1375 | 0 | Builder.createBr(EndBB); |
1376 | |
1377 | 0 | Builder.positionAtEnd(EndBB); |
1378 | 0 | auto Ret = Builder.createPHI(Context.Int64x2Ty); |
1379 | 0 | Ret.addIncoming(Ret1, IsNullBB); |
1380 | 0 | Ret.addIncoming(Ret2, NotNullBB); |
1381 | 0 | stackPush(Ret); |
1382 | 0 | break; |
1383 | 0 | } |
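// ref.i31 packs the operand into the payload half of the reference: only the low
// 31 bits are kept and bit 31 is set as a non-null marker, which i31.get_s/u
// check before unpacking.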
1384 | 0 | case OpCode::Ref__i31: { |
1385 | 0 | std::array<uint8_t, 16> RawRef = {0}; |
1386 | 0 | auto VT = ValType(TypeCode::Ref, TypeCode::I31Ref); |
1387 | 0 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1388 | 0 | auto Ref = Builder.createBitCast( |
1389 | 0 | LLVM::Value::getConstVector8(LLContext, RawRef), Context.Int64x2Ty); |
1390 | 0 | auto Val = Builder.createZExt( |
1391 | 0 | Builder.createOr( |
1392 | 0 | Builder.createAnd(stackPop(), LLContext.getInt32(0x7FFFFFFFU)), |
1393 | 0 | LLContext.getInt32(0x80000000U)), |
1394 | 0 | Context.Int64Ty); |
1395 | 0 | stackPush(Builder.createInsertElement(Ref, Val, LLContext.getInt64(1))); |
1396 | 0 | break; |
1397 | 0 | } |
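// i31.get_s recovers the sign after stripping the marker bit: copying bit 30 (the
// i31 sign bit) into bit 31 via the masked shift-left-by-one is equivalent to
// sign-extending the 31-bit value to 32 bits.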
1398 | 0 | case OpCode::I31__get_s: { |
1399 | 0 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "i31.get.ok"); |
1400 | 0 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
1401 | 0 | auto Val = Builder.createTrunc( |
1402 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
1403 | 0 | Context.Int32Ty); |
1404 | 0 | auto IsNotNull = Builder.createLikely(Builder.createICmpNE( |
1405 | 0 | Builder.createAnd(Val, LLContext.getInt32(0x80000000U)), |
1406 | 0 | LLContext.getInt32(0))); |
1407 | 0 | Builder.createCondBr(IsNotNull, Next, |
1408 | 0 | getTrapBB(ErrCode::Value::AccessNullI31)); |
1409 | 0 | Builder.positionAtEnd(Next); |
1410 | 0 | Val = Builder.createAnd(Val, LLContext.getInt32(0x7FFFFFFFU)); |
1411 | 0 | stackPush(Builder.createOr( |
1412 | 0 | Val, Builder.createShl( |
1413 | 0 | Builder.createAnd(Val, LLContext.getInt32(0x40000000U)), |
1414 | 0 | LLContext.getInt32(1)))); |
1415 | 0 | break; |
1416 | 0 | } |
1417 | 0 | case OpCode::I31__get_u: { |
1418 | 0 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "i31.get.ok"); |
1419 | 0 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
1420 | 0 | auto Val = Builder.createTrunc( |
1421 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
1422 | 0 | Context.Int32Ty); |
1423 | 0 | auto IsNotNull = Builder.createLikely(Builder.createICmpNE( |
1424 | 0 | Builder.createAnd(Val, LLContext.getInt32(0x80000000U)), |
1425 | 0 | LLContext.getInt32(0))); |
1426 | 0 | Builder.createCondBr(IsNotNull, Next, |
1427 | 0 | getTrapBB(ErrCode::Value::AccessNullI31)); |
1428 | 0 | Builder.positionAtEnd(Next); |
1429 | 0 | stackPush(Builder.createAnd(Val, LLContext.getInt32(0x7FFFFFFFU))); |
1430 | 0 | break; |
1431 | 0 | } |
1432 | | |
1433 | | // Parametric Instructions |
1434 | 3.20k | case OpCode::Drop: |
1435 | 3.20k | stackPop(); |
1436 | 3.20k | break; |
1437 | 656 | case OpCode::Select: |
1438 | 1.08k | case OpCode::Select_t: { |
1439 | 1.08k | auto Cond = Builder.createICmpNE(stackPop(), LLContext.getInt32(0)); |
1440 | 1.08k | auto False = stackPop(); |
1441 | 1.08k | auto True = stackPop(); |
1442 | 1.08k | stackPush(Builder.createSelect(Cond, True, False)); |
1443 | 1.08k | break; |
1444 | 656 | } |
1445 | | |
1446 | | // Variable Instructions |
1447 | 12.2k | case OpCode::Local__get: { |
1448 | 12.2k | const auto &L = Local[Instr.getTargetIndex()]; |
1449 | 12.2k | stackPush(Builder.createLoad(L.first, L.second)); |
1450 | 12.2k | break; |
1451 | 656 | } |
1452 | 4.08k | case OpCode::Local__set: |
1453 | 4.08k | Builder.createStore(stackPop(), Local[Instr.getTargetIndex()].second); |
1454 | 4.08k | break; |
1455 | 770 | case OpCode::Local__tee: |
1456 | 770 | Builder.createStore(Stack.back(), Local[Instr.getTargetIndex()].second); |
1457 | 770 | break; |
1458 | 332 | case OpCode::Global__get: { |
1459 | 332 | const auto G = |
1460 | 332 | Context.getGlobal(Builder, ExecCtx, Instr.getTargetIndex()); |
1461 | 332 | stackPush(Builder.createLoad(G.first, G.second)); |
1462 | 332 | break; |
1463 | 656 | } |
1464 | 56 | case OpCode::Global__set: |
1465 | 56 | Builder.createStore( |
1466 | 56 | stackPop(), |
1467 | 56 | Context.getGlobal(Builder, ExecCtx, Instr.getTargetIndex()).second); |
1468 | 56 | break; |
1469 | | |
1470 | | // Table Instructions |
1471 | 32 | case OpCode::Table__get: { |
1472 | 32 | auto Idx = stackPop(); |
1473 | 32 | stackPush(Builder.createCall( |
1474 | 32 | Context.getIntrinsic( |
1475 | 32 | Builder, Executable::Intrinsics::kTableGet, |
1476 | 32 | LLVM::Type::getFunctionType(Context.Int64x2Ty, |
1477 | 32 | {Context.Int32Ty, Context.Int32Ty}, |
1478 | 32 | false)), |
1479 | 32 | {LLContext.getInt32(Instr.getTargetIndex()), Idx})); |
1480 | 32 | break; |
1481 | 656 | } |
1482 | 25 | case OpCode::Table__set: { |
1483 | 25 | auto Ref = stackPop(); |
1484 | 25 | auto Idx = stackPop(); |
1485 | 25 | Builder.createCall( |
1486 | 25 | Context.getIntrinsic( |
1487 | 25 | Builder, Executable::Intrinsics::kTableSet, |
1488 | 25 | LLVM::Type::getFunctionType( |
1489 | 25 | Context.Int64Ty, |
1490 | 25 | {Context.Int32Ty, Context.Int32Ty, Context.Int64x2Ty}, |
1491 | 25 | false)), |
1492 | 25 | {LLContext.getInt32(Instr.getTargetIndex()), Idx, Ref}); |
1493 | 25 | break; |
1494 | 656 | } |
1495 | 25 | case OpCode::Table__init: { |
1496 | 25 | auto Len = stackPop(); |
1497 | 25 | auto Src = stackPop(); |
1498 | 25 | auto Dst = stackPop(); |
1499 | 25 | Builder.createCall( |
1500 | 25 | Context.getIntrinsic( |
1501 | 25 | Builder, Executable::Intrinsics::kTableInit, |
1502 | 25 | LLVM::Type::getFunctionType(Context.VoidTy, |
1503 | 25 | {Context.Int32Ty, Context.Int32Ty, |
1504 | 25 | Context.Int32Ty, Context.Int32Ty, |
1505 | 25 | Context.Int32Ty}, |
1506 | 25 | false)), |
1507 | 25 | {LLContext.getInt32(Instr.getTargetIndex()), |
1508 | 25 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1509 | 25 | break; |
1510 | 656 | } |
1511 | 30 | case OpCode::Elem__drop: { |
1512 | 30 | Builder.createCall( |
1513 | 30 | Context.getIntrinsic(Builder, Executable::Intrinsics::kElemDrop, |
1514 | 30 | LLVM::Type::getFunctionType( |
1515 | 30 | Context.VoidTy, {Context.Int32Ty}, false)), |
1516 | 30 | {LLContext.getInt32(Instr.getTargetIndex())}); |
1517 | 30 | break; |
1518 | 656 | } |
1519 | 20 | case OpCode::Table__copy: { |
1520 | 20 | auto Len = stackPop(); |
1521 | 20 | auto Src = stackPop(); |
1522 | 20 | auto Dst = stackPop(); |
1523 | 20 | Builder.createCall( |
1524 | 20 | Context.getIntrinsic( |
1525 | 20 | Builder, Executable::Intrinsics::kTableCopy, |
1526 | 20 | LLVM::Type::getFunctionType(Context.VoidTy, |
1527 | 20 | {Context.Int32Ty, Context.Int32Ty, |
1528 | 20 | Context.Int32Ty, Context.Int32Ty, |
1529 | 20 | Context.Int32Ty}, |
1530 | 20 | false)), |
1531 | 20 | {LLContext.getInt32(Instr.getTargetIndex()), |
1532 | 20 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1533 | 20 | break; |
1534 | 656 | } |
1535 | 19 | case OpCode::Table__grow: { |
1536 | 19 | auto NewSize = stackPop(); |
1537 | 19 | auto Val = stackPop(); |
1538 | 19 | stackPush(Builder.createCall( |
1539 | 19 | Context.getIntrinsic( |
1540 | 19 | Builder, Executable::Intrinsics::kTableGrow, |
1541 | 19 | LLVM::Type::getFunctionType( |
1542 | 19 | Context.Int32Ty, |
1543 | 19 | {Context.Int32Ty, Context.Int64x2Ty, Context.Int32Ty}, |
1544 | 19 | false)), |
1545 | 19 | {LLContext.getInt32(Instr.getTargetIndex()), Val, NewSize})); |
1546 | 19 | break; |
1547 | 656 | } |
1548 | 21 | case OpCode::Table__size: { |
1549 | 21 | stackPush(Builder.createCall( |
1550 | 21 | Context.getIntrinsic(Builder, Executable::Intrinsics::kTableSize, |
1551 | 21 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1552 | 21 | {Context.Int32Ty}, |
1553 | 21 | false)), |
1554 | 21 | {LLContext.getInt32(Instr.getTargetIndex())})); |
1555 | 21 | break; |
1556 | 656 | } |
1557 | 3 | case OpCode::Table__fill: { |
1558 | 3 | auto Len = stackPop(); |
1559 | 3 | auto Val = stackPop(); |
1560 | 3 | auto Off = stackPop(); |
1561 | 3 | Builder.createCall( |
1562 | 3 | Context.getIntrinsic(Builder, Executable::Intrinsics::kTableFill, |
1563 | 3 | LLVM::Type::getFunctionType( |
1564 | 3 | Context.Int32Ty, |
1565 | 3 | {Context.Int32Ty, Context.Int32Ty, |
1566 | 3 | Context.Int64x2Ty, Context.Int32Ty}, |
1567 | 3 | false)), |
1568 | 3 | {LLContext.getInt32(Instr.getTargetIndex()), Off, Val, Len}); |
1569 | 3 | break; |
1570 | 656 | } |
1571 | | |
1572 | | // Memory Instructions |
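// Narrow integer loads pass both the in-memory type and the wider stack type,
// plus a trailing flag selecting sign- or zero-extension; full-width loads pass
// only the value type.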
1573 | 1.12k | case OpCode::I32__load: |
1574 | 1.12k | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1575 | 1.12k | Instr.getMemoryAlign(), Context.Int32Ty); |
1576 | 1.12k | break; |
1577 | 3.11k | case OpCode::I64__load: |
1578 | 3.11k | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1579 | 3.11k | Instr.getMemoryAlign(), Context.Int64Ty); |
1580 | 3.11k | break; |
1581 | 125 | case OpCode::F32__load: |
1582 | 125 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1583 | 125 | Instr.getMemoryAlign(), Context.FloatTy); |
1584 | 125 | break; |
1585 | 263 | case OpCode::F64__load: |
1586 | 263 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1587 | 263 | Instr.getMemoryAlign(), Context.DoubleTy); |
1588 | 263 | break; |
1589 | 528 | case OpCode::I32__load8_s: |
1590 | 528 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1591 | 528 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int32Ty, |
1592 | 528 | true); |
1593 | 528 | break; |
1594 | 189 | case OpCode::I32__load8_u: |
1595 | 189 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1596 | 189 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int32Ty, |
1597 | 189 | false); |
1598 | 189 | break; |
1599 | 365 | case OpCode::I32__load16_s: |
1600 | 365 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1601 | 365 | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int32Ty, |
1602 | 365 | true); |
1603 | 365 | break; |
1604 | 1.53k | case OpCode::I32__load16_u: |
1605 | 1.53k | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1606 | 1.53k | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int32Ty, |
1607 | 1.53k | false); |
1608 | 1.53k | break; |
1609 | 691 | case OpCode::I64__load8_s: |
1610 | 691 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1611 | 691 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int64Ty, |
1612 | 691 | true); |
1613 | 691 | break; |
1614 | 449 | case OpCode::I64__load8_u: |
1615 | 449 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1616 | 449 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int64Ty, |
1617 | 449 | false); |
1618 | 449 | break; |
1619 | 430 | case OpCode::I64__load16_s: |
1620 | 430 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1621 | 430 | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int64Ty, |
1622 | 430 | true); |
1623 | 430 | break; |
1624 | 558 | case OpCode::I64__load16_u: |
1625 | 558 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1626 | 558 | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int64Ty, |
1627 | 558 | false); |
1628 | 558 | break; |
1629 | 381 | case OpCode::I64__load32_s: |
1630 | 381 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1631 | 381 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int64Ty, |
1632 | 381 | true); |
1633 | 381 | break; |
1634 | 462 | case OpCode::I64__load32_u: |
1635 | 462 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1636 | 462 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int64Ty, |
1637 | 462 | false); |
1638 | 462 | break; |
1639 | 420 | case OpCode::I32__store: |
1640 | 420 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1641 | 420 | Instr.getMemoryAlign(), Context.Int32Ty); |
1642 | 420 | break; |
1643 | 1.46k | case OpCode::I64__store: |
1644 | 1.46k | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1645 | 1.46k | Instr.getMemoryAlign(), Context.Int64Ty); |
1646 | 1.46k | break; |
1647 | 87 | case OpCode::F32__store: |
1648 | 87 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1649 | 87 | Instr.getMemoryAlign(), Context.FloatTy); |
1650 | 87 | break; |
1651 | 76 | case OpCode::F64__store: |
1652 | 76 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1653 | 76 | Instr.getMemoryAlign(), Context.DoubleTy); |
1654 | 76 | break; |
1655 | 305 | case OpCode::I32__store8: |
1656 | 350 | case OpCode::I64__store8: |
1657 | 350 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1658 | 350 | Instr.getMemoryAlign(), Context.Int8Ty, true); |
1659 | 350 | break; |
1660 | 247 | case OpCode::I32__store16: |
1661 | 332 | case OpCode::I64__store16: |
1662 | 332 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1663 | 332 | Instr.getMemoryAlign(), Context.Int16Ty, true); |
1664 | 332 | break; |
1665 | 83 | case OpCode::I64__store32: |
1666 | 83 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1667 | 83 | Instr.getMemoryAlign(), Context.Int32Ty, true); |
1668 | 83 | break; |
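// memory.size and memory.grow work in units of 64 KiB pages; the kMemGrow
// intrinsic is expected to return the previous size in pages, or -1 on failure,
// directly in its i32 result.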
1669 | 540 | case OpCode::Memory__size: |
1670 | 540 | stackPush(Builder.createCall( |
1671 | 540 | Context.getIntrinsic(Builder, Executable::Intrinsics::kMemSize, |
1672 | 540 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1673 | 540 | {Context.Int32Ty}, |
1674 | 540 | false)), |
1675 | 540 | {LLContext.getInt32(Instr.getTargetIndex())})); |
1676 | 540 | break; |
1677 | 506 | case OpCode::Memory__grow: { |
1678 | 506 | auto Diff = stackPop(); |
1679 | 506 | stackPush(Builder.createCall( |
1680 | 506 | Context.getIntrinsic( |
1681 | 506 | Builder, Executable::Intrinsics::kMemGrow, |
1682 | 506 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1683 | 506 | {Context.Int32Ty, Context.Int32Ty}, |
1684 | 506 | false)), |
1685 | 506 | {LLContext.getInt32(Instr.getTargetIndex()), Diff})); |
1686 | 506 | break; |
1687 | 247 | } |
1688 | 20 | case OpCode::Memory__init: { |
1689 | 20 | auto Len = stackPop(); |
1690 | 20 | auto Src = stackPop(); |
1691 | 20 | auto Dst = stackPop(); |
1692 | 20 | Builder.createCall( |
1693 | 20 | Context.getIntrinsic( |
1694 | 20 | Builder, Executable::Intrinsics::kMemInit, |
1695 | 20 | LLVM::Type::getFunctionType(Context.VoidTy, |
1696 | 20 | {Context.Int32Ty, Context.Int32Ty, |
1697 | 20 | Context.Int32Ty, Context.Int32Ty, |
1698 | 20 | Context.Int32Ty}, |
1699 | 20 | false)), |
1700 | 20 | {LLContext.getInt32(Instr.getTargetIndex()), |
1701 | 20 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1702 | 20 | break; |
1703 | 247 | } |
1704 | 20 | case OpCode::Data__drop: { |
1705 | 20 | Builder.createCall( |
1706 | 20 | Context.getIntrinsic(Builder, Executable::Intrinsics::kDataDrop, |
1707 | 20 | LLVM::Type::getFunctionType( |
1708 | 20 | Context.VoidTy, {Context.Int32Ty}, false)), |
1709 | 20 | {LLContext.getInt32(Instr.getTargetIndex())}); |
1710 | 20 | break; |
1711 | 247 | } |
1712 | 360 | case OpCode::Memory__copy: { |
1713 | 360 | auto Len = stackPop(); |
1714 | 360 | auto Src = stackPop(); |
1715 | 360 | auto Dst = stackPop(); |
1716 | 360 | Builder.createCall( |
1717 | 360 | Context.getIntrinsic( |
1718 | 360 | Builder, Executable::Intrinsics::kMemCopy, |
1719 | 360 | LLVM::Type::getFunctionType(Context.VoidTy, |
1720 | 360 | {Context.Int32Ty, Context.Int32Ty, |
1721 | 360 | Context.Int32Ty, Context.Int32Ty, |
1722 | 360 | Context.Int32Ty}, |
1723 | 360 | false)), |
1724 | 360 | {LLContext.getInt32(Instr.getTargetIndex()), |
1725 | 360 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1726 | 360 | break; |
1727 | 247 | } |
1728 | 650 | case OpCode::Memory__fill: { |
1729 | 650 | auto Len = stackPop(); |
1730 | 650 | auto Val = Builder.createTrunc(stackPop(), Context.Int8Ty); |
1731 | 650 | auto Off = stackPop(); |
1732 | 650 | Builder.createCall( |
1733 | 650 | Context.getIntrinsic( |
1734 | 650 | Builder, Executable::Intrinsics::kMemFill, |
1735 | 650 | LLVM::Type::getFunctionType(Context.VoidTy, |
1736 | 650 | {Context.Int32Ty, Context.Int32Ty, |
1737 | 650 | Context.Int8Ty, Context.Int32Ty}, |
1738 | 650 | false)), |
1739 | 650 | {LLContext.getInt32(Instr.getTargetIndex()), Off, Val, Len}); |
1740 | 650 | break; |
1741 | 247 | } |
1742 | | |
1743 | | // Const Numeric Instructions |
1744 | 518k | case OpCode::I32__const: |
1745 | 518k | stackPush(LLContext.getInt32(Instr.getNum().get<uint32_t>())); |
1746 | 518k | break; |
1747 | 85.3k | case OpCode::I64__const: |
1748 | 85.3k | stackPush(LLContext.getInt64(Instr.getNum().get<uint64_t>())); |
1749 | 85.3k | break; |
1750 | 14.2k | case OpCode::F32__const: |
1751 | 14.2k | stackPush(LLContext.getFloat(Instr.getNum().get<float>())); |
1752 | 14.2k | break; |
1753 | 6.56k | case OpCode::F64__const: |
1754 | 6.56k | stackPush(LLContext.getDouble(Instr.getNum().get<double>())); |
1755 | 6.56k | break; |
1756 | | |
1757 | | // Unary Numeric Instructions |
1758 | 6.79k | case OpCode::I32__eqz: |
1759 | 6.79k | stackPush(Builder.createZExt( |
1760 | 6.79k | Builder.createICmpEQ(stackPop(), LLContext.getInt32(0)), |
1761 | 6.79k | Context.Int32Ty)); |
1762 | 6.79k | break; |
1763 | 1.28k | case OpCode::I64__eqz: |
1764 | 1.28k | stackPush(Builder.createZExt( |
1765 | 1.28k | Builder.createICmpEQ(stackPop(), LLContext.getInt64(0)), |
1766 | 1.28k | Context.Int32Ty)); |
1767 | 1.28k | break; |
1768 | 2.11k | case OpCode::I32__clz: |
1769 | 2.11k | assuming(LLVM::Core::Ctlz != LLVM::Core::NotIntrinsic); |
1770 | 2.11k | stackPush(Builder.createIntrinsic(LLVM::Core::Ctlz, {Context.Int32Ty}, |
1771 | 2.11k | {stackPop(), LLContext.getFalse()})); |
1772 | 2.11k | break; |
1773 | 309 | case OpCode::I64__clz: |
1774 | 309 | assuming(LLVM::Core::Ctlz != LLVM::Core::NotIntrinsic); |
1775 | 309 | stackPush(Builder.createIntrinsic(LLVM::Core::Ctlz, {Context.Int64Ty}, |
1776 | 309 | {stackPop(), LLContext.getFalse()})); |
1777 | 309 | break; |
1778 | 1.96k | case OpCode::I32__ctz: |
1779 | 1.96k | assuming(LLVM::Core::Cttz != LLVM::Core::NotIntrinsic); |
1780 | 1.96k | stackPush(Builder.createIntrinsic(LLVM::Core::Cttz, {Context.Int32Ty}, |
1781 | 1.96k | {stackPop(), LLContext.getFalse()})); |
1782 | 1.96k | break; |
1783 | 453 | case OpCode::I64__ctz: |
1784 | 453 | assuming(LLVM::Core::Cttz != LLVM::Core::NotIntrinsic); |
1785 | 453 | stackPush(Builder.createIntrinsic(LLVM::Core::Cttz, {Context.Int64Ty}, |
1786 | 453 | {stackPop(), LLContext.getFalse()})); |
1787 | 453 | break; |
1788 | 12.0k | case OpCode::I32__popcnt: |
1789 | 13.9k | case OpCode::I64__popcnt: |
1790 | 13.9k | assuming(LLVM::Core::Ctpop != LLVM::Core::NotIntrinsic); |
1791 | 13.9k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Ctpop, stackPop())); |
1792 | 13.9k | break; |
1793 | 826 | case OpCode::F32__abs: |
1794 | 1.37k | case OpCode::F64__abs: |
1795 | 1.37k | assuming(LLVM::Core::Fabs != LLVM::Core::NotIntrinsic); |
1796 | 1.37k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Fabs, stackPop())); |
1797 | 1.37k | break; |
1798 | 1.03k | case OpCode::F32__neg: |
1799 | 1.79k | case OpCode::F64__neg: |
1800 | 1.79k | stackPush(Builder.createFNeg(stackPop())); |
1801 | 1.79k | break; |
1802 | 1.74k | case OpCode::F32__ceil: |
1803 | 3.86k | case OpCode::F64__ceil: |
1804 | 3.86k | assuming(LLVM::Core::Ceil != LLVM::Core::NotIntrinsic); |
1805 | 3.86k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Ceil, stackPop())); |
1806 | 3.86k | break; |
1807 | 884 | case OpCode::F32__floor: |
1808 | 1.26k | case OpCode::F64__floor: |
1809 | 1.26k | assuming(LLVM::Core::Floor != LLVM::Core::NotIntrinsic); |
1810 | 1.26k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Floor, stackPop())); |
1811 | 1.26k | break; |
1812 | 571 | case OpCode::F32__trunc: |
1813 | 866 | case OpCode::F64__trunc: |
1814 | 866 | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
1815 | 866 | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Trunc, stackPop())); |
1816 | 866 | break; |
1817 | 820 | case OpCode::F32__nearest: |
1818 | 1.19k | case OpCode::F64__nearest: { |
1819 | 1.19k | const bool IsFloat = Instr.getOpCode() == OpCode::F32__nearest; |
1820 | 1.19k | LLVM::Value Value = stackPop(); |
1821 | | |
1822 | 1.19k | #if LLVM_VERSION_MAJOR >= 12 |
1823 | 1.19k | assuming(LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic); |
1824 | 1.19k | if (LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic) { |
1825 | 1.19k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Roundeven, Value)); |
1826 | 1.19k | break; |
1827 | 1.19k | } |
1828 | 0 | #endif |
1829 | | |
1830 | | // VectorSize is used only when SSE4_1 or NEON is supported.
1831 | 0 | [[maybe_unused]] const uint32_t VectorSize = IsFloat ? 4 : 2; |
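// On x86-64 the scalar roundss/roundsd intrinsic is invoked with rounding-control
// immediate 8 (_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC), i.e. round to
// nearest-even without raising exceptions; on AArch64 the frintn intrinsic
// provides the same semantics.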
1832 | 0 | #if defined(__x86_64__) |
1833 | 0 | if (Context.SupportSSE4_1) { |
1834 | 0 | auto Zero = LLContext.getInt64(0); |
1835 | 0 | auto VectorTy = |
1836 | 0 | LLVM::Type::getVectorType(Value.getType(), VectorSize); |
1837 | 0 | LLVM::Value Ret = LLVM::Value::getUndef(VectorTy); |
1838 | 0 | Ret = Builder.createInsertElement(Ret, Value, Zero); |
1839 | 0 | auto ID = IsFloat ? LLVM::Core::X86SSE41RoundSs |
1840 | 0 | : LLVM::Core::X86SSE41RoundSd; |
1841 | 0 | assuming(ID != LLVM::Core::NotIntrinsic); |
1842 | 0 | Ret = Builder.createIntrinsic(ID, {}, |
1843 | 0 | {Ret, Ret, LLContext.getInt32(8)}); |
1844 | 0 | Ret = Builder.createExtractElement(Ret, Zero); |
1845 | 0 | stackPush(Ret); |
1846 | 0 | break; |
1847 | 0 | } |
1848 | 0 | #endif |
1849 | | |
1850 | | #if defined(__aarch64__) |
1851 | | if (Context.SupportNEON && |
1852 | | LLVM::Core::AArch64NeonFRIntN != LLVM::Core::NotIntrinsic) { |
1853 | | auto Zero = LLContext.getInt64(0); |
1854 | | auto VectorTy = |
1855 | | LLVM::Type::getVectorType(Value.getType(), VectorSize); |
1856 | | LLVM::Value Ret = LLVM::Value::getUndef(VectorTy); |
1857 | | Ret = Builder.createInsertElement(Ret, Value, Zero); |
1858 | | Ret = |
1859 | | Builder.createUnaryIntrinsic(LLVM::Core::AArch64NeonFRIntN, Ret); |
1860 | | Ret = Builder.createExtractElement(Ret, Zero); |
1861 | | stackPush(Ret); |
1862 | | break; |
1863 | | } |
1864 | | #endif |
1865 | | |
1866 | | // Fallback case.
1867 | | // If SSE4.1 is not supported on the x86_64 platform, or NEON is not
1868 | | // supported on the aarch64 platform, then fall back to the
1869 | | // nearbyint intrinsic below.
1870 | 0 | assuming(LLVM::Core::Nearbyint != LLVM::Core::NotIntrinsic); |
1871 | 0 | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Nearbyint, Value)); |
1872 | 0 | break; |
1873 | 0 | } |
1874 | 414 | case OpCode::F32__sqrt: |
1875 | 2.43k | case OpCode::F64__sqrt: |
1876 | 2.43k | assuming(LLVM::Core::Sqrt != LLVM::Core::NotIntrinsic); |
1877 | 2.43k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Sqrt, stackPop())); |
1878 | 2.43k | break; |
1879 | 320 | case OpCode::I32__wrap_i64: |
1880 | 320 | stackPush(Builder.createTrunc(stackPop(), Context.Int32Ty)); |
1881 | 320 | break; |
1882 | 1.12k | case OpCode::I32__trunc_f32_s: |
1883 | 1.12k | compileSignedTrunc(Context.Int32Ty); |
1884 | 1.12k | break; |
1885 | 238 | case OpCode::I32__trunc_f64_s: |
1886 | 238 | compileSignedTrunc(Context.Int32Ty); |
1887 | 238 | break; |
1888 | 151 | case OpCode::I32__trunc_f32_u: |
1889 | 151 | compileUnsignedTrunc(Context.Int32Ty); |
1890 | 151 | break; |
1891 | 1.03k | case OpCode::I32__trunc_f64_u: |
1892 | 1.03k | compileUnsignedTrunc(Context.Int32Ty); |
1893 | 1.03k | break; |
1894 | 1.82k | case OpCode::I64__extend_i32_s: |
1895 | 1.82k | stackPush(Builder.createSExt(stackPop(), Context.Int64Ty)); |
1896 | 1.82k | break; |
1897 | 352 | case OpCode::I64__extend_i32_u: |
1898 | 352 | stackPush(Builder.createZExt(stackPop(), Context.Int64Ty)); |
1899 | 352 | break; |
1900 | 54 | case OpCode::I64__trunc_f32_s: |
1901 | 54 | compileSignedTrunc(Context.Int64Ty); |
1902 | 54 | break; |
1903 | 370 | case OpCode::I64__trunc_f64_s: |
1904 | 370 | compileSignedTrunc(Context.Int64Ty); |
1905 | 370 | break; |
1906 | 1.07k | case OpCode::I64__trunc_f32_u: |
1907 | 1.07k | compileUnsignedTrunc(Context.Int64Ty); |
1908 | 1.07k | break; |
1909 | 1.44k | case OpCode::I64__trunc_f64_u: |
1910 | 1.44k | compileUnsignedTrunc(Context.Int64Ty); |
1911 | 1.44k | break; |
1912 | 1.56k | case OpCode::F32__convert_i32_s: |
1913 | 1.97k | case OpCode::F32__convert_i64_s: |
1914 | 1.97k | stackPush(Builder.createSIToFP(stackPop(), Context.FloatTy)); |
1915 | 1.97k | break; |
1916 | 593 | case OpCode::F32__convert_i32_u: |
1917 | 1.82k | case OpCode::F32__convert_i64_u: |
1918 | 1.82k | stackPush(Builder.createUIToFP(stackPop(), Context.FloatTy)); |
1919 | 1.82k | break; |
1920 | 1.32k | case OpCode::F64__convert_i32_s: |
1921 | 5.51k | case OpCode::F64__convert_i64_s: |
1922 | 5.51k | stackPush(Builder.createSIToFP(stackPop(), Context.DoubleTy)); |
1923 | 5.51k | break; |
1924 | 1.33k | case OpCode::F64__convert_i32_u: |
1925 | 1.51k | case OpCode::F64__convert_i64_u: |
1926 | 1.51k | stackPush(Builder.createUIToFP(stackPop(), Context.DoubleTy)); |
1927 | 1.51k | break; |
1928 | 211 | case OpCode::F32__demote_f64: |
1929 | 211 | stackPush(Builder.createFPTrunc(stackPop(), Context.FloatTy)); |
1930 | 211 | break; |
1931 | 92 | case OpCode::F64__promote_f32: |
1932 | 92 | stackPush(Builder.createFPExt(stackPop(), Context.DoubleTy)); |
1933 | 92 | break; |
1934 | 674 | case OpCode::I32__reinterpret_f32: |
1935 | 674 | stackPush(Builder.createBitCast(stackPop(), Context.Int32Ty)); |
1936 | 674 | break; |
1937 | 689 | case OpCode::I64__reinterpret_f64: |
1938 | 689 | stackPush(Builder.createBitCast(stackPop(), Context.Int64Ty)); |
1939 | 689 | break; |
1940 | 3.87k | case OpCode::F32__reinterpret_i32: |
1941 | 3.87k | stackPush(Builder.createBitCast(stackPop(), Context.FloatTy)); |
1942 | 3.87k | break; |
1943 | 1.18k | case OpCode::F64__reinterpret_i64: |
1944 | 1.18k | stackPush(Builder.createBitCast(stackPop(), Context.DoubleTy)); |
1945 | 1.18k | break; |
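// The extendN_s ops are emitted as a trunc to the narrow type followed by a sext
// back, which LLVM can fold into a single sign-extension instruction.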
1946 | 876 | case OpCode::I32__extend8_s: |
1947 | 876 | stackPush(Builder.createSExt( |
1948 | 876 | Builder.createTrunc(stackPop(), Context.Int8Ty), Context.Int32Ty)); |
1949 | 876 | break; |
1950 | 2.75k | case OpCode::I32__extend16_s: |
1951 | 2.75k | stackPush(Builder.createSExt( |
1952 | 2.75k | Builder.createTrunc(stackPop(), Context.Int16Ty), Context.Int32Ty)); |
1953 | 2.75k | break; |
1954 | 359 | case OpCode::I64__extend8_s: |
1955 | 359 | stackPush(Builder.createSExt( |
1956 | 359 | Builder.createTrunc(stackPop(), Context.Int8Ty), Context.Int64Ty)); |
1957 | 359 | break; |
1958 | 615 | case OpCode::I64__extend16_s: |
1959 | 615 | stackPush(Builder.createSExt( |
1960 | 615 | Builder.createTrunc(stackPop(), Context.Int16Ty), Context.Int64Ty)); |
1961 | 615 | break; |
1962 | 727 | case OpCode::I64__extend32_s: |
1963 | 727 | stackPush(Builder.createSExt( |
1964 | 727 | Builder.createTrunc(stackPop(), Context.Int32Ty), Context.Int64Ty)); |
1965 | 727 | break; |
1966 | | |
1967 | | // Binary Numeric Instructions |
1968 | 1.36k | case OpCode::I32__eq: |
1969 | 1.60k | case OpCode::I64__eq: { |
1970 | 1.60k | LLVM::Value RHS = stackPop(); |
1971 | 1.60k | LLVM::Value LHS = stackPop(); |
1972 | 1.60k | stackPush(Builder.createZExt(Builder.createICmpEQ(LHS, RHS), |
1973 | 1.60k | Context.Int32Ty)); |
1974 | 1.60k | break; |
1975 | 1.36k | } |
1976 | 672 | case OpCode::I32__ne: |
1977 | 695 | case OpCode::I64__ne: { |
1978 | 695 | LLVM::Value RHS = stackPop(); |
1979 | 695 | LLVM::Value LHS = stackPop(); |
1980 | 695 | stackPush(Builder.createZExt(Builder.createICmpNE(LHS, RHS), |
1981 | 695 | Context.Int32Ty)); |
1982 | 695 | break; |
1983 | 672 | } |
1984 | 4.27k | case OpCode::I32__lt_s: |
1985 | 4.86k | case OpCode::I64__lt_s: { |
1986 | 4.86k | LLVM::Value RHS = stackPop(); |
1987 | 4.86k | LLVM::Value LHS = stackPop(); |
1988 | 4.86k | stackPush(Builder.createZExt(Builder.createICmpSLT(LHS, RHS), |
1989 | 4.86k | Context.Int32Ty)); |
1990 | 4.86k | break; |
1991 | 4.27k | } |
1992 | 5.47k | case OpCode::I32__lt_u: |
1993 | 5.83k | case OpCode::I64__lt_u: { |
1994 | 5.83k | LLVM::Value RHS = stackPop(); |
1995 | 5.83k | LLVM::Value LHS = stackPop(); |
1996 | 5.83k | stackPush(Builder.createZExt(Builder.createICmpULT(LHS, RHS), |
1997 | 5.83k | Context.Int32Ty)); |
1998 | 5.83k | break; |
1999 | 5.47k | } |
2000 | 1.01k | case OpCode::I32__gt_s: |
2001 | 1.51k | case OpCode::I64__gt_s: { |
2002 | 1.51k | LLVM::Value RHS = stackPop(); |
2003 | 1.51k | LLVM::Value LHS = stackPop(); |
2004 | 1.51k | stackPush(Builder.createZExt(Builder.createICmpSGT(LHS, RHS), |
2005 | 1.51k | Context.Int32Ty)); |
2006 | 1.51k | break; |
2007 | 1.01k | } |
2008 | 5.56k | case OpCode::I32__gt_u: |
2009 | 5.68k | case OpCode::I64__gt_u: { |
2010 | 5.68k | LLVM::Value RHS = stackPop(); |
2011 | 5.68k | LLVM::Value LHS = stackPop(); |
2012 | 5.68k | stackPush(Builder.createZExt(Builder.createICmpUGT(LHS, RHS), |
2013 | 5.68k | Context.Int32Ty)); |
2014 | 5.68k | break; |
2015 | 5.56k | } |
2016 | 1.86k | case OpCode::I32__le_s: |
2017 | 2.73k | case OpCode::I64__le_s: { |
2018 | 2.73k | LLVM::Value RHS = stackPop(); |
2019 | 2.73k | LLVM::Value LHS = stackPop(); |
2020 | 2.73k | stackPush(Builder.createZExt(Builder.createICmpSLE(LHS, RHS), |
2021 | 2.73k | Context.Int32Ty)); |
2022 | 2.73k | break; |
2023 | 1.86k | } |
2024 | 458 | case OpCode::I32__le_u: |
2025 | 1.79k | case OpCode::I64__le_u: { |
2026 | 1.79k | LLVM::Value RHS = stackPop(); |
2027 | 1.79k | LLVM::Value LHS = stackPop(); |
2028 | 1.79k | stackPush(Builder.createZExt(Builder.createICmpULE(LHS, RHS), |
2029 | 1.79k | Context.Int32Ty)); |
2030 | 1.79k | break; |
2031 | 458 | } |
2032 | 1.08k | case OpCode::I32__ge_s: |
2033 | 1.11k | case OpCode::I64__ge_s: { |
2034 | 1.11k | LLVM::Value RHS = stackPop(); |
2035 | 1.11k | LLVM::Value LHS = stackPop(); |
2036 | 1.11k | stackPush(Builder.createZExt(Builder.createICmpSGE(LHS, RHS), |
2037 | 1.11k | Context.Int32Ty)); |
2038 | 1.11k | break; |
2039 | 1.08k | } |
2040 | 2.74k | case OpCode::I32__ge_u: |
2041 | 3.46k | case OpCode::I64__ge_u: { |
2042 | 3.46k | LLVM::Value RHS = stackPop(); |
2043 | 3.46k | LLVM::Value LHS = stackPop(); |
2044 | 3.46k | stackPush(Builder.createZExt(Builder.createICmpUGE(LHS, RHS), |
2045 | 3.46k | Context.Int32Ty)); |
2046 | 3.46k | break; |
2047 | 2.74k | } |
2048 | 160 | case OpCode::F32__eq: |
2049 | 215 | case OpCode::F64__eq: { |
2050 | 215 | LLVM::Value RHS = stackPop(); |
2051 | 215 | LLVM::Value LHS = stackPop(); |
2052 | 215 | stackPush(Builder.createZExt(Builder.createFCmpOEQ(LHS, RHS), |
2053 | 215 | Context.Int32Ty)); |
2054 | 215 | break; |
2055 | 160 | } |
2056 | 90 | case OpCode::F32__ne: |
2057 | 117 | case OpCode::F64__ne: { |
2058 | 117 | LLVM::Value RHS = stackPop(); |
2059 | 117 | LLVM::Value LHS = stackPop(); |
2060 | 117 | stackPush(Builder.createZExt(Builder.createFCmpUNE(LHS, RHS), |
2061 | 117 | Context.Int32Ty)); |
2062 | 117 | break; |
2063 | 90 | } |
2064 | 177 | case OpCode::F32__lt: |
2065 | 303 | case OpCode::F64__lt: { |
2066 | 303 | LLVM::Value RHS = stackPop(); |
2067 | 303 | LLVM::Value LHS = stackPop(); |
2068 | 303 | stackPush(Builder.createZExt(Builder.createFCmpOLT(LHS, RHS), |
2069 | 303 | Context.Int32Ty)); |
2070 | 303 | break; |
2071 | 177 | } |
2072 | 147 | case OpCode::F32__gt: |
2073 | 202 | case OpCode::F64__gt: { |
2074 | 202 | LLVM::Value RHS = stackPop(); |
2075 | 202 | LLVM::Value LHS = stackPop(); |
2076 | 202 | stackPush(Builder.createZExt(Builder.createFCmpOGT(LHS, RHS), |
2077 | 202 | Context.Int32Ty)); |
2078 | 202 | break; |
2079 | 147 | } |
2080 | 77 | case OpCode::F32__le: |
2081 | 178 | case OpCode::F64__le: { |
2082 | 178 | LLVM::Value RHS = stackPop(); |
2083 | 178 | LLVM::Value LHS = stackPop(); |
2084 | 178 | stackPush(Builder.createZExt(Builder.createFCmpOLE(LHS, RHS), |
2085 | 178 | Context.Int32Ty)); |
2086 | 178 | break; |
2087 | 77 | } |
2088 | 234 | case OpCode::F32__ge: |
2089 | 260 | case OpCode::F64__ge: { |
2090 | 260 | LLVM::Value RHS = stackPop(); |
2091 | 260 | LLVM::Value LHS = stackPop(); |
2092 | 260 | stackPush(Builder.createZExt(Builder.createFCmpOGE(LHS, RHS), |
2093 | 260 | Context.Int32Ty)); |
2094 | 260 | break; |
2095 | 234 | } |
2096 | 724 | case OpCode::I32__add: |
2097 | 1.18k | case OpCode::I64__add: { |
2098 | 1.18k | LLVM::Value RHS = stackPop(); |
2099 | 1.18k | LLVM::Value LHS = stackPop(); |
2100 | 1.18k | stackPush(Builder.createAdd(LHS, RHS)); |
2101 | 1.18k | break; |
2102 | 724 | } |
2103 | 1.59k | case OpCode::I32__sub: |
2104 | 2.01k | case OpCode::I64__sub: { |
2105 | 2.01k | LLVM::Value RHS = stackPop(); |
2106 | 2.01k | LLVM::Value LHS = stackPop(); |
2107 | | |
2108 | 2.01k | stackPush(Builder.createSub(LHS, RHS)); |
2109 | 2.01k | break; |
2110 | 1.59k | } |
2111 | 573 | case OpCode::I32__mul: |
2112 | 940 | case OpCode::I64__mul: { |
2113 | 940 | LLVM::Value RHS = stackPop(); |
2114 | 940 | LLVM::Value LHS = stackPop(); |
2115 | 940 | stackPush(Builder.createMul(LHS, RHS)); |
2116 | 940 | break; |
2117 | 573 | } |
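// Signed division has two trapping conditions in Wasm: a zero divisor and the
// INT_MIN / -1 overflow case. With kForceDivCheck enabled, explicit branches to
// the trap blocks guard both before the sdiv, which would otherwise be undefined
// behaviour in LLVM IR.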
2118 | 1.04k | case OpCode::I32__div_s: |
2119 | 1.50k | case OpCode::I64__div_s: { |
2120 | 1.50k | LLVM::Value RHS = stackPop(); |
2121 | 1.50k | LLVM::Value LHS = stackPop(); |
2122 | 1.50k | if constexpr (kForceDivCheck) { |
2123 | 1.50k | const bool Is32 = Instr.getOpCode() == OpCode::I32__div_s; |
2124 | 1.50k | LLVM::Value IntZero = |
2125 | 1.50k | Is32 ? LLContext.getInt32(0) : LLContext.getInt64(0); |
2126 | 1.50k | LLVM::Value IntMinusOne = |
2127 | 1.50k | Is32 ? LLContext.getInt32(static_cast<uint32_t>(INT32_C(-1))) |
2128 | 1.50k | : LLContext.getInt64(static_cast<uint64_t>(INT64_C(-1))); |
2129 | 1.50k | LLVM::Value IntMin = Is32 ? LLContext.getInt32(static_cast<uint32_t>( |
2130 | 1.04k | std::numeric_limits<int32_t>::min())) |
2131 | 1.50k | : LLContext.getInt64(static_cast<uint64_t>( |
2132 | 458 | std::numeric_limits<int64_t>::min())); |
2133 | | |
2134 | 1.50k | auto NoZeroBB = |
2135 | 1.50k | LLVM::BasicBlock::create(LLContext, F.Fn, "div.nozero"); |
2136 | 1.50k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "div.ok"); |
2137 | | |
2138 | 1.50k | auto IsNotZero = |
2139 | 1.50k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2140 | 1.50k | Builder.createCondBr(IsNotZero, NoZeroBB, |
2141 | 1.50k | getTrapBB(ErrCode::Value::DivideByZero)); |
2142 | | |
2143 | 1.50k | Builder.positionAtEnd(NoZeroBB); |
2144 | 1.50k | auto NotOverflow = Builder.createLikely( |
2145 | 1.50k | Builder.createOr(Builder.createICmpNE(LHS, IntMin), |
2146 | 1.50k | Builder.createICmpNE(RHS, IntMinusOne))); |
2147 | 1.50k | Builder.createCondBr(NotOverflow, OkBB, |
2148 | 1.50k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
2149 | | |
2150 | 1.50k | Builder.positionAtEnd(OkBB); |
2151 | 1.50k | } |
2152 | 1.50k | stackPush(Builder.createSDiv(LHS, RHS)); |
2153 | 1.50k | break; |
2154 | 1.04k | } |
2155 | 3.43k | case OpCode::I32__div_u: |
2156 | 3.91k | case OpCode::I64__div_u: { |
2157 | 3.91k | LLVM::Value RHS = stackPop(); |
2158 | 3.91k | LLVM::Value LHS = stackPop(); |
2159 | 3.91k | if constexpr (kForceDivCheck) { |
2160 | 3.91k | const bool Is32 = Instr.getOpCode() == OpCode::I32__div_u; |
2161 | 3.91k | LLVM::Value IntZero = |
2162 | 3.91k | Is32 ? LLContext.getInt32(0) : LLContext.getInt64(0); |
2163 | 3.91k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "div.ok"); |
2164 | | |
2165 | 3.91k | auto IsNotZero = |
2166 | 3.91k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2167 | 3.91k | Builder.createCondBr(IsNotZero, OkBB, |
2168 | 3.91k | getTrapBB(ErrCode::Value::DivideByZero)); |
2169 | 3.91k | Builder.positionAtEnd(OkBB); |
2170 | 3.91k | } |
2171 | 3.91k | stackPush(Builder.createUDiv(LHS, RHS)); |
2172 | 3.91k | break; |
2173 | 3.43k | } |
2174 | 817 | case OpCode::I32__rem_s: |
2175 | 1.26k | case OpCode::I64__rem_s: { |
2176 | 1.26k | LLVM::Value RHS = stackPop(); |
2177 | 1.26k | LLVM::Value LHS = stackPop(); |
2178 | | // Handle INT32_MIN (or INT64_MIN) % -1: the result is defined as 0, so srem is bypassed for that case.
2179 | 1.26k | const bool Is32 = Instr.getOpCode() == OpCode::I32__rem_s; |
2180 | 1.26k | LLVM::Value IntMinusOne = |
2181 | 1.26k | Is32 ? LLContext.getInt32(static_cast<uint32_t>(INT32_C(-1))) |
2182 | 1.26k | : LLContext.getInt64(static_cast<uint64_t>(INT64_C(-1))); |
2183 | 1.26k | LLVM::Value IntMin = Is32 ? LLContext.getInt32(static_cast<uint32_t>( |
2184 | 817 | std::numeric_limits<int32_t>::min())) |
2185 | 1.26k | : LLContext.getInt64(static_cast<uint64_t>( |
2186 | 452 | std::numeric_limits<int64_t>::min())); |
2187 | 1.26k | LLVM::Value IntZero = |
2188 | 1.26k | Is32 ? LLContext.getInt32(0) : LLContext.getInt64(0); |
2189 | | |
2190 | 1.26k | auto NoOverflowBB = |
2191 | 1.26k | LLVM::BasicBlock::create(LLContext, F.Fn, "no.overflow"); |
2192 | 1.26k | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "end.overflow"); |
2193 | | |
2194 | 1.26k | if constexpr (kForceDivCheck) { |
2195 | 1.26k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "rem.ok"); |
2196 | | |
2197 | 1.26k | auto IsNotZero = |
2198 | 1.26k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2199 | 1.26k | Builder.createCondBr(IsNotZero, OkBB, |
2200 | 1.26k | getTrapBB(ErrCode::Value::DivideByZero)); |
2201 | 1.26k | Builder.positionAtEnd(OkBB); |
2202 | 1.26k | } |
2203 | | |
2204 | 1.26k | auto CurrBB = Builder.getInsertBlock(); |
2205 | | |
2206 | 1.26k | auto NotOverflow = Builder.createLikely( |
2207 | 1.26k | Builder.createOr(Builder.createICmpNE(LHS, IntMin), |
2208 | 1.26k | Builder.createICmpNE(RHS, IntMinusOne))); |
2209 | 1.26k | Builder.createCondBr(NotOverflow, NoOverflowBB, EndBB); |
2210 | | |
2211 | 1.26k | Builder.positionAtEnd(NoOverflowBB); |
2212 | 1.26k | auto Ret1 = Builder.createSRem(LHS, RHS); |
2213 | 1.26k | Builder.createBr(EndBB); |
2214 | | |
2215 | 1.26k | Builder.positionAtEnd(EndBB); |
2216 | 1.26k | auto Ret = Builder.createPHI(Ret1.getType()); |
2217 | 1.26k | Ret.addIncoming(Ret1, NoOverflowBB); |
2218 | 1.26k | Ret.addIncoming(IntZero, CurrBB); |
2219 | | |
2220 | 1.26k | stackPush(Ret); |
2221 | 1.26k | break; |
2222 | 817 | } |
2223 | 787 | case OpCode::I32__rem_u: |
2224 | 1.26k | case OpCode::I64__rem_u: { |
2225 | 1.26k | LLVM::Value RHS = stackPop(); |
2226 | 1.26k | LLVM::Value LHS = stackPop(); |
2227 | 1.26k | if constexpr (kForceDivCheck) { |
2228 | 1.26k | LLVM::Value IntZero = Instr.getOpCode() == OpCode::I32__rem_u |
2229 | 1.26k | ? LLContext.getInt32(0) |
2230 | 1.26k | : LLContext.getInt64(0); |
2231 | 1.26k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "rem.ok"); |
2232 | | |
2233 | 1.26k | auto IsNotZero = |
2234 | 1.26k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2235 | 1.26k | Builder.createCondBr(IsNotZero, OkBB, |
2236 | 1.26k | getTrapBB(ErrCode::Value::DivideByZero)); |
2237 | 1.26k | Builder.positionAtEnd(OkBB); |
2238 | 1.26k | } |
2239 | 1.26k | stackPush(Builder.createURem(LHS, RHS)); |
2240 | 1.26k | break; |
2241 | 787 | } |
2242 | 643 | case OpCode::I32__and: |
2243 | 2.00k | case OpCode::I64__and: { |
2244 | 2.00k | LLVM::Value RHS = stackPop(); |
2245 | 2.00k | LLVM::Value LHS = stackPop(); |
2246 | 2.00k | stackPush(Builder.createAnd(LHS, RHS)); |
2247 | 2.00k | break; |
2248 | 643 | } |
2249 | 983 | case OpCode::I32__or: |
2250 | 1.32k | case OpCode::I64__or: { |
2251 | 1.32k | LLVM::Value RHS = stackPop(); |
2252 | 1.32k | LLVM::Value LHS = stackPop(); |
2253 | 1.32k | stackPush(Builder.createOr(LHS, RHS)); |
2254 | 1.32k | break; |
2255 | 983 | } |
2256 | 955 | case OpCode::I32__xor: |
2257 | 1.45k | case OpCode::I64__xor: { |
2258 | 1.45k | LLVM::Value RHS = stackPop(); |
2259 | 1.45k | LLVM::Value LHS = stackPop(); |
2260 | 1.45k | stackPush(Builder.createXor(LHS, RHS)); |
2261 | 1.45k | break; |
2262 | 955 | } |
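// Wasm takes shift counts modulo the bit width, so the right-hand operand is
// masked with 31 (i32) or 63 (i64) first; an unmasked out-of-range count would
// make the LLVM shift produce a poison value.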
2263 | 1.33k | case OpCode::I32__shl: |
2264 | 1.69k | case OpCode::I64__shl: { |
2265 | 1.69k | LLVM::Value Mask = Instr.getOpCode() == OpCode::I32__shl |
2266 | 1.69k | ? LLContext.getInt32(31) |
2267 | 1.69k | : LLContext.getInt64(63); |
2268 | 1.69k | LLVM::Value RHS = Builder.createAnd(stackPop(), Mask); |
2269 | 1.69k | LLVM::Value LHS = stackPop(); |
2270 | 1.69k | stackPush(Builder.createShl(LHS, RHS)); |
2271 | 1.69k | break; |
2272 | 1.33k | } |
2273 | 1.14k | case OpCode::I32__shr_s: |
2274 | 1.52k | case OpCode::I64__shr_s: { |
2275 | 1.52k | LLVM::Value Mask = Instr.getOpCode() == OpCode::I32__shr_s |
2276 | 1.52k | ? LLContext.getInt32(31) |
2277 | 1.52k | : LLContext.getInt64(63); |
2278 | 1.52k | LLVM::Value RHS = Builder.createAnd(stackPop(), Mask); |
2279 | 1.52k | LLVM::Value LHS = stackPop(); |
2280 | 1.52k | stackPush(Builder.createAShr(LHS, RHS)); |
2281 | 1.52k | break; |
2282 | 1.14k | } |
2283 | 3.39k | case OpCode::I32__shr_u: |
2284 | 3.67k | case OpCode::I64__shr_u: { |
2285 | 3.67k | LLVM::Value Mask = Instr.getOpCode() == OpCode::I32__shr_u |
2286 | 3.67k | ? LLContext.getInt32(31) |
2287 | 3.67k | : LLContext.getInt64(63); |
2288 | 3.67k | LLVM::Value RHS = Builder.createAnd(stackPop(), Mask); |
2289 | 3.67k | LLVM::Value LHS = stackPop(); |
2290 | 3.67k | stackPush(Builder.createLShr(LHS, RHS)); |
2291 | 3.67k | break; |
2292 | 3.39k | } |
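// Rotates are expressed with the llvm.fshl/llvm.fshr funnel-shift intrinsics
// using the same value for both data operands, which is LLVM's canonical rotate
// form; the intrinsics already reduce the shift amount modulo the bit width.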
2293 | 2.30k | case OpCode::I32__rotl: { |
2294 | 2.30k | LLVM::Value RHS = stackPop(); |
2295 | 2.30k | LLVM::Value LHS = stackPop(); |
2296 | 2.30k | assuming(LLVM::Core::FShl != LLVM::Core::NotIntrinsic); |
2297 | 2.30k | stackPush(Builder.createIntrinsic(LLVM::Core::FShl, {Context.Int32Ty}, |
2298 | 2.30k | {LHS, LHS, RHS})); |
2299 | 2.30k | break; |
2300 | 2.30k | } |
2301 | 678 | case OpCode::I32__rotr: { |
2302 | 678 | LLVM::Value RHS = stackPop(); |
2303 | 678 | LLVM::Value LHS = stackPop(); |
2304 | 678 | assuming(LLVM::Core::FShr != LLVM::Core::NotIntrinsic); |
2305 | 678 | stackPush(Builder.createIntrinsic(LLVM::Core::FShr, {Context.Int32Ty}, |
2306 | 678 | {LHS, LHS, RHS})); |
2307 | 678 | break; |
2308 | 678 | } |
2309 | 699 | case OpCode::I64__rotl: { |
2310 | 699 | LLVM::Value RHS = stackPop(); |
2311 | 699 | LLVM::Value LHS = stackPop(); |
2312 | 699 | assuming(LLVM::Core::FShl != LLVM::Core::NotIntrinsic); |
2313 | 699 | stackPush(Builder.createIntrinsic(LLVM::Core::FShl, {Context.Int64Ty}, |
2314 | 699 | {LHS, LHS, RHS})); |
2315 | 699 | break; |
2316 | 699 | } |
2317 | 1.35k | case OpCode::I64__rotr: { |
2318 | 1.35k | LLVM::Value RHS = stackPop(); |
2319 | 1.35k | LLVM::Value LHS = stackPop(); |
2320 | 1.35k | assuming(LLVM::Core::FShr != LLVM::Core::NotIntrinsic); |
2321 | 1.35k | stackPush(Builder.createIntrinsic(LLVM::Core::FShr, {Context.Int64Ty}, |
2322 | 1.35k | {LHS, LHS, RHS})); |
2323 | 1.35k | break; |
2324 | 1.35k | } |
2325 | 275 | case OpCode::F32__add: |
2326 | 578 | case OpCode::F64__add: { |
2327 | 578 | LLVM::Value RHS = stackPop(); |
2328 | 578 | LLVM::Value LHS = stackPop(); |
2329 | 578 | stackPush(Builder.createFAdd(LHS, RHS)); |
2330 | 578 | break; |
2331 | 275 | } |
2332 | 148 | case OpCode::F32__sub: |
2333 | 435 | case OpCode::F64__sub: { |
2334 | 435 | LLVM::Value RHS = stackPop(); |
2335 | 435 | LLVM::Value LHS = stackPop(); |
2336 | 435 | stackPush(Builder.createFSub(LHS, RHS)); |
2337 | 435 | break; |
2338 | 148 | } |
2339 | 546 | case OpCode::F32__mul: |
2340 | 687 | case OpCode::F64__mul: { |
2341 | 687 | LLVM::Value RHS = stackPop(); |
2342 | 687 | LLVM::Value LHS = stackPop(); |
2343 | 687 | stackPush(Builder.createFMul(LHS, RHS)); |
2344 | 687 | break; |
2345 | 546 | } |
2346 | 228 | case OpCode::F32__div: |
2347 | 572 | case OpCode::F64__div: { |
2348 | 572 | LLVM::Value RHS = stackPop(); |
2349 | 572 | LLVM::Value LHS = stackPop(); |
2350 | 572 | stackPush(Builder.createFDiv(LHS, RHS)); |
2351 | 572 | break; |
2352 | 228 | } |
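// f32/f64 min and max need two fix-ups on top of llvm.minnum/llvm.maxnum: when
// the operands are unordered (a NaN is present) the FAdd propagates a canonical
// NaN, and when they compare equal (notably +0 and -0) the bitwise OR (min) or
// AND (max) of their bit patterns picks the correctly signed zero.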
2353 | 306 | case OpCode::F32__min: |
2354 | 694 | case OpCode::F64__min: { |
2355 | 694 | LLVM::Value RHS = stackPop(); |
2356 | 694 | LLVM::Value LHS = stackPop(); |
2357 | 694 | auto FpTy = Instr.getOpCode() == OpCode::F32__min ? Context.FloatTy |
2358 | 694 | : Context.DoubleTy; |
2359 | 694 | auto IntTy = Instr.getOpCode() == OpCode::F32__min ? Context.Int32Ty |
2360 | 694 | : Context.Int64Ty; |
2361 | | |
2362 | 694 | auto UEQ = Builder.createFCmpUEQ(LHS, RHS); |
2363 | 694 | auto UNO = Builder.createFCmpUNO(LHS, RHS); |
2364 | | |
2365 | 694 | auto LHSInt = Builder.createBitCast(LHS, IntTy); |
2366 | 694 | auto RHSInt = Builder.createBitCast(RHS, IntTy); |
2367 | 694 | auto OrInt = Builder.createOr(LHSInt, RHSInt); |
2368 | 694 | auto OrFp = Builder.createBitCast(OrInt, FpTy); |
2369 | | |
2370 | 694 | auto AddFp = Builder.createFAdd(LHS, RHS); |
2371 | | |
2372 | 694 | assuming(LLVM::Core::MinNum != LLVM::Core::NotIntrinsic); |
2373 | 694 | auto MinFp = Builder.createIntrinsic(LLVM::Core::MinNum, |
2374 | 694 | {LHS.getType()}, {LHS, RHS}); |
2375 | | |
2376 | 694 | auto Ret = Builder.createSelect( |
2377 | 694 | UEQ, Builder.createSelect(UNO, AddFp, OrFp), MinFp); |
2378 | 694 | stackPush(Ret); |
2379 | 694 | break; |
2380 | 694 | } |
2381 | 331 | case OpCode::F32__max: |
2382 | 793 | case OpCode::F64__max: { |
2383 | 793 | LLVM::Value RHS = stackPop(); |
2384 | 793 | LLVM::Value LHS = stackPop(); |
2385 | 793 | auto FpTy = Instr.getOpCode() == OpCode::F32__max ? Context.FloatTy |
2386 | 793 | : Context.DoubleTy; |
2387 | 793 | auto IntTy = Instr.getOpCode() == OpCode::F32__max ? Context.Int32Ty |
2388 | 793 | : Context.Int64Ty; |
2389 | | |
2390 | 793 | auto UEQ = Builder.createFCmpUEQ(LHS, RHS); |
2391 | 793 | auto UNO = Builder.createFCmpUNO(LHS, RHS); |
2392 | | |
2393 | 793 | auto LHSInt = Builder.createBitCast(LHS, IntTy); |
2394 | 793 | auto RHSInt = Builder.createBitCast(RHS, IntTy); |
2395 | 793 | auto AndInt = Builder.createAnd(LHSInt, RHSInt); |
2396 | 793 | auto AndFp = Builder.createBitCast(AndInt, FpTy); |
2397 | | |
2398 | 793 | auto AddFp = Builder.createFAdd(LHS, RHS); |
2399 | | |
2400 | 793 | assuming(LLVM::Core::MaxNum != LLVM::Core::NotIntrinsic); |
2401 | 793 | auto MaxFp = Builder.createIntrinsic(LLVM::Core::MaxNum, |
2402 | 793 | {LHS.getType()}, {LHS, RHS}); |
2403 | | |
2404 | 793 | auto Ret = Builder.createSelect( |
2405 | 793 | UEQ, Builder.createSelect(UNO, AddFp, AndFp), MaxFp); |
2406 | 793 | stackPush(Ret); |
2407 | 793 | break; |
2408 | 793 | } |
2409 | 429 | case OpCode::F32__copysign: |
2410 | 838 | case OpCode::F64__copysign: { |
2411 | 838 | LLVM::Value RHS = stackPop(); |
2412 | 838 | LLVM::Value LHS = stackPop(); |
2413 | 838 | assuming(LLVM::Core::CopySign != LLVM::Core::NotIntrinsic); |
2414 | 838 | stackPush(Builder.createIntrinsic(LLVM::Core::CopySign, {LHS.getType()}, |
2415 | 838 | {LHS, RHS})); |
2416 | 838 | break; |
2417 | 838 | } |
2418 | | |
2419 | | // Saturating Truncation Numeric Instructions |
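// The saturating forms do not trap: NaN inputs become 0 and out-of-range values
// clamp to the destination's minimum or maximum, logic that the
// compileSignedTruncSat/compileUnsignedTruncSat helpers encapsulate.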
2420 | 181 | case OpCode::I32__trunc_sat_f32_s: |
2421 | 181 | compileSignedTruncSat(Context.Int32Ty); |
2422 | 181 | break; |
2423 | 94 | case OpCode::I32__trunc_sat_f32_u: |
2424 | 94 | compileUnsignedTruncSat(Context.Int32Ty); |
2425 | 94 | break; |
2426 | 319 | case OpCode::I32__trunc_sat_f64_s: |
2427 | 319 | compileSignedTruncSat(Context.Int32Ty); |
2428 | 319 | break; |
2429 | 195 | case OpCode::I32__trunc_sat_f64_u: |
2430 | 195 | compileUnsignedTruncSat(Context.Int32Ty); |
2431 | 195 | break; |
2432 | 346 | case OpCode::I64__trunc_sat_f32_s: |
2433 | 346 | compileSignedTruncSat(Context.Int64Ty); |
2434 | 346 | break; |
2435 | 353 | case OpCode::I64__trunc_sat_f32_u: |
2436 | 353 | compileUnsignedTruncSat(Context.Int64Ty); |
2437 | 353 | break; |
2438 | 303 | case OpCode::I64__trunc_sat_f64_s: |
2439 | 303 | compileSignedTruncSat(Context.Int64Ty); |
2440 | 303 | break; |
2441 | 345 | case OpCode::I64__trunc_sat_f64_u: |
2442 | 345 | compileUnsignedTruncSat(Context.Int64Ty); |
2443 | 345 | break; |
2444 | | |
2445 | | // SIMD Memory Instructions |
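// load8x8/load16x4/load32x2 read a 64-bit half-width vector and sign- or
// zero-extend each lane to the wider element type, load*_splat broadcasts one
// scalar into every lane, and load*_zero fills only the low lane and zeroes the
// rest.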
2446 | 5.21k | case OpCode::V128__load: |
2447 | 5.21k | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2448 | 5.21k | Instr.getMemoryAlign(), Context.Int128x1Ty); |
2449 | 5.21k | break; |
2450 | 178 | case OpCode::V128__load8x8_s: |
2451 | 178 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2452 | 178 | Instr.getMemoryAlign(), |
2453 | 178 | LLVM::Type::getVectorType(Context.Int8Ty, 8), |
2454 | 178 | Context.Int16x8Ty, true); |
2455 | 178 | break; |
2456 | 56 | case OpCode::V128__load8x8_u: |
2457 | 56 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2458 | 56 | Instr.getMemoryAlign(), |
2459 | 56 | LLVM::Type::getVectorType(Context.Int8Ty, 8), |
2460 | 56 | Context.Int16x8Ty, false); |
2461 | 56 | break; |
2462 | 377 | case OpCode::V128__load16x4_s: |
2463 | 377 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2464 | 377 | Instr.getMemoryAlign(), |
2465 | 377 | LLVM::Type::getVectorType(Context.Int16Ty, 4), |
2466 | 377 | Context.Int32x4Ty, true); |
2467 | 377 | break; |
2468 | 418 | case OpCode::V128__load16x4_u: |
2469 | 418 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2470 | 418 | Instr.getMemoryAlign(), |
2471 | 418 | LLVM::Type::getVectorType(Context.Int16Ty, 4), |
2472 | 418 | Context.Int32x4Ty, false); |
2473 | 418 | break; |
2474 | 148 | case OpCode::V128__load32x2_s: |
2475 | 148 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2476 | 148 | Instr.getMemoryAlign(), |
2477 | 148 | LLVM::Type::getVectorType(Context.Int32Ty, 2), |
2478 | 148 | Context.Int64x2Ty, true); |
2479 | 148 | break; |
2480 | 141 | case OpCode::V128__load32x2_u: |
2481 | 141 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2482 | 141 | Instr.getMemoryAlign(), |
2483 | 141 | LLVM::Type::getVectorType(Context.Int32Ty, 2), |
2484 | 141 | Context.Int64x2Ty, false); |
2485 | 141 | break; |
2486 | 90 | case OpCode::V128__load8_splat: |
2487 | 90 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2488 | 90 | Instr.getMemoryAlign(), Context.Int8Ty, |
2489 | 90 | Context.Int8x16Ty); |
2490 | 90 | break; |
2491 | 150 | case OpCode::V128__load16_splat: |
2492 | 150 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2493 | 150 | Instr.getMemoryAlign(), Context.Int16Ty, |
2494 | 150 | Context.Int16x8Ty); |
2495 | 150 | break; |
2496 | 237 | case OpCode::V128__load32_splat: |
2497 | 237 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2498 | 237 | Instr.getMemoryAlign(), Context.Int32Ty, |
2499 | 237 | Context.Int32x4Ty); |
2500 | 237 | break; |
2501 | 152 | case OpCode::V128__load64_splat: |
2502 | 152 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2503 | 152 | Instr.getMemoryAlign(), Context.Int64Ty, |
2504 | 152 | Context.Int64x2Ty); |
2505 | 152 | break; |
2506 | 86 | case OpCode::V128__load32_zero: |
2507 | 86 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2508 | 86 | Instr.getMemoryAlign(), Context.Int32Ty, |
2509 | 86 | Context.Int128Ty, false); |
2510 | 86 | break; |
2511 | 155 | case OpCode::V128__load64_zero: |
2512 | 155 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2513 | 155 | Instr.getMemoryAlign(), Context.Int64Ty, |
2514 | 155 | Context.Int128Ty, false); |
2515 | 155 | break; |
2516 | 245 | case OpCode::V128__store: |
2517 | 245 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2518 | 245 | Instr.getMemoryAlign(), Context.Int128x1Ty, false, true); |
2519 | 245 | break; |
2520 | 182 | case OpCode::V128__load8_lane: |
2521 | 182 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2522 | 182 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2523 | 182 | Context.Int8Ty, Context.Int8x16Ty); |
2524 | 182 | break; |
2525 | 162 | case OpCode::V128__load16_lane: |
2526 | 162 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2527 | 162 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2528 | 162 | Context.Int16Ty, Context.Int16x8Ty); |
2529 | 162 | break; |
2530 | 124 | case OpCode::V128__load32_lane: |
2531 | 124 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2532 | 124 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2533 | 124 | Context.Int32Ty, Context.Int32x4Ty); |
2534 | 124 | break; |
2535 | 22 | case OpCode::V128__load64_lane: |
2536 | 22 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2537 | 22 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2538 | 22 | Context.Int64Ty, Context.Int64x2Ty); |
2539 | 22 | break; |
2540 | 109 | case OpCode::V128__store8_lane: |
2541 | 109 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2542 | 109 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2543 | 109 | Context.Int8Ty, Context.Int8x16Ty); |
2544 | 109 | break; |
2545 | 65 | case OpCode::V128__store16_lane: |
2546 | 65 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2547 | 65 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2548 | 65 | Context.Int16Ty, Context.Int16x8Ty); |
2549 | 65 | break; |
2550 | 129 | case OpCode::V128__store32_lane: |
2551 | 129 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2552 | 129 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2553 | 129 | Context.Int32Ty, Context.Int32x4Ty); |
2554 | 129 | break; |
2555 | 36 | case OpCode::V128__store64_lane: |
2556 | 36 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2557 | 36 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2558 | 36 | Context.Int64Ty, Context.Int64x2Ty); |
2559 | 36 | break; |
2560 | | |
2561 | | // SIMD Const Instructions |
2562 | 406 | case OpCode::V128__const: { |
2563 | 406 | const auto Value = Instr.getNum().get<uint64x2_t>(); |
2564 | 406 | auto Vector = |
2565 | 406 | LLVM::Value::getConstVector64(LLContext, {Value[0], Value[1]}); |
2566 | 406 | stackPush(Builder.createBitCast(Vector, Context.Int64x2Ty)); |
2567 | 406 | break; |
2568 | 838 | } |
2569 | | |
2570 | | // SIMD Shuffle Instructions |
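// The 16 lane indices of i8x16.shuffle are packed byte-wise into the 128-bit
// immediate; they are unpacked into a constant mask for shufflevector, where
// indices 16-31 select lanes from the second operand.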
2571 | 15 | case OpCode::I8x16__shuffle: { |
2572 | 15 | auto V2 = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
2573 | 15 | auto V1 = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
2574 | 15 | const auto V3 = Instr.getNum().get<uint128_t>(); |
2575 | 15 | std::array<uint8_t, 16> Mask; |
2576 | 255 | for (size_t I = 0; I < 16; ++I) { |
2577 | 240 | Mask[I] = static_cast<uint8_t>(V3 >> (I * 8)); |
2578 | 240 | } |
2579 | 15 | stackPush(Builder.createBitCast( |
2580 | 15 | Builder.createShuffleVector( |
2581 | 15 | V1, V2, LLVM::Value::getConstVector8(LLContext, Mask)), |
2582 | 15 | Context.Int64x2Ty)); |
2583 | 15 | break; |
2584 | 838 | } |
2585 | | |
2586 | | // SIMD Lane Instructions |
2587 | 90 | case OpCode::I8x16__extract_lane_s: |
2588 | 90 | compileExtractLaneOp(Context.Int8x16Ty, Instr.getMemoryLane(), |
2589 | 90 | Context.Int32Ty, true); |
2590 | 90 | break; |
2591 | 30 | case OpCode::I8x16__extract_lane_u: |
2592 | 30 | compileExtractLaneOp(Context.Int8x16Ty, Instr.getMemoryLane(), |
2593 | 30 | Context.Int32Ty, false); |
2594 | 30 | break; |
2595 | 167 | case OpCode::I8x16__replace_lane: |
2596 | 167 | compileReplaceLaneOp(Context.Int8x16Ty, Instr.getMemoryLane()); |
2597 | 167 | break; |
2598 | 388 | case OpCode::I16x8__extract_lane_s: |
2599 | 388 | compileExtractLaneOp(Context.Int16x8Ty, Instr.getMemoryLane(), |
2600 | 388 | Context.Int32Ty, true); |
2601 | 388 | break; |
2602 | 472 | case OpCode::I16x8__extract_lane_u: |
2603 | 472 | compileExtractLaneOp(Context.Int16x8Ty, Instr.getMemoryLane(), |
2604 | 472 | Context.Int32Ty, false); |
2605 | 472 | break; |
2606 | 251 | case OpCode::I16x8__replace_lane: |
2607 | 251 | compileReplaceLaneOp(Context.Int16x8Ty, Instr.getMemoryLane()); |
2608 | 251 | break; |
2609 | 68 | case OpCode::I32x4__extract_lane: |
2610 | 68 | compileExtractLaneOp(Context.Int32x4Ty, Instr.getMemoryLane()); |
2611 | 68 | break; |
2612 | 243 | case OpCode::I32x4__replace_lane: |
2613 | 243 | compileReplaceLaneOp(Context.Int32x4Ty, Instr.getMemoryLane()); |
2614 | 243 | break; |
2615 | 132 | case OpCode::I64x2__extract_lane: |
2616 | 132 | compileExtractLaneOp(Context.Int64x2Ty, Instr.getMemoryLane()); |
2617 | 132 | break; |
2618 | 15 | case OpCode::I64x2__replace_lane: |
2619 | 15 | compileReplaceLaneOp(Context.Int64x2Ty, Instr.getMemoryLane()); |
2620 | 15 | break; |
2621 | 69 | case OpCode::F32x4__extract_lane: |
2622 | 69 | compileExtractLaneOp(Context.Floatx4Ty, Instr.getMemoryLane()); |
2623 | 69 | break; |
2624 | 23 | case OpCode::F32x4__replace_lane: |
2625 | 23 | compileReplaceLaneOp(Context.Floatx4Ty, Instr.getMemoryLane()); |
2626 | 23 | break; |
2627 | 75 | case OpCode::F64x2__extract_lane: |
2628 | 75 | compileExtractLaneOp(Context.Doublex2Ty, Instr.getMemoryLane()); |
2629 | 75 | break; |
2630 | 8 | case OpCode::F64x2__replace_lane: |
2631 | 8 | compileReplaceLaneOp(Context.Doublex2Ty, Instr.getMemoryLane()); |
2632 | 8 | break; |
2633 | | |
2634 | | // SIMD Numeric Instructions |
2635 | 81 | case OpCode::I8x16__swizzle: |
2636 | 81 | compileVectorSwizzle(); |
2637 | 81 | break; |
2638 | 32.8k | case OpCode::I8x16__splat: |
2639 | 32.8k | compileSplatOp(Context.Int8x16Ty); |
2640 | 32.8k | break; |
2641 | 8.91k | case OpCode::I16x8__splat: |
2642 | 8.91k | compileSplatOp(Context.Int16x8Ty); |
2643 | 8.91k | break; |
2644 | 1.19k | case OpCode::I32x4__splat: |
2645 | 1.19k | compileSplatOp(Context.Int32x4Ty); |
2646 | 1.19k | break; |
2647 | 403 | case OpCode::I64x2__splat: |
2648 | 403 | compileSplatOp(Context.Int64x2Ty); |
2649 | 403 | break; |
2650 | 344 | case OpCode::F32x4__splat: |
2651 | 344 | compileSplatOp(Context.Floatx4Ty); |
2652 | 344 | break; |
2653 | 59 | case OpCode::F64x2__splat: |
2654 | 59 | compileSplatOp(Context.Doublex2Ty); |
2655 | 59 | break; |
2656 | 120 | case OpCode::I8x16__eq: |
2657 | 120 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntEQ); |
2658 | 120 | break; |
2659 | 338 | case OpCode::I8x16__ne: |
2660 | 338 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntNE); |
2661 | 338 | break; |
2662 | 80 | case OpCode::I8x16__lt_s: |
2663 | 80 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSLT); |
2664 | 80 | break; |
2665 | 83 | case OpCode::I8x16__lt_u: |
2666 | 83 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntULT); |
2667 | 83 | break; |
2668 | 157 | case OpCode::I8x16__gt_s: |
2669 | 157 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSGT); |
2670 | 157 | break; |
2671 | 179 | case OpCode::I8x16__gt_u: |
2672 | 179 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntUGT); |
2673 | 179 | break; |
2674 | 86 | case OpCode::I8x16__le_s: |
2675 | 86 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSLE); |
2676 | 86 | break; |
2677 | 117 | case OpCode::I8x16__le_u: |
2678 | 117 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntULE); |
2679 | 117 | break; |
2680 | 652 | case OpCode::I8x16__ge_s: |
2681 | 652 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSGE); |
2682 | 652 | break; |
2683 | 118 | case OpCode::I8x16__ge_u: |
2684 | 118 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntUGE); |
2685 | 118 | break; |
2686 | 103 | case OpCode::I16x8__eq: |
2687 | 103 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntEQ); |
2688 | 103 | break; |
2689 | 243 | case OpCode::I16x8__ne: |
2690 | 243 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntNE); |
2691 | 243 | break; |
2692 | 68 | case OpCode::I16x8__lt_s: |
2693 | 68 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSLT); |
2694 | 68 | break; |
2695 | 230 | case OpCode::I16x8__lt_u: |
2696 | 230 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntULT); |
2697 | 230 | break; |
2698 | 226 | case OpCode::I16x8__gt_s: |
2699 | 226 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSGT); |
2700 | 226 | break; |
2701 | 146 | case OpCode::I16x8__gt_u: |
2702 | 146 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntUGT); |
2703 | 146 | break; |
2704 | 78 | case OpCode::I16x8__le_s: |
2705 | 78 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSLE); |
2706 | 78 | break; |
2707 | 98 | case OpCode::I16x8__le_u: |
2708 | 98 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntULE); |
2709 | 98 | break; |
2710 | 154 | case OpCode::I16x8__ge_s: |
2711 | 154 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSGE); |
2712 | 154 | break; |
2713 | 71 | case OpCode::I16x8__ge_u: |
2714 | 71 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntUGE); |
2715 | 71 | break; |
2716 | 99 | case OpCode::I32x4__eq: |
2717 | 99 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntEQ); |
2718 | 99 | break; |
2719 | 124 | case OpCode::I32x4__ne: |
2720 | 124 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntNE); |
2721 | 124 | break; |
2722 | 74 | case OpCode::I32x4__lt_s: |
2723 | 74 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSLT); |
2724 | 74 | break; |
2725 | 143 | case OpCode::I32x4__lt_u: |
2726 | 143 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntULT); |
2727 | 143 | break; |
2728 | 118 | case OpCode::I32x4__gt_s: |
2729 | 118 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSGT); |
2730 | 118 | break; |
2731 | 265 | case OpCode::I32x4__gt_u: |
2732 | 265 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntUGT); |
2733 | 265 | break; |
2734 | 253 | case OpCode::I32x4__le_s: |
2735 | 253 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSLE); |
2736 | 253 | break; |
2737 | 276 | case OpCode::I32x4__le_u: |
2738 | 276 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntULE); |
2739 | 276 | break; |
2740 | 99 | case OpCode::I32x4__ge_s: |
2741 | 99 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSGE); |
2742 | 99 | break; |
2743 | 103 | case OpCode::I32x4__ge_u: |
2744 | 103 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntUGE); |
2745 | 103 | break; |
2746 | 117 | case OpCode::I64x2__eq: |
2747 | 117 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntEQ); |
2748 | 117 | break; |
2749 | 53 | case OpCode::I64x2__ne: |
2750 | 53 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntNE); |
2751 | 53 | break; |
2752 | 51 | case OpCode::I64x2__lt_s: |
2753 | 51 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSLT); |
2754 | 51 | break; |
2755 | 161 | case OpCode::I64x2__gt_s: |
2756 | 161 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSGT); |
2757 | 161 | break; |
2758 | 36 | case OpCode::I64x2__le_s: |
2759 | 36 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSLE); |
2760 | 36 | break; |
2761 | 46 | case OpCode::I64x2__ge_s: |
2762 | 46 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSGE); |
2763 | 46 | break; |
2764 | 1.33k | case OpCode::F32x4__eq: |
2765 | 1.33k | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOEQ, |
2766 | 1.33k | Context.Int32x4Ty); |
2767 | 1.33k | break; |
2768 | 50 | case OpCode::F32x4__ne: |
2769 | 50 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealUNE, |
2770 | 50 | Context.Int32x4Ty); |
2771 | 50 | break; |
2772 | 908 | case OpCode::F32x4__lt: |
2773 | 908 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOLT, |
2774 | 908 | Context.Int32x4Ty); |
2775 | 908 | break; |
2776 | 86 | case OpCode::F32x4__gt: |
2777 | 86 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOGT, |
2778 | 86 | Context.Int32x4Ty); |
2779 | 86 | break; |
2780 | 348 | case OpCode::F32x4__le: |
2781 | 348 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOLE, |
2782 | 348 | Context.Int32x4Ty); |
2783 | 348 | break; |
2784 | 87 | case OpCode::F32x4__ge: |
2785 | 87 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOGE, |
2786 | 87 | Context.Int32x4Ty); |
2787 | 87 | break; |
2788 | 68 | case OpCode::F64x2__eq: |
2789 | 68 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOEQ, |
2790 | 68 | Context.Int64x2Ty); |
2791 | 68 | break; |
2792 | 121 | case OpCode::F64x2__ne: |
2793 | 121 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealUNE, |
2794 | 121 | Context.Int64x2Ty); |
2795 | 121 | break; |
2796 | 150 | case OpCode::F64x2__lt: |
2797 | 150 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOLT, |
2798 | 150 | Context.Int64x2Ty); |
2799 | 150 | break; |
2800 | 63 | case OpCode::F64x2__gt: |
2801 | 63 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOGT, |
2802 | 63 | Context.Int64x2Ty); |
2803 | 63 | break; |
2804 | 182 | case OpCode::F64x2__le: |
2805 | 182 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOLE, |
2806 | 182 | Context.Int64x2Ty); |
2807 | 182 | break; |
2808 | 91 | case OpCode::F64x2__ge: |
2809 | 91 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOGE, |
2810 | 91 | Context.Int64x2Ty); |
2811 | 91 | break; |
2812 | 181 | case OpCode::V128__not: |
2813 | 181 | Stack.back() = Builder.createNot(Stack.back()); |
2814 | 181 | break; |
2815 | 82 | case OpCode::V128__and: { |
2816 | 82 | auto RHS = stackPop(); |
2817 | 82 | auto LHS = stackPop(); |
2818 | 82 | stackPush(Builder.createAnd(LHS, RHS)); |
2819 | 82 | break; |
2820 | 838 | } |
2821 | 83 | case OpCode::V128__andnot: { |
2822 | 83 | auto RHS = stackPop(); |
2823 | 83 | auto LHS = stackPop(); |
2824 | 83 | stackPush(Builder.createAnd(LHS, Builder.createNot(RHS))); |
2825 | 83 | break; |
2826 | 838 | } |
2827 | 124 | case OpCode::V128__or: { |
2828 | 124 | auto RHS = stackPop(); |
2829 | 124 | auto LHS = stackPop(); |
2830 | 124 | stackPush(Builder.createOr(LHS, RHS)); |
2831 | 124 | break; |
2832 | 838 | } |
2833 | 64 | case OpCode::V128__xor: { |
2834 | 64 | auto RHS = stackPop(); |
2835 | 64 | auto LHS = stackPop(); |
2836 | 64 | stackPush(Builder.createXor(LHS, RHS)); |
2837 | 64 | break; |
2838 | 838 | } |
2839 | 126 | case OpCode::V128__bitselect: { |
2840 | 126 | auto C = stackPop(); |
2841 | 126 | auto V2 = stackPop(); |
2842 | 126 | auto V1 = stackPop(); |
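 |  | // Branch-free bitselect: ((V1 ^ V2) & C) ^ V2 takes V1's bit where the mask bit in C is 1 and V2's bit where it is 0. |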
2843 | 126 | stackPush(Builder.createXor( |
2844 | 126 | Builder.createAnd(Builder.createXor(V1, V2), C), V2)); |
2845 | 126 | break; |
2846 | 838 | } |
2847 | 112 | case OpCode::V128__any_true: |
2848 | 112 | compileVectorAnyTrue(); |
2849 | 112 | break; |
2850 | 1.05k | case OpCode::I8x16__abs: |
2851 | 1.05k | compileVectorAbs(Context.Int8x16Ty); |
2852 | 1.05k | break; |
2853 | 1.97k | case OpCode::I8x16__neg: |
2854 | 1.97k | compileVectorNeg(Context.Int8x16Ty); |
2855 | 1.97k | break; |
2856 | 121 | case OpCode::I8x16__popcnt: |
2857 | 121 | compileVectorPopcnt(); |
2858 | 121 | break; |
2859 | 311 | case OpCode::I8x16__all_true: |
2860 | 311 | compileVectorAllTrue(Context.Int8x16Ty); |
2861 | 311 | break; |
2862 | 602 | case OpCode::I8x16__bitmask: |
2863 | 602 | compileVectorBitMask(Context.Int8x16Ty); |
2864 | 602 | break; |
2865 | 95 | case OpCode::I8x16__narrow_i16x8_s: |
2866 | 95 | compileVectorNarrow(Context.Int16x8Ty, true); |
2867 | 95 | break; |
2868 | 169 | case OpCode::I8x16__narrow_i16x8_u: |
2869 | 169 | compileVectorNarrow(Context.Int16x8Ty, false); |
2870 | 169 | break; |
2871 | 164 | case OpCode::I8x16__shl: |
2872 | 164 | compileVectorShl(Context.Int8x16Ty); |
2873 | 164 | break; |
2874 | 1.05k | case OpCode::I8x16__shr_s: |
2875 | 1.05k | compileVectorAShr(Context.Int8x16Ty); |
2876 | 1.05k | break; |
2877 | 94 | case OpCode::I8x16__shr_u: |
2878 | 94 | compileVectorLShr(Context.Int8x16Ty); |
2879 | 94 | break; |
2880 | 55 | case OpCode::I8x16__add: |
2881 | 55 | compileVectorVectorAdd(Context.Int8x16Ty); |
2882 | 55 | break; |
2883 | 446 | case OpCode::I8x16__add_sat_s: |
2884 | 446 | compileVectorVectorAddSat(Context.Int8x16Ty, true); |
2885 | 446 | break; |
2886 | 83 | case OpCode::I8x16__add_sat_u: |
2887 | 83 | compileVectorVectorAddSat(Context.Int8x16Ty, false); |
2888 | 83 | break; |
2889 | 70 | case OpCode::I8x16__sub: |
2890 | 70 | compileVectorVectorSub(Context.Int8x16Ty); |
2891 | 70 | break; |
2892 | 158 | case OpCode::I8x16__sub_sat_s: |
2893 | 158 | compileVectorVectorSubSat(Context.Int8x16Ty, true); |
2894 | 158 | break; |
2895 | 93 | case OpCode::I8x16__sub_sat_u: |
2896 | 93 | compileVectorVectorSubSat(Context.Int8x16Ty, false); |
2897 | 93 | break; |
2898 | 73 | case OpCode::I8x16__min_s: |
2899 | 73 | compileVectorVectorSMin(Context.Int8x16Ty); |
2900 | 73 | break; |
2901 | 75 | case OpCode::I8x16__min_u: |
2902 | 75 | compileVectorVectorUMin(Context.Int8x16Ty); |
2903 | 75 | break; |
2904 | 334 | case OpCode::I8x16__max_s: |
2905 | 334 | compileVectorVectorSMax(Context.Int8x16Ty); |
2906 | 334 | break; |
2907 | 100 | case OpCode::I8x16__max_u: |
2908 | 100 | compileVectorVectorUMax(Context.Int8x16Ty); |
2909 | 100 | break; |
2910 | 130 | case OpCode::I8x16__avgr_u: |
2911 | 130 | compileVectorVectorUAvgr(Context.Int8x16Ty); |
2912 | 130 | break; |
2913 | 206 | case OpCode::I16x8__abs: |
2914 | 206 | compileVectorAbs(Context.Int16x8Ty); |
2915 | 206 | break; |
2916 | 198 | case OpCode::I16x8__neg: |
2917 | 198 | compileVectorNeg(Context.Int16x8Ty); |
2918 | 198 | break; |
2919 | 117 | case OpCode::I16x8__all_true: |
2920 | 117 | compileVectorAllTrue(Context.Int16x8Ty); |
2921 | 117 | break; |
2922 | 118 | case OpCode::I16x8__bitmask: |
2923 | 118 | compileVectorBitMask(Context.Int16x8Ty); |
2924 | 118 | break; |
2925 | 49 | case OpCode::I16x8__narrow_i32x4_s: |
2926 | 49 | compileVectorNarrow(Context.Int32x4Ty, true); |
2927 | 49 | break; |
2928 | 365 | case OpCode::I16x8__narrow_i32x4_u: |
2929 | 365 | compileVectorNarrow(Context.Int32x4Ty, false); |
2930 | 365 | break; |
2931 | 634 | case OpCode::I16x8__extend_low_i8x16_s: |
2932 | 634 | compileVectorExtend(Context.Int8x16Ty, true, true); |
2933 | 634 | break; |
2934 | 63 | case OpCode::I16x8__extend_high_i8x16_s: |
2935 | 63 | compileVectorExtend(Context.Int8x16Ty, true, false); |
2936 | 63 | break; |
2937 | 373 | case OpCode::I16x8__extend_low_i8x16_u: |
2938 | 373 | compileVectorExtend(Context.Int8x16Ty, false, true); |
2939 | 373 | break; |
2940 | 12 | case OpCode::I16x8__extend_high_i8x16_u: |
2941 | 12 | compileVectorExtend(Context.Int8x16Ty, false, false); |
2942 | 12 | break; |
2943 | 84 | case OpCode::I16x8__shl: |
2944 | 84 | compileVectorShl(Context.Int16x8Ty); |
2945 | 84 | break; |
2946 | 261 | case OpCode::I16x8__shr_s: |
2947 | 261 | compileVectorAShr(Context.Int16x8Ty); |
2948 | 261 | break; |
2949 | 64 | case OpCode::I16x8__shr_u: |
2950 | 64 | compileVectorLShr(Context.Int16x8Ty); |
2951 | 64 | break; |
2952 | 115 | case OpCode::I16x8__add: |
2953 | 115 | compileVectorVectorAdd(Context.Int16x8Ty); |
2954 | 115 | break; |
2955 | 22 | case OpCode::I16x8__add_sat_s: |
2956 | 22 | compileVectorVectorAddSat(Context.Int16x8Ty, true); |
2957 | 22 | break; |
2958 | 422 | case OpCode::I16x8__add_sat_u: |
2959 | 422 | compileVectorVectorAddSat(Context.Int16x8Ty, false); |
2960 | 422 | break; |
2961 | 317 | case OpCode::I16x8__sub: |
2962 | 317 | compileVectorVectorSub(Context.Int16x8Ty); |
2963 | 317 | break; |
2964 | 24 | case OpCode::I16x8__sub_sat_s: |
2965 | 24 | compileVectorVectorSubSat(Context.Int16x8Ty, true); |
2966 | 24 | break; |
2967 | 73 | case OpCode::I16x8__sub_sat_u: |
2968 | 73 | compileVectorVectorSubSat(Context.Int16x8Ty, false); |
2969 | 73 | break; |
2970 | 111 | case OpCode::I16x8__mul: |
2971 | 111 | compileVectorVectorMul(Context.Int16x8Ty); |
2972 | 111 | break; |
2973 | 125 | case OpCode::I16x8__min_s: |
2974 | 125 | compileVectorVectorSMin(Context.Int16x8Ty); |
2975 | 125 | break; |
2976 | 124 | case OpCode::I16x8__min_u: |
2977 | 124 | compileVectorVectorUMin(Context.Int16x8Ty); |
2978 | 124 | break; |
2979 | 81 | case OpCode::I16x8__max_s: |
2980 | 81 | compileVectorVectorSMax(Context.Int16x8Ty); |
2981 | 81 | break; |
2982 | 551 | case OpCode::I16x8__max_u: |
2983 | 551 | compileVectorVectorUMax(Context.Int16x8Ty); |
2984 | 551 | break; |
2985 | 109 | case OpCode::I16x8__avgr_u: |
2986 | 109 | compileVectorVectorUAvgr(Context.Int16x8Ty); |
2987 | 109 | break; |
2988 | 69 | case OpCode::I16x8__extmul_low_i8x16_s: |
2989 | 69 | compileVectorExtMul(Context.Int8x16Ty, true, true); |
2990 | 69 | break; |
2991 | 203 | case OpCode::I16x8__extmul_high_i8x16_s: |
2992 | 203 | compileVectorExtMul(Context.Int8x16Ty, true, false); |
2993 | 203 | break; |
2994 | 125 | case OpCode::I16x8__extmul_low_i8x16_u: |
2995 | 125 | compileVectorExtMul(Context.Int8x16Ty, false, true); |
2996 | 125 | break; |
2997 | 461 | case OpCode::I16x8__extmul_high_i8x16_u: |
2998 | 461 | compileVectorExtMul(Context.Int8x16Ty, false, false); |
2999 | 461 | break; |
3000 | 133 | case OpCode::I16x8__q15mulr_sat_s: |
3001 | 133 | compileVectorVectorQ15MulSat(); |
3002 | 133 | break; |
3003 | 374 | case OpCode::I16x8__extadd_pairwise_i8x16_s: |
3004 | 374 | compileVectorExtAddPairwise(Context.Int8x16Ty, true); |
3005 | 374 | break; |
3006 | 336 | case OpCode::I16x8__extadd_pairwise_i8x16_u: |
3007 | 336 | compileVectorExtAddPairwise(Context.Int8x16Ty, false); |
3008 | 336 | break; |
3009 | 84 | case OpCode::I32x4__abs: |
3010 | 84 | compileVectorAbs(Context.Int32x4Ty); |
3011 | 84 | break; |
3012 | 180 | case OpCode::I32x4__neg: |
3013 | 180 | compileVectorNeg(Context.Int32x4Ty); |
3014 | 180 | break; |
3015 | 176 | case OpCode::I32x4__all_true: |
3016 | 176 | compileVectorAllTrue(Context.Int32x4Ty); |
3017 | 176 | break; |
3018 | 85 | case OpCode::I32x4__bitmask: |
3019 | 85 | compileVectorBitMask(Context.Int32x4Ty); |
3020 | 85 | break; |
3021 | 113 | case OpCode::I32x4__extend_low_i16x8_s: |
3022 | 113 | compileVectorExtend(Context.Int16x8Ty, true, true); |
3023 | 113 | break; |
3024 | 527 | case OpCode::I32x4__extend_high_i16x8_s: |
3025 | 527 | compileVectorExtend(Context.Int16x8Ty, true, false); |
3026 | 527 | break; |
3027 | 1.89k | case OpCode::I32x4__extend_low_i16x8_u: |
3028 | 1.89k | compileVectorExtend(Context.Int16x8Ty, false, true); |
3029 | 1.89k | break; |
3030 | 147 | case OpCode::I32x4__extend_high_i16x8_u: |
3031 | 147 | compileVectorExtend(Context.Int16x8Ty, false, false); |
3032 | 147 | break; |
3033 | 966 | case OpCode::I32x4__shl: |
3034 | 966 | compileVectorShl(Context.Int32x4Ty); |
3035 | 966 | break; |
3036 | 177 | case OpCode::I32x4__shr_s: |
3037 | 177 | compileVectorAShr(Context.Int32x4Ty); |
3038 | 177 | break; |
3039 | 113 | case OpCode::I32x4__shr_u: |
3040 | 113 | compileVectorLShr(Context.Int32x4Ty); |
3041 | 113 | break; |
3042 | 106 | case OpCode::I32x4__add: |
3043 | 106 | compileVectorVectorAdd(Context.Int32x4Ty); |
3044 | 106 | break; |
3045 | 148 | case OpCode::I32x4__sub: |
3046 | 148 | compileVectorVectorSub(Context.Int32x4Ty); |
3047 | 148 | break; |
3048 | 243 | case OpCode::I32x4__mul: |
3049 | 243 | compileVectorVectorMul(Context.Int32x4Ty); |
3050 | 243 | break; |
3051 | 128 | case OpCode::I32x4__min_s: |
3052 | 128 | compileVectorVectorSMin(Context.Int32x4Ty); |
3053 | 128 | break; |
3054 | 99 | case OpCode::I32x4__min_u: |
3055 | 99 | compileVectorVectorUMin(Context.Int32x4Ty); |
3056 | 99 | break; |
3057 | 94 | case OpCode::I32x4__max_s: |
3058 | 94 | compileVectorVectorSMax(Context.Int32x4Ty); |
3059 | 94 | break; |
3060 | 117 | case OpCode::I32x4__max_u: |
3061 | 117 | compileVectorVectorUMax(Context.Int32x4Ty); |
3062 | 117 | break; |
3063 | 111 | case OpCode::I32x4__extmul_low_i16x8_s: |
3064 | 111 | compileVectorExtMul(Context.Int16x8Ty, true, true); |
3065 | 111 | break; |
3066 | 50 | case OpCode::I32x4__extmul_high_i16x8_s: |
3067 | 50 | compileVectorExtMul(Context.Int16x8Ty, true, false); |
3068 | 50 | break; |
3069 | 202 | case OpCode::I32x4__extmul_low_i16x8_u: |
3070 | 202 | compileVectorExtMul(Context.Int16x8Ty, false, true); |
3071 | 202 | break; |
3072 | 43 | case OpCode::I32x4__extmul_high_i16x8_u: |
3073 | 43 | compileVectorExtMul(Context.Int16x8Ty, false, false); |
3074 | 43 | break; |
3075 | 819 | case OpCode::I32x4__extadd_pairwise_i16x8_s: |
3076 | 819 | compileVectorExtAddPairwise(Context.Int16x8Ty, true); |
3077 | 819 | break; |
3078 | 519 | case OpCode::I32x4__extadd_pairwise_i16x8_u: |
3079 | 519 | compileVectorExtAddPairwise(Context.Int16x8Ty, false); |
3080 | 519 | break; |
3081 | 145 | case OpCode::I32x4__dot_i16x8_s: { |
3082 | 145 | auto ExtendTy = Context.Int16x8Ty.getExtendedElementVectorType(); |
3083 | 145 | auto Undef = LLVM::Value::getUndef(ExtendTy); |
3084 | 145 | auto LHS = Builder.createSExt( |
3085 | 145 | Builder.createBitCast(stackPop(), Context.Int16x8Ty), ExtendTy); |
3086 | 145 | auto RHS = Builder.createSExt( |
3087 | 145 | Builder.createBitCast(stackPop(), Context.Int16x8Ty), ExtendTy); |
3088 | 145 | auto M = Builder.createMul(LHS, RHS); |
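 |  | // Sum adjacent 32-bit products pairwise by splitting the even and odd lanes and adding them, yielding the i32x4 dot product. |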
3089 | 145 | auto L = Builder.createShuffleVector( |
3090 | 145 | M, Undef, |
3091 | 145 | LLVM::Value::getConstVector32(LLContext, {0U, 2U, 4U, 6U})); |
3092 | 145 | auto R = Builder.createShuffleVector( |
3093 | 145 | M, Undef, |
3094 | 145 | LLVM::Value::getConstVector32(LLContext, {1U, 3U, 5U, 7U})); |
3095 | 145 | auto V = Builder.createAdd(L, R); |
3096 | 145 | stackPush(Builder.createBitCast(V, Context.Int64x2Ty)); |
3097 | 145 | break; |
3098 | 838 | } |
3099 | 877 | case OpCode::I64x2__abs: |
3100 | 877 | compileVectorAbs(Context.Int64x2Ty); |
3101 | 877 | break; |
3102 | 519 | case OpCode::I64x2__neg: |
3103 | 519 | compileVectorNeg(Context.Int64x2Ty); |
3104 | 519 | break; |
3105 | 299 | case OpCode::I64x2__all_true: |
3106 | 299 | compileVectorAllTrue(Context.Int64x2Ty); |
3107 | 299 | break; |
3108 | 211 | case OpCode::I64x2__bitmask: |
3109 | 211 | compileVectorBitMask(Context.Int64x2Ty); |
3110 | 211 | break; |
3111 | 112 | case OpCode::I64x2__extend_low_i32x4_s: |
3112 | 112 | compileVectorExtend(Context.Int32x4Ty, true, true); |
3113 | 112 | break; |
3114 | 685 | case OpCode::I64x2__extend_high_i32x4_s: |
3115 | 685 | compileVectorExtend(Context.Int32x4Ty, true, false); |
3116 | 685 | break; |
3117 | 126 | case OpCode::I64x2__extend_low_i32x4_u: |
3118 | 126 | compileVectorExtend(Context.Int32x4Ty, false, true); |
3119 | 126 | break; |
3120 | 560 | case OpCode::I64x2__extend_high_i32x4_u: |
3121 | 560 | compileVectorExtend(Context.Int32x4Ty, false, false); |
3122 | 560 | break; |
3123 | 124 | case OpCode::I64x2__shl: |
3124 | 124 | compileVectorShl(Context.Int64x2Ty); |
3125 | 124 | break; |
3126 | 275 | case OpCode::I64x2__shr_s: |
3127 | 275 | compileVectorAShr(Context.Int64x2Ty); |
3128 | 275 | break; |
3129 | 93 | case OpCode::I64x2__shr_u: |
3130 | 93 | compileVectorLShr(Context.Int64x2Ty); |
3131 | 93 | break; |
3132 | 49 | case OpCode::I64x2__add: |
3133 | 49 | compileVectorVectorAdd(Context.Int64x2Ty); |
3134 | 49 | break; |
3135 | 247 | case OpCode::I64x2__sub: |
3136 | 247 | compileVectorVectorSub(Context.Int64x2Ty); |
3137 | 247 | break; |
3138 | 78 | case OpCode::I64x2__mul: |
3139 | 78 | compileVectorVectorMul(Context.Int64x2Ty); |
3140 | 78 | break; |
3141 | 41 | case OpCode::I64x2__extmul_low_i32x4_s: |
3142 | 41 | compileVectorExtMul(Context.Int32x4Ty, true, true); |
3143 | 41 | break; |
3144 | 290 | case OpCode::I64x2__extmul_high_i32x4_s: |
3145 | 290 | compileVectorExtMul(Context.Int32x4Ty, true, false); |
3146 | 290 | break; |
3147 | 32 | case OpCode::I64x2__extmul_low_i32x4_u: |
3148 | 32 | compileVectorExtMul(Context.Int32x4Ty, false, true); |
3149 | 32 | break; |
3150 | 136 | case OpCode::I64x2__extmul_high_i32x4_u: |
3151 | 136 | compileVectorExtMul(Context.Int32x4Ty, false, false); |
3152 | 136 | break; |
3153 | 64 | case OpCode::F32x4__abs: |
3154 | 64 | compileVectorFAbs(Context.Floatx4Ty); |
3155 | 64 | break; |
3156 | 143 | case OpCode::F32x4__neg: |
3157 | 143 | compileVectorFNeg(Context.Floatx4Ty); |
3158 | 143 | break; |
3159 | 189 | case OpCode::F32x4__sqrt: |
3160 | 189 | compileVectorFSqrt(Context.Floatx4Ty); |
3161 | 189 | break; |
3162 | 126 | case OpCode::F32x4__add: |
3163 | 126 | compileVectorVectorFAdd(Context.Floatx4Ty); |
3164 | 126 | break; |
3165 | 250 | case OpCode::F32x4__sub: |
3166 | 250 | compileVectorVectorFSub(Context.Floatx4Ty); |
3167 | 250 | break; |
3168 | 41 | case OpCode::F32x4__mul: |
3169 | 41 | compileVectorVectorFMul(Context.Floatx4Ty); |
3170 | 41 | break; |
3171 | 189 | case OpCode::F32x4__div: |
3172 | 189 | compileVectorVectorFDiv(Context.Floatx4Ty); |
3173 | 189 | break; |
3174 | 129 | case OpCode::F32x4__min: |
3175 | 129 | compileVectorVectorFMin(Context.Floatx4Ty); |
3176 | 129 | break; |
3177 | 47 | case OpCode::F32x4__max: |
3178 | 47 | compileVectorVectorFMax(Context.Floatx4Ty); |
3179 | 47 | break; |
3180 | 32 | case OpCode::F32x4__pmin: |
3181 | 32 | compileVectorVectorFPMin(Context.Floatx4Ty); |
3182 | 32 | break; |
3183 | 215 | case OpCode::F32x4__pmax: |
3184 | 215 | compileVectorVectorFPMax(Context.Floatx4Ty); |
3185 | 215 | break; |
3186 | 978 | case OpCode::F32x4__ceil: |
3187 | 978 | compileVectorFCeil(Context.Floatx4Ty); |
3188 | 978 | break; |
3189 | 1.87k | case OpCode::F32x4__floor: |
3190 | 1.87k | compileVectorFFloor(Context.Floatx4Ty); |
3191 | 1.87k | break; |
3192 | 1.95k | case OpCode::F32x4__trunc: |
3193 | 1.95k | compileVectorFTrunc(Context.Floatx4Ty); |
3194 | 1.95k | break; |
3195 | 272 | case OpCode::F32x4__nearest: |
3196 | 272 | compileVectorFNearest(Context.Floatx4Ty); |
3197 | 272 | break; |
3198 | 442 | case OpCode::F64x2__abs: |
3199 | 442 | compileVectorFAbs(Context.Doublex2Ty); |
3200 | 442 | break; |
3201 | 802 | case OpCode::F64x2__neg: |
3202 | 802 | compileVectorFNeg(Context.Doublex2Ty); |
3203 | 802 | break; |
3204 | 126 | case OpCode::F64x2__sqrt: |
3205 | 126 | compileVectorFSqrt(Context.Doublex2Ty); |
3206 | 126 | break; |
3207 | 51 | case OpCode::F64x2__add: |
3208 | 51 | compileVectorVectorFAdd(Context.Doublex2Ty); |
3209 | 51 | break; |
3210 | 221 | case OpCode::F64x2__sub: |
3211 | 221 | compileVectorVectorFSub(Context.Doublex2Ty); |
3212 | 221 | break; |
3213 | 160 | case OpCode::F64x2__mul: |
3214 | 160 | compileVectorVectorFMul(Context.Doublex2Ty); |
3215 | 160 | break; |
3216 | 41 | case OpCode::F64x2__div: |
3217 | 41 | compileVectorVectorFDiv(Context.Doublex2Ty); |
3218 | 41 | break; |
3219 | 174 | case OpCode::F64x2__min: |
3220 | 174 | compileVectorVectorFMin(Context.Doublex2Ty); |
3221 | 174 | break; |
3222 | 165 | case OpCode::F64x2__max: |
3223 | 165 | compileVectorVectorFMax(Context.Doublex2Ty); |
3224 | 165 | break; |
3225 | 262 | case OpCode::F64x2__pmin: |
3226 | 262 | compileVectorVectorFPMin(Context.Doublex2Ty); |
3227 | 262 | break; |
3228 | 65 | case OpCode::F64x2__pmax: |
3229 | 65 | compileVectorVectorFPMax(Context.Doublex2Ty); |
3230 | 65 | break; |
3231 | 687 | case OpCode::F64x2__ceil: |
3232 | 687 | compileVectorFCeil(Context.Doublex2Ty); |
3233 | 687 | break; |
3234 | 820 | case OpCode::F64x2__floor: |
3235 | 820 | compileVectorFFloor(Context.Doublex2Ty); |
3236 | 820 | break; |
3237 | 114 | case OpCode::F64x2__trunc: |
3238 | 114 | compileVectorFTrunc(Context.Doublex2Ty); |
3239 | 114 | break; |
3240 | 159 | case OpCode::F64x2__nearest: |
3241 | 159 | compileVectorFNearest(Context.Doublex2Ty); |
3242 | 159 | break; |
3243 | 205 | case OpCode::I32x4__trunc_sat_f32x4_s: |
3244 | 205 | compileVectorTruncSatS32(Context.Floatx4Ty, false); |
3245 | 205 | break; |
3246 | 3.73k | case OpCode::I32x4__trunc_sat_f32x4_u: |
3247 | 3.73k | compileVectorTruncSatU32(Context.Floatx4Ty, false); |
3248 | 3.73k | break; |
3249 | 335 | case OpCode::F32x4__convert_i32x4_s: |
3250 | 335 | compileVectorConvertS(Context.Int32x4Ty, Context.Floatx4Ty, false); |
3251 | 335 | break; |
3252 | 722 | case OpCode::F32x4__convert_i32x4_u: |
3253 | 722 | compileVectorConvertU(Context.Int32x4Ty, Context.Floatx4Ty, false); |
3254 | 722 | break; |
3255 | 755 | case OpCode::I32x4__trunc_sat_f64x2_s_zero: |
3256 | 755 | compileVectorTruncSatS32(Context.Doublex2Ty, true); |
3257 | 755 | break; |
3258 | 2.14k | case OpCode::I32x4__trunc_sat_f64x2_u_zero: |
3259 | 2.14k | compileVectorTruncSatU32(Context.Doublex2Ty, true); |
3260 | 2.14k | break; |
3261 | 335 | case OpCode::F64x2__convert_low_i32x4_s: |
3262 | 335 | compileVectorConvertS(Context.Int32x4Ty, Context.Doublex2Ty, true); |
3263 | 335 | break; |
3264 | 1.26k | case OpCode::F64x2__convert_low_i32x4_u: |
3265 | 1.26k | compileVectorConvertU(Context.Int32x4Ty, Context.Doublex2Ty, true); |
3266 | 1.26k | break; |
3267 | 734 | case OpCode::F32x4__demote_f64x2_zero: |
3268 | 734 | compileVectorDemote(); |
3269 | 734 | break; |
3270 | 730 | case OpCode::F64x2__promote_low_f32x4: |
3271 | 730 | compileVectorPromote(); |
3272 | 730 | break; |
3273 | | |
3274 | | // Relaxed SIMD Instructions |
3275 | 0 | case OpCode::I8x16__relaxed_swizzle: |
3276 | 0 | compileVectorSwizzle(); |
3277 | 0 | break; |
3278 | 0 | case OpCode::I32x4__relaxed_trunc_f32x4_s: |
3279 | 0 | compileVectorTruncSatS32(Context.Floatx4Ty, false); |
3280 | 0 | break; |
3281 | 0 | case OpCode::I32x4__relaxed_trunc_f32x4_u: |
3282 | 0 | compileVectorTruncSatU32(Context.Floatx4Ty, false); |
3283 | 0 | break; |
3284 | 0 | case OpCode::I32x4__relaxed_trunc_f64x2_s_zero: |
3285 | 0 | compileVectorTruncSatS32(Context.Doublex2Ty, true); |
3286 | 0 | break; |
3287 | 0 | case OpCode::I32x4__relaxed_trunc_f64x2_u_zero: |
3288 | 0 | compileVectorTruncSatU32(Context.Doublex2Ty, true); |
3289 | 0 | break; |
3290 | 0 | case OpCode::F32x4__relaxed_madd: |
3291 | 0 | compileVectorVectorMAdd(Context.Floatx4Ty); |
3292 | 0 | break; |
3293 | 0 | case OpCode::F32x4__relaxed_nmadd: |
3294 | 0 | compileVectorVectorNMAdd(Context.Floatx4Ty); |
3295 | 0 | break; |
3296 | 0 | case OpCode::F64x2__relaxed_madd: |
3297 | 0 | compileVectorVectorMAdd(Context.Doublex2Ty); |
3298 | 0 | break; |
3299 | 0 | case OpCode::F64x2__relaxed_nmadd: |
3300 | 0 | compileVectorVectorNMAdd(Context.Doublex2Ty); |
3301 | 0 | break; |
3302 | 0 | case OpCode::I8x16__relaxed_laneselect: |
3303 | 0 | case OpCode::I16x8__relaxed_laneselect: |
3304 | 0 | case OpCode::I32x4__relaxed_laneselect: |
3305 | 0 | case OpCode::I64x2__relaxed_laneselect: { |
3306 | 0 | auto C = stackPop(); |
3307 | 0 | auto V2 = stackPop(); |
3308 | 0 | auto V1 = stackPop(); |
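 |  | // The relaxed laneselect variants are compiled with full bitselect semantics, reusing the same xor/and/xor lowering as V128__bitselect. |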
3309 | 0 | stackPush(Builder.createXor( |
3310 | 0 | Builder.createAnd(Builder.createXor(V1, V2), C), V2)); |
3311 | 0 | break; |
3312 | 0 | } |
3313 | 0 | case OpCode::F32x4__relaxed_min: |
3314 | 0 | compileVectorVectorFMin(Context.Floatx4Ty); |
3315 | 0 | break; |
3316 | 0 | case OpCode::F32x4__relaxed_max: |
3317 | 0 | compileVectorVectorFMax(Context.Floatx4Ty); |
3318 | 0 | break; |
3319 | 0 | case OpCode::F64x2__relaxed_min: |
3320 | 0 | compileVectorVectorFMin(Context.Doublex2Ty); |
3321 | 0 | break; |
3322 | 0 | case OpCode::F64x2__relaxed_max: |
3323 | 0 | compileVectorVectorFMax(Context.Doublex2Ty); |
3324 | 0 | break; |
3325 | 0 | case OpCode::I16x8__relaxed_q15mulr_s: |
3326 | 0 | compileVectorVectorQ15MulSat(); |
3327 | 0 | break; |
3328 | 0 | case OpCode::I16x8__relaxed_dot_i8x16_i7x16_s: |
3329 | 0 | compileVectorRelaxedIntegerDotProduct(); |
3330 | 0 | break; |
3331 | 0 | case OpCode::I32x4__relaxed_dot_i8x16_i7x16_add_s: |
3332 | 0 | compileVectorRelaxedIntegerDotProductAdd(); |
3333 | 0 | break; |
3334 | | |
3335 | | // Atomic Instructions |
3336 | 186 | case OpCode::Atomic__fence: |
3337 | 186 | return compileMemoryFence(); |
3338 | 26 | case OpCode::Memory__atomic__notify: |
3339 | 26 | return compileAtomicNotify(Instr.getTargetIndex(), |
3340 | 26 | Instr.getMemoryOffset()); |
3341 | 5 | case OpCode::Memory__atomic__wait32: |
3342 | 5 | return compileAtomicWait(Instr.getTargetIndex(), |
3343 | 5 | Instr.getMemoryOffset(), Context.Int32Ty, 32); |
3344 | 2 | case OpCode::Memory__atomic__wait64: |
3345 | 2 | return compileAtomicWait(Instr.getTargetIndex(), |
3346 | 2 | Instr.getMemoryOffset(), Context.Int64Ty, 64); |
3347 | 0 | case OpCode::I32__atomic__load: |
3348 | 0 | return compileAtomicLoad( |
3349 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3350 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int32Ty, true); |
3351 | 0 | case OpCode::I64__atomic__load: |
3352 | 0 | return compileAtomicLoad( |
3353 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3354 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int64Ty, true); |
3355 | 0 | case OpCode::I32__atomic__load8_u: |
3356 | 0 | return compileAtomicLoad( |
3357 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3358 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int8Ty); |
3359 | 0 | case OpCode::I32__atomic__load16_u: |
3360 | 0 | return compileAtomicLoad( |
3361 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3362 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int16Ty); |
3363 | 0 | case OpCode::I64__atomic__load8_u: |
3364 | 0 | return compileAtomicLoad( |
3365 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3366 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int8Ty); |
3367 | 0 | case OpCode::I64__atomic__load16_u: |
3368 | 0 | return compileAtomicLoad( |
3369 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3370 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int16Ty); |
3371 | 0 | case OpCode::I64__atomic__load32_u: |
3372 | 0 | return compileAtomicLoad( |
3373 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3374 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int32Ty); |
3375 | 0 | case OpCode::I32__atomic__store: |
3376 | 0 | return compileAtomicStore( |
3377 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3378 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int32Ty, true); |
3379 | 0 | case OpCode::I64__atomic__store: |
3380 | 0 | return compileAtomicStore( |
3381 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3382 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int64Ty, true); |
3383 | 0 | case OpCode::I32__atomic__store8: |
3384 | 0 | return compileAtomicStore( |
3385 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3386 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int8Ty, true); |
3387 | 0 | case OpCode::I32__atomic__store16: |
3388 | 0 | return compileAtomicStore( |
3389 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3390 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int16Ty, true); |
3391 | 0 | case OpCode::I64__atomic__store8: |
3392 | 0 | return compileAtomicStore( |
3393 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3394 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int8Ty, true); |
3395 | 0 | case OpCode::I64__atomic__store16: |
3396 | 0 | return compileAtomicStore( |
3397 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3398 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int16Ty, true); |
3399 | 0 | case OpCode::I64__atomic__store32: |
3400 | 0 | return compileAtomicStore( |
3401 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3402 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int32Ty, true); |
3403 | 0 | case OpCode::I32__atomic__rmw__add: |
3404 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3405 | 0 | Instr.getMemoryOffset(), |
3406 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3407 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3408 | 0 | case OpCode::I64__atomic__rmw__add: |
3409 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3410 | 0 | Instr.getMemoryOffset(), |
3411 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3412 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3413 | 0 | case OpCode::I32__atomic__rmw8__add_u: |
3414 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3415 | 0 | Instr.getMemoryOffset(), |
3416 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3417 | 0 | Context.Int32Ty, Context.Int8Ty); |
3418 | 0 | case OpCode::I32__atomic__rmw16__add_u: |
3419 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3420 | 0 | Instr.getMemoryOffset(), |
3421 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3422 | 0 | Context.Int32Ty, Context.Int16Ty); |
3423 | 0 | case OpCode::I64__atomic__rmw8__add_u: |
3424 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3425 | 0 | Instr.getMemoryOffset(), |
3426 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3427 | 0 | Context.Int64Ty, Context.Int8Ty); |
3428 | 0 | case OpCode::I64__atomic__rmw16__add_u: |
3429 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3430 | 0 | Instr.getMemoryOffset(), |
3431 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3432 | 0 | Context.Int64Ty, Context.Int16Ty); |
3433 | 0 | case OpCode::I64__atomic__rmw32__add_u: |
3434 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3435 | 0 | Instr.getMemoryOffset(), |
3436 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3437 | 0 | Context.Int64Ty, Context.Int32Ty); |
3438 | 0 | case OpCode::I32__atomic__rmw__sub: |
3439 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3440 | 0 | Instr.getMemoryOffset(), |
3441 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3442 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3443 | 0 | case OpCode::I64__atomic__rmw__sub: |
3444 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3445 | 0 | Instr.getMemoryOffset(), |
3446 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3447 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3448 | 0 | case OpCode::I32__atomic__rmw8__sub_u: |
3449 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3450 | 0 | Instr.getMemoryOffset(), |
3451 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3452 | 0 | Context.Int32Ty, Context.Int8Ty); |
3453 | 0 | case OpCode::I32__atomic__rmw16__sub_u: |
3454 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3455 | 0 | Instr.getMemoryOffset(), |
3456 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3457 | 0 | Context.Int32Ty, Context.Int16Ty); |
3458 | 0 | case OpCode::I64__atomic__rmw8__sub_u: |
3459 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3460 | 0 | Instr.getMemoryOffset(), |
3461 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3462 | 0 | Context.Int64Ty, Context.Int8Ty); |
3463 | 0 | case OpCode::I64__atomic__rmw16__sub_u: |
3464 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3465 | 0 | Instr.getMemoryOffset(), |
3466 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3467 | 0 | Context.Int64Ty, Context.Int16Ty); |
3468 | 0 | case OpCode::I64__atomic__rmw32__sub_u: |
3469 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3470 | 0 | Instr.getMemoryOffset(), |
3471 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3472 | 0 | Context.Int64Ty, Context.Int32Ty); |
3473 | 0 | case OpCode::I32__atomic__rmw__and: |
3474 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3475 | 0 | Instr.getMemoryOffset(), |
3476 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3477 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3478 | 0 | case OpCode::I64__atomic__rmw__and: |
3479 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3480 | 0 | Instr.getMemoryOffset(), |
3481 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3482 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3483 | 0 | case OpCode::I32__atomic__rmw8__and_u: |
3484 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3485 | 0 | Instr.getMemoryOffset(), |
3486 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3487 | 0 | Context.Int32Ty, Context.Int8Ty); |
3488 | 0 | case OpCode::I32__atomic__rmw16__and_u: |
3489 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3490 | 0 | Instr.getMemoryOffset(), |
3491 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3492 | 0 | Context.Int32Ty, Context.Int16Ty); |
3493 | 0 | case OpCode::I64__atomic__rmw8__and_u: |
3494 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3495 | 0 | Instr.getMemoryOffset(), |
3496 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3497 | 0 | Context.Int64Ty, Context.Int8Ty); |
3498 | 0 | case OpCode::I64__atomic__rmw16__and_u: |
3499 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3500 | 0 | Instr.getMemoryOffset(), |
3501 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3502 | 0 | Context.Int64Ty, Context.Int16Ty); |
3503 | 0 | case OpCode::I64__atomic__rmw32__and_u: |
3504 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3505 | 0 | Instr.getMemoryOffset(), |
3506 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3507 | 0 | Context.Int64Ty, Context.Int32Ty); |
3508 | 0 | case OpCode::I32__atomic__rmw__or: |
3509 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3510 | 0 | Instr.getMemoryOffset(), |
3511 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3512 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3513 | 0 | case OpCode::I64__atomic__rmw__or: |
3514 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3515 | 0 | Instr.getMemoryOffset(), |
3516 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3517 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3518 | 0 | case OpCode::I32__atomic__rmw8__or_u: |
3519 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3520 | 0 | Instr.getMemoryOffset(), |
3521 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3522 | 0 | Context.Int32Ty, Context.Int8Ty); |
3523 | 0 | case OpCode::I32__atomic__rmw16__or_u: |
3524 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3525 | 0 | Instr.getMemoryOffset(), |
3526 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3527 | 0 | Context.Int32Ty, Context.Int16Ty); |
3528 | 0 | case OpCode::I64__atomic__rmw8__or_u: |
3529 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3530 | 0 | Instr.getMemoryOffset(), |
3531 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3532 | 0 | Context.Int64Ty, Context.Int8Ty); |
3533 | 0 | case OpCode::I64__atomic__rmw16__or_u: |
3534 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3535 | 0 | Instr.getMemoryOffset(), |
3536 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3537 | 0 | Context.Int64Ty, Context.Int16Ty); |
3538 | 0 | case OpCode::I64__atomic__rmw32__or_u: |
3539 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3540 | 0 | Instr.getMemoryOffset(), |
3541 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3542 | 0 | Context.Int64Ty, Context.Int32Ty); |
3543 | 0 | case OpCode::I32__atomic__rmw__xor: |
3544 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3545 | 0 | Instr.getMemoryOffset(), |
3546 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3547 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3548 | 0 | case OpCode::I64__atomic__rmw__xor: |
3549 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3550 | 0 | Instr.getMemoryOffset(), |
3551 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3552 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3553 | 0 | case OpCode::I32__atomic__rmw8__xor_u: |
3554 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3555 | 0 | Instr.getMemoryOffset(), |
3556 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3557 | 0 | Context.Int32Ty, Context.Int8Ty); |
3558 | 0 | case OpCode::I32__atomic__rmw16__xor_u: |
3559 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3560 | 0 | Instr.getMemoryOffset(), |
3561 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3562 | 0 | Context.Int32Ty, Context.Int16Ty); |
3563 | 0 | case OpCode::I64__atomic__rmw8__xor_u: |
3564 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3565 | 0 | Instr.getMemoryOffset(), |
3566 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3567 | 0 | Context.Int64Ty, Context.Int8Ty); |
3568 | 0 | case OpCode::I64__atomic__rmw16__xor_u: |
3569 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3570 | 0 | Instr.getMemoryOffset(), |
3571 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3572 | 0 | Context.Int64Ty, Context.Int16Ty); |
3573 | 0 | case OpCode::I64__atomic__rmw32__xor_u: |
3574 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3575 | 0 | Instr.getMemoryOffset(), |
3576 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3577 | 0 | Context.Int64Ty, Context.Int32Ty); |
3578 | 0 | case OpCode::I32__atomic__rmw__xchg: |
3579 | 0 | return compileAtomicRMWOp( |
3580 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3581 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int32Ty, |
3582 | 0 | Context.Int32Ty, true); |
3583 | 0 | case OpCode::I64__atomic__rmw__xchg: |
3584 | 0 | return compileAtomicRMWOp( |
3585 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3586 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int64Ty, |
3587 | 0 | Context.Int64Ty, true); |
3588 | 0 | case OpCode::I32__atomic__rmw8__xchg_u: |
3589 | 0 | return compileAtomicRMWOp( |
3590 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3591 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int32Ty, |
3592 | 0 | Context.Int8Ty); |
3593 | 0 | case OpCode::I32__atomic__rmw16__xchg_u: |
3594 | 0 | return compileAtomicRMWOp( |
3595 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3596 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int32Ty, |
3597 | 0 | Context.Int16Ty); |
3598 | 0 | case OpCode::I64__atomic__rmw8__xchg_u: |
3599 | 0 | return compileAtomicRMWOp( |
3600 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3601 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int64Ty, |
3602 | 0 | Context.Int8Ty); |
3603 | 0 | case OpCode::I64__atomic__rmw16__xchg_u: |
3604 | 0 | return compileAtomicRMWOp( |
3605 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3606 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int64Ty, |
3607 | 0 | Context.Int16Ty); |
3608 | 0 | case OpCode::I64__atomic__rmw32__xchg_u: |
3609 | 0 | return compileAtomicRMWOp( |
3610 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3611 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int64Ty, |
3612 | 0 | Context.Int32Ty); |
3613 | 0 | case OpCode::I32__atomic__rmw__cmpxchg: |
3614 | 0 | return compileAtomicCompareExchange( |
3615 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3616 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int32Ty, true); |
3617 | 0 | case OpCode::I64__atomic__rmw__cmpxchg: |
3618 | 0 | return compileAtomicCompareExchange( |
3619 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3620 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int64Ty, true); |
3621 | 0 | case OpCode::I32__atomic__rmw8__cmpxchg_u: |
3622 | 0 | return compileAtomicCompareExchange( |
3623 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3624 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int8Ty); |
3625 | 0 | case OpCode::I32__atomic__rmw16__cmpxchg_u: |
3626 | 0 | return compileAtomicCompareExchange( |
3627 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3628 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int16Ty); |
3629 | 0 | case OpCode::I64__atomic__rmw8__cmpxchg_u: |
3630 | 0 | return compileAtomicCompareExchange( |
3631 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3632 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int8Ty); |
3633 | 0 | case OpCode::I64__atomic__rmw16__cmpxchg_u: |
3634 | 0 | return compileAtomicCompareExchange( |
3635 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3636 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int16Ty); |
3637 | 0 | case OpCode::I64__atomic__rmw32__cmpxchg_u: |
3638 | 0 | return compileAtomicCompareExchange( |
3639 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3640 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int32Ty); |
3641 | | |
3642 | 0 | default: |
3643 | 0 | assumingUnreachable(); |
3644 | 959k | } |
3645 | 959k | return; |
3646 | 959k | }; |
3647 | 1.39M | for (const auto &Instr : Instrs) { |
3648 | | // Update instruction count |
3649 | 1.39M | if (LocalInstrCount) { |
3650 | 0 | Builder.createStore( |
3651 | 0 | Builder.createAdd( |
3652 | 0 | Builder.createLoad(Context.Int64Ty, LocalInstrCount), |
3653 | 0 | LLContext.getInt64(1)), |
3654 | 0 | LocalInstrCount); |
3655 | 0 | } |
3656 | 1.39M | if (LocalGas) { |
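 |  | // Gas metering: add this opcode's cost, read from the per-opcode cost table, to the local gas counter. |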
3657 | 0 | auto NewGas = Builder.createAdd( |
3658 | 0 | Builder.createLoad(Context.Int64Ty, LocalGas), |
3659 | 0 | Builder.createLoad( |
3660 | 0 | Context.Int64Ty, |
3661 | 0 | Builder.createConstInBoundsGEP2_64( |
3662 | 0 | LLVM::Type::getArrayType(Context.Int64Ty, UINT16_MAX + 1), |
3663 | 0 | Context.getCostTable(Builder, ExecCtx), 0, |
3664 | 0 | uint16_t(Instr.getOpCode())))); |
3665 | 0 | Builder.createStore(NewGas, LocalGas); |
3666 | 0 | } |
3667 | | |
3668 |  | // Dispatch to the compile handler selected by this instruction's opcode. |
3669 | 1.39M | Dispatch(Instr); |
3670 | 1.39M | } |
3671 | 10.7k | } |
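 |  | // Signed float-to-integer truncation: traps on NaN (InvalidConvToInt) and on out-of-range values (IntegerOverflow). |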
3672 | 1.78k | void compileSignedTrunc(LLVM::Type IntType) noexcept { |
3673 | 1.78k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "strunc.norm"); |
3674 | 1.78k | auto NotMinBB = LLVM::BasicBlock::create(LLContext, F.Fn, "strunc.notmin"); |
3675 | 1.78k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "strunc.notmax"); |
3676 | 1.78k | auto Value = stackPop(); |
3677 | 1.78k | const auto [Precise, MinFp, MaxFp] = |
3678 | 1.78k | [IntType, Value]() -> std::tuple<bool, LLVM::Value, LLVM::Value> { |
3679 | 1.78k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3680 | 1.78k | const auto [Min, Max] = [BitWidth]() -> std::tuple<int64_t, int64_t> { |
3681 | 1.78k | switch (BitWidth) { |
3682 | 1.35k | case 32: |
3683 | 1.35k | return {std::numeric_limits<int32_t>::min(), |
3684 | 1.35k | std::numeric_limits<int32_t>::max()}; |
3685 | 424 | case 64: |
3686 | 424 | return {std::numeric_limits<int64_t>::min(), |
3687 | 424 | std::numeric_limits<int64_t>::max()}; |
3688 | 0 | default: |
3689 | 0 | assumingUnreachable(); |
3690 | 1.78k | } |
3691 | 1.78k | }(); |
3692 | 1.78k | auto FPType = Value.getType(); |
3693 | 1.78k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3694 | 1.78k | const auto FPWidth = FPType.getFPMantissaWidth(); |
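 |  | // Precise when the integer width fits in the FP mantissa, so Max is exactly representable and the upper-bound check may be inclusive (OLE); otherwise the constant rounds up and the check must be strict (OLT). |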
3695 | 1.78k | return {BitWidth <= FPWidth, LLVM::Value::getConstReal(FPType, Min), |
3696 | 1.78k | LLVM::Value::getConstReal(FPType, Max)}; |
3697 | 1.78k | }(); |
3698 | | |
3699 | 1.78k | auto IsNotNan = Builder.createLikely(Builder.createFCmpORD(Value, Value)); |
3700 | 1.78k | Builder.createCondBr(IsNotNan, NormBB, |
3701 | 1.78k | getTrapBB(ErrCode::Value::InvalidConvToInt)); |
3702 | | |
3703 | 1.78k | Builder.positionAtEnd(NormBB); |
3704 | 1.78k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3705 | 1.78k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3706 | 1.78k | auto IsNotUnderflow = |
3707 | 1.78k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3708 | 1.78k | Builder.createCondBr(IsNotUnderflow, NotMinBB, |
3709 | 1.78k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3710 | | |
3711 | 1.78k | Builder.positionAtEnd(NotMinBB); |
3712 | 1.78k | auto IsNotOverflow = Builder.createLikely( |
3713 | 1.78k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3714 | 1.78k | Builder.createCondBr(IsNotOverflow, NotMaxBB, |
3715 | 1.78k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3716 | | |
3717 | 1.78k | Builder.positionAtEnd(NotMaxBB); |
3718 | 1.78k | stackPush(Builder.createFPToSI(Trunc, IntType)); |
3719 | 1.78k | } |
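 |  | // Saturating signed truncation: never traps; NaN yields 0 and out-of-range values clamp to the integer min/max. |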
3720 | 1.14k | void compileSignedTruncSat(LLVM::Type IntType) noexcept { |
3721 | 1.14k | auto CurrBB = Builder.getInsertBlock(); |
3722 | 1.14k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.norm"); |
3723 | 1.14k | auto NotMinBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.notmin"); |
3724 | 1.14k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.notmax"); |
3725 | 1.14k | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.end"); |
3726 | 1.14k | auto Value = stackPop(); |
3727 | 1.14k | const auto [Precise, MinInt, MaxInt, MinFp, MaxFp] = [IntType, Value]() |
3728 | 1.14k | -> std::tuple<bool, uint64_t, uint64_t, LLVM::Value, LLVM::Value> { |
3729 | 1.14k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3730 | 1.14k | const auto [Min, Max] = [BitWidth]() -> std::tuple<int64_t, int64_t> { |
3731 | 1.14k | switch (BitWidth) { |
3732 | 500 | case 32: |
3733 | 500 | return {std::numeric_limits<int32_t>::min(), |
3734 | 500 | std::numeric_limits<int32_t>::max()}; |
3735 | 649 | case 64: |
3736 | 649 | return {std::numeric_limits<int64_t>::min(), |
3737 | 649 | std::numeric_limits<int64_t>::max()}; |
3738 | 0 | default: |
3739 | 0 | assumingUnreachable(); |
3740 | 1.14k | } |
3741 | 1.14k | }(); |
3742 | 1.14k | auto FPType = Value.getType(); |
3743 | 1.14k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3744 | 1.14k | const auto FPWidth = FPType.getFPMantissaWidth(); |
3745 | 1.14k | return {BitWidth <= FPWidth, static_cast<uint64_t>(Min), |
3746 | 1.14k | static_cast<uint64_t>(Max), |
3747 | 1.14k | LLVM::Value::getConstReal(FPType, Min), |
3748 | 1.14k | LLVM::Value::getConstReal(FPType, Max)}; |
3749 | 1.14k | }(); |
3750 | | |
3751 | 1.14k | auto IsNotNan = Builder.createLikely(Builder.createFCmpORD(Value, Value)); |
3752 | 1.14k | Builder.createCondBr(IsNotNan, NormBB, EndBB); |
3753 | | |
3754 | 1.14k | Builder.positionAtEnd(NormBB); |
3755 | 1.14k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3756 | 1.14k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3757 | 1.14k | auto IsNotUnderflow = |
3758 | 1.14k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3759 | 1.14k | Builder.createCondBr(IsNotUnderflow, NotMinBB, EndBB); |
3760 | | |
3761 | 1.14k | Builder.positionAtEnd(NotMinBB); |
3762 | 1.14k | auto IsNotOverflow = Builder.createLikely( |
3763 | 1.14k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3764 | 1.14k | Builder.createCondBr(IsNotOverflow, NotMaxBB, EndBB); |
3765 | | |
3766 | 1.14k | Builder.positionAtEnd(NotMaxBB); |
3767 | 1.14k | auto IntValue = Builder.createFPToSI(Trunc, IntType); |
3768 | 1.14k | Builder.createBr(EndBB); |
3769 | | |
3770 | 1.14k | Builder.positionAtEnd(EndBB); |
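 |  | // Merge the four paths: NaN gives 0, underflow gives Min, overflow gives Max, otherwise the converted value. |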
3771 | 1.14k | auto PHIRet = Builder.createPHI(IntType); |
3772 | 1.14k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, 0, true), CurrBB); |
3773 | 1.14k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MinInt, true), NormBB); |
3774 | 1.14k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MaxInt, true), |
3775 | 1.14k | NotMinBB); |
3776 | 1.14k | PHIRet.addIncoming(IntValue, NotMaxBB); |
3777 | | |
3778 | 1.14k | stackPush(PHIRet); |
3779 | 1.14k | } |
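 |  | // Unsigned float-to-integer truncation: traps on NaN and on values outside [0, UINT_MAX]. |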
3780 | 3.70k | void compileUnsignedTrunc(LLVM::Type IntType) noexcept { |
3781 | 3.70k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "utrunc.norm"); |
3782 | 3.70k | auto NotMinBB = LLVM::BasicBlock::create(LLContext, F.Fn, "utrunc.notmin"); |
3783 | 3.70k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "utrunc.notmax"); |
3784 | 3.70k | auto Value = stackPop(); |
3785 | 3.70k | const auto [Precise, MinFp, MaxFp] = |
3786 | 3.70k | [IntType, Value]() -> std::tuple<bool, LLVM::Value, LLVM::Value> { |
3787 | 3.70k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3788 | 3.70k | const auto [Min, Max] = [BitWidth]() -> std::tuple<uint64_t, uint64_t> { |
3789 | 3.70k | switch (BitWidth) { |
3790 | 1.18k | case 32: |
3791 | 1.18k | return {std::numeric_limits<uint32_t>::min(), |
3792 | 1.18k | std::numeric_limits<uint32_t>::max()}; |
3793 | 2.51k | case 64: |
3794 | 2.51k | return {std::numeric_limits<uint64_t>::min(), |
3795 | 2.51k | std::numeric_limits<uint64_t>::max()}; |
3796 | 0 | default: |
3797 | 0 | assumingUnreachable(); |
3798 | 3.70k | } |
3799 | 3.70k | }(); |
3800 | 3.70k | auto FPType = Value.getType(); |
3801 | 3.70k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3802 | 3.70k | const auto FPWidth = FPType.getFPMantissaWidth(); |
3803 | 3.70k | return {BitWidth <= FPWidth, LLVM::Value::getConstReal(FPType, Min), |
3804 | 3.70k | LLVM::Value::getConstReal(FPType, Max)}; |
3805 | 3.70k | }(); |
3806 | | |
3807 | 3.70k | auto IsNotNan = Builder.createLikely(Builder.createFCmpORD(Value, Value)); |
3808 | 3.70k | Builder.createCondBr(IsNotNan, NormBB, |
3809 | 3.70k | getTrapBB(ErrCode::Value::InvalidConvToInt)); |
3810 | | |
3811 | 3.70k | Builder.positionAtEnd(NormBB); |
3812 | 3.70k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3813 | 3.70k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3814 | 3.70k | auto IsNotUnderflow = |
3815 | 3.70k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3816 | 3.70k | Builder.createCondBr(IsNotUnderflow, NotMinBB, |
3817 | 3.70k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3818 | | |
3819 | 3.70k | Builder.positionAtEnd(NotMinBB); |
3820 | 3.70k | auto IsNotOverflow = Builder.createLikely( |
3821 | 3.70k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3822 | 3.70k | Builder.createCondBr(IsNotOverflow, NotMaxBB, |
3823 | 3.70k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3824 | | |
3825 | 3.70k | Builder.positionAtEnd(NotMaxBB); |
3826 | 3.70k | stackPush(Builder.createFPToUI(Trunc, IntType)); |
3827 | 3.70k | } |
3828 | 987 | void compileUnsignedTruncSat(LLVM::Type IntType) noexcept { |
3829 | 987 | auto CurrBB = Builder.getInsertBlock(); |
3830 | 987 | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "usat.norm"); |
3831 | 987 | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "usat.notmax"); |
3832 | 987 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "usat.end"); |
3833 | 987 | auto Value = stackPop(); |
3834 | 987 | const auto [Precise, MinInt, MaxInt, MinFp, MaxFp] = [IntType, Value]() |
3835 | 987 | -> std::tuple<bool, uint64_t, uint64_t, LLVM::Value, LLVM::Value> { |
3836 | 987 | const auto BitWidth = IntType.getIntegerBitWidth(); |
3837 | 987 | const auto [Min, Max] = [BitWidth]() -> std::tuple<uint64_t, uint64_t> { |
3838 | 987 | switch (BitWidth) { |
3839 | 289 | case 32: |
3840 | 289 | return {std::numeric_limits<uint32_t>::min(), |
3841 | 289 | std::numeric_limits<uint32_t>::max()}; |
3842 | 698 | case 64: |
3843 | 698 | return {std::numeric_limits<uint64_t>::min(), |
3844 | 698 | std::numeric_limits<uint64_t>::max()}; |
3845 | 0 | default: |
3846 | 0 | assumingUnreachable(); |
3847 | 987 | } |
3848 | 987 | }(); |
3849 | 987 | auto FPType = Value.getType(); |
3850 | 987 | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3851 | 987 | const auto FPWidth = FPType.getFPMantissaWidth(); |
3852 | 987 | return {BitWidth <= FPWidth, Min, Max, |
3853 | 987 | LLVM::Value::getConstReal(FPType, Min), |
3854 | 987 | LLVM::Value::getConstReal(FPType, Max)}; |
3855 | 987 | }(); |
3856 | | |
3857 | 987 | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3858 | 987 | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3859 | 987 | auto IsNotUnderflow = |
3860 | 987 | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3861 | 987 | Builder.createCondBr(IsNotUnderflow, NormBB, EndBB); |
3862 | | |
3863 | 987 | Builder.positionAtEnd(NormBB); |
3864 | 987 | auto IsNotOverflow = Builder.createLikely( |
3865 | 987 | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3866 | 987 | Builder.createCondBr(IsNotOverflow, NotMaxBB, EndBB); |
3867 | | |
3868 | 987 | Builder.positionAtEnd(NotMaxBB); |
3869 | 987 | auto IntValue = Builder.createFPToUI(Trunc, IntType); |
3870 | 987 | Builder.createBr(EndBB); |
3871 | | |
3872 | 987 | Builder.positionAtEnd(EndBB); |
3873 | 987 | auto PHIRet = Builder.createPHI(IntType); |
3874 | 987 | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MinInt), CurrBB); |
3875 | 987 | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MaxInt), NormBB); |
3876 | 987 | PHIRet.addIncoming(IntValue, NotMaxBB); |
3877 | | |
3878 | 987 | stackPush(PHIRet); |
3879 | 987 | } |
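The two saturating helpers above (compileSignedTruncSat and compileUnsignedTruncSat) build the same diamond of basic blocks as the trapping variant, but merge the NaN, underflow, overflow, and in-range paths through a PHI node instead of branching to a trap block. As a plain scalar reference for that behaviour, a minimal sketch (illustrative only, not WasmEdge code; the template form is an assumption made for brevity):

// Scalar reference for iNN.trunc_sat_fMM_s/_u as compiled above: NaN yields 0,
// out-of-range values saturate to the integer limits, and everything else is
// truncated toward zero.
#include <cmath>
#include <cstdint>
#include <limits>

template <typename IntT, typename FloatT>
IntT truncSat(FloatT V) {
  if (std::isnan(V)) {
    return IntT{0};                            // NaN arm of the PHI
  }
  const FloatT T = std::trunc(V);
  if (T < static_cast<FloatT>(std::numeric_limits<IntT>::min())) {
    return std::numeric_limits<IntT>::min();   // underflow arm
  }
  if (T >= static_cast<FloatT>(std::numeric_limits<IntT>::max())) {
    return std::numeric_limits<IntT>::max();   // overflow arm
  }
  return static_cast<IntT>(T);                 // in-range arm
}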
3880 | | |
3881 | | void compileAtomicCheckOffsetAlignment(LLVM::Value Offset, |
3882 | 33 | LLVM::Type IntType) noexcept { |
3883 | 33 | const auto BitWidth = IntType.getIntegerBitWidth(); |
3884 | 33 | auto BWMask = LLContext.getInt64((BitWidth >> 3) - 1); |
3885 | 33 | auto Value = Builder.createAnd(Offset, BWMask); |
3886 | 33 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "address_align_ok"); |
3887 | 33 | auto IsAddressAligned = Builder.createLikely( |
3888 | 33 | Builder.createICmpEQ(Value, LLContext.getInt64(0))); |
3889 | 33 | Builder.createCondBr(IsAddressAligned, OkBB, |
3890 | 33 | getTrapBB(ErrCode::Value::UnalignedAtomicAccess)); |
3891 | | |
3892 | 33 | Builder.positionAtEnd(OkBB); |
3893 | 33 | } |
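compileAtomicCheckOffsetAlignment guards every atomic access: the effective address must be a multiple of the access width, otherwise the code traps with UnalignedAtomicAccess. The test reduces to a single mask comparison; a minimal standalone sketch (function and parameter names here are illustrative, not WasmEdge APIs):

#include <cstdint>

// True when Addr is naturally aligned for an access of BitWidth bits, i.e.
// when the low log2(BitWidth / 8) bits are all zero.
bool isNaturallyAligned(uint64_t Addr, uint32_t BitWidth) {
  const uint64_t Mask = (BitWidth >> 3) - 1; // 3 for 32-bit, 7 for 64-bit
  return (Addr & Mask) == 0;                 // a non-zero remainder would trap
}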
3894 | | |
3895 | 186 | void compileMemoryFence() noexcept { |
3896 | 186 | Builder.createFence(LLVMAtomicOrderingSequentiallyConsistent); |
3897 | 186 | } |
3898 | | void compileAtomicNotify(unsigned MemoryIndex, |
3899 | 26 | unsigned MemoryOffset) noexcept { |
3900 | 26 | auto Count = stackPop(); |
3901 | 26 | auto Addr = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3902 | 26 | if (MemoryOffset != 0) { |
3903 | 19 | Addr = Builder.createAdd(Addr, LLContext.getInt64(MemoryOffset)); |
3904 | 19 | } |
3905 | 26 | compileAtomicCheckOffsetAlignment(Addr, Context.Int32Ty); |
3906 | 26 | auto Offset = stackPop(); |
3907 | | |
3908 | 26 | stackPush(Builder.createCall( |
3909 | 26 | Context.getIntrinsic( |
3910 | 26 | Builder, Executable::Intrinsics::kMemAtomicNotify, |
3911 | 26 | LLVM::Type::getFunctionType( |
3912 | 26 | Context.Int32Ty, |
3913 | 26 | {Context.Int32Ty, Context.Int32Ty, Context.Int32Ty}, false)), |
3914 | 26 | {LLContext.getInt32(MemoryIndex), Offset, Count})); |
3915 | 26 | } |
3916 | | void compileAtomicWait(unsigned MemoryIndex, unsigned MemoryOffset, |
3917 | 7 | LLVM::Type TargetType, uint32_t BitWidth) noexcept { |
3918 | 7 | auto Timeout = stackPop(); |
3919 | 7 | auto ExpectedValue = Builder.createZExtOrTrunc(stackPop(), Context.Int64Ty); |
3920 | 7 | auto Addr = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3921 | 7 | if (MemoryOffset != 0) { |
3922 | 3 | Addr = Builder.createAdd(Addr, LLContext.getInt64(MemoryOffset)); |
3923 | 3 | } |
3924 | 7 | compileAtomicCheckOffsetAlignment(Addr, TargetType); |
3925 | 7 | auto Offset = stackPop(); |
3926 | | |
3927 | 7 | stackPush(Builder.createCall( |
3928 | 7 | Context.getIntrinsic( |
3929 | 7 | Builder, Executable::Intrinsics::kMemAtomicWait, |
3930 | 7 | LLVM::Type::getFunctionType(Context.Int32Ty, |
3931 | 7 | {Context.Int32Ty, Context.Int32Ty, |
3932 | 7 | Context.Int64Ty, Context.Int64Ty, |
3933 | 7 | Context.Int32Ty}, |
3934 | 7 | false)), |
3935 | 7 | {LLContext.getInt32(MemoryIndex), Offset, ExpectedValue, Timeout, |
3936 | 7 | LLContext.getInt32(BitWidth)})); |
3937 | 7 | } |
3938 | | void compileAtomicLoad(unsigned MemoryIndex, unsigned MemoryOffset, |
3939 | | unsigned Alignment, LLVM::Type IntType, |
3940 | 0 | LLVM::Type TargetType, bool Signed = false) noexcept { |
3941 | |
3942 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3943 | 0 | if (MemoryOffset != 0) { |
3944 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
3945 | 0 | } |
3946 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
3947 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
3948 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
3949 | 0 | Offset); |
3950 | |
3951 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
3952 | 0 | auto Load = Builder.createLoad(TargetType, Ptr, true); |
3953 | 0 | Load.setAlignment(1 << Alignment); |
3954 | 0 | Load.setOrdering(LLVMAtomicOrderingSequentiallyConsistent); |
3955 | |
3956 | 0 | if (Signed) { |
3957 | 0 | Stack.back() = Builder.createSExt(Load, IntType); |
3958 | 0 | } else { |
3959 | 0 | Stack.back() = Builder.createZExt(Load, IntType); |
3960 | 0 | } |
3961 | 0 | } |
3962 | | void compileAtomicStore(unsigned MemoryIndex, unsigned MemoryOffset, |
3963 | | unsigned Alignment, LLVM::Type, LLVM::Type TargetType, |
3964 | 0 | bool Signed = false) noexcept { |
3965 | 0 | auto V = stackPop(); |
3966 | |
3967 | 0 | if (Signed) { |
3968 | 0 | V = Builder.createSExtOrTrunc(V, TargetType); |
3969 | 0 | } else { |
3970 | 0 | V = Builder.createZExtOrTrunc(V, TargetType); |
3971 | 0 | } |
3972 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3973 | 0 | if (MemoryOffset != 0) { |
3974 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
3975 | 0 | } |
3976 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
3977 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
3978 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
3979 | 0 | Offset); |
3980 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
3981 | 0 | auto Store = Builder.createStore(V, Ptr, true); |
3982 | 0 | Store.setAlignment(1 << Alignment); |
3983 | 0 | Store.setOrdering(LLVMAtomicOrderingSequentiallyConsistent); |
3984 | 0 | } |
3985 | | |
3986 | | void compileAtomicRMWOp(unsigned MemoryIndex, unsigned MemoryOffset, |
3987 | | [[maybe_unused]] unsigned Alignment, |
3988 | | LLVMAtomicRMWBinOp BinOp, LLVM::Type IntType, |
3989 | 0 | LLVM::Type TargetType, bool Signed = false) noexcept { |
3990 | 0 | auto Value = Builder.createSExtOrTrunc(stackPop(), TargetType); |
3991 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3992 | 0 | if (MemoryOffset != 0) { |
3993 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
3994 | 0 | } |
3995 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
3996 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
3997 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
3998 | 0 | Offset); |
3999 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
4000 | |
4001 | 0 | auto Ret = Builder.createAtomicRMW( |
4002 | 0 | BinOp, Ptr, Value, LLVMAtomicOrderingSequentiallyConsistent); |
4003 | | #if LLVM_VERSION_MAJOR >= 13 |
4004 | | Ret.setAlignment(1 << Alignment); |
4005 | | #endif |
4006 | 0 | if (Signed) { |
4007 | 0 | Stack.back() = Builder.createSExt(Ret, IntType); |
4008 | 0 | } else { |
4009 | 0 | Stack.back() = Builder.createZExt(Ret, IntType); |
4010 | 0 | } |
4011 | 0 | } |
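compileAtomicRMWOp narrows the operand to the access type, performs a sequentially consistent atomicrmw on linear memory, and then widens the old value back to the stack type. The shape of a sub-word case, i64.atomic.rmw8.add_u, in plain C++ (a sketch only: casting raw linear memory to std::atomic is a simplification for illustration, and the names are not WasmEdge APIs):

#include <atomic>
#include <cstdint>

uint64_t atomicRmw8AddU(uint8_t *MemBase, uint64_t Addr, uint64_t Operand) {
  auto *Slot = reinterpret_cast<std::atomic<uint8_t> *>(MemBase + Addr);
  const uint8_t Old =
      Slot->fetch_add(static_cast<uint8_t>(Operand), // operand truncated to 8 bits
                      std::memory_order_seq_cst);    // SeqCst ordering, as above
  return static_cast<uint64_t>(Old);                 // old value zero-extended
}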
4012 | | void compileAtomicCompareExchange(unsigned MemoryIndex, unsigned MemoryOffset, |
4013 | | [[maybe_unused]] unsigned Alignment, |
4014 | | LLVM::Type IntType, LLVM::Type TargetType, |
4015 | 0 | bool Signed = false) noexcept { |
4016 | |
4017 | 0 | auto Replacement = Builder.createSExtOrTrunc(stackPop(), TargetType); |
4018 | 0 | auto Expected = Builder.createSExtOrTrunc(stackPop(), TargetType); |
4019 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
4020 | 0 | if (MemoryOffset != 0) { |
4021 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
4022 | 0 | } |
4023 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
4024 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
4025 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
4026 | 0 | Offset); |
4027 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
4028 | |
4029 | 0 | auto Ret = Builder.createAtomicCmpXchg( |
4030 | 0 | Ptr, Expected, Replacement, LLVMAtomicOrderingSequentiallyConsistent, |
4031 | 0 | LLVMAtomicOrderingSequentiallyConsistent); |
4032 | | #if LLVM_VERSION_MAJOR >= 13 |
4033 | | Ret.setAlignment(1 << Alignment); |
4034 | | #endif |
4035 | 0 | auto OldVal = Builder.createExtractValue(Ret, 0); |
4036 | 0 | if (Signed) { |
4037 | 0 | Stack.back() = Builder.createSExt(OldVal, IntType); |
4038 | 0 | } else { |
4039 | 0 | Stack.back() = Builder.createZExt(OldVal, IntType); |
4040 | 0 | } |
4041 | 0 | } |
4042 | | |
4043 | 11.3k | void compileReturn() noexcept { |
4044 | 11.3k | updateInstrCount(); |
4045 | 11.3k | updateGas(); |
4046 | 11.3k | auto Ty = F.Ty.getReturnType(); |
4047 | 11.3k | if (Ty.isVoidTy()) { |
4048 | 2.02k | Builder.createRetVoid(); |
4049 | 9.37k | } else if (Ty.isStructTy()) { |
4050 | 297 | const auto Count = Ty.getStructNumElements(); |
4051 | 297 | std::vector<LLVM::Value> Ret(Count); |
4052 | 1.08k | for (unsigned I = 0; I < Count; ++I) { |
4053 | 785 | const unsigned J = Count - 1 - I; |
4054 | 785 | Ret[J] = stackPop(); |
4055 | 785 | } |
4056 | 297 | Builder.createAggregateRet(Ret); |
4057 | 9.07k | } else { |
4058 | 9.07k | Builder.createRet(stackPop()); |
4059 | 9.07k | } |
4060 | 11.3k | } |
4061 | | |
4062 | 17.7k | void updateInstrCount() noexcept { |
4063 | 17.7k | if (LocalInstrCount) { |
4064 | 0 | auto Store [[maybe_unused]] = Builder.createAtomicRMW( |
4065 | 0 | LLVMAtomicRMWBinOpAdd, Context.getInstrCount(Builder, ExecCtx), |
4066 | 0 | Builder.createLoad(Context.Int64Ty, LocalInstrCount), |
4067 | 0 | LLVMAtomicOrderingMonotonic); |
4068 | | #if LLVM_VERSION_MAJOR >= 13 |
4069 | | Store.setAlignment(8); |
4070 | | #endif |
4071 | 0 | Builder.createStore(LLContext.getInt64(0), LocalInstrCount); |
4072 | 0 | } |
4073 | 17.7k | } |
4074 | | |
4075 | 20.2k | void updateGas() noexcept { |
4076 | 20.2k | if (LocalGas) { |
4077 | 0 | auto CurrBB = Builder.getInsertBlock(); |
4078 | 0 | auto CheckBB = LLVM::BasicBlock::create(LLContext, F.Fn, "gas_check"); |
4079 | 0 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "gas_ok"); |
4080 | 0 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "gas_end"); |
4081 | |
4082 | 0 | auto Cost = Builder.createLoad(Context.Int64Ty, LocalGas); |
4083 | 0 | Cost.setAlignment(64); |
4084 | 0 | auto GasPtr = Context.getGas(Builder, ExecCtx); |
4085 | 0 | auto GasLimit = Context.getGasLimit(Builder, ExecCtx); |
4086 | 0 | auto Gas = Builder.createLoad(Context.Int64Ty, GasPtr); |
4087 | 0 | Gas.setAlignment(64); |
4088 | 0 | Gas.setOrdering(LLVMAtomicOrderingMonotonic); |
4089 | 0 | Builder.createBr(CheckBB); |
4090 | 0 | Builder.positionAtEnd(CheckBB); |
4091 | |
4092 | 0 | auto PHIOldGas = Builder.createPHI(Context.Int64Ty); |
4093 | 0 | auto NewGas = Builder.createAdd(PHIOldGas, Cost); |
4094 | 0 | auto IsGasRemain = |
4095 | 0 | Builder.createLikely(Builder.createICmpULE(NewGas, GasLimit)); |
4096 | 0 | Builder.createCondBr(IsGasRemain, OkBB, |
4097 | 0 | getTrapBB(ErrCode::Value::CostLimitExceeded)); |
4098 | 0 | Builder.positionAtEnd(OkBB); |
4099 | |
4100 | 0 | auto RGasAndSucceed = Builder.createAtomicCmpXchg( |
4101 | 0 | GasPtr, PHIOldGas, NewGas, LLVMAtomicOrderingMonotonic, |
4102 | 0 | LLVMAtomicOrderingMonotonic); |
4103 | | #if LLVM_VERSION_MAJOR >= 13 |
4104 | | RGasAndSucceed.setAlignment(8); |
4105 | | #endif |
4106 | 0 | RGasAndSucceed.setWeak(true); |
4107 | 0 | auto RGas = Builder.createExtractValue(RGasAndSucceed, 0); |
4108 | 0 | auto Succeed = Builder.createExtractValue(RGasAndSucceed, 1); |
4109 | 0 | Builder.createCondBr(Builder.createLikely(Succeed), EndBB, CheckBB); |
4110 | 0 | Builder.positionAtEnd(EndBB); |
4111 | |
4112 | 0 | Builder.createStore(LLContext.getInt64(0), LocalGas); |
4113 | |
4114 | 0 | PHIOldGas.addIncoming(Gas, CurrBB); |
4115 | 0 | PHIOldGas.addIncoming(RGas, OkBB); |
4116 | 0 | } |
4117 | 20.2k | } |
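updateGas drains the locally accumulated cost into the shared gas counter with a weak compare-and-swap loop and branches to the CostLimitExceeded trap when the new total would pass the limit. The same control flow in plain C++, assuming a simple std::atomic counter (names are illustrative, not WasmEdge APIs):

#include <atomic>
#include <cstdint>

// Returns false when charging Cost would exceed GasLimit (the trap path above).
bool chargeGas(std::atomic<uint64_t> &Gas, uint64_t GasLimit, uint64_t Cost) {
  uint64_t Old = Gas.load(std::memory_order_relaxed);
  for (;;) {
    const uint64_t New = Old + Cost;
    if (New > GasLimit) {
      return false;                      // gas_check failed: trap in the IR
    }
    // A weak CAS may fail spuriously; on failure Old is refreshed with the
    // current value, which is what the PHI node above feeds back into CheckBB.
    if (Gas.compare_exchange_weak(Old, New, std::memory_order_relaxed)) {
      return true;                       // gas_end
    }
  }
}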
4118 | | |
4119 | 2.92k | void updateGasAtTrap() noexcept { |
4120 | 2.92k | if (LocalGas) { |
4121 | 0 | auto Update [[maybe_unused]] = Builder.createAtomicRMW( |
4122 | 0 | LLVMAtomicRMWBinOpAdd, Context.getGas(Builder, ExecCtx), |
4123 | 0 | Builder.createLoad(Context.Int64Ty, LocalGas), |
4124 | 0 | LLVMAtomicOrderingMonotonic); |
4125 | | #if LLVM_VERSION_MAJOR >= 13 |
4126 | | Update.setAlignment(8); |
4127 | | #endif |
4128 | 0 | } |
4129 | 2.92k | } |
4130 | | |
4131 | | private: |
4132 | 2.86k | void compileCallOp(const unsigned int FuncIndex) noexcept { |
4133 | 2.86k | const auto &FuncType = |
4134 | 2.86k | Context.CompositeTypes[std::get<0>(Context.Functions[FuncIndex])] |
4135 | 2.86k | ->getFuncType(); |
4136 | 2.86k | const auto &Function = std::get<1>(Context.Functions[FuncIndex]); |
4137 | 2.86k | const auto &ParamTypes = FuncType.getParamTypes(); |
4138 | | |
4139 | 2.86k | std::vector<LLVM::Value> Args(ParamTypes.size() + 1); |
4140 | 2.86k | Args[0] = F.Fn.getFirstParam(); |
4141 | 3.67k | for (size_t I = 0; I < ParamTypes.size(); ++I) { |
4142 | 813 | const size_t J = ParamTypes.size() - 1 - I; |
4143 | 813 | Args[J + 1] = stackPop(); |
4144 | 813 | } |
4145 | | |
4146 | 2.86k | auto Ret = Builder.createCall(Function, Args); |
4147 | 2.86k | auto Ty = Ret.getType(); |
4148 | 2.86k | if (Ty.isVoidTy()) { |
4149 | | // nothing to do |
4150 | 1.57k | } else if (Ty.isStructTy()) { |
4151 | 172 | for (auto Val : unpackStruct(Builder, Ret)) { |
4152 | 172 | stackPush(Val); |
4153 | 172 | } |
4154 | 1.21k | } else { |
4155 | 1.21k | stackPush(Ret); |
4156 | 1.21k | } |
4157 | 2.86k | } |
4158 | | |
4159 | | void compileIndirectCallOp(const uint32_t TableIndex, |
4160 | 592 | const uint32_t FuncTypeIndex) noexcept { |
4161 | 592 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.not_null"); |
4162 | 592 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.is_null"); |
4163 | 592 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.end"); |
4164 | | |
4165 | 592 | LLVM::Value FuncIndex = stackPop(); |
4166 | 592 | const auto &FuncType = Context.CompositeTypes[FuncTypeIndex]->getFuncType(); |
4167 | 592 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4168 | 592 | auto RTy = FTy.getReturnType(); |
4169 | | |
4170 | 592 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4171 | 592 | const size_t RetSize = |
4172 | 592 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4173 | 592 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4174 | 592 | ArgsVec[0] = F.Fn.getFirstParam(); |
4175 | 1.17k | for (size_t I = 0; I < ArgSize; ++I) { |
4176 | 587 | const size_t J = ArgSize - I; |
4177 | 587 | ArgsVec[J] = stackPop(); |
4178 | 587 | } |
4179 | | |
4180 | 592 | std::vector<LLVM::Value> FPtrRetsVec; |
4181 | 592 | FPtrRetsVec.reserve(RetSize); |
4182 | 592 | { |
4183 | 592 | auto FPtr = Builder.createCall( |
4184 | 592 | Context.getIntrinsic( |
4185 | 592 | Builder, Executable::Intrinsics::kTableGetFuncSymbol, |
4186 | 592 | LLVM::Type::getFunctionType( |
4187 | 592 | FTy.getPointerTo(), |
4188 | 592 | {Context.Int32Ty, Context.Int32Ty, Context.Int32Ty}, false)), |
4189 | 592 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4190 | 592 | FuncIndex}); |
4191 | 592 | Builder.createCondBr( |
4192 | 592 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4193 | 592 | NotNullBB, IsNullBB); |
4194 | 592 | Builder.positionAtEnd(NotNullBB); |
4195 | | |
4196 | 592 | auto FPtrRet = |
4197 | 592 | Builder.createCall(LLVM::FunctionCallee{FTy, FPtr}, ArgsVec); |
4198 | 592 | if (RetSize == 0) { |
4199 | | // nothing to do |
4200 | 452 | } else if (RetSize == 1) { |
4201 | 437 | FPtrRetsVec.push_back(FPtrRet); |
4202 | 437 | } else { |
4203 | 30 | for (auto Val : unpackStruct(Builder, FPtrRet)) { |
4204 | 30 | FPtrRetsVec.push_back(Val); |
4205 | 30 | } |
4206 | 15 | } |
4207 | 592 | } |
4208 | | |
4209 | 592 | Builder.createBr(EndBB); |
4210 | 592 | Builder.positionAtEnd(IsNullBB); |
4211 | | |
4212 | 592 | std::vector<LLVM::Value> RetsVec; |
4213 | 592 | { |
4214 | 592 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4215 | 592 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4216 | 592 | Builder.createArrayPtrStore( |
4217 | 592 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4218 | 592 | kValSize); |
4219 | | |
4220 | 592 | Builder.createCall( |
4221 | 592 | Context.getIntrinsic( |
4222 | 592 | Builder, Executable::Intrinsics::kCallIndirect, |
4223 | 592 | LLVM::Type::getFunctionType(Context.VoidTy, |
4224 | 592 | {Context.Int32Ty, Context.Int32Ty, |
4225 | 592 | Context.Int32Ty, Context.Int8PtrTy, |
4226 | 592 | Context.Int8PtrTy}, |
4227 | 592 | false)), |
4228 | 592 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4229 | 592 | FuncIndex, Args, Rets}); |
4230 | | |
4231 | 592 | if (RetSize == 0) { |
4232 | | // nothing to do |
4233 | 452 | } else if (RetSize == 1) { |
4234 | 437 | RetsVec.push_back( |
4235 | 437 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4236 | 437 | } else { |
4237 | 15 | RetsVec = Builder.createArrayPtrLoad(RetSize, RTy, Rets, Context.Int8Ty, |
4238 | 15 | kValSize); |
4239 | 15 | } |
4240 | 592 | Builder.createBr(EndBB); |
4241 | 592 | Builder.positionAtEnd(EndBB); |
4242 | 592 | } |
4243 | | |
4244 | 1.05k | for (unsigned I = 0; I < RetSize; ++I) { |
4245 | 467 | auto PHIRet = Builder.createPHI(FPtrRetsVec[I].getType()); |
4246 | 467 | PHIRet.addIncoming(FPtrRetsVec[I], NotNullBB); |
4247 | 467 | PHIRet.addIncoming(RetsVec[I], IsNullBB); |
4248 | 467 | stackPush(PHIRet); |
4249 | 467 | } |
4250 | 592 | } |
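compileIndirectCallOp generates two call paths and merges their results with one PHI per return value: when kTableGetFuncSymbol resolves the table slot to a compiled symbol, the call goes straight through the native signature; otherwise the operands are spilled into ValVariant-sized slots and kCallIndirect asks the runtime to perform the call. A much-reduced sketch of that dispatch for a single i64 argument and result (every name here is an illustrative placeholder, not a WasmEdge API):

#include <cstdint>

using CompiledFn = uint64_t (*)(void *ExecCtx, uint64_t Arg);
using InterpretFn = void (*)(void *ExecCtx, const uint64_t *Args,
                             uint64_t *Rets);

uint64_t callIndirect(void *ExecCtx, CompiledFn Symbol, InterpretFn Interpret,
                      uint64_t Arg) {
  if (Symbol != nullptr) {
    return Symbol(ExecCtx, Arg);   // fast path: direct call into AOT code
  }
  uint64_t Args[1] = {Arg};        // slow path: marshal through value slots
  uint64_t Rets[1] = {0};
  Interpret(ExecCtx, Args, Rets);  // runtime dispatch (kCallIndirect above)
  return Rets[0];
}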
4251 | | |
4252 | 0 | void compileReturnCallOp(const unsigned int FuncIndex) noexcept { |
4253 | 0 | const auto &FuncType = |
4254 | 0 | Context.CompositeTypes[std::get<0>(Context.Functions[FuncIndex])] |
4255 | 0 | ->getFuncType(); |
4256 | 0 | const auto &Function = std::get<1>(Context.Functions[FuncIndex]); |
4257 | 0 | const auto &ParamTypes = FuncType.getParamTypes(); |
4258 | |
4259 | 0 | std::vector<LLVM::Value> Args(ParamTypes.size() + 1); |
4260 | 0 | Args[0] = F.Fn.getFirstParam(); |
4261 | 0 | for (size_t I = 0; I < ParamTypes.size(); ++I) { |
4262 | 0 | const size_t J = ParamTypes.size() - 1 - I; |
4263 | 0 | Args[J + 1] = stackPop(); |
4264 | 0 | } |
4265 | |
4266 | 0 | auto Ret = Builder.createCall(Function, Args); |
4267 | 0 | auto Ty = Ret.getType(); |
4268 | 0 | if (Ty.isVoidTy()) { |
4269 | 0 | Builder.createRetVoid(); |
4270 | 0 | } else { |
4271 | 0 | Builder.createRet(Ret); |
4272 | 0 | } |
4273 | 0 | } |
4274 | | |
4275 | | void compileReturnIndirectCallOp(const uint32_t TableIndex, |
4276 | 0 | const uint32_t FuncTypeIndex) noexcept { |
4277 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.not_null"); |
4278 | 0 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.is_null"); |
4279 | |
4280 | 0 | LLVM::Value FuncIndex = stackPop(); |
4281 | 0 | const auto &FuncType = Context.CompositeTypes[FuncTypeIndex]->getFuncType(); |
4282 | 0 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4283 | 0 | auto RTy = FTy.getReturnType(); |
4284 | |
4285 | 0 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4286 | 0 | const size_t RetSize = |
4287 | 0 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4288 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4289 | 0 | ArgsVec[0] = F.Fn.getFirstParam(); |
4290 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
4291 | 0 | const size_t J = ArgSize - I; |
4292 | 0 | ArgsVec[J] = stackPop(); |
4293 | 0 | } |
4294 | |
4295 | 0 | { |
4296 | 0 | auto FPtr = Builder.createCall( |
4297 | 0 | Context.getIntrinsic( |
4298 | 0 | Builder, Executable::Intrinsics::kTableGetFuncSymbol, |
4299 | 0 | LLVM::Type::getFunctionType( |
4300 | 0 | FTy.getPointerTo(), |
4301 | 0 | {Context.Int32Ty, Context.Int32Ty, Context.Int32Ty}, false)), |
4302 | 0 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4303 | 0 | FuncIndex}); |
4304 | 0 | Builder.createCondBr( |
4305 | 0 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4306 | 0 | NotNullBB, IsNullBB); |
4307 | 0 | Builder.positionAtEnd(NotNullBB); |
4308 | |
4309 | 0 | auto FPtrRet = |
4310 | 0 | Builder.createCall(LLVM::FunctionCallee(FTy, FPtr), ArgsVec); |
4311 | 0 | if (RetSize == 0) { |
4312 | 0 | Builder.createRetVoid(); |
4313 | 0 | } else { |
4314 | 0 | Builder.createRet(FPtrRet); |
4315 | 0 | } |
4316 | 0 | } |
4317 | |
4318 | 0 | Builder.positionAtEnd(IsNullBB); |
4319 | |
4320 | 0 | { |
4321 | 0 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4322 | 0 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4323 | 0 | Builder.createArrayPtrStore( |
4324 | 0 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4325 | 0 | kValSize); |
4326 | |
4327 | 0 | Builder.createCall( |
4328 | 0 | Context.getIntrinsic( |
4329 | 0 | Builder, Executable::Intrinsics::kCallIndirect, |
4330 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
4331 | 0 | {Context.Int32Ty, Context.Int32Ty, |
4332 | 0 | Context.Int32Ty, Context.Int8PtrTy, |
4333 | 0 | Context.Int8PtrTy}, |
4334 | 0 | false)), |
4335 | 0 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4336 | 0 | FuncIndex, Args, Rets}); |
4337 | |
4338 | 0 | if (RetSize == 0) { |
4339 | 0 | Builder.createRetVoid(); |
4340 | 0 | } else if (RetSize == 1) { |
4341 | 0 | Builder.createRet( |
4342 | 0 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4343 | 0 | } else { |
4344 | 0 | Builder.createAggregateRet(Builder.createArrayPtrLoad( |
4345 | 0 | RetSize, RTy, Rets, Context.Int8Ty, kValSize)); |
4346 | 0 | } |
4347 | 0 | } |
4348 | 0 | } |
4349 | | |
4350 | 0 | void compileCallRefOp(const unsigned int TypeIndex) noexcept { |
4351 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.not_null"); |
4352 | 0 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.is_null"); |
4353 | 0 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.end"); |
4354 | |
4355 | 0 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
4356 | 0 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.ref_not_null"); |
4357 | 0 | auto IsRefNotNull = Builder.createLikely(Builder.createICmpNE( |
4358 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
4359 | 0 | LLContext.getInt64(0))); |
4360 | 0 | Builder.createCondBr(IsRefNotNull, OkBB, |
4361 | 0 | getTrapBB(ErrCode::Value::AccessNullFunc)); |
4362 | 0 | Builder.positionAtEnd(OkBB); |
4363 | |
4364 | 0 | const auto &FuncType = Context.CompositeTypes[TypeIndex]->getFuncType(); |
4365 | 0 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4366 | 0 | auto RTy = FTy.getReturnType(); |
4367 | |
4368 | 0 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4369 | 0 | const size_t RetSize = |
4370 | 0 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4371 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4372 | 0 | ArgsVec[0] = F.Fn.getFirstParam(); |
4373 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
4374 | 0 | const size_t J = ArgSize - I; |
4375 | 0 | ArgsVec[J] = stackPop(); |
4376 | 0 | } |
4377 | |
4378 | 0 | std::vector<LLVM::Value> FPtrRetsVec; |
4379 | 0 | FPtrRetsVec.reserve(RetSize); |
4380 | 0 | { |
4381 | 0 | auto FPtr = Builder.createCall( |
4382 | 0 | Context.getIntrinsic( |
4383 | 0 | Builder, Executable::Intrinsics::kRefGetFuncSymbol, |
4384 | 0 | LLVM::Type::getFunctionType(FTy.getPointerTo(), |
4385 | 0 | {Context.Int64x2Ty}, false)), |
4386 | 0 | {Ref}); |
4387 | 0 | Builder.createCondBr( |
4388 | 0 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4389 | 0 | NotNullBB, IsNullBB); |
4390 | 0 | Builder.positionAtEnd(NotNullBB); |
4391 | |
4392 | 0 | auto FPtrRet = |
4393 | 0 | Builder.createCall(LLVM::FunctionCallee{FTy, FPtr}, ArgsVec); |
4394 | 0 | if (RetSize == 0) { |
4395 | | // nothing to do |
4396 | 0 | } else if (RetSize == 1) { |
4397 | 0 | FPtrRetsVec.push_back(FPtrRet); |
4398 | 0 | } else { |
4399 | 0 | for (auto Val : unpackStruct(Builder, FPtrRet)) { |
4400 | 0 | FPtrRetsVec.push_back(Val); |
4401 | 0 | } |
4402 | 0 | } |
4403 | 0 | } |
4404 | |
4405 | 0 | Builder.createBr(EndBB); |
4406 | 0 | Builder.positionAtEnd(IsNullBB); |
4407 | |
4408 | 0 | std::vector<LLVM::Value> RetsVec; |
4409 | 0 | { |
4410 | 0 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4411 | 0 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4412 | 0 | Builder.createArrayPtrStore( |
4413 | 0 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4414 | 0 | kValSize); |
4415 | |
4416 | 0 | Builder.createCall( |
4417 | 0 | Context.getIntrinsic( |
4418 | 0 | Builder, Executable::Intrinsics::kCallRef, |
4419 | 0 | LLVM::Type::getFunctionType( |
4420 | 0 | Context.VoidTy, |
4421 | 0 | {Context.Int64x2Ty, Context.Int8PtrTy, Context.Int8PtrTy}, |
4422 | 0 | false)), |
4423 | 0 | {Ref, Args, Rets}); |
4424 | |
4425 | 0 | if (RetSize == 0) { |
4426 | | // nothing to do |
4427 | 0 | } else if (RetSize == 1) { |
4428 | 0 | RetsVec.push_back( |
4429 | 0 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4430 | 0 | } else { |
4431 | 0 | RetsVec = Builder.createArrayPtrLoad(RetSize, RTy, Rets, Context.Int8Ty, |
4432 | 0 | kValSize); |
4433 | 0 | } |
4434 | 0 | Builder.createBr(EndBB); |
4435 | 0 | Builder.positionAtEnd(EndBB); |
4436 | 0 | } |
4437 | |
4438 | 0 | for (unsigned I = 0; I < RetSize; ++I) { |
4439 | 0 | auto PHIRet = Builder.createPHI(FPtrRetsVec[I].getType()); |
4440 | 0 | PHIRet.addIncoming(FPtrRetsVec[I], NotNullBB); |
4441 | 0 | PHIRet.addIncoming(RetsVec[I], IsNullBB); |
4442 | 0 | stackPush(PHIRet); |
4443 | 0 | } |
4444 | 0 | } |
4445 | | |
4446 | 0 | void compileReturnCallRefOp(const unsigned int TypeIndex) noexcept { |
4447 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.not_null"); |
4448 | 0 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.is_null"); |
4449 | |
4450 | 0 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
4451 | 0 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.ref_not_null"); |
4452 | 0 | auto IsRefNotNull = Builder.createLikely(Builder.createICmpNE( |
4453 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
4454 | 0 | LLContext.getInt64(0))); |
4455 | 0 | Builder.createCondBr(IsRefNotNull, OkBB, |
4456 | 0 | getTrapBB(ErrCode::Value::AccessNullFunc)); |
4457 | 0 | Builder.positionAtEnd(OkBB); |
4458 | |
4459 | 0 | const auto &FuncType = Context.CompositeTypes[TypeIndex]->getFuncType(); |
4460 | 0 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4461 | 0 | auto RTy = FTy.getReturnType(); |
4462 | |
4463 | 0 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4464 | 0 | const size_t RetSize = |
4465 | 0 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4466 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4467 | 0 | ArgsVec[0] = F.Fn.getFirstParam(); |
4468 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
4469 | 0 | const size_t J = ArgSize - I; |
4470 | 0 | ArgsVec[J] = stackPop(); |
4471 | 0 | } |
4472 | |
4473 | 0 | { |
4474 | 0 | auto FPtr = Builder.createCall( |
4475 | 0 | Context.getIntrinsic( |
4476 | 0 | Builder, Executable::Intrinsics::kRefGetFuncSymbol, |
4477 | 0 | LLVM::Type::getFunctionType(FTy.getPointerTo(), |
4478 | 0 | {Context.Int64x2Ty}, false)), |
4479 | 0 | {Ref}); |
4480 | 0 | Builder.createCondBr( |
4481 | 0 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4482 | 0 | NotNullBB, IsNullBB); |
4483 | 0 | Builder.positionAtEnd(NotNullBB); |
4484 | |
4485 | 0 | auto FPtrRet = |
4486 | 0 | Builder.createCall(LLVM::FunctionCallee(FTy, FPtr), ArgsVec); |
4487 | 0 | if (RetSize == 0) { |
4488 | 0 | Builder.createRetVoid(); |
4489 | 0 | } else { |
4490 | 0 | Builder.createRet(FPtrRet); |
4491 | 0 | } |
4492 | 0 | } |
4493 | |
4494 | 0 | Builder.positionAtEnd(IsNullBB); |
4495 | |
4496 | 0 | { |
4497 | 0 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4498 | 0 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4499 | 0 | Builder.createArrayPtrStore( |
4500 | 0 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4501 | 0 | kValSize); |
4502 | |
4503 | 0 | Builder.createCall( |
4504 | 0 | Context.getIntrinsic( |
4505 | 0 | Builder, Executable::Intrinsics::kCallRef, |
4506 | 0 | LLVM::Type::getFunctionType( |
4507 | 0 | Context.VoidTy, |
4508 | 0 | {Context.Int64x2Ty, Context.Int8PtrTy, Context.Int8PtrTy}, |
4509 | 0 | false)), |
4510 | 0 | {Ref, Args, Rets}); |
4511 | |
4512 | 0 | if (RetSize == 0) { |
4513 | 0 | Builder.createRetVoid(); |
4514 | 0 | } else if (RetSize == 1) { |
4515 | 0 | Builder.createRet( |
4516 | 0 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4517 | 0 | } else { |
4518 | 0 | Builder.createAggregateRet(Builder.createArrayPtrLoad( |
4519 | 0 | RetSize, RTy, Rets, Context.Int8Ty, kValSize)); |
4520 | 0 | } |
4521 | 0 | } |
4522 | 0 | } |
4523 | | |
4524 | | void compileLoadOp(unsigned MemoryIndex, unsigned Offset, unsigned Alignment, |
4525 | 18.1k | LLVM::Type LoadTy) noexcept { |
4526 | 18.1k | if constexpr (kForceUnalignment) { |
4527 | 18.1k | Alignment = 0; |
4528 | 18.1k | } |
4529 | 18.1k | auto Off = Builder.createZExt(stackPop(), Context.Int64Ty); |
4530 | 18.1k | if (Offset != 0) { |
4531 | 11.1k | Off = Builder.createAdd(Off, LLContext.getInt64(Offset)); |
4532 | 11.1k | } |
4533 | | |
4534 | 18.1k | auto VPtr = Builder.createInBoundsGEP1( |
4535 | 18.1k | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), Off); |
4536 | 18.1k | auto Ptr = Builder.createBitCast(VPtr, LoadTy.getPointerTo()); |
4537 | 18.1k | auto LoadInst = Builder.createLoad(LoadTy, Ptr, true); |
4538 | 18.1k | LoadInst.setAlignment(1 << Alignment); |
4539 | 18.1k | stackPush(LoadInst); |
4540 | 18.1k | } |
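Each load above zero-extends the 32-bit dynamic address, adds the static offset in 64-bit arithmetic, and issues a volatile load from linear memory whose alignment is forced to 1 while kForceUnalignment is set. A scalar sketch of the address math and the unaligned read (illustrative only; this sketch does not model bounds checks or traps, and the names are not WasmEdge APIs):

#include <cstdint>
#include <cstring>

template <typename T>
T loadFromLinearMemory(const uint8_t *MemBase, uint32_t DynAddr,
                       uint64_t StaticOffset) {
  const uint64_t EffAddr = static_cast<uint64_t>(DynAddr) + StaticOffset;
  T Value;
  std::memcpy(&Value, MemBase + EffAddr, sizeof(T)); // unaligned-safe read
  return Value;
}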
4541 | | void compileLoadOp(unsigned MemoryIndex, unsigned Offset, unsigned Alignment, |
4542 | | LLVM::Type LoadTy, LLVM::Type ExtendTy, |
4543 | 7.14k | bool Signed) noexcept { |
4544 | 7.14k | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4545 | 7.14k | if (Signed) { |
4546 | 3.09k | Stack.back() = Builder.createSExt(Stack.back(), ExtendTy); |
4547 | 4.05k | } else { |
4548 | 4.05k | Stack.back() = Builder.createZExt(Stack.back(), ExtendTy); |
4549 | 4.05k | } |
4550 | 7.14k | } |
4551 | | void compileVectorLoadOp(unsigned MemoryIndex, unsigned Offset, |
4552 | 5.21k | unsigned Alignment, LLVM::Type LoadTy) noexcept { |
4553 | 5.21k | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4554 | 5.21k | Stack.back() = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
4555 | 5.21k | } |
4556 | | void compileVectorLoadOp(unsigned MemoryIndex, unsigned Offset, |
4557 | | unsigned Alignment, LLVM::Type LoadTy, |
4558 | 1.55k | LLVM::Type ExtendTy, bool Signed) noexcept { |
4559 | 1.55k | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy, ExtendTy, Signed); |
4560 | 1.55k | Stack.back() = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
4561 | 1.55k | } |
4562 | | void compileSplatLoadOp(unsigned MemoryIndex, unsigned Offset, |
4563 | | unsigned Alignment, LLVM::Type LoadTy, |
4564 | 629 | LLVM::Type VectorTy) noexcept { |
4565 | 629 | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4566 | 629 | compileSplatOp(VectorTy); |
4567 | 629 | } |
4568 | | void compileLoadLaneOp(unsigned MemoryIndex, unsigned Offset, |
4569 | | unsigned Alignment, unsigned Index, LLVM::Type LoadTy, |
4570 | 490 | LLVM::Type VectorTy) noexcept { |
4571 | 490 | auto Vector = stackPop(); |
4572 | 490 | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4573 | 490 | auto Value = Stack.back(); |
4574 | 490 | Stack.back() = Builder.createBitCast( |
4575 | 490 | Builder.createInsertElement(Builder.createBitCast(Vector, VectorTy), |
4576 | 490 | Value, LLContext.getInt64(Index)), |
4577 | 490 | Context.Int64x2Ty); |
4578 | 490 | } |
4579 | | void compileStoreOp(unsigned MemoryIndex, unsigned Offset, unsigned Alignment, |
4580 | | LLVM::Type LoadTy, bool Trunc = false, |
4581 | 3.39k | bool BitCast = false) noexcept { |
4582 | 3.39k | if constexpr (kForceUnalignment) { |
4583 | 3.39k | Alignment = 0; |
4584 | 3.39k | } |
4585 | 3.39k | auto V = stackPop(); |
4586 | 3.39k | auto Off = Builder.createZExt(stackPop(), Context.Int64Ty); |
4587 | 3.39k | if (Offset != 0) { |
4588 | 2.33k | Off = Builder.createAdd(Off, LLContext.getInt64(Offset)); |
4589 | 2.33k | } |
4590 | | |
4591 | 3.39k | if (Trunc) { |
4592 | 765 | V = Builder.createTrunc(V, LoadTy); |
4593 | 765 | } |
4594 | 3.39k | if (BitCast) { |
4595 | 245 | V = Builder.createBitCast(V, LoadTy); |
4596 | 245 | } |
4597 | 3.39k | auto VPtr = Builder.createInBoundsGEP1( |
4598 | 3.39k | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), Off); |
4599 | 3.39k | auto Ptr = Builder.createBitCast(VPtr, LoadTy.getPointerTo()); |
4600 | 3.39k | auto StoreInst = Builder.createStore(V, Ptr, true); |
4601 | 3.39k | StoreInst.setAlignment(1 << Alignment); |
4602 | 3.39k | } |
4603 | | void compileStoreLaneOp(unsigned MemoryIndex, unsigned Offset, |
4604 | | unsigned Alignment, unsigned Index, LLVM::Type LoadTy, |
4605 | 339 | LLVM::Type VectorTy) noexcept { |
4606 | 339 | auto Vector = Stack.back(); |
4607 | 339 | Stack.back() = Builder.createExtractElement( |
4608 | 339 | Builder.createBitCast(Vector, VectorTy), LLContext.getInt64(Index)); |
4609 | 339 | compileStoreOp(MemoryIndex, Offset, Alignment, LoadTy); |
4610 | 339 | } |
4611 | 44.3k | void compileSplatOp(LLVM::Type VectorTy) noexcept { |
4612 | 44.3k | auto Undef = LLVM::Value::getUndef(VectorTy); |
4613 | 44.3k | auto Zeros = LLVM::Value::getConstNull( |
4614 | 44.3k | LLVM::Type::getVectorType(Context.Int32Ty, VectorTy.getVectorSize())); |
4615 | 44.3k | auto Value = Builder.createTrunc(Stack.back(), VectorTy.getElementType()); |
4616 | 44.3k | auto Vector = |
4617 | 44.3k | Builder.createInsertElement(Undef, Value, LLContext.getInt64(0)); |
4618 | 44.3k | Vector = Builder.createShuffleVector(Vector, Undef, Zeros); |
4619 | | |
4620 | 44.3k | Stack.back() = Builder.createBitCast(Vector, Context.Int64x2Ty); |
4621 | 44.3k | } |
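compileSplatOp broadcasts one scalar into every lane by truncating it to the element type, inserting it at index 0, and shuffling with an all-zero mask. The net effect, as a sketch over std::array (illustrative only, not WasmEdge code):

#include <array>
#include <cstddef>
#include <cstdint>

template <typename ElemT, std::size_t N>
std::array<ElemT, N> splat(uint64_t Scalar) {
  std::array<ElemT, N> V;
  V.fill(static_cast<ElemT>(Scalar)); // the truncation mirrors createTrunc above
  return V;
}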
4622 | 1.32k | void compileExtractLaneOp(LLVM::Type VectorTy, unsigned Index) noexcept { |
4623 | 1.32k | auto Vector = Builder.createBitCast(Stack.back(), VectorTy); |
4624 | 1.32k | Stack.back() = |
4625 | 1.32k | Builder.createExtractElement(Vector, LLContext.getInt64(Index)); |
4626 | 1.32k | } |
4627 | | void compileExtractLaneOp(LLVM::Type VectorTy, unsigned Index, |
4628 | 980 | LLVM::Type ExtendTy, bool Signed) noexcept { |
4629 | 980 | compileExtractLaneOp(VectorTy, Index); |
4630 | 980 | if (Signed) { |
4631 | 478 | Stack.back() = Builder.createSExt(Stack.back(), ExtendTy); |
4632 | 502 | } else { |
4633 | 502 | Stack.back() = Builder.createZExt(Stack.back(), ExtendTy); |
4634 | 502 | } |
4635 | 980 | } |
4636 | 707 | void compileReplaceLaneOp(LLVM::Type VectorTy, unsigned Index) noexcept { |
4637 | 707 | auto Value = Builder.createTrunc(stackPop(), VectorTy.getElementType()); |
4638 | 707 | auto Vector = Stack.back(); |
4639 | 707 | Stack.back() = Builder.createBitCast( |
4640 | 707 | Builder.createInsertElement(Builder.createBitCast(Vector, VectorTy), |
4641 | 707 | Value, LLContext.getInt64(Index)), |
4642 | 707 | Context.Int64x2Ty); |
4643 | 707 | } |
4644 | | void compileVectorCompareOp(LLVM::Type VectorTy, |
4645 | 5.36k | LLVMIntPredicate Predicate) noexcept { |
4646 | 5.36k | auto RHS = stackPop(); |
4647 | 5.36k | auto LHS = stackPop(); |
4648 | 5.36k | auto Result = Builder.createSExt( |
4649 | 5.36k | Builder.createICmp(Predicate, Builder.createBitCast(LHS, VectorTy), |
4650 | 5.36k | Builder.createBitCast(RHS, VectorTy)), |
4651 | 5.36k | VectorTy); |
4652 | 5.36k | stackPush(Builder.createBitCast(Result, Context.Int64x2Ty)); |
4653 | 5.36k | } |
4654 | | void compileVectorCompareOp(LLVM::Type VectorTy, LLVMRealPredicate Predicate, |
4655 | 3.48k | LLVM::Type ResultTy) noexcept { |
4656 | 3.48k | auto RHS = stackPop(); |
4657 | 3.48k | auto LHS = stackPop(); |
4658 | 3.48k | auto Result = Builder.createSExt( |
4659 | 3.48k | Builder.createFCmp(Predicate, Builder.createBitCast(LHS, VectorTy), |
4660 | 3.48k | Builder.createBitCast(RHS, VectorTy)), |
4661 | 3.48k | ResultTy); |
4662 | 3.48k | stackPush(Builder.createBitCast(Result, Context.Int64x2Ty)); |
4663 | 3.48k | } |
4664 | | template <typename Func> |
4665 | 26.8k | void compileVectorOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4666 | 26.8k | auto V = Builder.createBitCast(Stack.back(), VectorTy); |
4667 | 26.8k | Stack.back() = Builder.createBitCast(Op(V), Context.Int64x2Ty); |
4668 | 26.8k | }
(per-instantiation coverage for compileVectorOp: compileVectorAbs 2.21k, compileVectorNeg 2.86k, compileVectorPopcnt 121, compileVectorExtAddPairwise 2.04k, compileVectorFAbs 506, compileVectorFNeg 945, compileVectorFSqrt 315, compileVectorFCeil 1.66k, compileVectorFFloor 2.69k, compileVectorFTrunc 2.06k, compileVectorFNearest 431, compileVectorTruncSatS32 960, compileVectorTruncSatU32 5.87k, compileVectorConvertS 670, compileVectorConvertU 1.98k, compileVectorDemote 734, compileVectorPromote 730)
4669 | 2.21k | void compileVectorAbs(LLVM::Type VectorTy) noexcept { |
4670 | 2.21k | compileVectorOp(VectorTy, [this, VectorTy](auto V) noexcept { |
4671 | 2.21k | auto Zero = LLVM::Value::getConstNull(VectorTy); |
4672 | 2.21k | auto C = Builder.createICmpSLT(V, Zero); |
4673 | 2.21k | return Builder.createSelect(C, Builder.createNeg(V), V); |
4674 | 2.21k | }); |
4675 | 2.21k | } |
4676 | 2.86k | void compileVectorNeg(LLVM::Type VectorTy) noexcept { |
4677 | 2.86k | compileVectorOp(VectorTy, |
4678 | 2.86k | [this](auto V) noexcept { return Builder.createNeg(V); }); |
4679 | 2.86k | } |
4680 | 121 | void compileVectorPopcnt() noexcept { |
4681 | 121 | compileVectorOp(Context.Int8x16Ty, [this](auto V) noexcept { |
4682 | 121 | assuming(LLVM::Core::Ctpop != LLVM::Core::NotIntrinsic); |
4683 | 121 | return Builder.createUnaryIntrinsic(LLVM::Core::Ctpop, V); |
4684 | 121 | }); |
4685 | 121 | } |
4686 | | template <typename Func> |
4687 | 2.03k | void compileVectorReduceIOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4688 | 2.03k | auto V = Builder.createBitCast(Stack.back(), VectorTy); |
4689 | 2.03k | Stack.back() = Builder.createZExt(Op(V), Context.Int32Ty); |
4690 | 2.03k | }
(per-instantiation coverage for compileVectorReduceIOp: compileVectorAnyTrue 112, compileVectorAllTrue 903, compileVectorBitMask 1.01k)
4691 | 112 | void compileVectorAnyTrue() noexcept { |
4692 | 112 | compileVectorReduceIOp(Context.Int128x1Ty, [this](auto V) noexcept { |
4693 | 112 | auto Zero = LLVM::Value::getConstNull(Context.Int128x1Ty); |
4694 | 112 | return Builder.createBitCast(Builder.createICmpNE(V, Zero), |
4695 | 112 | LLContext.getInt1Ty()); |
4696 | 112 | }); |
4697 | 112 | } |
4698 | 903 | void compileVectorAllTrue(LLVM::Type VectorTy) noexcept { |
4699 | 903 | compileVectorReduceIOp(VectorTy, [this, VectorTy](auto V) noexcept { |
4700 | 903 | const auto Size = VectorTy.getVectorSize(); |
4701 | 903 | auto IntType = LLContext.getIntNTy(Size); |
4702 | 903 | auto Zero = LLVM::Value::getConstNull(VectorTy); |
4703 | 903 | auto Cmp = Builder.createBitCast(Builder.createICmpEQ(V, Zero), IntType); |
4704 | 903 | auto CmpZero = LLVM::Value::getConstInt(IntType, 0); |
4705 | 903 | return Builder.createICmpEQ(Cmp, CmpZero); |
4706 | 903 | }); |
4707 | 903 | } |
4708 | 1.01k | void compileVectorBitMask(LLVM::Type VectorTy) noexcept { |
4709 | 1.01k | compileVectorReduceIOp(VectorTy, [this, VectorTy](auto V) noexcept { |
4710 | 1.01k | const auto Size = VectorTy.getVectorSize(); |
4711 | 1.01k | auto IntType = LLContext.getIntNTy(Size); |
4712 | 1.01k | auto Zero = LLVM::Value::getConstNull(VectorTy); |
4713 | 1.01k | return Builder.createBitCast(Builder.createICmpSLT(V, Zero), IntType); |
4714 | 1.01k | }); |
4715 | 1.01k | } |
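compileVectorBitMask compares each lane against zero with a signed less-than (which selects the lane's sign bit) and bitcasts the resulting i1 vector to an N-bit integer, so lane I's sign bit becomes bit I of the scalar result. An equivalent scalar loop (a sketch assuming a signed lane type; names are illustrative, not WasmEdge APIs):

#include <array>
#include <cstddef>
#include <cstdint>

template <typename LaneT, std::size_t N>
uint32_t bitMask(const std::array<LaneT, N> &V) {
  uint32_t Mask = 0;
  for (std::size_t I = 0; I < N; ++I) {
    if (V[I] < 0) {        // lane sign bit set
      Mask |= 1u << I;     // becomes bit I of the i32 result
    }
  }
  return Mask;
}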
4716 | | template <typename Func> |
4717 | 3.46k | void compileVectorShiftOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4718 | 3.46k | const bool Trunc = VectorTy.getElementType().getIntegerBitWidth() < 32; |
4719 | 3.46k | const uint32_t Mask = VectorTy.getElementType().getIntegerBitWidth() - 1; |
4720 | 3.46k | auto N = Builder.createAnd(stackPop(), LLContext.getInt32(Mask)); |
4721 | 3.46k | auto RHS = Builder.createVectorSplat( |
4722 | 3.46k | VectorTy.getVectorSize(), |
4723 | 3.46k | Trunc ? Builder.createTrunc(N, VectorTy.getElementType()) |
4724 | 3.46k | : Builder.createZExtOrTrunc(N, VectorTy.getElementType())); |
4725 | 3.46k | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
4726 | 3.46k | stackPush(Builder.createBitCast(Op(LHS, RHS), Context.Int64x2Ty)); |
4727 | 3.46k | }
(per-instantiation coverage for compileVectorShiftOp: compileVectorShl 1.33k, compileVectorAShr 1.76k, compileVectorLShr 364)
4728 | 1.33k | void compileVectorShl(LLVM::Type VectorTy) noexcept { |
4729 | 1.33k | compileVectorShiftOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4730 | 1.33k | return Builder.createShl(LHS, RHS); |
4731 | 1.33k | }); |
4732 | 1.33k | } |
4733 | 364 | void compileVectorLShr(LLVM::Type VectorTy) noexcept { |
4734 | 364 | compileVectorShiftOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4735 | 364 | return Builder.createLShr(LHS, RHS); |
4736 | 364 | }); |
4737 | 364 | } |
4738 | 1.76k | void compileVectorAShr(LLVM::Type VectorTy) noexcept { |
4739 | 1.76k | compileVectorShiftOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4740 | 1.76k | return Builder.createAShr(LHS, RHS); |
4741 | 1.76k | }); |
4742 | 1.76k | } |
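The shift helpers AND the scalar shift count with the lane width minus one before splatting it, matching WebAssembly's modulo semantics for vector shifts. A scalar sketch of the per-lane behaviour (hypothetical helper names, illustration only):

#include <cstdint>

uint8_t i8x16_shl_lane(uint8_t Lane, uint32_t Count) {
  return static_cast<uint8_t>(Lane << (Count & 7)); // count taken modulo 8
}
int8_t i8x16_shr_s_lane(int8_t Lane, uint32_t Count) {
  return static_cast<int8_t>(Lane >> (Count & 7)); // arithmetic shift per lane
}
// i8x16_shl_lane(1, 9) == 2, because 9 & 7 == 1.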
4743 | | template <typename Func> |
4744 | 7.30k | void compileVectorVectorOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4745 | 7.30k | auto RHS = Builder.createBitCast(stackPop(), VectorTy); |
4746 | 7.30k | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
4747 | 7.30k | stackPush(Builder.createBitCast(Op(LHS, RHS), Context.Int64x2Ty)); |
4748 | 7.30k | } |
    (template instantiation counts for compileVectorVectorOp, via the compileVectorVector* callers: Add 325, AddSat 973, Sub 782, SubSat 348, SMin 326, UMin 298, SMax 509, UMax 768, UAvgr 239, Mul 432, Q15MulSat 133, FAdd 177, FSub 471, FMul 201, FDiv 230, FMin 303, FMax 212, FPMin 294, FPMax 280)
4749 | 325 | void compileVectorVectorAdd(LLVM::Type VectorTy) noexcept { |
4750 | 325 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4751 | 325 | return Builder.createAdd(LHS, RHS); |
4752 | 325 | }); |
4753 | 325 | } |
4754 | 973 | void compileVectorVectorAddSat(LLVM::Type VectorTy, bool Signed) noexcept { |
4755 | 973 | auto ID = Signed ? LLVM::Core::SAddSat : LLVM::Core::UAddSat; |
4756 | 973 | assuming(ID != LLVM::Core::NotIntrinsic); |
4757 | 973 | compileVectorVectorOp( |
4758 | 973 | VectorTy, [this, VectorTy, ID](auto LHS, auto RHS) noexcept { |
4759 | 973 | return Builder.createIntrinsic(ID, {VectorTy}, {LHS, RHS}); |
4760 | 973 | }); |
4761 | 973 | } |
4762 | 782 | void compileVectorVectorSub(LLVM::Type VectorTy) noexcept { |
4763 | 782 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4764 | 782 | return Builder.createSub(LHS, RHS); |
4765 | 782 | }); |
4766 | 782 | } |
4767 | 348 | void compileVectorVectorSubSat(LLVM::Type VectorTy, bool Signed) noexcept { |
4768 | 348 | auto ID = Signed ? LLVM::Core::SSubSat : LLVM::Core::USubSat; |
4769 | 348 | assuming(ID != LLVM::Core::NotIntrinsic); |
4770 | 348 | compileVectorVectorOp( |
4771 | 348 | VectorTy, [this, VectorTy, ID](auto LHS, auto RHS) noexcept { |
4772 | 348 | return Builder.createIntrinsic(ID, {VectorTy}, {LHS, RHS}); |
4773 | 348 | }); |
4774 | 348 | } |
4775 | 432 | void compileVectorVectorMul(LLVM::Type VectorTy) noexcept { |
4776 | 432 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4777 | 432 | return Builder.createMul(LHS, RHS); |
4778 | 432 | }); |
4779 | 432 | } |
4780 | 81 | void compileVectorSwizzle() noexcept { |
4781 | 81 | auto Index = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
4782 | 81 | auto Vector = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
4783 | | |
4784 | 81 | #if defined(__x86_64__) |
4785 | 81 | if (Context.SupportSSSE3) { |
4786 | 81 | auto Magic = Builder.createVectorSplat(16, LLContext.getInt8(112)); |
4787 | 81 | auto Added = Builder.createAdd(Index, Magic); |
4788 | 81 | auto NewIndex = Builder.createSelect( |
4789 | 81 | Builder.createICmpUGT(Index, Added), |
4790 | 81 | LLVM::Value::getConstAllOnes(Context.Int8x16Ty), Added); |
4791 | 81 | assuming(LLVM::Core::X86SSSE3PShufB128 != LLVM::Core::NotIntrinsic); |
4792 | 81 | stackPush(Builder.createBitCast( |
4793 | 81 | Builder.createIntrinsic(LLVM::Core::X86SSSE3PShufB128, {}, |
4794 | 81 | {Vector, NewIndex}), |
4795 | 81 | Context.Int64x2Ty)); |
4796 | 81 | return; |
4797 | 81 | } |
4798 | 0 | #endif |
4799 | | |
4800 | | #if defined(__aarch64__) |
4801 | | if (Context.SupportNEON) { |
4802 | | assuming(LLVM::Core::AArch64NeonTbl1 != LLVM::Core::NotIntrinsic); |
4803 | | stackPush(Builder.createBitCast( |
4804 | | Builder.createIntrinsic(LLVM::Core::AArch64NeonTbl1, |
4805 | | {Context.Int8x16Ty}, {Vector, Index}), |
4806 | | Context.Int64x2Ty)); |
4807 | | return; |
4808 | | } |
4809 | | #endif |
4810 | | |
4811 | | // Fallback case. |
4812 | | // If the SSSE3 is not supported on the x86_64 platform or |
4813 | | // the NEON is not supported on the aarch64 platform, |
4814 | | // then fall back to this. |
4815 | 0 | auto Mask = Builder.createVectorSplat(16, LLContext.getInt8(15)); |
4816 | 0 | auto Zero = Builder.createVectorSplat(16, LLContext.getInt8(0)); |
4817 | 0 | auto IsOver = Builder.createICmpUGT(Index, Mask); |
4818 | 0 | auto InboundIndex = Builder.createAnd(Index, Mask); |
4819 | 0 | auto Array = Builder.createArray(16, 1); |
4820 | 0 | for (size_t I = 0; I < 16; ++I) { |
4821 | 0 | Builder.createStore( |
4822 | 0 | Builder.createExtractElement(Vector, LLContext.getInt64(I)), |
4823 | 0 | Builder.createInBoundsGEP1(Context.Int8Ty, Array, |
4824 | 0 | LLContext.getInt64(I))); |
4825 | 0 | } |
4826 | 0 | LLVM::Value Ret = LLVM::Value::getUndef(Context.Int8x16Ty); |
4827 | 0 | for (size_t I = 0; I < 16; ++I) { |
4828 | 0 | auto Idx = |
4829 | 0 | Builder.createExtractElement(InboundIndex, LLContext.getInt64(I)); |
4830 | 0 | auto Value = Builder.createLoad( |
4831 | 0 | Context.Int8Ty, |
4832 | 0 | Builder.createInBoundsGEP1(Context.Int8Ty, Array, Idx)); |
4833 | 0 | Ret = Builder.createInsertElement(Ret, Value, LLContext.getInt64(I)); |
4834 | 0 | } |
4835 | 0 | Ret = Builder.createSelect(IsOver, Zero, Ret); |
4836 | 0 | stackPush(Builder.createBitCast(Ret, Context.Int64x2Ty)); |
4837 | 0 | } |
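All three paths implement i8x16.swizzle: each output byte selects Vector[Index[i]] when the index is below 16 and becomes zero otherwise; the SSSE3 path biases the indices so out-of-range lanes end up with their sign bit set, which PSHUFB zeroes. A scalar reference of the semantics (hypothetical helper name, not the emitted IR):

#include <array>
#include <cstdint>

std::array<uint8_t, 16> swizzle(const std::array<uint8_t, 16> &Vec,
                                const std::array<uint8_t, 16> &Idx) {
  std::array<uint8_t, 16> Out{};
  for (int I = 0; I < 16; ++I) {
    Out[I] = Idx[I] < 16 ? Vec[Idx[I]] : 0; // out-of-range lanes become zero
  }
  return Out;
}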
4838 | | |
4839 | 133 | void compileVectorVectorQ15MulSat() noexcept { |
4840 | 133 | compileVectorVectorOp( |
4841 | 133 | Context.Int16x8Ty, [this](auto LHS, auto RHS) noexcept -> LLVM::Value { |
4842 | 133 | #if defined(__x86_64__) |
4843 | 133 | if (Context.SupportSSSE3) { |
4844 | 133 | assuming(LLVM::Core::X86SSSE3PMulHrSw128 != |
4845 | 133 | LLVM::Core::NotIntrinsic); |
4846 | 133 | auto Result = Builder.createIntrinsic( |
4847 | 133 | LLVM::Core::X86SSSE3PMulHrSw128, {}, {LHS, RHS}); |
4848 | 133 | auto IntMaxV = Builder.createVectorSplat( |
4849 | 133 | 8, LLContext.getInt16(UINT16_C(0x8000))); |
4850 | 133 | auto NotOver = Builder.createSExt( |
4851 | 133 | Builder.createICmpEQ(Result, IntMaxV), Context.Int16x8Ty); |
4852 | 133 | return Builder.createXor(Result, NotOver); |
4853 | 133 | } |
4854 | 0 | #endif |
4855 | | |
4856 | | #if defined(__aarch64__) |
4857 | | if (Context.SupportNEON) { |
4858 | | assuming(LLVM::Core::AArch64NeonSQRDMulH != |
4859 | | LLVM::Core::NotIntrinsic); |
4860 | | return Builder.createBinaryIntrinsic( |
4861 | | LLVM::Core::AArch64NeonSQRDMulH, LHS, RHS); |
4862 | | } |
4863 | | #endif |
4864 | | |
4865 | | // Fallback case. |
4866 | | // If the SSSE3 is not supported on the x86_64 platform or |
4867 | | // the NEON is not supported on the aarch64 platform, |
4868 | | // then fall back to this. |
4869 | 0 | auto ExtTy = Context.Int16x8Ty.getExtendedElementVectorType(); |
4870 | 0 | auto Offset = Builder.createVectorSplat( |
4871 | 0 | 8, LLContext.getInt32(UINT32_C(0x4000))); |
4872 | 0 | auto Shift = |
4873 | 0 | Builder.createVectorSplat(8, LLContext.getInt32(UINT32_C(15))); |
4874 | 0 | auto ExtLHS = Builder.createSExt(LHS, ExtTy); |
4875 | 0 | auto ExtRHS = Builder.createSExt(RHS, ExtTy); |
4876 | 0 | auto Result = Builder.createTrunc( |
4877 | 0 | Builder.createAShr( |
4878 | 0 | Builder.createAdd(Builder.createMul(ExtLHS, ExtRHS), Offset), |
4879 | 0 | Shift), |
4880 | 0 | Context.Int16x8Ty); |
4881 | 0 | auto IntMaxV = Builder.createVectorSplat( |
4882 | 0 | 8, LLContext.getInt16(UINT16_C(0x8000))); |
4883 | 0 | auto NotOver = Builder.createSExt( |
4884 | 0 | Builder.createICmpEQ(Result, IntMaxV), Context.Int16x8Ty); |
4885 | 0 | return Builder.createXor(Result, NotOver); |
4886 | 133 | }); |
4887 | 133 | } |
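The Q15 rounding multiply computes (a * b + 0x4000) >> 15 per lane; the only input pair that can exceed INT16_MAX is INT16_MIN * INT16_MIN, and the trailing compare-and-XOR flips exactly that lane. A scalar sketch of one lane, assuming it mirrors the fallback arithmetic above (hypothetical helper name):

#include <cstdint>

int16_t q15mulr_sat(int16_t A, int16_t B) {
  // Rounding Q15 multiply; only INT16_MIN * INT16_MIN can exceed INT16_MAX.
  int32_t Product = (static_cast<int32_t>(A) * B + 0x4000) >> 15;
  if (Product > INT16_MAX) {
    Product = INT16_MAX;
  }
  return static_cast<int16_t>(Product);
}
// q15mulr_sat(-32768, -32768) == 32767; q15mulr_sat(16384, 16384) == 8192.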
4888 | 326 | void compileVectorVectorSMin(LLVM::Type VectorTy) noexcept { |
4889 | 326 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4890 | 326 | auto C = Builder.createICmpSLE(LHS, RHS); |
4891 | 326 | return Builder.createSelect(C, LHS, RHS); |
4892 | 326 | }); |
4893 | 326 | } |
4894 | 298 | void compileVectorVectorUMin(LLVM::Type VectorTy) noexcept { |
4895 | 298 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4896 | 298 | auto C = Builder.createICmpULE(LHS, RHS); |
4897 | 298 | return Builder.createSelect(C, LHS, RHS); |
4898 | 298 | }); |
4899 | 298 | } |
4900 | 509 | void compileVectorVectorSMax(LLVM::Type VectorTy) noexcept { |
4901 | 509 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4902 | 509 | auto C = Builder.createICmpSGE(LHS, RHS); |
4903 | 509 | return Builder.createSelect(C, LHS, RHS); |
4904 | 509 | }); |
4905 | 509 | } |
4906 | 768 | void compileVectorVectorUMax(LLVM::Type VectorTy) noexcept { |
4907 | 768 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4908 | 768 | auto C = Builder.createICmpUGE(LHS, RHS); |
4909 | 768 | return Builder.createSelect(C, LHS, RHS); |
4910 | 768 | }); |
4911 | 768 | } |
4912 | 239 | void compileVectorVectorUAvgr(LLVM::Type VectorTy) noexcept { |
4913 | 239 | auto ExtendTy = VectorTy.getExtendedElementVectorType(); |
4914 | 239 | compileVectorVectorOp( |
4915 | 239 | VectorTy, |
4916 | 239 | [this, VectorTy, ExtendTy](auto LHS, auto RHS) noexcept -> LLVM::Value { |
4917 | 239 | #if defined(__x86_64__) |
4918 | 239 | if (Context.SupportSSE2) { |
4919 | 239 | const auto ID = [VectorTy]() noexcept { |
4920 | 239 | switch (VectorTy.getElementType().getIntegerBitWidth()) { |
4921 | 130 | case 8: |
4922 | 130 | return LLVM::Core::X86SSE2PAvgB; |
4923 | 109 | case 16: |
4924 | 109 | return LLVM::Core::X86SSE2PAvgW; |
4925 | 0 | default: |
4926 | 0 | assumingUnreachable(); |
4927 | 239 | } |
4928 | 239 | }(); |
4929 | 239 | assuming(ID != LLVM::Core::NotIntrinsic); |
4930 | 239 | return Builder.createIntrinsic(ID, {}, {LHS, RHS}); |
4931 | 239 | } |
4932 | 0 | #endif |
4933 | | |
4934 | | #if defined(__aarch64__) |
4935 | | if (Context.SupportNEON) { |
4936 | | assuming(LLVM::Core::AArch64NeonURHAdd != LLVM::Core::NotIntrinsic); |
4937 | | return Builder.createBinaryIntrinsic(LLVM::Core::AArch64NeonURHAdd, |
4938 | | LHS, RHS); |
4939 | | } |
4940 | | #endif |
4941 | | |
4942 | | // Fallback case. |
4943 | | // If the SSE2 is not supported on the x86_64 platform or |
4944 | | // the NEON is not supported on the aarch64 platform, |
4945 | | // then fall back to this. |
4946 | 0 | auto EL = Builder.createZExt(LHS, ExtendTy); |
4947 | 0 | auto ER = Builder.createZExt(RHS, ExtendTy); |
4948 | 0 | auto One = Builder.createZExt( |
4949 | 0 | Builder.createVectorSplat(ExtendTy.getVectorSize(), |
4950 | 0 | LLContext.getTrue()), |
4951 | 0 | ExtendTy); |
4952 | 0 | return Builder.createTrunc( |
4953 | 0 | Builder.createLShr( |
4954 | 0 | Builder.createAdd(Builder.createAdd(EL, ER), One), One), |
4955 | 0 | VectorTy); |
4956 | 239 | }); |
4957 | 239 | } |
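The fallback path computes the rounding average (a + b + 1) >> 1 in the widened element type so the intermediate sum cannot wrap. One lane, as a scalar sketch (hypothetical helper name):

#include <cstdint>

uint8_t avgr_u8(uint8_t A, uint8_t B) {
  // Widen, add one for rounding, halve, then narrow back.
  return static_cast<uint8_t>((static_cast<uint16_t>(A) + B + 1) >> 1);
}
// avgr_u8(255, 254) == 255 and avgr_u8(0, 1) == 1.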
4958 | 678 | void compileVectorNarrow(LLVM::Type FromTy, bool Signed) noexcept { |
4959 | 678 | auto [MinInt, |
4960 | 678 | MaxInt] = [&]() noexcept -> std::tuple<LLVM::Value, LLVM::Value> { |
4961 | 678 | switch (FromTy.getElementType().getIntegerBitWidth()) { |
4962 | 264 | case 16: { |
4963 | 264 | const auto Min = |
4964 | 264 | static_cast<int16_t>(Signed ? std::numeric_limits<int8_t>::min() |
4965 | 264 | : std::numeric_limits<uint8_t>::min()); |
4966 | 264 | const auto Max = |
4967 | 264 | static_cast<int16_t>(Signed ? std::numeric_limits<int8_t>::max() |
4968 | 264 | : std::numeric_limits<uint8_t>::max()); |
4969 | 264 | return {LLContext.getInt16(static_cast<uint16_t>(Min)), |
4970 | 264 | LLContext.getInt16(static_cast<uint16_t>(Max))}; |
4971 | 0 | } |
4972 | 414 | case 32: { |
4973 | 414 | const auto Min = |
4974 | 414 | static_cast<int32_t>(Signed ? std::numeric_limits<int16_t>::min() |
4975 | 414 | : std::numeric_limits<uint16_t>::min()); |
4976 | 414 | const auto Max = |
4977 | 414 | static_cast<int32_t>(Signed ? std::numeric_limits<int16_t>::max() |
4978 | 414 | : std::numeric_limits<uint16_t>::max()); |
4979 | 414 | return {LLContext.getInt32(static_cast<uint32_t>(Min)), |
4980 | 414 | LLContext.getInt32(static_cast<uint32_t>(Max))}; |
4981 | 0 | } |
4982 | 0 | default: |
4983 | 0 | assumingUnreachable(); |
4984 | 678 | } |
4985 | 678 | }(); |
4986 | 678 | const auto Count = FromTy.getVectorSize(); |
4987 | 678 | auto VMin = Builder.createVectorSplat(Count, MinInt); |
4988 | 678 | auto VMax = Builder.createVectorSplat(Count, MaxInt); |
4989 | | |
4990 | 678 | auto TruncTy = FromTy.getTruncatedElementVectorType(); |
4991 | | |
4992 | 678 | auto F2 = Builder.createBitCast(stackPop(), FromTy); |
4993 | 678 | F2 = Builder.createSelect(Builder.createICmpSLT(F2, VMin), VMin, F2); |
4994 | 678 | F2 = Builder.createSelect(Builder.createICmpSGT(F2, VMax), VMax, F2); |
4995 | 678 | F2 = Builder.createTrunc(F2, TruncTy); |
4996 | | |
4997 | 678 | auto F1 = Builder.createBitCast(stackPop(), FromTy); |
4998 | 678 | F1 = Builder.createSelect(Builder.createICmpSLT(F1, VMin), VMin, F1); |
4999 | 678 | F1 = Builder.createSelect(Builder.createICmpSGT(F1, VMax), VMax, F1); |
5000 | 678 | F1 = Builder.createTrunc(F1, TruncTy); |
5001 | | |
5002 | 678 | std::vector<uint32_t> Mask(Count * 2); |
5003 | 678 | std::iota(Mask.begin(), Mask.end(), 0); |
5004 | 678 | stackPush(Builder.createBitCast( |
5005 | 678 | Builder.createShuffleVector( |
5006 | 678 | F1, F2, LLVM::Value::getConstVector32(LLContext, Mask)), |
5007 | 678 | Context.Int64x2Ty)); |
5008 | 678 | } |
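compileVectorNarrow clamps both source vectors into the destination lane's range, truncates them, and concatenates the two halves with a shuffle. A scalar sketch of the per-lane clamp for the signed i16-to-i8 case (the unsigned case clamps to [0, 255] instead; hypothetical helper name):

#include <algorithm>
#include <cstdint>

int8_t narrow_i16_to_i8_s(int16_t V) {
  // Clamp into the destination range, then truncate; the vector version does
  // this for both operands and interleaves the halves with a shuffle.
  return static_cast<int8_t>(std::clamp<int16_t>(V, INT8_MIN, INT8_MAX));
}
// narrow_i16_to_i8_s(300) == 127 and narrow_i16_to_i8_s(-300) == -128.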
5009 | 5.24k | void compileVectorExtend(LLVM::Type FromTy, bool Signed, bool Low) noexcept { |
5010 | 5.24k | auto ExtTy = FromTy.getExtendedElementVectorType(); |
5011 | 5.24k | const auto Count = FromTy.getVectorSize(); |
5012 | 5.24k | std::vector<uint32_t> Mask(Count / 2); |
5013 | 5.24k | std::iota(Mask.begin(), Mask.end(), Low ? 0 : Count / 2); |
5014 | 5.24k | auto R = Builder.createBitCast(Stack.back(), FromTy); |
5015 | 5.24k | if (Signed) { |
5016 | 2.13k | R = Builder.createSExt(R, ExtTy); |
5017 | 3.11k | } else { |
5018 | 3.11k | R = Builder.createZExt(R, ExtTy); |
5019 | 3.11k | } |
5020 | 5.24k | R = Builder.createShuffleVector( |
5021 | 5.24k | R, LLVM::Value::getUndef(ExtTy), |
5022 | 5.24k | LLVM::Value::getConstVector32(LLContext, Mask)); |
5023 | 5.24k | Stack.back() = Builder.createBitCast(R, Context.Int64x2Ty); |
5024 | 5.24k | } |
5025 | 1.76k | void compileVectorExtMul(LLVM::Type FromTy, bool Signed, bool Low) noexcept { |
5026 | 1.76k | auto ExtTy = FromTy.getExtendedElementVectorType(); |
5027 | 1.76k | const auto Count = FromTy.getVectorSize(); |
5028 | 1.76k | std::vector<uint32_t> Mask(Count / 2); |
5029 | 1.76k | std::iota(Mask.begin(), Mask.end(), Low ? 0 : Count / 2); |
5030 | 3.52k | auto Extend = [this, FromTy, Signed, ExtTy, &Mask](LLVM::Value R) noexcept { |
5031 | 3.52k | R = Builder.createBitCast(R, FromTy); |
5032 | 3.52k | if (Signed) { |
5033 | 1.52k | R = Builder.createSExt(R, ExtTy); |
5034 | 1.99k | } else { |
5035 | 1.99k | R = Builder.createZExt(R, ExtTy); |
5036 | 1.99k | } |
5037 | 3.52k | return Builder.createShuffleVector( |
5038 | 3.52k | R, LLVM::Value::getUndef(ExtTy), |
5039 | 3.52k | LLVM::Value::getConstVector32(LLContext, Mask)); |
5040 | 3.52k | }; |
5041 | 1.76k | auto RHS = Extend(stackPop()); |
5042 | 1.76k | auto LHS = Extend(stackPop()); |
5043 | 1.76k | stackPush( |
5044 | 1.76k | Builder.createBitCast(Builder.createMul(RHS, LHS), Context.Int64x2Ty)); |
5045 | 1.76k | } |
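extmul widens the selected half of each operand first and multiplies in the wider type, so the product never wraps. One lane, as a scalar sketch (hypothetical helper name):

#include <cstdint>

int32_t extmul_lane_s(int16_t A, int16_t B) {
  // Both operands are widened before the multiply, so the product cannot wrap.
  return static_cast<int32_t>(A) * static_cast<int32_t>(B);
}
// extmul_lane_s(-32768, -32768) == 1073741824, far outside the i16 range.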
5046 | 2.04k | void compileVectorExtAddPairwise(LLVM::Type VectorTy, bool Signed) noexcept { |
5047 | 2.04k | compileVectorOp( |
5048 | 2.04k | VectorTy, [this, VectorTy, Signed](auto V) noexcept -> LLVM::Value { |
5049 | 2.04k | auto ExtTy = VectorTy.getExtendedElementVectorType() |
5050 | 2.04k | .getHalfElementsVectorType(); |
5051 | 2.04k | #if defined(__x86_64__) |
5052 | 2.04k | const auto Count = VectorTy.getVectorSize(); |
5053 | 2.04k | if (Context.SupportXOP) { |
5054 | 0 | const auto ID = [Count, Signed]() noexcept { |
5055 | 0 | switch (Count) { |
5056 | 0 | case 8: |
5057 | 0 | return Signed ? LLVM::Core::X86XOpVPHAddWD |
5058 | 0 | : LLVM::Core::X86XOpVPHAddUWD; |
5059 | 0 | case 16: |
5060 | 0 | return Signed ? LLVM::Core::X86XOpVPHAddBW |
5061 | 0 | : LLVM::Core::X86XOpVPHAddUBW; |
5062 | 0 | default: |
5063 | 0 | assumingUnreachable(); |
5064 | 0 | } |
5065 | 0 | }(); |
5066 | 0 | assuming(ID != LLVM::Core::NotIntrinsic); |
5067 | 0 | return Builder.createUnaryIntrinsic(ID, V); |
5068 | 0 | } |
5069 | 2.04k | if (Context.SupportSSSE3 && Count == 16) { |
5070 | 710 | assuming(LLVM::Core::X86SSSE3PMAddUbSw128 != |
5071 | 710 | LLVM::Core::NotIntrinsic); |
5072 | 710 | if (Signed) { |
5073 | 374 | return Builder.createIntrinsic( |
5074 | 374 | LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5075 | 374 | {Builder.createVectorSplat(16, LLContext.getInt8(1)), V}); |
5076 | 374 | } else { |
5077 | 336 | return Builder.createIntrinsic( |
5078 | 336 | LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5079 | 336 | {V, Builder.createVectorSplat(16, LLContext.getInt8(1))}); |
5080 | 336 | } |
5081 | 710 | } |
5082 | 1.33k | if (Context.SupportSSE2 && Count == 8) { |
5083 | 1.33k | assuming(LLVM::Core::X86SSE2PMAddWd != LLVM::Core::NotIntrinsic); |
5084 | 1.33k | if (Signed) { |
5085 | 819 | return Builder.createIntrinsic( |
5086 | 819 | LLVM::Core::X86SSE2PMAddWd, {}, |
5087 | 819 | {V, Builder.createVectorSplat(8, LLContext.getInt16(1))}); |
5088 | 819 | } else { |
5089 | 519 | V = Builder.createXor( |
5090 | 519 | V, Builder.createVectorSplat(8, LLContext.getInt16(0x8000))); |
5091 | 519 | V = Builder.createIntrinsic( |
5092 | 519 | LLVM::Core::X86SSE2PMAddWd, {}, |
5093 | 519 | {V, Builder.createVectorSplat(8, LLContext.getInt16(1))}); |
5094 | 519 | return Builder.createAdd( |
5095 | 519 | V, Builder.createVectorSplat(4, LLContext.getInt32(0x10000))); |
5096 | 519 | } |
5097 | 1.33k | } |
5098 | 0 | #endif |
5099 | | |
5100 | | #if defined(__aarch64__) |
5101 | | if (Context.SupportNEON) { |
5102 | | const auto ID = Signed ? LLVM::Core::AArch64NeonSAddLP |
5103 | | : LLVM::Core::AArch64NeonUAddLP; |
5104 | | assuming(ID != LLVM::Core::NotIntrinsic); |
5105 | | return Builder.createIntrinsic(ID, {ExtTy, VectorTy}, {V}); |
5106 | | } |
5107 | | #endif |
5108 | | |
5109 | | // Fallback case. |
5110 | | // If the XOP, SSSE3, or SSE2 is not supported on the x86_64 platform |
5111 | | // or the NEON is not supported on the aarch64 platform, |
5112 | | // then fall back to this. |
5113 | 0 | auto Width = LLVM::Value::getConstInt( |
5114 | 0 | ExtTy.getElementType(), |
5115 | 0 | VectorTy.getElementType().getIntegerBitWidth()); |
5116 | 0 | Width = Builder.createVectorSplat(ExtTy.getVectorSize(), Width); |
5117 | 0 | auto EV = Builder.createBitCast(V, ExtTy); |
5118 | 0 | LLVM::Value L, R; |
5119 | 0 | if (Signed) { |
5120 | 0 | L = Builder.createAShr(EV, Width); |
5121 | 0 | R = Builder.createAShr(Builder.createShl(EV, Width), Width); |
5122 | 0 | } else { |
5123 | 0 | L = Builder.createLShr(EV, Width); |
5124 | 0 | R = Builder.createLShr(Builder.createShl(EV, Width), Width); |
5125 | 0 | } |
5126 | 0 | return Builder.createAdd(L, R); |
5127 | 1.33k | }); |
5128 | 2.04k | } |
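The SSE2 branch has no unsigned multiply-add, so the unsigned u16x8 case biases each lane with XOR 0x8000 (subtracting 32768 in signed terms), runs the signed PMADDWD against a vector of ones, and adds 0x10000 per i32 lane to undo the two biases. A scalar sketch of that identity next to the plain pairwise sum (hypothetical helper names, illustration of the arithmetic only):

#include <cstdint>

uint32_t extadd_pairwise_u16(uint16_t A, uint16_t B) {
  // Reference: plain pairwise widening add.
  return static_cast<uint32_t>(A) + B;
}

uint32_t extadd_pairwise_u16_biased(uint16_t A, uint16_t B) {
  // XOR 0x8000 reinterprets each unsigned lane as (lane - 32768) in signed
  // terms; summing two biased lanes and adding back 2 * 32768 restores A + B.
  const int32_t SA = static_cast<int16_t>(A ^ 0x8000u);
  const int32_t SB = static_cast<int16_t>(B ^ 0x8000u);
  return static_cast<uint32_t>(SA + SB + 0x10000);
}
// Both return 131070 for A == B == 0xFFFF and 0 for A == B == 0.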
5129 | 506 | void compileVectorFAbs(LLVM::Type VectorTy) noexcept { |
5130 | 506 | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5131 | 506 | assuming(LLVM::Core::Fabs != LLVM::Core::NotIntrinsic); |
5132 | 506 | return Builder.createUnaryIntrinsic(LLVM::Core::Fabs, V); |
5133 | 506 | }); |
5134 | 506 | } |
5135 | 945 | void compileVectorFNeg(LLVM::Type VectorTy) noexcept { |
5136 | 945 | compileVectorOp(VectorTy, |
5137 | 945 | [this](auto V) noexcept { return Builder.createFNeg(V); }); |
5138 | 945 | } |
5139 | 315 | void compileVectorFSqrt(LLVM::Type VectorTy) noexcept { |
5140 | 315 | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5141 | 315 | assuming(LLVM::Core::Sqrt != LLVM::Core::NotIntrinsic); |
5142 | 315 | return Builder.createUnaryIntrinsic(LLVM::Core::Sqrt, V); |
5143 | 315 | }); |
5144 | 315 | } |
5145 | 1.66k | void compileVectorFCeil(LLVM::Type VectorTy) noexcept { |
5146 | 1.66k | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5147 | 1.66k | assuming(LLVM::Core::Ceil != LLVM::Core::NotIntrinsic); |
5148 | 1.66k | return Builder.createUnaryIntrinsic(LLVM::Core::Ceil, V); |
5149 | 1.66k | }); |
5150 | 1.66k | } |
5151 | 2.69k | void compileVectorFFloor(LLVM::Type VectorTy) noexcept { |
5152 | 2.69k | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5153 | 2.69k | assuming(LLVM::Core::Floor != LLVM::Core::NotIntrinsic); |
5154 | 2.69k | return Builder.createUnaryIntrinsic(LLVM::Core::Floor, V); |
5155 | 2.69k | }); |
5156 | 2.69k | } |
5157 | 2.06k | void compileVectorFTrunc(LLVM::Type VectorTy) noexcept { |
5158 | 2.06k | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5159 | 2.06k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
5160 | 2.06k | return Builder.createUnaryIntrinsic(LLVM::Core::Trunc, V); |
5161 | 2.06k | }); |
5162 | 2.06k | } |
5163 | 431 | void compileVectorFNearest(LLVM::Type VectorTy) noexcept { |
5164 | 431 | compileVectorOp(VectorTy, [&](auto V) noexcept { |
5165 | 431 | #if LLVM_VERSION_MAJOR >= 12 |
5166 | 431 | assuming(LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic); |
5167 | 431 | if (LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic) { |
5168 | 431 | return Builder.createUnaryIntrinsic(LLVM::Core::Roundeven, V); |
5169 | 431 | } |
5170 | 0 | #endif |
5171 | | |
5172 | 0 | #if defined(__x86_64__) |
5173 | 0 | if (Context.SupportSSE4_1) { |
5174 | 0 | const bool IsFloat = VectorTy.getElementType().isFloatTy(); |
5175 | 0 | auto ID = |
5176 | 0 | IsFloat ? LLVM::Core::X86SSE41RoundPs : LLVM::Core::X86SSE41RoundPd; |
5177 | 0 | assuming(ID != LLVM::Core::NotIntrinsic); |
5178 | 0 | return Builder.createIntrinsic(ID, {}, {V, LLContext.getInt32(8)}); |
5179 | 0 | } |
5180 | 0 | #endif |
5181 | | |
5182 | | #if defined(__aarch64__) |
5183 | | if (Context.SupportNEON && |
5184 | | LLVM::Core::AArch64NeonFRIntN != LLVM::Core::NotIntrinsic) { |
5185 | | return Builder.createUnaryIntrinsic(LLVM::Core::AArch64NeonFRIntN, V); |
5186 | | } |
5187 | | #endif |
5188 | | |
5189 | | // Fallback case. |
5190 | | // If the SSE4.1 is not supported on the x86_64 platform or |
5191 | | // the NEON is not supported on the aarch64 platform, |
5192 | | // then fall back to this. |
5193 | 0 | assuming(LLVM::Core::Nearbyint != LLVM::Core::NotIntrinsic); |
5194 | 0 | return Builder.createUnaryIntrinsic(LLVM::Core::Nearbyint, V); |
5195 | 0 | }); |
5196 | 431 | } |
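The nearest operation rounds to the nearest integer with ties to even, which is what roundeven, the SSE4.1 round with mode 8, and the nearbyint fallback all provide. A scalar illustration (assuming the default rounding mode):

#include <cmath>

// std::nearbyint follows the current floating-point rounding mode, which
// defaults to round-to-nearest-even, the behaviour the nearest op requires.
double nearest(double V) { return std::nearbyint(V); }
// Under the default mode: nearest(2.5) == 2.0 and nearest(3.5) == 4.0.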
5197 | 177 | void compileVectorVectorFAdd(LLVM::Type VectorTy) noexcept { |
5198 | 177 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5199 | 177 | return Builder.createFAdd(LHS, RHS); |
5200 | 177 | }); |
5201 | 177 | } |
5202 | 471 | void compileVectorVectorFSub(LLVM::Type VectorTy) noexcept { |
5203 | 471 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5204 | 471 | return Builder.createFSub(LHS, RHS); |
5205 | 471 | }); |
5206 | 471 | } |
5207 | 201 | void compileVectorVectorFMul(LLVM::Type VectorTy) noexcept { |
5208 | 201 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5209 | 201 | return Builder.createFMul(LHS, RHS); |
5210 | 201 | }); |
5211 | 201 | } |
5212 | 230 | void compileVectorVectorFDiv(LLVM::Type VectorTy) noexcept { |
5213 | 230 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5214 | 230 | return Builder.createFDiv(LHS, RHS); |
5215 | 230 | }); |
5216 | 230 | } |
5217 | 303 | void compileVectorVectorFMin(LLVM::Type VectorTy) noexcept { |
5218 | 303 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5219 | 303 | auto LNaN = Builder.createFCmpUNO(LHS, LHS); |
5220 | 303 | auto RNaN = Builder.createFCmpUNO(RHS, RHS); |
5221 | 303 | auto OLT = Builder.createFCmpOLT(LHS, RHS); |
5222 | 303 | auto OGT = Builder.createFCmpOGT(LHS, RHS); |
5223 | 303 | auto Ret = Builder.createBitCast( |
5224 | 303 | Builder.createOr(Builder.createBitCast(LHS, Context.Int64x2Ty), |
5225 | 303 | Builder.createBitCast(RHS, Context.Int64x2Ty)), |
5226 | 303 | LHS.getType()); |
5227 | 303 | Ret = Builder.createSelect(OGT, RHS, Ret); |
5228 | 303 | Ret = Builder.createSelect(OLT, LHS, Ret); |
5229 | 303 | Ret = Builder.createSelect(RNaN, RHS, Ret); |
5230 | 303 | Ret = Builder.createSelect(LNaN, LHS, Ret); |
5231 | 303 | return Ret; |
5232 | 303 | }); |
5233 | 303 | } |
5234 | 212 | void compileVectorVectorFMax(LLVM::Type VectorTy) noexcept { |
5235 | 212 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5236 | 212 | auto LNaN = Builder.createFCmpUNO(LHS, LHS); |
5237 | 212 | auto RNaN = Builder.createFCmpUNO(RHS, RHS); |
5238 | 212 | auto OLT = Builder.createFCmpOLT(LHS, RHS); |
5239 | 212 | auto OGT = Builder.createFCmpOGT(LHS, RHS); |
5240 | 212 | auto Ret = Builder.createBitCast( |
5241 | 212 | Builder.createAnd(Builder.createBitCast(LHS, Context.Int64x2Ty), |
5242 | 212 | Builder.createBitCast(RHS, Context.Int64x2Ty)), |
5243 | 212 | LHS.getType()); |
5244 | 212 | Ret = Builder.createSelect(OLT, RHS, Ret); |
5245 | 212 | Ret = Builder.createSelect(OGT, LHS, Ret); |
5246 | 212 | Ret = Builder.createSelect(RNaN, RHS, Ret); |
5247 | 212 | Ret = Builder.createSelect(LNaN, LHS, Ret); |
5248 | 212 | return Ret; |
5249 | 212 | }); |
5250 | 212 | } |
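These min/max lowerings follow WebAssembly semantics: a NaN operand propagates, and a tie between +0 and -0 resolves to -0 for min (the bitwise OR of the operands) and to +0 for max (the bitwise AND). A scalar illustration of the min case (hypothetical helper name, not the emitted IR):

#include <cmath>

double wasm_fmin(double L, double R) {
  if (std::isnan(L)) return L;
  if (std::isnan(R)) return R;
  if (L < R) return L;
  if (R < L) return R;
  return std::signbit(L) ? L : R; // equal operands: keep the negative zero
}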
5251 | 294 | void compileVectorVectorFPMin(LLVM::Type VectorTy) noexcept { |
5252 | 294 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5253 | 294 | auto Cmp = Builder.createFCmpOLT(RHS, LHS); |
5254 | 294 | return Builder.createSelect(Cmp, RHS, LHS); |
5255 | 294 | }); |
5256 | 294 | } |
5257 | 280 | void compileVectorVectorFPMax(LLVM::Type VectorTy) noexcept { |
5258 | 280 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5259 | 280 | auto Cmp = Builder.createFCmpOGT(RHS, LHS); |
5260 | 280 | return Builder.createSelect(Cmp, RHS, LHS); |
5261 | 280 | }); |
5262 | 280 | } |
5263 | 960 | void compileVectorTruncSatS32(LLVM::Type VectorTy, bool PadZero) noexcept { |
5264 | 960 | compileVectorOp(VectorTy, [this, VectorTy, PadZero](auto V) noexcept { |
5265 | 960 | const auto Size = VectorTy.getVectorSize(); |
5266 | 960 | auto FPTy = VectorTy.getElementType(); |
5267 | 960 | auto IntMin = LLContext.getInt32( |
5268 | 960 | static_cast<uint32_t>(std::numeric_limits<int32_t>::min())); |
5269 | 960 | auto IntMax = LLContext.getInt32( |
5270 | 960 | static_cast<uint32_t>(std::numeric_limits<int32_t>::max())); |
5271 | 960 | auto IntMinV = Builder.createVectorSplat(Size, IntMin); |
5272 | 960 | auto IntMaxV = Builder.createVectorSplat(Size, IntMax); |
5273 | 960 | auto IntZeroV = LLVM::Value::getConstNull(IntMinV.getType()); |
5274 | 960 | auto FPMin = Builder.createSIToFP(IntMin, FPTy); |
5275 | 960 | auto FPMax = Builder.createSIToFP(IntMax, FPTy); |
5276 | 960 | auto FPMinV = Builder.createVectorSplat(Size, FPMin); |
5277 | 960 | auto FPMaxV = Builder.createVectorSplat(Size, FPMax); |
5278 | | |
5279 | 960 | auto Normal = Builder.createFCmpORD(V, V); |
5280 | 960 | auto NotUnder = Builder.createFCmpUGE(V, FPMinV); |
5281 | 960 | auto NotOver = Builder.createFCmpULT(V, FPMaxV); |
5282 | 960 | V = Builder.createFPToSI( |
5283 | 960 | V, LLVM::Type::getVectorType(LLContext.getInt32Ty(), Size)); |
5284 | 960 | V = Builder.createSelect(Normal, V, IntZeroV); |
5285 | 960 | V = Builder.createSelect(NotUnder, V, IntMinV); |
5286 | 960 | V = Builder.createSelect(NotOver, V, IntMaxV); |
5287 | 960 | if (PadZero) { |
5288 | 755 | std::vector<uint32_t> Mask(Size * 2); |
5289 | 755 | std::iota(Mask.begin(), Mask.end(), 0); |
5290 | 755 | V = Builder.createShuffleVector( |
5291 | 755 | V, IntZeroV, LLVM::Value::getConstVector32(LLContext, Mask)); |
5292 | 755 | } |
5293 | 960 | return V; |
5294 | 960 | }); |
5295 | 960 | } |
5296 | 5.87k | void compileVectorTruncSatU32(LLVM::Type VectorTy, bool PadZero) noexcept { |
5297 | 5.87k | compileVectorOp(VectorTy, [this, VectorTy, PadZero](auto V) noexcept { |
5298 | 5.87k | const auto Size = VectorTy.getVectorSize(); |
5299 | 5.87k | auto FPTy = VectorTy.getElementType(); |
5300 | 5.87k | auto IntMin = LLContext.getInt32(std::numeric_limits<uint32_t>::min()); |
5301 | 5.87k | auto IntMax = LLContext.getInt32(std::numeric_limits<uint32_t>::max()); |
5302 | 5.87k | auto IntMinV = Builder.createVectorSplat(Size, IntMin); |
5303 | 5.87k | auto IntMaxV = Builder.createVectorSplat(Size, IntMax); |
5304 | 5.87k | auto FPMin = Builder.createUIToFP(IntMin, FPTy); |
5305 | 5.87k | auto FPMax = Builder.createUIToFP(IntMax, FPTy); |
5306 | 5.87k | auto FPMinV = Builder.createVectorSplat(Size, FPMin); |
5307 | 5.87k | auto FPMaxV = Builder.createVectorSplat(Size, FPMax); |
5308 | | |
5309 | 5.87k | auto NotUnder = Builder.createFCmpOGE(V, FPMinV); |
5310 | 5.87k | auto NotOver = Builder.createFCmpULT(V, FPMaxV); |
5311 | 5.87k | V = Builder.createFPToUI( |
5312 | 5.87k | V, LLVM::Type::getVectorType(LLContext.getInt32Ty(), Size)); |
5313 | 5.87k | V = Builder.createSelect(NotUnder, V, IntMinV); |
5314 | 5.87k | V = Builder.createSelect(NotOver, V, IntMaxV); |
5315 | 5.87k | if (PadZero) { |
5316 | 2.14k | auto IntZeroV = LLVM::Value::getConstNull(IntMinV.getType()); |
5317 | 2.14k | std::vector<uint32_t> Mask(Size * 2); |
5318 | 2.14k | std::iota(Mask.begin(), Mask.end(), 0); |
5319 | 2.14k | V = Builder.createShuffleVector( |
5320 | 2.14k | V, IntZeroV, LLVM::Value::getConstVector32(LLContext, Mask)); |
5321 | 2.14k | } |
5322 | 5.87k | return V; |
5323 | 5.87k | }); |
5324 | 5.87k | } |
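Both saturating truncations replace the trapping conversion with clamping: NaN becomes zero, values below the range clamp to the minimum, and values at or above the range clamp to the maximum, exactly the cases the select chain encodes. A scalar sketch of the unsigned 32-bit case (hypothetical helper name):

#include <cmath>
#include <cstdint>

uint32_t trunc_sat_u32(double V) {
  if (std::isnan(V) || V < 0.0) return 0; // NaN and underflow map to 0
  if (V >= 4294967296.0) return UINT32_MAX; // overflow clamps to the maximum
  return static_cast<uint32_t>(V);
}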
5325 | | void compileVectorConvertS(LLVM::Type VectorTy, LLVM::Type FPVectorTy, |
5326 | 670 | bool Low) noexcept { |
5327 | 670 | compileVectorOp(VectorTy, |
5328 | 670 | [this, VectorTy, FPVectorTy, Low](auto V) noexcept { |
5329 | 670 | if (Low) { |
5330 | 335 | const auto Size = VectorTy.getVectorSize() / 2; |
5331 | 335 | std::vector<uint32_t> Mask(Size); |
5332 | 335 | std::iota(Mask.begin(), Mask.end(), 0); |
5333 | 335 | V = Builder.createShuffleVector( |
5334 | 335 | V, LLVM::Value::getUndef(VectorTy), |
5335 | 335 | LLVM::Value::getConstVector32(LLContext, Mask)); |
5336 | 335 | } |
5337 | 670 | return Builder.createSIToFP(V, FPVectorTy); |
5338 | 670 | }); |
5339 | 670 | } |
5340 | | void compileVectorConvertU(LLVM::Type VectorTy, LLVM::Type FPVectorTy, |
5341 | 1.98k | bool Low) noexcept { |
5342 | 1.98k | compileVectorOp(VectorTy, |
5343 | 1.98k | [this, VectorTy, FPVectorTy, Low](auto V) noexcept { |
5344 | 1.98k | if (Low) { |
5345 | 1.26k | const auto Size = VectorTy.getVectorSize() / 2; |
5346 | 1.26k | std::vector<uint32_t> Mask(Size); |
5347 | 1.26k | std::iota(Mask.begin(), Mask.end(), 0); |
5348 | 1.26k | V = Builder.createShuffleVector( |
5349 | 1.26k | V, LLVM::Value::getUndef(VectorTy), |
5350 | 1.26k | LLVM::Value::getConstVector32(LLContext, Mask)); |
5351 | 1.26k | } |
5352 | 1.98k | return Builder.createUIToFP(V, FPVectorTy); |
5353 | 1.98k | }); |
5354 | 1.98k | } |
5355 | 734 | void compileVectorDemote() noexcept { |
5356 | 734 | compileVectorOp(Context.Doublex2Ty, [this](auto V) noexcept { |
5357 | 734 | auto Demoted = Builder.createFPTrunc( |
5358 | 734 | V, LLVM::Type::getVectorType(Context.FloatTy, 2)); |
5359 | 734 | auto ZeroV = LLVM::Value::getConstNull(Demoted.getType()); |
5360 | 734 | return Builder.createShuffleVector( |
5361 | 734 | Demoted, ZeroV, |
5362 | 734 | LLVM::Value::getConstVector32(LLContext, {0u, 1u, 2u, 3u})); |
5363 | 734 | }); |
5364 | 734 | } |
5365 | 730 | void compileVectorPromote() noexcept { |
5366 | 730 | compileVectorOp(Context.Floatx4Ty, [this](auto V) noexcept { |
5367 | 730 | auto UndefV = LLVM::Value::getUndef(V.getType()); |
5368 | 730 | auto Low = Builder.createShuffleVector( |
5369 | 730 | V, UndefV, LLVM::Value::getConstVector32(LLContext, {0u, 1u})); |
5370 | 730 | return Builder.createFPExt( |
5371 | 730 | Low, LLVM::Type::getVectorType(Context.DoubleTy, 2)); |
5372 | 730 | }); |
5373 | 730 | } |
5374 | | |
5375 | 0 | void compileVectorVectorMAdd(LLVM::Type VectorTy) noexcept { |
5376 | 0 | auto C = Builder.createBitCast(stackPop(), VectorTy); |
5377 | 0 | auto RHS = Builder.createBitCast(stackPop(), VectorTy); |
5378 | 0 | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
5379 | 0 | stackPush(Builder.createBitCast( |
5380 | 0 | Builder.createFAdd(Builder.createFMul(LHS, RHS), C), |
5381 | 0 | Context.Int64x2Ty)); |
5382 | 0 | } |
5383 | | |
5384 | 0 | void compileVectorVectorNMAdd(LLVM::Type VectorTy) noexcept { |
5385 | 0 | auto C = Builder.createBitCast(stackPop(), VectorTy); |
5386 | 0 | auto RHS = Builder.createBitCast(stackPop(), VectorTy); |
5387 | 0 | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
5388 | 0 | stackPush(Builder.createBitCast( |
5389 | 0 | Builder.createFAdd(Builder.createFMul(Builder.createFNeg(LHS), RHS), C), |
5390 | 0 | Context.Int64x2Ty)); |
5391 | 0 | } |
5392 | | |
5393 | 0 | void compileVectorRelaxedIntegerDotProduct() noexcept { |
5394 | 0 | auto OriTy = Context.Int8x16Ty; |
5395 | 0 | auto ExtTy = Context.Int16x8Ty; |
5396 | 0 | auto RHS = Builder.createBitCast(stackPop(), OriTy); |
5397 | 0 | auto LHS = Builder.createBitCast(stackPop(), OriTy); |
5398 | 0 | #if defined(__x86_64__) |
5399 | 0 | if (Context.SupportSSSE3) { |
5400 | 0 | assuming(LLVM::Core::X86SSSE3PMAddUbSw128 != LLVM::Core::NotIntrinsic); |
5401 | | // WebAssembly Relaxed SIMD spec: signed(LHS) * unsigned/signed(RHS) |
5402 | | // But PMAddUbSw128 is unsigned(LHS) * signed(RHS). Therefore swap both |
5403 | | // sides to match the WebAssembly spec. |
5404 | 0 | return stackPush(Builder.createBitCast( |
5405 | 0 | Builder.createIntrinsic(LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5406 | 0 | {RHS, LHS}), |
5407 | 0 | Context.Int64x2Ty)); |
5408 | 0 | } |
5409 | 0 | #endif |
5410 | 0 | auto Width = LLVM::Value::getConstInt( |
5411 | 0 | ExtTy.getElementType(), OriTy.getElementType().getIntegerBitWidth()); |
5412 | 0 | Width = Builder.createVectorSplat(ExtTy.getVectorSize(), Width); |
5413 | 0 | auto EA = Builder.createBitCast(LHS, ExtTy); |
5414 | 0 | auto EB = Builder.createBitCast(RHS, ExtTy); |
5415 | | |
5416 | 0 | LLVM::Value AL, AR, BL, BR; |
5417 | 0 | AL = Builder.createAShr(EA, Width); |
5418 | 0 | AR = Builder.createAShr(Builder.createShl(EA, Width), Width); |
5419 | 0 | BL = Builder.createAShr(EB, Width); |
5420 | 0 | BR = Builder.createAShr(Builder.createShl(EB, Width), Width); |
5421 | | |
5422 | 0 | return stackPush(Builder.createBitCast( |
5423 | 0 | Builder.createAdd(Builder.createMul(AL, BL), Builder.createMul(AR, BR)), |
5424 | 0 | Context.Int64x2Ty)); |
5425 | 0 | } |
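The generic path splits every 16-bit lane of the bitcast operands back into its two signed 8-bit halves with shift pairs, multiplies the halves, and adds them with ordinary wrapping arithmetic. One output lane, as a scalar sketch of the signed-times-signed fallback (hypothetical helper name):

#include <cstdint>

int16_t relaxed_dot_lane(int8_t A0, int8_t A1, int8_t B0, int8_t B1) {
  // Each 16-bit output lane is the sum of two adjacent 8-bit products; the
  // addition wraps modulo 2^16, as the plain IR add above does.
  const int Sum = A0 * B0 + A1 * B1;
  return static_cast<int16_t>(static_cast<uint16_t>(Sum));
}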
5426 | | |
5427 | 0 | void compileVectorRelaxedIntegerDotProductAdd() noexcept { |
5428 | 0 | auto OriTy = Context.Int8x16Ty; |
5429 | 0 | auto ExtTy = Context.Int16x8Ty; |
5430 | 0 | auto FinTy = Context.Int32x4Ty; |
5431 | 0 | auto VC = Builder.createBitCast(stackPop(), FinTy); |
5432 | 0 | auto RHS = Builder.createBitCast(stackPop(), OriTy); |
5433 | 0 | auto LHS = Builder.createBitCast(stackPop(), OriTy); |
5434 | 0 | LLVM::Value IM; |
5435 | 0 | #if defined(__x86_64__) |
5436 | 0 | if (Context.SupportSSSE3) { |
5437 | 0 | assuming(LLVM::Core::X86SSSE3PMAddUbSw128 != LLVM::Core::NotIntrinsic); |
5438 | | // WebAssembly Relaxed SIMD spec: signed(LHS) * unsigned/signed(RHS) |
5439 | | // But PMAddUbSw128 is unsigned(LHS) * signed(RHS). Therefore swap both |
5440 | | // sides to match the WebAssembly spec. |
5441 | 0 | IM = Builder.createIntrinsic(LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5442 | 0 | {RHS, LHS}); |
5443 | 0 | } else |
5444 | 0 | #endif |
5445 | 0 | { |
5446 | 0 | auto Width = LLVM::Value::getConstInt( |
5447 | 0 | ExtTy.getElementType(), OriTy.getElementType().getIntegerBitWidth()); |
5448 | 0 | Width = Builder.createVectorSplat(ExtTy.getVectorSize(), Width); |
5449 | 0 | auto EA = Builder.createBitCast(LHS, ExtTy); |
5450 | 0 | auto EB = Builder.createBitCast(RHS, ExtTy); |
5451 | | |
5452 | 0 | LLVM::Value AL, AR, BL, BR; |
5453 | 0 | AL = Builder.createAShr(EA, Width); |
5454 | 0 | AR = Builder.createAShr(Builder.createShl(EA, Width), Width); |
5455 | 0 | BL = Builder.createAShr(EB, Width); |
5456 | 0 | BR = Builder.createAShr(Builder.createShl(EB, Width), Width); |
5457 | 0 | IM = Builder.createAdd(Builder.createMul(AL, BL), |
5458 | 0 | Builder.createMul(AR, BR)); |
5459 | 0 | } |
5460 | | |
5461 | 0 | auto Width = LLVM::Value::getConstInt( |
5462 | 0 | FinTy.getElementType(), ExtTy.getElementType().getIntegerBitWidth()); |
5463 | 0 | Width = Builder.createVectorSplat(FinTy.getVectorSize(), Width); |
5464 | 0 | auto IME = Builder.createBitCast(IM, FinTy); |
5465 | 0 | auto L = Builder.createAShr(IME, Width); |
5466 | 0 | auto R = Builder.createAShr(Builder.createShl(IME, Width), Width); |
5467 | | |
5468 | 0 | return stackPush(Builder.createBitCast( |
5469 | 0 | Builder.createAdd(Builder.createAdd(L, R), VC), Context.Int64x2Ty)); |
5470 | 0 | } |
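The accumulating variant reuses the same i16 intermediate, then widens adjacent pairs into i32 and adds the accumulator lane on top. A scalar sketch of one i32 output lane (hypothetical helper name, illustration only):

#include <array>
#include <cstdint>

int32_t relaxed_dot_add_lane(const std::array<int8_t, 4> &A,
                             const std::array<int8_t, 4> &B, int32_t Acc) {
  // Two i8 products wrap into each i16 intermediate, then adjacent
  // intermediates are widened to i32 and added to the accumulator lane.
  const int16_t Lo = static_cast<int16_t>(A[0] * B[0] + A[1] * B[1]);
  const int16_t Hi = static_cast<int16_t>(A[2] * B[2] + A[3] * B[3]);
  return static_cast<int32_t>(Lo) + Hi + Acc;
}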
5471 | | |
5472 | | void |
5473 | | enterBlock(LLVM::BasicBlock JumpBlock, LLVM::BasicBlock NextBlock, |
5474 | | LLVM::BasicBlock ElseBlock, std::vector<LLVM::Value> Args, |
5475 | | std::pair<std::vector<ValType>, std::vector<ValType>> Type, |
5476 | | std::vector<std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5477 | 21.1k | ReturnPHI = {}) noexcept { |
5478 | 21.1k | assuming(Type.first.size() == Args.size()); |
5479 | 21.1k | for (auto &Value : Args) { |
5480 | 4.27k | stackPush(Value); |
5481 | 4.27k | } |
5482 | 21.1k | const auto Unreachable = isUnreachable(); |
5483 | 21.1k | ControlStack.emplace_back(Stack.size() - Args.size(), Unreachable, |
5484 | 21.1k | JumpBlock, NextBlock, ElseBlock, std::move(Args), |
5485 | 21.1k | std::move(Type), std::move(ReturnPHI)); |
5486 | 21.1k | } |
5487 | | |
5488 | 21.1k | Control leaveBlock() noexcept { |
5489 | 21.1k | Control Entry = std::move(ControlStack.back()); |
5490 | 21.1k | ControlStack.pop_back(); |
5491 | | |
5492 | 21.1k | auto NextBlock = Entry.NextBlock ? Entry.NextBlock : Entry.JumpBlock; |
5493 | 21.1k | if (!Entry.Unreachable) { |
5494 | 13.6k | const auto &ReturnType = Entry.Type.second; |
5495 | 13.6k | if (!ReturnType.empty()) { |
5496 | 10.1k | std::vector<LLVM::Value> Rets(ReturnType.size()); |
5497 | 20.7k | for (size_t I = 0; I < Rets.size(); ++I) { |
5498 | 10.5k | const size_t J = Rets.size() - 1 - I; |
5499 | 10.5k | Rets[J] = stackPop(); |
5500 | 10.5k | } |
5501 | 10.1k | Entry.ReturnPHI.emplace_back(std::move(Rets), Builder.getInsertBlock()); |
5502 | 10.1k | } |
5503 | 13.6k | Builder.createBr(NextBlock); |
5504 | 13.6k | } else { |
5505 | 7.43k | Builder.createUnreachable(); |
5506 | 7.43k | } |
5507 | 21.1k | Builder.positionAtEnd(NextBlock); |
5508 | 21.1k | Stack.erase(Stack.begin() + static_cast<int64_t>(Entry.StackSize), |
5509 | 21.1k | Stack.end()); |
5510 | 21.1k | return Entry; |
5511 | 21.1k | } |
5512 | | |
5513 | 5.41k | void checkStop() noexcept { |
5514 | 5.41k | if (!Interruptible) { |
5515 | 5.41k | return; |
5516 | 5.41k | } |
5517 | 0 | auto NotStopBB = LLVM::BasicBlock::create(LLContext, F.Fn, "NotStop"); |
5518 | 0 | auto StopToken = Builder.createAtomicRMW( |
5519 | 0 | LLVMAtomicRMWBinOpXchg, Context.getStopToken(Builder, ExecCtx), |
5520 | 0 | LLContext.getInt32(0), LLVMAtomicOrderingMonotonic); |
5521 | | #if LLVM_VERSION_MAJOR >= 13 |
5522 | | StopToken.setAlignment(32); |
5523 | | #endif |
5524 | 0 | auto NotStop = Builder.createLikely( |
5525 | 0 | Builder.createICmpEQ(StopToken, LLContext.getInt32(0))); |
5526 | 0 | Builder.createCondBr(NotStop, NotStopBB, |
5527 | 0 | getTrapBB(ErrCode::Value::Interrupted)); |
5528 | | |
5529 | 0 | Builder.positionAtEnd(NotStopBB); |
5530 | 0 | } |
5531 | | |
5532 | 5.44k | void setUnreachable() noexcept { |
5533 | 5.44k | if (ControlStack.empty()) { |
5534 | 0 | IsUnreachable = true; |
5535 | 5.44k | } else { |
5536 | 5.44k | ControlStack.back().Unreachable = true; |
5537 | 5.44k | } |
5538 | 5.44k | } |
5539 | | |
5540 | 1.40M | bool isUnreachable() const noexcept { |
5541 | 1.40M | if (ControlStack.empty()) { |
5542 | 10.7k | return IsUnreachable; |
5543 | 1.39M | } else { |
5544 | 1.39M | return ControlStack.back().Unreachable; |
5545 | 1.39M | } |
5546 | 1.40M | } |
5547 | | |
5548 | | void |
5549 | | buildPHI(Span<const ValType> RetType, |
5550 | | Span<const std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5551 | 18.6k | Incomings) noexcept { |
5552 | 18.6k | if (isVoidReturn(RetType)) { |
5553 | 5.74k | return; |
5554 | 5.74k | } |
5555 | 12.8k | std::vector<LLVM::Value> Nodes; |
5556 | 12.8k | if (Incomings.size() == 0) { |
5557 | 2.46k | const auto &Types = toLLVMTypeVector(LLContext, RetType); |
5558 | 2.46k | Nodes.reserve(Types.size()); |
5559 | 2.72k | for (LLVM::Type Type : Types) { |
5560 | 2.72k | Nodes.push_back(LLVM::Value::getUndef(Type)); |
5561 | 2.72k | } |
5562 | 10.4k | } else if (Incomings.size() == 1) { |
5563 | 9.42k | Nodes = std::move(std::get<0>(Incomings.front())); |
5564 | 9.42k | } else { |
5565 | 1.00k | const auto &Types = toLLVMTypeVector(LLContext, RetType); |
5566 | 1.00k | Nodes.reserve(Types.size()); |
5567 | 2.09k | for (size_t I = 0; I < Types.size(); ++I) { |
5568 | 1.09k | auto PHIRet = Builder.createPHI(Types[I]); |
5569 | 2.92k | for (auto &[Value, BB] : Incomings) { |
5570 | 2.92k | assuming(Value.size() == Types.size()); |
5571 | 2.92k | PHIRet.addIncoming(Value[I], BB); |
5572 | 2.92k | } |
5573 | 1.09k | Nodes.push_back(PHIRet); |
5574 | 1.09k | } |
5575 | 1.00k | } |
5576 | 13.5k | for (auto &Val : Nodes) { |
5577 | 13.5k | stackPush(Val); |
5578 | 13.5k | } |
5579 | 12.8k | } |
5580 | | |
5581 | 38.0k | void setLableJumpPHI(unsigned int Index) noexcept { |
5582 | 38.0k | assuming(Index < ControlStack.size()); |
5583 | 38.0k | auto &Entry = *(ControlStack.rbegin() + Index); |
5584 | 38.0k | if (Entry.NextBlock) { // is loop |
5585 | 2.45k | std::vector<LLVM::Value> Args(Entry.Type.first.size()); |
5586 | 5.05k | for (size_t I = 0; I < Args.size(); ++I) { |
5587 | 2.60k | const size_t J = Args.size() - 1 - I; |
5588 | 2.60k | Args[J] = stackPop(); |
5589 | 2.60k | } |
5590 | 5.05k | for (size_t I = 0; I < Args.size(); ++I) { |
5591 | 2.60k | Entry.Args[I].addIncoming(Args[I], Builder.getInsertBlock()); |
5592 | 2.60k | stackPush(Args[I]); |
5593 | 2.60k | } |
5594 | 35.5k | } else if (!Entry.Type.second.empty()) { // has return value |
5595 | 1.98k | std::vector<LLVM::Value> Rets(Entry.Type.second.size()); |
5596 | 4.08k | for (size_t I = 0; I < Rets.size(); ++I) { |
5597 | 2.10k | const size_t J = Rets.size() - 1 - I; |
5598 | 2.10k | Rets[J] = stackPop(); |
5599 | 2.10k | } |
5600 | 4.08k | for (size_t I = 0; I < Rets.size(); ++I) { |
5601 | 2.10k | stackPush(Rets[I]); |
5602 | 2.10k | } |
5603 | 1.98k | Entry.ReturnPHI.emplace_back(std::move(Rets), Builder.getInsertBlock()); |
5604 | 1.98k | } |
5605 | 38.0k | } |
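
A minimal sketch of how a br instruction with relative label depth N is typically lowered with these helpers; createBr is assumed to be the wrapper's unconditional-branch builder.

  const uint32_t N = /* relative label depth of the br */ 0;
  setLableJumpPHI(N);            // wire current stack values into the target label
  Builder.createBr(getLabel(N)); // jump to the loop header or the block's merge BB
  setUnreachable();              // anything after an unconditional br is dead
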
5606 | | |
5607 | 38.0k | LLVM::BasicBlock getLabel(unsigned int Index) const noexcept { |
5608 | 38.0k | return (ControlStack.rbegin() + Index)->JumpBlock; |
5609 | 38.0k | } |
5610 | | |
5611 | 843k | void stackPush(LLVM::Value Value) noexcept { Stack.push_back(Value); } |
5612 | 319k | LLVM::Value stackPop() noexcept { |
5613 | 319k | assuming(!ControlStack.empty() || !Stack.empty()); |
5614 | 319k | assuming(ControlStack.empty() || |
5615 | 319k | Stack.size() > ControlStack.back().StackSize); |
5616 | 319k | auto Value = Stack.back(); |
5617 | 319k | Stack.pop_back(); |
5618 | 319k | return Value; |
5619 | 319k | } |
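
A minimal sketch of how the virtual value stack is used when lowering a binary operator such as i32.add; createAdd is assumed to exist on the builder wrapper.

  auto RHS = stackPop();                  // operands come off in reverse order
  auto LHS = stackPop();
  stackPush(Builder.createAdd(LHS, RHS)); // result stays on the virtual stack
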
5620 | | |
5621 | | LLVM::Compiler::CompileContext &Context; |
5622 | | LLVM::Context LLContext; |
5623 | | std::vector<std::pair<LLVM::Type, LLVM::Value>> Local; |
5624 | | std::vector<LLVM::Value> Stack; |
5625 | | LLVM::Value LocalInstrCount = nullptr; |
5626 | | LLVM::Value LocalGas = nullptr; |
5627 | | std::unordered_map<ErrCode::Value, LLVM::BasicBlock> TrapBB; |
5628 | | bool IsUnreachable = false; |
5629 | | bool Interruptible = false; |
5630 | | struct Control { |
5631 | | size_t StackSize; |
5632 | | bool Unreachable; |
5633 | | LLVM::BasicBlock JumpBlock; |
5634 | | LLVM::BasicBlock NextBlock; |
5635 | | LLVM::BasicBlock ElseBlock; |
5636 | | std::vector<LLVM::Value> Args; |
5637 | | std::pair<std::vector<ValType>, std::vector<ValType>> Type; |
5638 | | std::vector<std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5639 | | ReturnPHI; |
5640 | | Control(size_t S, bool U, LLVM::BasicBlock J, LLVM::BasicBlock N, |
5641 | | LLVM::BasicBlock E, std::vector<LLVM::Value> A, |
5642 | | std::pair<std::vector<ValType>, std::vector<ValType>> T, |
5643 | | std::vector<std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5644 | | R) noexcept |
5645 | 21.1k | : StackSize(S), Unreachable(U), JumpBlock(J), NextBlock(N), |
5646 | 21.1k | ElseBlock(E), Args(std::move(A)), Type(std::move(T)), |
5647 | 21.1k | ReturnPHI(std::move(R)) {} |
5648 | | Control(const Control &) = default; |
5649 | 26.4k | Control(Control &&) = default; |
5650 | | Control &operator=(const Control &) = default; |
5651 | 1.10k | Control &operator=(Control &&) = default; |
5652 | | }; |
5653 | | std::vector<Control> ControlStack; |
5654 | | LLVM::FunctionCallee F; |
5655 | | LLVM::Value ExecCtx; |
5656 | | LLVM::Builder Builder; |
5657 | | }; |
5658 | | |
5659 | | std::vector<LLVM::Value> unpackStruct(LLVM::Builder &Builder, |
5660 | 381 | LLVM::Value Struct) noexcept { |
5661 | 381 | const auto N = Struct.getType().getStructNumElements(); |
5662 | 381 | std::vector<LLVM::Value> Ret; |
5663 | 381 | Ret.reserve(N); |
5664 | 1.36k | for (unsigned I = 0; I < N; ++I) { |
5665 | 980 | Ret.push_back(Builder.createExtractValue(Struct, I)); |
5666 | 980 | } |
5667 | 381 | return Ret; |
5668 | 381 | } |
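
A condensed sketch of how unpackStruct is used further down when building wrappers: a multi-value result returned as an LLVM struct is split into scalars and stored into the caller's packed return buffer, kValSize bytes apart.

  auto Ret = Builder.createCall(RawFunc, Args); // e.g. returns {i32, i64}
  auto Rets = unpackStruct(Builder, Ret);       // -> two scalar values
  Builder.createArrayPtrStore(Rets, RawRets, Context->Int8Ty, kValSize);
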
5669 | | |
5670 | | } // namespace |
5671 | | |
5672 | | namespace WasmEdge { |
5673 | | namespace LLVM { |
5674 | | |
5675 | 2.15k | Expect<void> Compiler::checkConfigure() noexcept { |
5676 | 2.15k | if (Conf.hasProposal(Proposal::ExceptionHandling)) { |
5677 | 0 | spdlog::error(ErrCode::Value::InvalidConfigure); |
5678 | 0 | spdlog::error( |
5679 | 0 | " Proposal ExceptionHandling is not yet supported in LLVM backend"); |
5680 | 0 | return Unexpect(ErrCode::Value::InvalidConfigure); |
5681 | 0 | } |
5682 | 2.15k | return {}; |
5683 | 2.15k | } |
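
A minimal usage sketch, assuming the compiler is constructed from a Configure in the usual way and checkConfigure() is reachable from the caller: enabling the exception-handling proposal causes the check to fail before any compilation work starts.

  WasmEdge::Configure Conf;
  Conf.addProposal(WasmEdge::Proposal::ExceptionHandling);
  WasmEdge::LLVM::Compiler AOTCompiler(Conf);
  if (!AOTCompiler.checkConfigure()) {
    // Rejected with ErrCode::Value::InvalidConfigure; no module is compiled.
  }
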
5684 | | |
5685 | 2.15k | Expect<Data> Compiler::compile(const AST::Module &Module) noexcept { |
  5686 |       |   // Check that the module has been validated.
5687 | 2.15k | if (unlikely(!Module.getIsValidated())) { |
5688 | 0 | spdlog::error(ErrCode::Value::NotValidated); |
5689 | 0 | return Unexpect(ErrCode::Value::NotValidated); |
5690 | 0 | } |
5691 | | |
5692 | 2.15k | std::unique_lock Lock(Mutex); |
5693 | 2.15k | spdlog::info("compile start"sv); |
5694 | | |
5695 | 2.15k | LLVM::Core::init(); |
5696 | | |
5697 | 2.15k | LLVM::Data D; |
5698 | 2.15k | auto LLContext = D.extract().LLContext(); |
5699 | 2.15k | auto &LLModule = D.extract().LLModule; |
5700 | 2.15k | LLModule.setTarget(LLVM::getDefaultTargetTriple().unwrap()); |
5701 | 2.15k | LLModule.addFlag(LLVMModuleFlagBehaviorError, "PIC Level"sv, 2); |
5702 | | |
5703 | 2.15k | CompileContext NewContext(LLContext, LLModule, |
5704 | 2.15k | Conf.getCompilerConfigure().isGenericBinary()); |
5705 | 2.15k | struct RAIICleanup { |
5706 | 2.15k | RAIICleanup(CompileContext *&Context, CompileContext &NewContext) |
5707 | 2.15k | : Context(Context) { |
5708 | 2.15k | Context = &NewContext; |
5709 | 2.15k | } |
5710 | 2.15k | ~RAIICleanup() { Context = nullptr; } |
5711 | 2.15k | CompileContext *&Context; |
5712 | 2.15k | }; |
5713 | 2.15k | RAIICleanup Cleanup(Context, NewContext); |
5714 | | |
5715 | | // Compile Function Types |
5716 | 2.15k | compile(Module.getTypeSection()); |
5717 | | // Compile ImportSection |
5718 | 2.15k | compile(Module.getImportSection()); |
5719 | | // Compile GlobalSection |
5720 | 2.15k | compile(Module.getGlobalSection()); |
5721 | | // Compile MemorySection (MemorySec, DataSec) |
5722 | 2.15k | compile(Module.getMemorySection(), Module.getDataSection()); |
5723 | | // Compile TableSection (TableSec, ElemSec) |
5724 | 2.15k | compile(Module.getTableSection(), Module.getElementSection()); |
  5725 |       |   // Compile Functions in the module. (FunctionSec, CodeSec)
5726 | 2.15k | compile(Module.getFunctionSection(), Module.getCodeSection()); |
5727 | | // Compile ExportSection |
5728 | 2.15k | compile(Module.getExportSection()); |
  5729 |       |   // The StartSection does not need to be compiled.
5730 | | |
5731 | 2.15k | spdlog::info("verify start"sv); |
5732 | 2.15k | LLModule.verify(LLVMPrintMessageAction); |
5733 | | |
5734 | 2.15k | spdlog::info("optimize start"sv); |
5735 | 2.15k | auto &TM = D.extract().TM; |
5736 | 2.15k | { |
5737 | 2.15k | auto Triple = LLModule.getTarget(); |
5738 | 2.15k | auto [TheTarget, ErrorMessage] = LLVM::Target::getFromTriple(Triple); |
5739 | 2.15k | if (ErrorMessage) { |
5740 | 0 | spdlog::error("getFromTriple failed:{}"sv, ErrorMessage.string_view()); |
5741 | 0 | return Unexpect(ErrCode::Value::IllegalPath); |
5742 | 2.15k | } else { |
5743 | 2.15k | std::string CPUName; |
5744 | | #if defined(__riscv) && __riscv_xlen == 64 |
5745 | | CPUName = "generic-rv64"s; |
5746 | | #else |
5747 | 2.15k | if (!Conf.getCompilerConfigure().isGenericBinary()) { |
5748 | 2.15k | CPUName = LLVM::getHostCPUName().string_view(); |
5749 | 2.15k | } else { |
5750 | 0 | CPUName = "generic"s; |
5751 | 0 | } |
5752 | 2.15k | #endif |
5753 | | |
5754 | 2.15k | TM = LLVM::TargetMachine::create( |
5755 | 2.15k | TheTarget, Triple, CPUName.c_str(), |
5756 | 2.15k | LLVM::getHostCPUFeatures().unwrap(), |
5757 | 2.15k | toLLVMCodeGenLevel( |
5758 | 2.15k | Conf.getCompilerConfigure().getOptimizationLevel()), |
5759 | 2.15k | LLVMRelocPIC, LLVMCodeModelDefault); |
5760 | 2.15k | } |
5761 | | |
5762 | | #if LLVM_VERSION_MAJOR >= 13 |
5763 | | auto PBO = LLVM::PassBuilderOptions::create(); |
5764 | | if (auto Error = PBO.runPasses( |
5765 | | LLModule, |
5766 | | toLLVMLevel(Conf.getCompilerConfigure().getOptimizationLevel()), |
5767 | | TM)) { |
5768 | | spdlog::error("{}"sv, Error.message().string_view()); |
5769 | | } |
5770 | | #else |
5771 | 2.15k | auto FP = LLVM::PassManager::createForModule(LLModule); |
5772 | 2.15k | auto MP = LLVM::PassManager::create(); |
5773 | | |
5774 | 2.15k | TM.addAnalysisPasses(MP); |
5775 | 2.15k | TM.addAnalysisPasses(FP); |
5776 | 2.15k | { |
5777 | 2.15k | auto PMB = LLVM::PassManagerBuilder::create(); |
5778 | 2.15k | auto [OptLevel, SizeLevel] = |
5779 | 2.15k | toLLVMLevel(Conf.getCompilerConfigure().getOptimizationLevel()); |
5780 | 2.15k | PMB.setOptLevel(OptLevel); |
5781 | 2.15k | PMB.setSizeLevel(SizeLevel); |
5782 | 2.15k | PMB.populateFunctionPassManager(FP); |
5783 | 2.15k | PMB.populateModulePassManager(MP); |
5784 | 2.15k | } |
5785 | 2.15k | switch (Conf.getCompilerConfigure().getOptimizationLevel()) { |
5786 | 0 | case CompilerConfigure::OptimizationLevel::O0: |
5787 | 0 | case CompilerConfigure::OptimizationLevel::O1: |
5788 | 0 | FP.addTailCallEliminationPass(); |
5789 | 0 | break; |
5790 | 2.15k | default: |
5791 | 2.15k | break; |
5792 | 2.15k | } |
5793 | | |
5794 | 2.15k | FP.initializeFunctionPassManager(); |
5795 | 23.3k | for (auto Fn = LLModule.getFirstFunction(); Fn; Fn = Fn.getNextFunction()) { |
5796 | 21.2k | FP.runFunctionPassManager(Fn); |
5797 | 21.2k | } |
5798 | 2.15k | FP.finalizeFunctionPassManager(); |
5799 | 2.15k | MP.runPassManager(LLModule); |
5800 | 2.15k | #endif |
5801 | 2.15k | } |
5802 | | |
5803 | | // Set initializer for constant value |
5804 | 2.15k | if (auto IntrinsicsTable = LLModule.getNamedGlobal("intrinsics")) { |
5805 | 1.24k | IntrinsicsTable.setInitializer( |
5806 | 1.24k | LLVM::Value::getConstNull(IntrinsicsTable.getType())); |
5807 | 1.24k | IntrinsicsTable.setGlobalConstant(false); |
5808 | 1.24k | } else { |
5809 | 911 | auto IntrinsicsTableTy = LLVM::Type::getArrayType( |
5810 | 911 | LLContext.getInt8Ty().getPointerTo(), |
5811 | 911 | static_cast<uint32_t>(Executable::Intrinsics::kIntrinsicMax)); |
5812 | 911 | LLModule.addGlobal( |
5813 | 911 | IntrinsicsTableTy.getPointerTo(), false, LLVMExternalLinkage, |
5814 | 911 | LLVM::Value::getConstNull(IntrinsicsTableTy), "intrinsics"); |
5815 | 911 | } |
5816 | | |
5817 | 2.15k | spdlog::info("optimize done"sv); |
5818 | 2.15k | return Expect<Data>{std::move(D)}; |
5819 | 2.15k | } |
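
A sketch of the surrounding driver flow, under the assumption that the usual Loader and Validator entry points (parseModule and validate) are used; exact signatures may differ, and the returned Data still has to go through object emission afterwards.

  WasmEdge::Configure Conf;
  WasmEdge::Loader::Loader Load(Conf);
  WasmEdge::Validator::Validator Valid(Conf);
  WasmEdge::LLVM::Compiler AOTCompiler(Conf);

  if (auto Module = Load.parseModule("app.wasm")) {      // parse the binary
    if (Valid.validate(**Module)) {                      // validation is required (see the getIsValidated check above)
      if (auto Result = AOTCompiler.compile(**Module)) { // returns Expect<Data>
        // *Result holds the verified and optimized LLVM module.
      }
    }
  }
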
5820 | | |
5821 | 2.15k | void Compiler::compile(const AST::TypeSection &TypeSec) noexcept { |
5822 | 2.15k | auto WrapperTy = |
5823 | 2.15k | LLVM::Type::getFunctionType(Context->VoidTy, |
5824 | 2.15k | {Context->ExecCtxPtrTy, Context->Int8PtrTy, |
5825 | 2.15k | Context->Int8PtrTy, Context->Int8PtrTy}, |
5826 | 2.15k | false); |
5827 | 2.15k | auto SubTypes = TypeSec.getContent(); |
5828 | 2.15k | const auto Size = SubTypes.size(); |
5829 | 2.15k | if (Size == 0) { |
5830 | 106 | return; |
5831 | 106 | } |
5832 | 2.05k | Context->CompositeTypes.reserve(Size); |
5833 | 2.05k | Context->FunctionWrappers.reserve(Size); |
5834 | | |
5835 | | // Iterate and compile types. |
5836 | 6.16k | for (size_t I = 0; I < Size; ++I) { |
5837 | 4.11k | const auto &CompType = SubTypes[I].getCompositeType(); |
5838 | 4.11k | const auto Name = fmt::format("t{}"sv, Context->CompositeTypes.size()); |
5839 | 4.11k | if (CompType.isFunc()) { |
  5840 |       |       // Check whether the function type is unique.
5841 | 4.11k | { |
5842 | 4.11k | bool Unique = true; |
5843 | 14.2k | for (size_t J = 0; J < I; ++J) { |
5844 | 10.2k | if (Context->CompositeTypes[J] && |
5845 | 10.2k | Context->CompositeTypes[J]->isFunc()) { |
5846 | 10.2k | const auto &OldFuncType = Context->CompositeTypes[J]->getFuncType(); |
5847 | 10.2k | if (OldFuncType == CompType.getFuncType()) { |
5848 | 114 | Unique = false; |
5849 | 114 | Context->CompositeTypes.push_back(Context->CompositeTypes[J]); |
5850 | 114 | auto F = Context->FunctionWrappers[J]; |
5851 | 114 | Context->FunctionWrappers.push_back(F); |
5852 | 114 | auto A = Context->LLModule.addAlias(WrapperTy, F, Name.c_str()); |
5853 | 114 | A.setLinkage(LLVMExternalLinkage); |
5854 | 114 | A.setVisibility(LLVMProtectedVisibility); |
5855 | 114 | A.setDSOLocal(true); |
5856 | 114 | A.setDLLStorageClass(LLVMDLLExportStorageClass); |
5857 | 114 | break; |
5858 | 114 | } |
5859 | 10.2k | } |
5860 | 10.2k | } |
5861 | 4.11k | if (!Unique) { |
5862 | 114 | continue; |
5863 | 114 | } |
5864 | 4.11k | } |
5865 | | |
5866 | | // Create Wrapper |
5867 | 3.99k | auto F = Context->LLModule.addFunction(WrapperTy, LLVMExternalLinkage, |
5868 | 3.99k | Name.c_str()); |
5869 | 3.99k | { |
5870 | 3.99k | F.setVisibility(LLVMProtectedVisibility); |
5871 | 3.99k | F.setDSOLocal(true); |
5872 | 3.99k | F.setDLLStorageClass(LLVMDLLExportStorageClass); |
5873 | 3.99k | F.addFnAttr(Context->NoStackArgProbe); |
5874 | 3.99k | F.addFnAttr(Context->StrictFP); |
5875 | 3.99k | F.addFnAttr(Context->UWTable); |
5876 | 3.99k | F.addParamAttr(0, Context->ReadOnly); |
5877 | 3.99k | F.addParamAttr(0, Context->NoAlias); |
5878 | 3.99k | F.addParamAttr(1, Context->NoAlias); |
5879 | 3.99k | F.addParamAttr(2, Context->NoAlias); |
5880 | 3.99k | F.addParamAttr(3, Context->NoAlias); |
5881 | | |
5882 | 3.99k | LLVM::Builder Builder(Context->LLContext); |
5883 | 3.99k | Builder.positionAtEnd( |
5884 | 3.99k | LLVM::BasicBlock::create(Context->LLContext, F, "entry")); |
5885 | | |
5886 | 3.99k | auto FTy = toLLVMType(Context->LLContext, Context->ExecCtxPtrTy, |
5887 | 3.99k | CompType.getFuncType()); |
5888 | 3.99k | auto RTy = FTy.getReturnType(); |
5889 | 3.99k | std::vector<LLVM::Type> FPTy(FTy.getNumParams()); |
5890 | 3.99k | FTy.getParamTypes(FPTy); |
5891 | | |
5892 | 3.99k | const size_t ArgCount = FPTy.size() - 1; |
5893 | 3.99k | auto ExecCtxPtr = F.getFirstParam(); |
5894 | 3.99k | auto RawFunc = LLVM::FunctionCallee{ |
5895 | 3.99k | FTy, Builder.createBitCast(ExecCtxPtr.getNextParam(), |
5896 | 3.99k | FTy.getPointerTo())}; |
5897 | 3.99k | auto RawArgs = ExecCtxPtr.getNextParam().getNextParam(); |
5898 | 3.99k | auto RawRets = RawArgs.getNextParam(); |
5899 | | |
5900 | 3.99k | std::vector<LLVM::Value> Args; |
5901 | 3.99k | Args.reserve(FTy.getNumParams()); |
5902 | 3.99k | Args.push_back(ExecCtxPtr); |
5903 | 8.33k | for (size_t J = 0; J < ArgCount; ++J) { |
5904 | 4.34k | Args.push_back(Builder.createValuePtrLoad( |
5905 | 4.34k | FPTy[J + 1], RawArgs, Context->Int8Ty, J * kValSize)); |
5906 | 4.34k | } |
5907 | | |
5908 | 3.99k | auto Ret = Builder.createCall(RawFunc, Args); |
5909 | 3.99k | if (RTy.isVoidTy()) { |
5910 | | // nothing to do |
5911 | 2.68k | } else if (RTy.isStructTy()) { |
5912 | 290 | auto Rets = unpackStruct(Builder, Ret); |
5913 | 290 | Builder.createArrayPtrStore(Rets, RawRets, Context->Int8Ty, kValSize); |
5914 | 2.39k | } else { |
5915 | 2.39k | Builder.createValuePtrStore(Ret, RawRets, Context->Int8Ty); |
5916 | 2.39k | } |
5917 | 3.99k | Builder.createRetVoid(); |
5918 | 3.99k | } |
  5919 |       |       // Copy the wrapper, parameter, and return lists to the module instance.
5920 | 3.99k | Context->FunctionWrappers.push_back(F); |
5921 | 3.99k | } else { |
  5922 |       |       // Non-function type case: create an empty wrapper.
5923 | 0 | auto F = Context->LLModule.addFunction(WrapperTy, LLVMExternalLinkage, |
5924 | 0 | Name.c_str()); |
5925 | 0 | { |
5926 | 0 | F.setVisibility(LLVMProtectedVisibility); |
5927 | 0 | F.setDSOLocal(true); |
5928 | 0 | F.setDLLStorageClass(LLVMDLLExportStorageClass); |
5929 | 0 | F.addFnAttr(Context->NoStackArgProbe); |
5930 | 0 | F.addFnAttr(Context->StrictFP); |
  5931 |       |       // Get the global type. The external type was checked during validation.
5932 | 0 | F.addParamAttr(0, Context->ReadOnly); |
5933 | 0 | F.addParamAttr(0, Context->NoAlias); |
5934 | 0 | F.addParamAttr(1, Context->NoAlias); |
5935 | 0 | F.addParamAttr(2, Context->NoAlias); |
5936 | 0 | F.addParamAttr(3, Context->NoAlias); |
  5937 |       |
5938 | 0 | LLVM::Builder Builder(Context->LLContext); |
5939 | 0 | Builder.positionAtEnd( |
5940 | 0 | LLVM::BasicBlock::create(Context->LLContext, F, "entry")); |
5941 | 0 | Builder.createRetVoid(); |
5942 | 0 | } |
5943 | 0 | Context->FunctionWrappers.push_back(F); |
5944 | 0 | } |
5945 | 3.99k | Context->CompositeTypes.push_back(&CompType); |
5946 | 3.99k | } |
5947 | 2.05k | } |
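
Conceptually, every wrapper tN emitted above shares the same untyped signature; the sketch below restates it as a C++ declaration, with ExecutionContext standing in for whatever ExecCtxPtrTy points to.

  struct ExecutionContext;           // stand-in for what ExecCtxPtrTy points to
  // One wrapper per unique function type: it loads packed arguments, calls the
  // natively-typed function through the raw pointer, and stores packed results.
  void tN(ExecutionContext *ExecCtx, // always the first argument
          void *RawFunc,             // the natively-typed compiled function
          void *RawArgs,             // packed arguments, kValSize bytes per slot
          void *RawRets);            // packed results, kValSize bytes per slot
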
5948 | | |
5949 | 2.15k | void Compiler::compile(const AST::ImportSection &ImportSec) noexcept { |
5950 | | // Iterate and compile import descriptions. |
5951 | 2.15k | for (const auto &ImpDesc : ImportSec.getContent()) { |
5952 | | // Get data from import description. |
5953 | 334 | const auto &ExtType = ImpDesc.getExternalType(); |
5954 | | |
5955 | | // Add the imports into module instance. |
5956 | 334 | switch (ExtType) { |
5957 | 258 | case ExternalType::Function: // Function type index |
5958 | 258 | { |
5959 | 258 | const auto FuncID = static_cast<uint32_t>(Context->Functions.size()); |
5960 | | // Get the function type index in module. |
5961 | 258 | uint32_t TypeIdx = ImpDesc.getExternalFuncTypeIdx(); |
5962 | 258 | assuming(TypeIdx < Context->CompositeTypes.size()); |
5963 | 258 | assuming(Context->CompositeTypes[TypeIdx]->isFunc()); |
5964 | 258 | const auto &FuncType = Context->CompositeTypes[TypeIdx]->getFuncType(); |
5965 | 258 | auto FTy = |
5966 | 258 | toLLVMType(Context->LLContext, Context->ExecCtxPtrTy, FuncType); |
5967 | 258 | auto RTy = FTy.getReturnType(); |
5968 | 258 | auto F = LLVM::FunctionCallee{ |
5969 | 258 | FTy, |
5970 | 258 | Context->LLModule.addFunction(FTy, LLVMInternalLinkage, |
5971 | 258 | fmt::format("f{}"sv, FuncID).c_str())}; |
5972 | 258 | F.Fn.setDSOLocal(true); |
5973 | 258 | F.Fn.addFnAttr(Context->NoStackArgProbe); |
5974 | 258 | F.Fn.addFnAttr(Context->StrictFP); |
5975 | 258 | F.Fn.addFnAttr(Context->UWTable); |
5976 | 258 | F.Fn.addParamAttr(0, Context->ReadOnly); |
5977 | 258 | F.Fn.addParamAttr(0, Context->NoAlias); |
5978 | | |
5979 | 258 | LLVM::Builder Builder(Context->LLContext); |
5980 | 258 | Builder.positionAtEnd( |
5981 | 258 | LLVM::BasicBlock::create(Context->LLContext, F.Fn, "entry")); |
5982 | | |
5983 | 258 | const auto ArgSize = FuncType.getParamTypes().size(); |
5984 | 258 | const auto RetSize = |
5985 | 258 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
5986 | | |
5987 | 258 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
5988 | 258 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
5989 | | |
5990 | 258 | auto Arg = F.Fn.getFirstParam(); |
5991 | 359 | for (unsigned I = 0; I < ArgSize; ++I) { |
5992 | 101 | Arg = Arg.getNextParam(); |
5993 | 101 | Builder.createValuePtrStore(Arg, Args, Context->Int8Ty, I * kValSize); |
5994 | 101 | } |
5995 | | |
5996 | 258 | Builder.createCall( |
5997 | 258 | Context->getIntrinsic( |
5998 | 258 | Builder, Executable::Intrinsics::kCall, |
5999 | 258 | LLVM::Type::getFunctionType( |
6000 | 258 | Context->VoidTy, |
6001 | 258 | {Context->Int32Ty, Context->Int8PtrTy, Context->Int8PtrTy}, |
6002 | 258 | false)), |
6003 | 258 | {Context->LLContext.getInt32(FuncID), Args, Rets}); |
6004 | | |
6005 | 258 | if (RetSize == 0) { |
6006 | 152 | Builder.createRetVoid(); |
6007 | 152 | } else if (RetSize == 1) { |
6008 | 86 | Builder.createRet( |
6009 | 86 | Builder.createValuePtrLoad(RTy, Rets, Context->Int8Ty)); |
6010 | 86 | } else { |
6011 | 20 | Builder.createAggregateRet(Builder.createArrayPtrLoad( |
6012 | 20 | RetSize, RTy, Rets, Context->Int8Ty, kValSize)); |
6013 | 20 | } |
6014 | | |
6015 | 258 | Context->Functions.emplace_back(TypeIdx, F, nullptr); |
6016 | 258 | break; |
6017 | 258 | } |
6018 | 32 | case ExternalType::Table: // Table type |
6019 | 32 | { |
6020 | | // Nothing to do. |
6021 | 32 | break; |
6022 | 258 | } |
6023 | 5 | case ExternalType::Memory: // Memory type |
6024 | 5 | { |
6025 | | // Nothing to do. |
6026 | 5 | break; |
6027 | 258 | } |
6028 | 39 | case ExternalType::Global: // Global type |
6029 | 39 | { |
6030 | | // Get global type. External type checked in validation. |
6031 | 39 | const auto &GlobType = ImpDesc.getExternalGlobalType(); |
6032 | 39 | const auto &ValType = GlobType.getValType(); |
6033 | 39 | auto Type = toLLVMType(Context->LLContext, ValType); |
6034 | 39 | Context->Globals.push_back(Type); |
6035 | 39 | break; |
6036 | 258 | } |
6037 | 0 | default: |
6038 | 0 | break; |
6039 | 334 | } |
6040 | 334 | } |
6041 | 2.15k | } |
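
The stub built for each imported function can be summed up by the following condensed restatement of the builder calls above; CallTy stands in for the function type passed to getIntrinsic, and the elided loops mirror the per-parameter stores and per-result loads shown in the source.

  // Per imported function: spill native arguments into a packed array, dispatch
  // through the runtime's kCall intrinsic by function index, then reload and
  // return the results from the packed return buffer.
  LLVM::Value Args = Builder.createArray(ArgSize, kValSize);
  LLVM::Value Rets = Builder.createArray(RetSize, kValSize);
  // ... one createValuePtrStore per native parameter into Args ...
  Builder.createCall(
      Context->getIntrinsic(Builder, Executable::Intrinsics::kCall, CallTy),
      {Context->LLContext.getInt32(FuncID), Args, Rets});
  // ... one createValuePtrLoad (or createArrayPtrLoad) from Rets for the return ...
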
6042 | | |
6043 | 2.15k | void Compiler::compile(const AST::ExportSection &) noexcept {} |
6044 | | |
6045 | 2.15k | void Compiler::compile(const AST::GlobalSection &GlobalSec) noexcept { |
6046 | 2.15k | for (const auto &GlobalSeg : GlobalSec.getContent()) { |
6047 | 122 | const auto &ValType = GlobalSeg.getGlobalType().getValType(); |
6048 | 122 | auto Type = toLLVMType(Context->LLContext, ValType); |
6049 | 122 | Context->Globals.push_back(Type); |
6050 | 122 | } |
6051 | 2.15k | } |
6052 | | |
6053 | | void Compiler::compile(const AST::MemorySection &, |
6054 | 2.15k | const AST::DataSection &) noexcept {} |
6055 | | |
6056 | | void Compiler::compile(const AST::TableSection &, |
6057 | 2.15k | const AST::ElementSection &) noexcept {} |
6058 | | |
6059 | | void Compiler::compile(const AST::FunctionSection &FuncSec, |
6060 | 2.15k | const AST::CodeSection &CodeSec) noexcept { |
6061 | 2.15k | const auto &TypeIdxs = FuncSec.getContent(); |
6062 | 2.15k | const auto &CodeSegs = CodeSec.getContent(); |
6063 | 2.15k | if (TypeIdxs.size() == 0 || CodeSegs.size() == 0) { |
6064 | 186 | return; |
6065 | 186 | } |
6066 | | |
6067 | 12.7k | for (size_t I = 0; I < TypeIdxs.size() && I < CodeSegs.size(); ++I) { |
6068 | 10.7k | const auto &TypeIdx = TypeIdxs[I]; |
6069 | 10.7k | const auto &Code = CodeSegs[I]; |
6070 | 10.7k | assuming(TypeIdx < Context->CompositeTypes.size()); |
6071 | 10.7k | assuming(Context->CompositeTypes[TypeIdx]->isFunc()); |
6072 | 10.7k | const auto &FuncType = Context->CompositeTypes[TypeIdx]->getFuncType(); |
6073 | 10.7k | const auto FuncID = Context->Functions.size(); |
6074 | 10.7k | auto FTy = toLLVMType(Context->LLContext, Context->ExecCtxPtrTy, FuncType); |
6075 | 10.7k | LLVM::FunctionCallee F = {FTy, Context->LLModule.addFunction( |
6076 | 10.7k | FTy, LLVMExternalLinkage, |
6077 | 10.7k | fmt::format("f{}"sv, FuncID).c_str())}; |
6078 | 10.7k | F.Fn.setVisibility(LLVMProtectedVisibility); |
6079 | 10.7k | F.Fn.setDSOLocal(true); |
6080 | 10.7k | F.Fn.setDLLStorageClass(LLVMDLLExportStorageClass); |
6081 | 10.7k | F.Fn.addFnAttr(Context->NoStackArgProbe); |
6082 | 10.7k | F.Fn.addFnAttr(Context->StrictFP); |
6083 | 10.7k | F.Fn.addFnAttr(Context->UWTable); |
6084 | 10.7k | F.Fn.addParamAttr(0, Context->ReadOnly); |
6085 | 10.7k | F.Fn.addParamAttr(0, Context->NoAlias); |
6086 | | |
6087 | 10.7k | Context->Functions.emplace_back(TypeIdx, F, &Code); |
6088 | 10.7k | } |
6089 | | |
6090 | 10.8k | for (auto [T, F, Code] : Context->Functions) { |
6091 | 10.8k | if (!Code) { |
6092 | 103 | continue; |
6093 | 103 | } |
6094 | | |
6095 | 10.7k | std::vector<ValType> Locals; |
6096 | 10.7k | for (const auto &Local : Code->getLocals()) { |
6097 | 2.45M | for (unsigned I = 0; I < Local.first; ++I) { |
6098 | 2.45M | Locals.push_back(Local.second); |
6099 | 2.45M | } |
6100 | 1.42k | } |
6101 | 10.7k | FunctionCompiler FC(*Context, F, Locals, |
6102 | 10.7k | Conf.getCompilerConfigure().isInterruptible(), |
6103 | 10.7k | Conf.getStatisticsConfigure().isInstructionCounting(), |
6104 | 10.7k | Conf.getStatisticsConfigure().isCostMeasuring()); |
6105 | 10.7k | auto Type = Context->resolveBlockType(T); |
6106 | 10.7k | FC.compile(*Code, std::move(Type)); |
6107 | 10.7k | F.Fn.eliminateUnreachableBlocks(); |
6108 | 10.7k | } |
6109 | 1.97k | } |
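
A worked example of the locals expansion performed above, under the assumption that ValType can be constructed directly from a TypeCode: a body declaring three i32 locals followed by one f64 local yields four flat entries.

  std::vector<ValType> Locals;
  const std::vector<std::pair<uint32_t, ValType>> Decls = {
      {3, ValType(TypeCode::I32)}, {1, ValType(TypeCode::F64)}};
  for (const auto &Local : Decls) {
    for (uint32_t I = 0; I < Local.first; ++I) {
      Locals.push_back(Local.second);
    }
  }
  // Locals now holds [i32, i32, i32, f64]; each entry becomes one local slot
  // inside FunctionCompiler.
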
6110 | | |
6111 | | } // namespace LLVM |
6112 | | } // namespace WasmEdge |