/src/WasmEdge/lib/llvm/compiler.cpp
Line | Count | Source |
1 | | // SPDX-License-Identifier: Apache-2.0 |
2 | | // SPDX-FileCopyrightText: 2019-2024 Second State INC |
3 | | |
4 | | #include "llvm/compiler.h" |
5 | | |
6 | | #include "aot/version.h" |
7 | | #include "common/defines.h" |
8 | | #include "common/filesystem.h" |
9 | | #include "common/spdlog.h" |
10 | | #include "data.h" |
11 | | #include "llvm.h" |
12 | | #include "system/allocator.h" |
13 | | |
14 | | #include <algorithm> |
15 | | #include <array> |
16 | | #include <cinttypes> |
17 | | #include <cstdint> |
18 | | #include <cstdlib> |
19 | | #include <limits> |
20 | | #include <memory> |
21 | | #include <numeric> |
22 | | #include <string> |
23 | | #include <string_view> |
24 | | #include <system_error> |
25 | | |
26 | | namespace LLVM = WasmEdge::LLVM; |
27 | | using namespace std::literals; |
28 | | |
29 | | namespace { |
30 | | |
31 | | static bool |
32 | | isVoidReturn(WasmEdge::Span<const WasmEdge::ValType> ValTypes) noexcept; |
33 | | static LLVM::Type toLLVMType(LLVM::Context LLContext, |
34 | | const WasmEdge::ValType &ValType) noexcept; |
35 | | static std::vector<LLVM::Type> |
36 | | toLLVMArgsType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
37 | | WasmEdge::Span<const WasmEdge::ValType> ValTypes) noexcept; |
38 | | static LLVM::Type |
39 | | toLLVMRetsType(LLVM::Context LLContext, |
40 | | WasmEdge::Span<const WasmEdge::ValType> ValTypes) noexcept; |
41 | | static LLVM::Type |
42 | | toLLVMType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
43 | | const WasmEdge::AST::FunctionType &FuncType) noexcept; |
44 | | static LLVM::Value |
45 | | toLLVMConstantZero(LLVM::Context LLContext, |
46 | | const WasmEdge::ValType &ValType) noexcept; |
47 | | static std::vector<LLVM::Value> unpackStruct(LLVM::Builder &Builder, |
48 | | LLVM::Value Struct) noexcept; |
49 | | class FunctionCompiler; |
50 | | |
51 | | // XXX: The misalignment handler is not implemented yet, so unaligned |
52 | | // load/store is forced for now. |
53 | | static inline constexpr const bool kForceUnalignment = true; |
54 | | |
55 | | // Force checking div/rem for a zero divisor. |
56 | | static inline constexpr const bool kForceDivCheck = true; |
57 | | |
58 | | // Size of a ValVariant |
59 | | static inline constexpr const uint32_t kValSize = sizeof(WasmEdge::ValVariant); |
60 | | |
61 | | // Translate Compiler::OptimizationLevel to llvm::PassBuilder version |
62 | | #if LLVM_VERSION_MAJOR >= 13 |
63 | | static inline const char * |
64 | | toLLVMLevel(WasmEdge::CompilerConfigure::OptimizationLevel Level) noexcept { |
65 | | using OL = WasmEdge::CompilerConfigure::OptimizationLevel; |
66 | | switch (Level) { |
67 | | case OL::O0: |
68 | | return "default<O0>,function(tailcallelim)"; |
69 | | case OL::O1: |
70 | | return "default<O1>,function(tailcallelim)"; |
71 | | case OL::O2: |
72 | | return "default<O2>"; |
73 | | case OL::O3: |
74 | | return "default<O3>"; |
75 | | case OL::Os: |
76 | | return "default<Os>"; |
77 | | case OL::Oz: |
78 | | return "default<Oz>"; |
79 | | default: |
80 | | assumingUnreachable(); |
81 | | } |
82 | | } |
83 | | #else |
84 | | static inline std::pair<unsigned int, unsigned int> |
85 | 2.29k | toLLVMLevel(WasmEdge::CompilerConfigure::OptimizationLevel Level) noexcept { |
86 | 2.29k | using OL = WasmEdge::CompilerConfigure::OptimizationLevel; |
87 | 2.29k | switch (Level) { |
88 | 0 | case OL::O0: |
89 | 0 | return {0, 0}; |
90 | 0 | case OL::O1: |
91 | 0 | return {1, 0}; |
92 | 0 | case OL::O2: |
93 | 0 | return {2, 0}; |
94 | 2.29k | case OL::O3: |
95 | 2.29k | return {3, 0}; |
96 | 0 | case OL::Os: |
97 | 0 | return {2, 1}; |
98 | 0 | case OL::Oz: |
99 | 0 | return {2, 2}; |
100 | 0 | default: |
101 | 0 | assumingUnreachable(); |
102 | 2.29k | } |
103 | 2.29k | } |
104 | | #endif |
105 | | |
106 | | static inline LLVMCodeGenOptLevel toLLVMCodeGenLevel( |
107 | 2.29k | WasmEdge::CompilerConfigure::OptimizationLevel Level) noexcept { |
108 | 2.29k | using OL = WasmEdge::CompilerConfigure::OptimizationLevel; |
109 | 2.29k | switch (Level) { |
110 | 0 | case OL::O0: |
111 | 0 | return LLVMCodeGenLevelNone; |
112 | 0 | case OL::O1: |
113 | 0 | return LLVMCodeGenLevelLess; |
114 | 0 | case OL::O2: |
115 | 0 | return LLVMCodeGenLevelDefault; |
116 | 2.29k | case OL::O3: |
117 | 2.29k | return LLVMCodeGenLevelAggressive; |
118 | 0 | case OL::Os: |
119 | 0 | return LLVMCodeGenLevelDefault; |
120 | 0 | case OL::Oz: |
121 | 0 | return LLVMCodeGenLevelDefault; |
122 | 0 | default: |
123 | 0 | assumingUnreachable(); |
124 | 2.29k | } |
125 | 2.29k | } |
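// Note (editorial sketch, not part of this file): with LLVM >= 13,
// toLLVMLevel() above yields a new-pass-manager pipeline string, while the
// older branch yields the legacy {OptLevel, SizeLevel} pair, and
// toLLVMCodeGenLevel() selects the LLVMCodeGenOptLevel for the target
// machine. For example, OL::Os maps to "default<Os>", {2, 1}, and
// LLVMCodeGenLevelDefault respectively. A hedged usage sketch, assuming the
// standard LLVM-C pass-builder entry points (LLVMCreatePassBuilderOptions,
// LLVMRunPasses, LLVMDisposePassBuilderOptions):
//
//   LLVMPassBuilderOptionsRef Opts = LLVMCreatePassBuilderOptions();
//   LLVMErrorRef Err = LLVMRunPasses(Mod, toLLVMLevel(Level), TM, Opts);
//   LLVMDisposePassBuilderOptions(Opts);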
126 | | } // namespace |
127 | | |
128 | | struct LLVM::Compiler::CompileContext { |
129 | | LLVM::Context LLContext; |
130 | | LLVM::Module &LLModule; |
131 | | LLVM::Attribute Cold; |
132 | | LLVM::Attribute NoAlias; |
133 | | LLVM::Attribute NoInline; |
134 | | LLVM::Attribute NoReturn; |
135 | | LLVM::Attribute ReadOnly; |
136 | | LLVM::Attribute StrictFP; |
137 | | LLVM::Attribute UWTable; |
138 | | LLVM::Attribute NoStackArgProbe; |
139 | | LLVM::Type VoidTy; |
140 | | LLVM::Type Int8Ty; |
141 | | LLVM::Type Int16Ty; |
142 | | LLVM::Type Int32Ty; |
143 | | LLVM::Type Int64Ty; |
144 | | LLVM::Type Int128Ty; |
145 | | LLVM::Type FloatTy; |
146 | | LLVM::Type DoubleTy; |
147 | | LLVM::Type Int8x16Ty; |
148 | | LLVM::Type Int16x8Ty; |
149 | | LLVM::Type Int32x4Ty; |
150 | | LLVM::Type Floatx4Ty; |
151 | | LLVM::Type Int64x2Ty; |
152 | | LLVM::Type Doublex2Ty; |
153 | | LLVM::Type Int128x1Ty; |
154 | | LLVM::Type Int8PtrTy; |
155 | | LLVM::Type Int32PtrTy; |
156 | | LLVM::Type Int64PtrTy; |
157 | | LLVM::Type Int128PtrTy; |
158 | | LLVM::Type Int8PtrPtrTy; |
159 | | LLVM::Type ExecCtxTy; |
160 | | LLVM::Type ExecCtxPtrTy; |
161 | | LLVM::Type IntrinsicsTableTy; |
162 | | LLVM::Type IntrinsicsTablePtrTy; |
163 | | LLVM::Message SubtargetFeatures; |
164 | | |
165 | | #if defined(__x86_64__) |
166 | | #if defined(__XOP__) |
167 | | bool SupportXOP = true; |
168 | | #else |
169 | | bool SupportXOP = false; |
170 | | #endif |
171 | | |
172 | | #if defined(__SSE4_1__) |
173 | | bool SupportSSE4_1 = true; |
174 | | #else |
175 | | bool SupportSSE4_1 = false; |
176 | | #endif |
177 | | |
178 | | #if defined(__SSSE3__) |
179 | | bool SupportSSSE3 = true; |
180 | | #else |
181 | | bool SupportSSSE3 = false; |
182 | | #endif |
183 | | |
184 | | #if defined(__SSE2__) |
185 | | bool SupportSSE2 = true; |
186 | | #else |
187 | | bool SupportSSE2 = false; |
188 | | #endif |
189 | | #endif |
190 | | |
191 | | #if defined(__aarch64__) |
192 | | #if defined(__ARM_NEON__) || defined(__ARM_NEON) || defined(__ARM_NEON_FP) |
193 | | bool SupportNEON = true; |
194 | | #else |
195 | | bool SupportNEON = false; |
196 | | #endif |
197 | | #endif |
198 | | |
199 | | std::vector<const AST::CompositeType *> CompositeTypes; |
200 | | std::vector<LLVM::Value> FunctionWrappers; |
201 | | std::vector<std::tuple<uint32_t, LLVM::FunctionCallee, |
202 | | const WasmEdge::AST::CodeSegment *>> |
203 | | Functions; |
204 | | std::vector<LLVM::Type> Globals; |
205 | | LLVM::Value IntrinsicsTable; |
206 | | LLVM::FunctionCallee Trap; |
207 | | CompileContext(LLVM::Context C, LLVM::Module &M, |
208 | | bool IsGenericBinary) noexcept |
209 | 2.30k | : LLContext(C), LLModule(M), |
210 | 2.30k | Cold(LLVM::Attribute::createEnum(C, LLVM::Core::Cold, 0)), |
211 | 2.30k | NoAlias(LLVM::Attribute::createEnum(C, LLVM::Core::NoAlias, 0)), |
212 | 2.30k | NoInline(LLVM::Attribute::createEnum(C, LLVM::Core::NoInline, 0)), |
213 | 2.30k | NoReturn(LLVM::Attribute::createEnum(C, LLVM::Core::NoReturn, 0)), |
214 | 2.30k | ReadOnly(LLVM::Attribute::createEnum(C, LLVM::Core::ReadOnly, 0)), |
215 | 2.30k | StrictFP(LLVM::Attribute::createEnum(C, LLVM::Core::StrictFP, 0)), |
216 | 2.30k | UWTable(LLVM::Attribute::createEnum(C, LLVM::Core::UWTable, |
217 | 2.30k | LLVM::Core::UWTableDefault)), |
218 | | NoStackArgProbe( |
219 | 2.30k | LLVM::Attribute::createString(C, "no-stack-arg-probe"sv, {})), |
220 | 2.30k | VoidTy(LLContext.getVoidTy()), Int8Ty(LLContext.getInt8Ty()), |
221 | 2.30k | Int16Ty(LLContext.getInt16Ty()), Int32Ty(LLContext.getInt32Ty()), |
222 | 2.30k | Int64Ty(LLContext.getInt64Ty()), Int128Ty(LLContext.getInt128Ty()), |
223 | 2.30k | FloatTy(LLContext.getFloatTy()), DoubleTy(LLContext.getDoubleTy()), |
224 | 2.30k | Int8x16Ty(LLVM::Type::getVectorType(Int8Ty, 16)), |
225 | 2.30k | Int16x8Ty(LLVM::Type::getVectorType(Int16Ty, 8)), |
226 | 2.30k | Int32x4Ty(LLVM::Type::getVectorType(Int32Ty, 4)), |
227 | 2.30k | Floatx4Ty(LLVM::Type::getVectorType(FloatTy, 4)), |
228 | 2.30k | Int64x2Ty(LLVM::Type::getVectorType(Int64Ty, 2)), |
229 | 2.30k | Doublex2Ty(LLVM::Type::getVectorType(DoubleTy, 2)), |
230 | 2.30k | Int128x1Ty(LLVM::Type::getVectorType(Int128Ty, 1)), |
231 | 2.30k | Int8PtrTy(Int8Ty.getPointerTo()), Int32PtrTy(Int32Ty.getPointerTo()), |
232 | 2.30k | Int64PtrTy(Int64Ty.getPointerTo()), |
233 | 2.30k | Int128PtrTy(Int128Ty.getPointerTo()), |
234 | 2.30k | Int8PtrPtrTy(Int8PtrTy.getPointerTo()), |
235 | 2.30k | ExecCtxTy(LLVM::Type::getStructType( |
236 | 2.30k | "ExecCtx", |
237 | 2.30k | std::initializer_list<LLVM::Type>{ |
238 | | // Memory |
239 | 2.30k | Int8PtrTy.getPointerTo(), |
240 | | // Globals |
241 | 2.30k | Int128PtrTy.getPointerTo(), |
242 | | // InstrCount |
243 | 2.30k | Int64PtrTy, |
244 | | // CostTable |
245 | 2.30k | LLVM::Type::getArrayType(Int64Ty, UINT16_MAX + 1) |
246 | 2.30k | .getPointerTo(), |
247 | | // Gas |
248 | 2.30k | Int64PtrTy, |
249 | | // GasLimit |
250 | 2.30k | Int64Ty, |
251 | | // StopToken |
252 | 2.30k | Int32PtrTy, |
253 | 2.30k | })), |
254 | 2.30k | ExecCtxPtrTy(ExecCtxTy.getPointerTo()), |
255 | 2.30k | IntrinsicsTableTy(LLVM::Type::getArrayType( |
256 | 2.30k | Int8PtrTy, |
257 | 2.30k | static_cast<uint32_t>(Executable::Intrinsics::kIntrinsicMax))), |
258 | 2.30k | IntrinsicsTablePtrTy(IntrinsicsTableTy.getPointerTo()), |
259 | 2.30k | IntrinsicsTable(LLModule.addGlobal(IntrinsicsTablePtrTy, true, |
260 | 2.30k | LLVMExternalLinkage, LLVM::Value(), |
261 | 2.30k | "intrinsics")) { |
262 | 2.30k | Trap.Ty = LLVM::Type::getFunctionType(VoidTy, {Int32Ty}); |
263 | 2.30k | Trap.Fn = LLModule.addFunction(Trap.Ty, LLVMPrivateLinkage, "trap"); |
264 | 2.30k | Trap.Fn.setDSOLocal(true); |
265 | 2.30k | Trap.Fn.addFnAttr(NoStackArgProbe); |
266 | 2.30k | Trap.Fn.addFnAttr(StrictFP); |
267 | 2.30k | Trap.Fn.addFnAttr(UWTable); |
268 | 2.30k | Trap.Fn.addFnAttr(NoReturn); |
269 | 2.30k | Trap.Fn.addFnAttr(Cold); |
270 | 2.30k | Trap.Fn.addFnAttr(NoInline); |
271 | | |
272 | 2.30k | LLModule.addGlobal(Int32Ty, true, LLVMExternalLinkage, |
273 | 2.30k | LLVM::Value::getConstInt(Int32Ty, AOT::kBinaryVersion), |
274 | 2.30k | "version"); |
275 | | |
276 | 2.30k | if (!IsGenericBinary) { |
277 | 2.30k | SubtargetFeatures = LLVM::getHostCPUFeatures(); |
278 | 2.30k | auto Features = SubtargetFeatures.string_view(); |
279 | 200k | while (!Features.empty()) { |
280 | 198k | std::string_view Feature; |
281 | 198k | if (auto Pos = Features.find(','); Pos != std::string_view::npos) { |
282 | 196k | Feature = Features.substr(0, Pos); |
283 | 196k | Features = Features.substr(Pos + 1); |
284 | 196k | } else { |
285 | 2.30k | Feature = std::exchange(Features, std::string_view()); |
286 | 2.30k | } |
287 | 198k | if (Feature[0] != '+') { |
288 | 110k | continue; |
289 | 110k | } |
290 | 87.6k | Feature = Feature.substr(1); |
291 | | |
292 | 87.6k | #if defined(__x86_64__) |
293 | 87.6k | if (!SupportXOP && Feature == "xop"sv) { |
294 | 0 | SupportXOP = true; |
295 | 0 | } |
296 | 87.6k | if (!SupportSSE4_1 && Feature == "sse4.1"sv) { |
297 | 2.30k | SupportSSE4_1 = true; |
298 | 2.30k | } |
299 | 87.6k | if (!SupportSSSE3 && Feature == "ssse3"sv) { |
300 | 2.30k | SupportSSSE3 = true; |
301 | 2.30k | } |
302 | 87.6k | if (!SupportSSE2 && Feature == "sse2"sv) { |
303 | 0 | SupportSSE2 = true; |
304 | 0 | } |
305 | | #elif defined(__aarch64__) |
306 | | if (!SupportNEON && Feature == "neon"sv) { |
307 | | SupportNEON = true; |
308 | | } |
309 | | #endif |
310 | 87.6k | } |
311 | 2.30k | } |
312 | | |
313 | 2.30k | { |
314 | | // create trap |
315 | 2.30k | LLVM::Builder Builder(LLContext); |
316 | 2.30k | Builder.positionAtEnd( |
317 | 2.30k | LLVM::BasicBlock::create(LLContext, Trap.Fn, "entry")); |
318 | 2.30k | auto FnTy = LLVM::Type::getFunctionType(VoidTy, {Int32Ty}); |
319 | 2.30k | auto CallTrap = Builder.createCall( |
320 | 2.30k | getIntrinsic(Builder, Executable::Intrinsics::kTrap, FnTy), |
321 | 2.30k | {Trap.Fn.getFirstParam()}); |
322 | 2.30k | CallTrap.addCallSiteAttribute(NoReturn); |
323 | 2.30k | Builder.createUnreachable(); |
324 | 2.30k | } |
325 | 2.30k | } |
326 | | LLVM::Value getMemory(LLVM::Builder &Builder, LLVM::Value ExecCtx, |
327 | 22.9k | uint32_t Index) noexcept { |
328 | 22.9k | auto Array = Builder.createExtractValue(ExecCtx, 0); |
329 | | #if WASMEDGE_ALLOCATOR_IS_STABLE |
330 | | auto VPtr = Builder.createLoad( |
331 | | Int8PtrTy, Builder.createInBoundsGEP1(Int8PtrTy, Array, |
332 | | LLContext.getInt64(Index))); |
333 | | VPtr.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
334 | | LLVM::Metadata(LLContext, {})); |
335 | | #else |
336 | 22.9k | auto VPtrPtr = Builder.createLoad( |
337 | 22.9k | Int8PtrPtrTy, Builder.createInBoundsGEP1(Int8PtrPtrTy, Array, |
338 | 22.9k | LLContext.getInt64(Index))); |
339 | 22.9k | VPtrPtr.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
340 | 22.9k | LLVM::Metadata(LLContext, {})); |
341 | 22.9k | auto VPtr = Builder.createLoad( |
342 | 22.9k | Int8PtrTy, |
343 | 22.9k | Builder.createInBoundsGEP1(Int8PtrTy, VPtrPtr, LLContext.getInt64(0))); |
344 | 22.9k | #endif |
345 | 22.9k | return Builder.createBitCast(VPtr, Int8PtrTy); |
346 | 22.9k | } |
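// Note (editorial, not part of this file): the two branches above differ only
// in indirection. When WASMEDGE_ALLOCATOR_IS_STABLE is set, the ExecCtx memory
// array holds the data pointers themselves, so a single invariant.group load
// suffices; otherwise it holds pointers to per-instance slots, and a second
// load fetches the current base pointer, which may change when memory grows.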
347 | | std::pair<LLVM::Type, LLVM::Value> getGlobal(LLVM::Builder &Builder, |
348 | | LLVM::Value ExecCtx, |
349 | 444 | uint32_t Index) noexcept { |
350 | 444 | auto Ty = Globals[Index]; |
351 | 444 | auto Array = Builder.createExtractValue(ExecCtx, 1); |
352 | 444 | auto VPtr = Builder.createLoad( |
353 | 444 | Int128PtrTy, Builder.createInBoundsGEP1(Int8PtrTy, Array, |
354 | 444 | LLContext.getInt64(Index))); |
355 | 444 | VPtr.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
356 | 444 | LLVM::Metadata(LLContext, {})); |
357 | 444 | auto Ptr = Builder.createBitCast(VPtr, Ty.getPointerTo()); |
358 | 444 | return {Ty, Ptr}; |
359 | 444 | } |
360 | | LLVM::Value getInstrCount(LLVM::Builder &Builder, |
361 | 0 | LLVM::Value ExecCtx) noexcept { |
362 | 0 | return Builder.createExtractValue(ExecCtx, 2); |
363 | 0 | } |
364 | | LLVM::Value getCostTable(LLVM::Builder &Builder, |
365 | 0 | LLVM::Value ExecCtx) noexcept { |
366 | 0 | return Builder.createExtractValue(ExecCtx, 3); |
367 | 0 | } |
368 | 0 | LLVM::Value getGas(LLVM::Builder &Builder, LLVM::Value ExecCtx) noexcept { |
369 | 0 | return Builder.createExtractValue(ExecCtx, 4); |
370 | 0 | } |
371 | | LLVM::Value getGasLimit(LLVM::Builder &Builder, |
372 | 0 | LLVM::Value ExecCtx) noexcept { |
373 | 0 | return Builder.createExtractValue(ExecCtx, 5); |
374 | 0 | } |
375 | | LLVM::Value getStopToken(LLVM::Builder &Builder, |
376 | 0 | LLVM::Value ExecCtx) noexcept { |
377 | 0 | return Builder.createExtractValue(ExecCtx, 6); |
378 | 0 | } |
379 | | LLVM::FunctionCallee getIntrinsic(LLVM::Builder &Builder, |
380 | | Executable::Intrinsics Index, |
381 | 7.77k | LLVM::Type Ty) noexcept { |
382 | 7.77k | const auto Value = static_cast<uint32_t>(Index); |
383 | 7.77k | auto PtrTy = Ty.getPointerTo(); |
384 | 7.77k | auto PtrPtrTy = PtrTy.getPointerTo(); |
385 | 7.77k | auto IT = Builder.createLoad(IntrinsicsTablePtrTy, IntrinsicsTable); |
386 | 7.77k | IT.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
387 | 7.77k | LLVM::Metadata(LLContext, {})); |
388 | 7.77k | auto VPtr = |
389 | 7.77k | Builder.createInBoundsGEP2(IntrinsicsTableTy, IT, LLContext.getInt64(0), |
390 | 7.77k | LLContext.getInt64(Value)); |
391 | 7.77k | auto Ptr = Builder.createBitCast(VPtr, PtrPtrTy); |
392 | 7.77k | return {Ty, Builder.createLoad(PtrTy, Ptr)}; |
393 | 7.77k | } |
394 | | std::pair<std::vector<ValType>, std::vector<ValType>> |
395 | 19.4k | resolveBlockType(const BlockType &BType) const noexcept { |
396 | 19.4k | using VecT = std::vector<ValType>; |
397 | 19.4k | using RetT = std::pair<VecT, VecT>; |
398 | 19.4k | if (BType.isEmpty()) { |
399 | 2.23k | return RetT{}; |
400 | 2.23k | } |
401 | 17.1k | if (BType.isValType()) { |
402 | 2.60k | return RetT{{}, {BType.getValType()}}; |
403 | 14.5k | } else { |
404 | | // Type index case. t2* = type[index].returns |
405 | 14.5k | const uint32_t TypeIdx = BType.getTypeIndex(); |
406 | 14.5k | const auto &FType = CompositeTypes[TypeIdx]->getFuncType(); |
407 | 14.5k | return RetT{ |
408 | 14.5k | VecT(FType.getParamTypes().begin(), FType.getParamTypes().end()), |
409 | 14.5k | VecT(FType.getReturnTypes().begin(), FType.getReturnTypes().end())}; |
410 | 14.5k | } |
411 | 17.1k | } |
412 | | }; |
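// Note (editorial, not part of this file): index map used by the
// createExtractValue() calls in the accessors above, following the ExecCtx
// struct layout declared in the constructor:
//   0: Memory (i8**)            1: Globals (i128**)   2: InstrCount (i64*)
//   3: CostTable (i64[65536]*)  4: Gas (i64*)         5: GasLimit (i64)
//   6: StopToken (i32*)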
413 | | |
414 | | namespace { |
415 | | |
416 | | using namespace WasmEdge; |
417 | | |
418 | 36.8k | static bool isVoidReturn(Span<const ValType> ValTypes) noexcept { |
419 | 36.8k | return ValTypes.empty(); |
420 | 36.8k | } |
421 | | |
422 | | static LLVM::Type toLLVMType(LLVM::Context LLContext, |
423 | 2.41M | const ValType &ValType) noexcept { |
424 | 2.41M | switch (ValType.getCode()) { |
425 | 63.5k | case TypeCode::I32: |
426 | 63.5k | return LLContext.getInt32Ty(); |
427 | 448k | case TypeCode::I64: |
428 | 448k | return LLContext.getInt64Ty(); |
429 | 11.3k | case TypeCode::Ref: |
430 | 146k | case TypeCode::RefNull: |
431 | 1.82M | case TypeCode::V128: |
432 | 1.82M | return LLVM::Type::getVectorType(LLContext.getInt64Ty(), 2); |
433 | 51.1k | case TypeCode::F32: |
434 | 51.1k | return LLContext.getFloatTy(); |
435 | 21.1k | case TypeCode::F64: |
436 | 21.1k | return LLContext.getDoubleTy(); |
437 | 0 | default: |
438 | 0 | assumingUnreachable(); |
439 | 2.41M | } |
440 | 2.41M | } |
441 | | |
442 | | static std::vector<LLVM::Type> |
443 | | toLLVMTypeVector(LLVM::Context LLContext, |
444 | 21.4k | Span<const ValType> ValTypes) noexcept { |
445 | 21.4k | std::vector<LLVM::Type> Result; |
446 | 21.4k | Result.reserve(ValTypes.size()); |
447 | 21.4k | for (const auto &Type : ValTypes) { |
448 | 20.1k | Result.push_back(toLLVMType(LLContext, Type)); |
449 | 20.1k | } |
450 | 21.4k | return Result; |
451 | 21.4k | } |
452 | | |
453 | | static std::vector<LLVM::Type> |
454 | | toLLVMArgsType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
455 | 17.4k | Span<const ValType> ValTypes) noexcept { |
456 | 17.4k | auto Result = toLLVMTypeVector(LLContext, ValTypes); |
457 | 17.4k | Result.insert(Result.begin(), ExecCtxPtrTy); |
458 | 17.4k | return Result; |
459 | 17.4k | } |
460 | | |
461 | | static LLVM::Type toLLVMRetsType(LLVM::Context LLContext, |
462 | 17.4k | Span<const ValType> ValTypes) noexcept { |
463 | 17.4k | if (isVoidReturn(ValTypes)) { |
464 | 4.00k | return LLContext.getVoidTy(); |
465 | 4.00k | } |
466 | 13.3k | if (ValTypes.size() == 1) { |
467 | 12.6k | return toLLVMType(LLContext, ValTypes.front()); |
468 | 12.6k | } |
469 | 748 | std::vector<LLVM::Type> Result; |
470 | 748 | Result.reserve(ValTypes.size()); |
471 | 2.04k | for (const auto &Type : ValTypes) { |
472 | 2.04k | Result.push_back(toLLVMType(LLContext, Type)); |
473 | 2.04k | } |
474 | 748 | return LLVM::Type::getStructType(Result); |
475 | 13.3k | } |
476 | | |
477 | | static LLVM::Type toLLVMType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
478 | 17.4k | const AST::FunctionType &FuncType) noexcept { |
479 | 17.4k | auto ArgsTy = |
480 | 17.4k | toLLVMArgsType(LLContext, ExecCtxPtrTy, FuncType.getParamTypes()); |
481 | 17.4k | auto RetTy = toLLVMRetsType(LLContext, FuncType.getReturnTypes()); |
482 | 17.4k | return LLVM::Type::getFunctionType(RetTy, ArgsTy); |
483 | 17.4k | } |
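// Note (editorial sketch, not part of this file): example of the signature
// lowering performed by toLLVMArgsType/toLLVMRetsType above. A Wasm function
// type [i32, i64] -> [f32] is lowered to
//   float (ExecCtx*, i32, i64)
// i.e. the execution-context pointer is prepended to the parameters; an empty
// result list lowers to void, a single result stays scalar, and a multi-value
// result such as [i32, i64] is packed into the literal struct { i32, i64 }.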
484 | | |
485 | | static LLVM::Value toLLVMConstantZero(LLVM::Context LLContext, |
486 | 2.37M | const ValType &ValType) noexcept { |
487 | 2.37M | switch (ValType.getCode()) { |
488 | 44.1k | case TypeCode::I32: |
489 | 44.1k | return LLVM::Value::getConstNull(LLContext.getInt32Ty()); |
490 | 444k | case TypeCode::I64: |
491 | 444k | return LLVM::Value::getConstNull(LLContext.getInt64Ty()); |
492 | 11.3k | case TypeCode::Ref: |
493 | 145k | case TypeCode::RefNull: { |
494 | 145k | std::array<uint8_t, 16> Data{}; |
495 | 145k | const auto Raw = ValType.getRawData(); |
496 | 145k | std::copy(Raw.begin(), Raw.end(), Data.begin()); |
497 | 145k | return LLVM::Value::getConstVector8(LLContext, Data); |
498 | 11.3k | } |
499 | 1.67M | case TypeCode::V128: |
500 | 1.67M | return LLVM::Value::getConstNull( |
501 | 1.67M | LLVM::Type::getVectorType(LLContext.getInt64Ty(), 2)); |
502 | 48.4k | case TypeCode::F32: |
503 | 48.4k | return LLVM::Value::getConstNull(LLContext.getFloatTy()); |
504 | 18.0k | case TypeCode::F64: |
505 | 18.0k | return LLVM::Value::getConstNull(LLContext.getDoubleTy()); |
506 | 0 | default: |
507 | 0 | assumingUnreachable(); |
508 | 2.37M | } |
509 | 2.37M | } |
510 | | |
511 | | class FunctionCompiler { |
512 | | struct Control; |
513 | | |
514 | | public: |
515 | | FunctionCompiler(LLVM::Compiler::CompileContext &Context, |
516 | | LLVM::FunctionCallee F, Span<const ValType> Locals, |
517 | | bool Interruptible, bool InstructionCounting, |
518 | | bool GasMeasuring) noexcept |
519 | 11.4k | : Context(Context), LLContext(Context.LLContext), |
520 | 11.4k | Interruptible(Interruptible), F(F), Builder(LLContext) { |
521 | 11.4k | if (F.Fn) { |
522 | 11.4k | Builder.positionAtEnd(LLVM::BasicBlock::create(LLContext, F.Fn, "entry")); |
523 | 11.4k | ExecCtx = Builder.createLoad(Context.ExecCtxTy, F.Fn.getFirstParam()); |
524 | | |
525 | 11.4k | if (InstructionCounting) { |
526 | 0 | LocalInstrCount = Builder.createAlloca(Context.Int64Ty); |
527 | 0 | Builder.createStore(LLContext.getInt64(0), LocalInstrCount); |
528 | 0 | } |
529 | | |
530 | 11.4k | if (GasMeasuring) { |
531 | 0 | LocalGas = Builder.createAlloca(Context.Int64Ty); |
532 | 0 | Builder.createStore(LLContext.getInt64(0), LocalGas); |
533 | 0 | } |
534 | | |
535 | 21.3k | for (LLVM::Value Arg = F.Fn.getFirstParam().getNextParam(); Arg; |
536 | 11.4k | Arg = Arg.getNextParam()) { |
537 | 9.82k | LLVM::Type Ty = Arg.getType(); |
538 | 9.82k | LLVM::Value ArgPtr = Builder.createAlloca(Ty); |
539 | 9.82k | Builder.createStore(Arg, ArgPtr); |
540 | 9.82k | Local.emplace_back(Ty, ArgPtr); |
541 | 9.82k | } |
542 | | |
543 | 2.37M | for (const auto &Type : Locals) { |
544 | 2.37M | LLVM::Type Ty = toLLVMType(LLContext, Type); |
545 | 2.37M | LLVM::Value ArgPtr = Builder.createAlloca(Ty); |
546 | 2.37M | Builder.createStore(toLLVMConstantZero(LLContext, Type), ArgPtr); |
547 | 2.37M | Local.emplace_back(Ty, ArgPtr); |
548 | 2.37M | } |
549 | 11.4k | } |
550 | 11.4k | } |
551 | | |
552 | 33.7k | LLVM::BasicBlock getTrapBB(ErrCode::Value Error) noexcept { |
553 | 33.7k | if (auto Iter = TrapBB.find(Error); Iter != TrapBB.end()) { |
554 | 30.4k | return Iter->second; |
555 | 30.4k | } |
556 | 3.27k | auto BB = LLVM::BasicBlock::create(LLContext, F.Fn, "trap"); |
557 | 3.27k | TrapBB.emplace(Error, BB); |
558 | 3.27k | return BB; |
559 | 33.7k | } |
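// Note (editorial, not part of this file): getTrapBB() creates at most one
// "trap" block per ErrCode and caches it in TrapBB. The blocks stay empty
// until compile() below finishes, which then fills each one with the
// instruction-count/gas updates, a noreturn call to Context.Trap carrying the
// error code, and an unreachable terminator.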
560 | | |
561 | | Expect<void> |
562 | | compile(const AST::CodeSegment &Code, |
563 | 11.4k | std::pair<std::vector<ValType>, std::vector<ValType>> Type) noexcept { |
564 | 11.4k | auto RetBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ret"); |
565 | 11.4k | Type.first.clear(); |
566 | 11.4k | enterBlock(RetBB, {}, {}, {}, std::move(Type)); |
567 | 11.4k | EXPECTED_TRY(compile(Code.getExpr().getInstrs())); |
568 | 11.4k | assuming(ControlStack.empty()); |
569 | 11.4k | compileReturn(); |
570 | | |
571 | 11.4k | for (auto &[Error, BB] : TrapBB) { |
572 | 3.26k | Builder.positionAtEnd(BB); |
573 | 3.26k | updateInstrCount(); |
574 | 3.26k | updateGasAtTrap(); |
575 | 3.26k | auto CallTrap = Builder.createCall( |
576 | 3.26k | Context.Trap, {LLContext.getInt32(static_cast<uint32_t>(Error))}); |
577 | 3.26k | CallTrap.addCallSiteAttribute(Context.NoReturn); |
578 | 3.26k | Builder.createUnreachable(); |
579 | 3.26k | } |
580 | 11.4k | return {}; |
581 | 11.4k | } |
582 | | |
583 | 11.4k | Expect<void> compile(AST::InstrView Instrs) noexcept { |
584 | 1.58M | auto Dispatch = [this](const AST::Instruction &Instr) -> Expect<void> { |
585 | 1.58M | switch (Instr.getOpCode()) { |
586 | | // Control instructions (for blocks) |
587 | 3.52k | case OpCode::Block: { |
588 | 3.52k | auto Block = LLVM::BasicBlock::create(LLContext, F.Fn, "block"); |
589 | 3.52k | auto EndBlock = LLVM::BasicBlock::create(LLContext, F.Fn, "block.end"); |
590 | 3.52k | Builder.createBr(Block); |
591 | | |
592 | 3.52k | Builder.positionAtEnd(Block); |
593 | 3.52k | auto Type = Context.resolveBlockType(Instr.getBlockType()); |
594 | 3.52k | const auto Arity = Type.first.size(); |
595 | 3.52k | std::vector<LLVM::Value> Args(Arity); |
596 | 3.52k | if (isUnreachable()) { |
597 | 999 | for (size_t I = 0; I < Arity; ++I) { |
598 | 315 | auto Ty = toLLVMType(LLContext, Type.first[I]); |
599 | 315 | Args[I] = LLVM::Value::getUndef(Ty); |
600 | 315 | } |
601 | 2.84k | } else { |
602 | 3.31k | for (size_t I = 0; I < Arity; ++I) { |
603 | 469 | const size_t J = Arity - 1 - I; |
604 | 469 | Args[J] = stackPop(); |
605 | 469 | } |
606 | 2.84k | } |
607 | 3.52k | enterBlock(EndBlock, {}, {}, std::move(Args), std::move(Type)); |
608 | 3.52k | checkStop(); |
609 | 3.52k | updateGas(); |
610 | 3.52k | return {}; |
611 | 0 | } |
612 | 1.65k | case OpCode::Loop: { |
613 | 1.65k | auto Curr = Builder.getInsertBlock(); |
614 | 1.65k | auto Loop = LLVM::BasicBlock::create(LLContext, F.Fn, "loop"); |
615 | 1.65k | auto EndLoop = LLVM::BasicBlock::create(LLContext, F.Fn, "loop.end"); |
616 | 1.65k | Builder.createBr(Loop); |
617 | | |
618 | 1.65k | Builder.positionAtEnd(Loop); |
619 | 1.65k | auto Type = Context.resolveBlockType(Instr.getBlockType()); |
620 | 1.65k | const auto Arity = Type.first.size(); |
621 | 1.65k | std::vector<LLVM::Value> Args(Arity); |
622 | 1.65k | if (isUnreachable()) { |
623 | 1.00k | for (size_t I = 0; I < Arity; ++I) { |
624 | 480 | auto Ty = toLLVMType(LLContext, Type.first[I]); |
625 | 480 | auto Value = LLVM::Value::getUndef(Ty); |
626 | 480 | auto PHINode = Builder.createPHI(Ty); |
627 | 480 | PHINode.addIncoming(Value, Curr); |
628 | 480 | Args[I] = PHINode; |
629 | 480 | } |
630 | 1.13k | } else { |
631 | 1.60k | for (size_t I = 0; I < Arity; ++I) { |
632 | 472 | const size_t J = Arity - 1 - I; |
633 | 472 | auto Value = stackPop(); |
634 | 472 | auto PHINode = Builder.createPHI(Value.getType()); |
635 | 472 | PHINode.addIncoming(Value, Curr); |
636 | 472 | Args[J] = PHINode; |
637 | 472 | } |
638 | 1.13k | } |
639 | 1.65k | enterBlock(Loop, EndLoop, {}, std::move(Args), std::move(Type)); |
640 | 1.65k | checkStop(); |
641 | 1.65k | updateGas(); |
642 | 1.65k | return {}; |
643 | 0 | } |
644 | 2.75k | case OpCode::If: { |
645 | 2.75k | auto Then = LLVM::BasicBlock::create(LLContext, F.Fn, "then"); |
646 | 2.75k | auto Else = LLVM::BasicBlock::create(LLContext, F.Fn, "else"); |
647 | 2.75k | auto EndIf = LLVM::BasicBlock::create(LLContext, F.Fn, "if.end"); |
648 | 2.75k | LLVM::Value Cond; |
649 | 2.75k | if (isUnreachable()) { |
650 | 524 | Cond = LLVM::Value::getUndef(LLContext.getInt1Ty()); |
651 | 2.23k | } else { |
652 | 2.23k | Cond = Builder.createICmpNE(stackPop(), LLContext.getInt32(0)); |
653 | 2.23k | } |
654 | 2.75k | Builder.createCondBr(Cond, Then, Else); |
655 | | |
656 | 2.75k | Builder.positionAtEnd(Then); |
657 | 2.75k | auto Type = Context.resolveBlockType(Instr.getBlockType()); |
658 | 2.75k | const auto Arity = Type.first.size(); |
659 | 2.75k | std::vector<LLVM::Value> Args(Arity); |
660 | 2.75k | if (isUnreachable()) { |
661 | 988 | for (size_t I = 0; I < Arity; ++I) { |
662 | 464 | auto Ty = toLLVMType(LLContext, Type.first[I]); |
663 | 464 | Args[I] = LLVM::Value::getUndef(Ty); |
664 | 464 | } |
665 | 2.23k | } else { |
666 | 3.13k | for (size_t I = 0; I < Arity; ++I) { |
667 | 900 | const size_t J = Arity - 1 - I; |
668 | 900 | Args[J] = stackPop(); |
669 | 900 | } |
670 | 2.23k | } |
671 | 2.75k | enterBlock(EndIf, {}, Else, std::move(Args), std::move(Type)); |
672 | 2.75k | return {}; |
673 | 0 | } |
674 | 5 | case OpCode::Try_table: |
675 | | // TODO: EXCEPTION - implement the AOT. |
676 | 5 | return Unexpect(ErrCode::Value::AOTNotImpl); |
677 | 19.4k | case OpCode::End: { |
678 | 19.4k | auto Entry = leaveBlock(); |
679 | 19.4k | if (Entry.ElseBlock) { |
680 | 1.01k | auto Block = Builder.getInsertBlock(); |
681 | 1.01k | Builder.positionAtEnd(Entry.ElseBlock); |
682 | 1.01k | enterBlock(Block, {}, {}, std::move(Entry.Args), |
683 | 1.01k | std::move(Entry.Type), std::move(Entry.ReturnPHI)); |
684 | 1.01k | Entry = leaveBlock(); |
685 | 1.01k | } |
686 | 19.4k | buildPHI(Entry.Type.second, Entry.ReturnPHI); |
687 | 19.4k | return {}; |
688 | 0 | } |
689 | 1.74k | case OpCode::Else: { |
690 | 1.74k | auto Entry = leaveBlock(); |
691 | 1.74k | Builder.positionAtEnd(Entry.ElseBlock); |
692 | 1.74k | enterBlock(Entry.JumpBlock, {}, {}, std::move(Entry.Args), |
693 | 1.74k | std::move(Entry.Type), std::move(Entry.ReturnPHI)); |
694 | 1.74k | return {}; |
695 | 0 | } |
696 | 1.55M | default: |
697 | 1.55M | break; |
698 | 1.58M | } |
699 | | |
700 | 1.55M | if (isUnreachable()) { |
701 | 462k | return {}; |
702 | 462k | } |
703 | | |
704 | 1.08M | switch (Instr.getOpCode()) { |
705 | | // Control instructions |
706 | 3.33k | case OpCode::Unreachable: |
707 | 3.33k | Builder.createBr(getTrapBB(ErrCode::Value::Unreachable)); |
708 | 3.33k | setUnreachable(); |
709 | 3.33k | Builder.positionAtEnd( |
710 | 3.33k | LLVM::BasicBlock::create(LLContext, F.Fn, "unreachable.end")); |
711 | 3.33k | break; |
712 | 43.5k | case OpCode::Nop: |
713 | 43.5k | break; |
714 | 1 | case OpCode::Throw: |
715 | 2 | case OpCode::Throw_ref: |
716 | | // TODO: EXCEPTION - implement the AOT. |
717 | 2 | return Unexpect(ErrCode::Value::AOTNotImpl); |
718 | 775 | case OpCode::Br: { |
719 | 775 | const auto Label = Instr.getJump().TargetIndex; |
720 | 775 | setLableJumpPHI(Label); |
721 | 775 | Builder.createBr(getLabel(Label)); |
722 | 775 | setUnreachable(); |
723 | 775 | Builder.positionAtEnd( |
724 | 775 | LLVM::BasicBlock::create(LLContext, F.Fn, "br.end")); |
725 | 775 | break; |
726 | 1 | } |
727 | 365 | case OpCode::Br_if: { |
728 | 365 | const auto Label = Instr.getJump().TargetIndex; |
729 | 365 | auto Cond = Builder.createICmpNE(stackPop(), LLContext.getInt32(0)); |
730 | 365 | setLableJumpPHI(Label); |
731 | 365 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "br_if.end"); |
732 | 365 | Builder.createCondBr(Cond, getLabel(Label), Next); |
733 | 365 | Builder.positionAtEnd(Next); |
734 | 365 | break; |
735 | 1 | } |
736 | 984 | case OpCode::Br_table: { |
737 | 984 | auto LabelTable = Instr.getLabelList(); |
738 | 984 | assuming(LabelTable.size() <= std::numeric_limits<uint32_t>::max()); |
739 | 984 | const auto LabelTableSize = |
740 | 984 | static_cast<uint32_t>(LabelTable.size() - 1); |
741 | 984 | auto Value = stackPop(); |
742 | 984 | setLableJumpPHI(LabelTable[LabelTableSize].TargetIndex); |
743 | 984 | auto Switch = Builder.createSwitch( |
744 | 984 | Value, getLabel(LabelTable[LabelTableSize].TargetIndex), |
745 | 984 | LabelTableSize); |
746 | 36.5k | for (uint32_t I = 0; I < LabelTableSize; ++I) { |
747 | 35.5k | setLableJumpPHI(LabelTable[I].TargetIndex); |
748 | 35.5k | Switch.addCase(LLContext.getInt32(I), |
749 | 35.5k | getLabel(LabelTable[I].TargetIndex)); |
750 | 35.5k | } |
751 | 984 | setUnreachable(); |
752 | 984 | Builder.positionAtEnd( |
753 | 984 | LLVM::BasicBlock::create(LLContext, F.Fn, "br_table.end")); |
754 | 984 | break; |
755 | 984 | } |
756 | 6 | case OpCode::Br_on_null: { |
757 | 6 | const auto Label = Instr.getJump().TargetIndex; |
758 | 6 | auto Value = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
759 | 6 | auto Cond = Builder.createICmpEQ( |
760 | 6 | Builder.createExtractElement(Value, LLContext.getInt64(1)), |
761 | 6 | LLContext.getInt64(0)); |
762 | 6 | setLableJumpPHI(Label); |
763 | 6 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "br_on_null.end"); |
764 | 6 | Builder.createCondBr(Cond, getLabel(Label), Next); |
765 | 6 | Builder.positionAtEnd(Next); |
766 | 6 | stackPush(Value); |
767 | 6 | break; |
768 | 984 | } |
769 | 6 | case OpCode::Br_on_non_null: { |
770 | 6 | const auto Label = Instr.getJump().TargetIndex; |
771 | 6 | auto Cond = Builder.createICmpNE( |
772 | 6 | Builder.createExtractElement( |
773 | 6 | Builder.createBitCast(Stack.back(), Context.Int64x2Ty), |
774 | 6 | LLContext.getInt64(1)), |
775 | 6 | LLContext.getInt64(0)); |
776 | 6 | setLableJumpPHI(Label); |
777 | 6 | auto Next = |
778 | 6 | LLVM::BasicBlock::create(LLContext, F.Fn, "br_on_non_null.end"); |
779 | 6 | Builder.createCondBr(Cond, getLabel(Label), Next); |
780 | 6 | Builder.positionAtEnd(Next); |
781 | 6 | stackPop(); |
782 | 6 | break; |
783 | 984 | } |
784 | 0 | case OpCode::Br_on_cast: |
785 | 0 | case OpCode::Br_on_cast_fail: { |
786 | 0 | auto Ref = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
787 | 0 | const auto Label = Instr.getBrCast().Jump.TargetIndex; |
788 | 0 | std::array<uint8_t, 16> Buf = {0}; |
789 | 0 | std::copy_n(Instr.getBrCast().RType2.getRawData().cbegin(), 8, |
790 | 0 | Buf.begin()); |
791 | 0 | auto VType = Builder.createExtractElement( |
792 | 0 | Builder.createBitCast(LLVM::Value::getConstVector8(LLContext, Buf), |
793 | 0 | Context.Int64x2Ty), |
794 | 0 | LLContext.getInt64(0)); |
795 | 0 | auto IsRefTest = Builder.createCall( |
796 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefTest, |
797 | 0 | LLVM::Type::getFunctionType( |
798 | 0 | Context.Int32Ty, |
799 | 0 | {Context.Int64x2Ty, Context.Int64Ty}, |
800 | 0 | false)), |
801 | 0 | {Ref, VType}); |
802 | 0 | auto Cond = |
803 | 0 | (Instr.getOpCode() == OpCode::Br_on_cast) |
804 | 0 | ? Builder.createICmpNE(IsRefTest, LLContext.getInt32(0)) |
805 | 0 | : Builder.createICmpEQ(IsRefTest, LLContext.getInt32(0)); |
806 | 0 | setLableJumpPHI(Label); |
807 | 0 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "br_on_cast.end"); |
808 | 0 | Builder.createCondBr(Cond, getLabel(Label), Next); |
809 | 0 | Builder.positionAtEnd(Next); |
810 | 0 | break; |
811 | 0 | } |
812 | 731 | case OpCode::Return: |
813 | 731 | compileReturn(); |
814 | 731 | setUnreachable(); |
815 | 731 | Builder.positionAtEnd( |
816 | 731 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret.end")); |
817 | 731 | break; |
818 | 3.67k | case OpCode::Call: |
819 | 3.67k | updateInstrCount(); |
820 | 3.67k | updateGas(); |
821 | 3.67k | compileCallOp(Instr.getTargetIndex()); |
822 | 3.67k | break; |
823 | 1.04k | case OpCode::Call_indirect: |
824 | 1.04k | updateInstrCount(); |
825 | 1.04k | updateGas(); |
826 | 1.04k | compileIndirectCallOp(Instr.getSourceIndex(), Instr.getTargetIndex()); |
827 | 1.04k | break; |
828 | 63 | case OpCode::Return_call: |
829 | 63 | updateInstrCount(); |
830 | 63 | updateGas(); |
831 | 63 | compileReturnCallOp(Instr.getTargetIndex()); |
832 | 63 | setUnreachable(); |
833 | 63 | Builder.positionAtEnd( |
834 | 63 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret_call.end")); |
835 | 63 | break; |
836 | 104 | case OpCode::Return_call_indirect: |
837 | 104 | updateInstrCount(); |
838 | 104 | updateGas(); |
839 | 104 | compileReturnIndirectCallOp(Instr.getSourceIndex(), |
840 | 104 | Instr.getTargetIndex()); |
841 | 104 | setUnreachable(); |
842 | 104 | Builder.positionAtEnd( |
843 | 104 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret_call_indir.end")); |
844 | 104 | break; |
845 | 11 | case OpCode::Call_ref: |
846 | 11 | updateInstrCount(); |
847 | 11 | updateGas(); |
848 | 11 | compileCallRefOp(Instr.getTargetIndex()); |
849 | 11 | break; |
850 | 2 | case OpCode::Return_call_ref: |
851 | 2 | updateInstrCount(); |
852 | 2 | updateGas(); |
853 | 2 | compileReturnCallRefOp(Instr.getTargetIndex()); |
854 | 2 | setUnreachable(); |
855 | 2 | Builder.positionAtEnd( |
856 | 2 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret_call_ref.end")); |
857 | 2 | break; |
858 | 0 | case OpCode::Try_table: |
859 | | // TODO: EXCEPTION - implement the AOT. |
860 | 0 | return Unexpect(ErrCode::Value::AOTNotImpl); |
861 | | |
862 | | // Reference Instructions |
863 | 5.15k | case OpCode::Ref__null: { |
864 | 5.15k | std::array<uint8_t, 16> Buf = {0}; |
865 | | // For null references, scale the type down to its bottom (null) heap type. |
866 | 5.15k | ValType VType; |
867 | 5.15k | if (Instr.getValType().isAbsHeapType()) { |
868 | 5.09k | switch (Instr.getValType().getHeapTypeCode()) { |
869 | 10 | case TypeCode::NullFuncRef: |
870 | 2.18k | case TypeCode::FuncRef: |
871 | 2.18k | VType = TypeCode::NullFuncRef; |
872 | 2.18k | break; |
873 | 18 | case TypeCode::NullExternRef: |
874 | 2.63k | case TypeCode::ExternRef: |
875 | 2.63k | VType = TypeCode::NullExternRef; |
876 | 2.63k | break; |
877 | 10 | case TypeCode::NullExnRef: |
878 | 23 | case TypeCode::ExnRef: |
879 | 23 | VType = TypeCode::NullExnRef; |
880 | 23 | break; |
881 | 77 | case TypeCode::NullRef: |
882 | 117 | case TypeCode::AnyRef: |
883 | 164 | case TypeCode::EqRef: |
884 | 240 | case TypeCode::I31Ref: |
885 | 251 | case TypeCode::StructRef: |
886 | 262 | case TypeCode::ArrayRef: |
887 | 262 | VType = TypeCode::NullRef; |
888 | 262 | break; |
889 | 0 | default: |
890 | 0 | assumingUnreachable(); |
891 | 5.09k | } |
892 | 5.09k | } else { |
893 | 52 | assuming(Instr.getValType().getTypeIndex() < |
894 | 52 | Context.CompositeTypes.size()); |
895 | 52 | const auto *CompType = |
896 | 52 | Context.CompositeTypes[Instr.getValType().getTypeIndex()]; |
897 | 52 | assuming(CompType != nullptr); |
898 | 52 | if (CompType->isFunc()) { |
899 | 49 | VType = TypeCode::NullFuncRef; |
900 | 49 | } else { |
901 | 3 | VType = TypeCode::NullRef; |
902 | 3 | } |
903 | 52 | } |
904 | 5.15k | std::copy_n(VType.getRawData().cbegin(), 8, Buf.begin()); |
905 | 5.15k | stackPush(Builder.createBitCast( |
906 | 5.15k | LLVM::Value::getConstVector8(LLContext, Buf), Context.Int64x2Ty)); |
907 | 5.15k | break; |
908 | 5.15k | } |
909 | 2.57k | case OpCode::Ref__is_null: |
910 | 2.57k | stackPush(Builder.createZExt( |
911 | 2.57k | Builder.createICmpEQ( |
912 | 2.57k | Builder.createExtractElement( |
913 | 2.57k | Builder.createBitCast(stackPop(), Context.Int64x2Ty), |
914 | 2.57k | LLContext.getInt64(1)), |
915 | 2.57k | LLContext.getInt64(0)), |
916 | 2.57k | Context.Int32Ty)); |
917 | 2.57k | break; |
918 | 30 | case OpCode::Ref__func: |
919 | 30 | stackPush(Builder.createCall( |
920 | 30 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefFunc, |
921 | 30 | LLVM::Type::getFunctionType(Context.Int64x2Ty, |
922 | 30 | {Context.Int32Ty}, |
923 | 30 | false)), |
924 | 30 | {LLContext.getInt32(Instr.getTargetIndex())})); |
925 | 30 | break; |
926 | 0 | case OpCode::Ref__eq: { |
927 | 0 | LLVM::Value RHS = stackPop(); |
928 | 0 | LLVM::Value LHS = stackPop(); |
929 | 0 | stackPush(Builder.createZExt( |
930 | 0 | Builder.createICmpEQ( |
931 | 0 | Builder.createExtractElement(LHS, LLContext.getInt64(1)), |
932 | 0 | Builder.createExtractElement(RHS, LLContext.getInt64(1))), |
933 | 0 | Context.Int32Ty)); |
934 | 0 | break; |
935 | 5.15k | } |
936 | 324 | case OpCode::Ref__as_non_null: { |
937 | 324 | auto Next = |
938 | 324 | LLVM::BasicBlock::create(LLContext, F.Fn, "ref_as_non_null.ok"); |
939 | 324 | Stack.back() = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
940 | 324 | auto IsNotNull = Builder.createLikely(Builder.createICmpNE( |
941 | 324 | Builder.createExtractElement(Stack.back(), LLContext.getInt64(1)), |
942 | 324 | LLContext.getInt64(0))); |
943 | 324 | Builder.createCondBr(IsNotNull, Next, |
944 | 324 | getTrapBB(ErrCode::Value::CastNullToNonNull)); |
945 | 324 | Builder.positionAtEnd(Next); |
946 | 324 | break; |
947 | 5.15k | } |
948 | | |
949 | | // Reference Instructions (GC proposal) |
950 | 0 | case OpCode::Struct__new: |
951 | 1 | case OpCode::Struct__new_default: { |
952 | 1 | LLVM::Value Args = LLVM::Value::getConstPointerNull(Context.Int8PtrTy); |
953 | 1 | assuming(Instr.getTargetIndex() < Context.CompositeTypes.size()); |
954 | 1 | const auto *CompType = Context.CompositeTypes[Instr.getTargetIndex()]; |
955 | 1 | assuming(CompType != nullptr && !CompType->isFunc()); |
956 | 1 | auto ArgSize = CompType->getFieldTypes().size(); |
957 | 1 | if (Instr.getOpCode() == OpCode::Struct__new) { |
958 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize, nullptr); |
959 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
960 | 0 | ArgsVec[ArgSize - I - 1] = stackPop(); |
961 | 0 | } |
962 | 0 | Args = Builder.createArray(ArgSize, kValSize); |
963 | 0 | Builder.createArrayPtrStore(ArgsVec, Args, Context.Int8Ty, kValSize); |
964 | 1 | } else { |
965 | 1 | ArgSize = 0; |
966 | 1 | } |
967 | 1 | stackPush(Builder.createCall( |
968 | 1 | Context.getIntrinsic( |
969 | 1 | Builder, Executable::Intrinsics::kStructNew, |
970 | 1 | LLVM::Type::getFunctionType( |
971 | 1 | Context.Int64x2Ty, |
972 | 1 | {Context.Int32Ty, Context.Int8PtrTy, Context.Int32Ty}, |
973 | 1 | false)), |
974 | 1 | {LLContext.getInt32(Instr.getTargetIndex()), Args, |
975 | 1 | LLContext.getInt32(static_cast<uint32_t>(ArgSize))})); |
976 | 1 | break; |
977 | 1 | } |
978 | 0 | case OpCode::Struct__get: |
979 | 0 | case OpCode::Struct__get_u: |
980 | 0 | case OpCode::Struct__get_s: { |
981 | 0 | assuming(static_cast<size_t>(Instr.getTargetIndex()) < |
982 | 0 | Context.CompositeTypes.size()); |
983 | 0 | const auto *CompType = Context.CompositeTypes[Instr.getTargetIndex()]; |
984 | 0 | assuming(CompType != nullptr && !CompType->isFunc()); |
985 | 0 | assuming(static_cast<size_t>(Instr.getSourceIndex()) < |
986 | 0 | CompType->getFieldTypes().size()); |
987 | 0 | const auto &StorageType = |
988 | 0 | CompType->getFieldTypes()[Instr.getSourceIndex()].getStorageType(); |
989 | 0 | auto Ref = stackPop(); |
990 | 0 | auto IsSigned = (Instr.getOpCode() == OpCode::Struct__get_s) |
991 | 0 | ? LLContext.getInt8(1) |
992 | 0 | : LLContext.getInt8(0); |
993 | 0 | LLVM::Value Ret = Builder.createAlloca(Context.Int64x2Ty); |
994 | 0 | Builder.createCall( |
995 | 0 | Context.getIntrinsic( |
996 | 0 | Builder, Executable::Intrinsics::kStructGet, |
997 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
998 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
999 | 0 | Context.Int32Ty, Context.Int8Ty, |
1000 | 0 | Context.Int8PtrTy}, |
1001 | 0 | false)), |
1002 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), |
1003 | 0 | LLContext.getInt32(Instr.getSourceIndex()), IsSigned, Ret}); |
1004 | |
1005 | 0 | switch (StorageType.getCode()) { |
1006 | 0 | case TypeCode::I8: |
1007 | 0 | case TypeCode::I16: |
1008 | 0 | case TypeCode::I32: { |
1009 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int32Ty, Ret, |
1010 | 0 | Context.Int64x2Ty)); |
1011 | 0 | break; |
1012 | 0 | } |
1013 | 0 | case TypeCode::I64: { |
1014 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64Ty, Ret, |
1015 | 0 | Context.Int64x2Ty)); |
1016 | 0 | break; |
1017 | 0 | } |
1018 | 0 | case TypeCode::F32: { |
1019 | 0 | stackPush(Builder.createValuePtrLoad(Context.FloatTy, Ret, |
1020 | 0 | Context.Int64x2Ty)); |
1021 | 0 | break; |
1022 | 0 | } |
1023 | 0 | case TypeCode::F64: { |
1024 | 0 | stackPush(Builder.createValuePtrLoad(Context.DoubleTy, Ret, |
1025 | 0 | Context.Int64x2Ty)); |
1026 | 0 | break; |
1027 | 0 | } |
1028 | 0 | case TypeCode::V128: |
1029 | 0 | case TypeCode::Ref: |
1030 | 0 | case TypeCode::RefNull: { |
1031 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64x2Ty, Ret, |
1032 | 0 | Context.Int64x2Ty)); |
1033 | 0 | break; |
1034 | 0 | } |
1035 | 0 | default: |
1036 | 0 | assumingUnreachable(); |
1037 | 0 | } |
1038 | 0 | break; |
1039 | 0 | } |
1040 | 0 | case OpCode::Struct__set: { |
1041 | 0 | auto Val = stackPop(); |
1042 | 0 | auto Ref = stackPop(); |
1043 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1044 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1045 | 0 | Builder.createCall( |
1046 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kStructSet, |
1047 | 0 | LLVM::Type::getFunctionType( |
1048 | 0 | Context.VoidTy, |
1049 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1050 | 0 | Context.Int32Ty, Context.Int8PtrTy}, |
1051 | 0 | false)), |
1052 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), |
1053 | 0 | LLContext.getInt32(Instr.getSourceIndex()), Arg}); |
1054 | 0 | break; |
1055 | 0 | } |
1056 | 10 | case OpCode::Array__new: { |
1057 | 10 | auto Length = stackPop(); |
1058 | 10 | auto Val = stackPop(); |
1059 | 10 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1060 | 10 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1061 | 10 | stackPush(Builder.createCall( |
1062 | 10 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArrayNew, |
1063 | 10 | LLVM::Type::getFunctionType( |
1064 | 10 | Context.Int64x2Ty, |
1065 | 10 | {Context.Int32Ty, Context.Int32Ty, |
1066 | 10 | Context.Int8PtrTy, Context.Int32Ty}, |
1067 | 10 | false)), |
1068 | 10 | {LLContext.getInt32(Instr.getTargetIndex()), Length, Arg, |
1069 | 10 | LLContext.getInt32(1)})); |
1070 | 10 | break; |
1071 | 0 | } |
1072 | 13 | case OpCode::Array__new_default: { |
1073 | 13 | auto Length = stackPop(); |
1074 | 13 | LLVM::Value Arg = LLVM::Value::getConstPointerNull(Context.Int8PtrTy); |
1075 | 13 | stackPush(Builder.createCall( |
1076 | 13 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArrayNew, |
1077 | 13 | LLVM::Type::getFunctionType( |
1078 | 13 | Context.Int64x2Ty, |
1079 | 13 | {Context.Int32Ty, Context.Int32Ty, |
1080 | 13 | Context.Int8PtrTy, Context.Int32Ty}, |
1081 | 13 | false)), |
1082 | 13 | {LLContext.getInt32(Instr.getTargetIndex()), Length, Arg, |
1083 | 13 | LLContext.getInt32(0)})); |
1084 | 13 | break; |
1085 | 0 | } |
1086 | 23 | case OpCode::Array__new_fixed: { |
1087 | 23 | const auto ArgSize = Instr.getSourceIndex(); |
1088 | 23 | std::vector<LLVM::Value> ArgsVec(ArgSize, nullptr); |
1089 | 92 | for (size_t I = 0; I < ArgSize; ++I) { |
1090 | 69 | ArgsVec[ArgSize - I - 1] = stackPop(); |
1091 | 69 | } |
1092 | 23 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
1093 | 23 | Builder.createArrayPtrStore(ArgsVec, Args, Context.Int8Ty, kValSize); |
1094 | 23 | stackPush(Builder.createCall( |
1095 | 23 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArrayNew, |
1096 | 23 | LLVM::Type::getFunctionType( |
1097 | 23 | Context.Int64x2Ty, |
1098 | 23 | {Context.Int32Ty, Context.Int32Ty, |
1099 | 23 | Context.Int8PtrTy, Context.Int32Ty}, |
1100 | 23 | false)), |
1101 | 23 | {LLContext.getInt32(Instr.getTargetIndex()), |
1102 | 23 | LLContext.getInt32(ArgSize), Args, LLContext.getInt32(ArgSize)})); |
1103 | 23 | break; |
1104 | 0 | } |
1105 | 0 | case OpCode::Array__new_data: |
1106 | 0 | case OpCode::Array__new_elem: { |
1107 | 0 | auto Length = stackPop(); |
1108 | 0 | auto Start = stackPop(); |
1109 | 0 | stackPush(Builder.createCall( |
1110 | 0 | Context.getIntrinsic( |
1111 | 0 | Builder, |
1112 | 0 | ((Instr.getOpCode() == OpCode::Array__new_data) |
1113 | 0 | ? Executable::Intrinsics::kArrayNewData |
1114 | 0 | : Executable::Intrinsics::kArrayNewElem), |
1115 | 0 | LLVM::Type::getFunctionType(Context.Int64x2Ty, |
1116 | 0 | {Context.Int32Ty, Context.Int32Ty, |
1117 | 0 | Context.Int32Ty, Context.Int32Ty}, |
1118 | 0 | false)), |
1119 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), |
1120 | 0 | LLContext.getInt32(Instr.getSourceIndex()), Start, Length})); |
1121 | 0 | break; |
1122 | 0 | } |
1123 | 0 | case OpCode::Array__get: |
1124 | 29 | case OpCode::Array__get_u: |
1125 | 49 | case OpCode::Array__get_s: { |
1126 | 49 | assuming(static_cast<size_t>(Instr.getTargetIndex()) < |
1127 | 49 | Context.CompositeTypes.size()); |
1128 | 49 | const auto *CompType = Context.CompositeTypes[Instr.getTargetIndex()]; |
1129 | 49 | assuming(CompType != nullptr && !CompType->isFunc()); |
1130 | 49 | assuming(static_cast<size_t>(1) == CompType->getFieldTypes().size()); |
1131 | 49 | const auto &StorageType = CompType->getFieldTypes()[0].getStorageType(); |
1132 | 49 | auto Idx = stackPop(); |
1133 | 49 | auto Ref = stackPop(); |
1134 | 49 | auto IsSigned = (Instr.getOpCode() == OpCode::Array__get_s) |
1135 | 49 | ? LLContext.getInt8(1) |
1136 | 49 | : LLContext.getInt8(0); |
1137 | 49 | LLVM::Value Ret = Builder.createAlloca(Context.Int64x2Ty); |
1138 | 49 | Builder.createCall( |
1139 | 49 | Context.getIntrinsic( |
1140 | 49 | Builder, Executable::Intrinsics::kArrayGet, |
1141 | 49 | LLVM::Type::getFunctionType(Context.VoidTy, |
1142 | 49 | {Context.Int64x2Ty, Context.Int32Ty, |
1143 | 49 | Context.Int32Ty, Context.Int8Ty, |
1144 | 49 | Context.Int8PtrTy}, |
1145 | 49 | false)), |
1146 | 49 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), Idx, IsSigned, |
1147 | 49 | Ret}); |
1148 | | |
1149 | 49 | switch (StorageType.getCode()) { |
1150 | 30 | case TypeCode::I8: |
1151 | 49 | case TypeCode::I16: |
1152 | 49 | case TypeCode::I32: { |
1153 | 49 | stackPush(Builder.createValuePtrLoad(Context.Int32Ty, Ret, |
1154 | 49 | Context.Int64x2Ty)); |
1155 | 49 | break; |
1156 | 49 | } |
1157 | 0 | case TypeCode::I64: { |
1158 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64Ty, Ret, |
1159 | 0 | Context.Int64x2Ty)); |
1160 | 0 | break; |
1161 | 49 | } |
1162 | 0 | case TypeCode::F32: { |
1163 | 0 | stackPush(Builder.createValuePtrLoad(Context.FloatTy, Ret, |
1164 | 0 | Context.Int64x2Ty)); |
1165 | 0 | break; |
1166 | 49 | } |
1167 | 0 | case TypeCode::F64: { |
1168 | 0 | stackPush(Builder.createValuePtrLoad(Context.DoubleTy, Ret, |
1169 | 0 | Context.Int64x2Ty)); |
1170 | 0 | break; |
1171 | 49 | } |
1172 | 0 | case TypeCode::V128: |
1173 | 0 | case TypeCode::Ref: |
1174 | 0 | case TypeCode::RefNull: { |
1175 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64x2Ty, Ret, |
1176 | 0 | Context.Int64x2Ty)); |
1177 | 0 | break; |
1178 | 0 | } |
1179 | 0 | default: |
1180 | 0 | assumingUnreachable(); |
1181 | 49 | } |
1182 | 49 | break; |
1183 | 49 | } |
1184 | 49 | case OpCode::Array__set: { |
1185 | 0 | auto Val = stackPop(); |
1186 | 0 | auto Idx = stackPop(); |
1187 | 0 | auto Ref = stackPop(); |
1188 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1189 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1190 | 0 | Builder.createCall( |
1191 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArraySet, |
1192 | 0 | LLVM::Type::getFunctionType( |
1193 | 0 | Context.VoidTy, |
1194 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1195 | 0 | Context.Int32Ty, Context.Int8PtrTy}, |
1196 | 0 | false)), |
1197 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), Idx, Arg}); |
1198 | 0 | break; |
1199 | 49 | } |
1200 | 1 | case OpCode::Array__len: { |
1201 | 1 | auto Ref = stackPop(); |
1202 | 1 | stackPush(Builder.createCall( |
1203 | 1 | Context.getIntrinsic( |
1204 | 1 | Builder, Executable::Intrinsics::kArrayLen, |
1205 | 1 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1206 | 1 | {Context.Int64x2Ty}, false)), |
1207 | 1 | {Ref})); |
1208 | 1 | break; |
1209 | 49 | } |
1210 | 0 | case OpCode::Array__fill: { |
1211 | 0 | auto Cnt = stackPop(); |
1212 | 0 | auto Val = stackPop(); |
1213 | 0 | auto Off = stackPop(); |
1214 | 0 | auto Ref = stackPop(); |
1215 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1216 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1217 | 0 | Builder.createCall( |
1218 | 0 | Context.getIntrinsic( |
1219 | 0 | Builder, Executable::Intrinsics::kArrayFill, |
1220 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1221 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1222 | 0 | Context.Int32Ty, Context.Int32Ty, |
1223 | 0 | Context.Int8PtrTy}, |
1224 | 0 | false)), |
1225 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), Off, Cnt, Arg}); |
1226 | 0 | break; |
1227 | 49 | } |
1228 | 0 | case OpCode::Array__copy: { |
1229 | 0 | auto Cnt = stackPop(); |
1230 | 0 | auto SrcOff = stackPop(); |
1231 | 0 | auto SrcRef = stackPop(); |
1232 | 0 | auto DstOff = stackPop(); |
1233 | 0 | auto DstRef = stackPop(); |
1234 | 0 | Builder.createCall( |
1235 | 0 | Context.getIntrinsic( |
1236 | 0 | Builder, Executable::Intrinsics::kArrayCopy, |
1237 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1238 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1239 | 0 | Context.Int32Ty, Context.Int64x2Ty, |
1240 | 0 | Context.Int32Ty, Context.Int32Ty, |
1241 | 0 | Context.Int32Ty}, |
1242 | 0 | false)), |
1243 | 0 | {DstRef, LLContext.getInt32(Instr.getTargetIndex()), DstOff, SrcRef, |
1244 | 0 | LLContext.getInt32(Instr.getSourceIndex()), SrcOff, Cnt}); |
1245 | 0 | break; |
1246 | 49 | } |
1247 | 0 | case OpCode::Array__init_data: |
1248 | 0 | case OpCode::Array__init_elem: { |
1249 | 0 | auto Cnt = stackPop(); |
1250 | 0 | auto SrcOff = stackPop(); |
1251 | 0 | auto DstOff = stackPop(); |
1252 | 0 | auto Ref = stackPop(); |
1253 | 0 | Builder.createCall( |
1254 | 0 | Context.getIntrinsic( |
1255 | 0 | Builder, |
1256 | 0 | ((Instr.getOpCode() == OpCode::Array__init_data) |
1257 | 0 | ? Executable::Intrinsics::kArrayInitData |
1258 | 0 | : Executable::Intrinsics::kArrayInitElem), |
1259 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1260 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1261 | 0 | Context.Int32Ty, Context.Int32Ty, |
1262 | 0 | Context.Int32Ty, Context.Int32Ty}, |
1263 | 0 | false)), |
1264 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), |
1265 | 0 | LLContext.getInt32(Instr.getSourceIndex()), DstOff, SrcOff, Cnt}); |
1266 | 0 | break; |
1267 | 0 | } |
1268 | 8 | case OpCode::Ref__test: |
1269 | 14 | case OpCode::Ref__test_null: { |
1270 | 14 | auto Ref = stackPop(); |
1271 | 14 | std::array<uint8_t, 16> Buf = {0}; |
1272 | 14 | std::copy_n(Instr.getValType().getRawData().cbegin(), 8, Buf.begin()); |
1273 | 14 | auto VType = Builder.createExtractElement( |
1274 | 14 | Builder.createBitCast(LLVM::Value::getConstVector8(LLContext, Buf), |
1275 | 14 | Context.Int64x2Ty), |
1276 | 14 | LLContext.getInt64(0)); |
1277 | 14 | stackPush(Builder.createCall( |
1278 | 14 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefTest, |
1279 | 14 | LLVM::Type::getFunctionType( |
1280 | 14 | Context.Int32Ty, |
1281 | 14 | {Context.Int64x2Ty, Context.Int64Ty}, |
1282 | 14 | false)), |
1283 | 14 | {Ref, VType})); |
1284 | 14 | break; |
1285 | 8 | } |
1286 | 10 | case OpCode::Ref__cast: |
1287 | 13 | case OpCode::Ref__cast_null: { |
1288 | 13 | auto Ref = stackPop(); |
1289 | 13 | std::array<uint8_t, 16> Buf = {0}; |
1290 | 13 | std::copy_n(Instr.getValType().getRawData().cbegin(), 8, Buf.begin()); |
1291 | 13 | auto VType = Builder.createExtractElement( |
1292 | 13 | Builder.createBitCast(LLVM::Value::getConstVector8(LLContext, Buf), |
1293 | 13 | Context.Int64x2Ty), |
1294 | 13 | LLContext.getInt64(0)); |
1295 | 13 | stackPush(Builder.createCall( |
1296 | 13 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefCast, |
1297 | 13 | LLVM::Type::getFunctionType( |
1298 | 13 | Context.Int64x2Ty, |
1299 | 13 | {Context.Int64x2Ty, Context.Int64Ty}, |
1300 | 13 | false)), |
1301 | 13 | {Ref, VType})); |
1302 | 13 | break; |
1303 | 10 | } |
1304 | 1 | case OpCode::Any__convert_extern: { |
1305 | 1 | std::array<uint8_t, 16> RawRef = {0}; |
1306 | 1 | auto Ref = stackPop(); |
1307 | 1 | auto PtrVal = Builder.createExtractElement(Ref, LLContext.getInt64(1)); |
1308 | 1 | auto IsNullBB = |
1309 | 1 | LLVM::BasicBlock::create(LLContext, F.Fn, "any_conv_extern.null"); |
1310 | 1 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, |
1311 | 1 | "any_conv_extern.not_null"); |
1312 | 1 | auto IsExtrefBB = LLVM::BasicBlock::create(LLContext, F.Fn, |
1313 | 1 | "any_conv_extern.is_extref"); |
1314 | 1 | auto EndBB = |
1315 | 1 | LLVM::BasicBlock::create(LLContext, F.Fn, "any_conv_extern.end"); |
1316 | 1 | auto CondIsNull = Builder.createICmpEQ(PtrVal, LLContext.getInt64(0)); |
1317 | 1 | Builder.createCondBr(CondIsNull, IsNullBB, NotNullBB); |
1318 | | |
1319 | 1 | Builder.positionAtEnd(IsNullBB); |
1320 | 1 | auto VT = ValType(TypeCode::RefNull, TypeCode::NullRef); |
1321 | 1 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1322 | 1 | auto Ret1 = Builder.createBitCast( |
1323 | 1 | LLVM::Value::getConstVector8(LLContext, RawRef), Context.Int64x2Ty); |
1324 | 1 | Builder.createBr(EndBB); |
1325 | | |
1326 | 1 | Builder.positionAtEnd(NotNullBB); |
1327 | 1 | auto Ret2 = Builder.createBitCast( |
1328 | 1 | Builder.createInsertElement( |
1329 | 1 | Builder.createBitCast(Ref, Context.Int8x16Ty), |
1330 | 1 | LLContext.getInt8(0), LLContext.getInt64(1)), |
1331 | 1 | Context.Int64x2Ty); |
1332 | 1 | auto HType = Builder.createExtractElement( |
1333 | 1 | Builder.createBitCast(Ret2, Context.Int8x16Ty), |
1334 | 1 | LLContext.getInt64(3)); |
1335 | 1 | auto CondIsExtref = Builder.createOr( |
1336 | 1 | Builder.createICmpEQ(HType, LLContext.getInt8(static_cast<uint8_t>( |
1337 | 1 | TypeCode::ExternRef))), |
1338 | 1 | Builder.createICmpEQ(HType, LLContext.getInt8(static_cast<uint8_t>( |
1339 | 1 | TypeCode::NullExternRef)))); |
1340 | 1 | Builder.createCondBr(CondIsExtref, IsExtrefBB, EndBB); |
1341 | | |
1342 | 1 | Builder.positionAtEnd(IsExtrefBB); |
1343 | 1 | VT = ValType(TypeCode::Ref, TypeCode::AnyRef); |
1344 | 1 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1345 | 1 | auto Ret3 = Builder.createInsertElement( |
1346 | 1 | Builder.createBitCast( |
1347 | 1 | LLVM::Value::getConstVector8(LLContext, RawRef), |
1348 | 1 | Context.Int64x2Ty), |
1349 | 1 | PtrVal, LLContext.getInt64(1)); |
1350 | 1 | Builder.createBr(EndBB); |
1351 | | |
1352 | 1 | Builder.positionAtEnd(EndBB); |
1353 | 1 | auto Ret = Builder.createPHI(Context.Int64x2Ty); |
1354 | 1 | Ret.addIncoming(Ret1, IsNullBB); |
1355 | 1 | Ret.addIncoming(Ret2, NotNullBB); |
1356 | 1 | Ret.addIncoming(Ret3, IsExtrefBB); |
1357 | 1 | stackPush(Ret); |
1358 | 1 | break; |
1359 | 10 | } |
1360 | 1 | case OpCode::Extern__convert_any: { |
1361 | 1 | std::array<uint8_t, 16> RawRef = {0}; |
1362 | 1 | auto Ref = stackPop(); |
1363 | 1 | auto IsNullBB = |
1364 | 1 | LLVM::BasicBlock::create(LLContext, F.Fn, "extern_conv_any.null"); |
1365 | 1 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, |
1366 | 1 | "extern_conv_any.not_null"); |
1367 | 1 | auto EndBB = |
1368 | 1 | LLVM::BasicBlock::create(LLContext, F.Fn, "extern_conv_any.end"); |
1369 | 1 | auto CondIsNull = Builder.createICmpEQ( |
1370 | 1 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
1371 | 1 | LLContext.getInt64(0)); |
1372 | 1 | Builder.createCondBr(CondIsNull, IsNullBB, NotNullBB); |
1373 | | |
1374 | 1 | Builder.positionAtEnd(IsNullBB); |
1375 | 1 | auto VT = ValType(TypeCode::RefNull, TypeCode::NullExternRef); |
1376 | 1 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1377 | 1 | auto Ret1 = Builder.createBitCast( |
1378 | 1 | LLVM::Value::getConstVector8(LLContext, RawRef), Context.Int64x2Ty); |
1379 | 1 | Builder.createBr(EndBB); |
1380 | | |
1381 | 1 | Builder.positionAtEnd(NotNullBB); |
1382 | 1 | auto Ret2 = Builder.createBitCast( |
1383 | 1 | Builder.createInsertElement( |
1384 | 1 | Builder.createBitCast(Ref, Context.Int8x16Ty), |
1385 | 1 | LLContext.getInt8(1), LLContext.getInt64(1)), |
1386 | 1 | Context.Int64x2Ty); |
1387 | 1 | Builder.createBr(EndBB); |
1388 | | |
1389 | 1 | Builder.positionAtEnd(EndBB); |
1390 | 1 | auto Ret = Builder.createPHI(Context.Int64x2Ty); |
1391 | 1 | Ret.addIncoming(Ret1, IsNullBB); |
1392 | 1 | Ret.addIncoming(Ret2, NotNullBB); |
1393 | 1 | stackPush(Ret); |
1394 | 1 | break; |
1395 | 10 | } |
1396 | 44 | case OpCode::Ref__i31: { |
1397 | 44 | std::array<uint8_t, 16> RawRef = {0}; |
1398 | 44 | auto VT = ValType(TypeCode::Ref, TypeCode::I31Ref); |
1399 | 44 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1400 | 44 | auto Ref = Builder.createBitCast( |
1401 | 44 | LLVM::Value::getConstVector8(LLContext, RawRef), Context.Int64x2Ty); |
1402 | 44 | auto Val = Builder.createZExt( |
1403 | 44 | Builder.createOr( |
1404 | 44 | Builder.createAnd(stackPop(), LLContext.getInt32(0x7FFFFFFFU)), |
1405 | 44 | LLContext.getInt32(0x80000000U)), |
1406 | 44 | Context.Int64Ty); |
1407 | 44 | stackPush(Builder.createInsertElement(Ref, Val, LLContext.getInt64(1))); |
1408 | 44 | break; |
1409 | 10 | } |
1410 | 10 | case OpCode::I31__get_s: { |
1411 | 10 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "i31.get.ok"); |
1412 | 10 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
1413 | 10 | auto Val = Builder.createTrunc( |
1414 | 10 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
1415 | 10 | Context.Int32Ty); |
1416 | 10 | auto IsNotNull = Builder.createLikely(Builder.createICmpNE( |
1417 | 10 | Builder.createAnd(Val, LLContext.getInt32(0x80000000U)), |
1418 | 10 | LLContext.getInt32(0))); |
1419 | 10 | Builder.createCondBr(IsNotNull, Next, |
1420 | 10 | getTrapBB(ErrCode::Value::AccessNullI31)); |
1421 | 10 | Builder.positionAtEnd(Next); |
1422 | 10 | Val = Builder.createAnd(Val, LLContext.getInt32(0x7FFFFFFFU)); |
1423 | 10 | stackPush(Builder.createOr( |
1424 | 10 | Val, Builder.createShl( |
1425 | 10 | Builder.createAnd(Val, LLContext.getInt32(0x40000000U)), |
1426 | 10 | LLContext.getInt32(1)))); |
1427 | 10 | break; |
1428 | 10 | } |
1429 | 10 | case OpCode::I31__get_u: { |
1430 | 10 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "i31.get.ok"); |
1431 | 10 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
1432 | 10 | auto Val = Builder.createTrunc( |
1433 | 10 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
1434 | 10 | Context.Int32Ty); |
1435 | 10 | auto IsNotNull = Builder.createLikely(Builder.createICmpNE( |
1436 | 10 | Builder.createAnd(Val, LLContext.getInt32(0x80000000U)), |
1437 | 10 | LLContext.getInt32(0))); |
1438 | 10 | Builder.createCondBr(IsNotNull, Next, |
1439 | 10 | getTrapBB(ErrCode::Value::AccessNullI31)); |
1440 | 10 | Builder.positionAtEnd(Next); |
1441 | 10 | stackPush(Builder.createAnd(Val, LLContext.getInt32(0x7FFFFFFFU))); |
1442 | 10 | break; |
1443 | 10 | } |
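The i31 cases above keep the 31-bit payload in the low bits of the reference's second 64-bit lane and use bit 31 as a non-null tag; i31.get_s then rebuilds the sign by copying bit 30 into bit 31. A minimal scalar sketch of that encoding, using hypothetical helper names and assuming the lane has already been truncated to 32 bits:

#include <cstdint>

// Hypothetical mirror of the bit manipulation emitted above.
inline uint32_t encodeI31(uint32_t X) {
  return (X & 0x7FFFFFFFU) | 0x80000000U; // keep 31 bits, set the non-null tag
}
inline int32_t i31GetS(uint32_t Lane) {
  uint32_t V = Lane & 0x7FFFFFFFU;
  // Copy bit 30 into bit 31 to sign-extend the 31-bit payload to 32 bits.
  return static_cast<int32_t>(V | ((V & 0x40000000U) << 1));
}
inline uint32_t i31GetU(uint32_t Lane) {
  return Lane & 0x7FFFFFFFU;
}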
1444 | | |
1445 | | // Parametric Instructions |
1446 | 3.36k | case OpCode::Drop: |
1447 | 3.36k | stackPop(); |
1448 | 3.36k | break; |
1449 | 694 | case OpCode::Select: |
1450 | 1.13k | case OpCode::Select_t: { |
1451 | 1.13k | auto Cond = Builder.createICmpNE(stackPop(), LLContext.getInt32(0)); |
1452 | 1.13k | auto False = stackPop(); |
1453 | 1.13k | auto True = stackPop(); |
1454 | 1.13k | stackPush(Builder.createSelect(Cond, True, False)); |
1455 | 1.13k | break; |
1456 | 694 | } |
1457 | | |
1458 | | // Variable Instructions |
1459 | 11.6k | case OpCode::Local__get: { |
1460 | 11.6k | const auto &L = Local[Instr.getTargetIndex()]; |
1461 | 11.6k | stackPush(Builder.createLoad(L.first, L.second)); |
1462 | 11.6k | break; |
1463 | 694 | } |
1464 | 4.65k | case OpCode::Local__set: |
1465 | 4.65k | Builder.createStore(stackPop(), Local[Instr.getTargetIndex()].second); |
1466 | 4.65k | break; |
1467 | 780 | case OpCode::Local__tee: |
1468 | 780 | Builder.createStore(Stack.back(), Local[Instr.getTargetIndex()].second); |
1469 | 780 | break; |
1470 | 382 | case OpCode::Global__get: { |
1471 | 382 | const auto G = |
1472 | 382 | Context.getGlobal(Builder, ExecCtx, Instr.getTargetIndex()); |
1473 | 382 | stackPush(Builder.createLoad(G.first, G.second)); |
1474 | 382 | break; |
1475 | 694 | } |
1476 | 62 | case OpCode::Global__set: |
1477 | 62 | Builder.createStore( |
1478 | 62 | stackPop(), |
1479 | 62 | Context.getGlobal(Builder, ExecCtx, Instr.getTargetIndex()).second); |
1480 | 62 | break; |
1481 | | |
1482 | | // Table Instructions |
1483 | 30 | case OpCode::Table__get: { |
1484 | 30 | auto Idx = stackPop(); |
1485 | 30 | stackPush(Builder.createCall( |
1486 | 30 | Context.getIntrinsic( |
1487 | 30 | Builder, Executable::Intrinsics::kTableGet, |
1488 | 30 | LLVM::Type::getFunctionType(Context.Int64x2Ty, |
1489 | 30 | {Context.Int32Ty, Context.Int32Ty}, |
1490 | 30 | false)), |
1491 | 30 | {LLContext.getInt32(Instr.getTargetIndex()), Idx})); |
1492 | 30 | break; |
1493 | 694 | } |
1494 | 23 | case OpCode::Table__set: { |
1495 | 23 | auto Ref = stackPop(); |
1496 | 23 | auto Idx = stackPop(); |
1497 | 23 | Builder.createCall( |
1498 | 23 | Context.getIntrinsic( |
1499 | 23 | Builder, Executable::Intrinsics::kTableSet, |
1500 | 23 | LLVM::Type::getFunctionType( |
1501 | 23 | Context.Int64Ty, |
1502 | 23 | {Context.Int32Ty, Context.Int32Ty, Context.Int64x2Ty}, |
1503 | 23 | false)), |
1504 | 23 | {LLContext.getInt32(Instr.getTargetIndex()), Idx, Ref}); |
1505 | 23 | break; |
1506 | 694 | } |
1507 | 24 | case OpCode::Table__init: { |
1508 | 24 | auto Len = stackPop(); |
1509 | 24 | auto Src = stackPop(); |
1510 | 24 | auto Dst = stackPop(); |
1511 | 24 | Builder.createCall( |
1512 | 24 | Context.getIntrinsic( |
1513 | 24 | Builder, Executable::Intrinsics::kTableInit, |
1514 | 24 | LLVM::Type::getFunctionType(Context.VoidTy, |
1515 | 24 | {Context.Int32Ty, Context.Int32Ty, |
1516 | 24 | Context.Int32Ty, Context.Int32Ty, |
1517 | 24 | Context.Int32Ty}, |
1518 | 24 | false)), |
1519 | 24 | {LLContext.getInt32(Instr.getTargetIndex()), |
1520 | 24 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1521 | 24 | break; |
1522 | 694 | } |
1523 | 33 | case OpCode::Elem__drop: { |
1524 | 33 | Builder.createCall( |
1525 | 33 | Context.getIntrinsic(Builder, Executable::Intrinsics::kElemDrop, |
1526 | 33 | LLVM::Type::getFunctionType( |
1527 | 33 | Context.VoidTy, {Context.Int32Ty}, false)), |
1528 | 33 | {LLContext.getInt32(Instr.getTargetIndex())}); |
1529 | 33 | break; |
1530 | 694 | } |
1531 | 16 | case OpCode::Table__copy: { |
1532 | 16 | auto Len = stackPop(); |
1533 | 16 | auto Src = stackPop(); |
1534 | 16 | auto Dst = stackPop(); |
1535 | 16 | Builder.createCall( |
1536 | 16 | Context.getIntrinsic( |
1537 | 16 | Builder, Executable::Intrinsics::kTableCopy, |
1538 | 16 | LLVM::Type::getFunctionType(Context.VoidTy, |
1539 | 16 | {Context.Int32Ty, Context.Int32Ty, |
1540 | 16 | Context.Int32Ty, Context.Int32Ty, |
1541 | 16 | Context.Int32Ty}, |
1542 | 16 | false)), |
1543 | 16 | {LLContext.getInt32(Instr.getTargetIndex()), |
1544 | 16 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1545 | 16 | break; |
1546 | 694 | } |
1547 | 16 | case OpCode::Table__grow: { |
1548 | 16 | auto NewSize = stackPop(); |
1549 | 16 | auto Val = stackPop(); |
1550 | 16 | stackPush(Builder.createCall( |
1551 | 16 | Context.getIntrinsic( |
1552 | 16 | Builder, Executable::Intrinsics::kTableGrow, |
1553 | 16 | LLVM::Type::getFunctionType( |
1554 | 16 | Context.Int32Ty, |
1555 | 16 | {Context.Int32Ty, Context.Int64x2Ty, Context.Int32Ty}, |
1556 | 16 | false)), |
1557 | 16 | {LLContext.getInt32(Instr.getTargetIndex()), Val, NewSize})); |
1558 | 16 | break; |
1559 | 694 | } |
1560 | 17 | case OpCode::Table__size: { |
1561 | 17 | stackPush(Builder.createCall( |
1562 | 17 | Context.getIntrinsic(Builder, Executable::Intrinsics::kTableSize, |
1563 | 17 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1564 | 17 | {Context.Int32Ty}, |
1565 | 17 | false)), |
1566 | 17 | {LLContext.getInt32(Instr.getTargetIndex())})); |
1567 | 17 | break; |
1568 | 694 | } |
1569 | 3 | case OpCode::Table__fill: { |
1570 | 3 | auto Len = stackPop(); |
1571 | 3 | auto Val = stackPop(); |
1572 | 3 | auto Off = stackPop(); |
1573 | 3 | Builder.createCall( |
1574 | 3 | Context.getIntrinsic(Builder, Executable::Intrinsics::kTableFill, |
1575 | 3 | LLVM::Type::getFunctionType( |
1576 | 3 | Context.Int32Ty, |
1577 | 3 | {Context.Int32Ty, Context.Int32Ty, |
1578 | 3 | Context.Int64x2Ty, Context.Int32Ty}, |
1579 | 3 | false)), |
1580 | 3 | {LLContext.getInt32(Instr.getTargetIndex()), Off, Val, Len}); |
1581 | 3 | break; |
1582 | 694 | } |
1583 | | |
1584 | | // Memory Instructions |
1585 | 1.33k | case OpCode::I32__load: |
1586 | 1.33k | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1587 | 1.33k | Instr.getMemoryAlign(), Context.Int32Ty); |
1588 | 1.33k | break; |
1589 | 3.99k | case OpCode::I64__load: |
1590 | 3.99k | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1591 | 3.99k | Instr.getMemoryAlign(), Context.Int64Ty); |
1592 | 3.99k | break; |
1593 | 110 | case OpCode::F32__load: |
1594 | 110 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1595 | 110 | Instr.getMemoryAlign(), Context.FloatTy); |
1596 | 110 | break; |
1597 | 232 | case OpCode::F64__load: |
1598 | 232 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1599 | 232 | Instr.getMemoryAlign(), Context.DoubleTy); |
1600 | 232 | break; |
1601 | 667 | case OpCode::I32__load8_s: |
1602 | 667 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1603 | 667 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int32Ty, |
1604 | 667 | true); |
1605 | 667 | break; |
1606 | 207 | case OpCode::I32__load8_u: |
1607 | 207 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1608 | 207 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int32Ty, |
1609 | 207 | false); |
1610 | 207 | break; |
1611 | 350 | case OpCode::I32__load16_s: |
1612 | 350 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1613 | 350 | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int32Ty, |
1614 | 350 | true); |
1615 | 350 | break; |
1616 | 1.60k | case OpCode::I32__load16_u: |
1617 | 1.60k | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1618 | 1.60k | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int32Ty, |
1619 | 1.60k | false); |
1620 | 1.60k | break; |
1621 | 748 | case OpCode::I64__load8_s: |
1622 | 748 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1623 | 748 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int64Ty, |
1624 | 748 | true); |
1625 | 748 | break; |
1626 | 467 | case OpCode::I64__load8_u: |
1627 | 467 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1628 | 467 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int64Ty, |
1629 | 467 | false); |
1630 | 467 | break; |
1631 | 426 | case OpCode::I64__load16_s: |
1632 | 426 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1633 | 426 | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int64Ty, |
1634 | 426 | true); |
1635 | 426 | break; |
1636 | 640 | case OpCode::I64__load16_u: |
1637 | 640 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1638 | 640 | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int64Ty, |
1639 | 640 | false); |
1640 | 640 | break; |
1641 | 439 | case OpCode::I64__load32_s: |
1642 | 439 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1643 | 439 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int64Ty, |
1644 | 439 | true); |
1645 | 439 | break; |
1646 | 541 | case OpCode::I64__load32_u: |
1647 | 541 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1648 | 541 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int64Ty, |
1649 | 541 | false); |
1650 | 541 | break; |
1651 | 457 | case OpCode::I32__store: |
1652 | 457 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1653 | 457 | Instr.getMemoryAlign(), Context.Int32Ty); |
1654 | 457 | break; |
1655 | 1.48k | case OpCode::I64__store: |
1656 | 1.48k | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1657 | 1.48k | Instr.getMemoryAlign(), Context.Int64Ty); |
1658 | 1.48k | break; |
1659 | 69 | case OpCode::F32__store: |
1660 | 69 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1661 | 69 | Instr.getMemoryAlign(), Context.FloatTy); |
1662 | 69 | break; |
1663 | 48 | case OpCode::F64__store: |
1664 | 48 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1665 | 48 | Instr.getMemoryAlign(), Context.DoubleTy); |
1666 | 48 | break; |
1667 | 339 | case OpCode::I32__store8: |
1668 | 360 | case OpCode::I64__store8: |
1669 | 360 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1670 | 360 | Instr.getMemoryAlign(), Context.Int8Ty, true); |
1671 | 360 | break; |
1672 | 220 | case OpCode::I32__store16: |
1673 | 292 | case OpCode::I64__store16: |
1674 | 292 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1675 | 292 | Instr.getMemoryAlign(), Context.Int16Ty, true); |
1676 | 292 | break; |
1677 | 37 | case OpCode::I64__store32: |
1678 | 37 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1679 | 37 | Instr.getMemoryAlign(), Context.Int32Ty, true); |
1680 | 37 | break; |
1681 | 811 | case OpCode::Memory__size: |
1682 | 811 | stackPush(Builder.createCall( |
1683 | 811 | Context.getIntrinsic(Builder, Executable::Intrinsics::kMemSize, |
1684 | 811 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1685 | 811 | {Context.Int32Ty}, |
1686 | 811 | false)), |
1687 | 811 | {LLContext.getInt32(Instr.getTargetIndex())})); |
1688 | 811 | break; |
1689 | 618 | case OpCode::Memory__grow: { |
1690 | 618 | auto Diff = stackPop(); |
1691 | 618 | stackPush(Builder.createCall( |
1692 | 618 | Context.getIntrinsic( |
1693 | 618 | Builder, Executable::Intrinsics::kMemGrow, |
1694 | 618 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1695 | 618 | {Context.Int32Ty, Context.Int32Ty}, |
1696 | 618 | false)), |
1697 | 618 | {LLContext.getInt32(Instr.getTargetIndex()), Diff})); |
1698 | 618 | break; |
1699 | 220 | } |
1700 | 23 | case OpCode::Memory__init: { |
1701 | 23 | auto Len = stackPop(); |
1702 | 23 | auto Src = stackPop(); |
1703 | 23 | auto Dst = stackPop(); |
1704 | 23 | Builder.createCall( |
1705 | 23 | Context.getIntrinsic( |
1706 | 23 | Builder, Executable::Intrinsics::kMemInit, |
1707 | 23 | LLVM::Type::getFunctionType(Context.VoidTy, |
1708 | 23 | {Context.Int32Ty, Context.Int32Ty, |
1709 | 23 | Context.Int32Ty, Context.Int32Ty, |
1710 | 23 | Context.Int32Ty}, |
1711 | 23 | false)), |
1712 | 23 | {LLContext.getInt32(Instr.getTargetIndex()), |
1713 | 23 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1714 | 23 | break; |
1715 | 220 | } |
1716 | 22 | case OpCode::Data__drop: { |
1717 | 22 | Builder.createCall( |
1718 | 22 | Context.getIntrinsic(Builder, Executable::Intrinsics::kDataDrop, |
1719 | 22 | LLVM::Type::getFunctionType( |
1720 | 22 | Context.VoidTy, {Context.Int32Ty}, false)), |
1721 | 22 | {LLContext.getInt32(Instr.getTargetIndex())}); |
1722 | 22 | break; |
1723 | 220 | } |
1724 | 353 | case OpCode::Memory__copy: { |
1725 | 353 | auto Len = stackPop(); |
1726 | 353 | auto Src = stackPop(); |
1727 | 353 | auto Dst = stackPop(); |
1728 | 353 | Builder.createCall( |
1729 | 353 | Context.getIntrinsic( |
1730 | 353 | Builder, Executable::Intrinsics::kMemCopy, |
1731 | 353 | LLVM::Type::getFunctionType(Context.VoidTy, |
1732 | 353 | {Context.Int32Ty, Context.Int32Ty, |
1733 | 353 | Context.Int32Ty, Context.Int32Ty, |
1734 | 353 | Context.Int32Ty}, |
1735 | 353 | false)), |
1736 | 353 | {LLContext.getInt32(Instr.getTargetIndex()), |
1737 | 353 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1738 | 353 | break; |
1739 | 220 | } |
1740 | 665 | case OpCode::Memory__fill: { |
1741 | 665 | auto Len = stackPop(); |
1742 | 665 | auto Val = Builder.createTrunc(stackPop(), Context.Int8Ty); |
1743 | 665 | auto Off = stackPop(); |
1744 | 665 | Builder.createCall( |
1745 | 665 | Context.getIntrinsic( |
1746 | 665 | Builder, Executable::Intrinsics::kMemFill, |
1747 | 665 | LLVM::Type::getFunctionType(Context.VoidTy, |
1748 | 665 | {Context.Int32Ty, Context.Int32Ty, |
1749 | 665 | Context.Int8Ty, Context.Int32Ty}, |
1750 | 665 | false)), |
1751 | 665 | {LLContext.getInt32(Instr.getTargetIndex()), Off, Val, Len}); |
1752 | 665 | break; |
1753 | 220 | } |
1754 | | |
1755 | | // Const Numeric Instructions |
1756 | 594k | case OpCode::I32__const: |
1757 | 594k | stackPush(LLContext.getInt32(Instr.getNum().get<uint32_t>())); |
1758 | 594k | break; |
1759 | 90.3k | case OpCode::I64__const: |
1760 | 90.3k | stackPush(LLContext.getInt64(Instr.getNum().get<uint64_t>())); |
1761 | 90.3k | break; |
1762 | 14.8k | case OpCode::F32__const: |
1763 | 14.8k | stackPush(LLContext.getFloat(Instr.getNum().get<float>())); |
1764 | 14.8k | break; |
1765 | 7.39k | case OpCode::F64__const: |
1766 | 7.39k | stackPush(LLContext.getDouble(Instr.getNum().get<double>())); |
1767 | 7.39k | break; |
1768 | | |
1769 | | // Unary Numeric Instructions |
1770 | 7.83k | case OpCode::I32__eqz: |
1771 | 7.83k | stackPush(Builder.createZExt( |
1772 | 7.83k | Builder.createICmpEQ(stackPop(), LLContext.getInt32(0)), |
1773 | 7.83k | Context.Int32Ty)); |
1774 | 7.83k | break; |
1775 | 1.45k | case OpCode::I64__eqz: |
1776 | 1.45k | stackPush(Builder.createZExt( |
1777 | 1.45k | Builder.createICmpEQ(stackPop(), LLContext.getInt64(0)), |
1778 | 1.45k | Context.Int32Ty)); |
1779 | 1.45k | break; |
1780 | 2.26k | case OpCode::I32__clz: |
1781 | 2.26k | assuming(LLVM::Core::Ctlz != LLVM::Core::NotIntrinsic); |
1782 | 2.26k | stackPush(Builder.createIntrinsic(LLVM::Core::Ctlz, {Context.Int32Ty}, |
1783 | 2.26k | {stackPop(), LLContext.getFalse()})); |
1784 | 2.26k | break; |
1785 | 308 | case OpCode::I64__clz: |
1786 | 308 | assuming(LLVM::Core::Ctlz != LLVM::Core::NotIntrinsic); |
1787 | 308 | stackPush(Builder.createIntrinsic(LLVM::Core::Ctlz, {Context.Int64Ty}, |
1788 | 308 | {stackPop(), LLContext.getFalse()})); |
1789 | 308 | break; |
1790 | 1.76k | case OpCode::I32__ctz: |
1791 | 1.76k | assuming(LLVM::Core::Cttz != LLVM::Core::NotIntrinsic); |
1792 | 1.76k | stackPush(Builder.createIntrinsic(LLVM::Core::Cttz, {Context.Int32Ty}, |
1793 | 1.76k | {stackPop(), LLContext.getFalse()})); |
1794 | 1.76k | break; |
1795 | 440 | case OpCode::I64__ctz: |
1796 | 440 | assuming(LLVM::Core::Cttz != LLVM::Core::NotIntrinsic); |
1797 | 440 | stackPush(Builder.createIntrinsic(LLVM::Core::Cttz, {Context.Int64Ty}, |
1798 | 440 | {stackPop(), LLContext.getFalse()})); |
1799 | 440 | break; |
1800 | 16.6k | case OpCode::I32__popcnt: |
1801 | 18.5k | case OpCode::I64__popcnt: |
1802 | 18.5k | assuming(LLVM::Core::Ctpop != LLVM::Core::NotIntrinsic); |
1803 | 18.5k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Ctpop, stackPop())); |
1804 | 18.5k | break; |
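The getFalse() argument passed to llvm.ctlz/llvm.cttz above is the is_zero_poison flag, so a zero input returns the full bit width instead of poison, which is what Wasm's clz/ctz require. A small sketch of that boundary behaviour, with a hypothetical helper name:

#include <cstdint>

// Reference behaviour of i32.clz, including the zero-input case.
inline uint32_t wasmI32Clz(uint32_t X) {
  uint32_t N = 0;
  for (uint32_t Bit = 0x80000000U; Bit != 0 && (X & Bit) == 0; Bit >>= 1) {
    ++N;
  }
  return N; // 32 when X == 0, matching is_zero_poison == false
}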
1805 | 825 | case OpCode::F32__abs: |
1806 | 1.63k | case OpCode::F64__abs: |
1807 | 1.63k | assuming(LLVM::Core::Fabs != LLVM::Core::NotIntrinsic); |
1808 | 1.63k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Fabs, stackPop())); |
1809 | 1.63k | break; |
1810 | 1.06k | case OpCode::F32__neg: |
1811 | 1.84k | case OpCode::F64__neg: |
1812 | 1.84k | stackPush(Builder.createFNeg(stackPop())); |
1813 | 1.84k | break; |
1814 | 1.96k | case OpCode::F32__ceil: |
1815 | 4.20k | case OpCode::F64__ceil: |
1816 | 4.20k | assuming(LLVM::Core::Ceil != LLVM::Core::NotIntrinsic); |
1817 | 4.20k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Ceil, stackPop())); |
1818 | 4.20k | break; |
1819 | 883 | case OpCode::F32__floor: |
1820 | 1.26k | case OpCode::F64__floor: |
1821 | 1.26k | assuming(LLVM::Core::Floor != LLVM::Core::NotIntrinsic); |
1822 | 1.26k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Floor, stackPop())); |
1823 | 1.26k | break; |
1824 | 509 | case OpCode::F32__trunc: |
1825 | 806 | case OpCode::F64__trunc: |
1826 | 806 | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
1827 | 806 | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Trunc, stackPop())); |
1828 | 806 | break; |
1829 | 837 | case OpCode::F32__nearest: |
1830 | 1.22k | case OpCode::F64__nearest: { |
1831 | 1.22k | const bool IsFloat = Instr.getOpCode() == OpCode::F32__nearest; |
1832 | 1.22k | LLVM::Value Value = stackPop(); |
1833 | | |
1834 | 1.22k | #if LLVM_VERSION_MAJOR >= 12 && !defined(__s390x__) |
1835 | 1.22k | assuming(LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic); |
1836 | 1.22k | if (LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic) { |
1837 | 1.22k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Roundeven, Value)); |
1838 | 1.22k | break; |
1839 | 1.22k | } |
1840 | 0 | #endif |
1841 | | |
1842 | | // The VectorSize is only used when SSE4_1 or NEON is supported. |
1843 | 0 | [[maybe_unused]] const uint32_t VectorSize = IsFloat ? 4 : 2; |
1844 | 0 | #if defined(__x86_64__) |
1845 | 0 | if (Context.SupportSSE4_1) { |
1846 | 0 | auto Zero = LLContext.getInt64(0); |
1847 | 0 | auto VectorTy = |
1848 | 0 | LLVM::Type::getVectorType(Value.getType(), VectorSize); |
1849 | 0 | LLVM::Value Ret = LLVM::Value::getUndef(VectorTy); |
1850 | 0 | Ret = Builder.createInsertElement(Ret, Value, Zero); |
1851 | 0 | auto ID = IsFloat ? LLVM::Core::X86SSE41RoundSs |
1852 | 0 | : LLVM::Core::X86SSE41RoundSd; |
1853 | 0 | assuming(ID != LLVM::Core::NotIntrinsic); |
1854 | 0 | Ret = Builder.createIntrinsic(ID, {}, |
1855 | 0 | {Ret, Ret, LLContext.getInt32(8)}); |
1856 | 0 | Ret = Builder.createExtractElement(Ret, Zero); |
1857 | 0 | stackPush(Ret); |
1858 | 0 | break; |
1859 | 0 | } |
1860 | 0 | #endif |
1861 | | |
1862 | | #if defined(__aarch64__) |
1863 | | if (Context.SupportNEON && |
1864 | | LLVM::Core::AArch64NeonFRIntN != LLVM::Core::NotIntrinsic) { |
1865 | | auto Zero = LLContext.getInt64(0); |
1866 | | auto VectorTy = |
1867 | | LLVM::Type::getVectorType(Value.getType(), VectorSize); |
1868 | | LLVM::Value Ret = LLVM::Value::getUndef(VectorTy); |
1869 | | Ret = Builder.createInsertElement(Ret, Value, Zero); |
1870 | | Ret = |
1871 | | Builder.createUnaryIntrinsic(LLVM::Core::AArch64NeonFRIntN, Ret); |
1872 | | Ret = Builder.createExtractElement(Ret, Zero); |
1873 | | stackPush(Ret); |
1874 | | break; |
1875 | | } |
1876 | | #endif |
1877 | | |
 1878 | | // Fallback case.
 1879 | | // If SSE4.1 is not supported on the x86_64 platform or
 1880 | | // NEON is not supported on the aarch64 platform,
 1881 | | // then fall back to the generic nearbyint intrinsic.
1882 | 0 | assuming(LLVM::Core::Nearbyint != LLVM::Core::NotIntrinsic); |
1883 | 0 | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Nearbyint, Value)); |
1884 | 0 | break; |
1885 | 0 | } |
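f32.nearest and f64.nearest round to the nearest integer with ties to even. The preferred path uses llvm.roundeven (or the SSE4.1/NEON rounding instructions), and the final fallback uses llvm.nearbyint, which follows the current rounding mode and therefore also gives ties-to-even under the default FP environment. A short sketch of how that differs from ordinary rounding, assuming the default rounding mode:

#include <cmath>
#include <cstdio>

int main() {
  // nearbyint honours the current rounding mode (ties to even by default),
  // while round always sends halfway cases away from zero.
  std::printf("%.1f %.1f\n", std::nearbyint(2.5), std::round(2.5)); // 2.0 3.0
  std::printf("%.1f %.1f\n", std::nearbyint(3.5), std::round(3.5)); // 4.0 4.0
  return 0;
}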
1886 | 401 | case OpCode::F32__sqrt: |
1887 | 1.63k | case OpCode::F64__sqrt: |
1888 | 1.63k | assuming(LLVM::Core::Sqrt != LLVM::Core::NotIntrinsic); |
1889 | 1.63k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Sqrt, stackPop())); |
1890 | 1.63k | break; |
1891 | 331 | case OpCode::I32__wrap_i64: |
1892 | 331 | stackPush(Builder.createTrunc(stackPop(), Context.Int32Ty)); |
1893 | 331 | break; |
1894 | 1.39k | case OpCode::I32__trunc_f32_s: |
1895 | 1.39k | compileSignedTrunc(Context.Int32Ty); |
1896 | 1.39k | break; |
1897 | 252 | case OpCode::I32__trunc_f64_s: |
1898 | 252 | compileSignedTrunc(Context.Int32Ty); |
1899 | 252 | break; |
1900 | 178 | case OpCode::I32__trunc_f32_u: |
1901 | 178 | compileUnsignedTrunc(Context.Int32Ty); |
1902 | 178 | break; |
1903 | 1.35k | case OpCode::I32__trunc_f64_u: |
1904 | 1.35k | compileUnsignedTrunc(Context.Int32Ty); |
1905 | 1.35k | break; |
1906 | 2.19k | case OpCode::I64__extend_i32_s: |
1907 | 2.19k | stackPush(Builder.createSExt(stackPop(), Context.Int64Ty)); |
1908 | 2.19k | break; |
1909 | 352 | case OpCode::I64__extend_i32_u: |
1910 | 352 | stackPush(Builder.createZExt(stackPop(), Context.Int64Ty)); |
1911 | 352 | break; |
1912 | 62 | case OpCode::I64__trunc_f32_s: |
1913 | 62 | compileSignedTrunc(Context.Int64Ty); |
1914 | 62 | break; |
1915 | 399 | case OpCode::I64__trunc_f64_s: |
1916 | 399 | compileSignedTrunc(Context.Int64Ty); |
1917 | 399 | break; |
1918 | 1.22k | case OpCode::I64__trunc_f32_u: |
1919 | 1.22k | compileUnsignedTrunc(Context.Int64Ty); |
1920 | 1.22k | break; |
1921 | 1.68k | case OpCode::I64__trunc_f64_u: |
1922 | 1.68k | compileUnsignedTrunc(Context.Int64Ty); |
1923 | 1.68k | break; |
1924 | 1.67k | case OpCode::F32__convert_i32_s: |
1925 | 2.06k | case OpCode::F32__convert_i64_s: |
1926 | 2.06k | stackPush(Builder.createSIToFP(stackPop(), Context.FloatTy)); |
1927 | 2.06k | break; |
1928 | 698 | case OpCode::F32__convert_i32_u: |
1929 | 2.08k | case OpCode::F32__convert_i64_u: |
1930 | 2.08k | stackPush(Builder.createUIToFP(stackPop(), Context.FloatTy)); |
1931 | 2.08k | break; |
1932 | 1.62k | case OpCode::F64__convert_i32_s: |
1933 | 6.23k | case OpCode::F64__convert_i64_s: |
1934 | 6.23k | stackPush(Builder.createSIToFP(stackPop(), Context.DoubleTy)); |
1935 | 6.23k | break; |
1936 | 1.64k | case OpCode::F64__convert_i32_u: |
1937 | 1.83k | case OpCode::F64__convert_i64_u: |
1938 | 1.83k | stackPush(Builder.createUIToFP(stackPop(), Context.DoubleTy)); |
1939 | 1.83k | break; |
1940 | 213 | case OpCode::F32__demote_f64: |
1941 | 213 | stackPush(Builder.createFPTrunc(stackPop(), Context.FloatTy)); |
1942 | 213 | break; |
1943 | 88 | case OpCode::F64__promote_f32: |
1944 | 88 | stackPush(Builder.createFPExt(stackPop(), Context.DoubleTy)); |
1945 | 88 | break; |
1946 | 646 | case OpCode::I32__reinterpret_f32: |
1947 | 646 | stackPush(Builder.createBitCast(stackPop(), Context.Int32Ty)); |
1948 | 646 | break; |
1949 | 674 | case OpCode::I64__reinterpret_f64: |
1950 | 674 | stackPush(Builder.createBitCast(stackPop(), Context.Int64Ty)); |
1951 | 674 | break; |
1952 | 4.27k | case OpCode::F32__reinterpret_i32: |
1953 | 4.27k | stackPush(Builder.createBitCast(stackPop(), Context.FloatTy)); |
1954 | 4.27k | break; |
1955 | 1.13k | case OpCode::F64__reinterpret_i64: |
1956 | 1.13k | stackPush(Builder.createBitCast(stackPop(), Context.DoubleTy)); |
1957 | 1.13k | break; |
1958 | 2.40k | case OpCode::I32__extend8_s: |
1959 | 2.40k | stackPush(Builder.createSExt( |
1960 | 2.40k | Builder.createTrunc(stackPop(), Context.Int8Ty), Context.Int32Ty)); |
1961 | 2.40k | break; |
1962 | 3.07k | case OpCode::I32__extend16_s: |
1963 | 3.07k | stackPush(Builder.createSExt( |
1964 | 3.07k | Builder.createTrunc(stackPop(), Context.Int16Ty), Context.Int32Ty)); |
1965 | 3.07k | break; |
1966 | 365 | case OpCode::I64__extend8_s: |
1967 | 365 | stackPush(Builder.createSExt( |
1968 | 365 | Builder.createTrunc(stackPop(), Context.Int8Ty), Context.Int64Ty)); |
1969 | 365 | break; |
1970 | 618 | case OpCode::I64__extend16_s: |
1971 | 618 | stackPush(Builder.createSExt( |
1972 | 618 | Builder.createTrunc(stackPop(), Context.Int16Ty), Context.Int64Ty)); |
1973 | 618 | break; |
1974 | 740 | case OpCode::I64__extend32_s: |
1975 | 740 | stackPush(Builder.createSExt( |
1976 | 740 | Builder.createTrunc(stackPop(), Context.Int32Ty), Context.Int64Ty)); |
1977 | 740 | break; |
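The extendN_s opcodes above are lowered as a truncation to the narrow integer type followed by a sign extension back to the operand width. The same effect in plain C++, with a hypothetical helper name:

#include <cstdint>

// i32.extend8_s: treat the low 8 bits as signed and widen to 32 bits.
inline int32_t wasmI32Extend8S(uint32_t X) {
  return static_cast<int32_t>(static_cast<int8_t>(X & 0xFFU));
}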
1978 | | |
1979 | | // Binary Numeric Instructions |
1980 | 1.20k | case OpCode::I32__eq: |
1981 | 1.46k | case OpCode::I64__eq: { |
1982 | 1.46k | LLVM::Value RHS = stackPop(); |
1983 | 1.46k | LLVM::Value LHS = stackPop(); |
1984 | 1.46k | stackPush(Builder.createZExt(Builder.createICmpEQ(LHS, RHS), |
1985 | 1.46k | Context.Int32Ty)); |
1986 | 1.46k | break; |
1987 | 1.20k | } |
1988 | 722 | case OpCode::I32__ne: |
1989 | 744 | case OpCode::I64__ne: { |
1990 | 744 | LLVM::Value RHS = stackPop(); |
1991 | 744 | LLVM::Value LHS = stackPop(); |
1992 | 744 | stackPush(Builder.createZExt(Builder.createICmpNE(LHS, RHS), |
1993 | 744 | Context.Int32Ty)); |
1994 | 744 | break; |
1995 | 722 | } |
1996 | 4.38k | case OpCode::I32__lt_s: |
1997 | 5.02k | case OpCode::I64__lt_s: { |
1998 | 5.02k | LLVM::Value RHS = stackPop(); |
1999 | 5.02k | LLVM::Value LHS = stackPop(); |
2000 | 5.02k | stackPush(Builder.createZExt(Builder.createICmpSLT(LHS, RHS), |
2001 | 5.02k | Context.Int32Ty)); |
2002 | 5.02k | break; |
2003 | 4.38k | } |
2004 | 6.28k | case OpCode::I32__lt_u: |
2005 | 6.62k | case OpCode::I64__lt_u: { |
2006 | 6.62k | LLVM::Value RHS = stackPop(); |
2007 | 6.62k | LLVM::Value LHS = stackPop(); |
2008 | 6.62k | stackPush(Builder.createZExt(Builder.createICmpULT(LHS, RHS), |
2009 | 6.62k | Context.Int32Ty)); |
2010 | 6.62k | break; |
2011 | 6.28k | } |
2012 | 1.21k | case OpCode::I32__gt_s: |
2013 | 1.78k | case OpCode::I64__gt_s: { |
2014 | 1.78k | LLVM::Value RHS = stackPop(); |
2015 | 1.78k | LLVM::Value LHS = stackPop(); |
2016 | 1.78k | stackPush(Builder.createZExt(Builder.createICmpSGT(LHS, RHS), |
2017 | 1.78k | Context.Int32Ty)); |
2018 | 1.78k | break; |
2019 | 1.21k | } |
2020 | 7.20k | case OpCode::I32__gt_u: |
2021 | 7.38k | case OpCode::I64__gt_u: { |
2022 | 7.38k | LLVM::Value RHS = stackPop(); |
2023 | 7.38k | LLVM::Value LHS = stackPop(); |
2024 | 7.38k | stackPush(Builder.createZExt(Builder.createICmpUGT(LHS, RHS), |
2025 | 7.38k | Context.Int32Ty)); |
2026 | 7.38k | break; |
2027 | 7.20k | } |
2028 | 2.17k | case OpCode::I32__le_s: |
2029 | 3.12k | case OpCode::I64__le_s: { |
2030 | 3.12k | LLVM::Value RHS = stackPop(); |
2031 | 3.12k | LLVM::Value LHS = stackPop(); |
2032 | 3.12k | stackPush(Builder.createZExt(Builder.createICmpSLE(LHS, RHS), |
2033 | 3.12k | Context.Int32Ty)); |
2034 | 3.12k | break; |
2035 | 2.17k | } |
2036 | 465 | case OpCode::I32__le_u: |
2037 | 1.75k | case OpCode::I64__le_u: { |
2038 | 1.75k | LLVM::Value RHS = stackPop(); |
2039 | 1.75k | LLVM::Value LHS = stackPop(); |
2040 | 1.75k | stackPush(Builder.createZExt(Builder.createICmpULE(LHS, RHS), |
2041 | 1.75k | Context.Int32Ty)); |
2042 | 1.75k | break; |
2043 | 465 | } |
2044 | 1.16k | case OpCode::I32__ge_s: |
2045 | 1.19k | case OpCode::I64__ge_s: { |
2046 | 1.19k | LLVM::Value RHS = stackPop(); |
2047 | 1.19k | LLVM::Value LHS = stackPop(); |
2048 | 1.19k | stackPush(Builder.createZExt(Builder.createICmpSGE(LHS, RHS), |
2049 | 1.19k | Context.Int32Ty)); |
2050 | 1.19k | break; |
2051 | 1.16k | } |
2052 | 2.74k | case OpCode::I32__ge_u: |
2053 | 3.56k | case OpCode::I64__ge_u: { |
2054 | 3.56k | LLVM::Value RHS = stackPop(); |
2055 | 3.56k | LLVM::Value LHS = stackPop(); |
2056 | 3.56k | stackPush(Builder.createZExt(Builder.createICmpUGE(LHS, RHS), |
2057 | 3.56k | Context.Int32Ty)); |
2058 | 3.56k | break; |
2059 | 2.74k | } |
2060 | 159 | case OpCode::F32__eq: |
2061 | 236 | case OpCode::F64__eq: { |
2062 | 236 | LLVM::Value RHS = stackPop(); |
2063 | 236 | LLVM::Value LHS = stackPop(); |
2064 | 236 | stackPush(Builder.createZExt(Builder.createFCmpOEQ(LHS, RHS), |
2065 | 236 | Context.Int32Ty)); |
2066 | 236 | break; |
2067 | 159 | } |
2068 | 88 | case OpCode::F32__ne: |
2069 | 116 | case OpCode::F64__ne: { |
2070 | 116 | LLVM::Value RHS = stackPop(); |
2071 | 116 | LLVM::Value LHS = stackPop(); |
2072 | 116 | stackPush(Builder.createZExt(Builder.createFCmpUNE(LHS, RHS), |
2073 | 116 | Context.Int32Ty)); |
2074 | 116 | break; |
2075 | 88 | } |
2076 | 186 | case OpCode::F32__lt: |
2077 | 312 | case OpCode::F64__lt: { |
2078 | 312 | LLVM::Value RHS = stackPop(); |
2079 | 312 | LLVM::Value LHS = stackPop(); |
2080 | 312 | stackPush(Builder.createZExt(Builder.createFCmpOLT(LHS, RHS), |
2081 | 312 | Context.Int32Ty)); |
2082 | 312 | break; |
2083 | 186 | } |
2084 | 153 | case OpCode::F32__gt: |
2085 | 212 | case OpCode::F64__gt: { |
2086 | 212 | LLVM::Value RHS = stackPop(); |
2087 | 212 | LLVM::Value LHS = stackPop(); |
2088 | 212 | stackPush(Builder.createZExt(Builder.createFCmpOGT(LHS, RHS), |
2089 | 212 | Context.Int32Ty)); |
2090 | 212 | break; |
2091 | 153 | } |
2092 | 76 | case OpCode::F32__le: |
2093 | 179 | case OpCode::F64__le: { |
2094 | 179 | LLVM::Value RHS = stackPop(); |
2095 | 179 | LLVM::Value LHS = stackPop(); |
2096 | 179 | stackPush(Builder.createZExt(Builder.createFCmpOLE(LHS, RHS), |
2097 | 179 | Context.Int32Ty)); |
2098 | 179 | break; |
2099 | 76 | } |
2100 | 232 | case OpCode::F32__ge: |
2101 | 261 | case OpCode::F64__ge: { |
2102 | 261 | LLVM::Value RHS = stackPop(); |
2103 | 261 | LLVM::Value LHS = stackPop(); |
2104 | 261 | stackPush(Builder.createZExt(Builder.createFCmpOGE(LHS, RHS), |
2105 | 261 | Context.Int32Ty)); |
2106 | 261 | break; |
2107 | 232 | } |
2108 | 740 | case OpCode::I32__add: |
2109 | 1.21k | case OpCode::I64__add: { |
2110 | 1.21k | LLVM::Value RHS = stackPop(); |
2111 | 1.21k | LLVM::Value LHS = stackPop(); |
2112 | 1.21k | stackPush(Builder.createAdd(LHS, RHS)); |
2113 | 1.21k | break; |
2114 | 740 | } |
2115 | 1.83k | case OpCode::I32__sub: |
2116 | 2.25k | case OpCode::I64__sub: { |
2117 | 2.25k | LLVM::Value RHS = stackPop(); |
2118 | 2.25k | LLVM::Value LHS = stackPop(); |
2119 | | |
2120 | 2.25k | stackPush(Builder.createSub(LHS, RHS)); |
2121 | 2.25k | break; |
2122 | 1.83k | } |
2123 | 619 | case OpCode::I32__mul: |
2124 | 1.32k | case OpCode::I64__mul: { |
2125 | 1.32k | LLVM::Value RHS = stackPop(); |
2126 | 1.32k | LLVM::Value LHS = stackPop(); |
2127 | 1.32k | stackPush(Builder.createMul(LHS, RHS)); |
2128 | 1.32k | break; |
2129 | 619 | } |
2130 | 1.26k | case OpCode::I32__div_s: |
2131 | 1.60k | case OpCode::I64__div_s: { |
2132 | 1.60k | LLVM::Value RHS = stackPop(); |
2133 | 1.60k | LLVM::Value LHS = stackPop(); |
2134 | 1.60k | if constexpr (kForceDivCheck) { |
2135 | 1.60k | const bool Is32 = Instr.getOpCode() == OpCode::I32__div_s; |
2136 | 1.60k | LLVM::Value IntZero = |
2137 | 1.60k | Is32 ? LLContext.getInt32(0) : LLContext.getInt64(0); |
2138 | 1.60k | LLVM::Value IntMinusOne = |
2139 | 1.60k | Is32 ? LLContext.getInt32(static_cast<uint32_t>(INT32_C(-1))) |
2140 | 1.60k | : LLContext.getInt64(static_cast<uint64_t>(INT64_C(-1))); |
2141 | 1.60k | LLVM::Value IntMin = Is32 ? LLContext.getInt32(static_cast<uint32_t>( |
2142 | 1.26k | std::numeric_limits<int32_t>::min())) |
2143 | 1.60k | : LLContext.getInt64(static_cast<uint64_t>( |
2144 | 341 | std::numeric_limits<int64_t>::min())); |
2145 | | |
2146 | 1.60k | auto NoZeroBB = |
2147 | 1.60k | LLVM::BasicBlock::create(LLContext, F.Fn, "div.nozero"); |
2148 | 1.60k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "div.ok"); |
2149 | | |
2150 | 1.60k | auto IsNotZero = |
2151 | 1.60k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2152 | 1.60k | Builder.createCondBr(IsNotZero, NoZeroBB, |
2153 | 1.60k | getTrapBB(ErrCode::Value::DivideByZero)); |
2154 | | |
2155 | 1.60k | Builder.positionAtEnd(NoZeroBB); |
2156 | 1.60k | auto NotOverflow = Builder.createLikely( |
2157 | 1.60k | Builder.createOr(Builder.createICmpNE(LHS, IntMin), |
2158 | 1.60k | Builder.createICmpNE(RHS, IntMinusOne))); |
2159 | 1.60k | Builder.createCondBr(NotOverflow, OkBB, |
2160 | 1.60k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
2161 | | |
2162 | 1.60k | Builder.positionAtEnd(OkBB); |
2163 | 1.60k | } |
2164 | 1.60k | stackPush(Builder.createSDiv(LHS, RHS)); |
2165 | 1.60k | break; |
2166 | 1.26k | } |
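With kForceDivCheck enabled, signed division is guarded by two explicit branches into trap blocks: one for a zero divisor and one for the single overflowing combination INT_MIN / -1. A minimal scalar sketch of the semantics those guards enforce, using hypothetical names and exceptions in place of the trap blocks:

#include <cstdint>
#include <limits>
#include <stdexcept>

// Hypothetical scalar model of the guarded i32.div_s lowering above.
inline int32_t wasmI32DivS(int32_t LHS, int32_t RHS) {
  if (RHS == 0) {
    throw std::runtime_error("divide by zero");   // ErrCode::Value::DivideByZero
  }
  if (LHS == std::numeric_limits<int32_t>::min() && RHS == -1) {
    throw std::runtime_error("integer overflow"); // ErrCode::Value::IntegerOverflow
  }
  return LHS / RHS;
}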
2167 | 3.30k | case OpCode::I32__div_u: |
2168 | 3.61k | case OpCode::I64__div_u: { |
2169 | 3.61k | LLVM::Value RHS = stackPop(); |
2170 | 3.61k | LLVM::Value LHS = stackPop(); |
2171 | 3.61k | if constexpr (kForceDivCheck) { |
2172 | 3.61k | const bool Is32 = Instr.getOpCode() == OpCode::I32__div_u; |
2173 | 3.61k | LLVM::Value IntZero = |
2174 | 3.61k | Is32 ? LLContext.getInt32(0) : LLContext.getInt64(0); |
2175 | 3.61k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "div.ok"); |
2176 | | |
2177 | 3.61k | auto IsNotZero = |
2178 | 3.61k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2179 | 3.61k | Builder.createCondBr(IsNotZero, OkBB, |
2180 | 3.61k | getTrapBB(ErrCode::Value::DivideByZero)); |
2181 | 3.61k | Builder.positionAtEnd(OkBB); |
2182 | 3.61k | } |
2183 | 3.61k | stackPush(Builder.createUDiv(LHS, RHS)); |
2184 | 3.61k | break; |
2185 | 3.30k | } |
2186 | 1.20k | case OpCode::I32__rem_s: |
2187 | 1.65k | case OpCode::I64__rem_s: { |
2188 | 1.65k | LLVM::Value RHS = stackPop(); |
2189 | 1.65k | LLVM::Value LHS = stackPop(); |
 2190 | | // Handle INT32_MIN % -1 (and INT64_MIN % -1), which must yield 0 instead of trapping.
2191 | 1.65k | const bool Is32 = Instr.getOpCode() == OpCode::I32__rem_s; |
2192 | 1.65k | LLVM::Value IntMinusOne = |
2193 | 1.65k | Is32 ? LLContext.getInt32(static_cast<uint32_t>(INT32_C(-1))) |
2194 | 1.65k | : LLContext.getInt64(static_cast<uint64_t>(INT64_C(-1))); |
2195 | 1.65k | LLVM::Value IntMin = Is32 ? LLContext.getInt32(static_cast<uint32_t>( |
2196 | 1.20k | std::numeric_limits<int32_t>::min())) |
2197 | 1.65k | : LLContext.getInt64(static_cast<uint64_t>( |
2198 | 455 | std::numeric_limits<int64_t>::min())); |
2199 | 1.65k | LLVM::Value IntZero = |
2200 | 1.65k | Is32 ? LLContext.getInt32(0) : LLContext.getInt64(0); |
2201 | | |
2202 | 1.65k | auto NoOverflowBB = |
2203 | 1.65k | LLVM::BasicBlock::create(LLContext, F.Fn, "no.overflow"); |
2204 | 1.65k | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "end.overflow"); |
2205 | | |
2206 | 1.65k | if constexpr (kForceDivCheck) { |
2207 | 1.65k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "rem.ok"); |
2208 | | |
2209 | 1.65k | auto IsNotZero = |
2210 | 1.65k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2211 | 1.65k | Builder.createCondBr(IsNotZero, OkBB, |
2212 | 1.65k | getTrapBB(ErrCode::Value::DivideByZero)); |
2213 | 1.65k | Builder.positionAtEnd(OkBB); |
2214 | 1.65k | } |
2215 | | |
2216 | 1.65k | auto CurrBB = Builder.getInsertBlock(); |
2217 | | |
2218 | 1.65k | auto NotOverflow = Builder.createLikely( |
2219 | 1.65k | Builder.createOr(Builder.createICmpNE(LHS, IntMin), |
2220 | 1.65k | Builder.createICmpNE(RHS, IntMinusOne))); |
2221 | 1.65k | Builder.createCondBr(NotOverflow, NoOverflowBB, EndBB); |
2222 | | |
2223 | 1.65k | Builder.positionAtEnd(NoOverflowBB); |
2224 | 1.65k | auto Ret1 = Builder.createSRem(LHS, RHS); |
2225 | 1.65k | Builder.createBr(EndBB); |
2226 | | |
2227 | 1.65k | Builder.positionAtEnd(EndBB); |
2228 | 1.65k | auto Ret = Builder.createPHI(Ret1.getType()); |
2229 | 1.65k | Ret.addIncoming(Ret1, NoOverflowBB); |
2230 | 1.65k | Ret.addIncoming(IntZero, CurrBB); |
2231 | | |
2232 | 1.65k | stackPush(Ret); |
2233 | 1.65k | break; |
2234 | 1.20k | } |
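Unlike div_s, rem_s must not trap on INT_MIN % -1; the phi above returns a constant zero for that combination instead of executing the srem. A scalar sketch with hypothetical names:

#include <cstdint>
#include <limits>
#include <stdexcept>

// Hypothetical scalar model of the i32.rem_s lowering above.
inline int32_t wasmI32RemS(int32_t LHS, int32_t RHS) {
  if (RHS == 0) {
    throw std::runtime_error("divide by zero"); // ErrCode::Value::DivideByZero
  }
  if (LHS == std::numeric_limits<int32_t>::min() && RHS == -1) {
    return 0; // the overflow case yields 0 rather than trapping
  }
  return LHS % RHS;
}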
2235 | 1.31k | case OpCode::I32__rem_u: |
2236 | 1.88k | case OpCode::I64__rem_u: { |
2237 | 1.88k | LLVM::Value RHS = stackPop(); |
2238 | 1.88k | LLVM::Value LHS = stackPop(); |
2239 | 1.88k | if constexpr (kForceDivCheck) { |
2240 | 1.88k | LLVM::Value IntZero = Instr.getOpCode() == OpCode::I32__rem_u |
2241 | 1.88k | ? LLContext.getInt32(0) |
2242 | 1.88k | : LLContext.getInt64(0); |
2243 | 1.88k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "rem.ok"); |
2244 | | |
2245 | 1.88k | auto IsNotZero = |
2246 | 1.88k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2247 | 1.88k | Builder.createCondBr(IsNotZero, OkBB, |
2248 | 1.88k | getTrapBB(ErrCode::Value::DivideByZero)); |
2249 | 1.88k | Builder.positionAtEnd(OkBB); |
2250 | 1.88k | } |
2251 | 1.88k | stackPush(Builder.createURem(LHS, RHS)); |
2252 | 1.88k | break; |
2253 | 1.31k | } |
2254 | 785 | case OpCode::I32__and: |
2255 | 2.12k | case OpCode::I64__and: { |
2256 | 2.12k | LLVM::Value RHS = stackPop(); |
2257 | 2.12k | LLVM::Value LHS = stackPop(); |
2258 | 2.12k | stackPush(Builder.createAnd(LHS, RHS)); |
2259 | 2.12k | break; |
2260 | 785 | } |
2261 | 1.37k | case OpCode::I32__or: |
2262 | 1.72k | case OpCode::I64__or: { |
2263 | 1.72k | LLVM::Value RHS = stackPop(); |
2264 | 1.72k | LLVM::Value LHS = stackPop(); |
2265 | 1.72k | stackPush(Builder.createOr(LHS, RHS)); |
2266 | 1.72k | break; |
2267 | 1.37k | } |
2268 | 1.61k | case OpCode::I32__xor: |
2269 | 2.25k | case OpCode::I64__xor: { |
2270 | 2.25k | LLVM::Value RHS = stackPop(); |
2271 | 2.25k | LLVM::Value LHS = stackPop(); |
2272 | 2.25k | stackPush(Builder.createXor(LHS, RHS)); |
2273 | 2.25k | break; |
2274 | 1.61k | } |
2275 | 1.91k | case OpCode::I32__shl: |
2276 | 2.33k | case OpCode::I64__shl: { |
2277 | 2.33k | LLVM::Value Mask = Instr.getOpCode() == OpCode::I32__shl |
2278 | 2.33k | ? LLContext.getInt32(31) |
2279 | 2.33k | : LLContext.getInt64(63); |
2280 | 2.33k | LLVM::Value RHS = Builder.createAnd(stackPop(), Mask); |
2281 | 2.33k | LLVM::Value LHS = stackPop(); |
2282 | 2.33k | stackPush(Builder.createShl(LHS, RHS)); |
2283 | 2.33k | break; |
2284 | 1.91k | } |
2285 | 2.69k | case OpCode::I32__shr_s: |
2286 | 3.08k | case OpCode::I64__shr_s: { |
2287 | 3.08k | LLVM::Value Mask = Instr.getOpCode() == OpCode::I32__shr_s |
2288 | 3.08k | ? LLContext.getInt32(31) |
2289 | 3.08k | : LLContext.getInt64(63); |
2290 | 3.08k | LLVM::Value RHS = Builder.createAnd(stackPop(), Mask); |
2291 | 3.08k | LLVM::Value LHS = stackPop(); |
2292 | 3.08k | stackPush(Builder.createAShr(LHS, RHS)); |
2293 | 3.08k | break; |
2294 | 2.69k | } |
2295 | 4.53k | case OpCode::I32__shr_u: |
2296 | 4.82k | case OpCode::I64__shr_u: { |
2297 | 4.82k | LLVM::Value Mask = Instr.getOpCode() == OpCode::I32__shr_u |
2298 | 4.82k | ? LLContext.getInt32(31) |
2299 | 4.82k | : LLContext.getInt64(63); |
2300 | 4.82k | LLVM::Value RHS = Builder.createAnd(stackPop(), Mask); |
2301 | 4.82k | LLVM::Value LHS = stackPop(); |
2302 | 4.82k | stackPush(Builder.createLShr(LHS, RHS)); |
2303 | 4.82k | break; |
2304 | 4.53k | } |
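The shift cases mask the shift count with 31 or 63 before emitting the LLVM shift, because Wasm takes the count modulo the bit width while LLVM treats an out-of-range count as poison. The equivalent scalar operation, with a hypothetical name:

#include <cstdint>

// i32.shl with the Wasm count-masking rule applied explicitly.
inline uint32_t wasmI32Shl(uint32_t X, uint32_t N) {
  return X << (N & 31U);
}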
2305 | 2.66k | case OpCode::I32__rotl: { |
2306 | 2.66k | LLVM::Value RHS = stackPop(); |
2307 | 2.66k | LLVM::Value LHS = stackPop(); |
2308 | 2.66k | assuming(LLVM::Core::FShl != LLVM::Core::NotIntrinsic); |
2309 | 2.66k | stackPush(Builder.createIntrinsic(LLVM::Core::FShl, {Context.Int32Ty}, |
2310 | 2.66k | {LHS, LHS, RHS})); |
2311 | 2.66k | break; |
2312 | 2.66k | } |
2313 | 807 | case OpCode::I32__rotr: { |
2314 | 807 | LLVM::Value RHS = stackPop(); |
2315 | 807 | LLVM::Value LHS = stackPop(); |
2316 | 807 | assuming(LLVM::Core::FShr != LLVM::Core::NotIntrinsic); |
2317 | 807 | stackPush(Builder.createIntrinsic(LLVM::Core::FShr, {Context.Int32Ty}, |
2318 | 807 | {LHS, LHS, RHS})); |
2319 | 807 | break; |
2320 | 807 | } |
2321 | 1.14k | case OpCode::I64__rotl: { |
2322 | 1.14k | LLVM::Value RHS = stackPop(); |
2323 | 1.14k | LLVM::Value LHS = stackPop(); |
2324 | 1.14k | assuming(LLVM::Core::FShl != LLVM::Core::NotIntrinsic); |
2325 | 1.14k | stackPush(Builder.createIntrinsic(LLVM::Core::FShl, {Context.Int64Ty}, |
2326 | 1.14k | {LHS, LHS, RHS})); |
2327 | 1.14k | break; |
2328 | 1.14k | } |
2329 | 1.30k | case OpCode::I64__rotr: { |
2330 | 1.30k | LLVM::Value RHS = stackPop(); |
2331 | 1.30k | LLVM::Value LHS = stackPop(); |
2332 | 1.30k | assuming(LLVM::Core::FShr != LLVM::Core::NotIntrinsic); |
2333 | 1.30k | stackPush(Builder.createIntrinsic(LLVM::Core::FShr, {Context.Int64Ty}, |
2334 | 1.30k | {LHS, LHS, RHS})); |
2335 | 1.30k | break; |
2336 | 1.30k | } |
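Rotates are lowered to the funnel-shift intrinsics with both data operands equal: fshl(x, x, n) is rotl(x, n) and fshr(x, x, n) is rotr(x, n), and the intrinsic already reduces the count modulo the width. A scalar sketch of i32.rotl under that equivalence, using a hypothetical name:

#include <cstdint>

// i32.rotl written out without the intrinsic; the count is taken modulo 32.
inline uint32_t wasmI32Rotl(uint32_t X, uint32_t N) {
  N &= 31U;
  return N == 0 ? X : (X << N) | (X >> (32U - N));
}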
2337 | 280 | case OpCode::F32__add: |
2338 | 583 | case OpCode::F64__add: { |
2339 | 583 | LLVM::Value RHS = stackPop(); |
2340 | 583 | LLVM::Value LHS = stackPop(); |
2341 | 583 | stackPush(Builder.createFAdd(LHS, RHS)); |
2342 | 583 | break; |
2343 | 280 | } |
2344 | 141 | case OpCode::F32__sub: |
2345 | 437 | case OpCode::F64__sub: { |
2346 | 437 | LLVM::Value RHS = stackPop(); |
2347 | 437 | LLVM::Value LHS = stackPop(); |
2348 | 437 | stackPush(Builder.createFSub(LHS, RHS)); |
2349 | 437 | break; |
2350 | 141 | } |
2351 | 545 | case OpCode::F32__mul: |
2352 | 687 | case OpCode::F64__mul: { |
2353 | 687 | LLVM::Value RHS = stackPop(); |
2354 | 687 | LLVM::Value LHS = stackPop(); |
2355 | 687 | stackPush(Builder.createFMul(LHS, RHS)); |
2356 | 687 | break; |
2357 | 545 | } |
2358 | 231 | case OpCode::F32__div: |
2359 | 576 | case OpCode::F64__div: { |
2360 | 576 | LLVM::Value RHS = stackPop(); |
2361 | 576 | LLVM::Value LHS = stackPop(); |
2362 | 576 | stackPush(Builder.createFDiv(LHS, RHS)); |
2363 | 576 | break; |
2364 | 231 | } |
2365 | 307 | case OpCode::F32__min: |
2366 | 681 | case OpCode::F64__min: { |
2367 | 681 | LLVM::Value RHS = stackPop(); |
2368 | 681 | LLVM::Value LHS = stackPop(); |
2369 | 681 | auto FpTy = Instr.getOpCode() == OpCode::F32__min ? Context.FloatTy |
2370 | 681 | : Context.DoubleTy; |
2371 | 681 | auto IntTy = Instr.getOpCode() == OpCode::F32__min ? Context.Int32Ty |
2372 | 681 | : Context.Int64Ty; |
2373 | | |
2374 | 681 | auto UEQ = Builder.createFCmpUEQ(LHS, RHS); |
2375 | 681 | auto UNO = Builder.createFCmpUNO(LHS, RHS); |
2376 | | |
2377 | 681 | auto LHSInt = Builder.createBitCast(LHS, IntTy); |
2378 | 681 | auto RHSInt = Builder.createBitCast(RHS, IntTy); |
2379 | 681 | auto OrInt = Builder.createOr(LHSInt, RHSInt); |
2380 | 681 | auto OrFp = Builder.createBitCast(OrInt, FpTy); |
2381 | | |
2382 | 681 | auto AddFp = Builder.createFAdd(LHS, RHS); |
2383 | | |
2384 | 681 | assuming(LLVM::Core::MinNum != LLVM::Core::NotIntrinsic); |
2385 | 681 | auto MinFp = Builder.createIntrinsic(LLVM::Core::MinNum, |
2386 | 681 | {LHS.getType()}, {LHS, RHS}); |
2387 | | |
2388 | 681 | auto Ret = Builder.createSelect( |
2389 | 681 | UEQ, Builder.createSelect(UNO, AddFp, OrFp), MinFp); |
2390 | 681 | stackPush(Ret); |
2391 | 681 | break; |
2392 | 681 | } |
2393 | 334 | case OpCode::F32__max: |
2394 | 773 | case OpCode::F64__max: { |
2395 | 773 | LLVM::Value RHS = stackPop(); |
2396 | 773 | LLVM::Value LHS = stackPop(); |
2397 | 773 | auto FpTy = Instr.getOpCode() == OpCode::F32__max ? Context.FloatTy |
2398 | 773 | : Context.DoubleTy; |
2399 | 773 | auto IntTy = Instr.getOpCode() == OpCode::F32__max ? Context.Int32Ty |
2400 | 773 | : Context.Int64Ty; |
2401 | | |
2402 | 773 | auto UEQ = Builder.createFCmpUEQ(LHS, RHS); |
2403 | 773 | auto UNO = Builder.createFCmpUNO(LHS, RHS); |
2404 | | |
2405 | 773 | auto LHSInt = Builder.createBitCast(LHS, IntTy); |
2406 | 773 | auto RHSInt = Builder.createBitCast(RHS, IntTy); |
2407 | 773 | auto AndInt = Builder.createAnd(LHSInt, RHSInt); |
2408 | 773 | auto AndFp = Builder.createBitCast(AndInt, FpTy); |
2409 | | |
2410 | 773 | auto AddFp = Builder.createFAdd(LHS, RHS); |
2411 | | |
2412 | 773 | assuming(LLVM::Core::MaxNum != LLVM::Core::NotIntrinsic); |
2413 | 773 | auto MaxFp = Builder.createIntrinsic(LLVM::Core::MaxNum, |
2414 | 773 | {LHS.getType()}, {LHS, RHS}); |
2415 | | |
2416 | 773 | auto Ret = Builder.createSelect( |
2417 | 773 | UEQ, Builder.createSelect(UNO, AddFp, AndFp), MaxFp); |
2418 | 773 | stackPush(Ret); |
2419 | 773 | break; |
2420 | 773 | } |
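The min/max cases cannot rely on llvm.minnum/llvm.maxnum alone, because Wasm requires a NaN result when either operand is NaN and a deterministic choice between +0 and -0. The select chain above handles this: when the operands compare unordered-or-equal (UEQ), a nested select picks LHS + RHS for the unordered case (propagating a NaN) and the bitwise OR/AND for the equal case (min prefers -0, max prefers +0); all other pairs go through minnum/maxnum. A scalar sketch of f64.min under those rules, with a hypothetical name:

#include <cmath>

// Hypothetical scalar model of the f64.min lowering above.
inline double wasmF64Min(double LHS, double RHS) {
  if (std::isnan(LHS) || std::isnan(RHS)) {
    return LHS + RHS; // propagate a NaN, like the AddFp arm
  }
  if (LHS == RHS) {
    // Equal operands: for +0 vs -0 the bitwise-OR arm prefers -0;
    // for any other equal pair both operands are the same value anyway.
    return std::signbit(LHS) ? LHS : RHS;
  }
  return std::fmin(LHS, RHS);
}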
2421 | 438 | case OpCode::F32__copysign: |
2422 | 853 | case OpCode::F64__copysign: { |
2423 | 853 | LLVM::Value RHS = stackPop(); |
2424 | 853 | LLVM::Value LHS = stackPop(); |
2425 | 853 | assuming(LLVM::Core::CopySign != LLVM::Core::NotIntrinsic); |
2426 | 853 | stackPush(Builder.createIntrinsic(LLVM::Core::CopySign, {LHS.getType()}, |
2427 | 853 | {LHS, RHS})); |
2428 | 853 | break; |
2429 | 853 | } |
2430 | | |
2431 | | // Saturating Truncation Numeric Instructions |
2432 | 197 | case OpCode::I32__trunc_sat_f32_s: |
2433 | 197 | compileSignedTruncSat(Context.Int32Ty); |
2434 | 197 | break; |
2435 | 88 | case OpCode::I32__trunc_sat_f32_u: |
2436 | 88 | compileUnsignedTruncSat(Context.Int32Ty); |
2437 | 88 | break; |
2438 | 442 | case OpCode::I32__trunc_sat_f64_s: |
2439 | 442 | compileSignedTruncSat(Context.Int32Ty); |
2440 | 442 | break; |
2441 | 308 | case OpCode::I32__trunc_sat_f64_u: |
2442 | 308 | compileUnsignedTruncSat(Context.Int32Ty); |
2443 | 308 | break; |
2444 | 426 | case OpCode::I64__trunc_sat_f32_s: |
2445 | 426 | compileSignedTruncSat(Context.Int64Ty); |
2446 | 426 | break; |
2447 | 348 | case OpCode::I64__trunc_sat_f32_u: |
2448 | 348 | compileUnsignedTruncSat(Context.Int64Ty); |
2449 | 348 | break; |
2450 | 338 | case OpCode::I64__trunc_sat_f64_s: |
2451 | 338 | compileSignedTruncSat(Context.Int64Ty); |
2452 | 338 | break; |
2453 | 423 | case OpCode::I64__trunc_sat_f64_u: |
2454 | 423 | compileUnsignedTruncSat(Context.Int64Ty); |
2455 | 423 | break; |
2456 | | |
2457 | | // SIMD Memory Instructions |
2458 | 4.88k | case OpCode::V128__load: |
2459 | 4.88k | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2460 | 4.88k | Instr.getMemoryAlign(), Context.Int128x1Ty); |
2461 | 4.88k | break; |
2462 | 219 | case OpCode::V128__load8x8_s: |
2463 | 219 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2464 | 219 | Instr.getMemoryAlign(), |
2465 | 219 | LLVM::Type::getVectorType(Context.Int8Ty, 8), |
2466 | 219 | Context.Int16x8Ty, true); |
2467 | 219 | break; |
2468 | 41 | case OpCode::V128__load8x8_u: |
2469 | 41 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2470 | 41 | Instr.getMemoryAlign(), |
2471 | 41 | LLVM::Type::getVectorType(Context.Int8Ty, 8), |
2472 | 41 | Context.Int16x8Ty, false); |
2473 | 41 | break; |
2474 | 365 | case OpCode::V128__load16x4_s: |
2475 | 365 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2476 | 365 | Instr.getMemoryAlign(), |
2477 | 365 | LLVM::Type::getVectorType(Context.Int16Ty, 4), |
2478 | 365 | Context.Int32x4Ty, true); |
2479 | 365 | break; |
2480 | 524 | case OpCode::V128__load16x4_u: |
2481 | 524 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2482 | 524 | Instr.getMemoryAlign(), |
2483 | 524 | LLVM::Type::getVectorType(Context.Int16Ty, 4), |
2484 | 524 | Context.Int32x4Ty, false); |
2485 | 524 | break; |
2486 | 158 | case OpCode::V128__load32x2_s: |
2487 | 158 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2488 | 158 | Instr.getMemoryAlign(), |
2489 | 158 | LLVM::Type::getVectorType(Context.Int32Ty, 2), |
2490 | 158 | Context.Int64x2Ty, true); |
2491 | 158 | break; |
2492 | 174 | case OpCode::V128__load32x2_u: |
2493 | 174 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2494 | 174 | Instr.getMemoryAlign(), |
2495 | 174 | LLVM::Type::getVectorType(Context.Int32Ty, 2), |
2496 | 174 | Context.Int64x2Ty, false); |
2497 | 174 | break; |
2498 | 72 | case OpCode::V128__load8_splat: |
2499 | 72 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2500 | 72 | Instr.getMemoryAlign(), Context.Int8Ty, |
2501 | 72 | Context.Int8x16Ty); |
2502 | 72 | break; |
2503 | 163 | case OpCode::V128__load16_splat: |
2504 | 163 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2505 | 163 | Instr.getMemoryAlign(), Context.Int16Ty, |
2506 | 163 | Context.Int16x8Ty); |
2507 | 163 | break; |
2508 | 232 | case OpCode::V128__load32_splat: |
2509 | 232 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2510 | 232 | Instr.getMemoryAlign(), Context.Int32Ty, |
2511 | 232 | Context.Int32x4Ty); |
2512 | 232 | break; |
2513 | 161 | case OpCode::V128__load64_splat: |
2514 | 161 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2515 | 161 | Instr.getMemoryAlign(), Context.Int64Ty, |
2516 | 161 | Context.Int64x2Ty); |
2517 | 161 | break; |
2518 | 83 | case OpCode::V128__load32_zero: |
2519 | 83 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2520 | 83 | Instr.getMemoryAlign(), Context.Int32Ty, |
2521 | 83 | Context.Int128Ty, false); |
2522 | 83 | break; |
2523 | 143 | case OpCode::V128__load64_zero: |
2524 | 143 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2525 | 143 | Instr.getMemoryAlign(), Context.Int64Ty, |
2526 | 143 | Context.Int128Ty, false); |
2527 | 143 | break; |
2528 | 298 | case OpCode::V128__store: |
2529 | 298 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2530 | 298 | Instr.getMemoryAlign(), Context.Int128x1Ty, false, true); |
2531 | 298 | break; |
2532 | 206 | case OpCode::V128__load8_lane: |
2533 | 206 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2534 | 206 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2535 | 206 | Context.Int8Ty, Context.Int8x16Ty); |
2536 | 206 | break; |
2537 | 146 | case OpCode::V128__load16_lane: |
2538 | 146 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2539 | 146 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2540 | 146 | Context.Int16Ty, Context.Int16x8Ty); |
2541 | 146 | break; |
2542 | 135 | case OpCode::V128__load32_lane: |
2543 | 135 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2544 | 135 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2545 | 135 | Context.Int32Ty, Context.Int32x4Ty); |
2546 | 135 | break; |
2547 | 22 | case OpCode::V128__load64_lane: |
2548 | 22 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2549 | 22 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2550 | 22 | Context.Int64Ty, Context.Int64x2Ty); |
2551 | 22 | break; |
2552 | 132 | case OpCode::V128__store8_lane: |
2553 | 132 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2554 | 132 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2555 | 132 | Context.Int8Ty, Context.Int8x16Ty); |
2556 | 132 | break; |
2557 | 103 | case OpCode::V128__store16_lane: |
2558 | 103 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2559 | 103 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2560 | 103 | Context.Int16Ty, Context.Int16x8Ty); |
2561 | 103 | break; |
2562 | 123 | case OpCode::V128__store32_lane: |
2563 | 123 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2564 | 123 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2565 | 123 | Context.Int32Ty, Context.Int32x4Ty); |
2566 | 123 | break; |
2567 | 54 | case OpCode::V128__store64_lane: |
2568 | 54 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2569 | 54 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2570 | 54 | Context.Int64Ty, Context.Int64x2Ty); |
2571 | 54 | break; |
2572 | | |
2573 | | // SIMD Const Instructions |
2574 | 351 | case OpCode::V128__const: { |
2575 | 351 | const auto Value = Instr.getNum().get<uint64x2_t>(); |
2576 | 351 | auto Vector = |
2577 | 351 | LLVM::Value::getConstVector64(LLContext, {Value[0], Value[1]}); |
2578 | 351 | stackPush(Builder.createBitCast(Vector, Context.Int64x2Ty)); |
2579 | 351 | break; |
2580 | 853 | } |
2581 | | |
2582 | | // SIMD Shuffle Instructions |
2583 | 15 | case OpCode::I8x16__shuffle: { |
2584 | 15 | auto V2 = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
2585 | 15 | auto V1 = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
2586 | 15 | const auto V3 = Instr.getNum().get<uint128_t>(); |
2587 | 15 | std::array<uint8_t, 16> Mask; |
2588 | 255 | for (size_t I = 0; I < 16; ++I) { |
2589 | 240 | auto Num = static_cast<uint8_t>(V3 >> (I * 8)); |
2590 | 240 | if constexpr (Endian::native == Endian::little) { |
2591 | 240 | Mask[I] = Num; |
2592 | | } else { |
2593 | | Mask[15 - I] = Num < 16 ? 15 - Num : 47 - Num; |
2594 | | } |
2595 | 240 | } |
2596 | 15 | stackPush(Builder.createBitCast( |
2597 | 15 | Builder.createShuffleVector( |
2598 | 15 | V1, V2, LLVM::Value::getConstVector8(LLContext, Mask)), |
2599 | 15 | Context.Int64x2Ty)); |
2600 | 15 | break; |
2601 | 853 | } |
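| | // NOTE: i8x16.shuffle carries a 128-bit immediate packing 16 lane
| | // indices, one byte per result lane; indices 0-15 select from V1 and
| | // 16-31 from V2. On a little-endian host, byte I of the immediate is
| | // already the mask entry for lane I. On a big-endian host the lane
| | // order is reversed (Mask[15 - I]) and each index is mirrored within
| | // its source vector (15 - Num for V1 lanes, 47 - Num for V2 lanes) so
| | // the shufflevector mask still names the same element of the
| | // concatenated <V1, V2> operand.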
2602 | | |
2603 | | // SIMD Lane Instructions |
2604 | 71 | case OpCode::I8x16__extract_lane_s: |
2605 | 71 | compileExtractLaneOp(Context.Int8x16Ty, Instr.getMemoryLane(), |
2606 | 71 | Context.Int32Ty, true); |
2607 | 71 | break; |
2608 | 28 | case OpCode::I8x16__extract_lane_u: |
2609 | 28 | compileExtractLaneOp(Context.Int8x16Ty, Instr.getMemoryLane(), |
2610 | 28 | Context.Int32Ty, false); |
2611 | 28 | break; |
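| | // NOTE: for the narrow integer lanes, the trailing boolean passed to
| | // compileExtractLaneOp appears to select sign-extension (_s, true)
| | // versus zero-extension (_u, false) of the extracted i8/i16 lane into
| | // the i32 result type.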
2612 | 180 | case OpCode::I8x16__replace_lane: |
2613 | 180 | compileReplaceLaneOp(Context.Int8x16Ty, Instr.getMemoryLane()); |
2614 | 180 | break; |
2615 | 423 | case OpCode::I16x8__extract_lane_s: |
2616 | 423 | compileExtractLaneOp(Context.Int16x8Ty, Instr.getMemoryLane(), |
2617 | 423 | Context.Int32Ty, true); |
2618 | 423 | break; |
2619 | 454 | case OpCode::I16x8__extract_lane_u: |
2620 | 454 | compileExtractLaneOp(Context.Int16x8Ty, Instr.getMemoryLane(), |
2621 | 454 | Context.Int32Ty, false); |
2622 | 454 | break; |
2623 | 728 | case OpCode::I16x8__replace_lane: |
2624 | 728 | compileReplaceLaneOp(Context.Int16x8Ty, Instr.getMemoryLane()); |
2625 | 728 | break; |
2626 | 66 | case OpCode::I32x4__extract_lane: |
2627 | 66 | compileExtractLaneOp(Context.Int32x4Ty, Instr.getMemoryLane()); |
2628 | 66 | break; |
2629 | 325 | case OpCode::I32x4__replace_lane: |
2630 | 325 | compileReplaceLaneOp(Context.Int32x4Ty, Instr.getMemoryLane()); |
2631 | 325 | break; |
2632 | 129 | case OpCode::I64x2__extract_lane: |
2633 | 129 | compileExtractLaneOp(Context.Int64x2Ty, Instr.getMemoryLane()); |
2634 | 129 | break; |
2635 | 14 | case OpCode::I64x2__replace_lane: |
2636 | 14 | compileReplaceLaneOp(Context.Int64x2Ty, Instr.getMemoryLane()); |
2637 | 14 | break; |
2638 | 63 | case OpCode::F32x4__extract_lane: |
2639 | 63 | compileExtractLaneOp(Context.Floatx4Ty, Instr.getMemoryLane()); |
2640 | 63 | break; |
2641 | 23 | case OpCode::F32x4__replace_lane: |
2642 | 23 | compileReplaceLaneOp(Context.Floatx4Ty, Instr.getMemoryLane()); |
2643 | 23 | break; |
2644 | 85 | case OpCode::F64x2__extract_lane: |
2645 | 85 | compileExtractLaneOp(Context.Doublex2Ty, Instr.getMemoryLane()); |
2646 | 85 | break; |
2647 | 7 | case OpCode::F64x2__replace_lane: |
2648 | 7 | compileReplaceLaneOp(Context.Doublex2Ty, Instr.getMemoryLane()); |
2649 | 7 | break; |
2650 | | |
2651 | | // SIMD Numeric Instructions |
2652 | 72 | case OpCode::I8x16__swizzle: |
2653 | 72 | compileVectorSwizzle(); |
2654 | 72 | break; |
2655 | 39.0k | case OpCode::I8x16__splat: |
2656 | 39.0k | compileSplatOp(Context.Int8x16Ty); |
2657 | 39.0k | break; |
2658 | 9.08k | case OpCode::I16x8__splat: |
2659 | 9.08k | compileSplatOp(Context.Int16x8Ty); |
2660 | 9.08k | break; |
2661 | 1.33k | case OpCode::I32x4__splat: |
2662 | 1.33k | compileSplatOp(Context.Int32x4Ty); |
2663 | 1.33k | break; |
2664 | 710 | case OpCode::I64x2__splat: |
2665 | 710 | compileSplatOp(Context.Int64x2Ty); |
2666 | 710 | break; |
2667 | 354 | case OpCode::F32x4__splat: |
2668 | 354 | compileSplatOp(Context.Floatx4Ty); |
2669 | 354 | break; |
2670 | 141 | case OpCode::F64x2__splat: |
2671 | 141 | compileSplatOp(Context.Doublex2Ty); |
2672 | 141 | break; |
2673 | 91 | case OpCode::I8x16__eq: |
2674 | 91 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntEQ); |
2675 | 91 | break; |
2676 | 415 | case OpCode::I8x16__ne: |
2677 | 415 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntNE); |
2678 | 415 | break; |
2679 | 47 | case OpCode::I8x16__lt_s: |
2680 | 47 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSLT); |
2681 | 47 | break; |
2682 | 91 | case OpCode::I8x16__lt_u: |
2683 | 91 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntULT); |
2684 | 91 | break; |
2685 | 150 | case OpCode::I8x16__gt_s: |
2686 | 150 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSGT); |
2687 | 150 | break; |
2688 | 218 | case OpCode::I8x16__gt_u: |
2689 | 218 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntUGT); |
2690 | 218 | break; |
2691 | 103 | case OpCode::I8x16__le_s: |
2692 | 103 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSLE); |
2693 | 103 | break; |
2694 | 123 | case OpCode::I8x16__le_u: |
2695 | 123 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntULE); |
2696 | 123 | break; |
2697 | 647 | case OpCode::I8x16__ge_s: |
2698 | 647 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSGE); |
2699 | 647 | break; |
2700 | 108 | case OpCode::I8x16__ge_u: |
2701 | 108 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntUGE); |
2702 | 108 | break; |
2703 | 77 | case OpCode::I16x8__eq: |
2704 | 77 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntEQ); |
2705 | 77 | break; |
2706 | 183 | case OpCode::I16x8__ne: |
2707 | 183 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntNE); |
2708 | 183 | break; |
2709 | 52 | case OpCode::I16x8__lt_s: |
2710 | 52 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSLT); |
2711 | 52 | break; |
2712 | 233 | case OpCode::I16x8__lt_u: |
2713 | 233 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntULT); |
2714 | 233 | break; |
2715 | 261 | case OpCode::I16x8__gt_s: |
2716 | 261 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSGT); |
2717 | 261 | break; |
2718 | 137 | case OpCode::I16x8__gt_u: |
2719 | 137 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntUGT); |
2720 | 137 | break; |
2721 | 91 | case OpCode::I16x8__le_s: |
2722 | 91 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSLE); |
2723 | 91 | break; |
2724 | 96 | case OpCode::I16x8__le_u: |
2725 | 96 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntULE); |
2726 | 96 | break; |
2727 | 154 | case OpCode::I16x8__ge_s: |
2728 | 154 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSGE); |
2729 | 154 | break; |
2730 | 66 | case OpCode::I16x8__ge_u: |
2731 | 66 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntUGE); |
2732 | 66 | break; |
2733 | 58 | case OpCode::I32x4__eq: |
2734 | 58 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntEQ); |
2735 | 58 | break; |
2736 | 107 | case OpCode::I32x4__ne: |
2737 | 107 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntNE); |
2738 | 107 | break; |
2739 | 32 | case OpCode::I32x4__lt_s: |
2740 | 32 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSLT); |
2741 | 32 | break; |
2742 | 137 | case OpCode::I32x4__lt_u: |
2743 | 137 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntULT); |
2744 | 137 | break; |
2745 | 106 | case OpCode::I32x4__gt_s: |
2746 | 106 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSGT); |
2747 | 106 | break; |
2748 | 221 | case OpCode::I32x4__gt_u: |
2749 | 221 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntUGT); |
2750 | 221 | break; |
2751 | 282 | case OpCode::I32x4__le_s: |
2752 | 282 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSLE); |
2753 | 282 | break; |
2754 | 248 | case OpCode::I32x4__le_u: |
2755 | 248 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntULE); |
2756 | 248 | break; |
2757 | 54 | case OpCode::I32x4__ge_s: |
2758 | 54 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSGE); |
2759 | 54 | break; |
2760 | 98 | case OpCode::I32x4__ge_u: |
2761 | 98 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntUGE); |
2762 | 98 | break; |
2763 | 123 | case OpCode::I64x2__eq: |
2764 | 123 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntEQ); |
2765 | 123 | break; |
2766 | 72 | case OpCode::I64x2__ne: |
2767 | 72 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntNE); |
2768 | 72 | break; |
2769 | 48 | case OpCode::I64x2__lt_s: |
2770 | 48 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSLT); |
2771 | 48 | break; |
2772 | 133 | case OpCode::I64x2__gt_s: |
2773 | 133 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSGT); |
2774 | 133 | break; |
2775 | 31 | case OpCode::I64x2__le_s: |
2776 | 31 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSLE); |
2777 | 31 | break; |
2778 | 65 | case OpCode::I64x2__ge_s: |
2779 | 65 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSGE); |
2780 | 65 | break; |
2781 | 1.37k | case OpCode::F32x4__eq: |
2782 | 1.37k | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOEQ, |
2783 | 1.37k | Context.Int32x4Ty); |
2784 | 1.37k | break; |
2785 | 37 | case OpCode::F32x4__ne: |
2786 | 37 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealUNE, |
2787 | 37 | Context.Int32x4Ty); |
2788 | 37 | break; |
2789 | 715 | case OpCode::F32x4__lt: |
2790 | 715 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOLT, |
2791 | 715 | Context.Int32x4Ty); |
2792 | 715 | break; |
2793 | 73 | case OpCode::F32x4__gt: |
2794 | 73 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOGT, |
2795 | 73 | Context.Int32x4Ty); |
2796 | 73 | break; |
2797 | 361 | case OpCode::F32x4__le: |
2798 | 361 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOLE, |
2799 | 361 | Context.Int32x4Ty); |
2800 | 361 | break; |
2801 | 64 | case OpCode::F32x4__ge: |
2802 | 64 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOGE, |
2803 | 64 | Context.Int32x4Ty); |
2804 | 64 | break; |
2805 | 58 | case OpCode::F64x2__eq: |
2806 | 58 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOEQ, |
2807 | 58 | Context.Int64x2Ty); |
2808 | 58 | break; |
2809 | 99 | case OpCode::F64x2__ne: |
2810 | 99 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealUNE, |
2811 | 99 | Context.Int64x2Ty); |
2812 | 99 | break; |
2813 | 152 | case OpCode::F64x2__lt: |
2814 | 152 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOLT, |
2815 | 152 | Context.Int64x2Ty); |
2816 | 152 | break; |
2817 | 57 | case OpCode::F64x2__gt: |
2818 | 57 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOGT, |
2819 | 57 | Context.Int64x2Ty); |
2820 | 57 | break; |
2821 | 190 | case OpCode::F64x2__le: |
2822 | 190 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOLE, |
2823 | 190 | Context.Int64x2Ty); |
2824 | 190 | break; |
2825 | 88 | case OpCode::F64x2__ge: |
2826 | 88 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOGE, |
2827 | 88 | Context.Int64x2Ty); |
2828 | 88 | break; |
2829 | 132 | case OpCode::V128__not: |
2830 | 132 | Stack.back() = Builder.createNot(Stack.back()); |
2831 | 132 | break; |
2832 | 65 | case OpCode::V128__and: { |
2833 | 65 | auto RHS = stackPop(); |
2834 | 65 | auto LHS = stackPop(); |
2835 | 65 | stackPush(Builder.createAnd(LHS, RHS)); |
2836 | 65 | break; |
2837 | 853 | } |
2838 | 83 | case OpCode::V128__andnot: { |
2839 | 83 | auto RHS = stackPop(); |
2840 | 83 | auto LHS = stackPop(); |
2841 | 83 | stackPush(Builder.createAnd(LHS, Builder.createNot(RHS))); |
2842 | 83 | break; |
2843 | 853 | } |
2844 | 113 | case OpCode::V128__or: { |
2845 | 113 | auto RHS = stackPop(); |
2846 | 113 | auto LHS = stackPop(); |
2847 | 113 | stackPush(Builder.createOr(LHS, RHS)); |
2848 | 113 | break; |
2849 | 853 | } |
2850 | 61 | case OpCode::V128__xor: { |
2851 | 61 | auto RHS = stackPop(); |
2852 | 61 | auto LHS = stackPop(); |
2853 | 61 | stackPush(Builder.createXor(LHS, RHS)); |
2854 | 61 | break; |
2855 | 853 | } |
2856 | 110 | case OpCode::V128__bitselect: { |
2857 | 110 | auto C = stackPop(); |
2858 | 110 | auto V2 = stackPop(); |
2859 | 110 | auto V1 = stackPop(); |
2860 | 110 | stackPush(Builder.createXor( |
2861 | 110 | Builder.createAnd(Builder.createXor(V1, V2), C), V2)); |
2862 | 110 | break; |
2863 | 853 | } |
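| | // NOTE: v128.bitselect is lowered branch-free via the identity
| | // bitselect(V1, V2, C) == ((V1 ^ V2) & C) ^ V2: where a bit of C is 1
| | // the XORs cancel and V1's bit survives, where it is 0 the AND zeroes
| | // the difference and V2's bit is returned.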
2864 | 107 | case OpCode::V128__any_true: |
2865 | 107 | compileVectorAnyTrue(); |
2866 | 107 | break; |
2867 | 870 | case OpCode::I8x16__abs: |
2868 | 870 | compileVectorAbs(Context.Int8x16Ty); |
2869 | 870 | break; |
2870 | 1.54k | case OpCode::I8x16__neg: |
2871 | 1.54k | compileVectorNeg(Context.Int8x16Ty); |
2872 | 1.54k | break; |
2873 | 145 | case OpCode::I8x16__popcnt: |
2874 | 145 | compileVectorPopcnt(); |
2875 | 145 | break; |
2876 | 331 | case OpCode::I8x16__all_true: |
2877 | 331 | compileVectorAllTrue(Context.Int8x16Ty); |
2878 | 331 | break; |
2879 | 805 | case OpCode::I8x16__bitmask: |
2880 | 805 | compileVectorBitMask(Context.Int8x16Ty); |
2881 | 805 | break; |
2882 | 83 | case OpCode::I8x16__narrow_i16x8_s: |
2883 | 83 | compileVectorNarrow(Context.Int16x8Ty, true); |
2884 | 83 | break; |
2885 | 194 | case OpCode::I8x16__narrow_i16x8_u: |
2886 | 194 | compileVectorNarrow(Context.Int16x8Ty, false); |
2887 | 194 | break; |
2888 | 147 | case OpCode::I8x16__shl: |
2889 | 147 | compileVectorShl(Context.Int8x16Ty); |
2890 | 147 | break; |
2891 | 1.04k | case OpCode::I8x16__shr_s: |
2892 | 1.04k | compileVectorAShr(Context.Int8x16Ty); |
2893 | 1.04k | break; |
2894 | 55 | case OpCode::I8x16__shr_u: |
2895 | 55 | compileVectorLShr(Context.Int8x16Ty); |
2896 | 55 | break; |
2897 | 50 | case OpCode::I8x16__add: |
2898 | 50 | compileVectorVectorAdd(Context.Int8x16Ty); |
2899 | 50 | break; |
2900 | 961 | case OpCode::I8x16__add_sat_s: |
2901 | 961 | compileVectorVectorAddSat(Context.Int8x16Ty, true); |
2902 | 961 | break; |
2903 | 78 | case OpCode::I8x16__add_sat_u: |
2904 | 78 | compileVectorVectorAddSat(Context.Int8x16Ty, false); |
2905 | 78 | break; |
2906 | 74 | case OpCode::I8x16__sub: |
2907 | 74 | compileVectorVectorSub(Context.Int8x16Ty); |
2908 | 74 | break; |
2909 | 190 | case OpCode::I8x16__sub_sat_s: |
2910 | 190 | compileVectorVectorSubSat(Context.Int8x16Ty, true); |
2911 | 190 | break; |
2912 | 83 | case OpCode::I8x16__sub_sat_u: |
2913 | 83 | compileVectorVectorSubSat(Context.Int8x16Ty, false); |
2914 | 83 | break; |
2915 | 68 | case OpCode::I8x16__min_s: |
2916 | 68 | compileVectorVectorSMin(Context.Int8x16Ty); |
2917 | 68 | break; |
2918 | 147 | case OpCode::I8x16__min_u: |
2919 | 147 | compileVectorVectorUMin(Context.Int8x16Ty); |
2920 | 147 | break; |
2921 | 282 | case OpCode::I8x16__max_s: |
2922 | 282 | compileVectorVectorSMax(Context.Int8x16Ty); |
2923 | 282 | break; |
2924 | 98 | case OpCode::I8x16__max_u: |
2925 | 98 | compileVectorVectorUMax(Context.Int8x16Ty); |
2926 | 98 | break; |
2927 | 123 | case OpCode::I8x16__avgr_u: |
2928 | 123 | compileVectorVectorUAvgr(Context.Int8x16Ty); |
2929 | 123 | break; |
2930 | 285 | case OpCode::I16x8__abs: |
2931 | 285 | compileVectorAbs(Context.Int16x8Ty); |
2932 | 285 | break; |
2933 | 205 | case OpCode::I16x8__neg: |
2934 | 205 | compileVectorNeg(Context.Int16x8Ty); |
2935 | 205 | break; |
2936 | 143 | case OpCode::I16x8__all_true: |
2937 | 143 | compileVectorAllTrue(Context.Int16x8Ty); |
2938 | 143 | break; |
2939 | 147 | case OpCode::I16x8__bitmask: |
2940 | 147 | compileVectorBitMask(Context.Int16x8Ty); |
2941 | 147 | break; |
2942 | 45 | case OpCode::I16x8__narrow_i32x4_s: |
2943 | 45 | compileVectorNarrow(Context.Int32x4Ty, true); |
2944 | 45 | break; |
2945 | 365 | case OpCode::I16x8__narrow_i32x4_u: |
2946 | 365 | compileVectorNarrow(Context.Int32x4Ty, false); |
2947 | 365 | break; |
2948 | 982 | case OpCode::I16x8__extend_low_i8x16_s: |
2949 | 982 | compileVectorExtend(Context.Int8x16Ty, true, true); |
2950 | 982 | break; |
2951 | 96 | case OpCode::I16x8__extend_high_i8x16_s: |
2952 | 96 | compileVectorExtend(Context.Int8x16Ty, true, false); |
2953 | 96 | break; |
2954 | 394 | case OpCode::I16x8__extend_low_i8x16_u: |
2955 | 394 | compileVectorExtend(Context.Int8x16Ty, false, true); |
2956 | 394 | break; |
2957 | 12 | case OpCode::I16x8__extend_high_i8x16_u: |
2958 | 12 | compileVectorExtend(Context.Int8x16Ty, false, false); |
2959 | 12 | break; |
2960 | 109 | case OpCode::I16x8__shl: |
2961 | 109 | compileVectorShl(Context.Int16x8Ty); |
2962 | 109 | break; |
2963 | 516 | case OpCode::I16x8__shr_s: |
2964 | 516 | compileVectorAShr(Context.Int16x8Ty); |
2965 | 516 | break; |
2966 | 127 | case OpCode::I16x8__shr_u: |
2967 | 127 | compileVectorLShr(Context.Int16x8Ty); |
2968 | 127 | break; |
2969 | 148 | case OpCode::I16x8__add: |
2970 | 148 | compileVectorVectorAdd(Context.Int16x8Ty); |
2971 | 148 | break; |
2972 | 21 | case OpCode::I16x8__add_sat_s: |
2973 | 21 | compileVectorVectorAddSat(Context.Int16x8Ty, true); |
2974 | 21 | break; |
2975 | 474 | case OpCode::I16x8__add_sat_u: |
2976 | 474 | compileVectorVectorAddSat(Context.Int16x8Ty, false); |
2977 | 474 | break; |
2978 | 347 | case OpCode::I16x8__sub: |
2979 | 347 | compileVectorVectorSub(Context.Int16x8Ty); |
2980 | 347 | break; |
2981 | 30 | case OpCode::I16x8__sub_sat_s: |
2982 | 30 | compileVectorVectorSubSat(Context.Int16x8Ty, true); |
2983 | 30 | break; |
2984 | 97 | case OpCode::I16x8__sub_sat_u: |
2985 | 97 | compileVectorVectorSubSat(Context.Int16x8Ty, false); |
2986 | 97 | break; |
2987 | 112 | case OpCode::I16x8__mul: |
2988 | 112 | compileVectorVectorMul(Context.Int16x8Ty); |
2989 | 112 | break; |
2990 | 162 | case OpCode::I16x8__min_s: |
2991 | 162 | compileVectorVectorSMin(Context.Int16x8Ty); |
2992 | 162 | break; |
2993 | 123 | case OpCode::I16x8__min_u: |
2994 | 123 | compileVectorVectorUMin(Context.Int16x8Ty); |
2995 | 123 | break; |
2996 | 79 | case OpCode::I16x8__max_s: |
2997 | 79 | compileVectorVectorSMax(Context.Int16x8Ty); |
2998 | 79 | break; |
2999 | 694 | case OpCode::I16x8__max_u: |
3000 | 694 | compileVectorVectorUMax(Context.Int16x8Ty); |
3001 | 694 | break; |
3002 | 145 | case OpCode::I16x8__avgr_u: |
3003 | 145 | compileVectorVectorUAvgr(Context.Int16x8Ty); |
3004 | 145 | break; |
3005 | 67 | case OpCode::I16x8__extmul_low_i8x16_s: |
3006 | 67 | compileVectorExtMul(Context.Int8x16Ty, true, true); |
3007 | 67 | break; |
3008 | 206 | case OpCode::I16x8__extmul_high_i8x16_s: |
3009 | 206 | compileVectorExtMul(Context.Int8x16Ty, true, false); |
3010 | 206 | break; |
3011 | 117 | case OpCode::I16x8__extmul_low_i8x16_u: |
3012 | 117 | compileVectorExtMul(Context.Int8x16Ty, false, true); |
3013 | 117 | break; |
3014 | 505 | case OpCode::I16x8__extmul_high_i8x16_u: |
3015 | 505 | compileVectorExtMul(Context.Int8x16Ty, false, false); |
3016 | 505 | break; |
3017 | 152 | case OpCode::I16x8__q15mulr_sat_s: |
3018 | 152 | compileVectorVectorQ15MulSat(); |
3019 | 152 | break; |
3020 | 316 | case OpCode::I16x8__extadd_pairwise_i8x16_s: |
3021 | 316 | compileVectorExtAddPairwise(Context.Int8x16Ty, true); |
3022 | 316 | break; |
3023 | 327 | case OpCode::I16x8__extadd_pairwise_i8x16_u: |
3024 | 327 | compileVectorExtAddPairwise(Context.Int8x16Ty, false); |
3025 | 327 | break; |
3026 | 57 | case OpCode::I32x4__abs: |
3027 | 57 | compileVectorAbs(Context.Int32x4Ty); |
3028 | 57 | break; |
3029 | 207 | case OpCode::I32x4__neg: |
3030 | 207 | compileVectorNeg(Context.Int32x4Ty); |
3031 | 207 | break; |
3032 | 185 | case OpCode::I32x4__all_true: |
3033 | 185 | compileVectorAllTrue(Context.Int32x4Ty); |
3034 | 185 | break; |
3035 | 92 | case OpCode::I32x4__bitmask: |
3036 | 92 | compileVectorBitMask(Context.Int32x4Ty); |
3037 | 92 | break; |
3038 | 109 | case OpCode::I32x4__extend_low_i16x8_s: |
3039 | 109 | compileVectorExtend(Context.Int16x8Ty, true, true); |
3040 | 109 | break; |
3041 | 517 | case OpCode::I32x4__extend_high_i16x8_s: |
3042 | 517 | compileVectorExtend(Context.Int16x8Ty, true, false); |
3043 | 517 | break; |
3044 | 1.88k | case OpCode::I32x4__extend_low_i16x8_u: |
3045 | 1.88k | compileVectorExtend(Context.Int16x8Ty, false, true); |
3046 | 1.88k | break; |
3047 | 142 | case OpCode::I32x4__extend_high_i16x8_u: |
3048 | 142 | compileVectorExtend(Context.Int16x8Ty, false, false); |
3049 | 142 | break; |
3050 | 1.53k | case OpCode::I32x4__shl: |
3051 | 1.53k | compileVectorShl(Context.Int32x4Ty); |
3052 | 1.53k | break; |
3053 | 409 | case OpCode::I32x4__shr_s: |
3054 | 409 | compileVectorAShr(Context.Int32x4Ty); |
3055 | 409 | break; |
3056 | 500 | case OpCode::I32x4__shr_u: |
3057 | 500 | compileVectorLShr(Context.Int32x4Ty); |
3058 | 500 | break; |
3059 | 167 | case OpCode::I32x4__add: |
3060 | 167 | compileVectorVectorAdd(Context.Int32x4Ty); |
3061 | 167 | break; |
3062 | 159 | case OpCode::I32x4__sub: |
3063 | 159 | compileVectorVectorSub(Context.Int32x4Ty); |
3064 | 159 | break; |
3065 | 254 | case OpCode::I32x4__mul: |
3066 | 254 | compileVectorVectorMul(Context.Int32x4Ty); |
3067 | 254 | break; |
3068 | 82 | case OpCode::I32x4__min_s: |
3069 | 82 | compileVectorVectorSMin(Context.Int32x4Ty); |
3070 | 82 | break; |
3071 | 70 | case OpCode::I32x4__min_u: |
3072 | 70 | compileVectorVectorUMin(Context.Int32x4Ty); |
3073 | 70 | break; |
3074 | 60 | case OpCode::I32x4__max_s: |
3075 | 60 | compileVectorVectorSMax(Context.Int32x4Ty); |
3076 | 60 | break; |
3077 | 79 | case OpCode::I32x4__max_u: |
3078 | 79 | compileVectorVectorUMax(Context.Int32x4Ty); |
3079 | 79 | break; |
3080 | 117 | case OpCode::I32x4__extmul_low_i16x8_s: |
3081 | 117 | compileVectorExtMul(Context.Int16x8Ty, true, true); |
3082 | 117 | break; |
3083 | 66 | case OpCode::I32x4__extmul_high_i16x8_s: |
3084 | 66 | compileVectorExtMul(Context.Int16x8Ty, true, false); |
3085 | 66 | break; |
3086 | 228 | case OpCode::I32x4__extmul_low_i16x8_u: |
3087 | 228 | compileVectorExtMul(Context.Int16x8Ty, false, true); |
3088 | 228 | break; |
3089 | 122 | case OpCode::I32x4__extmul_high_i16x8_u: |
3090 | 122 | compileVectorExtMul(Context.Int16x8Ty, false, false); |
3091 | 122 | break; |
3092 | 1.12k | case OpCode::I32x4__extadd_pairwise_i16x8_s: |
3093 | 1.12k | compileVectorExtAddPairwise(Context.Int16x8Ty, true); |
3094 | 1.12k | break; |
3095 | 628 | case OpCode::I32x4__extadd_pairwise_i16x8_u: |
3096 | 628 | compileVectorExtAddPairwise(Context.Int16x8Ty, false); |
3097 | 628 | break; |
3098 | 99 | case OpCode::I32x4__dot_i16x8_s: { |
3099 | 99 | auto ExtendTy = Context.Int16x8Ty.getExtendedElementVectorType(); |
3100 | 99 | auto Undef = LLVM::Value::getUndef(ExtendTy); |
3101 | 99 | auto LHS = Builder.createSExt( |
3102 | 99 | Builder.createBitCast(stackPop(), Context.Int16x8Ty), ExtendTy); |
3103 | 99 | auto RHS = Builder.createSExt( |
3104 | 99 | Builder.createBitCast(stackPop(), Context.Int16x8Ty), ExtendTy); |
3105 | 99 | auto M = Builder.createMul(LHS, RHS); |
3106 | 99 | auto L = Builder.createShuffleVector( |
3107 | 99 | M, Undef, |
3108 | 99 | LLVM::Value::getConstVector32(LLContext, {0U, 2U, 4U, 6U})); |
3109 | 99 | auto R = Builder.createShuffleVector( |
3110 | 99 | M, Undef, |
3111 | 99 | LLVM::Value::getConstVector32(LLContext, {1U, 3U, 5U, 7U})); |
3112 | 99 | auto V = Builder.createAdd(L, R); |
3113 | 99 | stackPush(Builder.createBitCast(V, Context.Int64x2Ty)); |
3114 | 99 | break; |
3115 | 853 | } |
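| | // NOTE: i32x4.dot_i16x8_s above is expressed in generic IR: both
| | // operands are sign-extended from i16x8 to a wider i32 vector and
| | // multiplied lane-wise, then shuffles split the even lanes
| | // {0, 2, 4, 6} and odd lanes {1, 3, 5, 7} of the product and an add
| | // folds the pairs, so lane k of the result is
| | // a[2k]*b[2k] + a[2k+1]*b[2k+1] in 32-bit arithmetic (each i16*i16
| | // product is exact in i32).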
3116 | 888 | case OpCode::I64x2__abs: |
3117 | 888 | compileVectorAbs(Context.Int64x2Ty); |
3118 | 888 | break; |
3119 | 544 | case OpCode::I64x2__neg: |
3120 | 544 | compileVectorNeg(Context.Int64x2Ty); |
3121 | 544 | break; |
3122 | 345 | case OpCode::I64x2__all_true: |
3123 | 345 | compileVectorAllTrue(Context.Int64x2Ty); |
3124 | 345 | break; |
3125 | 228 | case OpCode::I64x2__bitmask: |
3126 | 228 | compileVectorBitMask(Context.Int64x2Ty); |
3127 | 228 | break; |
3128 | 314 | case OpCode::I64x2__extend_low_i32x4_s: |
3129 | 314 | compileVectorExtend(Context.Int32x4Ty, true, true); |
3130 | 314 | break; |
3131 | 683 | case OpCode::I64x2__extend_high_i32x4_s: |
3132 | 683 | compileVectorExtend(Context.Int32x4Ty, true, false); |
3133 | 683 | break; |
3134 | 224 | case OpCode::I64x2__extend_low_i32x4_u: |
3135 | 224 | compileVectorExtend(Context.Int32x4Ty, false, true); |
3136 | 224 | break; |
3137 | 574 | case OpCode::I64x2__extend_high_i32x4_u: |
3138 | 574 | compileVectorExtend(Context.Int32x4Ty, false, false); |
3139 | 574 | break; |
3140 | 101 | case OpCode::I64x2__shl: |
3141 | 101 | compileVectorShl(Context.Int64x2Ty); |
3142 | 101 | break; |
3143 | 272 | case OpCode::I64x2__shr_s: |
3144 | 272 | compileVectorAShr(Context.Int64x2Ty); |
3145 | 272 | break; |
3146 | 74 | case OpCode::I64x2__shr_u: |
3147 | 74 | compileVectorLShr(Context.Int64x2Ty); |
3148 | 74 | break; |
3149 | 41 | case OpCode::I64x2__add: |
3150 | 41 | compileVectorVectorAdd(Context.Int64x2Ty); |
3151 | 41 | break; |
3152 | 293 | case OpCode::I64x2__sub: |
3153 | 293 | compileVectorVectorSub(Context.Int64x2Ty); |
3154 | 293 | break; |
3155 | 87 | case OpCode::I64x2__mul: |
3156 | 87 | compileVectorVectorMul(Context.Int64x2Ty); |
3157 | 87 | break; |
3158 | 37 | case OpCode::I64x2__extmul_low_i32x4_s: |
3159 | 37 | compileVectorExtMul(Context.Int32x4Ty, true, true); |
3160 | 37 | break; |
3161 | 514 | case OpCode::I64x2__extmul_high_i32x4_s: |
3162 | 514 | compileVectorExtMul(Context.Int32x4Ty, true, false); |
3163 | 514 | break; |
3164 | 31 | case OpCode::I64x2__extmul_low_i32x4_u: |
3165 | 31 | compileVectorExtMul(Context.Int32x4Ty, false, true); |
3166 | 31 | break; |
3167 | 131 | case OpCode::I64x2__extmul_high_i32x4_u: |
3168 | 131 | compileVectorExtMul(Context.Int32x4Ty, false, false); |
3169 | 131 | break; |
3170 | 105 | case OpCode::F32x4__abs: |
3171 | 105 | compileVectorFAbs(Context.Floatx4Ty); |
3172 | 105 | break; |
3173 | 155 | case OpCode::F32x4__neg: |
3174 | 155 | compileVectorFNeg(Context.Floatx4Ty); |
3175 | 155 | break; |
3176 | 214 | case OpCode::F32x4__sqrt: |
3177 | 214 | compileVectorFSqrt(Context.Floatx4Ty); |
3178 | 214 | break; |
3179 | 133 | case OpCode::F32x4__add: |
3180 | 133 | compileVectorVectorFAdd(Context.Floatx4Ty); |
3181 | 133 | break; |
3182 | 251 | case OpCode::F32x4__sub: |
3183 | 251 | compileVectorVectorFSub(Context.Floatx4Ty); |
3184 | 251 | break; |
3185 | 38 | case OpCode::F32x4__mul: |
3186 | 38 | compileVectorVectorFMul(Context.Floatx4Ty); |
3187 | 38 | break; |
3188 | 167 | case OpCode::F32x4__div: |
3189 | 167 | compileVectorVectorFDiv(Context.Floatx4Ty); |
3190 | 167 | break; |
3191 | 123 | case OpCode::F32x4__min: |
3192 | 123 | compileVectorVectorFMin(Context.Floatx4Ty); |
3193 | 123 | break; |
3194 | 36 | case OpCode::F32x4__max: |
3195 | 36 | compileVectorVectorFMax(Context.Floatx4Ty); |
3196 | 36 | break; |
3197 | 50 | case OpCode::F32x4__pmin: |
3198 | 50 | compileVectorVectorFPMin(Context.Floatx4Ty); |
3199 | 50 | break; |
3200 | 225 | case OpCode::F32x4__pmax: |
3201 | 225 | compileVectorVectorFPMax(Context.Floatx4Ty); |
3202 | 225 | break; |
3203 | 789 | case OpCode::F32x4__ceil: |
3204 | 789 | compileVectorFCeil(Context.Floatx4Ty); |
3205 | 789 | break; |
3206 | 1.74k | case OpCode::F32x4__floor: |
3207 | 1.74k | compileVectorFFloor(Context.Floatx4Ty); |
3208 | 1.74k | break; |
3209 | 1.61k | case OpCode::F32x4__trunc: |
3210 | 1.61k | compileVectorFTrunc(Context.Floatx4Ty); |
3211 | 1.61k | break; |
3212 | 217 | case OpCode::F32x4__nearest: |
3213 | 217 | compileVectorFNearest(Context.Floatx4Ty); |
3214 | 217 | break; |
3215 | 440 | case OpCode::F64x2__abs: |
3216 | 440 | compileVectorFAbs(Context.Doublex2Ty); |
3217 | 440 | break; |
3218 | 630 | case OpCode::F64x2__neg: |
3219 | 630 | compileVectorFNeg(Context.Doublex2Ty); |
3220 | 630 | break; |
3221 | 128 | case OpCode::F64x2__sqrt: |
3222 | 128 | compileVectorFSqrt(Context.Doublex2Ty); |
3223 | 128 | break; |
3224 | 48 | case OpCode::F64x2__add: |
3225 | 48 | compileVectorVectorFAdd(Context.Doublex2Ty); |
3226 | 48 | break; |
3227 | 208 | case OpCode::F64x2__sub: |
3228 | 208 | compileVectorVectorFSub(Context.Doublex2Ty); |
3229 | 208 | break; |
3230 | 210 | case OpCode::F64x2__mul: |
3231 | 210 | compileVectorVectorFMul(Context.Doublex2Ty); |
3232 | 210 | break; |
3233 | 37 | case OpCode::F64x2__div: |
3234 | 37 | compileVectorVectorFDiv(Context.Doublex2Ty); |
3235 | 37 | break; |
3236 | 166 | case OpCode::F64x2__min: |
3237 | 166 | compileVectorVectorFMin(Context.Doublex2Ty); |
3238 | 166 | break; |
3239 | 156 | case OpCode::F64x2__max: |
3240 | 156 | compileVectorVectorFMax(Context.Doublex2Ty); |
3241 | 156 | break; |
3242 | 335 | case OpCode::F64x2__pmin: |
3243 | 335 | compileVectorVectorFPMin(Context.Doublex2Ty); |
3244 | 335 | break; |
3245 | 106 | case OpCode::F64x2__pmax: |
3246 | 106 | compileVectorVectorFPMax(Context.Doublex2Ty); |
3247 | 106 | break; |
3248 | 562 | case OpCode::F64x2__ceil: |
3249 | 562 | compileVectorFCeil(Context.Doublex2Ty); |
3250 | 562 | break; |
3251 | 655 | case OpCode::F64x2__floor: |
3252 | 655 | compileVectorFFloor(Context.Doublex2Ty); |
3253 | 655 | break; |
3254 | 117 | case OpCode::F64x2__trunc: |
3255 | 117 | compileVectorFTrunc(Context.Doublex2Ty); |
3256 | 117 | break; |
3257 | 162 | case OpCode::F64x2__nearest: |
3258 | 162 | compileVectorFNearest(Context.Doublex2Ty); |
3259 | 162 | break; |
3260 | 215 | case OpCode::I32x4__trunc_sat_f32x4_s: |
3261 | 215 | compileVectorTruncSatS32(Context.Floatx4Ty, false); |
3262 | 215 | break; |
3263 | 3.68k | case OpCode::I32x4__trunc_sat_f32x4_u: |
3264 | 3.68k | compileVectorTruncSatU32(Context.Floatx4Ty, false); |
3265 | 3.68k | break; |
3266 | 333 | case OpCode::F32x4__convert_i32x4_s: |
3267 | 333 | compileVectorConvertS(Context.Int32x4Ty, Context.Floatx4Ty, false); |
3268 | 333 | break; |
3269 | 728 | case OpCode::F32x4__convert_i32x4_u: |
3270 | 728 | compileVectorConvertU(Context.Int32x4Ty, Context.Floatx4Ty, false); |
3271 | 728 | break; |
3272 | 743 | case OpCode::I32x4__trunc_sat_f64x2_s_zero: |
3273 | 743 | compileVectorTruncSatS32(Context.Doublex2Ty, true); |
3274 | 743 | break; |
3275 | 2.11k | case OpCode::I32x4__trunc_sat_f64x2_u_zero: |
3276 | 2.11k | compileVectorTruncSatU32(Context.Doublex2Ty, true); |
3277 | 2.11k | break; |
3278 | 355 | case OpCode::F64x2__convert_low_i32x4_s: |
3279 | 355 | compileVectorConvertS(Context.Int32x4Ty, Context.Doublex2Ty, true); |
3280 | 355 | break; |
3281 | 1.29k | case OpCode::F64x2__convert_low_i32x4_u: |
3282 | 1.29k | compileVectorConvertU(Context.Int32x4Ty, Context.Doublex2Ty, true); |
3283 | 1.29k | break; |
3284 | 595 | case OpCode::F32x4__demote_f64x2_zero: |
3285 | 595 | compileVectorDemote(); |
3286 | 595 | break; |
3287 | 625 | case OpCode::F64x2__promote_low_f32x4: |
3288 | 625 | compileVectorPromote(); |
3289 | 625 | break; |
3290 | | |
3291 | | // Relaxed SIMD Instructions |
3292 | 39 | case OpCode::I8x16__relaxed_swizzle: |
3293 | 39 | compileVectorSwizzle(); |
3294 | 39 | break; |
3295 | 6 | case OpCode::I32x4__relaxed_trunc_f32x4_s: |
3296 | 6 | compileVectorTruncSatS32(Context.Floatx4Ty, false); |
3297 | 6 | break; |
3298 | 10 | case OpCode::I32x4__relaxed_trunc_f32x4_u: |
3299 | 10 | compileVectorTruncSatU32(Context.Floatx4Ty, false); |
3300 | 10 | break; |
3301 | 10 | case OpCode::I32x4__relaxed_trunc_f64x2_s_zero: |
3302 | 10 | compileVectorTruncSatS32(Context.Doublex2Ty, true); |
3303 | 10 | break; |
3304 | 11 | case OpCode::I32x4__relaxed_trunc_f64x2_u_zero: |
3305 | 11 | compileVectorTruncSatU32(Context.Doublex2Ty, true); |
3306 | 11 | break; |
3307 | 6 | case OpCode::F32x4__relaxed_madd: |
3308 | 6 | compileVectorVectorMAdd(Context.Floatx4Ty); |
3309 | 6 | break; |
3310 | 10 | case OpCode::F32x4__relaxed_nmadd: |
3311 | 10 | compileVectorVectorNMAdd(Context.Floatx4Ty); |
3312 | 10 | break; |
3313 | 11 | case OpCode::F64x2__relaxed_madd: |
3314 | 11 | compileVectorVectorMAdd(Context.Doublex2Ty); |
3315 | 11 | break; |
3316 | 14 | case OpCode::F64x2__relaxed_nmadd: |
3317 | 14 | compileVectorVectorNMAdd(Context.Doublex2Ty); |
3318 | 14 | break; |
3319 | 3 | case OpCode::I8x16__relaxed_laneselect: |
3320 | 13 | case OpCode::I16x8__relaxed_laneselect: |
3321 | 23 | case OpCode::I32x4__relaxed_laneselect: |
3322 | 24 | case OpCode::I64x2__relaxed_laneselect: { |
3323 | 24 | auto C = stackPop(); |
3324 | 24 | auto V2 = stackPop(); |
3325 | 24 | auto V1 = stackPop(); |
3326 | 24 | stackPush(Builder.createXor( |
3327 | 24 | Builder.createAnd(Builder.createXor(V1, V2), C), V2)); |
3328 | 24 | break; |
3329 | 23 | } |
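| | // NOTE: the relaxed laneselect opcodes reuse the same
| | // ((V1 ^ V2) & C) ^ V2 bitwise-select pattern as v128.bitselect,
| | // which is one of the behaviours the relaxed-SIMD proposal permits
| | // for these instructions.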
3330 | 7 | case OpCode::F32x4__relaxed_min: |
3331 | 7 | compileVectorVectorFMin(Context.Floatx4Ty); |
3332 | 7 | break; |
3333 | 14 | case OpCode::F32x4__relaxed_max: |
3334 | 14 | compileVectorVectorFMax(Context.Floatx4Ty); |
3335 | 14 | break; |
3336 | 10 | case OpCode::F64x2__relaxed_min: |
3337 | 10 | compileVectorVectorFMin(Context.Doublex2Ty); |
3338 | 10 | break; |
3339 | 8 | case OpCode::F64x2__relaxed_max: |
3340 | 8 | compileVectorVectorFMax(Context.Doublex2Ty); |
3341 | 8 | break; |
3342 | 10 | case OpCode::I16x8__relaxed_q15mulr_s: |
3343 | 10 | compileVectorVectorQ15MulSat(); |
3344 | 10 | break; |
3345 | 10 | case OpCode::I16x8__relaxed_dot_i8x16_i7x16_s: |
3346 | 10 | compileVectorRelaxedIntegerDotProduct(); |
3347 | 10 | break; |
3348 | 11 | case OpCode::I32x4__relaxed_dot_i8x16_i7x16_add_s: |
3349 | 11 | compileVectorRelaxedIntegerDotProductAdd(); |
3350 | 11 | break; |
3351 | | |
3352 | | // Atomic Instructions |
3353 | 193 | case OpCode::Atomic__fence: |
3354 | 193 | compileMemoryFence(); |
3355 | 193 | break; |
3356 | 28 | case OpCode::Memory__atomic__notify: |
3357 | 28 | compileAtomicNotify(Instr.getTargetIndex(), Instr.getMemoryOffset()); |
3358 | 28 | break; |
3359 | 5 | case OpCode::Memory__atomic__wait32: |
3360 | 5 | compileAtomicWait(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3361 | 5 | Context.Int32Ty, 32); |
3362 | 5 | break; |
3363 | 2 | case OpCode::Memory__atomic__wait64: |
3364 | 2 | compileAtomicWait(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3365 | 2 | Context.Int64Ty, 64); |
3366 | 2 | break; |
3367 | 0 | case OpCode::I32__atomic__load: |
3368 | 0 | compileAtomicLoad(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3369 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, |
3370 | 0 | Context.Int32Ty, true); |
3371 | 0 | break; |
3372 | 0 | case OpCode::I64__atomic__load: |
3373 | 0 | compileAtomicLoad(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3374 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, |
3375 | 0 | Context.Int64Ty, true); |
3376 | 0 | break; |
3377 | 0 | case OpCode::I32__atomic__load8_u: |
3378 | 0 | compileAtomicLoad(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3379 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, |
3380 | 0 | Context.Int8Ty); |
3381 | 0 | break; |
3382 | 0 | case OpCode::I32__atomic__load16_u: |
3383 | 0 | compileAtomicLoad(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3384 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, |
3385 | 0 | Context.Int16Ty); |
3386 | 0 | break; |
3387 | 0 | case OpCode::I64__atomic__load8_u: |
3388 | 0 | compileAtomicLoad(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3389 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, |
3390 | 0 | Context.Int8Ty); |
3391 | 0 | break; |
3392 | 0 | case OpCode::I64__atomic__load16_u: |
3393 | 0 | compileAtomicLoad(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3394 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, |
3395 | 0 | Context.Int16Ty); |
3396 | 0 | break; |
3397 | 0 | case OpCode::I64__atomic__load32_u: |
3398 | 0 | compileAtomicLoad(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3399 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, |
3400 | 0 | Context.Int32Ty); |
3401 | 0 | break; |
3402 | 0 | case OpCode::I32__atomic__store: |
3403 | 0 | compileAtomicStore(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3404 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, |
3405 | 0 | Context.Int32Ty, true); |
3406 | 0 | break; |
3407 | 0 | case OpCode::I64__atomic__store: |
3408 | 0 | compileAtomicStore(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3409 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, |
3410 | 0 | Context.Int64Ty, true); |
3411 | 0 | break; |
3412 | 0 | case OpCode::I32__atomic__store8: |
3413 | 0 | compileAtomicStore(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3414 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, |
3415 | 0 | Context.Int8Ty, true); |
3416 | 0 | break; |
3417 | 0 | case OpCode::I32__atomic__store16: |
3418 | 0 | compileAtomicStore(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3419 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, |
3420 | 0 | Context.Int16Ty, true); |
3421 | 0 | break; |
3422 | 0 | case OpCode::I64__atomic__store8: |
3423 | 0 | compileAtomicStore(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3424 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, |
3425 | 0 | Context.Int8Ty, true); |
3426 | 0 | break; |
3427 | 0 | case OpCode::I64__atomic__store16: |
3428 | 0 | compileAtomicStore(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3429 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, |
3430 | 0 | Context.Int16Ty, true); |
3431 | 0 | break; |
3432 | 0 | case OpCode::I64__atomic__store32: |
3433 | 0 | compileAtomicStore(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3434 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, |
3435 | 0 | Context.Int32Ty, true); |
3436 | 0 | break; |
3437 | 0 | case OpCode::I32__atomic__rmw__add: |
3438 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3439 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3440 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3441 | 0 | break; |
3442 | 0 | case OpCode::I64__atomic__rmw__add: |
3443 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3444 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3445 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3446 | 0 | break; |
3447 | 0 | case OpCode::I32__atomic__rmw8__add_u: |
3448 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3449 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3450 | 0 | Context.Int32Ty, Context.Int8Ty); |
3451 | 0 | break; |
3452 | 0 | case OpCode::I32__atomic__rmw16__add_u: |
3453 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3454 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3455 | 0 | Context.Int32Ty, Context.Int16Ty); |
3456 | 0 | break; |
3457 | 0 | case OpCode::I64__atomic__rmw8__add_u: |
3458 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3459 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3460 | 0 | Context.Int64Ty, Context.Int8Ty); |
3461 | 0 | break; |
3462 | 0 | case OpCode::I64__atomic__rmw16__add_u: |
3463 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3464 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3465 | 0 | Context.Int64Ty, Context.Int16Ty); |
3466 | 0 | break; |
3467 | 0 | case OpCode::I64__atomic__rmw32__add_u: |
3468 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3469 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3470 | 0 | Context.Int64Ty, Context.Int32Ty); |
3471 | 0 | break; |
3472 | 0 | case OpCode::I32__atomic__rmw__sub: |
3473 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3474 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3475 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3476 | 0 | break; |
3477 | 0 | case OpCode::I64__atomic__rmw__sub: |
3478 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3479 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3480 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3481 | 0 | break; |
3482 | 0 | case OpCode::I32__atomic__rmw8__sub_u: |
3483 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3484 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3485 | 0 | Context.Int32Ty, Context.Int8Ty); |
3486 | 0 | break; |
3487 | 0 | case OpCode::I32__atomic__rmw16__sub_u: |
3488 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3489 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3490 | 0 | Context.Int32Ty, Context.Int16Ty); |
3491 | 0 | break; |
3492 | 0 | case OpCode::I64__atomic__rmw8__sub_u: |
3493 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3494 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3495 | 0 | Context.Int64Ty, Context.Int8Ty); |
3496 | 0 | break; |
3497 | 0 | case OpCode::I64__atomic__rmw16__sub_u: |
3498 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3499 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3500 | 0 | Context.Int64Ty, Context.Int16Ty); |
3501 | 0 | break; |
3502 | 0 | case OpCode::I64__atomic__rmw32__sub_u: |
3503 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3504 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3505 | 0 | Context.Int64Ty, Context.Int32Ty); |
3506 | 0 | break; |
3507 | 0 | case OpCode::I32__atomic__rmw__and: |
3508 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3509 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3510 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3511 | 0 | break; |
3512 | 0 | case OpCode::I64__atomic__rmw__and: |
3513 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3514 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3515 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3516 | 0 | break; |
3517 | 0 | case OpCode::I32__atomic__rmw8__and_u: |
3518 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3519 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3520 | 0 | Context.Int32Ty, Context.Int8Ty); |
3521 | 0 | break; |
3522 | 0 | case OpCode::I32__atomic__rmw16__and_u: |
3523 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3524 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3525 | 0 | Context.Int32Ty, Context.Int16Ty); |
3526 | 0 | break; |
3527 | 0 | case OpCode::I64__atomic__rmw8__and_u: |
3528 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3529 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3530 | 0 | Context.Int64Ty, Context.Int8Ty); |
3531 | 0 | break; |
3532 | 0 | case OpCode::I64__atomic__rmw16__and_u: |
3533 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3534 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3535 | 0 | Context.Int64Ty, Context.Int16Ty); |
3536 | 0 | break; |
3537 | 0 | case OpCode::I64__atomic__rmw32__and_u: |
3538 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3539 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3540 | 0 | Context.Int64Ty, Context.Int32Ty); |
3541 | 0 | break; |
3542 | 0 | case OpCode::I32__atomic__rmw__or: |
3543 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3544 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3545 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3546 | 0 | break; |
3547 | 0 | case OpCode::I64__atomic__rmw__or: |
3548 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3549 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3550 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3551 | 0 | break; |
3552 | 0 | case OpCode::I32__atomic__rmw8__or_u: |
3553 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3554 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3555 | 0 | Context.Int32Ty, Context.Int8Ty); |
3556 | 0 | break; |
3557 | 0 | case OpCode::I32__atomic__rmw16__or_u: |
3558 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3559 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3560 | 0 | Context.Int32Ty, Context.Int16Ty); |
3561 | 0 | break; |
3562 | 0 | case OpCode::I64__atomic__rmw8__or_u: |
3563 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3564 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3565 | 0 | Context.Int64Ty, Context.Int8Ty); |
3566 | 0 | break; |
3567 | 0 | case OpCode::I64__atomic__rmw16__or_u: |
3568 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3569 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3570 | 0 | Context.Int64Ty, Context.Int16Ty); |
3571 | 0 | break; |
3572 | 0 | case OpCode::I64__atomic__rmw32__or_u: |
3573 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3574 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3575 | 0 | Context.Int64Ty, Context.Int32Ty); |
3576 | 0 | break; |
3577 | 0 | case OpCode::I32__atomic__rmw__xor: |
3578 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3579 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3580 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3581 | 0 | break; |
3582 | 0 | case OpCode::I64__atomic__rmw__xor: |
3583 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3584 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3585 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3586 | 0 | break; |
3587 | 0 | case OpCode::I32__atomic__rmw8__xor_u: |
3588 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3589 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3590 | 0 | Context.Int32Ty, Context.Int8Ty); |
3591 | 0 | break; |
3592 | 0 | case OpCode::I32__atomic__rmw16__xor_u: |
3593 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3594 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3595 | 0 | Context.Int32Ty, Context.Int16Ty); |
3596 | 0 | break; |
3597 | 0 | case OpCode::I64__atomic__rmw8__xor_u: |
3598 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3599 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3600 | 0 | Context.Int64Ty, Context.Int8Ty); |
3601 | 0 | break; |
3602 | 0 | case OpCode::I64__atomic__rmw16__xor_u: |
3603 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3604 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3605 | 0 | Context.Int64Ty, Context.Int16Ty); |
3606 | 0 | break; |
3607 | 0 | case OpCode::I64__atomic__rmw32__xor_u: |
3608 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3609 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3610 | 0 | Context.Int64Ty, Context.Int32Ty); |
3611 | 0 | break; |
3612 | 0 | case OpCode::I32__atomic__rmw__xchg: |
3613 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3614 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, |
3615 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3616 | 0 | break; |
3617 | 0 | case OpCode::I64__atomic__rmw__xchg: |
3618 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3619 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, |
3620 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3621 | 0 | break; |
3622 | 0 | case OpCode::I32__atomic__rmw8__xchg_u: |
3623 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3624 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, |
3625 | 0 | Context.Int32Ty, Context.Int8Ty); |
3626 | 0 | break; |
3627 | 0 | case OpCode::I32__atomic__rmw16__xchg_u: |
3628 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3629 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, |
3630 | 0 | Context.Int32Ty, Context.Int16Ty); |
3631 | 0 | break; |
3632 | 0 | case OpCode::I64__atomic__rmw8__xchg_u: |
3633 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3634 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, |
3635 | 0 | Context.Int64Ty, Context.Int8Ty); |
3636 | 0 | break; |
3637 | 0 | case OpCode::I64__atomic__rmw16__xchg_u: |
3638 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3639 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, |
3640 | 0 | Context.Int64Ty, Context.Int16Ty); |
3641 | 0 | break; |
3642 | 0 | case OpCode::I64__atomic__rmw32__xchg_u: |
3643 | 0 | compileAtomicRMWOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3644 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, |
3645 | 0 | Context.Int64Ty, Context.Int32Ty); |
3646 | 0 | break; |
3647 | 0 | case OpCode::I32__atomic__rmw__cmpxchg: |
3648 | 0 | compileAtomicCompareExchange( |
3649 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3650 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int32Ty, true); |
3651 | 0 | break; |
3652 | 0 | case OpCode::I64__atomic__rmw__cmpxchg: |
3653 | 0 | compileAtomicCompareExchange( |
3654 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3655 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int64Ty, true); |
3656 | 0 | break; |
3657 | 0 | case OpCode::I32__atomic__rmw8__cmpxchg_u: |
3658 | 0 | compileAtomicCompareExchange( |
3659 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3660 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int8Ty); |
3661 | 0 | break; |
3662 | 0 | case OpCode::I32__atomic__rmw16__cmpxchg_u: |
3663 | 0 | compileAtomicCompareExchange( |
3664 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3665 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int16Ty); |
3666 | 0 | break; |
3667 | 0 | case OpCode::I64__atomic__rmw8__cmpxchg_u: |
3668 | 0 | compileAtomicCompareExchange( |
3669 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3670 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int8Ty); |
3671 | 0 | break; |
3672 | 0 | case OpCode::I64__atomic__rmw16__cmpxchg_u: |
3673 | 0 | compileAtomicCompareExchange( |
3674 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3675 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int16Ty); |
3676 | 0 | break; |
3677 | 0 | case OpCode::I64__atomic__rmw32__cmpxchg_u: |
3678 | 0 | compileAtomicCompareExchange( |
3679 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3680 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int32Ty); |
3681 | 0 | break; |
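| | // NOTE: the threads-proposal opcodes above all delegate to the
| | // compileAtomic* helpers; their zero hit counts indicate this
| | // coverage run compiled no modules using atomic instructions.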
3682 | | |
3683 | 0 | default: |
3684 | 0 | assumingUnreachable(); |
3685 | 1.08M | } |
3686 | 1.08M | return {}; |
3687 | 1.08M | }; |
3688 | | |
3689 | 1.58M | for (const auto &Instr : Instrs) { |
3690 | | // Update instruction count |
3691 | 1.58M | if (LocalInstrCount) { |
3692 | 0 | Builder.createStore( |
3693 | 0 | Builder.createAdd( |
3694 | 0 | Builder.createLoad(Context.Int64Ty, LocalInstrCount), |
3695 | 0 | LLContext.getInt64(1)), |
3696 | 0 | LocalInstrCount); |
3697 | 0 | } |
3698 | 1.58M | if (LocalGas) { |
3699 | 0 | auto NewGas = Builder.createAdd( |
3700 | 0 | Builder.createLoad(Context.Int64Ty, LocalGas), |
3701 | 0 | Builder.createLoad( |
3702 | 0 | Context.Int64Ty, |
3703 | 0 | Builder.createConstInBoundsGEP2_64( |
3704 | 0 | LLVM::Type::getArrayType(Context.Int64Ty, UINT16_MAX + 1), |
3705 | 0 | Context.getCostTable(Builder, ExecCtx), 0, |
3706 | 0 | uint16_t(Instr.getOpCode())))); |
3707 | 0 | Builder.createStore(NewGas, LocalGas); |
3708 | 0 | } |
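| | // NOTE: when gas metering is enabled, each compiled instruction adds
| | // its per-opcode cost to the local gas counter. The cost table is
| | // modelled as an i64[UINT16_MAX + 1] array indexed by the 16-bit
| | // opcode value, so the emitted code is in effect
| | //   LocalGas += CostTable[uint16_t(Instr.getOpCode())];
| | // LocalInstrCount, when present, is likewise incremented by one per
| | // instruction.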
3709 | | |
3710 | | // Dispatch on the instruction's opcode and emit its code.
3711 | 1.58M | EXPECTED_TRY(Dispatch(Instr)); |
3712 | 1.58M | } |
3713 | 11.4k | return {}; |
3714 | 11.4k | } |
3715 | 2.10k | void compileSignedTrunc(LLVM::Type IntType) noexcept { |
3716 | 2.10k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "strunc.norm"); |
3717 | 2.10k | auto NotMinBB = LLVM::BasicBlock::create(LLContext, F.Fn, "strunc.notmin"); |
3718 | 2.10k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "strunc.notmax"); |
3719 | 2.10k | auto Value = stackPop(); |
3720 | 2.10k | const auto [Precise, MinFp, MaxFp] = |
3721 | 2.10k | [IntType, Value]() -> std::tuple<bool, LLVM::Value, LLVM::Value> { |
3722 | 2.10k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3723 | 2.10k | const auto [Min, Max] = [BitWidth]() -> std::tuple<int64_t, int64_t> { |
3724 | 2.10k | switch (BitWidth) { |
3725 | 1.64k | case 32: |
3726 | 1.64k | return {std::numeric_limits<int32_t>::min(), |
3727 | 1.64k | std::numeric_limits<int32_t>::max()}; |
3728 | 461 | case 64: |
3729 | 461 | return {std::numeric_limits<int64_t>::min(), |
3730 | 461 | std::numeric_limits<int64_t>::max()}; |
3731 | 0 | default: |
3732 | 0 | assumingUnreachable(); |
3733 | 2.10k | } |
3734 | 2.10k | }(); |
3735 | 2.10k | auto FPType = Value.getType(); |
3736 | 2.10k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3737 | 2.10k | const auto FPWidth = FPType.getFPMantissaWidth(); |
3738 | 2.10k | return {BitWidth <= FPWidth, LLVM::Value::getConstReal(FPType, Min), |
3739 | 2.10k | LLVM::Value::getConstReal(FPType, Max)}; |
3740 | 2.10k | }(); |
3741 | | |
3742 | 2.10k | auto IsNotNan = Builder.createLikely(Builder.createFCmpORD(Value, Value)); |
3743 | 2.10k | Builder.createCondBr(IsNotNan, NormBB, |
3744 | 2.10k | getTrapBB(ErrCode::Value::InvalidConvToInt)); |
3745 | | |
3746 | 2.10k | Builder.positionAtEnd(NormBB); |
3747 | 2.10k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3748 | 2.10k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3749 | 2.10k | auto IsNotUnderflow = |
3750 | 2.10k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3751 | 2.10k | Builder.createCondBr(IsNotUnderflow, NotMinBB, |
3752 | 2.10k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3753 | | |
3754 | 2.10k | Builder.positionAtEnd(NotMinBB); |
3755 | 2.10k | auto IsNotOverflow = Builder.createLikely( |
3756 | 2.10k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3757 | 2.10k | Builder.createCondBr(IsNotOverflow, NotMaxBB, |
3758 | 2.10k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3759 | | |
3760 | 2.10k | Builder.positionAtEnd(NotMaxBB); |
3761 | 2.10k | stackPush(Builder.createFPToSI(Trunc, IntType)); |
3762 | 2.10k | } |
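| | // NOTE: compileSignedTrunc implements the trapping i32/i64.trunc_f*_s
| | // conversions: NaN traps with InvalidConvToInt, the value is
| | // truncated toward zero, and the result is range-checked against the
| | // signed min/max rendered as FP constants (IntegerOverflow on
| | // failure). `Precise` records whether the integer width fits in the
| | // FP mantissa (e.g. i32 from f64): if so the exact max is
| | // representable and an OLE compare is safe; if not (e.g. i32 from
| | // f32, where INT32_MAX rounds up to 2^31), a strict OLT compare
| | // against the rounded-up max is used instead.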
3763 | 1.40k | void compileSignedTruncSat(LLVM::Type IntType) noexcept { |
3764 | 1.40k | auto CurrBB = Builder.getInsertBlock(); |
3765 | 1.40k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.norm"); |
3766 | 1.40k | auto NotMinBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.notmin"); |
3767 | 1.40k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.notmax"); |
3768 | 1.40k | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.end"); |
3769 | 1.40k | auto Value = stackPop(); |
3770 | 1.40k | const auto [Precise, MinInt, MaxInt, MinFp, MaxFp] = [IntType, Value]() |
3771 | 1.40k | -> std::tuple<bool, uint64_t, uint64_t, LLVM::Value, LLVM::Value> { |
3772 | 1.40k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3773 | 1.40k | const auto [Min, Max] = [BitWidth]() -> std::tuple<int64_t, int64_t> { |
3774 | 1.40k | switch (BitWidth) { |
3775 | 639 | case 32: |
3776 | 639 | return {std::numeric_limits<int32_t>::min(), |
3777 | 639 | std::numeric_limits<int32_t>::max()}; |
3778 | 764 | case 64: |
3779 | 764 | return {std::numeric_limits<int64_t>::min(), |
3780 | 764 | std::numeric_limits<int64_t>::max()}; |
3781 | 0 | default: |
3782 | 0 | assumingUnreachable(); |
3783 | 1.40k | } |
3784 | 1.40k | }(); |
3785 | 1.40k | auto FPType = Value.getType(); |
3786 | 1.40k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3787 | 1.40k | const auto FPWidth = FPType.getFPMantissaWidth(); |
3788 | 1.40k | return {BitWidth <= FPWidth, static_cast<uint64_t>(Min), |
3789 | 1.40k | static_cast<uint64_t>(Max), |
3790 | 1.40k | LLVM::Value::getConstReal(FPType, Min), |
3791 | 1.40k | LLVM::Value::getConstReal(FPType, Max)}; |
3792 | 1.40k | }(); |
3793 | | |
3794 | 1.40k | auto IsNotNan = Builder.createLikely(Builder.createFCmpORD(Value, Value)); |
3795 | 1.40k | Builder.createCondBr(IsNotNan, NormBB, EndBB); |
3796 | | |
3797 | 1.40k | Builder.positionAtEnd(NormBB); |
3798 | 1.40k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3799 | 1.40k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3800 | 1.40k | auto IsNotUnderflow = |
3801 | 1.40k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3802 | 1.40k | Builder.createCondBr(IsNotUnderflow, NotMinBB, EndBB); |
3803 | | |
3804 | 1.40k | Builder.positionAtEnd(NotMinBB); |
3805 | 1.40k | auto IsNotOverflow = Builder.createLikely( |
3806 | 1.40k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3807 | 1.40k | Builder.createCondBr(IsNotOverflow, NotMaxBB, EndBB); |
3808 | | |
3809 | 1.40k | Builder.positionAtEnd(NotMaxBB); |
3810 | 1.40k | auto IntValue = Builder.createFPToSI(Trunc, IntType); |
3811 | 1.40k | Builder.createBr(EndBB); |
3812 | | |
3813 | 1.40k | Builder.positionAtEnd(EndBB); |
3814 | 1.40k | auto PHIRet = Builder.createPHI(IntType); |
3815 | 1.40k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, 0, true), CurrBB); |
3816 | 1.40k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MinInt, true), NormBB); |
3817 | 1.40k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MaxInt, true), |
3818 | 1.40k | NotMinBB); |
3819 | 1.40k | PHIRet.addIncoming(IntValue, NotMaxBB); |
3820 | | |
3821 | 1.40k | stackPush(PHIRet); |
3822 | 1.40k | } |
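| | // NOTE: compileSignedTruncSat is the non-trapping
| | // i32/i64.trunc_sat_f*_s variant: the same NaN / underflow / overflow
| | // checks branch to a shared end block instead of a trap, and a PHI
| | // selects the result per predecessor: 0 for NaN, INT_MIN on
| | // underflow, INT_MAX on overflow, and the fptosi result otherwise.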
3823 | 4.43k | void compileUnsignedTrunc(LLVM::Type IntType) noexcept { |
3824 | 4.43k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "utrunc.norm"); |
3825 | 4.43k | auto NotMinBB = LLVM::BasicBlock::create(LLContext, F.Fn, "utrunc.notmin"); |
3826 | 4.43k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "utrunc.notmax"); |
3827 | 4.43k | auto Value = stackPop(); |
3828 | 4.43k | const auto [Precise, MinFp, MaxFp] = |
3829 | 4.43k | [IntType, Value]() -> std::tuple<bool, LLVM::Value, LLVM::Value> { |
3830 | 4.43k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3831 | 4.43k | const auto [Min, Max] = [BitWidth]() -> std::tuple<uint64_t, uint64_t> { |
3832 | 4.43k | switch (BitWidth) { |
3833 | 1.53k | case 32: |
3834 | 1.53k | return {std::numeric_limits<uint32_t>::min(), |
3835 | 1.53k | std::numeric_limits<uint32_t>::max()}; |
3836 | 2.90k | case 64: |
3837 | 2.90k | return {std::numeric_limits<uint64_t>::min(), |
3838 | 2.90k | std::numeric_limits<uint64_t>::max()}; |
3839 | 0 | default: |
3840 | 0 | assumingUnreachable(); |
3841 | 4.43k | } |
3842 | 4.43k | }(); |
3843 | 4.43k | auto FPType = Value.getType(); |
3844 | 4.43k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3845 | 4.43k | const auto FPWidth = FPType.getFPMantissaWidth(); |
3846 | 4.43k | return {BitWidth <= FPWidth, LLVM::Value::getConstReal(FPType, Min), |
3847 | 4.43k | LLVM::Value::getConstReal(FPType, Max)}; |
3848 | 4.43k | }(); |
3849 | | |
3850 | 4.43k | auto IsNotNan = Builder.createLikely(Builder.createFCmpORD(Value, Value)); |
3851 | 4.43k | Builder.createCondBr(IsNotNan, NormBB, |
3852 | 4.43k | getTrapBB(ErrCode::Value::InvalidConvToInt)); |
3853 | | |
3854 | 4.43k | Builder.positionAtEnd(NormBB); |
3855 | 4.43k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3856 | 4.43k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3857 | 4.43k | auto IsNotUnderflow = |
3858 | 4.43k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3859 | 4.43k | Builder.createCondBr(IsNotUnderflow, NotMinBB, |
3860 | 4.43k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3861 | | |
3862 | 4.43k | Builder.positionAtEnd(NotMinBB); |
3863 | 4.43k | auto IsNotOverflow = Builder.createLikely( |
3864 | 4.43k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3865 | 4.43k | Builder.createCondBr(IsNotOverflow, NotMaxBB, |
3866 | 4.43k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3867 | | |
3868 | 4.43k | Builder.positionAtEnd(NotMaxBB); |
3869 | 4.43k | stackPush(Builder.createFPToUI(Trunc, IntType)); |
3870 | 4.43k | } |
3871 | 1.16k | void compileUnsignedTruncSat(LLVM::Type IntType) noexcept { |
3872 | 1.16k | auto CurrBB = Builder.getInsertBlock(); |
3873 | 1.16k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "usat.norm"); |
3874 | 1.16k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "usat.notmax"); |
3875 | 1.16k | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "usat.end"); |
3876 | 1.16k | auto Value = stackPop(); |
3877 | 1.16k | const auto [Precise, MinInt, MaxInt, MinFp, MaxFp] = [IntType, Value]() |
3878 | 1.16k | -> std::tuple<bool, uint64_t, uint64_t, LLVM::Value, LLVM::Value> { |
3879 | 1.16k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3880 | 1.16k | const auto [Min, Max] = [BitWidth]() -> std::tuple<uint64_t, uint64_t> { |
3881 | 1.16k | switch (BitWidth) { |
3882 | 396 | case 32: |
3883 | 396 | return {std::numeric_limits<uint32_t>::min(), |
3884 | 396 | std::numeric_limits<uint32_t>::max()}; |
3885 | 771 | case 64: |
3886 | 771 | return {std::numeric_limits<uint64_t>::min(), |
3887 | 771 | std::numeric_limits<uint64_t>::max()}; |
3888 | 0 | default: |
3889 | 0 | assumingUnreachable(); |
3890 | 1.16k | } |
3891 | 1.16k | }(); |
3892 | 1.16k | auto FPType = Value.getType(); |
3893 | 1.16k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3894 | 1.16k | const auto FPWidth = FPType.getFPMantissaWidth(); |
3895 | 1.16k | return {BitWidth <= FPWidth, Min, Max, |
3896 | 1.16k | LLVM::Value::getConstReal(FPType, Min), |
3897 | 1.16k | LLVM::Value::getConstReal(FPType, Max)}; |
3898 | 1.16k | }(); |
3899 | | |
3900 | 1.16k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3901 | 1.16k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3902 | 1.16k | auto IsNotUnderflow = |
3903 | 1.16k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3904 | 1.16k | Builder.createCondBr(IsNotUnderflow, NormBB, EndBB); |
3905 | | |
3906 | 1.16k | Builder.positionAtEnd(NormBB); |
3907 | 1.16k | auto IsNotOverflow = Builder.createLikely( |
3908 | 1.16k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3909 | 1.16k | Builder.createCondBr(IsNotOverflow, NotMaxBB, EndBB); |
3910 | | |
3911 | 1.16k | Builder.positionAtEnd(NotMaxBB); |
3912 | 1.16k | auto IntValue = Builder.createFPToUI(Trunc, IntType); |
3913 | 1.16k | Builder.createBr(EndBB); |
3914 | | |
3915 | 1.16k | Builder.positionAtEnd(EndBB); |
3916 | 1.16k | auto PHIRet = Builder.createPHI(IntType); |
3917 | 1.16k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MinInt), CurrBB); |
3918 | 1.16k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MaxInt), NormBB); |
3919 | 1.16k | PHIRet.addIncoming(IntValue, NotMaxBB); |
3920 | | |
3921 | 1.16k | stackPush(PHIRet); |
3922 | 1.16k | } |
3923 | | |
3924 | | void compileAtomicCheckOffsetAlignment(LLVM::Value Offset, |
3925 | 35 | LLVM::Type IntType) noexcept { |
3926 | 35 | const auto BitWidth = IntType.getIntegerBitWidth(); |
3927 | 35 | auto BWMask = LLContext.getInt64((BitWidth >> 3) - 1); |
3928 | 35 | auto Value = Builder.createAnd(Offset, BWMask); |
3929 | 35 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "address_align_ok"); |
3930 | 35 | auto IsAddressAligned = Builder.createLikely( |
3931 | 35 | Builder.createICmpEQ(Value, LLContext.getInt64(0))); |
3932 | 35 | Builder.createCondBr(IsAddressAligned, OkBB, |
3933 | 35 | getTrapBB(ErrCode::Value::UnalignedAtomicAccess)); |
3934 | | |
3935 | 35 | Builder.positionAtEnd(OkBB); |
3936 | 35 | } |
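// Illustrative sketch, not part of compiler.cpp: the check above traps with
// UnalignedAtomicAccess unless the effective address is a multiple of the
// access width; (BitWidth >> 3) - 1 is the mask of low bits that must be zero.
// Scalar equivalent (helper name is hypothetical):

#include <cstdint>

inline bool isNaturallyAligned(uint64_t EffectiveAddr, uint32_t BitWidth) noexcept {
  const uint64_t ByteWidth = BitWidth >> 3;       // e.g. 32-bit access -> 4 bytes
  return (EffectiveAddr & (ByteWidth - 1)) == 0;  // false => trap
}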
3937 | | |
3938 | 193 | void compileMemoryFence() noexcept { |
3939 | 193 | Builder.createFence(LLVMAtomicOrderingSequentiallyConsistent); |
3940 | 193 | } |
3941 | | void compileAtomicNotify(unsigned MemoryIndex, |
3942 | 28 | unsigned MemoryOffset) noexcept { |
3943 | 28 | auto Count = stackPop(); |
3944 | 28 | auto Addr = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3945 | 28 | if (MemoryOffset != 0) { |
3946 | 21 | Addr = Builder.createAdd(Addr, LLContext.getInt64(MemoryOffset)); |
3947 | 21 | } |
3948 | 28 | compileAtomicCheckOffsetAlignment(Addr, Context.Int32Ty); |
3949 | 28 | auto Offset = stackPop(); |
3950 | | |
3951 | 28 | stackPush(Builder.createCall( |
3952 | 28 | Context.getIntrinsic( |
3953 | 28 | Builder, Executable::Intrinsics::kMemAtomicNotify, |
3954 | 28 | LLVM::Type::getFunctionType( |
3955 | 28 | Context.Int32Ty, |
3956 | 28 | {Context.Int32Ty, Context.Int32Ty, Context.Int32Ty}, false)), |
3957 | 28 | {LLContext.getInt32(MemoryIndex), Offset, Count})); |
3958 | 28 | } |
3959 | | void compileAtomicWait(unsigned MemoryIndex, unsigned MemoryOffset, |
3960 | 7 | LLVM::Type TargetType, uint32_t BitWidth) noexcept { |
3961 | 7 | auto Timeout = stackPop(); |
3962 | 7 | auto ExpectedValue = Builder.createZExtOrTrunc(stackPop(), Context.Int64Ty); |
3963 | 7 | auto Addr = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3964 | 7 | if (MemoryOffset != 0) { |
3965 | 3 | Addr = Builder.createAdd(Addr, LLContext.getInt64(MemoryOffset)); |
3966 | 3 | } |
3967 | 7 | compileAtomicCheckOffsetAlignment(Addr, TargetType); |
3968 | 7 | auto Offset = stackPop(); |
3969 | | |
3970 | 7 | stackPush(Builder.createCall( |
3971 | 7 | Context.getIntrinsic( |
3972 | 7 | Builder, Executable::Intrinsics::kMemAtomicWait, |
3973 | 7 | LLVM::Type::getFunctionType(Context.Int32Ty, |
3974 | 7 | {Context.Int32Ty, Context.Int32Ty, |
3975 | 7 | Context.Int64Ty, Context.Int64Ty, |
3976 | 7 | Context.Int32Ty}, |
3977 | 7 | false)), |
3978 | 7 | {LLContext.getInt32(MemoryIndex), Offset, ExpectedValue, Timeout, |
3979 | 7 | LLContext.getInt32(BitWidth)})); |
3980 | 7 | } |
3981 | | void compileAtomicLoad(unsigned MemoryIndex, unsigned MemoryOffset, |
3982 | | unsigned Alignment, LLVM::Type IntType, |
3983 | 0 | LLVM::Type TargetType, bool Signed = false) noexcept { |
3984 | | 
3985 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3986 | 0 | if (MemoryOffset != 0) { |
3987 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
3988 | 0 | } |
3989 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
3990 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
3991 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
3992 | 0 | Offset); |
3993 | | 
3994 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
3995 | 0 | auto Load = switchEndian(Builder.createLoad(TargetType, Ptr, true)); |
3996 | 0 | Load.setAlignment(1 << Alignment); |
3997 | 0 | Load.setOrdering(LLVMAtomicOrderingSequentiallyConsistent); |
3998 | | 
3999 | 0 | if (Signed) { |
4000 | 0 | Stack.back() = Builder.createSExt(Load, IntType); |
4001 | 0 | } else { |
4002 | 0 | Stack.back() = Builder.createZExt(Load, IntType); |
4003 | 0 | } |
4004 | 0 | } |
4005 | | void compileAtomicStore(unsigned MemoryIndex, unsigned MemoryOffset, |
4006 | | unsigned Alignment, LLVM::Type, LLVM::Type TargetType, |
4007 | 0 | bool Signed = false) noexcept { |
4008 | 0 | auto V = stackPop(); |
4009 | | 
4010 | 0 | if (Signed) { |
4011 | 0 | V = Builder.createSExtOrTrunc(V, TargetType); |
4012 | 0 | } else { |
4013 | 0 | V = Builder.createZExtOrTrunc(V, TargetType); |
4014 | 0 | } |
4015 | 0 | V = switchEndian(V); |
4016 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
4017 | 0 | if (MemoryOffset != 0) { |
4018 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
4019 | 0 | } |
4020 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
4021 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
4022 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
4023 | 0 | Offset); |
4024 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
4025 | 0 | auto Store = Builder.createStore(V, Ptr, true); |
4026 | 0 | Store.setAlignment(1 << Alignment); |
4027 | 0 | Store.setOrdering(LLVMAtomicOrderingSequentiallyConsistent); |
4028 | 0 | } |
4029 | | |
4030 | | void compileAtomicRMWOp(unsigned MemoryIndex, unsigned MemoryOffset, |
4031 | | [[maybe_unused]] unsigned Alignment, |
4032 | | LLVMAtomicRMWBinOp BinOp, LLVM::Type IntType, |
4033 | 0 | LLVM::Type TargetType, bool Signed = false) noexcept { |
4034 | 0 | auto Value = Builder.createSExtOrTrunc(stackPop(), TargetType); |
4035 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
4036 | 0 | if (MemoryOffset != 0) { |
4037 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
4038 | 0 | } |
4039 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
4040 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
4041 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
4042 | 0 | Offset); |
4043 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
4044 | | 
4045 | 0 | LLVM::Value Ret; |
4046 | | if constexpr (Endian::native == Endian::big) { |
4047 | | if (BinOp == LLVMAtomicRMWBinOp::LLVMAtomicRMWBinOpAdd || |
4048 | | BinOp == LLVMAtomicRMWBinOp::LLVMAtomicRMWBinOpSub) { |
4049 | | auto AtomicBB = LLVM::BasicBlock::create(LLContext, F.Fn, "atomic.rmw"); |
4050 | | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "atomic.rmw.ok"); |
4051 | | Builder.createBr(AtomicBB); |
4052 | | Builder.positionAtEnd(AtomicBB); |
4053 | | |
4054 | | auto Load = Builder.createLoad(TargetType, Ptr, true); |
4055 | | Load.setOrdering(LLVMAtomicOrderingMonotonic); |
4056 | | Load.setAlignment(1 << Alignment); |
4057 | | |
4058 | | LLVM::Value New; |
4059 | | if (BinOp == LLVMAtomicRMWBinOp::LLVMAtomicRMWBinOpAdd) |
4060 | | New = Builder.createAdd(switchEndian(Load), Value); |
4061 | | else if (BinOp == LLVMAtomicRMWBinOp::LLVMAtomicRMWBinOpSub) { |
4062 | | New = Builder.createSub(switchEndian(Load), Value); |
4063 | | } else { |
4064 | | assumingUnreachable(); |
4065 | | } |
4066 | | New = switchEndian(New); |
4067 | | |
4068 | | auto Exchange = Builder.createAtomicCmpXchg( |
4069 | | Ptr, Load, New, LLVMAtomicOrderingSequentiallyConsistent, |
4070 | | LLVMAtomicOrderingSequentiallyConsistent); |
4071 | | |
4072 | | Ret = Builder.createExtractValue(Exchange, 0); |
4073 | | auto Success = Builder.createExtractValue(Exchange, 1); |
4074 | | Builder.createCondBr(Success, OkBB, AtomicBB); |
4075 | | Builder.positionAtEnd(OkBB); |
4076 | | } else { |
4077 | | Ret = Builder.createAtomicRMW(BinOp, Ptr, switchEndian(Value), |
4078 | | LLVMAtomicOrderingSequentiallyConsistent); |
4079 | | } |
4080 | 0 | } else { |
4081 | 0 | Ret = Builder.createAtomicRMW(BinOp, Ptr, switchEndian(Value), |
4082 | 0 | LLVMAtomicOrderingSequentiallyConsistent); |
4083 | 0 | } |
4084 | 0 | Ret = switchEndian(Ret); |
4085 | | #if LLVM_VERSION_MAJOR >= 13 |
4086 | | Ret.setAlignment(1 << Alignment); |
4087 | | #endif |
4088 | 0 | if (Signed) { |
4089 | 0 | Stack.back() = Builder.createSExt(Ret, IntType); |
4090 | 0 | } else { |
4091 | 0 | Stack.back() = Builder.createZExt(Ret, IntType); |
4092 | 0 | } |
4093 | 0 | } |
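// Illustrative sketch, not part of compiler.cpp: on big-endian hosts the
// add/sub cases above cannot use a native atomicrmw, because the in-memory
// value is kept in little-endian (Wasm) byte order and must be byte-swapped
// around the arithmetic; hence the manual load / compute / cmpxchg retry loop.
// Equivalent host-side logic for a 32-bit add (the helper name is hypothetical;
// __builtin_bswap32 is the GCC/Clang builtin):

#include <atomic>
#include <cstdint>

inline uint32_t atomicAddWasmOrder(std::atomic<uint32_t> &Cell,
                                   uint32_t Operand) noexcept {
  uint32_t OldRaw = Cell.load(std::memory_order_relaxed);
  for (;;) {
    // Swap to host order, add, then swap back to the stored (Wasm) byte order.
    const uint32_t NewRaw =
        __builtin_bswap32(__builtin_bswap32(OldRaw) + Operand);
    // On failure, OldRaw is refreshed and the loop retries, like the AtomicBB loop.
    if (Cell.compare_exchange_strong(OldRaw, NewRaw,
                                     std::memory_order_seq_cst)) {
      return __builtin_bswap32(OldRaw);  // previous value, in host byte order
    }
  }
}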
4094 | | void compileAtomicCompareExchange(unsigned MemoryIndex, unsigned MemoryOffset, |
4095 | | [[maybe_unused]] unsigned Alignment, |
4096 | | LLVM::Type IntType, LLVM::Type TargetType, |
4097 | 0 | bool Signed = false) noexcept { |
4098 | | 
4099 | 0 | auto Replacement = Builder.createSExtOrTrunc(stackPop(), TargetType); |
4100 | 0 | auto Expected = Builder.createSExtOrTrunc(stackPop(), TargetType); |
4101 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
4102 | 0 | if (MemoryOffset != 0) { |
4103 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
4104 | 0 | } |
4105 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
4106 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
4107 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
4108 | 0 | Offset); |
4109 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
4110 | | 
4111 | 0 | auto Ret = Builder.createAtomicCmpXchg( |
4112 | 0 | Ptr, switchEndian(Expected), switchEndian(Replacement), |
4113 | 0 | LLVMAtomicOrderingSequentiallyConsistent, |
4114 | 0 | LLVMAtomicOrderingSequentiallyConsistent); |
4115 | | #if LLVM_VERSION_MAJOR >= 13 |
4116 | | Ret.setAlignment(1 << Alignment); |
4117 | | #endif |
4118 | 0 | auto OldVal = Builder.createExtractValue(Ret, 0); |
4119 | 0 | OldVal = switchEndian(OldVal); |
4120 | 0 | if (Signed) { |
4121 | 0 | Stack.back() = Builder.createSExt(OldVal, IntType); |
4122 | 0 | } else { |
4123 | 0 | Stack.back() = Builder.createZExt(OldVal, IntType); |
4124 | 0 | } |
4125 | 0 | } |
4126 | | |
4127 | 12.2k | void compileReturn() noexcept { |
4128 | 12.2k | updateInstrCount(); |
4129 | 12.2k | updateGas(); |
4130 | 12.2k | auto Ty = F.Ty.getReturnType(); |
4131 | 12.2k | if (Ty.isVoidTy()) { |
4132 | 2.06k | Builder.createRetVoid(); |
4133 | 10.1k | } else if (Ty.isStructTy()) { |
4134 | 356 | const auto Count = Ty.getStructNumElements(); |
4135 | 356 | std::vector<LLVM::Value> Ret(Count); |
4136 | 1.34k | for (unsigned I = 0; I < Count; ++I) { |
4137 | 985 | const unsigned J = Count - 1 - I; |
4138 | 985 | Ret[J] = stackPop(); |
4139 | 985 | } |
4140 | 356 | Builder.createAggregateRet(Ret); |
4141 | 9.79k | } else { |
4142 | 9.79k | Builder.createRet(stackPop()); |
4143 | 9.79k | } |
4144 | 12.2k | } |
4145 | | |
4146 | 20.3k | void updateInstrCount() noexcept { |
4147 | 20.3k | if (LocalInstrCount) { |
4148 | 0 | auto Store [[maybe_unused]] = Builder.createAtomicRMW( |
4149 | 0 | LLVMAtomicRMWBinOpAdd, Context.getInstrCount(Builder, ExecCtx), |
4150 | 0 | Builder.createLoad(Context.Int64Ty, LocalInstrCount), |
4151 | 0 | LLVMAtomicOrderingMonotonic); |
4152 | | #if LLVM_VERSION_MAJOR >= 13 |
4153 | | Store.setAlignment(8); |
4154 | | #endif |
4155 | 0 | Builder.createStore(LLContext.getInt64(0), LocalInstrCount); |
4156 | 0 | } |
4157 | 20.3k | } |
4158 | | |
4159 | 22.2k | void updateGas() noexcept { |
4160 | 22.2k | if (LocalGas) { |
4161 | 0 | auto CurrBB = Builder.getInsertBlock(); |
4162 | 0 | auto CheckBB = LLVM::BasicBlock::create(LLContext, F.Fn, "gas_check"); |
4163 | 0 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "gas_ok"); |
4164 | 0 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "gas_end"); |
4165 | | 
4166 | 0 | auto Cost = Builder.createLoad(Context.Int64Ty, LocalGas); |
4167 | 0 | Cost.setAlignment(64); |
4168 | 0 | auto GasPtr = Context.getGas(Builder, ExecCtx); |
4169 | 0 | auto GasLimit = Context.getGasLimit(Builder, ExecCtx); |
4170 | 0 | auto Gas = Builder.createLoad(Context.Int64Ty, GasPtr); |
4171 | 0 | Gas.setAlignment(64); |
4172 | 0 | Gas.setOrdering(LLVMAtomicOrderingMonotonic); |
4173 | 0 | Builder.createBr(CheckBB); |
4174 | 0 | Builder.positionAtEnd(CheckBB); |
4175 | | 
4176 | 0 | auto PHIOldGas = Builder.createPHI(Context.Int64Ty); |
4177 | 0 | auto NewGas = Builder.createAdd(PHIOldGas, Cost); |
4178 | 0 | auto IsGasRemain = |
4179 | 0 | Builder.createLikely(Builder.createICmpULE(NewGas, GasLimit)); |
4180 | 0 | Builder.createCondBr(IsGasRemain, OkBB, |
4181 | 0 | getTrapBB(ErrCode::Value::CostLimitExceeded)); |
4182 | 0 | Builder.positionAtEnd(OkBB); |
4183 | | 
4184 | 0 | auto RGasAndSucceed = Builder.createAtomicCmpXchg( |
4185 | 0 | GasPtr, PHIOldGas, NewGas, LLVMAtomicOrderingMonotonic, |
4186 | 0 | LLVMAtomicOrderingMonotonic); |
4187 | | #if LLVM_VERSION_MAJOR >= 13 |
4188 | | RGasAndSucceed.setAlignment(8); |
4189 | | #endif |
4190 | 0 | RGasAndSucceed.setWeak(true); |
4191 | 0 | auto RGas = Builder.createExtractValue(RGasAndSucceed, 0); |
4192 | 0 | auto Succeed = Builder.createExtractValue(RGasAndSucceed, 1); |
4193 | 0 | Builder.createCondBr(Builder.createLikely(Succeed), EndBB, CheckBB); |
4194 | 0 | Builder.positionAtEnd(EndBB); |
4195 | | 
4196 | 0 | Builder.createStore(LLContext.getInt64(0), LocalGas); |
4197 | | 
4198 | 0 | PHIOldGas.addIncoming(Gas, CurrBB); |
4199 | 0 | PHIOldGas.addIncoming(RGas, OkBB); |
4200 | 0 | } |
4201 | 22.2k | } |
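// Illustrative sketch, not part of compiler.cpp: the gas_check/gas_ok loop above
// adds the locally accumulated cost to the shared gas counter with a weak,
// monotonic cmpxchg, retrying on contention and trapping with CostLimitExceeded
// once the limit would be exceeded. Equivalent host-side logic with std::atomic
// (the function and parameter names are hypothetical):

#include <atomic>
#include <cstdint>

inline bool chargeGas(std::atomic<uint64_t> &Gas, uint64_t GasLimit,
                      uint64_t Cost) noexcept {
  uint64_t Old = Gas.load(std::memory_order_relaxed);
  for (;;) {
    const uint64_t New = Old + Cost;
    if (New > GasLimit) {
      return false;  // corresponds to the CostLimitExceeded trap
    }
    // On failure, Old is refreshed with the current value (the PHIOldGas role).
    if (Gas.compare_exchange_weak(Old, New, std::memory_order_relaxed)) {
      return true;
    }
  }
}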
4202 | | |
4203 | 3.26k | void updateGasAtTrap() noexcept { |
4204 | 3.26k | if (LocalGas) { |
4205 | 0 | auto Update [[maybe_unused]] = Builder.createAtomicRMW( |
4206 | 0 | LLVMAtomicRMWBinOpAdd, Context.getGas(Builder, ExecCtx), |
4207 | 0 | Builder.createLoad(Context.Int64Ty, LocalGas), |
4208 | 0 | LLVMAtomicOrderingMonotonic); |
4209 | | #if LLVM_VERSION_MAJOR >= 13 |
4210 | | Update.setAlignment(8); |
4211 | | #endif |
4212 | 0 | } |
4213 | 3.26k | } |
4214 | | |
4215 | | private: |
4216 | 3.67k | void compileCallOp(const unsigned int FuncIndex) noexcept { |
4217 | 3.67k | const auto &FuncType = |
4218 | 3.67k | Context.CompositeTypes[std::get<0>(Context.Functions[FuncIndex])] |
4219 | 3.67k | ->getFuncType(); |
4220 | 3.67k | const auto &Function = std::get<1>(Context.Functions[FuncIndex]); |
4221 | 3.67k | const auto &ParamTypes = FuncType.getParamTypes(); |
4222 | | |
4223 | 3.67k | std::vector<LLVM::Value> Args(ParamTypes.size() + 1); |
4224 | 3.67k | Args[0] = F.Fn.getFirstParam(); |
4225 | 4.50k | for (size_t I = 0; I < ParamTypes.size(); ++I) { |
4226 | 830 | const size_t J = ParamTypes.size() - 1 - I; |
4227 | 830 | Args[J + 1] = stackPop(); |
4228 | 830 | } |
4229 | | |
4230 | 3.67k | auto Ret = Builder.createCall(Function, Args); |
4231 | 3.67k | auto Ty = Ret.getType(); |
4232 | 3.67k | if (Ty.isVoidTy()) { |
4233 | | // nothing to do |
4234 | 1.93k | } else if (Ty.isStructTy()) { |
4235 | 169 | for (auto Val : unpackStruct(Builder, Ret)) { |
4236 | 169 | stackPush(Val); |
4237 | 169 | } |
4238 | 1.66k | } else { |
4239 | 1.66k | stackPush(Ret); |
4240 | 1.66k | } |
4241 | 3.67k | } |
4242 | | |
4243 | | void compileIndirectCallOp(const uint32_t TableIndex, |
4244 | 1.04k | const uint32_t FuncTypeIndex) noexcept { |
4245 | 1.04k | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.not_null"); |
4246 | 1.04k | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.is_null"); |
4247 | 1.04k | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.end"); |
4248 | | |
4249 | 1.04k | LLVM::Value FuncIndex = stackPop(); |
4250 | 1.04k | const auto &FuncType = Context.CompositeTypes[FuncTypeIndex]->getFuncType(); |
4251 | 1.04k | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4252 | 1.04k | auto RTy = FTy.getReturnType(); |
4253 | | |
4254 | 1.04k | const size_t ArgSize = FuncType.getParamTypes().size(); |
4255 | 1.04k | const size_t RetSize = |
4256 | 1.04k | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4257 | 1.04k | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4258 | 1.04k | ArgsVec[0] = F.Fn.getFirstParam(); |
4259 | 1.83k | for (size_t I = 0; I < ArgSize; ++I) { |
4260 | 790 | const size_t J = ArgSize - I; |
4261 | 790 | ArgsVec[J] = stackPop(); |
4262 | 790 | } |
4263 | | |
4264 | 1.04k | std::vector<LLVM::Value> FPtrRetsVec; |
4265 | 1.04k | FPtrRetsVec.reserve(RetSize); |
4266 | 1.04k | { |
4267 | 1.04k | auto FPtr = Builder.createCall( |
4268 | 1.04k | Context.getIntrinsic( |
4269 | 1.04k | Builder, Executable::Intrinsics::kTableGetFuncSymbol, |
4270 | 1.04k | LLVM::Type::getFunctionType( |
4271 | 1.04k | FTy.getPointerTo(), |
4272 | 1.04k | {Context.Int32Ty, Context.Int32Ty, Context.Int32Ty}, false)), |
4273 | 1.04k | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4274 | 1.04k | FuncIndex}); |
4275 | 1.04k | Builder.createCondBr( |
4276 | 1.04k | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4277 | 1.04k | NotNullBB, IsNullBB); |
4278 | 1.04k | Builder.positionAtEnd(NotNullBB); |
4279 | | |
4280 | 1.04k | auto FPtrRet = |
4281 | 1.04k | Builder.createCall(LLVM::FunctionCallee{FTy, FPtr}, ArgsVec); |
4282 | 1.04k | if (RetSize == 0) { |
4283 | | // nothing to do |
4284 | 714 | } else if (RetSize == 1) { |
4285 | 697 | FPtrRetsVec.push_back(FPtrRet); |
4286 | 697 | } else { |
4287 | 34 | for (auto Val : unpackStruct(Builder, FPtrRet)) { |
4288 | 34 | FPtrRetsVec.push_back(Val); |
4289 | 34 | } |
4290 | 17 | } |
4291 | 1.04k | } |
4292 | | |
4293 | 1.04k | Builder.createBr(EndBB); |
4294 | 1.04k | Builder.positionAtEnd(IsNullBB); |
4295 | | |
4296 | 1.04k | std::vector<LLVM::Value> RetsVec; |
4297 | 1.04k | { |
4298 | 1.04k | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4299 | 1.04k | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4300 | 1.04k | Builder.createArrayPtrStore( |
4301 | 1.04k | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4302 | 1.04k | kValSize); |
4303 | | |
4304 | 1.04k | Builder.createCall( |
4305 | 1.04k | Context.getIntrinsic( |
4306 | 1.04k | Builder, Executable::Intrinsics::kCallIndirect, |
4307 | 1.04k | LLVM::Type::getFunctionType(Context.VoidTy, |
4308 | 1.04k | {Context.Int32Ty, Context.Int32Ty, |
4309 | 1.04k | Context.Int32Ty, Context.Int8PtrTy, |
4310 | 1.04k | Context.Int8PtrTy}, |
4311 | 1.04k | false)), |
4312 | 1.04k | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4313 | 1.04k | FuncIndex, Args, Rets}); |
4314 | | |
4315 | 1.04k | if (RetSize == 0) { |
4316 | | // nothing to do |
4317 | 714 | } else if (RetSize == 1) { |
4318 | 697 | RetsVec.push_back( |
4319 | 697 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4320 | 697 | } else { |
4321 | 17 | RetsVec = Builder.createArrayPtrLoad(RetSize, RTy, Rets, Context.Int8Ty, |
4322 | 17 | kValSize); |
4323 | 17 | } |
4324 | 1.04k | Builder.createBr(EndBB); |
4325 | 1.04k | Builder.positionAtEnd(EndBB); |
4326 | 1.04k | } |
4327 | | |
4328 | 1.77k | for (unsigned I = 0; I < RetSize; ++I) { |
4329 | 731 | auto PHIRet = Builder.createPHI(FPtrRetsVec[I].getType()); |
4330 | 731 | PHIRet.addIncoming(FPtrRetsVec[I], NotNullBB); |
4331 | 731 | PHIRet.addIncoming(RetsVec[I], IsNullBB); |
4332 | 731 | stackPush(PHIRet); |
4333 | 731 | } |
4334 | 1.04k | } |
4335 | | |
4336 | 63 | void compileReturnCallOp(const unsigned int FuncIndex) noexcept { |
4337 | 63 | const auto &FuncType = |
4338 | 63 | Context.CompositeTypes[std::get<0>(Context.Functions[FuncIndex])] |
4339 | 63 | ->getFuncType(); |
4340 | 63 | const auto &Function = std::get<1>(Context.Functions[FuncIndex]); |
4341 | 63 | const auto &ParamTypes = FuncType.getParamTypes(); |
4342 | | |
4343 | 63 | std::vector<LLVM::Value> Args(ParamTypes.size() + 1); |
4344 | 63 | Args[0] = F.Fn.getFirstParam(); |
4345 | 127 | for (size_t I = 0; I < ParamTypes.size(); ++I) { |
4346 | 64 | const size_t J = ParamTypes.size() - 1 - I; |
4347 | 64 | Args[J + 1] = stackPop(); |
4348 | 64 | } |
4349 | | |
4350 | 63 | auto Ret = Builder.createCall(Function, Args); |
4351 | 63 | auto Ty = Ret.getType(); |
4352 | 63 | if (Ty.isVoidTy()) { |
4353 | 1 | Builder.createRetVoid(); |
4354 | 62 | } else { |
4355 | 62 | Builder.createRet(Ret); |
4356 | 62 | } |
4357 | 63 | } |
4358 | | |
4359 | | void compileReturnIndirectCallOp(const uint32_t TableIndex, |
4360 | 104 | const uint32_t FuncTypeIndex) noexcept { |
4361 | 104 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.not_null"); |
4362 | 104 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.is_null"); |
4363 | | |
4364 | 104 | LLVM::Value FuncIndex = stackPop(); |
4365 | 104 | const auto &FuncType = Context.CompositeTypes[FuncTypeIndex]->getFuncType(); |
4366 | 104 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4367 | 104 | auto RTy = FTy.getReturnType(); |
4368 | | |
4369 | 104 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4370 | 104 | const size_t RetSize = |
4371 | 104 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4372 | 104 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4373 | 104 | ArgsVec[0] = F.Fn.getFirstParam(); |
4374 | 205 | for (size_t I = 0; I < ArgSize; ++I) { |
4375 | 101 | const size_t J = ArgSize - I; |
4376 | 101 | ArgsVec[J] = stackPop(); |
4377 | 101 | } |
4378 | | |
4379 | 104 | { |
4380 | 104 | auto FPtr = Builder.createCall( |
4381 | 104 | Context.getIntrinsic( |
4382 | 104 | Builder, Executable::Intrinsics::kTableGetFuncSymbol, |
4383 | 104 | LLVM::Type::getFunctionType( |
4384 | 104 | FTy.getPointerTo(), |
4385 | 104 | {Context.Int32Ty, Context.Int32Ty, Context.Int32Ty}, false)), |
4386 | 104 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4387 | 104 | FuncIndex}); |
4388 | 104 | Builder.createCondBr( |
4389 | 104 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4390 | 104 | NotNullBB, IsNullBB); |
4391 | 104 | Builder.positionAtEnd(NotNullBB); |
4392 | | |
4393 | 104 | auto FPtrRet = |
4394 | 104 | Builder.createCall(LLVM::FunctionCallee(FTy, FPtr), ArgsVec); |
4395 | 104 | if (RetSize == 0) { |
4396 | 19 | Builder.createRetVoid(); |
4397 | 85 | } else { |
4398 | 85 | Builder.createRet(FPtrRet); |
4399 | 85 | } |
4400 | 104 | } |
4401 | | |
4402 | 104 | Builder.positionAtEnd(IsNullBB); |
4403 | | |
4404 | 104 | { |
4405 | 104 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4406 | 104 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4407 | 104 | Builder.createArrayPtrStore( |
4408 | 104 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4409 | 104 | kValSize); |
4410 | | |
4411 | 104 | Builder.createCall( |
4412 | 104 | Context.getIntrinsic( |
4413 | 104 | Builder, Executable::Intrinsics::kCallIndirect, |
4414 | 104 | LLVM::Type::getFunctionType(Context.VoidTy, |
4415 | 104 | {Context.Int32Ty, Context.Int32Ty, |
4416 | 104 | Context.Int32Ty, Context.Int8PtrTy, |
4417 | 104 | Context.Int8PtrTy}, |
4418 | 104 | false)), |
4419 | 104 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4420 | 104 | FuncIndex, Args, Rets}); |
4421 | | |
4422 | 104 | if (RetSize == 0) { |
4423 | 19 | Builder.createRetVoid(); |
4424 | 85 | } else if (RetSize == 1) { |
4425 | 81 | Builder.createRet( |
4426 | 81 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4427 | 81 | } else { |
4428 | 4 | Builder.createAggregateRet(Builder.createArrayPtrLoad( |
4429 | 4 | RetSize, RTy, Rets, Context.Int8Ty, kValSize)); |
4430 | 4 | } |
4431 | 104 | } |
4432 | 104 | } |
4433 | | |
4434 | 11 | void compileCallRefOp(const unsigned int TypeIndex) noexcept { |
4435 | 11 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.not_null"); |
4436 | 11 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.is_null"); |
4437 | 11 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.end"); |
4438 | | |
4439 | 11 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
4440 | 11 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.ref_not_null"); |
4441 | 11 | auto IsRefNotNull = Builder.createLikely(Builder.createICmpNE( |
4442 | 11 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
4443 | 11 | LLContext.getInt64(0))); |
4444 | 11 | Builder.createCondBr(IsRefNotNull, OkBB, |
4445 | 11 | getTrapBB(ErrCode::Value::AccessNullFunc)); |
4446 | 11 | Builder.positionAtEnd(OkBB); |
4447 | | |
4448 | 11 | const auto &FuncType = Context.CompositeTypes[TypeIndex]->getFuncType(); |
4449 | 11 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4450 | 11 | auto RTy = FTy.getReturnType(); |
4451 | | |
4452 | 11 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4453 | 11 | const size_t RetSize = |
4454 | 11 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4455 | 11 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4456 | 11 | ArgsVec[0] = F.Fn.getFirstParam(); |
4457 | 12 | for (size_t I = 0; I < ArgSize; ++I) { |
4458 | 1 | const size_t J = ArgSize - I; |
4459 | 1 | ArgsVec[J] = stackPop(); |
4460 | 1 | } |
4461 | | |
4462 | 11 | std::vector<LLVM::Value> FPtrRetsVec; |
4463 | 11 | FPtrRetsVec.reserve(RetSize); |
4464 | 11 | { |
4465 | 11 | auto FPtr = Builder.createCall( |
4466 | 11 | Context.getIntrinsic( |
4467 | 11 | Builder, Executable::Intrinsics::kRefGetFuncSymbol, |
4468 | 11 | LLVM::Type::getFunctionType(FTy.getPointerTo(), |
4469 | 11 | {Context.Int64x2Ty}, false)), |
4470 | 11 | {Ref}); |
4471 | 11 | Builder.createCondBr( |
4472 | 11 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4473 | 11 | NotNullBB, IsNullBB); |
4474 | 11 | Builder.positionAtEnd(NotNullBB); |
4475 | | |
4476 | 11 | auto FPtrRet = |
4477 | 11 | Builder.createCall(LLVM::FunctionCallee{FTy, FPtr}, ArgsVec); |
4478 | 11 | if (RetSize == 0) { |
4479 | | // nothing to do |
4480 | 10 | } else if (RetSize == 1) { |
4481 | 10 | FPtrRetsVec.push_back(FPtrRet); |
4482 | 10 | } else { |
4483 | 0 | for (auto Val : unpackStruct(Builder, FPtrRet)) { |
4484 | 0 | FPtrRetsVec.push_back(Val); |
4485 | 0 | } |
4486 | 0 | } |
4487 | 11 | } |
4488 | | |
4489 | 11 | Builder.createBr(EndBB); |
4490 | 11 | Builder.positionAtEnd(IsNullBB); |
4491 | | |
4492 | 11 | std::vector<LLVM::Value> RetsVec; |
4493 | 11 | { |
4494 | 11 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4495 | 11 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4496 | 11 | Builder.createArrayPtrStore( |
4497 | 11 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4498 | 11 | kValSize); |
4499 | | |
4500 | 11 | Builder.createCall( |
4501 | 11 | Context.getIntrinsic( |
4502 | 11 | Builder, Executable::Intrinsics::kCallRef, |
4503 | 11 | LLVM::Type::getFunctionType( |
4504 | 11 | Context.VoidTy, |
4505 | 11 | {Context.Int64x2Ty, Context.Int8PtrTy, Context.Int8PtrTy}, |
4506 | 11 | false)), |
4507 | 11 | {Ref, Args, Rets}); |
4508 | | |
4509 | 11 | if (RetSize == 0) { |
4510 | | // nothing to do |
4511 | 10 | } else if (RetSize == 1) { |
4512 | 10 | RetsVec.push_back( |
4513 | 10 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4514 | 10 | } else { |
4515 | 0 | RetsVec = Builder.createArrayPtrLoad(RetSize, RTy, Rets, Context.Int8Ty, |
4516 | 0 | kValSize); |
4517 | 0 | } |
4518 | 11 | Builder.createBr(EndBB); |
4519 | 11 | Builder.positionAtEnd(EndBB); |
4520 | 11 | } |
4521 | | |
4522 | 21 | for (unsigned I = 0; I < RetSize; ++I) { |
4523 | 10 | auto PHIRet = Builder.createPHI(FPtrRetsVec[I].getType()); |
4524 | 10 | PHIRet.addIncoming(FPtrRetsVec[I], NotNullBB); |
4525 | 10 | PHIRet.addIncoming(RetsVec[I], IsNullBB); |
4526 | 10 | stackPush(PHIRet); |
4527 | 10 | } |
4528 | 11 | } |
4529 | | |
4530 | 2 | void compileReturnCallRefOp(const unsigned int TypeIndex) noexcept { |
4531 | 2 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.not_null"); |
4532 | 2 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.is_null"); |
4533 | | |
4534 | 2 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
4535 | 2 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.ref_not_null"); |
4536 | 2 | auto IsRefNotNull = Builder.createLikely(Builder.createICmpNE( |
4537 | 2 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
4538 | 2 | LLContext.getInt64(0))); |
4539 | 2 | Builder.createCondBr(IsRefNotNull, OkBB, |
4540 | 2 | getTrapBB(ErrCode::Value::AccessNullFunc)); |
4541 | 2 | Builder.positionAtEnd(OkBB); |
4542 | | |
4543 | 2 | const auto &FuncType = Context.CompositeTypes[TypeIndex]->getFuncType(); |
4544 | 2 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4545 | 2 | auto RTy = FTy.getReturnType(); |
4546 | | |
4547 | 2 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4548 | 2 | const size_t RetSize = |
4549 | 2 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4550 | 2 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4551 | 2 | ArgsVec[0] = F.Fn.getFirstParam(); |
4552 | 3 | for (size_t I = 0; I < ArgSize; ++I) { |
4553 | 1 | const size_t J = ArgSize - I; |
4554 | 1 | ArgsVec[J] = stackPop(); |
4555 | 1 | } |
4556 | | |
4557 | 2 | { |
4558 | 2 | auto FPtr = Builder.createCall( |
4559 | 2 | Context.getIntrinsic( |
4560 | 2 | Builder, Executable::Intrinsics::kRefGetFuncSymbol, |
4561 | 2 | LLVM::Type::getFunctionType(FTy.getPointerTo(), |
4562 | 2 | {Context.Int64x2Ty}, false)), |
4563 | 2 | {Ref}); |
4564 | 2 | Builder.createCondBr( |
4565 | 2 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4566 | 2 | NotNullBB, IsNullBB); |
4567 | 2 | Builder.positionAtEnd(NotNullBB); |
4568 | | |
4569 | 2 | auto FPtrRet = |
4570 | 2 | Builder.createCall(LLVM::FunctionCallee(FTy, FPtr), ArgsVec); |
4571 | 2 | if (RetSize == 0) { |
4572 | 1 | Builder.createRetVoid(); |
4573 | 1 | } else { |
4574 | 1 | Builder.createRet(FPtrRet); |
4575 | 1 | } |
4576 | 2 | } |
4577 | | |
4578 | 2 | Builder.positionAtEnd(IsNullBB); |
4579 | | |
4580 | 2 | { |
4581 | 2 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4582 | 2 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4583 | 2 | Builder.createArrayPtrStore( |
4584 | 2 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4585 | 2 | kValSize); |
4586 | | |
4587 | 2 | Builder.createCall( |
4588 | 2 | Context.getIntrinsic( |
4589 | 2 | Builder, Executable::Intrinsics::kCallRef, |
4590 | 2 | LLVM::Type::getFunctionType( |
4591 | 2 | Context.VoidTy, |
4592 | 2 | {Context.Int64x2Ty, Context.Int8PtrTy, Context.Int8PtrTy}, |
4593 | 2 | false)), |
4594 | 2 | {Ref, Args, Rets}); |
4595 | | |
4596 | 2 | if (RetSize == 0) { |
4597 | 1 | Builder.createRetVoid(); |
4598 | 1 | } else if (RetSize == 1) { |
4599 | 1 | Builder.createRet( |
4600 | 1 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4601 | 1 | } else { |
4602 | 0 | Builder.createAggregateRet(Builder.createArrayPtrLoad( |
4603 | 0 | RetSize, RTy, Rets, Context.Int8Ty, kValSize)); |
4604 | 0 | } |
4605 | 2 | } |
4606 | 2 | } |
4607 | | |
4608 | | void compileLoadOp(unsigned MemoryIndex, unsigned Offset, unsigned Alignment, |
4609 | 19.5k | LLVM::Type LoadTy) noexcept { |
4610 | 19.5k | if constexpr (kForceUnalignment) { |
4611 | 19.5k | Alignment = 0; |
4612 | 19.5k | } |
4613 | 19.5k | auto Off = Builder.createZExt(stackPop(), Context.Int64Ty); |
4614 | 19.5k | if (Offset != 0) { |
4615 | 12.9k | Off = Builder.createAdd(Off, LLContext.getInt64(Offset)); |
4616 | 12.9k | } |
4617 | | |
4618 | 19.5k | auto VPtr = Builder.createInBoundsGEP1( |
4619 | 19.5k | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), Off); |
4620 | 19.5k | auto Ptr = Builder.createBitCast(VPtr, LoadTy.getPointerTo()); |
4621 | 19.5k | auto LoadInst = Builder.createLoad(LoadTy, Ptr, true); |
4622 | 19.5k | LoadInst.setAlignment(1 << Alignment); |
4623 | 19.5k | stackPush(switchEndian(LoadInst)); |
4624 | 19.5k | } |
4625 | | void compileLoadOp(unsigned MemoryIndex, unsigned Offset, unsigned Alignment, |
4626 | | LLVM::Type LoadTy, LLVM::Type ExtendTy, |
4627 | 7.79k | bool Signed) noexcept { |
4628 | 7.79k | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4629 | 7.79k | if (Signed) { |
4630 | 3.37k | Stack.back() = Builder.createSExt(Stack.back(), ExtendTy); |
4631 | 4.42k | } else { |
4632 | 4.42k | Stack.back() = Builder.createZExt(Stack.back(), ExtendTy); |
4633 | 4.42k | } |
4634 | 7.79k | } |
4635 | | void compileVectorLoadOp(unsigned MemoryIndex, unsigned Offset, |
4636 | 4.88k | unsigned Alignment, LLVM::Type LoadTy) noexcept { |
4637 | 4.88k | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4638 | 4.88k | Stack.back() = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
4639 | 4.88k | } |
4640 | | void compileVectorLoadOp(unsigned MemoryIndex, unsigned Offset, |
4641 | | unsigned Alignment, LLVM::Type LoadTy, |
4642 | 1.70k | LLVM::Type ExtendTy, bool Signed) noexcept { |
4643 | 1.70k | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy, ExtendTy, Signed); |
4644 | 1.70k | Stack.back() = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
4645 | 1.70k | } |
4646 | | void compileSplatLoadOp(unsigned MemoryIndex, unsigned Offset, |
4647 | | unsigned Alignment, LLVM::Type LoadTy, |
4648 | 628 | LLVM::Type VectorTy) noexcept { |
4649 | 628 | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4650 | 628 | compileSplatOp(VectorTy); |
4651 | 628 | } |
4652 | | void compileLoadLaneOp(unsigned MemoryIndex, unsigned Offset, |
4653 | | unsigned Alignment, unsigned Index, LLVM::Type LoadTy, |
4654 | 509 | LLVM::Type VectorTy) noexcept { |
4655 | 509 | auto Vector = stackPop(); |
4656 | 509 | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4657 | | if constexpr (Endian::native == Endian::big) { |
4658 | | Index = VectorTy.getVectorSize() - 1 - Index; |
4659 | | } |
4660 | 509 | auto Value = Stack.back(); |
4661 | 509 | Stack.back() = Builder.createBitCast( |
4662 | 509 | Builder.createInsertElement(Builder.createBitCast(Vector, VectorTy), |
4663 | 509 | Value, LLContext.getInt64(Index)), |
4664 | 509 | Context.Int64x2Ty); |
4665 | 509 | } |
4666 | | void compileStoreOp(unsigned MemoryIndex, unsigned Offset, unsigned Alignment, |
4667 | | LLVM::Type LoadTy, bool Trunc = false, |
4668 | 3.45k | bool BitCast = false) noexcept { |
4669 | 3.45k | if constexpr (kForceUnalignment) { |
4670 | 3.45k | Alignment = 0; |
4671 | 3.45k | } |
4672 | 3.45k | auto V = stackPop(); |
4673 | 3.45k | auto Off = Builder.createZExt(stackPop(), Context.Int64Ty); |
4674 | 3.45k | if (Offset != 0) { |
4675 | 2.52k | Off = Builder.createAdd(Off, LLContext.getInt64(Offset)); |
4676 | 2.52k | } |
4677 | | |
4678 | 3.45k | if (Trunc) { |
4679 | 689 | V = Builder.createTrunc(V, LoadTy); |
4680 | 689 | } |
4681 | 3.45k | if (BitCast) { |
4682 | 298 | V = Builder.createBitCast(V, LoadTy); |
4683 | 298 | } |
4684 | 3.45k | V = switchEndian(V); |
4685 | 3.45k | auto VPtr = Builder.createInBoundsGEP1( |
4686 | 3.45k | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), Off); |
4687 | 3.45k | auto Ptr = Builder.createBitCast(VPtr, LoadTy.getPointerTo()); |
4688 | 3.45k | auto StoreInst = Builder.createStore(V, Ptr, true); |
4689 | 3.45k | StoreInst.setAlignment(1 << Alignment); |
4690 | 3.45k | } |
4691 | | void compileStoreLaneOp(unsigned MemoryIndex, unsigned Offset, |
4692 | | unsigned Alignment, unsigned Index, LLVM::Type LoadTy, |
4693 | 412 | LLVM::Type VectorTy) noexcept { |
4694 | 412 | auto Vector = Stack.back(); |
4695 | | if constexpr (Endian::native == Endian::big) { |
4696 | | Index = VectorTy.getVectorSize() - Index - 1; |
4697 | | } |
4698 | 412 | Stack.back() = Builder.createExtractElement( |
4699 | 412 | Builder.createBitCast(Vector, VectorTy), LLContext.getInt64(Index)); |
4700 | 412 | compileStoreOp(MemoryIndex, Offset, Alignment, LoadTy); |
4701 | 412 | } |
4702 | 51.2k | void compileSplatOp(LLVM::Type VectorTy) noexcept { |
4703 | 51.2k | auto Undef = LLVM::Value::getUndef(VectorTy); |
4704 | 51.2k | auto Zeros = LLVM::Value::getConstNull( |
4705 | 51.2k | LLVM::Type::getVectorType(Context.Int32Ty, VectorTy.getVectorSize())); |
4706 | 51.2k | auto Value = Builder.createTrunc(Stack.back(), VectorTy.getElementType()); |
4707 | 51.2k | auto Vector = |
4708 | 51.2k | Builder.createInsertElement(Undef, Value, LLContext.getInt64(0)); |
4709 | 51.2k | Vector = Builder.createShuffleVector(Vector, Undef, Zeros); |
4710 | | |
4711 | 51.2k | Stack.back() = Builder.createBitCast(Vector, Context.Int64x2Ty); |
4712 | 51.2k | } |
4713 | 1.31k | void compileExtractLaneOp(LLVM::Type VectorTy, unsigned Index) noexcept { |
4714 | 1.31k | auto Vector = Builder.createBitCast(Stack.back(), VectorTy); |
4715 | | if constexpr (Endian::native == Endian::big) { |
4716 | | Index = VectorTy.getVectorSize() - Index - 1; |
4717 | | } |
4718 | 1.31k | Stack.back() = |
4719 | 1.31k | Builder.createExtractElement(Vector, LLContext.getInt64(Index)); |
4720 | 1.31k | } |
4721 | | void compileExtractLaneOp(LLVM::Type VectorTy, unsigned Index, |
4722 | 976 | LLVM::Type ExtendTy, bool Signed) noexcept { |
4723 | 976 | compileExtractLaneOp(VectorTy, Index); |
4724 | 976 | if (Signed) { |
4725 | 494 | Stack.back() = Builder.createSExt(Stack.back(), ExtendTy); |
4726 | 494 | } else { |
4727 | 482 | Stack.back() = Builder.createZExt(Stack.back(), ExtendTy); |
4728 | 482 | } |
4729 | 976 | } |
4730 | 1.27k | void compileReplaceLaneOp(LLVM::Type VectorTy, unsigned Index) noexcept { |
4731 | 1.27k | auto Value = Builder.createTrunc(stackPop(), VectorTy.getElementType()); |
4732 | 1.27k | auto Vector = Stack.back(); |
4733 | | if constexpr (Endian::native == Endian::big) { |
4734 | | Index = VectorTy.getVectorSize() - Index - 1; |
4735 | | } |
4736 | 1.27k | Stack.back() = Builder.createBitCast( |
4737 | 1.27k | Builder.createInsertElement(Builder.createBitCast(Vector, VectorTy), |
4738 | 1.27k | Value, LLContext.getInt64(Index)), |
4739 | 1.27k | Context.Int64x2Ty); |
4740 | 1.27k | } |
4741 | | void compileVectorCompareOp(LLVM::Type VectorTy, |
4742 | 5.15k | LLVMIntPredicate Predicate) noexcept { |
4743 | 5.15k | auto RHS = stackPop(); |
4744 | 5.15k | auto LHS = stackPop(); |
4745 | 5.15k | auto Result = Builder.createSExt( |
4746 | 5.15k | Builder.createICmp(Predicate, Builder.createBitCast(LHS, VectorTy), |
4747 | 5.15k | Builder.createBitCast(RHS, VectorTy)), |
4748 | 5.15k | VectorTy); |
4749 | 5.15k | stackPush(Builder.createBitCast(Result, Context.Int64x2Ty)); |
4750 | 5.15k | } |
4751 | | void compileVectorCompareOp(LLVM::Type VectorTy, LLVMRealPredicate Predicate, |
4752 | 3.27k | LLVM::Type ResultTy) noexcept { |
4753 | 3.27k | auto RHS = stackPop(); |
4754 | 3.27k | auto LHS = stackPop(); |
4755 | 3.27k | auto Result = Builder.createSExt( |
4756 | 3.27k | Builder.createFCmp(Predicate, Builder.createBitCast(LHS, VectorTy), |
4757 | 3.27k | Builder.createBitCast(RHS, VectorTy)), |
4758 | 3.27k | ResultTy); |
4759 | 3.27k | stackPush(Builder.createBitCast(Result, Context.Int64x2Ty)); |
4760 | 3.27k | } |
4761 | | template <typename Func> |
4762 | 25.3k | void compileVectorOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4763 | 25.3k | auto V = Builder.createBitCast(Stack.back(), VectorTy); |
4764 | 25.3k | Stack.back() = Builder.createBitCast(Op(V), Context.Int64x2Ty); |
4765 | 25.3k | }
4766 | 2.10k | void compileVectorAbs(LLVM::Type VectorTy) noexcept { |
4767 | 2.10k | compileVectorOp(VectorTy, [this, VectorTy](auto V) noexcept { |
4768 | 2.10k | auto Zero = LLVM::Value::getConstNull(VectorTy); |
4769 | 2.10k | auto C = Builder.createICmpSLT(V, Zero); |
4770 | 2.10k | return Builder.createSelect(C, Builder.createNeg(V), V); |
4771 | 2.10k | }); |
4772 | 2.10k | } |
4773 | 2.49k | void compileVectorNeg(LLVM::Type VectorTy) noexcept { |
4774 | 2.49k | compileVectorOp(VectorTy, |
4775 | 2.49k | [this](auto V) noexcept { return Builder.createNeg(V); }); |
4776 | 2.49k | } |
4777 | 145 | void compileVectorPopcnt() noexcept { |
4778 | 145 | compileVectorOp(Context.Int8x16Ty, [this](auto V) noexcept { |
4779 | 145 | assuming(LLVM::Core::Ctpop != LLVM::Core::NotIntrinsic); |
4780 | 145 | return Builder.createUnaryIntrinsic(LLVM::Core::Ctpop, V); |
4781 | 145 | }); |
4782 | 145 | } |
4783 | | template <typename Func> |
4784 | 2.38k | void compileVectorReduceIOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4785 | 2.38k | auto V = Builder.createBitCast(Stack.back(), VectorTy); |
4786 | 2.38k | Stack.back() = Builder.createZExt(Op(V), Context.Int32Ty); |
4787 | 2.38k | }
4788 | 107 | void compileVectorAnyTrue() noexcept { |
4789 | 107 | compileVectorReduceIOp(Context.Int128x1Ty, [this](auto V) noexcept { |
4790 | 107 | auto Zero = LLVM::Value::getConstNull(Context.Int128x1Ty); |
4791 | 107 | return Builder.createBitCast(Builder.createICmpNE(V, Zero), |
4792 | 107 | LLContext.getInt1Ty()); |
4793 | 107 | }); |
4794 | 107 | } |
4795 | 1.00k | void compileVectorAllTrue(LLVM::Type VectorTy) noexcept { |
4796 | 1.00k | compileVectorReduceIOp(VectorTy, [this, VectorTy](auto V) noexcept { |
4797 | 1.00k | const auto Size = VectorTy.getVectorSize(); |
4798 | 1.00k | auto IntType = LLContext.getIntNTy(Size); |
4799 | 1.00k | auto Zero = LLVM::Value::getConstNull(VectorTy); |
4800 | 1.00k | auto Cmp = Builder.createBitCast(Builder.createICmpEQ(V, Zero), IntType); |
4801 | 1.00k | auto CmpZero = LLVM::Value::getConstInt(IntType, 0); |
4802 | 1.00k | return Builder.createICmpEQ(Cmp, CmpZero); |
4803 | 1.00k | }); |
4804 | 1.00k | } |
4805 | 1.27k | void compileVectorBitMask(LLVM::Type VectorTy) noexcept { |
4806 | 1.27k | compileVectorReduceIOp(VectorTy, [this, VectorTy](auto V) noexcept { |
4807 | 1.27k | const auto Size = VectorTy.getVectorSize(); |
4808 | 1.27k | auto IntType = LLContext.getIntNTy(Size); |
4809 | 1.27k | auto Zero = LLVM::Value::getConstNull(VectorTy); |
4810 | 1.27k | return Builder.createBitCast(Builder.createICmpSLT(V, Zero), IntType); |
4811 | 1.27k | }); |
4812 | 1.27k | } |
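// Scalar sketch (illustrative only, not part of compiler.cpp) of the lane
// semantics that compileVectorAllTrue and compileVectorBitMask lower above:
// all_true is "no lane is zero", bitmask collects each lane's sign bit.
// Helper names below are assumptions made for the sketch.
#include <array>
#include <cstdint>
#include <type_traits>

template <typename T, std::size_t N>
bool allTrue(const std::array<T, N> &Lanes) {
  for (const T L : Lanes) {
    if (L == 0) {
      return false; // a single zero lane makes all_true return 0
    }
  }
  return true;
}

template <typename T, std::size_t N>
uint32_t bitMask(const std::array<T, N> &Lanes) {
  uint32_t Mask = 0;
  for (std::size_t I = 0; I < N; ++I) {
    if (static_cast<std::make_signed_t<T>>(Lanes[I]) < 0) {
      Mask |= UINT32_C(1) << I; // lane I's sign bit becomes result bit I
    }
  }
  return Mask;
}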
4813 | | template <typename Func> |
4814 | 4.89k | void compileVectorShiftOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4815 | 4.89k | const bool Trunc = VectorTy.getElementType().getIntegerBitWidth() < 32; |
4816 | 4.89k | const uint32_t Mask = VectorTy.getElementType().getIntegerBitWidth() - 1; |
4817 | 4.89k | auto N = Builder.createAnd(stackPop(), LLContext.getInt32(Mask)); |
4818 | 4.89k | auto RHS = Builder.createVectorSplat( |
4819 | 4.89k | VectorTy.getVectorSize(), |
4820 | 4.89k | Trunc ? Builder.createTrunc(N, VectorTy.getElementType()) |
4821 | 4.89k | : Builder.createZExtOrTrunc(N, VectorTy.getElementType())); |
4822 | 4.89k | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
4823 | 4.89k | stackPush(Builder.createBitCast(Op(LHS, RHS), Context.Int64x2Ty)); |
4824 | 4.89k | } |
  Instantiations of compileVectorShiftOp (Lines 4814-4824):
    compileVectorShl(WasmEdge::LLVM::Type)::{lambda#1}  | Count: 1.89k
    compileVectorAShr(WasmEdge::LLVM::Type)::{lambda#1} | Count: 2.24k
    compileVectorLShr(WasmEdge::LLVM::Type)::{lambda#1} | Count: 756
4825 | 1.89k | void compileVectorShl(LLVM::Type VectorTy) noexcept { |
4826 | 1.89k | compileVectorShiftOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4827 | 1.89k | return Builder.createShl(LHS, RHS); |
4828 | 1.89k | }); |
4829 | 1.89k | } |
4830 | 756 | void compileVectorLShr(LLVM::Type VectorTy) noexcept { |
4831 | 756 | compileVectorShiftOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4832 | 756 | return Builder.createLShr(LHS, RHS); |
4833 | 756 | }); |
4834 | 756 | } |
4835 | 2.24k | void compileVectorAShr(LLVM::Type VectorTy) noexcept { |
4836 | 2.24k | compileVectorShiftOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4837 | 2.24k | return Builder.createAShr(LHS, RHS); |
4838 | 2.24k | }); |
4839 | 2.24k | } |
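// Scalar sketch (illustrative only, not part of compiler.cpp) of the per-lane
// behaviour the shift helpers above implement: Wasm SIMD takes the scalar
// shift count modulo the lane width, which is the createAnd(stackPop(), Mask)
// step. Helper names are assumptions made for the sketch.
#include <cstdint>

static uint8_t i8x16ShlLane(uint8_t Lane, uint32_t Count) {
  return static_cast<uint8_t>(Lane << (Count & 7U)); // count mod 8
}

static int16_t i16x8ShrSLane(int16_t Lane, uint32_t Count) {
  return static_cast<int16_t>(Lane >> (Count & 15U)); // arithmetic shift, count mod 16
}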
4840 | | template <typename Func> |
4841 | 8.36k | void compileVectorVectorOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4842 | 8.36k | auto RHS = Builder.createBitCast(stackPop(), VectorTy); |
4843 | 8.36k | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
4844 | 8.36k | stackPush(Builder.createBitCast(Op(LHS, RHS), Context.Int64x2Ty)); |
4845 | 8.36k | } |
  Instantiations of compileVectorVectorOp (Lines 4841-4845):
    compileVectorVectorAdd(WasmEdge::LLVM::Type)::{lambda#1}          | Count: 406
    compileVectorVectorAddSat(WasmEdge::LLVM::Type, bool)::{lambda#1} | Count: 1.53k
    compileVectorVectorSub(WasmEdge::LLVM::Type)::{lambda#1}          | Count: 873
    compileVectorVectorSubSat(WasmEdge::LLVM::Type, bool)::{lambda#1} | Count: 400
    compileVectorVectorSMin(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 312
    compileVectorVectorUMin(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 340
    compileVectorVectorSMax(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 421
    compileVectorVectorUMax(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 871
    compileVectorVectorUAvgr(WasmEdge::LLVM::Type)::{lambda#1}        | Count: 268
    compileVectorVectorMul(WasmEdge::LLVM::Type)::{lambda#1}          | Count: 453
    compileVectorVectorQ15MulSat()::{lambda#1}                        | Count: 162
    compileVectorVectorFAdd(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 181
    compileVectorVectorFSub(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 459
    compileVectorVectorFMul(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 248
    compileVectorVectorFDiv(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 204
    compileVectorVectorFMin(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 306
    compileVectorVectorFMax(WasmEdge::LLVM::Type)::{lambda#1}         | Count: 214
    compileVectorVectorFPMin(WasmEdge::LLVM::Type)::{lambda#1}        | Count: 385
    compileVectorVectorFPMax(WasmEdge::LLVM::Type)::{lambda#1}        | Count: 331
4846 | 406 | void compileVectorVectorAdd(LLVM::Type VectorTy) noexcept { |
4847 | 406 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4848 | 406 | return Builder.createAdd(LHS, RHS); |
4849 | 406 | }); |
4850 | 406 | } |
4851 | 1.53k | void compileVectorVectorAddSat(LLVM::Type VectorTy, bool Signed) noexcept { |
4852 | 1.53k | auto ID = Signed ? LLVM::Core::SAddSat : LLVM::Core::UAddSat; |
4853 | 1.53k | assuming(ID != LLVM::Core::NotIntrinsic); |
4854 | 1.53k | compileVectorVectorOp( |
4855 | 1.53k | VectorTy, [this, VectorTy, ID](auto LHS, auto RHS) noexcept { |
4856 | 1.53k | return Builder.createIntrinsic(ID, {VectorTy}, {LHS, RHS}); |
4857 | 1.53k | }); |
4858 | 1.53k | } |
4859 | 873 | void compileVectorVectorSub(LLVM::Type VectorTy) noexcept { |
4860 | 873 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4861 | 873 | return Builder.createSub(LHS, RHS); |
4862 | 873 | }); |
4863 | 873 | } |
4864 | 400 | void compileVectorVectorSubSat(LLVM::Type VectorTy, bool Signed) noexcept { |
4865 | 400 | auto ID = Signed ? LLVM::Core::SSubSat : LLVM::Core::USubSat; |
4866 | 400 | assuming(ID != LLVM::Core::NotIntrinsic); |
4867 | 400 | compileVectorVectorOp( |
4868 | 400 | VectorTy, [this, VectorTy, ID](auto LHS, auto RHS) noexcept { |
4869 | 400 | return Builder.createIntrinsic(ID, {VectorTy}, {LHS, RHS}); |
4870 | 400 | }); |
4871 | 400 | } |
4872 | 453 | void compileVectorVectorMul(LLVM::Type VectorTy) noexcept { |
4873 | 453 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4874 | 453 | return Builder.createMul(LHS, RHS); |
4875 | 453 | }); |
4876 | 453 | } |
4877 | 111 | void compileVectorSwizzle() noexcept { |
4878 | 111 | auto Index = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
4879 | 111 | auto Vector = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
4880 | | |
4881 | 111 | #if defined(__x86_64__) |
4882 | 111 | if (Context.SupportSSSE3) { |
4883 | 111 | auto Magic = Builder.createVectorSplat(16, LLContext.getInt8(112)); |
4884 | 111 | auto Added = Builder.createAdd(Index, Magic); |
4885 | 111 | auto NewIndex = Builder.createSelect( |
4886 | 111 | Builder.createICmpUGT(Index, Added), |
4887 | 111 | LLVM::Value::getConstAllOnes(Context.Int8x16Ty), Added); |
4888 | 111 | assuming(LLVM::Core::X86SSSE3PShufB128 != LLVM::Core::NotIntrinsic); |
4889 | 111 | stackPush(Builder.createBitCast( |
4890 | 111 | Builder.createIntrinsic(LLVM::Core::X86SSSE3PShufB128, {}, |
4891 | 111 | {Vector, NewIndex}), |
4892 | 111 | Context.Int64x2Ty)); |
4893 | 111 | return; |
4894 | 111 | } |
4895 | 0 | #endif |
4896 | | |
4897 | | #if defined(__aarch64__) |
4898 | | if (Context.SupportNEON) { |
4899 | | assuming(LLVM::Core::AArch64NeonTbl1 != LLVM::Core::NotIntrinsic); |
4900 | | stackPush(Builder.createBitCast( |
4901 | | Builder.createIntrinsic(LLVM::Core::AArch64NeonTbl1, |
4902 | | {Context.Int8x16Ty}, {Vector, Index}), |
4903 | | Context.Int64x2Ty)); |
4904 | | return; |
4905 | | } |
4906 | | #endif |
4907 | | |
4908 | 0 | auto Mask = Builder.createVectorSplat(16, LLContext.getInt8(15)); |
4909 | 0 | auto Zero = Builder.createVectorSplat(16, LLContext.getInt8(0)); |
4910 | |
4911 | | #if defined(__s390x__) |
4912 | | assuming(LLVM::Core::S390VPerm != LLVM::Core::NotIntrinsic); |
4913 | | auto Exceed = Builder.createICmpULE(Index, Mask); |
4914 | | Index = Builder.createSub(Mask, Index); |
4915 | | auto Result = Builder.createIntrinsic(LLVM::Core::S390VPerm, {}, |
4916 | | {Vector, Zero, Index}); |
4917 | | Result = Builder.createSelect(Exceed, Result, Zero); |
4918 | | stackPush(Builder.createBitCast(Result, Context.Int64x2Ty)); |
4919 | | return; |
4920 | | #endif |
4921 | | |
4922 | | // Fallback case. |
4923 | | // If SSSE3 is not supported on the x86_64 platform, or NEON is not |
4924 | | // supported on the aarch64 platform, fall back to this generic |
4925 | | // lowering. |
4926 | 0 | auto IsOver = Builder.createICmpUGT(Index, Mask); |
4927 | 0 | auto InboundIndex = Builder.createAnd(Index, Mask); |
4928 | 0 | auto Array = Builder.createArray(16, 1); |
4929 | 0 | for (size_t I = 0; I < 16; ++I) { |
4930 | 0 | Builder.createStore( |
4931 | 0 | Builder.createExtractElement(Vector, LLContext.getInt64(I)), |
4932 | 0 | Builder.createInBoundsGEP1(Context.Int8Ty, Array, |
4933 | 0 | LLContext.getInt64(I))); |
4934 | 0 | } |
4935 | 0 | LLVM::Value Ret = LLVM::Value::getUndef(Context.Int8x16Ty); |
4936 | 0 | for (size_t I = 0; I < 16; ++I) { |
4937 | 0 | auto Idx = |
4938 | 0 | Builder.createExtractElement(InboundIndex, LLContext.getInt64(I)); |
4939 | 0 | auto Value = Builder.createLoad( |
4940 | 0 | Context.Int8Ty, |
4941 | 0 | Builder.createInBoundsGEP1(Context.Int8Ty, Array, Idx)); |
4942 | 0 | Ret = Builder.createInsertElement(Ret, Value, LLContext.getInt64(I)); |
4943 | 0 | } |
4944 | 0 | Ret = Builder.createSelect(IsOver, Zero, Ret); |
4945 | 0 | stackPush(Builder.createBitCast(Ret, Context.Int64x2Ty)); |
4946 | 0 | } |
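// Scalar sketch (illustrative only, not part of compiler.cpp) of i8x16.swizzle,
// which all three paths above implement: an index lane >= 16 selects zero. The
// SSSE3 path reaches the same result by adding 112 (0x70) and saturating any
// index that wraps around to 0xFF, since PSHUFB already zeroes lanes whose
// index byte has its top bit set. The helper name is an assumption.
#include <array>
#include <cstdint>

static std::array<uint8_t, 16> swizzle(const std::array<uint8_t, 16> &Vec,
                                       const std::array<uint8_t, 16> &Idx) {
  std::array<uint8_t, 16> Out{};
  for (std::size_t I = 0; I < 16; ++I) {
    Out[I] = Idx[I] < 16 ? Vec[Idx[I]] : 0; // out-of-range lane selects zero
  }
  return Out;
}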
4947 | | |
4948 | 162 | void compileVectorVectorQ15MulSat() noexcept { |
4949 | 162 | compileVectorVectorOp( |
4950 | 162 | Context.Int16x8Ty, [this](auto LHS, auto RHS) noexcept -> LLVM::Value { |
4951 | 162 | #if defined(__x86_64__) |
4952 | 162 | if (Context.SupportSSSE3) { |
4953 | 162 | assuming(LLVM::Core::X86SSSE3PMulHrSw128 != |
4954 | 162 | LLVM::Core::NotIntrinsic); |
4955 | 162 | auto Result = Builder.createIntrinsic( |
4956 | 162 | LLVM::Core::X86SSSE3PMulHrSw128, {}, {LHS, RHS}); |
4957 | 162 | auto IntMaxV = Builder.createVectorSplat( |
4958 | 162 | 8, LLContext.getInt16(UINT16_C(0x8000))); |
4959 | 162 | auto NotOver = Builder.createSExt( |
4960 | 162 | Builder.createICmpEQ(Result, IntMaxV), Context.Int16x8Ty); |
4961 | 162 | return Builder.createXor(Result, NotOver); |
4962 | 162 | } |
4963 | 0 | #endif |
4964 | | |
4965 | | #if defined(__aarch64__) |
4966 | | if (Context.SupportNEON) { |
4967 | | assuming(LLVM::Core::AArch64NeonSQRDMulH != |
4968 | | LLVM::Core::NotIntrinsic); |
4969 | | return Builder.createBinaryIntrinsic( |
4970 | | LLVM::Core::AArch64NeonSQRDMulH, LHS, RHS); |
4971 | | } |
4972 | | #endif |
4973 | | |
4974 | | // Fallback case. |
4975 | | // If SSSE3 is not supported on the x86_64 platform, or NEON is not |
4976 | | // supported on the aarch64 platform, fall back to this generic |
4977 | | // lowering. |
4978 | 0 | auto ExtTy = Context.Int16x8Ty.getExtendedElementVectorType(); |
4979 | 0 | auto Offset = Builder.createVectorSplat( |
4980 | 0 | 8, LLContext.getInt32(UINT32_C(0x4000))); |
4981 | 0 | auto Shift = |
4982 | 0 | Builder.createVectorSplat(8, LLContext.getInt32(UINT32_C(15))); |
4983 | 0 | auto ExtLHS = Builder.createSExt(LHS, ExtTy); |
4984 | 0 | auto ExtRHS = Builder.createSExt(RHS, ExtTy); |
4985 | 0 | auto Result = Builder.createTrunc( |
4986 | 0 | Builder.createAShr( |
4987 | 0 | Builder.createAdd(Builder.createMul(ExtLHS, ExtRHS), Offset), |
4988 | 0 | Shift), |
4989 | 0 | Context.Int16x8Ty); |
4990 | 0 | auto IntMaxV = Builder.createVectorSplat( |
4991 | 0 | 8, LLContext.getInt16(UINT16_C(0x8000))); |
4992 | 0 | auto NotOver = Builder.createSExt( |
4993 | 0 | Builder.createICmpEQ(Result, IntMaxV), Context.Int16x8Ty); |
4994 | 0 | return Builder.createXor(Result, NotOver); |
4995 | 162 | }); |
4996 | 162 | } |
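// Scalar sketch (illustrative only, not part of compiler.cpp) of
// i16x8.q15mulr_sat_s per lane. The only rounded product that overflows int16
// is INT16_MIN * INT16_MIN, which both paths above patch up: the XOR with the
// equality mask turns the wrapped 0x8000 back into 0x7FFF.
#include <algorithm>
#include <cstdint>

static int16_t q15MulRSat(int16_t A, int16_t B) {
  const int32_t Rounded = (int32_t{A} * int32_t{B} + 0x4000) >> 15;
  return static_cast<int16_t>(
      std::clamp(Rounded, int32_t{INT16_MIN}, int32_t{INT16_MAX}));
}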
4997 | 312 | void compileVectorVectorSMin(LLVM::Type VectorTy) noexcept { |
4998 | 312 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4999 | 312 | auto C = Builder.createICmpSLE(LHS, RHS); |
5000 | 312 | return Builder.createSelect(C, LHS, RHS); |
5001 | 312 | }); |
5002 | 312 | } |
5003 | 340 | void compileVectorVectorUMin(LLVM::Type VectorTy) noexcept { |
5004 | 340 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5005 | 340 | auto C = Builder.createICmpULE(LHS, RHS); |
5006 | 340 | return Builder.createSelect(C, LHS, RHS); |
5007 | 340 | }); |
5008 | 340 | } |
5009 | 421 | void compileVectorVectorSMax(LLVM::Type VectorTy) noexcept { |
5010 | 421 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5011 | 421 | auto C = Builder.createICmpSGE(LHS, RHS); |
5012 | 421 | return Builder.createSelect(C, LHS, RHS); |
5013 | 421 | }); |
5014 | 421 | } |
5015 | 871 | void compileVectorVectorUMax(LLVM::Type VectorTy) noexcept { |
5016 | 871 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5017 | 871 | auto C = Builder.createICmpUGE(LHS, RHS); |
5018 | 871 | return Builder.createSelect(C, LHS, RHS); |
5019 | 871 | }); |
5020 | 871 | } |
5021 | 268 | void compileVectorVectorUAvgr(LLVM::Type VectorTy) noexcept { |
5022 | 268 | auto ExtendTy = VectorTy.getExtendedElementVectorType(); |
5023 | 268 | compileVectorVectorOp( |
5024 | 268 | VectorTy, |
5025 | 268 | [this, VectorTy, ExtendTy](auto LHS, auto RHS) noexcept -> LLVM::Value { |
5026 | 268 | #if defined(__x86_64__) |
5027 | 268 | if (Context.SupportSSE2) { |
5028 | 268 | const auto ID = [VectorTy]() noexcept { |
5029 | 268 | switch (VectorTy.getElementType().getIntegerBitWidth()) { |
5030 | 123 | case 8: |
5031 | 123 | return LLVM::Core::X86SSE2PAvgB; |
5032 | 145 | case 16: |
5033 | 145 | return LLVM::Core::X86SSE2PAvgW; |
5034 | 0 | default: |
5035 | 0 | assumingUnreachable(); |
5036 | 268 | } |
5037 | 268 | }(); |
5038 | 268 | assuming(ID != LLVM::Core::NotIntrinsic); |
5039 | 268 | return Builder.createIntrinsic(ID, {}, {LHS, RHS}); |
5040 | 268 | } |
5041 | 0 | #endif |
5042 | | |
5043 | | #if defined(__aarch64__) |
5044 | | if (Context.SupportNEON) { |
5045 | | assuming(LLVM::Core::AArch64NeonURHAdd != LLVM::Core::NotIntrinsic); |
5046 | | return Builder.createBinaryIntrinsic(LLVM::Core::AArch64NeonURHAdd, |
5047 | | LHS, RHS); |
5048 | | } |
5049 | | #endif |
5050 | | |
5051 | | // Fallback case. |
5052 | | // If SSE2 is not supported on the x86_64 platform, or NEON is not |
5053 | | // supported on the aarch64 platform, fall back to this generic |
5054 | | // lowering. |
5055 | 0 | auto EL = Builder.createZExt(LHS, ExtendTy); |
5056 | 0 | auto ER = Builder.createZExt(RHS, ExtendTy); |
5057 | 0 | auto One = Builder.createZExt( |
5058 | 0 | Builder.createVectorSplat(ExtendTy.getVectorSize(), |
5059 | 0 | LLContext.getTrue()), |
5060 | 0 | ExtendTy); |
5061 | 0 | return Builder.createTrunc( |
5062 | 0 | Builder.createLShr( |
5063 | 0 | Builder.createAdd(Builder.createAdd(EL, ER), One), One), |
5064 | 0 | VectorTy); |
5065 | 268 | }); |
5066 | 268 | } |
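// Scalar sketch (illustrative only, not part of compiler.cpp) of i8x16.avgr_u
// and i16x8.avgr_u per lane: a rounding average computed in a wider type so
// the +1 cannot overflow, which is exactly what the zext-based fallback above
// does.
#include <cstdint>

static uint8_t avgrU8(uint8_t A, uint8_t B) {
  return static_cast<uint8_t>((uint32_t{A} + uint32_t{B} + 1U) >> 1);
}

static uint16_t avgrU16(uint16_t A, uint16_t B) {
  return static_cast<uint16_t>((uint32_t{A} + uint32_t{B} + 1U) >> 1);
}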
5067 | 687 | void compileVectorNarrow(LLVM::Type FromTy, bool Signed) noexcept { |
5068 | 687 | auto [MinInt, |
5069 | 687 | MaxInt] = [&]() noexcept -> std::tuple<LLVM::Value, LLVM::Value> { |
5070 | 687 | switch (FromTy.getElementType().getIntegerBitWidth()) { |
5071 | 277 | case 16: { |
5072 | 277 | const auto Min = |
5073 | 277 | static_cast<int16_t>(Signed ? std::numeric_limits<int8_t>::min() |
5074 | 277 | : std::numeric_limits<uint8_t>::min()); |
5075 | 277 | const auto Max = |
5076 | 277 | static_cast<int16_t>(Signed ? std::numeric_limits<int8_t>::max() |
5077 | 277 | : std::numeric_limits<uint8_t>::max()); |
5078 | 277 | return {LLContext.getInt16(static_cast<uint16_t>(Min)), |
5079 | 277 | LLContext.getInt16(static_cast<uint16_t>(Max))}; |
5080 | 0 | } |
5081 | 410 | case 32: { |
5082 | 410 | const auto Min = |
5083 | 410 | static_cast<int32_t>(Signed ? std::numeric_limits<int16_t>::min() |
5084 | 410 | : std::numeric_limits<uint16_t>::min()); |
5085 | 410 | const auto Max = |
5086 | 410 | static_cast<int32_t>(Signed ? std::numeric_limits<int16_t>::max() |
5087 | 410 | : std::numeric_limits<uint16_t>::max()); |
5088 | 410 | return {LLContext.getInt32(static_cast<uint32_t>(Min)), |
5089 | 410 | LLContext.getInt32(static_cast<uint32_t>(Max))}; |
5090 | 0 | } |
5091 | 0 | default: |
5092 | 0 | assumingUnreachable(); |
5093 | 687 | } |
5094 | 687 | }(); |
5095 | 687 | const auto Count = FromTy.getVectorSize(); |
5096 | 687 | auto VMin = Builder.createVectorSplat(Count, MinInt); |
5097 | 687 | auto VMax = Builder.createVectorSplat(Count, MaxInt); |
5098 | | |
5099 | 687 | auto TruncTy = FromTy.getTruncatedElementVectorType(); |
5100 | | |
5101 | 687 | auto F2 = Builder.createBitCast(stackPop(), FromTy); |
5102 | 687 | F2 = Builder.createSelect(Builder.createICmpSLT(F2, VMin), VMin, F2); |
5103 | 687 | F2 = Builder.createSelect(Builder.createICmpSGT(F2, VMax), VMax, F2); |
5104 | 687 | F2 = Builder.createTrunc(F2, TruncTy); |
5105 | | |
5106 | 687 | auto F1 = Builder.createBitCast(stackPop(), FromTy); |
5107 | 687 | F1 = Builder.createSelect(Builder.createICmpSLT(F1, VMin), VMin, F1); |
5108 | 687 | F1 = Builder.createSelect(Builder.createICmpSGT(F1, VMax), VMax, F1); |
5109 | 687 | F1 = Builder.createTrunc(F1, TruncTy); |
5110 | | |
5111 | 687 | std::vector<uint32_t> Mask(Count * 2); |
5112 | 687 | std::iota(Mask.begin(), Mask.end(), 0); |
5113 | 687 | auto V = Endian::native == Endian::little |
5114 | 687 | ? Builder.createShuffleVector( |
5115 | 687 | F1, F2, LLVM::Value::getConstVector32(LLContext, Mask)) |
5116 | 687 | : Builder.createShuffleVector( |
5117 | 0 | F2, F1, LLVM::Value::getConstVector32(LLContext, Mask)); |
5118 | 687 | stackPush(Builder.createBitCast(V, Context.Int64x2Ty)); |
5119 | 687 | } |
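// Scalar sketch (illustrative only, not part of compiler.cpp) of the per-lane
// saturation compileVectorNarrow performs before createTrunc: each source lane
// is clamped to the destination range, then the two clamped halves are
// concatenated by the shuffle. Helper names are assumptions.
#include <algorithm>
#include <cstdint>

static int8_t narrowS8(int16_t Lane) { // i8x16.narrow_i16x8_s
  return static_cast<int8_t>(std::clamp<int16_t>(Lane, INT8_MIN, INT8_MAX));
}

static uint16_t narrowU16(int32_t Lane) { // i16x8.narrow_i32x4_u
  return static_cast<uint16_t>(std::clamp<int32_t>(Lane, 0, 0xFFFF));
}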
5120 | 5.93k | void compileVectorExtend(LLVM::Type FromTy, bool Signed, bool Low) noexcept { |
5121 | 5.93k | auto ExtTy = FromTy.getExtendedElementVectorType(); |
5122 | 5.93k | const auto Count = FromTy.getVectorSize(); |
5123 | 5.93k | std::vector<uint32_t> Mask(Count / 2); |
5124 | | if constexpr (Endian::native == Endian::big) { |
5125 | | Low = !Low; |
5126 | | } |
5127 | 5.93k | std::iota(Mask.begin(), Mask.end(), Low ? 0 : Count / 2); |
5128 | 5.93k | auto R = Builder.createBitCast(Stack.back(), FromTy); |
5129 | 5.93k | if (Signed) { |
5130 | 2.70k | R = Builder.createSExt(R, ExtTy); |
5131 | 3.23k | } else { |
5132 | 3.23k | R = Builder.createZExt(R, ExtTy); |
5133 | 3.23k | } |
5134 | 5.93k | R = Builder.createShuffleVector( |
5135 | 5.93k | R, LLVM::Value::getUndef(ExtTy), |
5136 | 5.93k | LLVM::Value::getConstVector32(LLContext, Mask)); |
5137 | 5.93k | Stack.back() = Builder.createBitCast(R, Context.Int64x2Ty); |
5138 | 5.93k | } |
5139 | 2.14k | void compileVectorExtMul(LLVM::Type FromTy, bool Signed, bool Low) noexcept { |
5140 | 2.14k | auto ExtTy = FromTy.getExtendedElementVectorType(); |
5141 | 2.14k | const auto Count = FromTy.getVectorSize(); |
5142 | 2.14k | std::vector<uint32_t> Mask(Count / 2); |
5143 | 2.14k | std::iota(Mask.begin(), Mask.end(), Low ? 0 : Count / 2); |
5144 | 4.28k | auto Extend = [this, FromTy, Signed, ExtTy, &Mask](LLVM::Value R) noexcept { |
5145 | 4.28k | R = Builder.createBitCast(R, FromTy); |
5146 | 4.28k | if (Signed) { |
5147 | 2.01k | R = Builder.createSExt(R, ExtTy); |
5148 | 2.26k | } else { |
5149 | 2.26k | R = Builder.createZExt(R, ExtTy); |
5150 | 2.26k | } |
5151 | 4.28k | return Builder.createShuffleVector( |
5152 | 4.28k | R, LLVM::Value::getUndef(ExtTy), |
5153 | 4.28k | LLVM::Value::getConstVector32(LLContext, Mask)); |
5154 | 4.28k | }; |
5155 | 2.14k | auto RHS = Extend(stackPop()); |
5156 | 2.14k | auto LHS = Extend(stackPop()); |
5157 | 2.14k | stackPush( |
5158 | 2.14k | Builder.createBitCast(Builder.createMul(RHS, LHS), Context.Int64x2Ty)); |
5159 | 2.14k | } |
5160 | 2.39k | void compileVectorExtAddPairwise(LLVM::Type VectorTy, bool Signed) noexcept { |
5161 | 2.39k | compileVectorOp( |
5162 | 2.39k | VectorTy, [this, VectorTy, Signed](auto V) noexcept -> LLVM::Value { |
5163 | 2.39k | auto ExtTy = VectorTy.getExtendedElementVectorType() |
5164 | 2.39k | .getHalfElementsVectorType(); |
5165 | 2.39k | #if defined(__x86_64__) |
5166 | 2.39k | const auto Count = VectorTy.getVectorSize(); |
5167 | 2.39k | if (Context.SupportXOP) { |
5168 | 0 | const auto ID = [Count, Signed]() noexcept { |
5169 | 0 | switch (Count) { |
5170 | 0 | case 8: |
5171 | 0 | return Signed ? LLVM::Core::X86XOpVPHAddWD |
5172 | 0 | : LLVM::Core::X86XOpVPHAddUWD; |
5173 | 0 | case 16: |
5174 | 0 | return Signed ? LLVM::Core::X86XOpVPHAddBW |
5175 | 0 | : LLVM::Core::X86XOpVPHAddUBW; |
5176 | 0 | default: |
5177 | 0 | assumingUnreachable(); |
5178 | 0 | } |
5179 | 0 | }(); |
5180 | 0 | assuming(ID != LLVM::Core::NotIntrinsic); |
5181 | 0 | return Builder.createUnaryIntrinsic(ID, V); |
5182 | 0 | } |
5183 | 2.39k | if (Context.SupportSSSE3 && Count == 16) { |
5184 | 643 | assuming(LLVM::Core::X86SSSE3PMAddUbSw128 != |
5185 | 643 | LLVM::Core::NotIntrinsic); |
5186 | 643 | if (Signed) { |
5187 | 316 | return Builder.createIntrinsic( |
5188 | 316 | LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5189 | 316 | {Builder.createVectorSplat(16, LLContext.getInt8(1)), V}); |
5190 | 327 | } else { |
5191 | 327 | return Builder.createIntrinsic( |
5192 | 327 | LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5193 | 327 | {V, Builder.createVectorSplat(16, LLContext.getInt8(1))}); |
5194 | 327 | } |
5195 | 643 | } |
5196 | 1.75k | if (Context.SupportSSE2 && Count == 8) { |
5197 | 1.75k | assuming(LLVM::Core::X86SSE2PMAddWd != LLVM::Core::NotIntrinsic); |
5198 | 1.75k | if (Signed) { |
5199 | 1.12k | return Builder.createIntrinsic( |
5200 | 1.12k | LLVM::Core::X86SSE2PMAddWd, {}, |
5201 | 1.12k | {V, Builder.createVectorSplat(8, LLContext.getInt16(1))}); |
5202 | 1.12k | } else { |
5203 | 628 | V = Builder.createXor( |
5204 | 628 | V, Builder.createVectorSplat(8, LLContext.getInt16(0x8000))); |
5205 | 628 | V = Builder.createIntrinsic( |
5206 | 628 | LLVM::Core::X86SSE2PMAddWd, {}, |
5207 | 628 | {V, Builder.createVectorSplat(8, LLContext.getInt16(1))}); |
5208 | 628 | return Builder.createAdd( |
5209 | 628 | V, Builder.createVectorSplat(4, LLContext.getInt32(0x10000))); |
5210 | 628 | } |
5211 | 1.75k | } |
5212 | 0 | #endif |
5213 | | |
5214 | | #if defined(__aarch64__) |
5215 | | if (Context.SupportNEON) { |
5216 | | const auto ID = Signed ? LLVM::Core::AArch64NeonSAddLP |
5217 | | : LLVM::Core::AArch64NeonUAddLP; |
5218 | | assuming(ID != LLVM::Core::NotIntrinsic); |
5219 | | return Builder.createIntrinsic(ID, {ExtTy, VectorTy}, {V}); |
5220 | | } |
5221 | | #endif |
5222 | | |
5223 | | // Fallback case. |
5224 | | // If XOP, SSSE3, or SSE2 is not supported on the x86_64 platform, or |
5225 | | // NEON is not supported on the aarch64 platform, fall back to this |
5226 | | // generic lowering. |
5227 | 0 | auto Width = LLVM::Value::getConstInt( |
5228 | 0 | ExtTy.getElementType(), |
5229 | 0 | VectorTy.getElementType().getIntegerBitWidth()); |
5230 | 0 | Width = Builder.createVectorSplat(ExtTy.getVectorSize(), Width); |
5231 | 0 | auto EV = Builder.createBitCast(V, ExtTy); |
5232 | 0 | LLVM::Value L, R; |
5233 | 0 | if (Signed) { |
5234 | 0 | L = Builder.createAShr(EV, Width); |
5235 | 0 | R = Builder.createAShr(Builder.createShl(EV, Width), Width); |
5236 | 0 | } else { |
5237 | 0 | L = Builder.createLShr(EV, Width); |
5238 | 0 | R = Builder.createLShr(Builder.createShl(EV, Width), Width); |
5239 | 0 | } |
5240 | 0 | return Builder.createAdd(L, R); |
5241 | 1.75k | }); |
5242 | 2.39k | } |
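// Scalar sketch (illustrative only, not part of compiler.cpp) of
// i32x4.extadd_pairwise_i16x8_u. The SSE2 branch above reaches the same
// result indirectly: it flips each u16 lane's sign bit (XOR 0x8000), runs the
// signed PMADDWD, then adds back the 2 * 0x8000 = 0x10000 bias per pair.
#include <array>
#include <cstdint>

static std::array<uint32_t, 4>
extAddPairwiseU16(const std::array<uint16_t, 8> &V) {
  std::array<uint32_t, 4> Out{};
  for (std::size_t I = 0; I < 4; ++I) {
    Out[I] = uint32_t{V[2 * I]} + uint32_t{V[2 * I + 1]}; // widen, then add pair
  }
  return Out;
}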
5243 | 545 | void compileVectorFAbs(LLVM::Type VectorTy) noexcept { |
5244 | 545 | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5245 | 545 | assuming(LLVM::Core::Fabs != LLVM::Core::NotIntrinsic); |
5246 | 545 | return Builder.createUnaryIntrinsic(LLVM::Core::Fabs, V); |
5247 | 545 | }); |
5248 | 545 | } |
5249 | 785 | void compileVectorFNeg(LLVM::Type VectorTy) noexcept { |
5250 | 785 | compileVectorOp(VectorTy, |
5251 | 785 | [this](auto V) noexcept { return Builder.createFNeg(V); }); |
5252 | 785 | } |
5253 | 342 | void compileVectorFSqrt(LLVM::Type VectorTy) noexcept { |
5254 | 342 | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5255 | 342 | assuming(LLVM::Core::Sqrt != LLVM::Core::NotIntrinsic); |
5256 | 342 | return Builder.createUnaryIntrinsic(LLVM::Core::Sqrt, V); |
5257 | 342 | }); |
5258 | 342 | } |
5259 | 1.35k | void compileVectorFCeil(LLVM::Type VectorTy) noexcept { |
5260 | 1.35k | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5261 | 1.35k | assuming(LLVM::Core::Ceil != LLVM::Core::NotIntrinsic); |
5262 | 1.35k | return Builder.createUnaryIntrinsic(LLVM::Core::Ceil, V); |
5263 | 1.35k | }); |
5264 | 1.35k | } |
5265 | 2.40k | void compileVectorFFloor(LLVM::Type VectorTy) noexcept { |
5266 | 2.40k | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5267 | 2.40k | assuming(LLVM::Core::Floor != LLVM::Core::NotIntrinsic); |
5268 | 2.40k | return Builder.createUnaryIntrinsic(LLVM::Core::Floor, V); |
5269 | 2.40k | }); |
5270 | 2.40k | } |
5271 | 1.73k | void compileVectorFTrunc(LLVM::Type VectorTy) noexcept { |
5272 | 1.73k | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5273 | 1.73k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
5274 | 1.73k | return Builder.createUnaryIntrinsic(LLVM::Core::Trunc, V); |
5275 | 1.73k | }); |
5276 | 1.73k | } |
5277 | 379 | void compileVectorFNearest(LLVM::Type VectorTy) noexcept { |
5278 | 379 | compileVectorOp(VectorTy, [&](auto V) noexcept { |
5279 | 379 | #if LLVM_VERSION_MAJOR >= 12 && !defined(__s390x__) |
5280 | 379 | assuming(LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic); |
5281 | 379 | if (LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic) { |
5282 | 379 | return Builder.createUnaryIntrinsic(LLVM::Core::Roundeven, V); |
5283 | 379 | } |
5284 | 0 | #endif |
5285 | | |
5286 | 0 | #if defined(__x86_64__) |
5287 | 0 | if (Context.SupportSSE4_1) { |
5288 | 0 | const bool IsFloat = VectorTy.getElementType().isFloatTy(); |
5289 | 0 | auto ID = |
5290 | 0 | IsFloat ? LLVM::Core::X86SSE41RoundPs : LLVM::Core::X86SSE41RoundPd; |
5291 | 0 | assuming(ID != LLVM::Core::NotIntrinsic); |
5292 | 0 | return Builder.createIntrinsic(ID, {}, {V, LLContext.getInt32(8)}); |
5293 | 0 | } |
5294 | 0 | #endif |
5295 | | |
5296 | | #if defined(__aarch64__) |
5297 | | if (Context.SupportNEON && |
5298 | | LLVM::Core::AArch64NeonFRIntN != LLVM::Core::NotIntrinsic) { |
5299 | | return Builder.createUnaryIntrinsic(LLVM::Core::AArch64NeonFRIntN, V); |
5300 | | } |
5301 | | #endif |
5302 | | |
5303 | | // Fallback case. |
5304 | | // If SSE4.1 is not supported on the x86_64 platform, or NEON is not |
5305 | | // supported on the aarch64 platform, fall back to this generic |
5306 | | // lowering. |
5307 | 0 | assuming(LLVM::Core::Nearbyint != LLVM::Core::NotIntrinsic); |
5308 | 0 | return Builder.createUnaryIntrinsic(LLVM::Core::Nearbyint, V); |
5309 | 0 | }); |
5310 | 379 | } |
5311 | 181 | void compileVectorVectorFAdd(LLVM::Type VectorTy) noexcept { |
5312 | 181 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5313 | 181 | return Builder.createFAdd(LHS, RHS); |
5314 | 181 | }); |
5315 | 181 | } |
5316 | 459 | void compileVectorVectorFSub(LLVM::Type VectorTy) noexcept { |
5317 | 459 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5318 | 459 | return Builder.createFSub(LHS, RHS); |
5319 | 459 | }); |
5320 | 459 | } |
5321 | 248 | void compileVectorVectorFMul(LLVM::Type VectorTy) noexcept { |
5322 | 248 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5323 | 248 | return Builder.createFMul(LHS, RHS); |
5324 | 248 | }); |
5325 | 248 | } |
5326 | 204 | void compileVectorVectorFDiv(LLVM::Type VectorTy) noexcept { |
5327 | 204 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5328 | 204 | return Builder.createFDiv(LHS, RHS); |
5329 | 204 | }); |
5330 | 204 | } |
5331 | 306 | void compileVectorVectorFMin(LLVM::Type VectorTy) noexcept { |
5332 | 306 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5333 | 306 | auto LNaN = Builder.createFCmpUNO(LHS, LHS); |
5334 | 306 | auto RNaN = Builder.createFCmpUNO(RHS, RHS); |
5335 | 306 | auto OLT = Builder.createFCmpOLT(LHS, RHS); |
5336 | 306 | auto OGT = Builder.createFCmpOGT(LHS, RHS); |
5337 | 306 | auto Ret = Builder.createBitCast( |
5338 | 306 | Builder.createOr(Builder.createBitCast(LHS, Context.Int64x2Ty), |
5339 | 306 | Builder.createBitCast(RHS, Context.Int64x2Ty)), |
5340 | 306 | LHS.getType()); |
5341 | 306 | Ret = Builder.createSelect(OGT, RHS, Ret); |
5342 | 306 | Ret = Builder.createSelect(OLT, LHS, Ret); |
5343 | 306 | Ret = Builder.createSelect(RNaN, RHS, Ret); |
5344 | 306 | Ret = Builder.createSelect(LNaN, LHS, Ret); |
5345 | 306 | return Ret; |
5346 | 306 | }); |
5347 | 306 | } |
5348 | 214 | void compileVectorVectorFMax(LLVM::Type VectorTy) noexcept { |
5349 | 214 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5350 | 214 | auto LNaN = Builder.createFCmpUNO(LHS, LHS); |
5351 | 214 | auto RNaN = Builder.createFCmpUNO(RHS, RHS); |
5352 | 214 | auto OLT = Builder.createFCmpOLT(LHS, RHS); |
5353 | 214 | auto OGT = Builder.createFCmpOGT(LHS, RHS); |
5354 | 214 | auto Ret = Builder.createBitCast( |
5355 | 214 | Builder.createAnd(Builder.createBitCast(LHS, Context.Int64x2Ty), |
5356 | 214 | Builder.createBitCast(RHS, Context.Int64x2Ty)), |
5357 | 214 | LHS.getType()); |
5358 | 214 | Ret = Builder.createSelect(OLT, RHS, Ret); |
5359 | 214 | Ret = Builder.createSelect(OGT, LHS, Ret); |
5360 | 214 | Ret = Builder.createSelect(RNaN, RHS, Ret); |
5361 | 214 | Ret = Builder.createSelect(LNaN, LHS, Ret); |
5362 | 214 | return Ret; |
5363 | 214 | }); |
5364 | 214 | } |
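// Scalar sketch (illustrative only, not part of compiler.cpp) of the Wasm
// fmin lane semantics the two lowerings above compute: NaN operands propagate,
// and -0.0 orders below +0.0. The bitwise OR (for min) / AND (for max) of the
// raw operand bits only matters for the +/-0 tie, since 0.0 | -0.0 keeps the
// sign bit while 0.0 & -0.0 clears it.
#include <cmath>

static double wasmFMin(double A, double B) {
  if (std::isnan(A)) return A; // matches the Select(LNaN, LHS, ...) above
  if (std::isnan(B)) return B;
  if (A == 0.0 && B == 0.0) {
    return std::signbit(A) ? A : B; // prefer -0.0 over +0.0
  }
  return A < B ? A : B;
}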
5365 | 385 | void compileVectorVectorFPMin(LLVM::Type VectorTy) noexcept { |
5366 | 385 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5367 | 385 | auto Cmp = Builder.createFCmpOLT(RHS, LHS); |
5368 | 385 | return Builder.createSelect(Cmp, RHS, LHS); |
5369 | 385 | }); |
5370 | 385 | } |
5371 | 331 | void compileVectorVectorFPMax(LLVM::Type VectorTy) noexcept { |
5372 | 331 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5373 | 331 | auto Cmp = Builder.createFCmpOGT(RHS, LHS); |
5374 | 331 | return Builder.createSelect(Cmp, RHS, LHS); |
5375 | 331 | }); |
5376 | 331 | } |
5377 | 974 | void compileVectorTruncSatS32(LLVM::Type VectorTy, bool PadZero) noexcept { |
5378 | 974 | compileVectorOp(VectorTy, [this, VectorTy, PadZero](auto V) noexcept { |
5379 | 974 | const auto Size = VectorTy.getVectorSize(); |
5380 | 974 | auto FPTy = VectorTy.getElementType(); |
5381 | 974 | auto IntMin = LLContext.getInt32( |
5382 | 974 | static_cast<uint32_t>(std::numeric_limits<int32_t>::min())); |
5383 | 974 | auto IntMax = LLContext.getInt32( |
5384 | 974 | static_cast<uint32_t>(std::numeric_limits<int32_t>::max())); |
5385 | 974 | auto IntMinV = Builder.createVectorSplat(Size, IntMin); |
5386 | 974 | auto IntMaxV = Builder.createVectorSplat(Size, IntMax); |
5387 | 974 | auto IntZeroV = LLVM::Value::getConstNull(IntMinV.getType()); |
5388 | 974 | auto FPMin = Builder.createSIToFP(IntMin, FPTy); |
5389 | 974 | auto FPMax = Builder.createSIToFP(IntMax, FPTy); |
5390 | 974 | auto FPMinV = Builder.createVectorSplat(Size, FPMin); |
5391 | 974 | auto FPMaxV = Builder.createVectorSplat(Size, FPMax); |
5392 | | |
5393 | 974 | auto Normal = Builder.createFCmpORD(V, V); |
5394 | 974 | auto NotUnder = Builder.createFCmpUGE(V, FPMinV); |
5395 | 974 | auto NotOver = Builder.createFCmpULT(V, FPMaxV); |
5396 | 974 | V = Builder.createFPToSI( |
5397 | 974 | V, LLVM::Type::getVectorType(LLContext.getInt32Ty(), Size)); |
5398 | 974 | V = Builder.createSelect(Normal, V, IntZeroV); |
5399 | 974 | V = Builder.createSelect(NotUnder, V, IntMinV); |
5400 | 974 | V = Builder.createSelect(NotOver, V, IntMaxV); |
5401 | 974 | if (PadZero) { |
5402 | 753 | std::vector<uint32_t> Mask(Size * 2); |
5403 | 753 | std::iota(Mask.begin(), Mask.end(), 0); |
5404 | 753 | if constexpr (Endian::native == Endian::little) { |
5405 | 753 | V = Builder.createShuffleVector( |
5406 | 753 | V, IntZeroV, LLVM::Value::getConstVector32(LLContext, Mask)); |
5407 | | } else { |
5408 | | V = Builder.createShuffleVector( |
5409 | | IntZeroV, V, LLVM::Value::getConstVector32(LLContext, Mask)); |
5410 | | } |
5411 | 753 | } |
5412 | 974 | return V; |
5413 | 974 | }); |
5414 | 974 | } |
5415 | 5.81k | void compileVectorTruncSatU32(LLVM::Type VectorTy, bool PadZero) noexcept { |
5416 | 5.81k | compileVectorOp(VectorTy, [this, VectorTy, PadZero](auto V) noexcept { |
5417 | 5.81k | const auto Size = VectorTy.getVectorSize(); |
5418 | 5.81k | auto FPTy = VectorTy.getElementType(); |
5419 | 5.81k | auto IntMin = LLContext.getInt32(std::numeric_limits<uint32_t>::min()); |
5420 | 5.81k | auto IntMax = LLContext.getInt32(std::numeric_limits<uint32_t>::max()); |
5421 | 5.81k | auto IntMinV = Builder.createVectorSplat(Size, IntMin); |
5422 | 5.81k | auto IntMaxV = Builder.createVectorSplat(Size, IntMax); |
5423 | 5.81k | auto FPMin = Builder.createUIToFP(IntMin, FPTy); |
5424 | 5.81k | auto FPMax = Builder.createUIToFP(IntMax, FPTy); |
5425 | 5.81k | auto FPMinV = Builder.createVectorSplat(Size, FPMin); |
5426 | 5.81k | auto FPMaxV = Builder.createVectorSplat(Size, FPMax); |
5427 | | |
5428 | 5.81k | auto NotUnder = Builder.createFCmpOGE(V, FPMinV); |
5429 | 5.81k | auto NotOver = Builder.createFCmpULT(V, FPMaxV); |
5430 | 5.81k | V = Builder.createFPToUI( |
5431 | 5.81k | V, LLVM::Type::getVectorType(LLContext.getInt32Ty(), Size)); |
5432 | 5.81k | V = Builder.createSelect(NotUnder, V, IntMinV); |
5433 | 5.81k | V = Builder.createSelect(NotOver, V, IntMaxV); |
5434 | 5.81k | if (PadZero) { |
5435 | 2.12k | auto IntZeroV = LLVM::Value::getConstNull(IntMinV.getType()); |
5436 | 2.12k | std::vector<uint32_t> Mask(Size * 2); |
5437 | 2.12k | std::iota(Mask.begin(), Mask.end(), 0); |
5438 | 2.12k | if constexpr (Endian::native == Endian::little) { |
5439 | 2.12k | V = Builder.createShuffleVector( |
5440 | 2.12k | V, IntZeroV, LLVM::Value::getConstVector32(LLContext, Mask)); |
5441 | | } else { |
5442 | | V = Builder.createShuffleVector( |
5443 | | IntZeroV, V, LLVM::Value::getConstVector32(LLContext, Mask)); |
5444 | | } |
5445 | 2.12k | } |
5446 | 5.81k | return V; |
5447 | 5.81k | }); |
5448 | 5.81k | } |
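// Scalar sketch (illustrative only, not part of compiler.cpp) of
// i32x4.trunc_sat_f32x4_s per lane, mirroring the Normal/NotUnder/NotOver
// selects above: NaN becomes 0, values below range clamp to INT32_MIN, and
// values at or above 2^31 clamp to INT32_MAX. The PadZero flag corresponds to
// the "_zero" variants with f64x2 input, which fill the upper two lanes with
// zero.
#include <cmath>
#include <cstdint>

static int32_t truncSatS32(float V) {
  if (std::isnan(V)) return 0;
  if (V < -2147483648.0f) return INT32_MIN;
  if (V >= 2147483648.0f) return INT32_MAX;
  return static_cast<int32_t>(V); // in range: plain truncation
}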
5449 | | void compileVectorConvertS(LLVM::Type VectorTy, LLVM::Type FPVectorTy, |
5450 | 688 | bool Low) noexcept { |
5451 | 688 | compileVectorOp(VectorTy, |
5452 | 688 | [this, VectorTy, FPVectorTy, Low](auto V) noexcept { |
5453 | 688 | if (Low) { |
5454 | 355 | const auto Size = VectorTy.getVectorSize() / 2; |
5455 | 355 | std::vector<uint32_t> Mask(Size); |
5456 | 355 | if constexpr (Endian::native == Endian::little) { |
5457 | 355 | std::iota(Mask.begin(), Mask.end(), 0); |
5458 | | } else { |
5459 | | std::iota(Mask.begin(), Mask.end(), Size); |
5460 | | } |
5461 | 355 | V = Builder.createShuffleVector( |
5462 | 355 | V, LLVM::Value::getUndef(VectorTy), |
5463 | 355 | LLVM::Value::getConstVector32(LLContext, Mask)); |
5464 | 355 | } |
5465 | 688 | return Builder.createSIToFP(V, FPVectorTy); |
5466 | 688 | }); |
5467 | 688 | } |
5468 | | void compileVectorConvertU(LLVM::Type VectorTy, LLVM::Type FPVectorTy, |
5469 | 2.02k | bool Low) noexcept { |
5470 | 2.02k | compileVectorOp(VectorTy, |
5471 | 2.02k | [this, VectorTy, FPVectorTy, Low](auto V) noexcept { |
5472 | 2.02k | if (Low) { |
5473 | 1.29k | const auto Size = VectorTy.getVectorSize() / 2; |
5474 | 1.29k | std::vector<uint32_t> Mask(Size); |
5475 | 1.29k | if constexpr (Endian::native == Endian::little) { |
5476 | 1.29k | std::iota(Mask.begin(), Mask.end(), 0); |
5477 | | } else { |
5478 | | std::iota(Mask.begin(), Mask.end(), Size); |
5479 | | } |
5480 | 1.29k | V = Builder.createShuffleVector( |
5481 | 1.29k | V, LLVM::Value::getUndef(VectorTy), |
5482 | 1.29k | LLVM::Value::getConstVector32(LLContext, Mask)); |
5483 | 1.29k | } |
5484 | 2.02k | return Builder.createUIToFP(V, FPVectorTy); |
5485 | 2.02k | }); |
5486 | 2.02k | } |
5487 | 595 | void compileVectorDemote() noexcept { |
5488 | 595 | compileVectorOp(Context.Doublex2Ty, [this](auto V) noexcept { |
5489 | 595 | auto Demoted = Builder.createFPTrunc( |
5490 | 595 | V, LLVM::Type::getVectorType(Context.FloatTy, 2)); |
5491 | 595 | auto ZeroV = LLVM::Value::getConstNull(Demoted.getType()); |
5492 | 595 | if constexpr (Endian::native == Endian::little) { |
5493 | 595 | return Builder.createShuffleVector( |
5494 | 595 | Demoted, ZeroV, |
5495 | 595 | LLVM::Value::getConstVector32(LLContext, {0u, 1u, 2u, 3u})); |
5496 | | } else { |
5497 | | return Builder.createShuffleVector( |
5498 | | Demoted, ZeroV, |
5499 | | LLVM::Value::getConstVector32(LLContext, {3u, 2u, 1u, 0u})); |
5500 | | } |
5501 | 595 | }); |
5502 | 595 | } |
5503 | 625 | void compileVectorPromote() noexcept { |
5504 | 625 | compileVectorOp(Context.Floatx4Ty, [this](auto V) noexcept { |
5505 | 625 | auto UndefV = LLVM::Value::getUndef(V.getType()); |
5506 | 625 | auto Low = Builder.createShuffleVector( |
5507 | 625 | V, UndefV, LLVM::Value::getConstVector32(LLContext, {0u, 1u})); |
5508 | 625 | return Builder.createFPExt( |
5509 | 625 | Low, LLVM::Type::getVectorType(Context.DoubleTy, 2)); |
5510 | 625 | }); |
5511 | 625 | } |
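// Scalar sketch (illustrative only, not part of compiler.cpp) of the two
// conversions above: f32x4.demote_f64x2_zero keeps the two converted lanes and
// zero-fills the rest, while f64x2.promote_low_f32x4 widens only the low two
// lanes. Helper names are assumptions.
#include <array>

static std::array<float, 4> demoteZero(const std::array<double, 2> &V) {
  return {static_cast<float>(V[0]), static_cast<float>(V[1]), 0.0F, 0.0F};
}

static std::array<double, 2> promoteLow(const std::array<float, 4> &V) {
  return {static_cast<double>(V[0]), static_cast<double>(V[1])};
}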
5512 | | |
5513 | 17 | void compileVectorVectorMAdd(LLVM::Type VectorTy) noexcept { |
5514 | 17 | auto C = Builder.createBitCast(stackPop(), VectorTy); |
5515 | 17 | auto RHS = Builder.createBitCast(stackPop(), VectorTy); |
5516 | 17 | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
5517 | 17 | stackPush(Builder.createBitCast( |
5518 | 17 | Builder.createFAdd(Builder.createFMul(LHS, RHS), C), |
5519 | 17 | Context.Int64x2Ty)); |
5520 | 17 | } |
5521 | | |
5522 | 24 | void compileVectorVectorNMAdd(LLVM::Type VectorTy) noexcept { |
5523 | 24 | auto C = Builder.createBitCast(stackPop(), VectorTy); |
5524 | 24 | auto RHS = Builder.createBitCast(stackPop(), VectorTy); |
5525 | 24 | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
5526 | 24 | stackPush(Builder.createBitCast( |
5527 | 24 | Builder.createFAdd(Builder.createFMul(Builder.createFNeg(LHS), RHS), C), |
5528 | 24 | Context.Int64x2Ty)); |
5529 | 24 | } |
5530 | | |
5531 | 10 | void compileVectorRelaxedIntegerDotProduct() noexcept { |
5532 | 10 | auto OriTy = Context.Int8x16Ty; |
5533 | 10 | auto ExtTy = Context.Int16x8Ty; |
5534 | 10 | auto RHS = Builder.createBitCast(stackPop(), OriTy); |
5535 | 10 | auto LHS = Builder.createBitCast(stackPop(), OriTy); |
5536 | 10 | #if defined(__x86_64__) |
5537 | 10 | if (Context.SupportSSSE3) { |
5538 | 10 | assuming(LLVM::Core::X86SSSE3PMAddUbSw128 != LLVM::Core::NotIntrinsic); |
5539 | | // The WebAssembly Relaxed SIMD spec expects signed(LHS) * unsigned/signed(RHS), |
5540 | | // but PMAddUbSw128 computes unsigned(LHS) * signed(RHS), so swap both |
5541 | | // sides to match the WebAssembly spec. |
5542 | 10 | return stackPush(Builder.createBitCast( |
5543 | 10 | Builder.createIntrinsic(LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5544 | 10 | {RHS, LHS}), |
5545 | 10 | Context.Int64x2Ty)); |
5546 | 10 | } |
5547 | 0 | #endif |
5548 | 0 | auto Width = LLVM::Value::getConstInt( |
5549 | 0 | ExtTy.getElementType(), OriTy.getElementType().getIntegerBitWidth()); |
5550 | 0 | Width = Builder.createVectorSplat(ExtTy.getVectorSize(), Width); |
5551 | 0 | auto EA = Builder.createBitCast(LHS, ExtTy); |
5552 | 0 | auto EB = Builder.createBitCast(RHS, ExtTy); |
5553 | 0 |
5554 | 0 | LLVM::Value AL, AR, BL, BR; |
5555 | 0 | AL = Builder.createAShr(EA, Width); |
5556 | 0 | AR = Builder.createAShr(Builder.createShl(EA, Width), Width); |
5557 | 0 | BL = Builder.createAShr(EB, Width); |
5558 | 0 | BR = Builder.createAShr(Builder.createShl(EB, Width), Width); |
5559 | 0 |
5560 | 0 | return stackPush(Builder.createBitCast( |
5561 | 0 | Builder.createAdd(Builder.createMul(AL, BL), Builder.createMul(AR, BR)), |
5562 | 0 | Context.Int64x2Ty)); |
5563 | 10 | } |
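// Scalar sketch (illustrative only, not part of compiler.cpp) of
// i16x8.relaxed_dot_i8x16_i7x16_s with both inputs treated as signed, which is
// what the non-SSSE3 fallback above computes. The SSSE3 path may saturate each
// i16 sum instead, a difference the "relaxed" spec permits; the operand swap
// above exists because PMADDUBSW treats its first operand as unsigned.
#include <array>
#include <cstdint>

static std::array<int16_t, 8>
relaxedDotI8(const std::array<int8_t, 16> &L, const std::array<int8_t, 16> &R) {
  std::array<int16_t, 8> Out{};
  for (std::size_t I = 0; I < 8; ++I) {
    Out[I] = static_cast<int16_t>(int32_t{L[2 * I]} * int32_t{R[2 * I]} +
                                  int32_t{L[2 * I + 1]} * int32_t{R[2 * I + 1]});
  }
  return Out;
}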
5564 | | |
5565 | 11 | void compileVectorRelaxedIntegerDotProductAdd() noexcept { |
5566 | 11 | auto OriTy = Context.Int8x16Ty; |
5567 | 11 | auto ExtTy = Context.Int16x8Ty; |
5568 | 11 | auto FinTy = Context.Int32x4Ty; |
5569 | 11 | auto VC = Builder.createBitCast(stackPop(), FinTy); |
5570 | 11 | auto RHS = Builder.createBitCast(stackPop(), OriTy); |
5571 | 11 | auto LHS = Builder.createBitCast(stackPop(), OriTy); |
5572 | 11 | LLVM::Value IM; |
5573 | 11 | #if defined(__x86_64__) |
5574 | 11 | if (Context.SupportSSSE3) { |
5575 | 11 | assuming(LLVM::Core::X86SSSE3PMAddUbSw128 != LLVM::Core::NotIntrinsic); |
5576 | | // The WebAssembly Relaxed SIMD spec expects signed(LHS) * unsigned/signed(RHS), |
5577 | | // but PMAddUbSw128 computes unsigned(LHS) * signed(RHS), so swap both |
5578 | | // sides to match the WebAssembly spec. |
5579 | 11 | IM = Builder.createIntrinsic(LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5580 | 11 | {RHS, LHS}); |
5581 | 11 | } else |
5582 | 0 | #endif |
5583 | 0 | { |
5584 | 0 | auto Width = LLVM::Value::getConstInt( |
5585 | 0 | ExtTy.getElementType(), OriTy.getElementType().getIntegerBitWidth()); |
5586 | 0 | Width = Builder.createVectorSplat(ExtTy.getVectorSize(), Width); |
5587 | 0 | auto EA = Builder.createBitCast(LHS, ExtTy); |
5588 | 0 | auto EB = Builder.createBitCast(RHS, ExtTy); |
5589 | 0 |
5590 | 0 | LLVM::Value AL, AR, BL, BR; |
5591 | 0 | AL = Builder.createAShr(EA, Width); |
5592 | 0 | AR = Builder.createAShr(Builder.createShl(EA, Width), Width); |
5593 | 0 | BL = Builder.createAShr(EB, Width); |
5594 | 0 | BR = Builder.createAShr(Builder.createShl(EB, Width), Width); |
5595 | 0 | IM = Builder.createAdd(Builder.createMul(AL, BL), |
5596 | 0 | Builder.createMul(AR, BR)); |
5597 | 0 | } |
5598 | | |
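 |  |     // Widen the i16x8 intermediate products to i32x4 with the same shift |
 |  |     // trick, add the adjacent halves, and accumulate into the i32x4 addend VC. |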
5599 | 11 | auto Width = LLVM::Value::getConstInt( |
5600 | 11 | FinTy.getElementType(), ExtTy.getElementType().getIntegerBitWidth()); |
5601 | 11 | Width = Builder.createVectorSplat(FinTy.getVectorSize(), Width); |
5602 | 11 | auto IME = Builder.createBitCast(IM, FinTy); |
5603 | 11 | auto L = Builder.createAShr(IME, Width); |
5604 | 11 | auto R = Builder.createAShr(Builder.createShl(IME, Width), Width); |
5605 | | |
5606 | 11 | return stackPush(Builder.createBitCast( |
5607 | 11 | Builder.createAdd(Builder.createAdd(L, R), VC), Context.Int64x2Ty)); |
5608 | 11 | } |
5609 | | |
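 |  |   // Open a structured control frame: push the block parameters onto the value |
 |  |   // stack and record the current stack height, jump targets, block type, and |
 |  |   // pending result PHIs on the control stack. |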
5610 | | void |
5611 | | enterBlock(LLVM::BasicBlock JumpBlock, LLVM::BasicBlock NextBlock, |
5612 | | LLVM::BasicBlock ElseBlock, std::vector<LLVM::Value> Args, |
5613 | | std::pair<std::vector<ValType>, std::vector<ValType>> Type, |
5614 | | std::vector<std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5615 | 22.1k | ReturnPHI = {}) noexcept { |
5616 | 22.1k | assuming(Type.first.size() == Args.size()); |
5617 | 22.1k | for (auto &Value : Args) { |
5618 | 4.46k | stackPush(Value); |
5619 | 4.46k | } |
5620 | 22.1k | const auto Unreachable = isUnreachable(); |
5621 | 22.1k | ControlStack.emplace_back(Stack.size() - Args.size(), Unreachable, |
5622 | 22.1k | JumpBlock, NextBlock, ElseBlock, std::move(Args), |
5623 | 22.1k | std::move(Type), std::move(ReturnPHI)); |
5624 | 22.1k | } |
5625 | | |
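 |  |   // Close the innermost control frame: unless it is unreachable, pop its |
 |  |   // results as an incoming edge for the merge block and branch there, then |
 |  |   // trim the value stack back to the recorded height. |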
5626 | 22.1k | Control leaveBlock() noexcept { |
5627 | 22.1k | Control Entry = std::move(ControlStack.back()); |
5628 | 22.1k | ControlStack.pop_back(); |
5629 | | |
5630 | 22.1k | auto NextBlock = Entry.NextBlock ? Entry.NextBlock : Entry.JumpBlock; |
5631 | 22.1k | if (!Entry.Unreachable) { |
5632 | 13.9k | const auto &ReturnType = Entry.Type.second; |
5633 | 13.9k | if (!ReturnType.empty()) { |
5634 | 10.6k | std::vector<LLVM::Value> Rets(ReturnType.size()); |
5635 | 21.6k | for (size_t I = 0; I < Rets.size(); ++I) { |
5636 | 11.0k | const size_t J = Rets.size() - 1 - I; |
5637 | 11.0k | Rets[J] = stackPop(); |
5638 | 11.0k | } |
5639 | 10.6k | Entry.ReturnPHI.emplace_back(std::move(Rets), Builder.getInsertBlock()); |
5640 | 10.6k | } |
5641 | 13.9k | Builder.createBr(NextBlock); |
5642 | 13.9k | } else { |
5643 | 8.24k | Builder.createUnreachable(); |
5644 | 8.24k | } |
5645 | 22.1k | Builder.positionAtEnd(NextBlock); |
5646 | 22.1k | Stack.erase(Stack.begin() + static_cast<int64_t>(Entry.StackSize), |
5647 | 22.1k | Stack.end()); |
5648 | 22.1k | return Entry; |
5649 | 22.1k | } |
5650 | | |
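 |  |   // For interruptible builds: atomically exchange the stop token with 0; a |
 |  |   // non-zero previous value means an interrupt was requested, so branch to |
 |  |   // the Interrupted trap block instead of continuing. |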
5651 | 5.18k | void checkStop() noexcept { |
5652 | 5.18k | if (!Interruptible) { |
5653 | 5.18k | return; |
5654 | 5.18k | } |
5655 | 0 | auto NotStopBB = LLVM::BasicBlock::create(LLContext, F.Fn, "NotStop"); |
5656 | 0 | auto StopToken = Builder.createAtomicRMW( |
5657 | 0 | LLVMAtomicRMWBinOpXchg, Context.getStopToken(Builder, ExecCtx), |
5658 | 0 | LLContext.getInt32(0), LLVMAtomicOrderingMonotonic); |
5659 | | #if LLVM_VERSION_MAJOR >= 13 |
5660 | | StopToken.setAlignment(32); |
5661 | | #endif |
5662 | 0 | auto NotStop = Builder.createLikely( |
5663 | 0 | Builder.createICmpEQ(StopToken, LLContext.getInt32(0))); |
5664 | 0 | Builder.createCondBr(NotStop, NotStopBB, |
5665 | 0 | getTrapBB(ErrCode::Value::Interrupted)); |
5666 |  | |
5667 | 0 | Builder.positionAtEnd(NotStopBB); |
5668 | 0 | } |
5669 | | |
5670 | 5.99k | void setUnreachable() noexcept { |
5671 | 5.99k | if (ControlStack.empty()) { |
5672 | 0 | IsUnreachable = true; |
5673 | 5.99k | } else { |
5674 | 5.99k | ControlStack.back().Unreachable = true; |
5675 | 5.99k | } |
5676 | 5.99k | } |
5677 | | |
5678 | 1.58M | bool isUnreachable() const noexcept { |
5679 | 1.58M | if (ControlStack.empty()) { |
5680 | 11.4k | return IsUnreachable; |
5681 | 1.57M | } else { |
5682 | 1.57M | return ControlStack.back().Unreachable; |
5683 | 1.57M | } |
5684 | 1.58M | } |
5685 | | |
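 |  |   // Merge block results across incoming edges: no incomings yields undef |
 |  |   // placeholders, a single incoming is forwarded as-is, and multiple incomings |
 |  |   // become PHI nodes; the merged values are pushed onto the value stack. |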
5686 | | void |
5687 | | buildPHI(Span<const ValType> RetType, |
5688 | | Span<const std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5689 | 19.4k | Incomings) noexcept { |
5690 | 19.4k | if (isVoidReturn(RetType)) { |
5691 | 5.86k | return; |
5692 | 5.86k | } |
5693 | 13.5k | std::vector<LLVM::Value> Nodes; |
5694 | 13.5k | if (Incomings.size() == 0) { |
5695 | 2.81k | const auto &Types = toLLVMTypeVector(LLContext, RetType); |
5696 | 2.81k | Nodes.reserve(Types.size()); |
5697 | 3.17k | for (LLVM::Type Type : Types) { |
5698 | 3.17k | Nodes.push_back(LLVM::Value::getUndef(Type)); |
5699 | 3.17k | } |
5700 | 10.7k | } else if (Incomings.size() == 1) { |
5701 | 9.53k | Nodes = std::move(std::get<0>(Incomings.front())); |
5702 | 9.53k | } else { |
5703 | 1.20k | const auto &Types = toLLVMTypeVector(LLContext, RetType); |
5704 | 1.20k | Nodes.reserve(Types.size()); |
5705 | 2.53k | for (size_t I = 0; I < Types.size(); ++I) { |
5706 | 1.32k | auto PHIRet = Builder.createPHI(Types[I]); |
5707 | 3.44k | for (auto &[Value, BB] : Incomings) { |
5708 | 3.44k | assuming(Value.size() == Types.size()); |
5709 | 3.44k | PHIRet.addIncoming(Value[I], BB); |
5710 | 3.44k | } |
5711 | 1.32k | Nodes.push_back(PHIRet); |
5712 | 1.32k | } |
5713 | 1.20k | } |
5714 | 14.3k | for (auto &Val : Nodes) { |
5715 | 14.3k | stackPush(Val); |
5716 | 14.3k | } |
5717 | 13.5k | } |
5718 | | |
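 |  |   // Wire the branch operands for a jump to the label at depth Index: loop |
 |  |   // targets feed their parameter PHIs (Entry.Args), block targets record an |
 |  |   // incoming (values, predecessor) pair in ReturnPHI; the operands are pushed |
 |  |   // back so the value stack is left unchanged. |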
5719 | 37.6k | void setLableJumpPHI(unsigned int Index) noexcept { |
5720 | 37.6k | assuming(Index < ControlStack.size()); |
5721 | 37.6k | auto &Entry = *(ControlStack.rbegin() + Index); |
5722 | 37.6k | if (Entry.NextBlock) { // is loop |
5723 | 2.14k | std::vector<LLVM::Value> Args(Entry.Type.first.size()); |
5724 | 3.93k | for (size_t I = 0; I < Args.size(); ++I) { |
5725 | 1.79k | const size_t J = Args.size() - 1 - I; |
5726 | 1.79k | Args[J] = stackPop(); |
5727 | 1.79k | } |
5728 | 3.93k | for (size_t I = 0; I < Args.size(); ++I) { |
5729 | 1.79k | Entry.Args[I].addIncoming(Args[I], Builder.getInsertBlock()); |
5730 | 1.79k | stackPush(Args[I]); |
5731 | 1.79k | } |
5732 | 35.5k | } else if (!Entry.Type.second.empty()) { // has return value |
5733 | 2.10k | std::vector<LLVM::Value> Rets(Entry.Type.second.size()); |
5734 | 4.36k | for (size_t I = 0; I < Rets.size(); ++I) { |
5735 | 2.25k | const size_t J = Rets.size() - 1 - I; |
5736 | 2.25k | Rets[J] = stackPop(); |
5737 | 2.25k | } |
5738 | 4.36k | for (size_t I = 0; I < Rets.size(); ++I) { |
5739 | 2.25k | stackPush(Rets[I]); |
5740 | 2.25k | } |
5741 | 2.10k | Entry.ReturnPHI.emplace_back(std::move(Rets), Builder.getInsertBlock()); |
5742 | 2.10k | } |
5743 | 37.6k | } |
5744 | | |
5745 | 37.6k | LLVM::BasicBlock getLabel(unsigned int Index) const noexcept { |
5746 | 37.6k | return (ControlStack.rbegin() + Index)->JumpBlock; |
5747 | 37.6k | } |
5748 | | |
5749 | 958k | void stackPush(LLVM::Value Value) noexcept { Stack.push_back(Value); } |
5750 | 364k | LLVM::Value stackPop() noexcept { |
5751 | 364k | assuming(!ControlStack.empty() || !Stack.empty()); |
5752 | 364k | assuming(ControlStack.empty() || |
5753 | 364k | Stack.size() > ControlStack.back().StackSize); |
5754 | 364k | auto Value = Stack.back(); |
5755 | 364k | Stack.pop_back(); |
5756 | 364k | return Value; |
5757 | 364k | } |
5758 | | |
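 |  |   // Byte-swap a value on big-endian hosts to preserve the little-endian byte |
 |  |   // layout required by WASM, routing integers, vectors, and floats through |
 |  |   // the bswap intrinsic via bitcasts; a no-op on little-endian hosts. |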
5759 | 22.9k | LLVM::Value switchEndian(LLVM::Value Value) { |
5760 | | if constexpr (Endian::native == Endian::big) { |
5761 | | auto Type = Value.getType(); |
5762 | | if ((Type.isIntegerTy() && Type.getIntegerBitWidth() > 8) || |
5763 | | (Type.isVectorTy() && Type.getVectorSize() == 1)) { |
5764 | | return Builder.createUnaryIntrinsic(LLVM::Core::Bswap, Value); |
5765 | | } |
5766 | | if (Type.isVectorTy()) { |
5767 | | LLVM::Type VecType = Type.getElementType().getIntegerBitWidth() == 128 |
5768 | | ? Context.Int128Ty |
5769 | | : Context.Int64Ty; |
5770 | | Value = Builder.createBitCast(Value, VecType); |
5771 | | Value = Builder.createUnaryIntrinsic(LLVM::Core::Bswap, Value); |
5772 | | return Builder.createBitCast(Value, Type); |
5773 | | } |
5774 | | if (Type.isFloatTy() || Type.isDoubleTy()) { |
5775 | | LLVM::Type IntType = |
5776 | | Type.isFloatTy() ? Context.Int32Ty : Context.Int64Ty; |
5777 | | Value = Builder.createBitCast(Value, IntType); |
5778 | | Value = Builder.createUnaryIntrinsic(LLVM::Core::Bswap, Value); |
5779 | | return Builder.createBitCast(Value, Type); |
5780 | | } |
5781 | | } |
5782 | 22.9k | return Value; |
5783 | 22.9k | } |
5784 | | |
5785 | | LLVM::Compiler::CompileContext &Context; |
5786 | | LLVM::Context LLContext; |
5787 | | std::vector<std::pair<LLVM::Type, LLVM::Value>> Local; |
5788 | | std::vector<LLVM::Value> Stack; |
5789 | | LLVM::Value LocalInstrCount = nullptr; |
5790 | | LLVM::Value LocalGas = nullptr; |
5791 | | std::unordered_map<ErrCode::Value, LLVM::BasicBlock> TrapBB; |
5792 | | bool IsUnreachable = false; |
5793 | | bool Interruptible = false; |
5794 | | struct Control { |
5795 | | size_t StackSize; |
5796 | | bool Unreachable; |
5797 | | LLVM::BasicBlock JumpBlock; |
5798 | | LLVM::BasicBlock NextBlock; |
5799 | | LLVM::BasicBlock ElseBlock; |
5800 | | std::vector<LLVM::Value> Args; |
5801 | | std::pair<std::vector<ValType>, std::vector<ValType>> Type; |
5802 | | std::vector<std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5803 | | ReturnPHI; |
5804 | | Control(size_t S, bool U, LLVM::BasicBlock J, LLVM::BasicBlock N, |
5805 | | LLVM::BasicBlock E, std::vector<LLVM::Value> A, |
5806 | | std::pair<std::vector<ValType>, std::vector<ValType>> T, |
5807 | | std::vector<std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5808 | | R) noexcept |
5809 | 22.1k | : StackSize(S), Unreachable(U), JumpBlock(J), NextBlock(N), |
5810 | 22.1k | ElseBlock(E), Args(std::move(A)), Type(std::move(T)), |
5811 | 22.1k | ReturnPHI(std::move(R)) {} |
5812 | | Control(const Control &) = default; |
5813 | 27.3k | Control(Control &&) = default; |
5814 | | Control &operator=(const Control &) = default; |
5815 | 1.01k | Control &operator=(Control &&) = default; |
5816 | | }; |
5817 | | std::vector<Control> ControlStack; |
5818 | | LLVM::FunctionCallee F; |
5819 | | LLVM::Value ExecCtx; |
5820 | | LLVM::Builder Builder; |
5821 | | }; |
5822 | | |
5823 | | std::vector<LLVM::Value> unpackStruct(LLVM::Builder &Builder, |
5824 | 449 | LLVM::Value Struct) noexcept { |
5825 | 449 | const auto N = Struct.getType().getStructNumElements(); |
5826 | 449 | std::vector<LLVM::Value> Ret; |
5827 | 449 | Ret.reserve(N); |
5828 | 1.61k | for (unsigned I = 0; I < N; ++I) { |
5829 | 1.16k | Ret.push_back(Builder.createExtractValue(Struct, I)); |
5830 | 1.16k | } |
5831 | 449 | return Ret; |
5832 | 449 | } |
5833 | | |
5834 | | } // namespace |
5835 | | |
5836 | | namespace WasmEdge { |
5837 | | namespace LLVM { |
5838 | | |
5839 | 2.30k | Expect<void> Compiler::checkConfigure() noexcept { |
5840 |  |   // Note: Although the exception handling and memory64 proposals are not |
5841 |  |   // implemented in AOT yet, we should not trap here because the default |
5842 |  |   // configuration is now WASM 3.0, which contains these proposals. |
5843 | 2.30k | if (Conf.hasProposal(Proposal::ExceptionHandling)) { |
5844 | 2.30k |     spdlog::warn("Proposal Exception Handling is not yet supported in WasmEdge " |
5845 | 2.30k |                  "AOT/JIT. The compilation will trap when related data " |
5846 | 2.30k |                  "structures or instructions are found in WASM."); |
5847 | 2.30k | } |
5848 | 2.30k | if (Conf.hasProposal(Proposal::Memory64)) { |
5849 | 0 |     spdlog::warn("Proposal Memory64 is not yet supported in WasmEdge AOT/JIT. " |
5850 | 0 |                  "The compilation will trap when related data " |
5851 | 0 |                  "structures or instructions are found in WASM."); |
5852 | 0 | } |
5853 | 2.30k | if (Conf.hasProposal(Proposal::Annotations)) { |
5854 | 0 | spdlog::error(ErrCode::Value::InvalidAOTConfigure); |
5855 | 0 | spdlog::error(" Proposal Custom Annotation Syntax is not yet supported " |
5856 | 0 | "in WasmEdge AOT/JIT."); |
5857 | 0 | return Unexpect(ErrCode::Value::InvalidAOTConfigure); |
5858 | 0 | } |
5859 | 2.30k | return {}; |
5860 | 2.30k | } |
5861 | | |
5862 | 2.30k | Expect<Data> Compiler::compile(const AST::Module &Module) noexcept { |
5863 | | // Check the module is validated. |
5864 | 2.30k | if (unlikely(!Module.getIsValidated())) { |
5865 | 0 | spdlog::error(ErrCode::Value::NotValidated); |
5866 | 0 | return Unexpect(ErrCode::Value::NotValidated); |
5867 | 0 | } |
5868 | | |
5869 | 2.30k | std::unique_lock Lock(Mutex); |
5870 | 2.30k | spdlog::info("compile start"sv); |
5871 | | |
5872 | 2.30k | LLVM::Core::init(); |
5873 | | |
5874 | 2.30k | LLVM::Data D; |
5875 | 2.30k | auto LLContext = D.extract().getLLContext(); |
5876 | 2.30k | auto &LLModule = D.extract().LLModule; |
5877 | 2.30k | LLModule.setTarget(LLVM::getDefaultTargetTriple().unwrap()); |
5878 | 2.30k | LLModule.addFlag(LLVMModuleFlagBehaviorError, "PIC Level"sv, 2); |
5879 | | |
5880 | 2.30k | CompileContext NewContext(LLContext, LLModule, |
5881 | 2.30k | Conf.getCompilerConfigure().isGenericBinary()); |
5882 | 2.30k | struct RAIICleanup { |
5883 | 2.30k | RAIICleanup(CompileContext *&Context, CompileContext &NewContext) |
5884 | 2.30k | : Context(Context) { |
5885 | 2.30k | Context = &NewContext; |
5886 | 2.30k | } |
5887 | 2.30k | ~RAIICleanup() { Context = nullptr; } |
5888 | 2.30k | CompileContext *&Context; |
5889 | 2.30k | }; |
5890 | 2.30k | RAIICleanup Cleanup(Context, NewContext); |
5891 | | |
5892 | | // Compile Function Types |
5893 | 2.30k | compile(Module.getTypeSection()); |
5894 | | // Compile ImportSection |
5895 | 2.30k | compile(Module.getImportSection()); |
5896 | | // Compile GlobalSection |
5897 | 2.30k | compile(Module.getGlobalSection()); |
5898 | | // Compile MemorySection (MemorySec, DataSec) |
5899 | 2.30k | compile(Module.getMemorySection(), Module.getDataSection()); |
5900 | | // Compile TableSection (TableSec, ElemSec) |
5901 | 2.30k | compile(Module.getTableSection(), Module.getElementSection()); |
5902 |  |   // Compile Functions in module. (FunctionSec, CodeSec) |
5903 | 2.30k | EXPECTED_TRY(compile(Module.getFunctionSection(), Module.getCodeSection())); |
5904 | | // Compile ExportSection |
5905 | 2.29k | compile(Module.getExportSection()); |
5906 |  |   // The StartSection does not need to be compiled. |
5907 | | |
5908 | 2.29k | spdlog::info("verify start"sv); |
5909 | 2.29k | LLModule.verify(LLVMPrintMessageAction); |
5910 | | |
5911 | 2.29k | spdlog::info("optimize start"sv); |
5912 | 2.29k | auto &TM = D.extract().TM; |
5913 | 2.29k | { |
5914 | 2.29k | auto Triple = LLModule.getTarget(); |
5915 | 2.29k | auto [TheTarget, ErrorMessage] = LLVM::Target::getFromTriple(Triple); |
5916 | 2.29k | if (ErrorMessage) { |
5917 | 0 | spdlog::error("getFromTriple failed:{}"sv, ErrorMessage.string_view()); |
5918 | 0 | return Unexpect(ErrCode::Value::IllegalPath); |
5919 | 2.29k | } else { |
5920 | 2.29k | std::string CPUName; |
5921 | | #if defined(__riscv) && __riscv_xlen == 64 |
5922 | | CPUName = "generic-rv64"s; |
5923 | | #else |
5924 | 2.29k | if (!Conf.getCompilerConfigure().isGenericBinary()) { |
5925 | 2.29k | CPUName = LLVM::getHostCPUName().string_view(); |
5926 | 2.29k | } else { |
5927 | 0 | CPUName = "generic"s; |
5928 | 0 | } |
5929 | 2.29k | #endif |
5930 | | |
5931 | 2.29k | TM = LLVM::TargetMachine::create( |
5932 | 2.29k | TheTarget, Triple, CPUName.c_str(), |
5933 | 2.29k | LLVM::getHostCPUFeatures().unwrap(), |
5934 | 2.29k | toLLVMCodeGenLevel( |
5935 | 2.29k | Conf.getCompilerConfigure().getOptimizationLevel()), |
5936 | 2.29k | LLVMRelocPIC, LLVMCodeModelDefault); |
5937 | 2.29k | } |
5938 | | |
5939 | | #if LLVM_VERSION_MAJOR >= 13 |
5940 | | auto PBO = LLVM::PassBuilderOptions::create(); |
5941 | | if (auto Error = PBO.runPasses( |
5942 | | LLModule, |
5943 | | toLLVMLevel(Conf.getCompilerConfigure().getOptimizationLevel()), |
5944 | | TM)) { |
5945 | | spdlog::error("{}"sv, Error.message().string_view()); |
5946 | | } |
5947 | | #else |
5948 | 2.29k | auto FP = LLVM::PassManager::createForModule(LLModule); |
5949 | 2.29k | auto MP = LLVM::PassManager::create(); |
5950 | | |
5951 | 2.29k | TM.addAnalysisPasses(MP); |
5952 | 2.29k | TM.addAnalysisPasses(FP); |
5953 | 2.29k | { |
5954 | 2.29k | auto PMB = LLVM::PassManagerBuilder::create(); |
5955 | 2.29k | auto [OptLevel, SizeLevel] = |
5956 | 2.29k | toLLVMLevel(Conf.getCompilerConfigure().getOptimizationLevel()); |
5957 | 2.29k | PMB.setOptLevel(OptLevel); |
5958 | 2.29k | PMB.setSizeLevel(SizeLevel); |
5959 | 2.29k | PMB.populateFunctionPassManager(FP); |
5960 | 2.29k | PMB.populateModulePassManager(MP); |
5961 | 2.29k | } |
5962 | 2.29k | switch (Conf.getCompilerConfigure().getOptimizationLevel()) { |
5963 | 0 | case CompilerConfigure::OptimizationLevel::O0: |
5964 | 0 | case CompilerConfigure::OptimizationLevel::O1: |
5965 | 0 | FP.addTailCallEliminationPass(); |
5966 | 0 | break; |
5967 | 2.29k | default: |
5968 | 2.29k | break; |
5969 | 2.29k | } |
5970 | | |
5971 | 2.29k | FP.initializeFunctionPassManager(); |
5972 | 25.3k | for (auto Fn = LLModule.getFirstFunction(); Fn; Fn = Fn.getNextFunction()) { |
5973 | 23.0k | FP.runFunctionPassManager(Fn); |
5974 | 23.0k | } |
5975 | 2.29k | FP.finalizeFunctionPassManager(); |
5976 | 2.29k | MP.runPassManager(LLModule); |
5977 | 2.29k | #endif |
5978 | 2.29k | } |
5979 | | |
5980 |  |   // Give the intrinsics table a null initializer, creating the global if it does not exist yet. |
5981 | 2.29k | if (auto IntrinsicsTable = LLModule.getNamedGlobal("intrinsics")) { |
5982 | 1.33k | IntrinsicsTable.setInitializer( |
5983 | 1.33k | LLVM::Value::getConstNull(IntrinsicsTable.getType())); |
5984 | 1.33k | IntrinsicsTable.setGlobalConstant(false); |
5985 | 1.33k | } else { |
5986 | 965 | auto IntrinsicsTableTy = LLVM::Type::getArrayType( |
5987 | 965 | LLContext.getInt8Ty().getPointerTo(), |
5988 | 965 | static_cast<uint32_t>(Executable::Intrinsics::kIntrinsicMax)); |
5989 | 965 | LLModule.addGlobal( |
5990 | 965 | IntrinsicsTableTy.getPointerTo(), false, LLVMExternalLinkage, |
5991 | 965 | LLVM::Value::getConstNull(IntrinsicsTableTy), "intrinsics"); |
5992 | 965 | } |
5993 | | |
5994 | 2.29k | spdlog::info("optimize done"sv); |
5995 | 2.29k | return Expect<Data>{std::move(D)}; |
5996 | 2.29k | } |
5997 | | |
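 |  | // Each function type is compiled into an exported wrapper t{N} with signature |
 |  | // void(ExecCtx*, Function*, Args*, Rets*): it loads the ValVariant-packed |
 |  | // arguments, calls the target function with its native signature, and stores |
 |  | // the results back. Duplicate function types reuse a wrapper via an alias. |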
5998 | 2.30k | void Compiler::compile(const AST::TypeSection &TypeSec) noexcept { |
5999 | 2.30k | auto WrapperTy = |
6000 | 2.30k | LLVM::Type::getFunctionType(Context->VoidTy, |
6001 | 2.30k | {Context->ExecCtxPtrTy, Context->Int8PtrTy, |
6002 | 2.30k | Context->Int8PtrTy, Context->Int8PtrTy}, |
6003 | 2.30k | false); |
6004 | 2.30k | auto SubTypes = TypeSec.getContent(); |
6005 | 2.30k | const auto Size = SubTypes.size(); |
6006 | 2.30k | if (Size == 0) { |
6007 | 136 | return; |
6008 | 136 | } |
6009 | 2.17k | Context->CompositeTypes.reserve(Size); |
6010 | 2.17k | Context->FunctionWrappers.reserve(Size); |
6011 | | |
6012 | | // Iterate and compile types. |
6013 | 6.83k | for (size_t I = 0; I < Size; ++I) { |
6014 | 4.66k | const auto &CompType = SubTypes[I].getCompositeType(); |
6015 | 4.66k | const auto Name = fmt::format("t{}"sv, Context->CompositeTypes.size()); |
6016 | 4.66k | if (CompType.isFunc()) { |
6017 |  |       // Check whether the function type is unique. |
6018 | 4.60k | { |
6019 | 4.60k | bool Unique = true; |
6020 | 17.9k | for (size_t J = 0; J < I; ++J) { |
6021 | 13.5k | if (Context->CompositeTypes[J] && |
6022 | 13.5k | Context->CompositeTypes[J]->isFunc()) { |
6023 | 13.4k | const auto &OldFuncType = Context->CompositeTypes[J]->getFuncType(); |
6024 | 13.4k | if (OldFuncType == CompType.getFuncType()) { |
6025 | 162 | Unique = false; |
6026 | 162 | Context->CompositeTypes.push_back(Context->CompositeTypes[J]); |
6027 | 162 | auto F = Context->FunctionWrappers[J]; |
6028 | 162 | Context->FunctionWrappers.push_back(F); |
6029 | 162 | auto A = Context->LLModule.addAlias(WrapperTy, F, Name.c_str()); |
6030 | 162 | A.setLinkage(LLVMExternalLinkage); |
6031 | 162 | A.setVisibility(LLVMProtectedVisibility); |
6032 | 162 | A.setDSOLocal(true); |
6033 | 162 | A.setDLLStorageClass(LLVMDLLExportStorageClass); |
6034 | 162 | break; |
6035 | 162 | } |
6036 | 13.4k | } |
6037 | 13.5k | } |
6038 | 4.60k | if (!Unique) { |
6039 | 162 | continue; |
6040 | 162 | } |
6041 | 4.60k | } |
6042 | | |
6043 | | // Create Wrapper |
6044 | 4.44k | auto F = Context->LLModule.addFunction(WrapperTy, LLVMExternalLinkage, |
6045 | 4.44k | Name.c_str()); |
6046 | 4.44k | { |
6047 | 4.44k | F.setVisibility(LLVMProtectedVisibility); |
6048 | 4.44k | F.setDSOLocal(true); |
6049 | 4.44k | F.setDLLStorageClass(LLVMDLLExportStorageClass); |
6050 | 4.44k | F.addFnAttr(Context->NoStackArgProbe); |
6051 | 4.44k | F.addFnAttr(Context->StrictFP); |
6052 | 4.44k | F.addFnAttr(Context->UWTable); |
6053 | 4.44k | F.addParamAttr(0, Context->ReadOnly); |
6054 | 4.44k | F.addParamAttr(0, Context->NoAlias); |
6055 | 4.44k | F.addParamAttr(1, Context->NoAlias); |
6056 | 4.44k | F.addParamAttr(2, Context->NoAlias); |
6057 | 4.44k | F.addParamAttr(3, Context->NoAlias); |
6058 | | |
6059 | 4.44k | LLVM::Builder Builder(Context->LLContext); |
6060 | 4.44k | Builder.positionAtEnd( |
6061 | 4.44k | LLVM::BasicBlock::create(Context->LLContext, F, "entry")); |
6062 | | |
6063 | 4.44k | auto FTy = toLLVMType(Context->LLContext, Context->ExecCtxPtrTy, |
6064 | 4.44k | CompType.getFuncType()); |
6065 | 4.44k | auto RTy = FTy.getReturnType(); |
6066 | 4.44k | std::vector<LLVM::Type> FPTy(FTy.getNumParams()); |
6067 | 4.44k | FTy.getParamTypes(FPTy); |
6068 | | |
6069 | 4.44k | const size_t ArgCount = FPTy.size() - 1; |
6070 | 4.44k | auto ExecCtxPtr = F.getFirstParam(); |
6071 | 4.44k | auto RawFunc = LLVM::FunctionCallee{ |
6072 | 4.44k | FTy, Builder.createBitCast(ExecCtxPtr.getNextParam(), |
6073 | 4.44k | FTy.getPointerTo())}; |
6074 | 4.44k | auto RawArgs = ExecCtxPtr.getNextParam().getNextParam(); |
6075 | 4.44k | auto RawRets = RawArgs.getNextParam(); |
6076 | | |
6077 | 4.44k | std::vector<LLVM::Value> Args; |
6078 | 4.44k | Args.reserve(FTy.getNumParams()); |
6079 | 4.44k | Args.push_back(ExecCtxPtr); |
6080 | 9.20k | for (size_t J = 0; J < ArgCount; ++J) { |
6081 | 4.76k | Args.push_back(Builder.createValuePtrLoad( |
6082 | 4.76k | FPTy[J + 1], RawArgs, Context->Int8Ty, J * kValSize)); |
6083 | 4.76k | } |
6084 | | |
6085 | 4.44k | auto Ret = Builder.createCall(RawFunc, Args); |
6086 | 4.44k | if (RTy.isVoidTy()) { |
6087 | | // nothing to do |
6088 | 2.99k | } else if (RTy.isStructTy()) { |
6089 | 358 | auto Rets = unpackStruct(Builder, Ret); |
6090 | 358 | Builder.createArrayPtrStore(Rets, RawRets, Context->Int8Ty, kValSize); |
6091 | 2.63k | } else { |
6092 | 2.63k | Builder.createValuePtrStore(Ret, RawRets, Context->Int8Ty); |
6093 | 2.63k | } |
6094 | 4.44k | Builder.createRetVoid(); |
6095 | 4.44k | } |
6096 | | // Copy wrapper, param and return lists to module instance. |
6097 | 4.44k | Context->FunctionWrappers.push_back(F); |
6098 | 4.44k | } else { |
6099 |  |       // Non-function type case: create an empty wrapper. |
6100 | 57 | auto F = Context->LLModule.addFunction(WrapperTy, LLVMExternalLinkage, |
6101 | 57 | Name.c_str()); |
6102 | 57 | { |
6103 | 57 | F.setVisibility(LLVMProtectedVisibility); |
6104 | 57 | F.setDSOLocal(true); |
6105 | 57 | F.setDLLStorageClass(LLVMDLLExportStorageClass); |
6106 | 57 | F.addFnAttr(Context->NoStackArgProbe); |
6107 | 57 | F.addFnAttr(Context->StrictFP); |
6108 | 57 | F.addFnAttr(Context->UWTable); |
6109 | 57 | F.addParamAttr(0, Context->ReadOnly); |
6110 | 57 | F.addParamAttr(0, Context->NoAlias); |
6111 | 57 | F.addParamAttr(1, Context->NoAlias); |
6112 | 57 | F.addParamAttr(2, Context->NoAlias); |
6113 | 57 | F.addParamAttr(3, Context->NoAlias); |
6114 | | |
6115 | 57 | LLVM::Builder Builder(Context->LLContext); |
6116 | 57 | Builder.positionAtEnd( |
6117 | 57 | LLVM::BasicBlock::create(Context->LLContext, F, "entry")); |
6118 | 57 | Builder.createRetVoid(); |
6119 | 57 | } |
6120 | 57 | Context->FunctionWrappers.push_back(F); |
6121 | 57 | } |
6122 | 4.50k | Context->CompositeTypes.push_back(&CompType); |
6123 | 4.50k | } |
6124 | 2.17k | } |
6125 | | |
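 |  | // Imported functions are lowered to trampolines f{N} that spill their native |
 |  | // arguments into a ValVariant array, dispatch back into the runtime through |
 |  | // the kCall intrinsic with the function index, and reload the returned values. |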
6126 | 2.30k | void Compiler::compile(const AST::ImportSection &ImportSec) noexcept { |
6127 | | // Iterate and compile import descriptions. |
6128 | 2.30k | for (const auto &ImpDesc : ImportSec.getContent()) { |
6129 | | // Get data from import description. |
6130 | 443 | const auto &ExtType = ImpDesc.getExternalType(); |
6131 | | |
6132 | | // Add the imports into module instance. |
6133 | 443 | switch (ExtType) { |
6134 | 302 | case ExternalType::Function: // Function type index |
6135 | 302 | { |
6136 | 302 | const auto FuncID = static_cast<uint32_t>(Context->Functions.size()); |
6137 | | // Get the function type index in module. |
6138 | 302 | uint32_t TypeIdx = ImpDesc.getExternalFuncTypeIdx(); |
6139 | 302 | assuming(TypeIdx < Context->CompositeTypes.size()); |
6140 | 302 | assuming(Context->CompositeTypes[TypeIdx]->isFunc()); |
6141 | 302 | const auto &FuncType = Context->CompositeTypes[TypeIdx]->getFuncType(); |
6142 | 302 | auto FTy = |
6143 | 302 | toLLVMType(Context->LLContext, Context->ExecCtxPtrTy, FuncType); |
6144 | 302 | auto RTy = FTy.getReturnType(); |
6145 | 302 | auto F = LLVM::FunctionCallee{ |
6146 | 302 | FTy, |
6147 | 302 | Context->LLModule.addFunction(FTy, LLVMInternalLinkage, |
6148 | 302 | fmt::format("f{}"sv, FuncID).c_str())}; |
6149 | 302 | F.Fn.setDSOLocal(true); |
6150 | 302 | F.Fn.addFnAttr(Context->NoStackArgProbe); |
6151 | 302 | F.Fn.addFnAttr(Context->StrictFP); |
6152 | 302 | F.Fn.addFnAttr(Context->UWTable); |
6153 | 302 | F.Fn.addParamAttr(0, Context->ReadOnly); |
6154 | 302 | F.Fn.addParamAttr(0, Context->NoAlias); |
6155 | | |
6156 | 302 | LLVM::Builder Builder(Context->LLContext); |
6157 | 302 | Builder.positionAtEnd( |
6158 | 302 | LLVM::BasicBlock::create(Context->LLContext, F.Fn, "entry")); |
6159 | | |
6160 | 302 | const auto ArgSize = FuncType.getParamTypes().size(); |
6161 | 302 | const auto RetSize = |
6162 | 302 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
6163 | | |
6164 | 302 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
6165 | 302 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
6166 | | |
6167 | 302 | auto Arg = F.Fn.getFirstParam(); |
6168 | 452 | for (unsigned I = 0; I < ArgSize; ++I) { |
6169 | 150 | Arg = Arg.getNextParam(); |
6170 | 150 | Builder.createValuePtrStore(Arg, Args, Context->Int8Ty, I * kValSize); |
6171 | 150 | } |
6172 | | |
6173 | 302 | Builder.createCall( |
6174 | 302 | Context->getIntrinsic( |
6175 | 302 | Builder, Executable::Intrinsics::kCall, |
6176 | 302 | LLVM::Type::getFunctionType( |
6177 | 302 | Context->VoidTy, |
6178 | 302 | {Context->Int32Ty, Context->Int8PtrTy, Context->Int8PtrTy}, |
6179 | 302 | false)), |
6180 | 302 | {Context->LLContext.getInt32(FuncID), Args, Rets}); |
6181 | | |
6182 | 302 | if (RetSize == 0) { |
6183 | 172 | Builder.createRetVoid(); |
6184 | 172 | } else if (RetSize == 1) { |
6185 | 86 | Builder.createRet( |
6186 | 86 | Builder.createValuePtrLoad(RTy, Rets, Context->Int8Ty)); |
6187 | 86 | } else { |
6188 | 44 | Builder.createAggregateRet(Builder.createArrayPtrLoad( |
6189 | 44 | RetSize, RTy, Rets, Context->Int8Ty, kValSize)); |
6190 | 44 | } |
6191 | | |
6192 | 302 | Context->Functions.emplace_back(TypeIdx, F, nullptr); |
6193 | 302 | break; |
6194 | 302 | } |
6195 | 51 | case ExternalType::Table: // Table type |
6196 | 51 | { |
6197 | | // Nothing to do. |
6198 | 51 | break; |
6199 | 302 | } |
6200 | 35 | case ExternalType::Memory: // Memory type |
6201 | 35 | { |
6202 | | // Nothing to do. |
6203 | 35 | break; |
6204 | 302 | } |
6205 | 50 | case ExternalType::Global: // Global type |
6206 | 50 | { |
6207 | | // Get global type. External type checked in validation. |
6208 | 50 | const auto &GlobType = ImpDesc.getExternalGlobalType(); |
6209 | 50 | const auto &ValType = GlobType.getValType(); |
6210 | 50 | auto Type = toLLVMType(Context->LLContext, ValType); |
6211 | 50 | Context->Globals.push_back(Type); |
6212 | 50 | break; |
6213 | 302 | } |
6214 | 5 | case ExternalType::Tag: // Tag type |
6215 | 5 | { |
6216 | | // TODO: EXCEPTION - implement the AOT. |
6217 | 5 | break; |
6218 | 302 | } |
6219 | 0 | default: |
6220 | 0 | assumingUnreachable(); |
6221 | 443 | } |
6222 | 443 | } |
6223 | 2.30k | } |
6224 | | |
6225 | 2.29k | void Compiler::compile(const AST::ExportSection &) noexcept {} |
6226 | | |
6227 | 2.30k | void Compiler::compile(const AST::GlobalSection &GlobalSec) noexcept { |
6228 | 2.30k | for (const auto &GlobalSeg : GlobalSec.getContent()) { |
6229 | 152 | const auto &ValType = GlobalSeg.getGlobalType().getValType(); |
6230 | 152 | auto Type = toLLVMType(Context->LLContext, ValType); |
6231 | 152 | Context->Globals.push_back(Type); |
6232 | 152 | } |
6233 | 2.30k | } |
6234 | | |
6235 | | void Compiler::compile(const AST::MemorySection &, |
6236 | 2.30k | const AST::DataSection &) noexcept {} |
6237 | | |
6238 | | void Compiler::compile(const AST::TableSection &, |
6239 | 2.30k | const AST::ElementSection &) noexcept {} |
6240 | | |
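 |  | // Declare all defined functions up front (bodies may call functions with |
 |  | // higher indices), then compile each body with FunctionCompiler and remove |
 |  | // unreachable basic blocks afterwards. |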
6241 | | Expect<void> Compiler::compile(const AST::FunctionSection &FuncSec, |
6242 | 2.30k | const AST::CodeSection &CodeSec) noexcept { |
6243 | 2.30k | const auto &TypeIdxs = FuncSec.getContent(); |
6244 | 2.30k | const auto &CodeSegs = CodeSec.getContent(); |
6245 | 2.30k | assuming(TypeIdxs.size() == CodeSegs.size()); |
6246 | | |
6247 | 13.7k | for (size_t I = 0; I < CodeSegs.size(); ++I) { |
6248 | 11.4k | const auto &TypeIdx = TypeIdxs[I]; |
6249 | 11.4k | const auto &Code = CodeSegs[I]; |
6250 | 11.4k | assuming(TypeIdx < Context->CompositeTypes.size()); |
6251 | 11.4k | assuming(Context->CompositeTypes[TypeIdx]->isFunc()); |
6252 | 11.4k | const auto &FuncType = Context->CompositeTypes[TypeIdx]->getFuncType(); |
6253 | 11.4k | const auto FuncID = Context->Functions.size(); |
6254 | 11.4k | auto FTy = toLLVMType(Context->LLContext, Context->ExecCtxPtrTy, FuncType); |
6255 | 11.4k | LLVM::FunctionCallee F = {FTy, Context->LLModule.addFunction( |
6256 | 11.4k | FTy, LLVMExternalLinkage, |
6257 | 11.4k | fmt::format("f{}"sv, FuncID).c_str())}; |
6258 | 11.4k | F.Fn.setVisibility(LLVMProtectedVisibility); |
6259 | 11.4k | F.Fn.setDSOLocal(true); |
6260 | 11.4k | F.Fn.setDLLStorageClass(LLVMDLLExportStorageClass); |
6261 | 11.4k | F.Fn.addFnAttr(Context->NoStackArgProbe); |
6262 | 11.4k | F.Fn.addFnAttr(Context->StrictFP); |
6263 | 11.4k | F.Fn.addFnAttr(Context->UWTable); |
6264 | 11.4k | F.Fn.addParamAttr(0, Context->ReadOnly); |
6265 | 11.4k | F.Fn.addParamAttr(0, Context->NoAlias); |
6266 | | |
6267 | 11.4k | Context->Functions.emplace_back(TypeIdx, F, &Code); |
6268 | 11.4k | } |
6269 | | |
6270 | 11.7k | for (auto [T, F, Code] : Context->Functions) { |
6271 | 11.7k | if (!Code) { |
6272 | 302 | continue; |
6273 | 302 | } |
6274 | | |
6275 | 11.4k | std::vector<ValType> Locals; |
6276 | 11.4k | for (const auto &Local : Code->getLocals()) { |
6277 | 2.37M | for (unsigned I = 0; I < Local.first; ++I) { |
6278 | 2.37M | Locals.push_back(Local.second); |
6279 | 2.37M | } |
6280 | 1.75k | } |
6281 | 11.4k | FunctionCompiler FC(*Context, F, Locals, |
6282 | 11.4k | Conf.getCompilerConfigure().isInterruptible(), |
6283 | 11.4k | Conf.getStatisticsConfigure().isInstructionCounting(), |
6284 | 11.4k | Conf.getStatisticsConfigure().isCostMeasuring()); |
6285 | 11.4k | auto Type = Context->resolveBlockType(T); |
6286 | 11.4k | EXPECTED_TRY(FC.compile(*Code, std::move(Type))); |
6287 | 11.4k | F.Fn.eliminateUnreachableBlocks(); |
6288 | 11.4k | } |
6289 | 2.29k | return {}; |
6290 | 2.30k | } |
6291 | | |
6292 | | } // namespace LLVM |
6293 | | } // namespace WasmEdge |