/src/WasmEdge/lib/llvm/compiler.cpp
Line | Count | Source |
1 | | // SPDX-License-Identifier: Apache-2.0 |
2 | | // SPDX-FileCopyrightText: 2019-2024 Second State INC |
3 | | |
4 | | #include "llvm/compiler.h" |
5 | | |
6 | | #include "aot/version.h" |
7 | | #include "common/defines.h" |
8 | | #include "common/filesystem.h" |
9 | | #include "common/spdlog.h" |
10 | | #include "data.h" |
11 | | #include "llvm.h" |
12 | | #include "system/allocator.h" |
13 | | |
14 | | #include <algorithm> |
15 | | #include <array> |
16 | | #include <cinttypes> |
17 | | #include <cstdint> |
18 | | #include <cstdlib> |
19 | | #include <limits> |
20 | | #include <memory> |
21 | | #include <numeric> |
22 | | #include <string> |
23 | | #include <string_view> |
24 | | #include <system_error> |
25 | | |
26 | | namespace LLVM = WasmEdge::LLVM; |
27 | | using namespace std::literals; |
28 | | |
29 | | namespace { |
30 | | |
31 | | static bool |
32 | | isVoidReturn(WasmEdge::Span<const WasmEdge::ValType> ValTypes) noexcept; |
33 | | static LLVM::Type toLLVMType(LLVM::Context LLContext, |
34 | | const WasmEdge::ValType &ValType) noexcept; |
35 | | static std::vector<LLVM::Type> |
36 | | toLLVMArgsType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
37 | | WasmEdge::Span<const WasmEdge::ValType> ValTypes) noexcept; |
38 | | static LLVM::Type |
39 | | toLLVMRetsType(LLVM::Context LLContext, |
40 | | WasmEdge::Span<const WasmEdge::ValType> ValTypes) noexcept; |
41 | | static LLVM::Type |
42 | | toLLVMType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
43 | | const WasmEdge::AST::FunctionType &FuncType) noexcept; |
44 | | static LLVM::Value |
45 | | toLLVMConstantZero(LLVM::Context LLContext, |
46 | | const WasmEdge::ValType &ValType) noexcept; |
47 | | static std::vector<LLVM::Value> unpackStruct(LLVM::Builder &Builder, |
48 | | LLVM::Value Struct) noexcept; |
49 | | class FunctionCompiler; |
50 | | |
51 | | // XXX: Misalignment handler not implemented yet, so force
52 | | // unaligned load/store.
53 | | static inline constexpr const bool kForceUnalignment = true; |
54 | | |
55 | | // force checking div/rem by zero
56 | | static inline constexpr const bool kForceDivCheck = true; |
57 | | |
58 | | // Size of a ValVariant |
59 | | static inline constexpr const uint32_t kValSize = sizeof(WasmEdge::ValVariant); |
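// NOTE: kValSize is whatever sizeof(WasmEdge::ValVariant) evaluates to at build
// time; the code below uses it as the stride of the value arrays handed to the
// runtime intrinsics, so it must be large enough to hold a full v128 value.
// A minimal sketch of how that assumption could be asserted (illustrative only,
// not part of this file):
//
//   static_assert(sizeof(WasmEdge::ValVariant) >= 16,
//                 "ValVariant must be able to hold a 128-bit (v128) value");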
60 | | |
61 | | // Translate CompilerConfigure::OptimizationLevel to the corresponding LLVM optimization level
62 | | #if LLVM_VERSION_MAJOR >= 13 |
63 | | static inline const char * |
64 | | toLLVMLevel(WasmEdge::CompilerConfigure::OptimizationLevel Level) noexcept { |
65 | | using OL = WasmEdge::CompilerConfigure::OptimizationLevel; |
66 | | switch (Level) { |
67 | | case OL::O0: |
68 | | return "default<O0>,function(tailcallelim)"; |
69 | | case OL::O1: |
70 | | return "default<O1>,function(tailcallelim)"; |
71 | | case OL::O2: |
72 | | return "default<O2>"; |
73 | | case OL::O3: |
74 | | return "default<O3>"; |
75 | | case OL::Os: |
76 | | return "default<Os>"; |
77 | | case OL::Oz: |
78 | | return "default<Oz>"; |
79 | | default: |
80 | | assumingUnreachable(); |
81 | | } |
82 | | } |
83 | | #else |
84 | | static inline std::pair<unsigned int, unsigned int> |
85 | 2.14k | toLLVMLevel(WasmEdge::CompilerConfigure::OptimizationLevel Level) noexcept { |
86 | 2.14k | using OL = WasmEdge::CompilerConfigure::OptimizationLevel; |
87 | 2.14k | switch (Level) { |
88 | 0 | case OL::O0: |
89 | 0 | return {0, 0}; |
90 | 0 | case OL::O1: |
91 | 0 | return {1, 0}; |
92 | 0 | case OL::O2: |
93 | 0 | return {2, 0}; |
94 | 2.14k | case OL::O3: |
95 | 2.14k | return {3, 0}; |
96 | 0 | case OL::Os: |
97 | 0 | return {2, 1}; |
98 | 0 | case OL::Oz: |
99 | 0 | return {2, 2}; |
100 | 0 | default: |
101 | 0 | assumingUnreachable(); |
102 | 2.14k | } |
103 | 2.14k | } |
104 | | #endif |
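// NOTE: with LLVM >= 13 the optimization level is expressed as a new-pass-manager
// pipeline string; older LLVM versions instead get a (speed, size) pair for the
// legacy pass manager (e.g. Os -> {2, 1}, Oz -> {2, 2}). A hedged sketch of how
// such a pair is typically consumed through the legacy LLVM-C pass-manager-builder
// API (illustrative only; the actual consumer lives elsewhere in this library):
//
//   LLVMPassManagerBuilderRef PMB = LLVMPassManagerBuilderCreate();
//   const auto [Speed, Size] = toLLVMLevel(Level);
//   LLVMPassManagerBuilderSetOptLevel(PMB, Speed);
//   LLVMPassManagerBuilderSetSizeLevel(PMB, Size);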
105 | | |
106 | | static inline LLVMCodeGenOptLevel toLLVMCodeGenLevel( |
107 | 2.14k | WasmEdge::CompilerConfigure::OptimizationLevel Level) noexcept { |
108 | 2.14k | using OL = WasmEdge::CompilerConfigure::OptimizationLevel; |
109 | 2.14k | switch (Level) { |
110 | 0 | case OL::O0: |
111 | 0 | return LLVMCodeGenLevelNone; |
112 | 0 | case OL::O1: |
113 | 0 | return LLVMCodeGenLevelLess; |
114 | 0 | case OL::O2: |
115 | 0 | return LLVMCodeGenLevelDefault; |
116 | 2.14k | case OL::O3: |
117 | 2.14k | return LLVMCodeGenLevelAggressive; |
118 | 0 | case OL::Os: |
119 | 0 | return LLVMCodeGenLevelDefault; |
120 | 0 | case OL::Oz: |
121 | 0 | return LLVMCodeGenLevelDefault; |
122 | 0 | default: |
123 | 0 | assumingUnreachable(); |
124 | 2.14k | } |
125 | 2.14k | } |
126 | | } // namespace |
127 | | |
128 | | struct LLVM::Compiler::CompileContext { |
129 | | LLVM::Context LLContext; |
130 | | LLVM::Module &LLModule; |
131 | | LLVM::Attribute Cold; |
132 | | LLVM::Attribute NoAlias; |
133 | | LLVM::Attribute NoInline; |
134 | | LLVM::Attribute NoReturn; |
135 | | LLVM::Attribute ReadOnly; |
136 | | LLVM::Attribute StrictFP; |
137 | | LLVM::Attribute UWTable; |
138 | | LLVM::Attribute NoStackArgProbe; |
139 | | LLVM::Type VoidTy; |
140 | | LLVM::Type Int8Ty; |
141 | | LLVM::Type Int16Ty; |
142 | | LLVM::Type Int32Ty; |
143 | | LLVM::Type Int64Ty; |
144 | | LLVM::Type Int128Ty; |
145 | | LLVM::Type FloatTy; |
146 | | LLVM::Type DoubleTy; |
147 | | LLVM::Type Int8x16Ty; |
148 | | LLVM::Type Int16x8Ty; |
149 | | LLVM::Type Int32x4Ty; |
150 | | LLVM::Type Floatx4Ty; |
151 | | LLVM::Type Int64x2Ty; |
152 | | LLVM::Type Doublex2Ty; |
153 | | LLVM::Type Int128x1Ty; |
154 | | LLVM::Type Int8PtrTy; |
155 | | LLVM::Type Int32PtrTy; |
156 | | LLVM::Type Int64PtrTy; |
157 | | LLVM::Type Int128PtrTy; |
158 | | LLVM::Type Int8PtrPtrTy; |
159 | | LLVM::Type ExecCtxTy; |
160 | | LLVM::Type ExecCtxPtrTy; |
161 | | LLVM::Type IntrinsicsTableTy; |
162 | | LLVM::Type IntrinsicsTablePtrTy; |
163 | | LLVM::Message SubtargetFeatures; |
164 | | |
165 | | #if defined(__x86_64__) |
166 | | #if defined(__XOP__) |
167 | | bool SupportXOP = true; |
168 | | #else |
169 | | bool SupportXOP = false; |
170 | | #endif |
171 | | |
172 | | #if defined(__SSE4_1__) |
173 | | bool SupportSSE4_1 = true; |
174 | | #else |
175 | | bool SupportSSE4_1 = false; |
176 | | #endif |
177 | | |
178 | | #if defined(__SSSE3__) |
179 | | bool SupportSSSE3 = true; |
180 | | #else |
181 | | bool SupportSSSE3 = false; |
182 | | #endif |
183 | | |
184 | | #if defined(__SSE2__) |
185 | | bool SupportSSE2 = true; |
186 | | #else |
187 | | bool SupportSSE2 = false; |
188 | | #endif |
189 | | #endif |
190 | | |
191 | | #if defined(__aarch64__) |
192 | | #if defined(__ARM_NEON__) || defined(__ARM_NEON) || defined(__ARM_NEON_FP) |
193 | | bool SupportNEON = true; |
194 | | #else |
195 | | bool SupportNEON = false; |
196 | | #endif |
197 | | #endif |
198 | | |
199 | | std::vector<const AST::CompositeType *> CompositeTypes; |
200 | | std::vector<LLVM::Value> FunctionWrappers; |
201 | | std::vector<std::tuple<uint32_t, LLVM::FunctionCallee, |
202 | | const WasmEdge::AST::CodeSegment *>> |
203 | | Functions; |
204 | | std::vector<LLVM::Type> Globals; |
205 | | LLVM::Value IntrinsicsTable; |
206 | | LLVM::FunctionCallee Trap; |
207 | | CompileContext(LLVM::Context C, LLVM::Module &M, |
208 | | bool IsGenericBinary) noexcept |
209 | 2.14k | : LLContext(C), LLModule(M), |
210 | 2.14k | Cold(LLVM::Attribute::createEnum(C, LLVM::Core::Cold, 0)), |
211 | 2.14k | NoAlias(LLVM::Attribute::createEnum(C, LLVM::Core::NoAlias, 0)), |
212 | 2.14k | NoInline(LLVM::Attribute::createEnum(C, LLVM::Core::NoInline, 0)), |
213 | 2.14k | NoReturn(LLVM::Attribute::createEnum(C, LLVM::Core::NoReturn, 0)), |
214 | 2.14k | ReadOnly(LLVM::Attribute::createEnum(C, LLVM::Core::ReadOnly, 0)), |
215 | 2.14k | StrictFP(LLVM::Attribute::createEnum(C, LLVM::Core::StrictFP, 0)), |
216 | 2.14k | UWTable(LLVM::Attribute::createEnum(C, LLVM::Core::UWTable, |
217 | 2.14k | LLVM::Core::UWTableDefault)), |
218 | | NoStackArgProbe( |
219 | 2.14k | LLVM::Attribute::createString(C, "no-stack-arg-probe"sv, {})), |
220 | 2.14k | VoidTy(LLContext.getVoidTy()), Int8Ty(LLContext.getInt8Ty()), |
221 | 2.14k | Int16Ty(LLContext.getInt16Ty()), Int32Ty(LLContext.getInt32Ty()), |
222 | 2.14k | Int64Ty(LLContext.getInt64Ty()), Int128Ty(LLContext.getInt128Ty()), |
223 | 2.14k | FloatTy(LLContext.getFloatTy()), DoubleTy(LLContext.getDoubleTy()), |
224 | 2.14k | Int8x16Ty(LLVM::Type::getVectorType(Int8Ty, 16)), |
225 | 2.14k | Int16x8Ty(LLVM::Type::getVectorType(Int16Ty, 8)), |
226 | 2.14k | Int32x4Ty(LLVM::Type::getVectorType(Int32Ty, 4)), |
227 | 2.14k | Floatx4Ty(LLVM::Type::getVectorType(FloatTy, 4)), |
228 | 2.14k | Int64x2Ty(LLVM::Type::getVectorType(Int64Ty, 2)), |
229 | 2.14k | Doublex2Ty(LLVM::Type::getVectorType(DoubleTy, 2)), |
230 | 2.14k | Int128x1Ty(LLVM::Type::getVectorType(Int128Ty, 1)), |
231 | 2.14k | Int8PtrTy(Int8Ty.getPointerTo()), Int32PtrTy(Int32Ty.getPointerTo()), |
232 | 2.14k | Int64PtrTy(Int64Ty.getPointerTo()), |
233 | 2.14k | Int128PtrTy(Int128Ty.getPointerTo()), |
234 | 2.14k | Int8PtrPtrTy(Int8PtrTy.getPointerTo()), |
235 | 2.14k | ExecCtxTy(LLVM::Type::getStructType( |
236 | 2.14k | "ExecCtx", |
237 | 2.14k | std::initializer_list<LLVM::Type>{ |
238 | | // Memory |
239 | 2.14k | Int8PtrTy.getPointerTo(), |
240 | | // Globals |
241 | 2.14k | Int128PtrTy.getPointerTo(), |
242 | | // InstrCount |
243 | 2.14k | Int64PtrTy, |
244 | | // CostTable |
245 | 2.14k | LLVM::Type::getArrayType(Int64Ty, UINT16_MAX + 1) |
246 | 2.14k | .getPointerTo(), |
247 | | // Gas |
248 | 2.14k | Int64PtrTy, |
249 | | // GasLimit |
250 | 2.14k | Int64Ty, |
251 | | // StopToken |
252 | 2.14k | Int32PtrTy, |
253 | 2.14k | })), |
254 | 2.14k | ExecCtxPtrTy(ExecCtxTy.getPointerTo()), |
255 | 2.14k | IntrinsicsTableTy(LLVM::Type::getArrayType( |
256 | 2.14k | Int8PtrTy, |
257 | 2.14k | static_cast<uint32_t>(Executable::Intrinsics::kIntrinsicMax))), |
258 | 2.14k | IntrinsicsTablePtrTy(IntrinsicsTableTy.getPointerTo()), |
259 | 2.14k | IntrinsicsTable(LLModule.addGlobal(IntrinsicsTablePtrTy, true, |
260 | 2.14k | LLVMExternalLinkage, LLVM::Value(), |
261 | 2.14k | "intrinsics")) { |
262 | 2.14k | Trap.Ty = LLVM::Type::getFunctionType(VoidTy, {Int32Ty}); |
263 | 2.14k | Trap.Fn = LLModule.addFunction(Trap.Ty, LLVMPrivateLinkage, "trap"); |
264 | 2.14k | Trap.Fn.setDSOLocal(true); |
265 | 2.14k | Trap.Fn.addFnAttr(NoStackArgProbe); |
266 | 2.14k | Trap.Fn.addFnAttr(StrictFP); |
267 | 2.14k | Trap.Fn.addFnAttr(UWTable); |
268 | 2.14k | Trap.Fn.addFnAttr(NoReturn); |
269 | 2.14k | Trap.Fn.addFnAttr(Cold); |
270 | 2.14k | Trap.Fn.addFnAttr(NoInline); |
271 | | |
272 | 2.14k | LLModule.addGlobal(Int32Ty, true, LLVMExternalLinkage, |
273 | 2.14k | LLVM::Value::getConstInt(Int32Ty, AOT::kBinaryVersion), |
274 | 2.14k | "version"); |
275 | | |
276 | 2.14k | if (!IsGenericBinary) { |
277 | 2.14k | SubtargetFeatures = LLVM::getHostCPUFeatures(); |
278 | 2.14k | auto Features = SubtargetFeatures.string_view(); |
279 | 186k | while (!Features.empty()) { |
280 | 184k | std::string_view Feature; |
281 | 184k | if (auto Pos = Features.find(','); Pos != std::string_view::npos) { |
282 | 182k | Feature = Features.substr(0, Pos); |
283 | 182k | Features = Features.substr(Pos + 1); |
284 | 182k | } else { |
285 | 2.14k | Feature = std::exchange(Features, std::string_view()); |
286 | 2.14k | } |
287 | 184k | if (Feature[0] != '+') { |
288 | 113k | continue; |
289 | 113k | } |
290 | 70.7k | Feature = Feature.substr(1); |
291 | | |
292 | 70.7k | #if defined(__x86_64__) |
293 | 70.7k | if (!SupportXOP && Feature == "xop"sv) { |
294 | 0 | SupportXOP = true; |
295 | 0 | } |
296 | 70.7k | if (!SupportSSE4_1 && Feature == "sse4.1"sv) { |
297 | 2.14k | SupportSSE4_1 = true; |
298 | 2.14k | } |
299 | 70.7k | if (!SupportSSSE3 && Feature == "ssse3"sv) { |
300 | 2.14k | SupportSSSE3 = true; |
301 | 2.14k | } |
302 | 70.7k | if (!SupportSSE2 && Feature == "sse2"sv) { |
303 | 0 | SupportSSE2 = true; |
304 | 0 | } |
305 | | #elif defined(__aarch64__) |
306 | | if (!SupportNEON && Feature == "neon"sv) { |
307 | | SupportNEON = true; |
308 | | } |
309 | | #endif |
310 | 70.7k | } |
311 | 2.14k | } |
312 | | |
313 | 2.14k | { |
314 | | // create trap |
315 | 2.14k | LLVM::Builder Builder(LLContext); |
316 | 2.14k | Builder.positionAtEnd( |
317 | 2.14k | LLVM::BasicBlock::create(LLContext, Trap.Fn, "entry")); |
318 | 2.14k | auto FnTy = LLVM::Type::getFunctionType(VoidTy, {Int32Ty}); |
319 | 2.14k | auto CallTrap = Builder.createCall( |
320 | 2.14k | getIntrinsic(Builder, Executable::Intrinsics::kTrap, FnTy), |
321 | 2.14k | {Trap.Fn.getFirstParam()}); |
322 | 2.14k | CallTrap.addCallSiteAttribute(NoReturn); |
323 | 2.14k | Builder.createUnreachable(); |
324 | 2.14k | } |
325 | 2.14k | } |
326 | | LLVM::Value getMemory(LLVM::Builder &Builder, LLVM::Value ExecCtx, |
327 | 22.0k | uint32_t Index) noexcept { |
328 | 22.0k | auto Array = Builder.createExtractValue(ExecCtx, 0); |
329 | | #if WASMEDGE_ALLOCATOR_IS_STABLE |
330 | | auto VPtr = Builder.createLoad( |
331 | | Int8PtrTy, Builder.createInBoundsGEP1(Int8PtrTy, Array, |
332 | | LLContext.getInt64(Index))); |
333 | | VPtr.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
334 | | LLVM::Metadata(LLContext, {})); |
335 | | #else |
336 | 22.0k | auto VPtrPtr = Builder.createLoad( |
337 | 22.0k | Int8PtrPtrTy, Builder.createInBoundsGEP1(Int8PtrPtrTy, Array, |
338 | 22.0k | LLContext.getInt64(Index))); |
339 | 22.0k | VPtrPtr.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
340 | 22.0k | LLVM::Metadata(LLContext, {})); |
341 | 22.0k | auto VPtr = Builder.createLoad( |
342 | 22.0k | Int8PtrTy, |
343 | 22.0k | Builder.createInBoundsGEP1(Int8PtrTy, VPtrPtr, LLContext.getInt64(0))); |
344 | 22.0k | #endif |
345 | 22.0k | return Builder.createBitCast(VPtr, Int8PtrTy); |
346 | 22.0k | } |
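// NOTE: when WASMEDGE_ALLOCATOR_IS_STABLE is false, getMemory above emits a
// double indirection: ExecCtx.Memory is a table of pointers to per-instance
// data-pointer slots, so two loads are needed, and only the first load (the
// table entry) is tagged with invariant.group metadata so LLVM may hoist it
// out of loops. Roughly the C++ equivalent (hypothetical names, illustrative
// only):
//
//   uint8_t **Slot = ExecCtx->Memory[Index]; // invariant within the function
//   uint8_t *Data  = *Slot;                  // may change after memory.grow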
347 | | std::pair<LLVM::Type, LLVM::Value> getGlobal(LLVM::Builder &Builder, |
348 | | LLVM::Value ExecCtx, |
349 | 360 | uint32_t Index) noexcept { |
350 | 360 | auto Ty = Globals[Index]; |
351 | 360 | auto Array = Builder.createExtractValue(ExecCtx, 1); |
352 | 360 | auto VPtr = Builder.createLoad( |
353 | 360 | Int128PtrTy, Builder.createInBoundsGEP1(Int8PtrTy, Array, |
354 | 360 | LLContext.getInt64(Index))); |
355 | 360 | VPtr.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
356 | 360 | LLVM::Metadata(LLContext, {})); |
357 | 360 | auto Ptr = Builder.createBitCast(VPtr, Ty.getPointerTo()); |
358 | 360 | return {Ty, Ptr}; |
359 | 360 | } |
360 | | LLVM::Value getInstrCount(LLVM::Builder &Builder, |
361 | 0 | LLVM::Value ExecCtx) noexcept { |
362 | 0 | return Builder.createExtractValue(ExecCtx, 2); |
363 | 0 | } |
364 | | LLVM::Value getCostTable(LLVM::Builder &Builder, |
365 | 0 | LLVM::Value ExecCtx) noexcept { |
366 | 0 | return Builder.createExtractValue(ExecCtx, 3); |
367 | 0 | } |
368 | 0 | LLVM::Value getGas(LLVM::Builder &Builder, LLVM::Value ExecCtx) noexcept { |
369 | 0 | return Builder.createExtractValue(ExecCtx, 4); |
370 | 0 | } |
371 | | LLVM::Value getGasLimit(LLVM::Builder &Builder, |
372 | 0 | LLVM::Value ExecCtx) noexcept { |
373 | 0 | return Builder.createExtractValue(ExecCtx, 5); |
374 | 0 | } |
375 | | LLVM::Value getStopToken(LLVM::Builder &Builder, |
376 | 0 | LLVM::Value ExecCtx) noexcept { |
377 | 0 | return Builder.createExtractValue(ExecCtx, 6); |
378 | 0 | } |
379 | | LLVM::FunctionCallee getIntrinsic(LLVM::Builder &Builder, |
380 | | Executable::Intrinsics Index, |
381 | 6.14k | LLVM::Type Ty) noexcept { |
382 | 6.14k | const auto Value = static_cast<uint32_t>(Index); |
383 | 6.14k | auto PtrTy = Ty.getPointerTo(); |
384 | 6.14k | auto PtrPtrTy = PtrTy.getPointerTo(); |
385 | 6.14k | auto IT = Builder.createLoad(IntrinsicsTablePtrTy, IntrinsicsTable); |
386 | 6.14k | IT.setMetadata(LLContext, LLVM::Core::InvariantGroup, |
387 | 6.14k | LLVM::Metadata(LLContext, {})); |
388 | 6.14k | auto VPtr = |
389 | 6.14k | Builder.createInBoundsGEP2(IntrinsicsTableTy, IT, LLContext.getInt64(0), |
390 | 6.14k | LLContext.getInt64(Value)); |
391 | 6.14k | auto Ptr = Builder.createBitCast(VPtr, PtrPtrTy); |
392 | 6.14k | return {Ty, Builder.createLoad(PtrTy, Ptr)}; |
393 | 6.14k | } |
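// NOTE: getIntrinsic above reaches runtime helpers through one extra level of
// indirection: "intrinsics" is an external global holding a pointer to a table
// of Executable::Intrinsics::kIntrinsicMax entries, which the runtime is
// expected to fill in when the compiled module is loaded. Each call site loads
// the table pointer (tagged invariant.group), indexes the entry, and bit-casts
// it to the expected function pointer type. Roughly (illustrative only):
//
//   void **Table = *intrinsics;               // provided by the runtime loader
//   auto *Fn = reinterpret_cast<void (*)(uint32_t)>(Table[Index]); // e.g. kTrap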
394 | | std::pair<std::vector<ValType>, std::vector<ValType>> |
395 | 19.7k | resolveBlockType(const BlockType &BType) const noexcept { |
396 | 19.7k | using VecT = std::vector<ValType>; |
397 | 19.7k | using RetT = std::pair<VecT, VecT>; |
398 | 19.7k | if (BType.isEmpty()) { |
399 | 2.46k | return RetT{}; |
400 | 2.46k | } |
401 | 17.2k | if (BType.isValType()) { |
402 | 2.92k | return RetT{{}, {BType.getValType()}}; |
403 | 14.3k | } else { |
404 | | // Type index case. t2* = type[index].returns |
405 | 14.3k | const uint32_t TypeIdx = BType.getTypeIndex(); |
406 | 14.3k | const auto &FType = CompositeTypes[TypeIdx]->getFuncType(); |
407 | 14.3k | return RetT{ |
408 | 14.3k | VecT(FType.getParamTypes().begin(), FType.getParamTypes().end()), |
409 | 14.3k | VecT(FType.getReturnTypes().begin(), FType.getReturnTypes().end())}; |
410 | 14.3k | } |
411 | 17.2k | } |
412 | | }; |
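// NOTE: resolveBlockType above maps a wasm block type to its (params, results)
// pair: the empty block type yields ([], []), a single value type yields
// ([], [t]), and a type index yields the full signature stored in
// CompositeTypes[index]. The control-flow cases below consume it as, e.g.
// (illustrative only):
//
//   auto Type = Context.resolveBlockType(Instr.getBlockType());
//   const auto Arity = Type.first.size();  // number of block parameters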
413 | | |
414 | | namespace { |
415 | | |
416 | | using namespace WasmEdge; |
417 | | |
418 | 36.5k | static bool isVoidReturn(Span<const ValType> ValTypes) noexcept { |
419 | 36.5k | return ValTypes.empty(); |
420 | 36.5k | } |
421 | | |
422 | | static LLVM::Type toLLVMType(LLVM::Context LLContext, |
423 | 2.46M | const ValType &ValType) noexcept { |
424 | 2.46M | switch (ValType.getCode()) { |
425 | 277k | case TypeCode::I32: |
426 | 277k | return LLContext.getInt32Ty(); |
427 | 201k | case TypeCode::I64: |
428 | 201k | return LLContext.getInt64Ty(); |
429 | 0 | case TypeCode::Ref: |
430 | 17.5k | case TypeCode::RefNull: |
431 | 1.69M | case TypeCode::V128: |
432 | 1.69M | return LLVM::Type::getVectorType(LLContext.getInt64Ty(), 2); |
433 | 271k | case TypeCode::F32: |
434 | 271k | return LLContext.getFloatTy(); |
435 | 21.0k | case TypeCode::F64: |
436 | 21.0k | return LLContext.getDoubleTy(); |
437 | 0 | default: |
438 | 0 | assumingUnreachable(); |
439 | 2.46M | } |
440 | 2.46M | } |
441 | | |
442 | | static std::vector<LLVM::Type> |
443 | | toLLVMTypeVector(LLVM::Context LLContext, |
444 | 20.4k | Span<const ValType> ValTypes) noexcept { |
445 | 20.4k | std::vector<LLVM::Type> Result; |
446 | 20.4k | Result.reserve(ValTypes.size()); |
447 | 20.4k | for (const auto &Type : ValTypes) { |
448 | 20.2k | Result.push_back(toLLVMType(LLContext, Type)); |
449 | 20.2k | } |
450 | 20.4k | return Result; |
451 | 20.4k | } |
452 | | |
453 | | static std::vector<LLVM::Type> |
454 | | toLLVMArgsType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
455 | 16.8k | Span<const ValType> ValTypes) noexcept { |
456 | 16.8k | auto Result = toLLVMTypeVector(LLContext, ValTypes); |
457 | 16.8k | Result.insert(Result.begin(), ExecCtxPtrTy); |
458 | 16.8k | return Result; |
459 | 16.8k | } |
460 | | |
461 | | static LLVM::Type toLLVMRetsType(LLVM::Context LLContext, |
462 | 16.8k | Span<const ValType> ValTypes) noexcept { |
463 | 16.8k | if (isVoidReturn(ValTypes)) { |
464 | 4.00k | return LLContext.getVoidTy(); |
465 | 4.00k | } |
466 | 12.8k | if (ValTypes.size() == 1) { |
467 | 11.9k | return toLLVMType(LLContext, ValTypes.front()); |
468 | 11.9k | } |
469 | 868 | std::vector<LLVM::Type> Result; |
470 | 868 | Result.reserve(ValTypes.size()); |
471 | 2.49k | for (const auto &Type : ValTypes) { |
472 | 2.49k | Result.push_back(toLLVMType(LLContext, Type)); |
473 | 2.49k | } |
474 | 868 | return LLVM::Type::getStructType(Result); |
475 | 12.8k | } |
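// NOTE: result lowering above: no results map to void, a single result maps to
// that value's LLVM type, and N > 1 results map to an anonymous struct of the N
// types returned by value. For example (illustrative only), a wasm signature
// returning (i32, f64) yields roughly
//
//   LLVM::Type::getStructType({Int32Ty, DoubleTy});
//
// and the unpackStruct() helper declared earlier presumably splits such
// aggregates back into individual stack values.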
476 | | |
477 | | static LLVM::Type toLLVMType(LLVM::Context LLContext, LLVM::Type ExecCtxPtrTy, |
478 | 16.8k | const AST::FunctionType &FuncType) noexcept { |
479 | 16.8k | auto ArgsTy = |
480 | 16.8k | toLLVMArgsType(LLContext, ExecCtxPtrTy, FuncType.getParamTypes()); |
481 | 16.8k | auto RetTy = toLLVMRetsType(LLContext, FuncType.getReturnTypes()); |
482 | 16.8k | return LLVM::Type::getFunctionType(RetTy, ArgsTy); |
483 | 16.8k | } |
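// NOTE: every compiled wasm function receives the execution context as an
// implicit first parameter: toLLVMArgsType prepends ExecCtxPtrTy before the
// declared wasm parameters. For example (illustrative only), a wasm signature
// (param i32 i64) (result f32) is lowered to roughly
//
//   LLVM::Type::getFunctionType(FloatTy, {ExecCtxPtrTy, Int32Ty, Int64Ty});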
484 | | |
485 | | static LLVM::Value toLLVMConstantZero(LLVM::Context LLContext, |
486 | 2.43M | const ValType &ValType) noexcept { |
487 | 2.43M | switch (ValType.getCode()) { |
488 | 259k | case TypeCode::I32: |
489 | 259k | return LLVM::Value::getConstNull(LLContext.getInt32Ty()); |
490 | 197k | case TypeCode::I64: |
491 | 197k | return LLVM::Value::getConstNull(LLContext.getInt64Ty()); |
492 | 0 | case TypeCode::Ref: |
493 | 17.0k | case TypeCode::RefNull: |
494 | 1.69M | case TypeCode::V128: |
495 | 1.69M | return LLVM::Value::getConstNull( |
496 | 1.69M | LLVM::Type::getVectorType(LLContext.getInt64Ty(), 2)); |
497 | 268k | case TypeCode::F32: |
498 | 268k | return LLVM::Value::getConstNull(LLContext.getFloatTy()); |
499 | 18.0k | case TypeCode::F64: |
500 | 18.0k | return LLVM::Value::getConstNull(LLContext.getDoubleTy()); |
501 | 0 | default: |
502 | 0 | assumingUnreachable(); |
503 | 2.43M | } |
504 | 2.43M | } |
505 | | |
506 | | class FunctionCompiler { |
507 | | struct Control; |
508 | | |
509 | | public: |
510 | | FunctionCompiler(LLVM::Compiler::CompileContext &Context, |
511 | | LLVM::FunctionCallee F, Span<const ValType> Locals, |
512 | | bool Interruptible, bool InstructionCounting, |
513 | | bool GasMeasuring) noexcept |
514 | 11.7k | : Context(Context), LLContext(Context.LLContext), |
515 | 11.7k | Interruptible(Interruptible), F(F), Builder(LLContext) { |
516 | 11.7k | if (F.Fn) { |
517 | 11.7k | Builder.positionAtEnd(LLVM::BasicBlock::create(LLContext, F.Fn, "entry")); |
518 | 11.7k | ExecCtx = Builder.createLoad(Context.ExecCtxTy, F.Fn.getFirstParam()); |
519 | | |
520 | 11.7k | if (InstructionCounting) { |
521 | 0 | LocalInstrCount = Builder.createAlloca(Context.Int64Ty); |
522 | 0 | Builder.createStore(LLContext.getInt64(0), LocalInstrCount); |
523 | 0 | } |
524 | | |
525 | 11.7k | if (GasMeasuring) { |
526 | 0 | LocalGas = Builder.createAlloca(Context.Int64Ty); |
527 | 0 | Builder.createStore(LLContext.getInt64(0), LocalGas); |
528 | 0 | } |
529 | | |
530 | 22.2k | for (LLVM::Value Arg = F.Fn.getFirstParam().getNextParam(); Arg; |
531 | 11.7k | Arg = Arg.getNextParam()) { |
532 | 10.5k | LLVM::Type Ty = Arg.getType(); |
533 | 10.5k | LLVM::Value ArgPtr = Builder.createAlloca(Ty); |
534 | 10.5k | Builder.createStore(Arg, ArgPtr); |
535 | 10.5k | Local.emplace_back(Ty, ArgPtr); |
536 | 10.5k | } |
537 | | |
538 | 2.43M | for (const auto &Type : Locals) { |
539 | 2.43M | LLVM::Type Ty = toLLVMType(LLContext, Type); |
540 | 2.43M | LLVM::Value ArgPtr = Builder.createAlloca(Ty); |
541 | 2.43M | Builder.createStore(toLLVMConstantZero(LLContext, Type), ArgPtr); |
542 | 2.43M | Local.emplace_back(Ty, ArgPtr); |
543 | 2.43M | } |
544 | 11.7k | } |
545 | 11.7k | } |
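// NOTE: the constructor above spills every argument into a fresh alloca and
// materializes every declared local as a zero-initialized alloca, matching the
// wasm rule that locals start at their default value; the optimization
// pipelines selected by toLLVMLevel are expected to promote these slots back
// into SSA registers later. The per-local pattern is (illustrative only):
//
//   LLVM::Value Slot = Builder.createAlloca(toLLVMType(LLContext, Type));
//   Builder.createStore(toLLVMConstantZero(LLContext, Type), Slot);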
546 | | |
547 | 26.0k | LLVM::BasicBlock getTrapBB(ErrCode::Value Error) noexcept { |
548 | 26.0k | if (auto Iter = TrapBB.find(Error); Iter != TrapBB.end()) { |
549 | 23.1k | return Iter->second; |
550 | 23.1k | } |
551 | 2.93k | auto BB = LLVM::BasicBlock::create(LLContext, F.Fn, "trap"); |
552 | 2.93k | TrapBB.emplace(Error, BB); |
553 | 2.93k | return BB; |
554 | 26.0k | } |
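// NOTE: getTrapBB memoizes one basic block per ErrCode, so every guard that can
// fail with the same error branches to a single shared "trap" block; those
// blocks stay empty until the end of compile(), where each one is filled with a
// call to the private trap() thunk and an unreachable terminator. A typical
// guard looks like (illustrative only, mirroring the Ref__as_non_null case
// below):
//
//   Builder.createCondBr(IsOk, NextBB,
//                        getTrapBB(ErrCode::Value::CastNullToNonNull));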
555 | | |
556 | | void |
557 | | compile(const AST::CodeSegment &Code, |
558 | 11.7k | std::pair<std::vector<ValType>, std::vector<ValType>> Type) noexcept { |
559 | 11.7k | auto RetBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ret"); |
560 | 11.7k | Type.first.clear(); |
561 | 11.7k | enterBlock(RetBB, {}, {}, {}, std::move(Type)); |
562 | 11.7k | compile(Code.getExpr().getInstrs()); |
563 | 11.7k | assuming(ControlStack.empty()); |
564 | 11.7k | compileReturn(); |
565 | | |
566 | 11.7k | for (auto &[Error, BB] : TrapBB) { |
567 | 2.93k | Builder.positionAtEnd(BB); |
568 | 2.93k | updateInstrCount(); |
569 | 2.93k | updateGasAtTrap(); |
570 | 2.93k | auto CallTrap = Builder.createCall( |
571 | 2.93k | Context.Trap, {LLContext.getInt32(static_cast<uint32_t>(Error))}); |
572 | 2.93k | CallTrap.addCallSiteAttribute(Context.NoReturn); |
573 | 2.93k | Builder.createUnreachable(); |
574 | 2.93k | } |
575 | 11.7k | } |
576 | | |
577 | 11.7k | void compile(AST::InstrView Instrs) noexcept { |
578 | 1.35M | auto Dispatch = [this](const AST::Instruction &Instr) -> void { |
579 | 1.35M | switch (Instr.getOpCode()) { |
580 | | // Control instructions (for blocks) |
581 | 3.93k | case OpCode::Block: { |
582 | 3.93k | auto Block = LLVM::BasicBlock::create(LLContext, F.Fn, "block"); |
583 | 3.93k | auto EndBlock = LLVM::BasicBlock::create(LLContext, F.Fn, "block.end"); |
584 | 3.93k | Builder.createBr(Block); |
585 | | |
586 | 3.93k | Builder.positionAtEnd(Block); |
587 | 3.93k | auto Type = Context.resolveBlockType(Instr.getBlockType()); |
588 | 3.93k | const auto Arity = Type.first.size(); |
589 | 3.93k | std::vector<LLVM::Value> Args(Arity); |
590 | 3.93k | if (isUnreachable()) { |
591 | 682 | for (size_t I = 0; I < Arity; ++I) { |
592 | 228 | auto Ty = toLLVMType(LLContext, Type.first[I]); |
593 | 228 | Args[I] = LLVM::Value::getUndef(Ty); |
594 | 228 | } |
595 | 3.48k | } else { |
596 | 3.91k | for (size_t I = 0; I < Arity; ++I) { |
597 | 436 | const size_t J = Arity - 1 - I; |
598 | 436 | Args[J] = stackPop(); |
599 | 436 | } |
600 | 3.48k | } |
601 | 3.93k | enterBlock(EndBlock, {}, {}, std::move(Args), std::move(Type)); |
602 | 3.93k | checkStop(); |
603 | 3.93k | updateGas(); |
604 | 3.93k | return; |
605 | 0 | } |
606 | 1.52k | case OpCode::Loop: { |
607 | 1.52k | auto Curr = Builder.getInsertBlock(); |
608 | 1.52k | auto Loop = LLVM::BasicBlock::create(LLContext, F.Fn, "loop"); |
609 | 1.52k | auto EndLoop = LLVM::BasicBlock::create(LLContext, F.Fn, "loop.end"); |
610 | 1.52k | Builder.createBr(Loop); |
611 | | |
612 | 1.52k | Builder.positionAtEnd(Loop); |
613 | 1.52k | auto Type = Context.resolveBlockType(Instr.getBlockType()); |
614 | 1.52k | const auto Arity = Type.first.size(); |
615 | 1.52k | std::vector<LLVM::Value> Args(Arity); |
616 | 1.52k | if (isUnreachable()) { |
617 | 648 | for (size_t I = 0; I < Arity; ++I) { |
618 | 266 | auto Ty = toLLVMType(LLContext, Type.first[I]); |
619 | 266 | auto Value = LLVM::Value::getUndef(Ty); |
620 | 266 | auto PHINode = Builder.createPHI(Ty); |
621 | 266 | PHINode.addIncoming(Value, Curr); |
622 | 266 | Args[I] = PHINode; |
623 | 266 | } |
624 | 1.14k | } else { |
625 | 1.64k | for (size_t I = 0; I < Arity; ++I) { |
626 | 502 | const size_t J = Arity - 1 - I; |
627 | 502 | auto Value = stackPop(); |
628 | 502 | auto PHINode = Builder.createPHI(Value.getType()); |
629 | 502 | PHINode.addIncoming(Value, Curr); |
630 | 502 | Args[J] = PHINode; |
631 | 502 | } |
632 | 1.14k | } |
633 | 1.52k | enterBlock(Loop, EndLoop, {}, std::move(Args), std::move(Type)); |
634 | 1.52k | checkStop(); |
635 | 1.52k | updateGas(); |
636 | 1.52k | return; |
637 | 0 | } |
638 | 2.54k | case OpCode::If: { |
639 | 2.54k | auto Then = LLVM::BasicBlock::create(LLContext, F.Fn, "then"); |
640 | 2.54k | auto Else = LLVM::BasicBlock::create(LLContext, F.Fn, "else"); |
641 | 2.54k | auto EndIf = LLVM::BasicBlock::create(LLContext, F.Fn, "if.end"); |
642 | 2.54k | LLVM::Value Cond; |
643 | 2.54k | if (isUnreachable()) { |
644 | 495 | Cond = LLVM::Value::getUndef(LLContext.getInt1Ty()); |
645 | 2.05k | } else { |
646 | 2.05k | Cond = Builder.createICmpNE(stackPop(), LLContext.getInt32(0)); |
647 | 2.05k | } |
648 | 2.54k | Builder.createCondBr(Cond, Then, Else); |
649 | | |
650 | 2.54k | Builder.positionAtEnd(Then); |
651 | 2.54k | auto Type = Context.resolveBlockType(Instr.getBlockType()); |
652 | 2.54k | const auto Arity = Type.first.size(); |
653 | 2.54k | std::vector<LLVM::Value> Args(Arity); |
654 | 2.54k | if (isUnreachable()) { |
655 | 996 | for (size_t I = 0; I < Arity; ++I) { |
656 | 501 | auto Ty = toLLVMType(LLContext, Type.first[I]); |
657 | 501 | Args[I] = LLVM::Value::getUndef(Ty); |
658 | 501 | } |
659 | 2.05k | } else { |
660 | 2.88k | for (size_t I = 0; I < Arity; ++I) { |
661 | 831 | const size_t J = Arity - 1 - I; |
662 | 831 | Args[J] = stackPop(); |
663 | 831 | } |
664 | 2.05k | } |
665 | 2.54k | enterBlock(EndIf, {}, Else, std::move(Args), std::move(Type)); |
666 | 2.54k | return; |
667 | 0 | } |
668 | 19.7k | case OpCode::End: { |
669 | 19.7k | auto Entry = leaveBlock(); |
670 | 19.7k | if (Entry.ElseBlock) { |
671 | 1.09k | auto Block = Builder.getInsertBlock(); |
672 | 1.09k | Builder.positionAtEnd(Entry.ElseBlock); |
673 | 1.09k | enterBlock(Block, {}, {}, std::move(Entry.Args), |
674 | 1.09k | std::move(Entry.Type), std::move(Entry.ReturnPHI)); |
675 | 1.09k | Entry = leaveBlock(); |
676 | 1.09k | } |
677 | 19.7k | buildPHI(Entry.Type.second, Entry.ReturnPHI); |
678 | 19.7k | return; |
679 | 0 | } |
680 | 1.45k | case OpCode::Else: { |
681 | 1.45k | auto Entry = leaveBlock(); |
682 | 1.45k | Builder.positionAtEnd(Entry.ElseBlock); |
683 | 1.45k | enterBlock(Entry.JumpBlock, {}, {}, std::move(Entry.Args), |
684 | 1.45k | std::move(Entry.Type), std::move(Entry.ReturnPHI)); |
685 | 1.45k | return; |
686 | 0 | } |
687 | 1.32M | default: |
688 | 1.32M | break; |
689 | 1.35M | } |
690 | | |
691 | 1.32M | if (isUnreachable()) { |
692 | 417k | return; |
693 | 417k | } |
694 | | |
695 | 911k | switch (Instr.getOpCode()) { |
696 | | // Control instructions |
697 | 2.91k | case OpCode::Unreachable: |
698 | 2.91k | Builder.createBr(getTrapBB(ErrCode::Value::Unreachable)); |
699 | 2.91k | setUnreachable(); |
700 | 2.91k | Builder.positionAtEnd( |
701 | 2.91k | LLVM::BasicBlock::create(LLContext, F.Fn, "unreachable.end")); |
702 | 2.91k | break; |
703 | 36.4k | case OpCode::Nop: |
704 | 36.4k | break; |
705 | | // LEGACY-EH: remove the `Try` cases after deprecating legacy EH. |
706 | | // case OpCode::Try: |
707 | | // case OpCode::Throw: |
708 | | // case OpCode::Throw_ref: |
709 | 962 | case OpCode::Br: { |
710 | 962 | const auto Label = Instr.getJump().TargetIndex; |
711 | 962 | setLableJumpPHI(Label); |
712 | 962 | Builder.createBr(getLabel(Label)); |
713 | 962 | setUnreachable(); |
714 | 962 | Builder.positionAtEnd( |
715 | 962 | LLVM::BasicBlock::create(LLContext, F.Fn, "br.end")); |
716 | 962 | break; |
717 | 0 | } |
718 | 389 | case OpCode::Br_if: { |
719 | 389 | const auto Label = Instr.getJump().TargetIndex; |
720 | 389 | auto Cond = Builder.createICmpNE(stackPop(), LLContext.getInt32(0)); |
721 | 389 | setLableJumpPHI(Label); |
722 | 389 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "br_if.end"); |
723 | 389 | Builder.createCondBr(Cond, getLabel(Label), Next); |
724 | 389 | Builder.positionAtEnd(Next); |
725 | 389 | break; |
726 | 0 | } |
727 | 998 | case OpCode::Br_table: { |
728 | 998 | auto LabelTable = Instr.getLabelList(); |
729 | 998 | assuming(LabelTable.size() <= std::numeric_limits<uint32_t>::max()); |
730 | 998 | const auto LabelTableSize = |
731 | 998 | static_cast<uint32_t>(LabelTable.size() - 1); |
732 | 998 | auto Value = stackPop(); |
733 | 998 | setLableJumpPHI(LabelTable[LabelTableSize].TargetIndex); |
734 | 998 | auto Switch = Builder.createSwitch( |
735 | 998 | Value, getLabel(LabelTable[LabelTableSize].TargetIndex), |
736 | 998 | LabelTableSize); |
737 | 20.5k | for (uint32_t I = 0; I < LabelTableSize; ++I) { |
738 | 19.5k | setLableJumpPHI(LabelTable[I].TargetIndex); |
739 | 19.5k | Switch.addCase(LLContext.getInt32(I), |
740 | 19.5k | getLabel(LabelTable[I].TargetIndex)); |
741 | 19.5k | } |
742 | 998 | setUnreachable(); |
743 | 998 | Builder.positionAtEnd( |
744 | 998 | LLVM::BasicBlock::create(LLContext, F.Fn, "br_table.end")); |
745 | 998 | break; |
746 | 998 | } |
747 | 0 | case OpCode::Br_on_null: { |
748 | 0 | const auto Label = Instr.getJump().TargetIndex; |
749 | 0 | auto Value = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
750 | 0 | auto Cond = Builder.createICmpEQ( |
751 | 0 | Builder.createExtractElement(Value, LLContext.getInt64(1)), |
752 | 0 | LLContext.getInt64(0)); |
753 | 0 | setLableJumpPHI(Label); |
754 | 0 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "br_on_null.end"); |
755 | 0 | Builder.createCondBr(Cond, getLabel(Label), Next); |
756 | 0 | Builder.positionAtEnd(Next); |
757 | 0 | stackPush(Value); |
758 | 0 | break; |
759 | 998 | } |
760 | 0 | case OpCode::Br_on_non_null: { |
761 | 0 | const auto Label = Instr.getJump().TargetIndex; |
762 | 0 | auto Cond = Builder.createICmpNE( |
763 | 0 | Builder.createExtractElement( |
764 | 0 | Builder.createBitCast(Stack.back(), Context.Int64x2Ty), |
765 | 0 | LLContext.getInt64(1)), |
766 | 0 | LLContext.getInt64(0)); |
767 | 0 | setLableJumpPHI(Label); |
768 | 0 | auto Next = |
769 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "br_on_non_null.end"); |
770 | 0 | Builder.createCondBr(Cond, getLabel(Label), Next); |
771 | 0 | Builder.positionAtEnd(Next); |
772 | 0 | stackPop(); |
773 | 0 | break; |
774 | 998 | } |
775 | 0 | case OpCode::Br_on_cast: |
776 | 0 | case OpCode::Br_on_cast_fail: { |
777 | 0 | auto Ref = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
778 | 0 | const auto Label = Instr.getBrCast().Jump.TargetIndex; |
779 | 0 | std::array<uint8_t, 16> Buf = {0}; |
780 | 0 | std::copy_n(Instr.getBrCast().RType2.getRawData().cbegin(), 8, |
781 | 0 | Buf.begin()); |
782 | 0 | auto VType = Builder.createExtractElement( |
783 | 0 | Builder.createBitCast(LLVM::Value::getConstVector8(LLContext, Buf), |
784 | 0 | Context.Int64x2Ty), |
785 | 0 | LLContext.getInt64(0)); |
786 | 0 | auto IsRefTest = Builder.createCall( |
787 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefTest, |
788 | 0 | LLVM::Type::getFunctionType( |
789 | 0 | Context.Int32Ty, |
790 | 0 | {Context.Int64x2Ty, Context.Int64Ty}, |
791 | 0 | false)), |
792 | 0 | {Ref, VType}); |
793 | 0 | auto Cond = |
794 | 0 | (Instr.getOpCode() == OpCode::Br_on_cast) |
795 | 0 | ? Builder.createICmpNE(IsRefTest, LLContext.getInt32(0)) |
796 | 0 | : Builder.createICmpEQ(IsRefTest, LLContext.getInt32(0)); |
797 | 0 | setLableJumpPHI(Label); |
798 | 0 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "br_on_cast.end"); |
799 | 0 | Builder.createCondBr(Cond, getLabel(Label), Next); |
800 | 0 | Builder.positionAtEnd(Next); |
801 | 0 | break; |
802 | 0 | } |
803 | 748 | case OpCode::Return: |
804 | 748 | compileReturn(); |
805 | 748 | setUnreachable(); |
806 | 748 | Builder.positionAtEnd( |
807 | 748 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret.end")); |
808 | 748 | break; |
809 | 3.00k | case OpCode::Call: |
810 | 3.00k | updateInstrCount(); |
811 | 3.00k | updateGas(); |
812 | 3.00k | compileCallOp(Instr.getTargetIndex()); |
813 | 3.00k | break; |
814 | 661 | case OpCode::Call_indirect: |
815 | 661 | updateInstrCount(); |
816 | 661 | updateGas(); |
817 | 661 | compileIndirectCallOp(Instr.getSourceIndex(), Instr.getTargetIndex()); |
818 | 661 | break; |
819 | 0 | case OpCode::Return_call: |
820 | 0 | updateInstrCount(); |
821 | 0 | updateGas(); |
822 | 0 | compileReturnCallOp(Instr.getTargetIndex()); |
823 | 0 | setUnreachable(); |
824 | 0 | Builder.positionAtEnd( |
825 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret_call.end")); |
826 | 0 | break; |
827 | 0 | case OpCode::Return_call_indirect: |
828 | 0 | updateInstrCount(); |
829 | 0 | updateGas(); |
830 | 0 | compileReturnIndirectCallOp(Instr.getSourceIndex(), |
831 | 0 | Instr.getTargetIndex()); |
832 | 0 | setUnreachable(); |
833 | 0 | Builder.positionAtEnd( |
834 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret_call_indir.end")); |
835 | 0 | break; |
836 | 0 | case OpCode::Call_ref: |
837 | 0 | updateInstrCount(); |
838 | 0 | updateGas(); |
839 | 0 | compileCallRefOp(Instr.getTargetIndex()); |
840 | 0 | break; |
841 | 0 | case OpCode::Return_call_ref: |
842 | 0 | updateInstrCount(); |
843 | 0 | updateGas(); |
844 | 0 | compileReturnCallRefOp(Instr.getTargetIndex()); |
845 | 0 | setUnreachable(); |
846 | 0 | Builder.positionAtEnd( |
847 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "ret_call_ref.end")); |
848 | 0 | break; |
849 | | // LEGACY-EH: remove the `Catch` cases after deprecating legacy EH. |
850 | | // case OpCode::Catch: |
851 | | // case OpCode::Catch_all: |
852 | | // case OpCode::Try_table: |
853 | | |
854 | | // Reference Instructions |
855 | 825 | case OpCode::Ref__null: { |
856 | 825 | std::array<uint8_t, 16> Buf = {0}; |
857 | | // For null references, the dynamic type must be scaled down to the corresponding null (bottom) heap type.
858 | 825 | ValType VType; |
859 | 825 | if (Instr.getValType().isAbsHeapType()) { |
860 | 825 | switch (Instr.getValType().getHeapTypeCode()) { |
861 | 0 | case TypeCode::NullFuncRef: |
862 | 359 | case TypeCode::FuncRef: |
863 | 359 | VType = TypeCode::NullFuncRef; |
864 | 359 | break; |
865 | 0 | case TypeCode::NullExternRef: |
866 | 466 | case TypeCode::ExternRef: |
867 | 466 | VType = TypeCode::NullExternRef; |
868 | 466 | break; |
869 | 0 | case TypeCode::NullRef: |
870 | 0 | case TypeCode::AnyRef: |
871 | 0 | case TypeCode::EqRef: |
872 | 0 | case TypeCode::I31Ref: |
873 | 0 | case TypeCode::StructRef: |
874 | 0 | case TypeCode::ArrayRef: |
875 | 0 | VType = TypeCode::NullRef; |
876 | 0 | break; |
877 | 0 | default: |
878 | 0 | assumingUnreachable(); |
879 | 825 | } |
880 | 825 | } else { |
881 | 0 | assuming(Instr.getValType().getTypeIndex() < |
882 | 0 | Context.CompositeTypes.size()); |
883 | 0 | const auto *CompType = |
884 | 0 | Context.CompositeTypes[Instr.getValType().getTypeIndex()]; |
885 | 0 | assuming(CompType != nullptr); |
886 | 0 | if (CompType->isFunc()) { |
887 | 0 | VType = TypeCode::NullFuncRef; |
888 | 0 | } else { |
889 | 0 | VType = TypeCode::NullRef; |
890 | 0 | } |
891 | 0 | } |
892 | 825 | std::copy_n(VType.getRawData().cbegin(), 8, Buf.begin()); |
893 | 825 | stackPush(Builder.createBitCast( |
894 | 825 | LLVM::Value::getConstVector8(LLContext, Buf), Context.Int64x2Ty)); |
895 | 825 | break; |
896 | 825 | } |
897 | 423 | case OpCode::Ref__is_null: |
898 | 423 | stackPush(Builder.createZExt( |
899 | 423 | Builder.createICmpEQ( |
900 | 423 | Builder.createExtractElement( |
901 | 423 | Builder.createBitCast(stackPop(), Context.Int64x2Ty), |
902 | 423 | LLContext.getInt64(1)), |
903 | 423 | LLContext.getInt64(0)), |
904 | 423 | Context.Int32Ty)); |
905 | 423 | break; |
906 | 27 | case OpCode::Ref__func: |
907 | 27 | stackPush(Builder.createCall( |
908 | 27 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefFunc, |
909 | 27 | LLVM::Type::getFunctionType(Context.Int64x2Ty, |
910 | 27 | {Context.Int32Ty}, |
911 | 27 | false)), |
912 | 27 | {LLContext.getInt32(Instr.getTargetIndex())})); |
913 | 27 | break; |
914 | 0 | case OpCode::Ref__eq: { |
915 | 0 | LLVM::Value RHS = stackPop(); |
916 | 0 | LLVM::Value LHS = stackPop(); |
917 | 0 | stackPush(Builder.createZExt( |
918 | 0 | Builder.createICmpEQ( |
919 | 0 | Builder.createExtractElement(LHS, LLContext.getInt64(1)), |
920 | 0 | Builder.createExtractElement(RHS, LLContext.getInt64(1))), |
921 | 0 | Context.Int32Ty)); |
922 | 0 | break; |
923 | 825 | } |
924 | 0 | case OpCode::Ref__as_non_null: { |
925 | 0 | auto Next = |
926 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "ref_as_non_null.ok"); |
927 | 0 | Stack.back() = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
928 | 0 | auto IsNotNull = Builder.createLikely(Builder.createICmpNE( |
929 | 0 | Builder.createExtractElement(Stack.back(), LLContext.getInt64(1)), |
930 | 0 | LLContext.getInt64(0))); |
931 | 0 | Builder.createCondBr(IsNotNull, Next, |
932 | 0 | getTrapBB(ErrCode::Value::CastNullToNonNull)); |
933 | 0 | Builder.positionAtEnd(Next); |
934 | 0 | break; |
935 | 825 | } |
936 | | |
937 | | // Reference Instructions (GC proposal) |
938 | 0 | case OpCode::Struct__new: |
939 | 0 | case OpCode::Struct__new_default: { |
940 | 0 | LLVM::Value Args = LLVM::Value::getConstPointerNull(Context.Int8PtrTy); |
941 | 0 | assuming(Instr.getTargetIndex() < Context.CompositeTypes.size()); |
942 | 0 | const auto *CompType = Context.CompositeTypes[Instr.getTargetIndex()]; |
943 | 0 | assuming(CompType != nullptr && !CompType->isFunc()); |
944 | 0 | auto ArgSize = CompType->getFieldTypes().size(); |
945 | 0 | if (Instr.getOpCode() == OpCode::Struct__new) { |
946 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize, nullptr); |
947 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
948 | 0 | ArgsVec[ArgSize - I - 1] = stackPop(); |
949 | 0 | } |
950 | 0 | Args = Builder.createArray(ArgSize, kValSize); |
951 | 0 | Builder.createArrayPtrStore(ArgsVec, Args, Context.Int8Ty, kValSize); |
952 | 0 | } else { |
953 | 0 | ArgSize = 0; |
954 | 0 | } |
955 | 0 | stackPush(Builder.createCall( |
956 | 0 | Context.getIntrinsic( |
957 | 0 | Builder, Executable::Intrinsics::kStructNew, |
958 | 0 | LLVM::Type::getFunctionType( |
959 | 0 | Context.Int64x2Ty, |
960 | 0 | {Context.Int32Ty, Context.Int8PtrTy, Context.Int32Ty}, |
961 | 0 | false)), |
962 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), Args, |
963 | 0 | LLContext.getInt32(static_cast<uint32_t>(ArgSize))})); |
964 | 0 | break; |
965 | 0 | } |
966 | 0 | case OpCode::Struct__get: |
967 | 0 | case OpCode::Struct__get_u: |
968 | 0 | case OpCode::Struct__get_s: { |
969 | 0 | assuming(static_cast<size_t>(Instr.getTargetIndex()) < |
970 | 0 | Context.CompositeTypes.size()); |
971 | 0 | const auto *CompType = Context.CompositeTypes[Instr.getTargetIndex()]; |
972 | 0 | assuming(CompType != nullptr && !CompType->isFunc()); |
973 | 0 | assuming(static_cast<size_t>(Instr.getSourceIndex()) < |
974 | 0 | CompType->getFieldTypes().size()); |
975 | 0 | const auto &StorageType = |
976 | 0 | CompType->getFieldTypes()[Instr.getSourceIndex()].getStorageType(); |
977 | 0 | auto Ref = stackPop(); |
978 | 0 | auto IsSigned = (Instr.getOpCode() == OpCode::Struct__get_s) |
979 | 0 | ? LLContext.getInt8(1) |
980 | 0 | : LLContext.getInt8(0); |
981 | 0 | LLVM::Value Ret = Builder.createAlloca(Context.Int64x2Ty); |
982 | 0 | Builder.createCall( |
983 | 0 | Context.getIntrinsic( |
984 | 0 | Builder, Executable::Intrinsics::kStructGet, |
985 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
986 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
987 | 0 | Context.Int32Ty, Context.Int8Ty, |
988 | 0 | Context.Int8PtrTy}, |
989 | 0 | false)), |
990 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), |
991 | 0 | LLContext.getInt32(Instr.getSourceIndex()), IsSigned, Ret}); |
992 | |
993 | 0 | switch (StorageType.getCode()) { |
994 | 0 | case TypeCode::I8: |
995 | 0 | case TypeCode::I16: |
996 | 0 | case TypeCode::I32: { |
997 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int32Ty, Ret, |
998 | 0 | Context.Int64x2Ty)); |
999 | 0 | break; |
1000 | 0 | } |
1001 | 0 | case TypeCode::I64: { |
1002 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64Ty, Ret, |
1003 | 0 | Context.Int64x2Ty)); |
1004 | 0 | break; |
1005 | 0 | } |
1006 | 0 | case TypeCode::F32: { |
1007 | 0 | stackPush(Builder.createValuePtrLoad(Context.FloatTy, Ret, |
1008 | 0 | Context.Int64x2Ty)); |
1009 | 0 | break; |
1010 | 0 | } |
1011 | 0 | case TypeCode::F64: { |
1012 | 0 | stackPush(Builder.createValuePtrLoad(Context.DoubleTy, Ret, |
1013 | 0 | Context.Int64x2Ty)); |
1014 | 0 | break; |
1015 | 0 | } |
1016 | 0 | case TypeCode::V128: |
1017 | 0 | case TypeCode::Ref: |
1018 | 0 | case TypeCode::RefNull: { |
1019 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64x2Ty, Ret, |
1020 | 0 | Context.Int64x2Ty)); |
1021 | 0 | break; |
1022 | 0 | } |
1023 | 0 | default: |
1024 | 0 | assumingUnreachable(); |
1025 | 0 | } |
1026 | 0 | break; |
1027 | 0 | } |
1028 | 0 | case OpCode::Struct__set: { |
1029 | 0 | auto Val = stackPop(); |
1030 | 0 | auto Ref = stackPop(); |
1031 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1032 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1033 | 0 | Builder.createCall( |
1034 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kStructSet, |
1035 | 0 | LLVM::Type::getFunctionType( |
1036 | 0 | Context.VoidTy, |
1037 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1038 | 0 | Context.Int32Ty, Context.Int8PtrTy}, |
1039 | 0 | false)), |
1040 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), |
1041 | 0 | LLContext.getInt32(Instr.getSourceIndex()), Arg}); |
1042 | 0 | break; |
1043 | 0 | } |
1044 | 0 | case OpCode::Array__new: { |
1045 | 0 | auto Length = stackPop(); |
1046 | 0 | auto Val = stackPop(); |
1047 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1048 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1049 | 0 | stackPush(Builder.createCall( |
1050 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArrayNew, |
1051 | 0 | LLVM::Type::getFunctionType( |
1052 | 0 | Context.Int64x2Ty, |
1053 | 0 | {Context.Int32Ty, Context.Int32Ty, |
1054 | 0 | Context.Int8PtrTy, Context.Int32Ty}, |
1055 | 0 | false)), |
1056 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), Length, Arg, |
1057 | 0 | LLContext.getInt32(1)})); |
1058 | 0 | break; |
1059 | 0 | } |
1060 | 0 | case OpCode::Array__new_default: { |
1061 | 0 | auto Length = stackPop(); |
1062 | 0 | LLVM::Value Arg = LLVM::Value::getConstPointerNull(Context.Int8PtrTy); |
1063 | 0 | stackPush(Builder.createCall( |
1064 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArrayNew, |
1065 | 0 | LLVM::Type::getFunctionType( |
1066 | 0 | Context.Int64x2Ty, |
1067 | 0 | {Context.Int32Ty, Context.Int32Ty, |
1068 | 0 | Context.Int8PtrTy, Context.Int32Ty}, |
1069 | 0 | false)), |
1070 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), Length, Arg, |
1071 | 0 | LLContext.getInt32(0)})); |
1072 | 0 | break; |
1073 | 0 | } |
1074 | 0 | case OpCode::Array__new_fixed: { |
1075 | 0 | const auto ArgSize = Instr.getSourceIndex(); |
1076 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize, nullptr); |
1077 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
1078 | 0 | ArgsVec[ArgSize - I - 1] = stackPop(); |
1079 | 0 | } |
1080 | 0 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
1081 | 0 | Builder.createArrayPtrStore(ArgsVec, Args, Context.Int8Ty, kValSize); |
1082 | 0 | stackPush(Builder.createCall( |
1083 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArrayNew, |
1084 | 0 | LLVM::Type::getFunctionType( |
1085 | 0 | Context.Int64x2Ty, |
1086 | 0 | {Context.Int32Ty, Context.Int32Ty, |
1087 | 0 | Context.Int8PtrTy, Context.Int32Ty}, |
1088 | 0 | false)), |
1089 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), |
1090 | 0 | LLContext.getInt32(ArgSize), Args, LLContext.getInt32(ArgSize)})); |
1091 | 0 | break; |
1092 | 0 | } |
1093 | 0 | case OpCode::Array__new_data: |
1094 | 0 | case OpCode::Array__new_elem: { |
1095 | 0 | auto Length = stackPop(); |
1096 | 0 | auto Start = stackPop(); |
1097 | 0 | stackPush(Builder.createCall( |
1098 | 0 | Context.getIntrinsic( |
1099 | 0 | Builder, |
1100 | 0 | ((Instr.getOpCode() == OpCode::Array__new_data) |
1101 | 0 | ? Executable::Intrinsics::kArrayNewData |
1102 | 0 | : Executable::Intrinsics::kArrayNewElem), |
1103 | 0 | LLVM::Type::getFunctionType(Context.Int64x2Ty, |
1104 | 0 | {Context.Int32Ty, Context.Int32Ty, |
1105 | 0 | Context.Int32Ty, Context.Int32Ty}, |
1106 | 0 | false)), |
1107 | 0 | {LLContext.getInt32(Instr.getTargetIndex()), |
1108 | 0 | LLContext.getInt32(Instr.getSourceIndex()), Start, Length})); |
1109 | 0 | break; |
1110 | 0 | } |
1111 | 0 | case OpCode::Array__get: |
1112 | 0 | case OpCode::Array__get_u: |
1113 | 0 | case OpCode::Array__get_s: { |
1114 | 0 | assuming(static_cast<size_t>(Instr.getTargetIndex()) < |
1115 | 0 | Context.CompositeTypes.size()); |
1116 | 0 | const auto *CompType = Context.CompositeTypes[Instr.getTargetIndex()]; |
1117 | 0 | assuming(CompType != nullptr && !CompType->isFunc()); |
1118 | 0 | assuming(static_cast<size_t>(1) == CompType->getFieldTypes().size()); |
1119 | 0 | const auto &StorageType = CompType->getFieldTypes()[0].getStorageType(); |
1120 | 0 | auto Idx = stackPop(); |
1121 | 0 | auto Ref = stackPop(); |
1122 | 0 | auto IsSigned = (Instr.getOpCode() == OpCode::Array__get_s) |
1123 | 0 | ? LLContext.getInt8(1) |
1124 | 0 | : LLContext.getInt8(0); |
1125 | 0 | LLVM::Value Ret = Builder.createAlloca(Context.Int64x2Ty); |
1126 | 0 | Builder.createCall( |
1127 | 0 | Context.getIntrinsic( |
1128 | 0 | Builder, Executable::Intrinsics::kArrayGet, |
1129 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1130 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1131 | 0 | Context.Int32Ty, Context.Int8Ty, |
1132 | 0 | Context.Int8PtrTy}, |
1133 | 0 | false)), |
1134 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), Idx, IsSigned, |
1135 | 0 | Ret}); |
1136 | |
1137 | 0 | switch (StorageType.getCode()) { |
1138 | 0 | case TypeCode::I8: |
1139 | 0 | case TypeCode::I16: |
1140 | 0 | case TypeCode::I32: { |
1141 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int32Ty, Ret, |
1142 | 0 | Context.Int64x2Ty)); |
1143 | 0 | break; |
1144 | 0 | } |
1145 | 0 | case TypeCode::I64: { |
1146 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64Ty, Ret, |
1147 | 0 | Context.Int64x2Ty)); |
1148 | 0 | break; |
1149 | 0 | } |
1150 | 0 | case TypeCode::F32: { |
1151 | 0 | stackPush(Builder.createValuePtrLoad(Context.FloatTy, Ret, |
1152 | 0 | Context.Int64x2Ty)); |
1153 | 0 | break; |
1154 | 0 | } |
1155 | 0 | case TypeCode::F64: { |
1156 | 0 | stackPush(Builder.createValuePtrLoad(Context.DoubleTy, Ret, |
1157 | 0 | Context.Int64x2Ty)); |
1158 | 0 | break; |
1159 | 0 | } |
1160 | 0 | case TypeCode::V128: |
1161 | 0 | case TypeCode::Ref: |
1162 | 0 | case TypeCode::RefNull: { |
1163 | 0 | stackPush(Builder.createValuePtrLoad(Context.Int64x2Ty, Ret, |
1164 | 0 | Context.Int64x2Ty)); |
1165 | 0 | break; |
1166 | 0 | } |
1167 | 0 | default: |
1168 | 0 | assumingUnreachable(); |
1169 | 0 | } |
1170 | 0 | break; |
1171 | 0 | } |
1172 | 0 | case OpCode::Array__set: { |
1173 | 0 | auto Val = stackPop(); |
1174 | 0 | auto Idx = stackPop(); |
1175 | 0 | auto Ref = stackPop(); |
1176 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1177 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1178 | 0 | Builder.createCall( |
1179 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kArraySet, |
1180 | 0 | LLVM::Type::getFunctionType( |
1181 | 0 | Context.VoidTy, |
1182 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1183 | 0 | Context.Int32Ty, Context.Int8PtrTy}, |
1184 | 0 | false)), |
1185 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), Idx, Arg}); |
1186 | 0 | break; |
1187 | 0 | } |
1188 | 0 | case OpCode::Array__len: { |
1189 | 0 | auto Ref = stackPop(); |
1190 | 0 | stackPush(Builder.createCall( |
1191 | 0 | Context.getIntrinsic( |
1192 | 0 | Builder, Executable::Intrinsics::kArrayLen, |
1193 | 0 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1194 | 0 | {Context.Int64x2Ty}, false)), |
1195 | 0 | {Ref})); |
1196 | 0 | break; |
1197 | 0 | } |
1198 | 0 | case OpCode::Array__fill: { |
1199 | 0 | auto Cnt = stackPop(); |
1200 | 0 | auto Val = stackPop(); |
1201 | 0 | auto Off = stackPop(); |
1202 | 0 | auto Ref = stackPop(); |
1203 | 0 | LLVM::Value Arg = Builder.createAlloca(Context.Int64x2Ty); |
1204 | 0 | Builder.createValuePtrStore(Val, Arg, Context.Int64x2Ty); |
1205 | 0 | Builder.createCall( |
1206 | 0 | Context.getIntrinsic( |
1207 | 0 | Builder, Executable::Intrinsics::kArrayFill, |
1208 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1209 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1210 | 0 | Context.Int32Ty, Context.Int32Ty, |
1211 | 0 | Context.Int8PtrTy}, |
1212 | 0 | false)), |
1213 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), Off, Cnt, Arg}); |
1214 | 0 | break; |
1215 | 0 | } |
1216 | 0 | case OpCode::Array__copy: { |
1217 | 0 | auto Cnt = stackPop(); |
1218 | 0 | auto SrcOff = stackPop(); |
1219 | 0 | auto SrcRef = stackPop(); |
1220 | 0 | auto DstOff = stackPop(); |
1221 | 0 | auto DstRef = stackPop(); |
1222 | 0 | Builder.createCall( |
1223 | 0 | Context.getIntrinsic( |
1224 | 0 | Builder, Executable::Intrinsics::kArrayCopy, |
1225 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1226 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1227 | 0 | Context.Int32Ty, Context.Int64x2Ty, |
1228 | 0 | Context.Int32Ty, Context.Int32Ty, |
1229 | 0 | Context.Int32Ty}, |
1230 | 0 | false)), |
1231 | 0 | {DstRef, LLContext.getInt32(Instr.getTargetIndex()), DstOff, SrcRef, |
1232 | 0 | LLContext.getInt32(Instr.getSourceIndex()), SrcOff, Cnt}); |
1233 | 0 | break; |
1234 | 0 | } |
1235 | 0 | case OpCode::Array__init_data: |
1236 | 0 | case OpCode::Array__init_elem: { |
1237 | 0 | auto Cnt = stackPop(); |
1238 | 0 | auto SrcOff = stackPop(); |
1239 | 0 | auto DstOff = stackPop(); |
1240 | 0 | auto Ref = stackPop(); |
1241 | 0 | Builder.createCall( |
1242 | 0 | Context.getIntrinsic( |
1243 | 0 | Builder, |
1244 | 0 | ((Instr.getOpCode() == OpCode::Array__init_data) |
1245 | 0 | ? Executable::Intrinsics::kArrayInitData |
1246 | 0 | : Executable::Intrinsics::kArrayInitElem), |
1247 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
1248 | 0 | {Context.Int64x2Ty, Context.Int32Ty, |
1249 | 0 | Context.Int32Ty, Context.Int32Ty, |
1250 | 0 | Context.Int32Ty, Context.Int32Ty}, |
1251 | 0 | false)), |
1252 | 0 | {Ref, LLContext.getInt32(Instr.getTargetIndex()), |
1253 | 0 | LLContext.getInt32(Instr.getSourceIndex()), DstOff, SrcOff, Cnt}); |
1254 | 0 | break; |
1255 | 0 | } |
1256 | 0 | case OpCode::Ref__test: |
1257 | 0 | case OpCode::Ref__test_null: { |
1258 | 0 | auto Ref = stackPop(); |
1259 | 0 | std::array<uint8_t, 16> Buf = {0}; |
1260 | 0 | std::copy_n(Instr.getValType().getRawData().cbegin(), 8, Buf.begin()); |
1261 | 0 | auto VType = Builder.createExtractElement( |
1262 | 0 | Builder.createBitCast(LLVM::Value::getConstVector8(LLContext, Buf), |
1263 | 0 | Context.Int64x2Ty), |
1264 | 0 | LLContext.getInt64(0)); |
1265 | 0 | stackPush(Builder.createCall( |
1266 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefTest, |
1267 | 0 | LLVM::Type::getFunctionType( |
1268 | 0 | Context.Int32Ty, |
1269 | 0 | {Context.Int64x2Ty, Context.Int64Ty}, |
1270 | 0 | false)), |
1271 | 0 | {Ref, VType})); |
1272 | 0 | break; |
1273 | 0 | } |
1274 | 0 | case OpCode::Ref__cast: |
1275 | 0 | case OpCode::Ref__cast_null: { |
1276 | 0 | auto Ref = stackPop(); |
1277 | 0 | std::array<uint8_t, 16> Buf = {0}; |
1278 | 0 | std::copy_n(Instr.getValType().getRawData().cbegin(), 8, Buf.begin()); |
1279 | 0 | auto VType = Builder.createExtractElement( |
1280 | 0 | Builder.createBitCast(LLVM::Value::getConstVector8(LLContext, Buf), |
1281 | 0 | Context.Int64x2Ty), |
1282 | 0 | LLContext.getInt64(0)); |
1283 | 0 | stackPush(Builder.createCall( |
1284 | 0 | Context.getIntrinsic(Builder, Executable::Intrinsics::kRefCast, |
1285 | 0 | LLVM::Type::getFunctionType( |
1286 | 0 | Context.Int64x2Ty, |
1287 | 0 | {Context.Int64x2Ty, Context.Int64Ty}, |
1288 | 0 | false)), |
1289 | 0 | {Ref, VType})); |
1290 | 0 | break; |
1291 | 0 | } |
1292 | 0 | case OpCode::Any__convert_extern: { |
1293 | 0 | std::array<uint8_t, 16> RawRef = {0}; |
1294 | 0 | auto Ref = stackPop(); |
1295 | 0 | auto PtrVal = Builder.createExtractElement(Ref, LLContext.getInt64(1)); |
1296 | 0 | auto IsNullBB = |
1297 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "any_conv_extern.null"); |
1298 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, |
1299 | 0 | "any_conv_extern.not_null"); |
1300 | 0 | auto IsExtrefBB = LLVM::BasicBlock::create(LLContext, F.Fn, |
1301 | 0 | "any_conv_extern.is_extref"); |
1302 | 0 | auto EndBB = |
1303 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "any_conv_extern.end"); |
1304 | 0 | auto CondIsNull = Builder.createICmpEQ(PtrVal, LLContext.getInt64(0)); |
1305 | 0 | Builder.createCondBr(CondIsNull, IsNullBB, NotNullBB); |
1306 | |
1307 | 0 | Builder.positionAtEnd(IsNullBB); |
1308 | 0 | auto VT = ValType(TypeCode::RefNull, TypeCode::NullRef); |
1309 | 0 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1310 | 0 | auto Ret1 = Builder.createBitCast( |
1311 | 0 | LLVM::Value::getConstVector8(LLContext, RawRef), Context.Int64x2Ty); |
1312 | 0 | Builder.createBr(EndBB); |
1313 | |
1314 | 0 | Builder.positionAtEnd(NotNullBB); |
1315 | 0 | auto Ret2 = Builder.createBitCast( |
1316 | 0 | Builder.createInsertElement( |
1317 | 0 | Builder.createBitCast(Ref, Context.Int8x16Ty), |
1318 | 0 | LLContext.getInt8(0), LLContext.getInt64(1)), |
1319 | 0 | Context.Int64x2Ty); |
1320 | 0 | auto HType = Builder.createExtractElement( |
1321 | 0 | Builder.createBitCast(Ret2, Context.Int8x16Ty), |
1322 | 0 | LLContext.getInt64(3)); |
1323 | 0 | auto CondIsExtref = Builder.createOr( |
1324 | 0 | Builder.createICmpEQ(HType, LLContext.getInt8(static_cast<uint8_t>( |
1325 | 0 | TypeCode::ExternRef))), |
1326 | 0 | Builder.createICmpEQ(HType, LLContext.getInt8(static_cast<uint8_t>( |
1327 | 0 | TypeCode::NullExternRef)))); |
1328 | 0 | Builder.createCondBr(CondIsExtref, IsExtrefBB, EndBB); |
1329 | |
1330 | 0 | Builder.positionAtEnd(IsExtrefBB); |
1331 | 0 | VT = ValType(TypeCode::Ref, TypeCode::AnyRef); |
1332 | 0 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1333 | 0 | auto Ret3 = Builder.createInsertElement( |
1334 | 0 | Builder.createBitCast( |
1335 | 0 | LLVM::Value::getConstVector8(LLContext, RawRef), |
1336 | 0 | Context.Int64x2Ty), |
1337 | 0 | PtrVal, LLContext.getInt64(1)); |
1338 | 0 | Builder.createBr(EndBB); |
1339 | |
1340 | 0 | Builder.positionAtEnd(EndBB); |
1341 | 0 | auto Ret = Builder.createPHI(Context.Int64x2Ty); |
1342 | 0 | Ret.addIncoming(Ret1, IsNullBB); |
1343 | 0 | Ret.addIncoming(Ret2, NotNullBB); |
1344 | 0 | Ret.addIncoming(Ret3, IsExtrefBB); |
1345 | 0 | stackPush(Ret); |
1346 | 0 | break; |
1347 | 0 | } |
1348 | 0 | case OpCode::Extern__convert_any: { |
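 | | // extern.convert_any: a null anyref becomes a constant null externref; |
 | | // a non-null reference only has byte 1 set to 1 (marking it external), |
 | | // leaving the payload untouched. |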
1349 | 0 | std::array<uint8_t, 16> RawRef = {0}; |
1350 | 0 | auto Ref = stackPop(); |
1351 | 0 | auto IsNullBB = |
1352 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "extern_conv_any.null"); |
1353 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, |
1354 | 0 | "extern_conv_any.not_null"); |
1355 | 0 | auto EndBB = |
1356 | 0 | LLVM::BasicBlock::create(LLContext, F.Fn, "extern_conv_any.end"); |
1357 | 0 | auto CondIsNull = Builder.createICmpEQ( |
1358 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
1359 | 0 | LLContext.getInt64(0)); |
1360 | 0 | Builder.createCondBr(CondIsNull, IsNullBB, NotNullBB); |
1361 | |
1362 | 0 | Builder.positionAtEnd(IsNullBB); |
1363 | 0 | auto VT = ValType(TypeCode::RefNull, TypeCode::NullExternRef); |
1364 | 0 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1365 | 0 | auto Ret1 = Builder.createBitCast( |
1366 | 0 | LLVM::Value::getConstVector8(LLContext, RawRef), Context.Int64x2Ty); |
1367 | 0 | Builder.createBr(EndBB); |
1368 | |
1369 | 0 | Builder.positionAtEnd(NotNullBB); |
1370 | 0 | auto Ret2 = Builder.createBitCast( |
1371 | 0 | Builder.createInsertElement( |
1372 | 0 | Builder.createBitCast(Ref, Context.Int8x16Ty), |
1373 | 0 | LLContext.getInt8(1), LLContext.getInt64(1)), |
1374 | 0 | Context.Int64x2Ty); |
1375 | 0 | Builder.createBr(EndBB); |
1376 | |
1377 | 0 | Builder.positionAtEnd(EndBB); |
1378 | 0 | auto Ret = Builder.createPHI(Context.Int64x2Ty); |
1379 | 0 | Ret.addIncoming(Ret1, IsNullBB); |
1380 | 0 | Ret.addIncoming(Ret2, NotNullBB); |
1381 | 0 | stackPush(Ret); |
1382 | 0 | break; |
1383 | 0 | } |
1384 | 0 | case OpCode::Ref__i31: { |
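 | | // ref.i31: keep the low 31 bits of the operand and set bit 31 as the |
 | | // non-null marker, then store the result in lane 1 of an i31ref-typed |
 | | // vector constant. |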
1385 | 0 | std::array<uint8_t, 16> RawRef = {0}; |
1386 | 0 | auto VT = ValType(TypeCode::Ref, TypeCode::I31Ref); |
1387 | 0 | std::copy_n(VT.getRawData().cbegin(), 8, RawRef.begin()); |
1388 | 0 | auto Ref = Builder.createBitCast( |
1389 | 0 | LLVM::Value::getConstVector8(LLContext, RawRef), Context.Int64x2Ty); |
1390 | 0 | auto Val = Builder.createZExt( |
1391 | 0 | Builder.createOr( |
1392 | 0 | Builder.createAnd(stackPop(), LLContext.getInt32(0x7FFFFFFFU)), |
1393 | 0 | LLContext.getInt32(0x80000000U)), |
1394 | 0 | Context.Int64Ty); |
1395 | 0 | stackPush(Builder.createInsertElement(Ref, Val, LLContext.getInt64(1))); |
1396 | 0 | break; |
1397 | 0 | } |
1398 | 0 | case OpCode::I31__get_s: { |
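 | | // i31.get_s: trap unless bit 31 (the non-null marker) is set, then |
 | | // sign-extend the 31-bit payload to 32 bits by copying bit 30 into |
 | | // bit 31, e.g. payload 0x40000000 becomes 0xC0000000 (-1073741824). |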
1399 | 0 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "i31.get.ok"); |
1400 | 0 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
1401 | 0 | auto Val = Builder.createTrunc( |
1402 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
1403 | 0 | Context.Int32Ty); |
1404 | 0 | auto IsNotNull = Builder.createLikely(Builder.createICmpNE( |
1405 | 0 | Builder.createAnd(Val, LLContext.getInt32(0x80000000U)), |
1406 | 0 | LLContext.getInt32(0))); |
1407 | 0 | Builder.createCondBr(IsNotNull, Next, |
1408 | 0 | getTrapBB(ErrCode::Value::AccessNullI31)); |
1409 | 0 | Builder.positionAtEnd(Next); |
1410 | 0 | Val = Builder.createAnd(Val, LLContext.getInt32(0x7FFFFFFFU)); |
1411 | 0 | stackPush(Builder.createOr( |
1412 | 0 | Val, Builder.createShl( |
1413 | 0 | Builder.createAnd(Val, LLContext.getInt32(0x40000000U)), |
1414 | 0 | LLContext.getInt32(1)))); |
1415 | 0 | break; |
1416 | 0 | } |
1417 | 0 | case OpCode::I31__get_u: { |
1418 | 0 | auto Next = LLVM::BasicBlock::create(LLContext, F.Fn, "i31.get.ok"); |
1419 | 0 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
1420 | 0 | auto Val = Builder.createTrunc( |
1421 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
1422 | 0 | Context.Int32Ty); |
1423 | 0 | auto IsNotNull = Builder.createLikely(Builder.createICmpNE( |
1424 | 0 | Builder.createAnd(Val, LLContext.getInt32(0x80000000U)), |
1425 | 0 | LLContext.getInt32(0))); |
1426 | 0 | Builder.createCondBr(IsNotNull, Next, |
1427 | 0 | getTrapBB(ErrCode::Value::AccessNullI31)); |
1428 | 0 | Builder.positionAtEnd(Next); |
1429 | 0 | stackPush(Builder.createAnd(Val, LLContext.getInt32(0x7FFFFFFFU))); |
1430 | 0 | break; |
1431 | 0 | } |
1432 | | |
1433 | | // Parametric Instructions |
1434 | 3.49k | case OpCode::Drop: |
1435 | 3.49k | stackPop(); |
1436 | 3.49k | break; |
1437 | 622 | case OpCode::Select: |
1438 | 1.04k | case OpCode::Select_t: { |
1439 | 1.04k | auto Cond = Builder.createICmpNE(stackPop(), LLContext.getInt32(0)); |
1440 | 1.04k | auto False = stackPop(); |
1441 | 1.04k | auto True = stackPop(); |
1442 | 1.04k | stackPush(Builder.createSelect(Cond, True, False)); |
1443 | 1.04k | break; |
1444 | 622 | } |
1445 | | |
1446 | | // Variable Instructions |
1447 | 12.8k | case OpCode::Local__get: { |
1448 | 12.8k | const auto &L = Local[Instr.getTargetIndex()]; |
1449 | 12.8k | stackPush(Builder.createLoad(L.first, L.second)); |
1450 | 12.8k | break; |
1451 | 622 | } |
1452 | 4.60k | case OpCode::Local__set: |
1453 | 4.60k | Builder.createStore(stackPop(), Local[Instr.getTargetIndex()].second); |
1454 | 4.60k | break; |
1455 | 771 | case OpCode::Local__tee: |
1456 | 771 | Builder.createStore(Stack.back(), Local[Instr.getTargetIndex()].second); |
1457 | 771 | break; |
1458 | 309 | case OpCode::Global__get: { |
1459 | 309 | const auto G = |
1460 | 309 | Context.getGlobal(Builder, ExecCtx, Instr.getTargetIndex()); |
1461 | 309 | stackPush(Builder.createLoad(G.first, G.second)); |
1462 | 309 | break; |
1463 | 622 | } |
1464 | 51 | case OpCode::Global__set: |
1465 | 51 | Builder.createStore( |
1466 | 51 | stackPop(), |
1467 | 51 | Context.getGlobal(Builder, ExecCtx, Instr.getTargetIndex()).second); |
1468 | 51 | break; |
1469 | | |
1470 | | // Table Instructions |
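 | | // Table operations (and the memory.size/grow and bulk-memory ops |
 | | // below) are not lowered inline: each one calls into the runtime |
 | | // through its Executable::Intrinsics entry, fetched with |
 | | // Context.getIntrinsic. |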
1471 | 31 | case OpCode::Table__get: { |
1472 | 31 | auto Idx = stackPop(); |
1473 | 31 | stackPush(Builder.createCall( |
1474 | 31 | Context.getIntrinsic( |
1475 | 31 | Builder, Executable::Intrinsics::kTableGet, |
1476 | 31 | LLVM::Type::getFunctionType(Context.Int64x2Ty, |
1477 | 31 | {Context.Int32Ty, Context.Int32Ty}, |
1478 | 31 | false)), |
1479 | 31 | {LLContext.getInt32(Instr.getTargetIndex()), Idx})); |
1480 | 31 | break; |
1481 | 622 | } |
1482 | 25 | case OpCode::Table__set: { |
1483 | 25 | auto Ref = stackPop(); |
1484 | 25 | auto Idx = stackPop(); |
1485 | 25 | Builder.createCall( |
1486 | 25 | Context.getIntrinsic( |
1487 | 25 | Builder, Executable::Intrinsics::kTableSet, |
1488 | 25 | LLVM::Type::getFunctionType( |
1489 | 25 | Context.Int64Ty, |
1490 | 25 | {Context.Int32Ty, Context.Int32Ty, Context.Int64x2Ty}, |
1491 | 25 | false)), |
1492 | 25 | {LLContext.getInt32(Instr.getTargetIndex()), Idx, Ref}); |
1493 | 25 | break; |
1494 | 622 | } |
1495 | 24 | case OpCode::Table__init: { |
1496 | 24 | auto Len = stackPop(); |
1497 | 24 | auto Src = stackPop(); |
1498 | 24 | auto Dst = stackPop(); |
1499 | 24 | Builder.createCall( |
1500 | 24 | Context.getIntrinsic( |
1501 | 24 | Builder, Executable::Intrinsics::kTableInit, |
1502 | 24 | LLVM::Type::getFunctionType(Context.VoidTy, |
1503 | 24 | {Context.Int32Ty, Context.Int32Ty, |
1504 | 24 | Context.Int32Ty, Context.Int32Ty, |
1505 | 24 | Context.Int32Ty}, |
1506 | 24 | false)), |
1507 | 24 | {LLContext.getInt32(Instr.getTargetIndex()), |
1508 | 24 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1509 | 24 | break; |
1510 | 622 | } |
1511 | 28 | case OpCode::Elem__drop: { |
1512 | 28 | Builder.createCall( |
1513 | 28 | Context.getIntrinsic(Builder, Executable::Intrinsics::kElemDrop, |
1514 | 28 | LLVM::Type::getFunctionType( |
1515 | 28 | Context.VoidTy, {Context.Int32Ty}, false)), |
1516 | 28 | {LLContext.getInt32(Instr.getTargetIndex())}); |
1517 | 28 | break; |
1518 | 622 | } |
1519 | 15 | case OpCode::Table__copy: { |
1520 | 15 | auto Len = stackPop(); |
1521 | 15 | auto Src = stackPop(); |
1522 | 15 | auto Dst = stackPop(); |
1523 | 15 | Builder.createCall( |
1524 | 15 | Context.getIntrinsic( |
1525 | 15 | Builder, Executable::Intrinsics::kTableCopy, |
1526 | 15 | LLVM::Type::getFunctionType(Context.VoidTy, |
1527 | 15 | {Context.Int32Ty, Context.Int32Ty, |
1528 | 15 | Context.Int32Ty, Context.Int32Ty, |
1529 | 15 | Context.Int32Ty}, |
1530 | 15 | false)), |
1531 | 15 | {LLContext.getInt32(Instr.getTargetIndex()), |
1532 | 15 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1533 | 15 | break; |
1534 | 622 | } |
1535 | 21 | case OpCode::Table__grow: { |
1536 | 21 | auto NewSize = stackPop(); |
1537 | 21 | auto Val = stackPop(); |
1538 | 21 | stackPush(Builder.createCall( |
1539 | 21 | Context.getIntrinsic( |
1540 | 21 | Builder, Executable::Intrinsics::kTableGrow, |
1541 | 21 | LLVM::Type::getFunctionType( |
1542 | 21 | Context.Int32Ty, |
1543 | 21 | {Context.Int32Ty, Context.Int64x2Ty, Context.Int32Ty}, |
1544 | 21 | false)), |
1545 | 21 | {LLContext.getInt32(Instr.getTargetIndex()), Val, NewSize})); |
1546 | 21 | break; |
1547 | 622 | } |
1548 | 25 | case OpCode::Table__size: { |
1549 | 25 | stackPush(Builder.createCall( |
1550 | 25 | Context.getIntrinsic(Builder, Executable::Intrinsics::kTableSize, |
1551 | 25 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1552 | 25 | {Context.Int32Ty}, |
1553 | 25 | false)), |
1554 | 25 | {LLContext.getInt32(Instr.getTargetIndex())})); |
1555 | 25 | break; |
1556 | 622 | } |
1557 | 3 | case OpCode::Table__fill: { |
1558 | 3 | auto Len = stackPop(); |
1559 | 3 | auto Val = stackPop(); |
1560 | 3 | auto Off = stackPop(); |
1561 | 3 | Builder.createCall( |
1562 | 3 | Context.getIntrinsic(Builder, Executable::Intrinsics::kTableFill, |
1563 | 3 | LLVM::Type::getFunctionType( |
1564 | 3 | Context.Int32Ty, |
1565 | 3 | {Context.Int32Ty, Context.Int32Ty, |
1566 | 3 | Context.Int64x2Ty, Context.Int32Ty}, |
1567 | 3 | false)), |
1568 | 3 | {LLContext.getInt32(Instr.getTargetIndex()), Off, Val, Len}); |
1569 | 3 | break; |
1570 | 622 | } |
1571 | | |
1572 | | // Memory Instructions |
1573 | 1.13k | case OpCode::I32__load: |
1574 | 1.13k | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1575 | 1.13k | Instr.getMemoryAlign(), Context.Int32Ty); |
1576 | 1.13k | break; |
1577 | 3.60k | case OpCode::I64__load: |
1578 | 3.60k | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1579 | 3.60k | Instr.getMemoryAlign(), Context.Int64Ty); |
1580 | 3.60k | break; |
1581 | 122 | case OpCode::F32__load: |
1582 | 122 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1583 | 122 | Instr.getMemoryAlign(), Context.FloatTy); |
1584 | 122 | break; |
1585 | 257 | case OpCode::F64__load: |
1586 | 257 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1587 | 257 | Instr.getMemoryAlign(), Context.DoubleTy); |
1588 | 257 | break; |
1589 | 584 | case OpCode::I32__load8_s: |
1590 | 584 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1591 | 584 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int32Ty, |
1592 | 584 | true); |
1593 | 584 | break; |
1594 | 189 | case OpCode::I32__load8_u: |
1595 | 189 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1596 | 189 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int32Ty, |
1597 | 189 | false); |
1598 | 189 | break; |
1599 | 301 | case OpCode::I32__load16_s: |
1600 | 301 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1601 | 301 | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int32Ty, |
1602 | 301 | true); |
1603 | 301 | break; |
1604 | 1.51k | case OpCode::I32__load16_u: |
1605 | 1.51k | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1606 | 1.51k | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int32Ty, |
1607 | 1.51k | false); |
1608 | 1.51k | break; |
1609 | 677 | case OpCode::I64__load8_s: |
1610 | 677 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1611 | 677 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int64Ty, |
1612 | 677 | true); |
1613 | 677 | break; |
1614 | 419 | case OpCode::I64__load8_u: |
1615 | 419 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1616 | 419 | Instr.getMemoryAlign(), Context.Int8Ty, Context.Int64Ty, |
1617 | 419 | false); |
1618 | 419 | break; |
1619 | 420 | case OpCode::I64__load16_s: |
1620 | 420 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1621 | 420 | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int64Ty, |
1622 | 420 | true); |
1623 | 420 | break; |
1624 | 508 | case OpCode::I64__load16_u: |
1625 | 508 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1626 | 508 | Instr.getMemoryAlign(), Context.Int16Ty, Context.Int64Ty, |
1627 | 508 | false); |
1628 | 508 | break; |
1629 | 385 | case OpCode::I64__load32_s: |
1630 | 385 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1631 | 385 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int64Ty, |
1632 | 385 | true); |
1633 | 385 | break; |
1634 | 467 | case OpCode::I64__load32_u: |
1635 | 467 | compileLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1636 | 467 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int64Ty, |
1637 | 467 | false); |
1638 | 467 | break; |
1639 | 420 | case OpCode::I32__store: |
1640 | 420 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1641 | 420 | Instr.getMemoryAlign(), Context.Int32Ty); |
1642 | 420 | break; |
1643 | 1.39k | case OpCode::I64__store: |
1644 | 1.39k | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1645 | 1.39k | Instr.getMemoryAlign(), Context.Int64Ty); |
1646 | 1.39k | break; |
1647 | 75 | case OpCode::F32__store: |
1648 | 75 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1649 | 75 | Instr.getMemoryAlign(), Context.FloatTy); |
1650 | 75 | break; |
1651 | 71 | case OpCode::F64__store: |
1652 | 71 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1653 | 71 | Instr.getMemoryAlign(), Context.DoubleTy); |
1654 | 71 | break; |
1655 | 294 | case OpCode::I32__store8: |
1656 | 336 | case OpCode::I64__store8: |
1657 | 336 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1658 | 336 | Instr.getMemoryAlign(), Context.Int8Ty, true); |
1659 | 336 | break; |
1660 | 234 | case OpCode::I32__store16: |
1661 | 312 | case OpCode::I64__store16: |
1662 | 312 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1663 | 312 | Instr.getMemoryAlign(), Context.Int16Ty, true); |
1664 | 312 | break; |
1665 | 72 | case OpCode::I64__store32: |
1666 | 72 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
1667 | 72 | Instr.getMemoryAlign(), Context.Int32Ty, true); |
1668 | 72 | break; |
1669 | 532 | case OpCode::Memory__size: |
1670 | 532 | stackPush(Builder.createCall( |
1671 | 532 | Context.getIntrinsic(Builder, Executable::Intrinsics::kMemSize, |
1672 | 532 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1673 | 532 | {Context.Int32Ty}, |
1674 | 532 | false)), |
1675 | 532 | {LLContext.getInt32(Instr.getTargetIndex())})); |
1676 | 532 | break; |
1677 | 526 | case OpCode::Memory__grow: { |
1678 | 526 | auto Diff = stackPop(); |
1679 | 526 | stackPush(Builder.createCall( |
1680 | 526 | Context.getIntrinsic( |
1681 | 526 | Builder, Executable::Intrinsics::kMemGrow, |
1682 | 526 | LLVM::Type::getFunctionType(Context.Int32Ty, |
1683 | 526 | {Context.Int32Ty, Context.Int32Ty}, |
1684 | 526 | false)), |
1685 | 526 | {LLContext.getInt32(Instr.getTargetIndex()), Diff})); |
1686 | 526 | break; |
1687 | 234 | } |
1688 | 23 | case OpCode::Memory__init: { |
1689 | 23 | auto Len = stackPop(); |
1690 | 23 | auto Src = stackPop(); |
1691 | 23 | auto Dst = stackPop(); |
1692 | 23 | Builder.createCall( |
1693 | 23 | Context.getIntrinsic( |
1694 | 23 | Builder, Executable::Intrinsics::kMemInit, |
1695 | 23 | LLVM::Type::getFunctionType(Context.VoidTy, |
1696 | 23 | {Context.Int32Ty, Context.Int32Ty, |
1697 | 23 | Context.Int32Ty, Context.Int32Ty, |
1698 | 23 | Context.Int32Ty}, |
1699 | 23 | false)), |
1700 | 23 | {LLContext.getInt32(Instr.getTargetIndex()), |
1701 | 23 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1702 | 23 | break; |
1703 | 234 | } |
1704 | 22 | case OpCode::Data__drop: { |
1705 | 22 | Builder.createCall( |
1706 | 22 | Context.getIntrinsic(Builder, Executable::Intrinsics::kDataDrop, |
1707 | 22 | LLVM::Type::getFunctionType( |
1708 | 22 | Context.VoidTy, {Context.Int32Ty}, false)), |
1709 | 22 | {LLContext.getInt32(Instr.getTargetIndex())}); |
1710 | 22 | break; |
1711 | 234 | } |
1712 | 367 | case OpCode::Memory__copy: { |
1713 | 367 | auto Len = stackPop(); |
1714 | 367 | auto Src = stackPop(); |
1715 | 367 | auto Dst = stackPop(); |
1716 | 367 | Builder.createCall( |
1717 | 367 | Context.getIntrinsic( |
1718 | 367 | Builder, Executable::Intrinsics::kMemCopy, |
1719 | 367 | LLVM::Type::getFunctionType(Context.VoidTy, |
1720 | 367 | {Context.Int32Ty, Context.Int32Ty, |
1721 | 367 | Context.Int32Ty, Context.Int32Ty, |
1722 | 367 | Context.Int32Ty}, |
1723 | 367 | false)), |
1724 | 367 | {LLContext.getInt32(Instr.getTargetIndex()), |
1725 | 367 | LLContext.getInt32(Instr.getSourceIndex()), Dst, Src, Len}); |
1726 | 367 | break; |
1727 | 234 | } |
1728 | 739 | case OpCode::Memory__fill: { |
1729 | 739 | auto Len = stackPop(); |
1730 | 739 | auto Val = Builder.createTrunc(stackPop(), Context.Int8Ty); |
1731 | 739 | auto Off = stackPop(); |
1732 | 739 | Builder.createCall( |
1733 | 739 | Context.getIntrinsic( |
1734 | 739 | Builder, Executable::Intrinsics::kMemFill, |
1735 | 739 | LLVM::Type::getFunctionType(Context.VoidTy, |
1736 | 739 | {Context.Int32Ty, Context.Int32Ty, |
1737 | 739 | Context.Int8Ty, Context.Int32Ty}, |
1738 | 739 | false)), |
1739 | 739 | {LLContext.getInt32(Instr.getTargetIndex()), Off, Val, Len}); |
1740 | 739 | break; |
1741 | 234 | } |
1742 | | |
1743 | | // Const Numeric Instructions |
1744 | 490k | case OpCode::I32__const: |
1745 | 490k | stackPush(LLContext.getInt32(Instr.getNum().get<uint32_t>())); |
1746 | 490k | break; |
1747 | 78.0k | case OpCode::I64__const: |
1748 | 78.0k | stackPush(LLContext.getInt64(Instr.getNum().get<uint64_t>())); |
1749 | 78.0k | break; |
1750 | 13.4k | case OpCode::F32__const: |
1751 | 13.4k | stackPush(LLContext.getFloat(Instr.getNum().get<float>())); |
1752 | 13.4k | break; |
1753 | 6.35k | case OpCode::F64__const: |
1754 | 6.35k | stackPush(LLContext.getDouble(Instr.getNum().get<double>())); |
1755 | 6.35k | break; |
1756 | | |
1757 | | // Unary Numeric Instructions |
1758 | 6.51k | case OpCode::I32__eqz: |
1759 | 6.51k | stackPush(Builder.createZExt( |
1760 | 6.51k | Builder.createICmpEQ(stackPop(), LLContext.getInt32(0)), |
1761 | 6.51k | Context.Int32Ty)); |
1762 | 6.51k | break; |
1763 | 1.05k | case OpCode::I64__eqz: |
1764 | 1.05k | stackPush(Builder.createZExt( |
1765 | 1.05k | Builder.createICmpEQ(stackPop(), LLContext.getInt64(0)), |
1766 | 1.05k | Context.Int32Ty)); |
1767 | 1.05k | break; |
1768 | 1.82k | case OpCode::I32__clz: |
1769 | 1.82k | assuming(LLVM::Core::Ctlz != LLVM::Core::NotIntrinsic); |
1770 | 1.82k | stackPush(Builder.createIntrinsic(LLVM::Core::Ctlz, {Context.Int32Ty}, |
1771 | 1.82k | {stackPop(), LLContext.getFalse()})); |
1772 | 1.82k | break; |
1773 | 307 | case OpCode::I64__clz: |
1774 | 307 | assuming(LLVM::Core::Ctlz != LLVM::Core::NotIntrinsic); |
1775 | 307 | stackPush(Builder.createIntrinsic(LLVM::Core::Ctlz, {Context.Int64Ty}, |
1776 | 307 | {stackPop(), LLContext.getFalse()})); |
1777 | 307 | break; |
1778 | 1.96k | case OpCode::I32__ctz: |
1779 | 1.96k | assuming(LLVM::Core::Cttz != LLVM::Core::NotIntrinsic); |
1780 | 1.96k | stackPush(Builder.createIntrinsic(LLVM::Core::Cttz, {Context.Int32Ty}, |
1781 | 1.96k | {stackPop(), LLContext.getFalse()})); |
1782 | 1.96k | break; |
1783 | 449 | case OpCode::I64__ctz: |
1784 | 449 | assuming(LLVM::Core::Cttz != LLVM::Core::NotIntrinsic); |
1785 | 449 | stackPush(Builder.createIntrinsic(LLVM::Core::Cttz, {Context.Int64Ty}, |
1786 | 449 | {stackPop(), LLContext.getFalse()})); |
1787 | 449 | break; |
1788 | 11.6k | case OpCode::I32__popcnt: |
1789 | 13.5k | case OpCode::I64__popcnt: |
1790 | 13.5k | assuming(LLVM::Core::Ctpop != LLVM::Core::NotIntrinsic); |
1791 | 13.5k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Ctpop, stackPop())); |
1792 | 13.5k | break; |
1793 | 826 | case OpCode::F32__abs: |
1794 | 1.36k | case OpCode::F64__abs: |
1795 | 1.36k | assuming(LLVM::Core::Fabs != LLVM::Core::NotIntrinsic); |
1796 | 1.36k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Fabs, stackPop())); |
1797 | 1.36k | break; |
1798 | 1.05k | case OpCode::F32__neg: |
1799 | 1.83k | case OpCode::F64__neg: |
1800 | 1.83k | stackPush(Builder.createFNeg(stackPop())); |
1801 | 1.83k | break; |
1802 | 1.73k | case OpCode::F32__ceil: |
1803 | 3.33k | case OpCode::F64__ceil: |
1804 | 3.33k | assuming(LLVM::Core::Ceil != LLVM::Core::NotIntrinsic); |
1805 | 3.33k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Ceil, stackPop())); |
1806 | 3.33k | break; |
1807 | 858 | case OpCode::F32__floor: |
1808 | 1.23k | case OpCode::F64__floor: |
1809 | 1.23k | assuming(LLVM::Core::Floor != LLVM::Core::NotIntrinsic); |
1810 | 1.23k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Floor, stackPop())); |
1811 | 1.23k | break; |
1812 | 551 | case OpCode::F32__trunc: |
1813 | 834 | case OpCode::F64__trunc: |
1814 | 834 | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
1815 | 834 | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Trunc, stackPop())); |
1816 | 834 | break; |
1817 | 820 | case OpCode::F32__nearest: |
1818 | 1.19k | case OpCode::F64__nearest: { |
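 | | // nearest: prefer llvm.roundeven on LLVM >= 12; otherwise round a |
 | | // single lane with SSE4.1 ROUNDSS/ROUNDSD (immediate 8 = round to |
 | | // nearest even, suppress precision exceptions) or NEON FRINTN, and |
 | | // finally fall back to llvm.nearbyint. |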
1819 | 1.19k | const bool IsFloat = Instr.getOpCode() == OpCode::F32__nearest; |
1820 | 1.19k | LLVM::Value Value = stackPop(); |
1821 | | |
1822 | 1.19k | #if LLVM_VERSION_MAJOR >= 12 |
1823 | 1.19k | assuming(LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic); |
1824 | 1.19k | if (LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic) { |
1825 | 1.19k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Roundeven, Value)); |
1826 | 1.19k | break; |
1827 | 1.19k | } |
1828 | 0 | #endif |
1829 | | |
1830 | | // VectorSize is only used when SSE4.1 (x86_64) or NEON (aarch64) is supported. |
1831 | 0 | [[maybe_unused]] const uint32_t VectorSize = IsFloat ? 4 : 2; |
1832 | 0 | #if defined(__x86_64__) |
1833 | 0 | if (Context.SupportSSE4_1) { |
1834 | 0 | auto Zero = LLContext.getInt64(0); |
1835 | 0 | auto VectorTy = |
1836 | 0 | LLVM::Type::getVectorType(Value.getType(), VectorSize); |
1837 | 0 | LLVM::Value Ret = LLVM::Value::getUndef(VectorTy); |
1838 | 0 | Ret = Builder.createInsertElement(Ret, Value, Zero); |
1839 | 0 | auto ID = IsFloat ? LLVM::Core::X86SSE41RoundSs |
1840 | 0 | : LLVM::Core::X86SSE41RoundSd; |
1841 | 0 | assuming(ID != LLVM::Core::NotIntrinsic); |
1842 | 0 | Ret = Builder.createIntrinsic(ID, {}, |
1843 | 0 | {Ret, Ret, LLContext.getInt32(8)}); |
1844 | 0 | Ret = Builder.createExtractElement(Ret, Zero); |
1845 | 0 | stackPush(Ret); |
1846 | 0 | break; |
1847 | 0 | } |
1848 | 0 | #endif |
1849 | | |
1850 | | #if defined(__aarch64__) |
1851 | | if (Context.SupportNEON && |
1852 | | LLVM::Core::AArch64NeonFRIntN != LLVM::Core::NotIntrinsic) { |
1853 | | auto Zero = LLContext.getInt64(0); |
1854 | | auto VectorTy = |
1855 | | LLVM::Type::getVectorType(Value.getType(), VectorSize); |
1856 | | LLVM::Value Ret = LLVM::Value::getUndef(VectorTy); |
1857 | | Ret = Builder.createInsertElement(Ret, Value, Zero); |
1858 | | Ret = |
1859 | | Builder.createUnaryIntrinsic(LLVM::Core::AArch64NeonFRIntN, Ret); |
1860 | | Ret = Builder.createExtractElement(Ret, Zero); |
1861 | | stackPush(Ret); |
1862 | | break; |
1863 | | } |
1864 | | #endif |
1865 | | |
1866 | | // Fallback case. |
1867 | | // If SSE4.1 is not supported on the x86_64 platform, or |
1868 | | // NEON is not supported on the aarch64 platform, |
1869 | | // fall back to llvm.nearbyint below. |
1870 | 0 | assuming(LLVM::Core::Nearbyint != LLVM::Core::NotIntrinsic); |
1871 | 0 | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Nearbyint, Value)); |
1872 | 0 | break; |
1873 | 0 | } |
1874 | 398 | case OpCode::F32__sqrt: |
1875 | 1.60k | case OpCode::F64__sqrt: |
1876 | 1.60k | assuming(LLVM::Core::Sqrt != LLVM::Core::NotIntrinsic); |
1877 | 1.60k | stackPush(Builder.createUnaryIntrinsic(LLVM::Core::Sqrt, stackPop())); |
1878 | 1.60k | break; |
1879 | 307 | case OpCode::I32__wrap_i64: |
1880 | 307 | stackPush(Builder.createTrunc(stackPop(), Context.Int32Ty)); |
1881 | 307 | break; |
1882 | 1.10k | case OpCode::I32__trunc_f32_s: |
1883 | 1.10k | compileSignedTrunc(Context.Int32Ty); |
1884 | 1.10k | break; |
1885 | 237 | case OpCode::I32__trunc_f64_s: |
1886 | 237 | compileSignedTrunc(Context.Int32Ty); |
1887 | 237 | break; |
1888 | 153 | case OpCode::I32__trunc_f32_u: |
1889 | 153 | compileUnsignedTrunc(Context.Int32Ty); |
1890 | 153 | break; |
1891 | 1.01k | case OpCode::I32__trunc_f64_u: |
1892 | 1.01k | compileUnsignedTrunc(Context.Int32Ty); |
1893 | 1.01k | break; |
1894 | 1.76k | case OpCode::I64__extend_i32_s: |
1895 | 1.76k | stackPush(Builder.createSExt(stackPop(), Context.Int64Ty)); |
1896 | 1.76k | break; |
1897 | 341 | case OpCode::I64__extend_i32_u: |
1898 | 341 | stackPush(Builder.createZExt(stackPop(), Context.Int64Ty)); |
1899 | 341 | break; |
1900 | 45 | case OpCode::I64__trunc_f32_s: |
1901 | 45 | compileSignedTrunc(Context.Int64Ty); |
1902 | 45 | break; |
1903 | 333 | case OpCode::I64__trunc_f64_s: |
1904 | 333 | compileSignedTrunc(Context.Int64Ty); |
1905 | 333 | break; |
1906 | 797 | case OpCode::I64__trunc_f32_u: |
1907 | 797 | compileUnsignedTrunc(Context.Int64Ty); |
1908 | 797 | break; |
1909 | 1.06k | case OpCode::I64__trunc_f64_u: |
1910 | 1.06k | compileUnsignedTrunc(Context.Int64Ty); |
1911 | 1.06k | break; |
1912 | 1.50k | case OpCode::F32__convert_i32_s: |
1913 | 1.91k | case OpCode::F32__convert_i64_s: |
1914 | 1.91k | stackPush(Builder.createSIToFP(stackPop(), Context.FloatTy)); |
1915 | 1.91k | break; |
1916 | 596 | case OpCode::F32__convert_i32_u: |
1917 | 1.55k | case OpCode::F32__convert_i64_u: |
1918 | 1.55k | stackPush(Builder.createUIToFP(stackPop(), Context.FloatTy)); |
1919 | 1.55k | break; |
1920 | 1.28k | case OpCode::F64__convert_i32_s: |
1921 | 4.63k | case OpCode::F64__convert_i64_s: |
1922 | 4.63k | stackPush(Builder.createSIToFP(stackPop(), Context.DoubleTy)); |
1923 | 4.63k | break; |
1924 | 1.33k | case OpCode::F64__convert_i32_u: |
1925 | 1.51k | case OpCode::F64__convert_i64_u: |
1926 | 1.51k | stackPush(Builder.createUIToFP(stackPop(), Context.DoubleTy)); |
1927 | 1.51k | break; |
1928 | 209 | case OpCode::F32__demote_f64: |
1929 | 209 | stackPush(Builder.createFPTrunc(stackPop(), Context.FloatTy)); |
1930 | 209 | break; |
1931 | 91 | case OpCode::F64__promote_f32: |
1932 | 91 | stackPush(Builder.createFPExt(stackPop(), Context.DoubleTy)); |
1933 | 91 | break; |
1934 | 445 | case OpCode::I32__reinterpret_f32: |
1935 | 445 | stackPush(Builder.createBitCast(stackPop(), Context.Int32Ty)); |
1936 | 445 | break; |
1937 | 673 | case OpCode::I64__reinterpret_f64: |
1938 | 673 | stackPush(Builder.createBitCast(stackPop(), Context.Int64Ty)); |
1939 | 673 | break; |
1940 | 3.69k | case OpCode::F32__reinterpret_i32: |
1941 | 3.69k | stackPush(Builder.createBitCast(stackPop(), Context.FloatTy)); |
1942 | 3.69k | break; |
1943 | 1.17k | case OpCode::F64__reinterpret_i64: |
1944 | 1.17k | stackPush(Builder.createBitCast(stackPop(), Context.DoubleTy)); |
1945 | 1.17k | break; |
1946 | 889 | case OpCode::I32__extend8_s: |
1947 | 889 | stackPush(Builder.createSExt( |
1948 | 889 | Builder.createTrunc(stackPop(), Context.Int8Ty), Context.Int32Ty)); |
1949 | 889 | break; |
1950 | 2.75k | case OpCode::I32__extend16_s: |
1951 | 2.75k | stackPush(Builder.createSExt( |
1952 | 2.75k | Builder.createTrunc(stackPop(), Context.Int16Ty), Context.Int32Ty)); |
1953 | 2.75k | break; |
1954 | 347 | case OpCode::I64__extend8_s: |
1955 | 347 | stackPush(Builder.createSExt( |
1956 | 347 | Builder.createTrunc(stackPop(), Context.Int8Ty), Context.Int64Ty)); |
1957 | 347 | break; |
1958 | 603 | case OpCode::I64__extend16_s: |
1959 | 603 | stackPush(Builder.createSExt( |
1960 | 603 | Builder.createTrunc(stackPop(), Context.Int16Ty), Context.Int64Ty)); |
1961 | 603 | break; |
1962 | 584 | case OpCode::I64__extend32_s: |
1963 | 584 | stackPush(Builder.createSExt( |
1964 | 584 | Builder.createTrunc(stackPop(), Context.Int32Ty), Context.Int64Ty)); |
1965 | 584 | break; |
1966 | | |
1967 | | // Binary Numeric Instructions |
1968 | 1.32k | case OpCode::I32__eq: |
1969 | 1.52k | case OpCode::I64__eq: { |
1970 | 1.52k | LLVM::Value RHS = stackPop(); |
1971 | 1.52k | LLVM::Value LHS = stackPop(); |
1972 | 1.52k | stackPush(Builder.createZExt(Builder.createICmpEQ(LHS, RHS), |
1973 | 1.52k | Context.Int32Ty)); |
1974 | 1.52k | break; |
1975 | 1.32k | } |
1976 | 656 | case OpCode::I32__ne: |
1977 | 681 | case OpCode::I64__ne: { |
1978 | 681 | LLVM::Value RHS = stackPop(); |
1979 | 681 | LLVM::Value LHS = stackPop(); |
1980 | 681 | stackPush(Builder.createZExt(Builder.createICmpNE(LHS, RHS), |
1981 | 681 | Context.Int32Ty)); |
1982 | 681 | break; |
1983 | 656 | } |
1984 | 4.20k | case OpCode::I32__lt_s: |
1985 | 4.73k | case OpCode::I64__lt_s: { |
1986 | 4.73k | LLVM::Value RHS = stackPop(); |
1987 | 4.73k | LLVM::Value LHS = stackPop(); |
1988 | 4.73k | stackPush(Builder.createZExt(Builder.createICmpSLT(LHS, RHS), |
1989 | 4.73k | Context.Int32Ty)); |
1990 | 4.73k | break; |
1991 | 4.20k | } |
1992 | 5.13k | case OpCode::I32__lt_u: |
1993 | 5.47k | case OpCode::I64__lt_u: { |
1994 | 5.47k | LLVM::Value RHS = stackPop(); |
1995 | 5.47k | LLVM::Value LHS = stackPop(); |
1996 | 5.47k | stackPush(Builder.createZExt(Builder.createICmpULT(LHS, RHS), |
1997 | 5.47k | Context.Int32Ty)); |
1998 | 5.47k | break; |
1999 | 5.13k | } |
2000 | 1.03k | case OpCode::I32__gt_s: |
2001 | 1.40k | case OpCode::I64__gt_s: { |
2002 | 1.40k | LLVM::Value RHS = stackPop(); |
2003 | 1.40k | LLVM::Value LHS = stackPop(); |
2004 | 1.40k | stackPush(Builder.createZExt(Builder.createICmpSGT(LHS, RHS), |
2005 | 1.40k | Context.Int32Ty)); |
2006 | 1.40k | break; |
2007 | 1.03k | } |
2008 | 5.28k | case OpCode::I32__gt_u: |
2009 | 5.40k | case OpCode::I64__gt_u: { |
2010 | 5.40k | LLVM::Value RHS = stackPop(); |
2011 | 5.40k | LLVM::Value LHS = stackPop(); |
2012 | 5.40k | stackPush(Builder.createZExt(Builder.createICmpUGT(LHS, RHS), |
2013 | 5.40k | Context.Int32Ty)); |
2014 | 5.40k | break; |
2015 | 5.28k | } |
2016 | 1.85k | case OpCode::I32__le_s: |
2017 | 2.50k | case OpCode::I64__le_s: { |
2018 | 2.50k | LLVM::Value RHS = stackPop(); |
2019 | 2.50k | LLVM::Value LHS = stackPop(); |
2020 | 2.50k | stackPush(Builder.createZExt(Builder.createICmpSLE(LHS, RHS), |
2021 | 2.50k | Context.Int32Ty)); |
2022 | 2.50k | break; |
2023 | 1.85k | } |
2024 | 458 | case OpCode::I32__le_u: |
2025 | 1.52k | case OpCode::I64__le_u: { |
2026 | 1.52k | LLVM::Value RHS = stackPop(); |
2027 | 1.52k | LLVM::Value LHS = stackPop(); |
2028 | 1.52k | stackPush(Builder.createZExt(Builder.createICmpULE(LHS, RHS), |
2029 | 1.52k | Context.Int32Ty)); |
2030 | 1.52k | break; |
2031 | 458 | } |
2032 | 1.07k | case OpCode::I32__ge_s: |
2033 | 1.11k | case OpCode::I64__ge_s: { |
2034 | 1.11k | LLVM::Value RHS = stackPop(); |
2035 | 1.11k | LLVM::Value LHS = stackPop(); |
2036 | 1.11k | stackPush(Builder.createZExt(Builder.createICmpSGE(LHS, RHS), |
2037 | 1.11k | Context.Int32Ty)); |
2038 | 1.11k | break; |
2039 | 1.07k | } |
2040 | 1.51k | case OpCode::I32__ge_u: |
2041 | 2.06k | case OpCode::I64__ge_u: { |
2042 | 2.06k | LLVM::Value RHS = stackPop(); |
2043 | 2.06k | LLVM::Value LHS = stackPop(); |
2044 | 2.06k | stackPush(Builder.createZExt(Builder.createICmpUGE(LHS, RHS), |
2045 | 2.06k | Context.Int32Ty)); |
2046 | 2.06k | break; |
2047 | 1.51k | } |
2048 | 160 | case OpCode::F32__eq: |
2049 | 216 | case OpCode::F64__eq: { |
2050 | 216 | LLVM::Value RHS = stackPop(); |
2051 | 216 | LLVM::Value LHS = stackPop(); |
2052 | 216 | stackPush(Builder.createZExt(Builder.createFCmpOEQ(LHS, RHS), |
2053 | 216 | Context.Int32Ty)); |
2054 | 216 | break; |
2055 | 160 | } |
2056 | 90 | case OpCode::F32__ne: |
2057 | 117 | case OpCode::F64__ne: { |
2058 | 117 | LLVM::Value RHS = stackPop(); |
2059 | 117 | LLVM::Value LHS = stackPop(); |
2060 | 117 | stackPush(Builder.createZExt(Builder.createFCmpUNE(LHS, RHS), |
2061 | 117 | Context.Int32Ty)); |
2062 | 117 | break; |
2063 | 90 | } |
2064 | 177 | case OpCode::F32__lt: |
2065 | 303 | case OpCode::F64__lt: { |
2066 | 303 | LLVM::Value RHS = stackPop(); |
2067 | 303 | LLVM::Value LHS = stackPop(); |
2068 | 303 | stackPush(Builder.createZExt(Builder.createFCmpOLT(LHS, RHS), |
2069 | 303 | Context.Int32Ty)); |
2070 | 303 | break; |
2071 | 177 | } |
2072 | 150 | case OpCode::F32__gt: |
2073 | 207 | case OpCode::F64__gt: { |
2074 | 207 | LLVM::Value RHS = stackPop(); |
2075 | 207 | LLVM::Value LHS = stackPop(); |
2076 | 207 | stackPush(Builder.createZExt(Builder.createFCmpOGT(LHS, RHS), |
2077 | 207 | Context.Int32Ty)); |
2078 | 207 | break; |
2079 | 150 | } |
2080 | 76 | case OpCode::F32__le: |
2081 | 178 | case OpCode::F64__le: { |
2082 | 178 | LLVM::Value RHS = stackPop(); |
2083 | 178 | LLVM::Value LHS = stackPop(); |
2084 | 178 | stackPush(Builder.createZExt(Builder.createFCmpOLE(LHS, RHS), |
2085 | 178 | Context.Int32Ty)); |
2086 | 178 | break; |
2087 | 76 | } |
2088 | 234 | case OpCode::F32__ge: |
2089 | 259 | case OpCode::F64__ge: { |
2090 | 259 | LLVM::Value RHS = stackPop(); |
2091 | 259 | LLVM::Value LHS = stackPop(); |
2092 | 259 | stackPush(Builder.createZExt(Builder.createFCmpOGE(LHS, RHS), |
2093 | 259 | Context.Int32Ty)); |
2094 | 259 | break; |
2095 | 234 | } |
2096 | 771 | case OpCode::I32__add: |
2097 | 1.22k | case OpCode::I64__add: { |
2098 | 1.22k | LLVM::Value RHS = stackPop(); |
2099 | 1.22k | LLVM::Value LHS = stackPop(); |
2100 | 1.22k | stackPush(Builder.createAdd(LHS, RHS)); |
2101 | 1.22k | break; |
2102 | 771 | } |
2103 | 1.65k | case OpCode::I32__sub: |
2104 | 2.09k | case OpCode::I64__sub: { |
2105 | 2.09k | LLVM::Value RHS = stackPop(); |
2106 | 2.09k | LLVM::Value LHS = stackPop(); |
2107 | | |
2108 | 2.09k | stackPush(Builder.createSub(LHS, RHS)); |
2109 | 2.09k | break; |
2110 | 1.65k | } |
2111 | 575 | case OpCode::I32__mul: |
2112 | 958 | case OpCode::I64__mul: { |
2113 | 958 | LLVM::Value RHS = stackPop(); |
2114 | 958 | LLVM::Value LHS = stackPop(); |
2115 | 958 | stackPush(Builder.createMul(LHS, RHS)); |
2116 | 958 | break; |
2117 | 575 | } |
2118 | 1.04k | case OpCode::I32__div_s: |
2119 | 1.39k | case OpCode::I64__div_s: { |
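 | | // Signed division traps on a zero divisor and on the single |
 | | // overflowing pair INT_MIN / -1; both checks branch to trap blocks |
 | | // and are marked unlikely via createLikely on the non-trap condition. |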
2120 | 1.39k | LLVM::Value RHS = stackPop(); |
2121 | 1.39k | LLVM::Value LHS = stackPop(); |
2122 | 1.39k | if constexpr (kForceDivCheck) { |
2123 | 1.39k | const bool Is32 = Instr.getOpCode() == OpCode::I32__div_s; |
2124 | 1.39k | LLVM::Value IntZero = |
2125 | 1.39k | Is32 ? LLContext.getInt32(0) : LLContext.getInt64(0); |
2126 | 1.39k | LLVM::Value IntMinusOne = |
2127 | 1.39k | Is32 ? LLContext.getInt32(static_cast<uint32_t>(INT32_C(-1))) |
2128 | 1.39k | : LLContext.getInt64(static_cast<uint64_t>(INT64_C(-1))); |
2129 | 1.39k | LLVM::Value IntMin = Is32 ? LLContext.getInt32(static_cast<uint32_t>( |
2130 | 1.04k | std::numeric_limits<int32_t>::min())) |
2131 | 1.39k | : LLContext.getInt64(static_cast<uint64_t>( |
2132 | 353 | std::numeric_limits<int64_t>::min())); |
2133 | | |
2134 | 1.39k | auto NoZeroBB = |
2135 | 1.39k | LLVM::BasicBlock::create(LLContext, F.Fn, "div.nozero"); |
2136 | 1.39k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "div.ok"); |
2137 | | |
2138 | 1.39k | auto IsNotZero = |
2139 | 1.39k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2140 | 1.39k | Builder.createCondBr(IsNotZero, NoZeroBB, |
2141 | 1.39k | getTrapBB(ErrCode::Value::DivideByZero)); |
2142 | | |
2143 | 1.39k | Builder.positionAtEnd(NoZeroBB); |
2144 | 1.39k | auto NotOverflow = Builder.createLikely( |
2145 | 1.39k | Builder.createOr(Builder.createICmpNE(LHS, IntMin), |
2146 | 1.39k | Builder.createICmpNE(RHS, IntMinusOne))); |
2147 | 1.39k | Builder.createCondBr(NotOverflow, OkBB, |
2148 | 1.39k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
2149 | | |
2150 | 1.39k | Builder.positionAtEnd(OkBB); |
2151 | 1.39k | } |
2152 | 1.39k | stackPush(Builder.createSDiv(LHS, RHS)); |
2153 | 1.39k | break; |
2154 | 1.04k | } |
2155 | 3.30k | case OpCode::I32__div_u: |
2156 | 3.59k | case OpCode::I64__div_u: { |
2157 | 3.59k | LLVM::Value RHS = stackPop(); |
2158 | 3.59k | LLVM::Value LHS = stackPop(); |
2159 | 3.59k | if constexpr (kForceDivCheck) { |
2160 | 3.59k | const bool Is32 = Instr.getOpCode() == OpCode::I32__div_u; |
2161 | 3.59k | LLVM::Value IntZero = |
2162 | 3.59k | Is32 ? LLContext.getInt32(0) : LLContext.getInt64(0); |
2163 | 3.59k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "div.ok"); |
2164 | | |
2165 | 3.59k | auto IsNotZero = |
2166 | 3.59k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2167 | 3.59k | Builder.createCondBr(IsNotZero, OkBB, |
2168 | 3.59k | getTrapBB(ErrCode::Value::DivideByZero)); |
2169 | 3.59k | Builder.positionAtEnd(OkBB); |
2170 | 3.59k | } |
2171 | 3.59k | stackPush(Builder.createUDiv(LHS, RHS)); |
2172 | 3.59k | break; |
2173 | 3.30k | } |
2174 | 814 | case OpCode::I32__rem_s: |
2175 | 1.24k | case OpCode::I64__rem_s: { |
2176 | 1.24k | LLVM::Value RHS = stackPop(); |
2177 | 1.24k | LLVM::Value LHS = stackPop(); |
2178 | | // Handle INT32_MIN % -1 (and INT64_MIN % -1): the result must be 0, not a trap. |
2179 | 1.24k | const bool Is32 = Instr.getOpCode() == OpCode::I32__rem_s; |
2180 | 1.24k | LLVM::Value IntMinusOne = |
2181 | 1.24k | Is32 ? LLContext.getInt32(static_cast<uint32_t>(INT32_C(-1))) |
2182 | 1.24k | : LLContext.getInt64(static_cast<uint64_t>(INT64_C(-1))); |
2183 | 1.24k | LLVM::Value IntMin = Is32 ? LLContext.getInt32(static_cast<uint32_t>( |
2184 | 814 | std::numeric_limits<int32_t>::min())) |
2185 | 1.24k | : LLContext.getInt64(static_cast<uint64_t>( |
2186 | 431 | std::numeric_limits<int64_t>::min())); |
2187 | 1.24k | LLVM::Value IntZero = |
2188 | 1.24k | Is32 ? LLContext.getInt32(0) : LLContext.getInt64(0); |
2189 | | |
2190 | 1.24k | auto NoOverflowBB = |
2191 | 1.24k | LLVM::BasicBlock::create(LLContext, F.Fn, "no.overflow"); |
2192 | 1.24k | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "end.overflow"); |
2193 | | |
2194 | 1.24k | if constexpr (kForceDivCheck) { |
2195 | 1.24k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "rem.ok"); |
2196 | | |
2197 | 1.24k | auto IsNotZero = |
2198 | 1.24k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2199 | 1.24k | Builder.createCondBr(IsNotZero, OkBB, |
2200 | 1.24k | getTrapBB(ErrCode::Value::DivideByZero)); |
2201 | 1.24k | Builder.positionAtEnd(OkBB); |
2202 | 1.24k | } |
2203 | | |
2204 | 1.24k | auto CurrBB = Builder.getInsertBlock(); |
2205 | | |
2206 | 1.24k | auto NotOverflow = Builder.createLikely( |
2207 | 1.24k | Builder.createOr(Builder.createICmpNE(LHS, IntMin), |
2208 | 1.24k | Builder.createICmpNE(RHS, IntMinusOne))); |
2209 | 1.24k | Builder.createCondBr(NotOverflow, NoOverflowBB, EndBB); |
2210 | | |
2211 | 1.24k | Builder.positionAtEnd(NoOverflowBB); |
2212 | 1.24k | auto Ret1 = Builder.createSRem(LHS, RHS); |
2213 | 1.24k | Builder.createBr(EndBB); |
2214 | | |
2215 | 1.24k | Builder.positionAtEnd(EndBB); |
2216 | 1.24k | auto Ret = Builder.createPHI(Ret1.getType()); |
2217 | 1.24k | Ret.addIncoming(Ret1, NoOverflowBB); |
2218 | 1.24k | Ret.addIncoming(IntZero, CurrBB); |
2219 | | |
2220 | 1.24k | stackPush(Ret); |
2221 | 1.24k | break; |
2222 | 814 | } |
2223 | 785 | case OpCode::I32__rem_u: |
2224 | 1.25k | case OpCode::I64__rem_u: { |
2225 | 1.25k | LLVM::Value RHS = stackPop(); |
2226 | 1.25k | LLVM::Value LHS = stackPop(); |
2227 | 1.25k | if constexpr (kForceDivCheck) { |
2228 | 1.25k | LLVM::Value IntZero = Instr.getOpCode() == OpCode::I32__rem_u |
2229 | 1.25k | ? LLContext.getInt32(0) |
2230 | 1.25k | : LLContext.getInt64(0); |
2231 | 1.25k | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "rem.ok"); |
2232 | | |
2233 | 1.25k | auto IsNotZero = |
2234 | 1.25k | Builder.createLikely(Builder.createICmpNE(RHS, IntZero)); |
2235 | 1.25k | Builder.createCondBr(IsNotZero, OkBB, |
2236 | 1.25k | getTrapBB(ErrCode::Value::DivideByZero)); |
2237 | 1.25k | Builder.positionAtEnd(OkBB); |
2238 | 1.25k | } |
2239 | 1.25k | stackPush(Builder.createURem(LHS, RHS)); |
2240 | 1.25k | break; |
2241 | 785 | } |
2242 | 631 | case OpCode::I32__and: |
2243 | 1.98k | case OpCode::I64__and: { |
2244 | 1.98k | LLVM::Value RHS = stackPop(); |
2245 | 1.98k | LLVM::Value LHS = stackPop(); |
2246 | 1.98k | stackPush(Builder.createAnd(LHS, RHS)); |
2247 | 1.98k | break; |
2248 | 631 | } |
2249 | 953 | case OpCode::I32__or: |
2250 | 1.28k | case OpCode::I64__or: { |
2251 | 1.28k | LLVM::Value RHS = stackPop(); |
2252 | 1.28k | LLVM::Value LHS = stackPop(); |
2253 | 1.28k | stackPush(Builder.createOr(LHS, RHS)); |
2254 | 1.28k | break; |
2255 | 953 | } |
2256 | 916 | case OpCode::I32__xor: |
2257 | 1.21k | case OpCode::I64__xor: { |
2258 | 1.21k | LLVM::Value RHS = stackPop(); |
2259 | 1.21k | LLVM::Value LHS = stackPop(); |
2260 | 1.21k | stackPush(Builder.createXor(LHS, RHS)); |
2261 | 1.21k | break; |
2262 | 916 | } |
2263 | 1.31k | case OpCode::I32__shl: |
2264 | 1.52k | case OpCode::I64__shl: { |
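 | | // Wasm shift counts are taken modulo the bit width, so the count is |
 | | // masked with 31/63 before the LLVM shift (whose result is poison for |
 | | // out-of-range counts); e.g. an i32.shl by 33 shifts by 1. The same |
 | | // masking is applied to shr_s and shr_u below. |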
2265 | 1.52k | LLVM::Value Mask = Instr.getOpCode() == OpCode::I32__shl |
2266 | 1.52k | ? LLContext.getInt32(31) |
2267 | 1.52k | : LLContext.getInt64(63); |
2268 | 1.52k | LLVM::Value RHS = Builder.createAnd(stackPop(), Mask); |
2269 | 1.52k | LLVM::Value LHS = stackPop(); |
2270 | 1.52k | stackPush(Builder.createShl(LHS, RHS)); |
2271 | 1.52k | break; |
2272 | 1.31k | } |
2273 | 1.13k | case OpCode::I32__shr_s: |
2274 | 1.52k | case OpCode::I64__shr_s: { |
2275 | 1.52k | LLVM::Value Mask = Instr.getOpCode() == OpCode::I32__shr_s |
2276 | 1.52k | ? LLContext.getInt32(31) |
2277 | 1.52k | : LLContext.getInt64(63); |
2278 | 1.52k | LLVM::Value RHS = Builder.createAnd(stackPop(), Mask); |
2279 | 1.52k | LLVM::Value LHS = stackPop(); |
2280 | 1.52k | stackPush(Builder.createAShr(LHS, RHS)); |
2281 | 1.52k | break; |
2282 | 1.13k | } |
2283 | 3.31k | case OpCode::I32__shr_u: |
2284 | 3.58k | case OpCode::I64__shr_u: { |
2285 | 3.58k | LLVM::Value Mask = Instr.getOpCode() == OpCode::I32__shr_u |
2286 | 3.58k | ? LLContext.getInt32(31) |
2287 | 3.58k | : LLContext.getInt64(63); |
2288 | 3.58k | LLVM::Value RHS = Builder.createAnd(stackPop(), Mask); |
2289 | 3.58k | LLVM::Value LHS = stackPop(); |
2290 | 3.58k | stackPush(Builder.createLShr(LHS, RHS)); |
2291 | 3.58k | break; |
2292 | 3.31k | } |
2293 | 2.25k | case OpCode::I32__rotl: { |
2294 | 2.25k | LLVM::Value RHS = stackPop(); |
2295 | 2.25k | LLVM::Value LHS = stackPop(); |
2296 | 2.25k | assuming(LLVM::Core::FShl != LLVM::Core::NotIntrinsic); |
2297 | 2.25k | stackPush(Builder.createIntrinsic(LLVM::Core::FShl, {Context.Int32Ty}, |
2298 | 2.25k | {LHS, LHS, RHS})); |
2299 | 2.25k | break; |
2300 | 2.25k | } |
2301 | 671 | case OpCode::I32__rotr: { |
2302 | 671 | LLVM::Value RHS = stackPop(); |
2303 | 671 | LLVM::Value LHS = stackPop(); |
2304 | 671 | assuming(LLVM::Core::FShr != LLVM::Core::NotIntrinsic); |
2305 | 671 | stackPush(Builder.createIntrinsic(LLVM::Core::FShr, {Context.Int32Ty}, |
2306 | 671 | {LHS, LHS, RHS})); |
2307 | 671 | break; |
2308 | 671 | } |
2309 | 677 | case OpCode::I64__rotl: { |
2310 | 677 | LLVM::Value RHS = stackPop(); |
2311 | 677 | LLVM::Value LHS = stackPop(); |
2312 | 677 | assuming(LLVM::Core::FShl != LLVM::Core::NotIntrinsic); |
2313 | 677 | stackPush(Builder.createIntrinsic(LLVM::Core::FShl, {Context.Int64Ty}, |
2314 | 677 | {LHS, LHS, RHS})); |
2315 | 677 | break; |
2316 | 677 | } |
2317 | 1.33k | case OpCode::I64__rotr: { |
2318 | 1.33k | LLVM::Value RHS = stackPop(); |
2319 | 1.33k | LLVM::Value LHS = stackPop(); |
2320 | 1.33k | assuming(LLVM::Core::FShr != LLVM::Core::NotIntrinsic); |
2321 | 1.33k | stackPush(Builder.createIntrinsic(LLVM::Core::FShr, {Context.Int64Ty}, |
2322 | 1.33k | {LHS, LHS, RHS})); |
2323 | 1.33k | break; |
2324 | 1.33k | } |
2325 | 272 | case OpCode::F32__add: |
2326 | 568 | case OpCode::F64__add: { |
2327 | 568 | LLVM::Value RHS = stackPop(); |
2328 | 568 | LLVM::Value LHS = stackPop(); |
2329 | 568 | stackPush(Builder.createFAdd(LHS, RHS)); |
2330 | 568 | break; |
2331 | 272 | } |
2332 | 149 | case OpCode::F32__sub: |
2333 | 436 | case OpCode::F64__sub: { |
2334 | 436 | LLVM::Value RHS = stackPop(); |
2335 | 436 | LLVM::Value LHS = stackPop(); |
2336 | 436 | stackPush(Builder.createFSub(LHS, RHS)); |
2337 | 436 | break; |
2338 | 149 | } |
2339 | 525 | case OpCode::F32__mul: |
2340 | 663 | case OpCode::F64__mul: { |
2341 | 663 | LLVM::Value RHS = stackPop(); |
2342 | 663 | LLVM::Value LHS = stackPop(); |
2343 | 663 | stackPush(Builder.createFMul(LHS, RHS)); |
2344 | 663 | break; |
2345 | 525 | } |
2346 | 224 | case OpCode::F32__div: |
2347 | 561 | case OpCode::F64__div: { |
2348 | 561 | LLVM::Value RHS = stackPop(); |
2349 | 561 | LLVM::Value LHS = stackPop(); |
2350 | 561 | stackPush(Builder.createFDiv(LHS, RHS)); |
2351 | 561 | break; |
2352 | 224 | } |
2353 | 305 | case OpCode::F32__min: |
2354 | 690 | case OpCode::F64__min: { |
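 | | // min with full NaN/zero semantics: if either operand is NaN, the |
 | | // FAdd arm propagates a NaN; if the operands compare equal (covering |
 | | // +0 vs -0), the bit patterns are OR-ed so that -0 wins; otherwise |
 | | // llvm.minnum gives the smaller value. |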
2355 | 690 | LLVM::Value RHS = stackPop(); |
2356 | 690 | LLVM::Value LHS = stackPop(); |
2357 | 690 | auto FpTy = Instr.getOpCode() == OpCode::F32__min ? Context.FloatTy |
2358 | 690 | : Context.DoubleTy; |
2359 | 690 | auto IntTy = Instr.getOpCode() == OpCode::F32__min ? Context.Int32Ty |
2360 | 690 | : Context.Int64Ty; |
2361 | | |
2362 | 690 | auto UEQ = Builder.createFCmpUEQ(LHS, RHS); |
2363 | 690 | auto UNO = Builder.createFCmpUNO(LHS, RHS); |
2364 | | |
2365 | 690 | auto LHSInt = Builder.createBitCast(LHS, IntTy); |
2366 | 690 | auto RHSInt = Builder.createBitCast(RHS, IntTy); |
2367 | 690 | auto OrInt = Builder.createOr(LHSInt, RHSInt); |
2368 | 690 | auto OrFp = Builder.createBitCast(OrInt, FpTy); |
2369 | | |
2370 | 690 | auto AddFp = Builder.createFAdd(LHS, RHS); |
2371 | | |
2372 | 690 | assuming(LLVM::Core::MinNum != LLVM::Core::NotIntrinsic); |
2373 | 690 | auto MinFp = Builder.createIntrinsic(LLVM::Core::MinNum, |
2374 | 690 | {LHS.getType()}, {LHS, RHS}); |
2375 | | |
2376 | 690 | auto Ret = Builder.createSelect( |
2377 | 690 | UEQ, Builder.createSelect(UNO, AddFp, OrFp), MinFp); |
2378 | 690 | stackPush(Ret); |
2379 | 690 | break; |
2380 | 690 | } |
2381 | 331 | case OpCode::F32__max: |
2382 | 673 | case OpCode::F64__max: { |
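 | | // max mirrors min above, but AND-s the bit patterns in the equal |
 | | // case so that +0 wins over -0. |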
2383 | 673 | LLVM::Value RHS = stackPop(); |
2384 | 673 | LLVM::Value LHS = stackPop(); |
2385 | 673 | auto FpTy = Instr.getOpCode() == OpCode::F32__max ? Context.FloatTy |
2386 | 673 | : Context.DoubleTy; |
2387 | 673 | auto IntTy = Instr.getOpCode() == OpCode::F32__max ? Context.Int32Ty |
2388 | 673 | : Context.Int64Ty; |
2389 | | |
2390 | 673 | auto UEQ = Builder.createFCmpUEQ(LHS, RHS); |
2391 | 673 | auto UNO = Builder.createFCmpUNO(LHS, RHS); |
2392 | | |
2393 | 673 | auto LHSInt = Builder.createBitCast(LHS, IntTy); |
2394 | 673 | auto RHSInt = Builder.createBitCast(RHS, IntTy); |
2395 | 673 | auto AndInt = Builder.createAnd(LHSInt, RHSInt); |
2396 | 673 | auto AndFp = Builder.createBitCast(AndInt, FpTy); |
2397 | | |
2398 | 673 | auto AddFp = Builder.createFAdd(LHS, RHS); |
2399 | | |
2400 | 673 | assuming(LLVM::Core::MaxNum != LLVM::Core::NotIntrinsic); |
2401 | 673 | auto MaxFp = Builder.createIntrinsic(LLVM::Core::MaxNum, |
2402 | 673 | {LHS.getType()}, {LHS, RHS}); |
2403 | | |
2404 | 673 | auto Ret = Builder.createSelect( |
2405 | 673 | UEQ, Builder.createSelect(UNO, AddFp, AndFp), MaxFp); |
2406 | 673 | stackPush(Ret); |
2407 | 673 | break; |
2408 | 673 | } |
2409 | 429 | case OpCode::F32__copysign: |
2410 | 825 | case OpCode::F64__copysign: { |
2411 | 825 | LLVM::Value RHS = stackPop(); |
2412 | 825 | LLVM::Value LHS = stackPop(); |
2413 | 825 | assuming(LLVM::Core::CopySign != LLVM::Core::NotIntrinsic); |
2414 | 825 | stackPush(Builder.createIntrinsic(LLVM::Core::CopySign, {LHS.getType()}, |
2415 | 825 | {LHS, RHS})); |
2416 | 825 | break; |
2417 | 825 | } |
2418 | | |
2419 | | // Saturating Truncation Numeric Instructions |
2420 | 187 | case OpCode::I32__trunc_sat_f32_s: |
2421 | 187 | compileSignedTruncSat(Context.Int32Ty); |
2422 | 187 | break; |
2423 | 95 | case OpCode::I32__trunc_sat_f32_u: |
2424 | 95 | compileUnsignedTruncSat(Context.Int32Ty); |
2425 | 95 | break; |
2426 | 312 | case OpCode::I32__trunc_sat_f64_s: |
2427 | 312 | compileSignedTruncSat(Context.Int32Ty); |
2428 | 312 | break; |
2429 | 194 | case OpCode::I32__trunc_sat_f64_u: |
2430 | 194 | compileUnsignedTruncSat(Context.Int32Ty); |
2431 | 194 | break; |
2432 | 350 | case OpCode::I64__trunc_sat_f32_s: |
2433 | 350 | compileSignedTruncSat(Context.Int64Ty); |
2434 | 350 | break; |
2435 | 351 | case OpCode::I64__trunc_sat_f32_u: |
2436 | 351 | compileUnsignedTruncSat(Context.Int64Ty); |
2437 | 351 | break; |
2438 | 290 | case OpCode::I64__trunc_sat_f64_s: |
2439 | 290 | compileSignedTruncSat(Context.Int64Ty); |
2440 | 290 | break; |
2441 | 335 | case OpCode::I64__trunc_sat_f64_u: |
2442 | 335 | compileUnsignedTruncSat(Context.Int64Ty); |
2443 | 335 | break; |
2444 | | |
2445 | | // SIMD Memory Instructions |
2446 | 5.48k | case OpCode::V128__load: |
2447 | 5.48k | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2448 | 5.48k | Instr.getMemoryAlign(), Context.Int128x1Ty); |
2449 | 5.48k | break; |
2450 | 196 | case OpCode::V128__load8x8_s: |
2451 | 196 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2452 | 196 | Instr.getMemoryAlign(), |
2453 | 196 | LLVM::Type::getVectorType(Context.Int8Ty, 8), |
2454 | 196 | Context.Int16x8Ty, true); |
2455 | 196 | break; |
2456 | 51 | case OpCode::V128__load8x8_u: |
2457 | 51 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2458 | 51 | Instr.getMemoryAlign(), |
2459 | 51 | LLVM::Type::getVectorType(Context.Int8Ty, 8), |
2460 | 51 | Context.Int16x8Ty, false); |
2461 | 51 | break; |
2462 | 375 | case OpCode::V128__load16x4_s: |
2463 | 375 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2464 | 375 | Instr.getMemoryAlign(), |
2465 | 375 | LLVM::Type::getVectorType(Context.Int16Ty, 4), |
2466 | 375 | Context.Int32x4Ty, true); |
2467 | 375 | break; |
2468 | 426 | case OpCode::V128__load16x4_u: |
2469 | 426 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2470 | 426 | Instr.getMemoryAlign(), |
2471 | 426 | LLVM::Type::getVectorType(Context.Int16Ty, 4), |
2472 | 426 | Context.Int32x4Ty, false); |
2473 | 426 | break; |
2474 | 144 | case OpCode::V128__load32x2_s: |
2475 | 144 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2476 | 144 | Instr.getMemoryAlign(), |
2477 | 144 | LLVM::Type::getVectorType(Context.Int32Ty, 2), |
2478 | 144 | Context.Int64x2Ty, true); |
2479 | 144 | break; |
2480 | 138 | case OpCode::V128__load32x2_u: |
2481 | 138 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2482 | 138 | Instr.getMemoryAlign(), |
2483 | 138 | LLVM::Type::getVectorType(Context.Int32Ty, 2), |
2484 | 138 | Context.Int64x2Ty, false); |
2485 | 138 | break; |
2486 | 81 | case OpCode::V128__load8_splat: |
2487 | 81 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2488 | 81 | Instr.getMemoryAlign(), Context.Int8Ty, |
2489 | 81 | Context.Int8x16Ty); |
2490 | 81 | break; |
2491 | 136 | case OpCode::V128__load16_splat: |
2492 | 136 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2493 | 136 | Instr.getMemoryAlign(), Context.Int16Ty, |
2494 | 136 | Context.Int16x8Ty); |
2495 | 136 | break; |
2496 | 212 | case OpCode::V128__load32_splat: |
2497 | 212 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2498 | 212 | Instr.getMemoryAlign(), Context.Int32Ty, |
2499 | 212 | Context.Int32x4Ty); |
2500 | 212 | break; |
2501 | 149 | case OpCode::V128__load64_splat: |
2502 | 149 | compileSplatLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2503 | 149 | Instr.getMemoryAlign(), Context.Int64Ty, |
2504 | 149 | Context.Int64x2Ty); |
2505 | 149 | break; |
2506 | 69 | case OpCode::V128__load32_zero: |
2507 | 69 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2508 | 69 | Instr.getMemoryAlign(), Context.Int32Ty, |
2509 | 69 | Context.Int128Ty, false); |
2510 | 69 | break; |
2511 | 151 | case OpCode::V128__load64_zero: |
2512 | 151 | compileVectorLoadOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2513 | 151 | Instr.getMemoryAlign(), Context.Int64Ty, |
2514 | 151 | Context.Int128Ty, false); |
2515 | 151 | break; |
2516 | 264 | case OpCode::V128__store: |
2517 | 264 | compileStoreOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2518 | 264 | Instr.getMemoryAlign(), Context.Int128x1Ty, false, true); |
2519 | 264 | break; |
2520 | 179 | case OpCode::V128__load8_lane: |
2521 | 179 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2522 | 179 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2523 | 179 | Context.Int8Ty, Context.Int8x16Ty); |
2524 | 179 | break; |
2525 | 193 | case OpCode::V128__load16_lane: |
2526 | 193 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2527 | 193 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2528 | 193 | Context.Int16Ty, Context.Int16x8Ty); |
2529 | 193 | break; |
2530 | 124 | case OpCode::V128__load32_lane: |
2531 | 124 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2532 | 124 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2533 | 124 | Context.Int32Ty, Context.Int32x4Ty); |
2534 | 124 | break; |
2535 | 21 | case OpCode::V128__load64_lane: |
2536 | 21 | compileLoadLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2537 | 21 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2538 | 21 | Context.Int64Ty, Context.Int64x2Ty); |
2539 | 21 | break; |
2540 | 101 | case OpCode::V128__store8_lane: |
2541 | 101 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2542 | 101 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2543 | 101 | Context.Int8Ty, Context.Int8x16Ty); |
2544 | 101 | break; |
2545 | 94 | case OpCode::V128__store16_lane: |
2546 | 94 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2547 | 94 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2548 | 94 | Context.Int16Ty, Context.Int16x8Ty); |
2549 | 94 | break; |
2550 | 130 | case OpCode::V128__store32_lane: |
2551 | 130 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2552 | 130 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2553 | 130 | Context.Int32Ty, Context.Int32x4Ty); |
2554 | 130 | break; |
2555 | 22 | case OpCode::V128__store64_lane: |
2556 | 22 | compileStoreLaneOp(Instr.getTargetIndex(), Instr.getMemoryOffset(), |
2557 | 22 | Instr.getMemoryAlign(), Instr.getMemoryLane(), |
2558 | 22 | Context.Int64Ty, Context.Int64x2Ty); |
2559 | 22 | break; |
2560 | | |
2561 | | // SIMD Const Instructions |
2562 | 399 | case OpCode::V128__const: { |
2563 | 399 | const auto Value = Instr.getNum().get<uint64x2_t>(); |
2564 | 399 | auto Vector = |
2565 | 399 | LLVM::Value::getConstVector64(LLContext, {Value[0], Value[1]}); |
2566 | 399 | stackPush(Builder.createBitCast(Vector, Context.Int64x2Ty)); |
2567 | 399 | break; |
2568 | 825 | } |
2569 | | |
2570 | | // SIMD Shuffle Instructions |
2571 | 15 | case OpCode::I8x16__shuffle: { |
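 | | // i8x16.shuffle: the 16 lane selectors are packed one byte per lane |
 | | // in the 128-bit immediate; unpack them into a constant mask for |
 | | // LLVM's shufflevector. |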
2572 | 15 | auto V2 = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
2573 | 15 | auto V1 = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
2574 | 15 | const auto V3 = Instr.getNum().get<uint128_t>(); |
2575 | 15 | std::array<uint8_t, 16> Mask; |
2576 | 255 | for (size_t I = 0; I < 16; ++I) { |
2577 | 240 | Mask[I] = static_cast<uint8_t>(V3 >> (I * 8)); |
2578 | 240 | } |
2579 | 15 | stackPush(Builder.createBitCast( |
2580 | 15 | Builder.createShuffleVector( |
2581 | 15 | V1, V2, LLVM::Value::getConstVector8(LLContext, Mask)), |
2582 | 15 | Context.Int64x2Ty)); |
2583 | 15 | break; |
2584 | 825 | } |
2585 | | |
2586 | | // SIMD Lane Instructions |
2587 | 76 | case OpCode::I8x16__extract_lane_s: |
2588 | 76 | compileExtractLaneOp(Context.Int8x16Ty, Instr.getMemoryLane(), |
2589 | 76 | Context.Int32Ty, true); |
2590 | 76 | break; |
2591 | 28 | case OpCode::I8x16__extract_lane_u: |
2592 | 28 | compileExtractLaneOp(Context.Int8x16Ty, Instr.getMemoryLane(), |
2593 | 28 | Context.Int32Ty, false); |
2594 | 28 | break; |
2595 | 162 | case OpCode::I8x16__replace_lane: |
2596 | 162 | compileReplaceLaneOp(Context.Int8x16Ty, Instr.getMemoryLane()); |
2597 | 162 | break; |
2598 | 331 | case OpCode::I16x8__extract_lane_s: |
2599 | 331 | compileExtractLaneOp(Context.Int16x8Ty, Instr.getMemoryLane(), |
2600 | 331 | Context.Int32Ty, true); |
2601 | 331 | break; |
2602 | 468 | case OpCode::I16x8__extract_lane_u: |
2603 | 468 | compileExtractLaneOp(Context.Int16x8Ty, Instr.getMemoryLane(), |
2604 | 468 | Context.Int32Ty, false); |
2605 | 468 | break; |
2606 | 251 | case OpCode::I16x8__replace_lane: |
2607 | 251 | compileReplaceLaneOp(Context.Int16x8Ty, Instr.getMemoryLane()); |
2608 | 251 | break; |
2609 | 68 | case OpCode::I32x4__extract_lane: |
2610 | 68 | compileExtractLaneOp(Context.Int32x4Ty, Instr.getMemoryLane()); |
2611 | 68 | break; |
2612 | 242 | case OpCode::I32x4__replace_lane: |
2613 | 242 | compileReplaceLaneOp(Context.Int32x4Ty, Instr.getMemoryLane()); |
2614 | 242 | break; |
2615 | 128 | case OpCode::I64x2__extract_lane: |
2616 | 128 | compileExtractLaneOp(Context.Int64x2Ty, Instr.getMemoryLane()); |
2617 | 128 | break; |
2618 | 14 | case OpCode::I64x2__replace_lane: |
2619 | 14 | compileReplaceLaneOp(Context.Int64x2Ty, Instr.getMemoryLane()); |
2620 | 14 | break; |
2621 | 68 | case OpCode::F32x4__extract_lane: |
2622 | 68 | compileExtractLaneOp(Context.Floatx4Ty, Instr.getMemoryLane()); |
2623 | 68 | break; |
2624 | 20 | case OpCode::F32x4__replace_lane: |
2625 | 20 | compileReplaceLaneOp(Context.Floatx4Ty, Instr.getMemoryLane()); |
2626 | 20 | break; |
2627 | 73 | case OpCode::F64x2__extract_lane: |
2628 | 73 | compileExtractLaneOp(Context.Doublex2Ty, Instr.getMemoryLane()); |
2629 | 73 | break; |
2630 | 7 | case OpCode::F64x2__replace_lane: |
2631 | 7 | compileReplaceLaneOp(Context.Doublex2Ty, Instr.getMemoryLane()); |
2632 | 7 | break; |
2633 | | |
2634 | | // SIMD Numeric Instructions |
2635 | 66 | case OpCode::I8x16__swizzle: |
2636 | 66 | compileVectorSwizzle(); |
2637 | 66 | break; |
2638 | 31.5k | case OpCode::I8x16__splat: |
2639 | 31.5k | compileSplatOp(Context.Int8x16Ty); |
2640 | 31.5k | break; |
2641 | 8.84k | case OpCode::I16x8__splat: |
2642 | 8.84k | compileSplatOp(Context.Int16x8Ty); |
2643 | 8.84k | break; |
2644 | 1.12k | case OpCode::I32x4__splat: |
2645 | 1.12k | compileSplatOp(Context.Int32x4Ty); |
2646 | 1.12k | break; |
2647 | 398 | case OpCode::I64x2__splat: |
2648 | 398 | compileSplatOp(Context.Int64x2Ty); |
2649 | 398 | break; |
2650 | 343 | case OpCode::F32x4__splat: |
2651 | 343 | compileSplatOp(Context.Floatx4Ty); |
2652 | 343 | break; |
2653 | 56 | case OpCode::F64x2__splat: |
2654 | 56 | compileSplatOp(Context.Doublex2Ty); |
2655 | 56 | break; |
2656 | 118 | case OpCode::I8x16__eq: |
2657 | 118 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntEQ); |
2658 | 118 | break; |
2659 | 328 | case OpCode::I8x16__ne: |
2660 | 328 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntNE); |
2661 | 328 | break; |
2662 | 77 | case OpCode::I8x16__lt_s: |
2663 | 77 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSLT); |
2664 | 77 | break; |
2665 | 81 | case OpCode::I8x16__lt_u: |
2666 | 81 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntULT); |
2667 | 81 | break; |
2668 | 142 | case OpCode::I8x16__gt_s: |
2669 | 142 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSGT); |
2670 | 142 | break; |
2671 | 180 | case OpCode::I8x16__gt_u: |
2672 | 180 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntUGT); |
2673 | 180 | break; |
2674 | 88 | case OpCode::I8x16__le_s: |
2675 | 88 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSLE); |
2676 | 88 | break; |
2677 | 121 | case OpCode::I8x16__le_u: |
2678 | 121 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntULE); |
2679 | 121 | break; |
2680 | 433 | case OpCode::I8x16__ge_s: |
2681 | 433 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntSGE); |
2682 | 433 | break; |
2683 | 121 | case OpCode::I8x16__ge_u: |
2684 | 121 | compileVectorCompareOp(Context.Int8x16Ty, LLVMIntUGE); |
2685 | 121 | break; |
2686 | 116 | case OpCode::I16x8__eq: |
2687 | 116 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntEQ); |
2688 | 116 | break; |
2689 | 248 | case OpCode::I16x8__ne: |
2690 | 248 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntNE); |
2691 | 248 | break; |
2692 | 79 | case OpCode::I16x8__lt_s: |
2693 | 79 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSLT); |
2694 | 79 | break; |
2695 | 231 | case OpCode::I16x8__lt_u: |
2696 | 231 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntULT); |
2697 | 231 | break; |
2698 | 227 | case OpCode::I16x8__gt_s: |
2699 | 227 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSGT); |
2700 | 227 | break; |
2701 | 161 | case OpCode::I16x8__gt_u: |
2702 | 161 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntUGT); |
2703 | 161 | break; |
2704 | 81 | case OpCode::I16x8__le_s: |
2705 | 81 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSLE); |
2706 | 81 | break; |
2707 | 106 | case OpCode::I16x8__le_u: |
2708 | 106 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntULE); |
2709 | 106 | break; |
2710 | 170 | case OpCode::I16x8__ge_s: |
2711 | 170 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntSGE); |
2712 | 170 | break; |
2713 | 77 | case OpCode::I16x8__ge_u: |
2714 | 77 | compileVectorCompareOp(Context.Int16x8Ty, LLVMIntUGE); |
2715 | 77 | break; |
2716 | 82 | case OpCode::I32x4__eq: |
2717 | 82 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntEQ); |
2718 | 82 | break; |
2719 | 119 | case OpCode::I32x4__ne: |
2720 | 119 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntNE); |
2721 | 119 | break; |
2722 | 69 | case OpCode::I32x4__lt_s: |
2723 | 69 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSLT); |
2724 | 69 | break; |
2725 | 146 | case OpCode::I32x4__lt_u: |
2726 | 146 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntULT); |
2727 | 146 | break; |
2728 | 109 | case OpCode::I32x4__gt_s: |
2729 | 109 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSGT); |
2730 | 109 | break; |
2731 | 253 | case OpCode::I32x4__gt_u: |
2732 | 253 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntUGT); |
2733 | 253 | break; |
2734 | 231 | case OpCode::I32x4__le_s: |
2735 | 231 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSLE); |
2736 | 231 | break; |
2737 | 261 | case OpCode::I32x4__le_u: |
2738 | 261 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntULE); |
2739 | 261 | break; |
2740 | 77 | case OpCode::I32x4__ge_s: |
2741 | 77 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntSGE); |
2742 | 77 | break; |
2743 | 90 | case OpCode::I32x4__ge_u: |
2744 | 90 | compileVectorCompareOp(Context.Int32x4Ty, LLVMIntUGE); |
2745 | 90 | break; |
2746 | 124 | case OpCode::I64x2__eq: |
2747 | 124 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntEQ); |
2748 | 124 | break; |
2749 | 52 | case OpCode::I64x2__ne: |
2750 | 52 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntNE); |
2751 | 52 | break; |
2752 | 51 | case OpCode::I64x2__lt_s: |
2753 | 51 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSLT); |
2754 | 51 | break; |
2755 | 161 | case OpCode::I64x2__gt_s: |
2756 | 161 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSGT); |
2757 | 161 | break; |
2758 | 37 | case OpCode::I64x2__le_s: |
2759 | 37 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSLE); |
2760 | 37 | break; |
2761 | 47 | case OpCode::I64x2__ge_s: |
2762 | 47 | compileVectorCompareOp(Context.Int64x2Ty, LLVMIntSGE); |
2763 | 47 | break; |
2764 | 1.22k | case OpCode::F32x4__eq: |
2765 | 1.22k | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOEQ, |
2766 | 1.22k | Context.Int32x4Ty); |
2767 | 1.22k | break; |
2768 | 45 | case OpCode::F32x4__ne: |
2769 | 45 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealUNE, |
2770 | 45 | Context.Int32x4Ty); |
2771 | 45 | break; |
2772 | 915 | case OpCode::F32x4__lt: |
2773 | 915 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOLT, |
2774 | 915 | Context.Int32x4Ty); |
2775 | 915 | break; |
2776 | 88 | case OpCode::F32x4__gt: |
2777 | 88 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOGT, |
2778 | 88 | Context.Int32x4Ty); |
2779 | 88 | break; |
2780 | 347 | case OpCode::F32x4__le: |
2781 | 347 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOLE, |
2782 | 347 | Context.Int32x4Ty); |
2783 | 347 | break; |
2784 | 82 | case OpCode::F32x4__ge: |
2785 | 82 | compileVectorCompareOp(Context.Floatx4Ty, LLVMRealOGE, |
2786 | 82 | Context.Int32x4Ty); |
2787 | 82 | break; |
2788 | 83 | case OpCode::F64x2__eq: |
2789 | 83 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOEQ, |
2790 | 83 | Context.Int64x2Ty); |
2791 | 83 | break; |
2792 | 149 | case OpCode::F64x2__ne: |
2793 | 149 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealUNE, |
2794 | 149 | Context.Int64x2Ty); |
2795 | 149 | break; |
2796 | 168 | case OpCode::F64x2__lt: |
2797 | 168 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOLT, |
2798 | 168 | Context.Int64x2Ty); |
2799 | 168 | break; |
2800 | 78 | case OpCode::F64x2__gt: |
2801 | 78 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOGT, |
2802 | 78 | Context.Int64x2Ty); |
2803 | 78 | break; |
2804 | 195 | case OpCode::F64x2__le: |
2805 | 195 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOLE, |
2806 | 195 | Context.Int64x2Ty); |
2807 | 195 | break; |
2808 | 101 | case OpCode::F64x2__ge: |
2809 | 101 | compileVectorCompareOp(Context.Doublex2Ty, LLVMRealOGE, |
2810 | 101 | Context.Int64x2Ty); |
2811 | 101 | break; |
2812 | 182 | case OpCode::V128__not: |
2813 | 182 | Stack.back() = Builder.createNot(Stack.back()); |
2814 | 182 | break; |
2815 | 76 | case OpCode::V128__and: { |
2816 | 76 | auto RHS = stackPop(); |
2817 | 76 | auto LHS = stackPop(); |
2818 | 76 | stackPush(Builder.createAnd(LHS, RHS)); |
2819 | 76 | break; |
2820 | 825 | } |
2821 | 92 | case OpCode::V128__andnot: { |
2822 | 92 | auto RHS = stackPop(); |
2823 | 92 | auto LHS = stackPop(); |
2824 | 92 | stackPush(Builder.createAnd(LHS, Builder.createNot(RHS))); |
2825 | 92 | break; |
2826 | 825 | } |
2827 | 131 | case OpCode::V128__or: { |
2828 | 131 | auto RHS = stackPop(); |
2829 | 131 | auto LHS = stackPop(); |
2830 | 131 | stackPush(Builder.createOr(LHS, RHS)); |
2831 | 131 | break; |
2832 | 825 | } |
2833 | 64 | case OpCode::V128__xor: { |
2834 | 64 | auto RHS = stackPop(); |
2835 | 64 | auto LHS = stackPop(); |
2836 | 64 | stackPush(Builder.createXor(LHS, RHS)); |
2837 | 64 | break; |
2838 | 825 | } |
2839 | 126 | case OpCode::V128__bitselect: { |
2840 | 126 | auto C = stackPop(); |
2841 | 126 | auto V2 = stackPop(); |
2842 | 126 | auto V1 = stackPop(); |
2843 | 126 | stackPush(Builder.createXor( |
2844 | 126 | Builder.createAnd(Builder.createXor(V1, V2), C), V2)); |
2845 | 126 | break; |
2846 | 825 | } |
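v128.bitselect is lowered branch-free with the identity ((V1 ^ V2) & C) ^ V2: wherever a mask bit is set, the xor chain lands on V1's bit; wherever it is clear, it collapses back to V2's bit. A scalar sketch of the same identity (plain C++, not the builder API):

    #include <cassert>
    #include <cstdint>

    // Branch-free bitselect: per bit, result = C ? V1 : V2.
    // ((V1 ^ V2) & C) ^ V2 flips V2 toward V1 exactly where the mask is set.
    uint64_t bitselect(uint64_t V1, uint64_t V2, uint64_t C) {
      return ((V1 ^ V2) & C) ^ V2;
    }

    int main() {
      // The mask selects the outer bytes from V1 and the inner bytes from V2.
      assert(bitselect(0x1111111111111111u, 0x2222222222222222u,
                       0xFF000000000000FFu) == 0x1122222222222211u);
      return 0;
    }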
2847 | 112 | case OpCode::V128__any_true: |
2848 | 112 | compileVectorAnyTrue(); |
2849 | 112 | break; |
2850 | 1.03k | case OpCode::I8x16__abs: |
2851 | 1.03k | compileVectorAbs(Context.Int8x16Ty); |
2852 | 1.03k | break; |
2853 | 1.97k | case OpCode::I8x16__neg: |
2854 | 1.97k | compileVectorNeg(Context.Int8x16Ty); |
2855 | 1.97k | break; |
2856 | 109 | case OpCode::I8x16__popcnt: |
2857 | 109 | compileVectorPopcnt(); |
2858 | 109 | break; |
2859 | 319 | case OpCode::I8x16__all_true: |
2860 | 319 | compileVectorAllTrue(Context.Int8x16Ty); |
2861 | 319 | break; |
2862 | 602 | case OpCode::I8x16__bitmask: |
2863 | 602 | compileVectorBitMask(Context.Int8x16Ty); |
2864 | 602 | break; |
2865 | 93 | case OpCode::I8x16__narrow_i16x8_s: |
2866 | 93 | compileVectorNarrow(Context.Int16x8Ty, true); |
2867 | 93 | break; |
2868 | 168 | case OpCode::I8x16__narrow_i16x8_u: |
2869 | 168 | compileVectorNarrow(Context.Int16x8Ty, false); |
2870 | 168 | break; |
2871 | 99 | case OpCode::I8x16__shl: |
2872 | 99 | compileVectorShl(Context.Int8x16Ty); |
2873 | 99 | break; |
2874 | 859 | case OpCode::I8x16__shr_s: |
2875 | 859 | compileVectorAShr(Context.Int8x16Ty); |
2876 | 859 | break; |
2877 | 104 | case OpCode::I8x16__shr_u: |
2878 | 104 | compileVectorLShr(Context.Int8x16Ty); |
2879 | 104 | break; |
2880 | 55 | case OpCode::I8x16__add: |
2881 | 55 | compileVectorVectorAdd(Context.Int8x16Ty); |
2882 | 55 | break; |
2883 | 450 | case OpCode::I8x16__add_sat_s: |
2884 | 450 | compileVectorVectorAddSat(Context.Int8x16Ty, true); |
2885 | 450 | break; |
2886 | 89 | case OpCode::I8x16__add_sat_u: |
2887 | 89 | compileVectorVectorAddSat(Context.Int8x16Ty, false); |
2888 | 89 | break; |
2889 | 71 | case OpCode::I8x16__sub: |
2890 | 71 | compileVectorVectorSub(Context.Int8x16Ty); |
2891 | 71 | break; |
2892 | 148 | case OpCode::I8x16__sub_sat_s: |
2893 | 148 | compileVectorVectorSubSat(Context.Int8x16Ty, true); |
2894 | 148 | break; |
2895 | 98 | case OpCode::I8x16__sub_sat_u: |
2896 | 98 | compileVectorVectorSubSat(Context.Int8x16Ty, false); |
2897 | 98 | break; |
2898 | 62 | case OpCode::I8x16__min_s: |
2899 | 62 | compileVectorVectorSMin(Context.Int8x16Ty); |
2900 | 62 | break; |
2901 | 66 | case OpCode::I8x16__min_u: |
2902 | 66 | compileVectorVectorUMin(Context.Int8x16Ty); |
2903 | 66 | break; |
2904 | 325 | case OpCode::I8x16__max_s: |
2905 | 325 | compileVectorVectorSMax(Context.Int8x16Ty); |
2906 | 325 | break; |
2907 | 93 | case OpCode::I8x16__max_u: |
2908 | 93 | compileVectorVectorUMax(Context.Int8x16Ty); |
2909 | 93 | break; |
2910 | 124 | case OpCode::I8x16__avgr_u: |
2911 | 124 | compileVectorVectorUAvgr(Context.Int8x16Ty); |
2912 | 124 | break; |
2913 | 202 | case OpCode::I16x8__abs: |
2914 | 202 | compileVectorAbs(Context.Int16x8Ty); |
2915 | 202 | break; |
2916 | 196 | case OpCode::I16x8__neg: |
2917 | 196 | compileVectorNeg(Context.Int16x8Ty); |
2918 | 196 | break; |
2919 | 117 | case OpCode::I16x8__all_true: |
2920 | 117 | compileVectorAllTrue(Context.Int16x8Ty); |
2921 | 117 | break; |
2922 | 118 | case OpCode::I16x8__bitmask: |
2923 | 118 | compileVectorBitMask(Context.Int16x8Ty); |
2924 | 118 | break; |
2925 | 48 | case OpCode::I16x8__narrow_i32x4_s: |
2926 | 48 | compileVectorNarrow(Context.Int32x4Ty, true); |
2927 | 48 | break; |
2928 | 352 | case OpCode::I16x8__narrow_i32x4_u: |
2929 | 352 | compileVectorNarrow(Context.Int32x4Ty, false); |
2930 | 352 | break; |
2931 | 625 | case OpCode::I16x8__extend_low_i8x16_s: |
2932 | 625 | compileVectorExtend(Context.Int8x16Ty, true, true); |
2933 | 625 | break; |
2934 | 65 | case OpCode::I16x8__extend_high_i8x16_s: |
2935 | 65 | compileVectorExtend(Context.Int8x16Ty, true, false); |
2936 | 65 | break; |
2937 | 378 | case OpCode::I16x8__extend_low_i8x16_u: |
2938 | 378 | compileVectorExtend(Context.Int8x16Ty, false, true); |
2939 | 378 | break; |
2940 | 17 | case OpCode::I16x8__extend_high_i8x16_u: |
2941 | 17 | compileVectorExtend(Context.Int8x16Ty, false, false); |
2942 | 17 | break; |
2943 | 92 | case OpCode::I16x8__shl: |
2944 | 92 | compileVectorShl(Context.Int16x8Ty); |
2945 | 92 | break; |
2946 | 265 | case OpCode::I16x8__shr_s: |
2947 | 265 | compileVectorAShr(Context.Int16x8Ty); |
2948 | 265 | break; |
2949 | 73 | case OpCode::I16x8__shr_u: |
2950 | 73 | compileVectorLShr(Context.Int16x8Ty); |
2951 | 73 | break; |
2952 | 136 | case OpCode::I16x8__add: |
2953 | 136 | compileVectorVectorAdd(Context.Int16x8Ty); |
2954 | 136 | break; |
2955 | 20 | case OpCode::I16x8__add_sat_s: |
2956 | 20 | compileVectorVectorAddSat(Context.Int16x8Ty, true); |
2957 | 20 | break; |
2958 | 423 | case OpCode::I16x8__add_sat_u: |
2959 | 423 | compileVectorVectorAddSat(Context.Int16x8Ty, false); |
2960 | 423 | break; |
2961 | 307 | case OpCode::I16x8__sub: |
2962 | 307 | compileVectorVectorSub(Context.Int16x8Ty); |
2963 | 307 | break; |
2964 | 23 | case OpCode::I16x8__sub_sat_s: |
2965 | 23 | compileVectorVectorSubSat(Context.Int16x8Ty, true); |
2966 | 23 | break; |
2967 | 68 | case OpCode::I16x8__sub_sat_u: |
2968 | 68 | compileVectorVectorSubSat(Context.Int16x8Ty, false); |
2969 | 68 | break; |
2970 | 117 | case OpCode::I16x8__mul: |
2971 | 117 | compileVectorVectorMul(Context.Int16x8Ty); |
2972 | 117 | break; |
2973 | 141 | case OpCode::I16x8__min_s: |
2974 | 141 | compileVectorVectorSMin(Context.Int16x8Ty); |
2975 | 141 | break; |
2976 | 122 | case OpCode::I16x8__min_u: |
2977 | 122 | compileVectorVectorUMin(Context.Int16x8Ty); |
2978 | 122 | break; |
2979 | 81 | case OpCode::I16x8__max_s: |
2980 | 81 | compileVectorVectorSMax(Context.Int16x8Ty); |
2981 | 81 | break; |
2982 | 501 | case OpCode::I16x8__max_u: |
2983 | 501 | compileVectorVectorUMax(Context.Int16x8Ty); |
2984 | 501 | break; |
2985 | 103 | case OpCode::I16x8__avgr_u: |
2986 | 103 | compileVectorVectorUAvgr(Context.Int16x8Ty); |
2987 | 103 | break; |
2988 | 68 | case OpCode::I16x8__extmul_low_i8x16_s: |
2989 | 68 | compileVectorExtMul(Context.Int8x16Ty, true, true); |
2990 | 68 | break; |
2991 | 202 | case OpCode::I16x8__extmul_high_i8x16_s: |
2992 | 202 | compileVectorExtMul(Context.Int8x16Ty, true, false); |
2993 | 202 | break; |
2994 | 112 | case OpCode::I16x8__extmul_low_i8x16_u: |
2995 | 112 | compileVectorExtMul(Context.Int8x16Ty, false, true); |
2996 | 112 | break; |
2997 | 436 | case OpCode::I16x8__extmul_high_i8x16_u: |
2998 | 436 | compileVectorExtMul(Context.Int8x16Ty, false, false); |
2999 | 436 | break; |
3000 | 132 | case OpCode::I16x8__q15mulr_sat_s: |
3001 | 132 | compileVectorVectorQ15MulSat(); |
3002 | 132 | break; |
3003 | 370 | case OpCode::I16x8__extadd_pairwise_i8x16_s: |
3004 | 370 | compileVectorExtAddPairwise(Context.Int8x16Ty, true); |
3005 | 370 | break; |
3006 | 334 | case OpCode::I16x8__extadd_pairwise_i8x16_u: |
3007 | 334 | compileVectorExtAddPairwise(Context.Int8x16Ty, false); |
3008 | 334 | break; |
3009 | 92 | case OpCode::I32x4__abs: |
3010 | 92 | compileVectorAbs(Context.Int32x4Ty); |
3011 | 92 | break; |
3012 | 182 | case OpCode::I32x4__neg: |
3013 | 182 | compileVectorNeg(Context.Int32x4Ty); |
3014 | 182 | break; |
3015 | 174 | case OpCode::I32x4__all_true: |
3016 | 174 | compileVectorAllTrue(Context.Int32x4Ty); |
3017 | 174 | break; |
3018 | 86 | case OpCode::I32x4__bitmask: |
3019 | 86 | compileVectorBitMask(Context.Int32x4Ty); |
3020 | 86 | break; |
3021 | 116 | case OpCode::I32x4__extend_low_i16x8_s: |
3022 | 116 | compileVectorExtend(Context.Int16x8Ty, true, true); |
3023 | 116 | break; |
3024 | 532 | case OpCode::I32x4__extend_high_i16x8_s: |
3025 | 532 | compileVectorExtend(Context.Int16x8Ty, true, false); |
3026 | 532 | break; |
3027 | 1.90k | case OpCode::I32x4__extend_low_i16x8_u: |
3028 | 1.90k | compileVectorExtend(Context.Int16x8Ty, false, true); |
3029 | 1.90k | break; |
3030 | 153 | case OpCode::I32x4__extend_high_i16x8_u: |
3031 | 153 | compileVectorExtend(Context.Int16x8Ty, false, false); |
3032 | 153 | break; |
3033 | 962 | case OpCode::I32x4__shl: |
3034 | 962 | compileVectorShl(Context.Int32x4Ty); |
3035 | 962 | break; |
3036 | 189 | case OpCode::I32x4__shr_s: |
3037 | 189 | compileVectorAShr(Context.Int32x4Ty); |
3038 | 189 | break; |
3039 | 121 | case OpCode::I32x4__shr_u: |
3040 | 121 | compileVectorLShr(Context.Int32x4Ty); |
3041 | 121 | break; |
3042 | 110 | case OpCode::I32x4__add: |
3043 | 110 | compileVectorVectorAdd(Context.Int32x4Ty); |
3044 | 110 | break; |
3045 | 152 | case OpCode::I32x4__sub: |
3046 | 152 | compileVectorVectorSub(Context.Int32x4Ty); |
3047 | 152 | break; |
3048 | 214 | case OpCode::I32x4__mul: |
3049 | 214 | compileVectorVectorMul(Context.Int32x4Ty); |
3050 | 214 | break; |
3051 | 136 | case OpCode::I32x4__min_s: |
3052 | 136 | compileVectorVectorSMin(Context.Int32x4Ty); |
3053 | 136 | break; |
3054 | 81 | case OpCode::I32x4__min_u: |
3055 | 81 | compileVectorVectorUMin(Context.Int32x4Ty); |
3056 | 81 | break; |
3057 | 98 | case OpCode::I32x4__max_s: |
3058 | 98 | compileVectorVectorSMax(Context.Int32x4Ty); |
3059 | 98 | break; |
3060 | 122 | case OpCode::I32x4__max_u: |
3061 | 122 | compileVectorVectorUMax(Context.Int32x4Ty); |
3062 | 122 | break; |
3063 | 113 | case OpCode::I32x4__extmul_low_i16x8_s: |
3064 | 113 | compileVectorExtMul(Context.Int16x8Ty, true, true); |
3065 | 113 | break; |
3066 | 52 | case OpCode::I32x4__extmul_high_i16x8_s: |
3067 | 52 | compileVectorExtMul(Context.Int16x8Ty, true, false); |
3068 | 52 | break; |
3069 | 183 | case OpCode::I32x4__extmul_low_i16x8_u: |
3070 | 183 | compileVectorExtMul(Context.Int16x8Ty, false, true); |
3071 | 183 | break; |
3072 | 44 | case OpCode::I32x4__extmul_high_i16x8_u: |
3073 | 44 | compileVectorExtMul(Context.Int16x8Ty, false, false); |
3074 | 44 | break; |
3075 | 814 | case OpCode::I32x4__extadd_pairwise_i16x8_s: |
3076 | 814 | compileVectorExtAddPairwise(Context.Int16x8Ty, true); |
3077 | 814 | break; |
3078 | 515 | case OpCode::I32x4__extadd_pairwise_i16x8_u: |
3079 | 515 | compileVectorExtAddPairwise(Context.Int16x8Ty, false); |
3080 | 515 | break; |
3081 | 144 | case OpCode::I32x4__dot_i16x8_s: { |
3082 | 144 | auto ExtendTy = Context.Int16x8Ty.getExtendedElementVectorType(); |
3083 | 144 | auto Undef = LLVM::Value::getUndef(ExtendTy); |
3084 | 144 | auto LHS = Builder.createSExt( |
3085 | 144 | Builder.createBitCast(stackPop(), Context.Int16x8Ty), ExtendTy); |
3086 | 144 | auto RHS = Builder.createSExt( |
3087 | 144 | Builder.createBitCast(stackPop(), Context.Int16x8Ty), ExtendTy); |
3088 | 144 | auto M = Builder.createMul(LHS, RHS); |
3089 | 144 | auto L = Builder.createShuffleVector( |
3090 | 144 | M, Undef, |
3091 | 144 | LLVM::Value::getConstVector32(LLContext, {0U, 2U, 4U, 6U})); |
3092 | 144 | auto R = Builder.createShuffleVector( |
3093 | 144 | M, Undef, |
3094 | 144 | LLVM::Value::getConstVector32(LLContext, {1U, 3U, 5U, 7U})); |
3095 | 144 | auto V = Builder.createAdd(L, R); |
3096 | 144 | stackPush(Builder.createBitCast(V, Context.Int64x2Ty)); |
3097 | 144 | break; |
3098 | 825 | } |
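The dot-product case above has no single intrinsic behind it: both i16x8 inputs are sign-extended to 32-bit lanes, multiplied, and the even and odd products are separated with two shuffles and added, so result lane k is a[2k]*b[2k] + a[2k+1]*b[2k+1]. A scalar sketch of that semantics (illustrative, not the builder code):

    #include <array>
    #include <cstdint>

    // Lane k of the result is a[2k]*b[2k] + a[2k+1]*b[2k+1]. The products are
    // exact after sign extension; only the final pairwise add can wrap, so the
    // sketch widens to 64 bits and wraps to 32 bits at the end, matching the
    // plain i32 add emitted above.
    std::array<int32_t, 4> dotI16x8S(const std::array<int16_t, 8> &A,
                                     const std::array<int16_t, 8> &B) {
      std::array<int32_t, 4> Out{};
      for (int K = 0; K < 4; ++K) {
        const int64_t Even = int64_t(A[2 * K]) * int64_t(B[2 * K]);
        const int64_t Odd = int64_t(A[2 * K + 1]) * int64_t(B[2 * K + 1]);
        Out[K] = static_cast<int32_t>(static_cast<uint32_t>(Even + Odd));
      }
      return Out;
    }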
3099 | 680 | case OpCode::I64x2__abs: |
3100 | 680 | compileVectorAbs(Context.Int64x2Ty); |
3101 | 680 | break; |
3102 | 430 | case OpCode::I64x2__neg: |
3103 | 430 | compileVectorNeg(Context.Int64x2Ty); |
3104 | 430 | break; |
3105 | 299 | case OpCode::I64x2__all_true: |
3106 | 299 | compileVectorAllTrue(Context.Int64x2Ty); |
3107 | 299 | break; |
3108 | 213 | case OpCode::I64x2__bitmask: |
3109 | 213 | compileVectorBitMask(Context.Int64x2Ty); |
3110 | 213 | break; |
3111 | 116 | case OpCode::I64x2__extend_low_i32x4_s: |
3112 | 116 | compileVectorExtend(Context.Int32x4Ty, true, true); |
3113 | 116 | break; |
3114 | 592 | case OpCode::I64x2__extend_high_i32x4_s: |
3115 | 592 | compileVectorExtend(Context.Int32x4Ty, true, false); |
3116 | 592 | break; |
3117 | 127 | case OpCode::I64x2__extend_low_i32x4_u: |
3118 | 127 | compileVectorExtend(Context.Int32x4Ty, false, true); |
3119 | 127 | break; |
3120 | 562 | case OpCode::I64x2__extend_high_i32x4_u: |
3121 | 562 | compileVectorExtend(Context.Int32x4Ty, false, false); |
3122 | 562 | break; |
3123 | 135 | case OpCode::I64x2__shl: |
3124 | 135 | compileVectorShl(Context.Int64x2Ty); |
3125 | 135 | break; |
3126 | 281 | case OpCode::I64x2__shr_s: |
3127 | 281 | compileVectorAShr(Context.Int64x2Ty); |
3128 | 281 | break; |
3129 | 100 | case OpCode::I64x2__shr_u: |
3130 | 100 | compileVectorLShr(Context.Int64x2Ty); |
3131 | 100 | break; |
3132 | 53 | case OpCode::I64x2__add: |
3133 | 53 | compileVectorVectorAdd(Context.Int64x2Ty); |
3134 | 53 | break; |
3135 | 251 | case OpCode::I64x2__sub: |
3136 | 251 | compileVectorVectorSub(Context.Int64x2Ty); |
3137 | 251 | break; |
3138 | 81 | case OpCode::I64x2__mul: |
3139 | 81 | compileVectorVectorMul(Context.Int64x2Ty); |
3140 | 81 | break; |
3141 | 43 | case OpCode::I64x2__extmul_low_i32x4_s: |
3142 | 43 | compileVectorExtMul(Context.Int32x4Ty, true, true); |
3143 | 43 | break; |
3144 | 283 | case OpCode::I64x2__extmul_high_i32x4_s: |
3145 | 283 | compileVectorExtMul(Context.Int32x4Ty, true, false); |
3146 | 283 | break; |
3147 | 35 | case OpCode::I64x2__extmul_low_i32x4_u: |
3148 | 35 | compileVectorExtMul(Context.Int32x4Ty, false, true); |
3149 | 35 | break; |
3150 | 137 | case OpCode::I64x2__extmul_high_i32x4_u: |
3151 | 137 | compileVectorExtMul(Context.Int32x4Ty, false, false); |
3152 | 137 | break; |
3153 | 56 | case OpCode::F32x4__abs: |
3154 | 56 | compileVectorFAbs(Context.Floatx4Ty); |
3155 | 56 | break; |
3156 | 142 | case OpCode::F32x4__neg: |
3157 | 142 | compileVectorFNeg(Context.Floatx4Ty); |
3158 | 142 | break; |
3159 | 198 | case OpCode::F32x4__sqrt: |
3160 | 198 | compileVectorFSqrt(Context.Floatx4Ty); |
3161 | 198 | break; |
3162 | 127 | case OpCode::F32x4__add: |
3163 | 127 | compileVectorVectorFAdd(Context.Floatx4Ty); |
3164 | 127 | break; |
3165 | 249 | case OpCode::F32x4__sub: |
3166 | 249 | compileVectorVectorFSub(Context.Floatx4Ty); |
3167 | 249 | break; |
3168 | 42 | case OpCode::F32x4__mul: |
3169 | 42 | compileVectorVectorFMul(Context.Floatx4Ty); |
3170 | 42 | break; |
3171 | 190 | case OpCode::F32x4__div: |
3172 | 190 | compileVectorVectorFDiv(Context.Floatx4Ty); |
3173 | 190 | break; |
3174 | 123 | case OpCode::F32x4__min: |
3175 | 123 | compileVectorVectorFMin(Context.Floatx4Ty); |
3176 | 123 | break; |
3177 | 43 | case OpCode::F32x4__max: |
3178 | 43 | compileVectorVectorFMax(Context.Floatx4Ty); |
3179 | 43 | break; |
3180 | 27 | case OpCode::F32x4__pmin: |
3181 | 27 | compileVectorVectorFPMin(Context.Floatx4Ty); |
3182 | 27 | break; |
3183 | 211 | case OpCode::F32x4__pmax: |
3184 | 211 | compileVectorVectorFPMax(Context.Floatx4Ty); |
3185 | 211 | break; |
3186 | 978 | case OpCode::F32x4__ceil: |
3187 | 978 | compileVectorFCeil(Context.Floatx4Ty); |
3188 | 978 | break; |
3189 | 1.87k | case OpCode::F32x4__floor: |
3190 | 1.87k | compileVectorFFloor(Context.Floatx4Ty); |
3191 | 1.87k | break; |
3192 | 1.92k | case OpCode::F32x4__trunc: |
3193 | 1.92k | compileVectorFTrunc(Context.Floatx4Ty); |
3194 | 1.92k | break; |
3195 | 272 | case OpCode::F32x4__nearest: |
3196 | 272 | compileVectorFNearest(Context.Floatx4Ty); |
3197 | 272 | break; |
3198 | 441 | case OpCode::F64x2__abs: |
3199 | 441 | compileVectorFAbs(Context.Doublex2Ty); |
3200 | 441 | break; |
3201 | 802 | case OpCode::F64x2__neg: |
3202 | 802 | compileVectorFNeg(Context.Doublex2Ty); |
3203 | 802 | break; |
3204 | 126 | case OpCode::F64x2__sqrt: |
3205 | 126 | compileVectorFSqrt(Context.Doublex2Ty); |
3206 | 126 | break; |
3207 | 51 | case OpCode::F64x2__add: |
3208 | 51 | compileVectorVectorFAdd(Context.Doublex2Ty); |
3209 | 51 | break; |
3210 | 221 | case OpCode::F64x2__sub: |
3211 | 221 | compileVectorVectorFSub(Context.Doublex2Ty); |
3212 | 221 | break; |
3213 | 183 | case OpCode::F64x2__mul: |
3214 | 183 | compileVectorVectorFMul(Context.Doublex2Ty); |
3215 | 183 | break; |
3216 | 41 | case OpCode::F64x2__div: |
3217 | 41 | compileVectorVectorFDiv(Context.Doublex2Ty); |
3218 | 41 | break; |
3219 | 178 | case OpCode::F64x2__min: |
3220 | 178 | compileVectorVectorFMin(Context.Doublex2Ty); |
3221 | 178 | break; |
3222 | 168 | case OpCode::F64x2__max: |
3223 | 168 | compileVectorVectorFMax(Context.Doublex2Ty); |
3224 | 168 | break; |
3225 | 262 | case OpCode::F64x2__pmin: |
3226 | 262 | compileVectorVectorFPMin(Context.Doublex2Ty); |
3227 | 262 | break; |
3228 | 65 | case OpCode::F64x2__pmax: |
3229 | 65 | compileVectorVectorFPMax(Context.Doublex2Ty); |
3230 | 65 | break; |
3231 | 683 | case OpCode::F64x2__ceil: |
3232 | 683 | compileVectorFCeil(Context.Doublex2Ty); |
3233 | 683 | break; |
3234 | 815 | case OpCode::F64x2__floor: |
3235 | 815 | compileVectorFFloor(Context.Doublex2Ty); |
3236 | 815 | break; |
3237 | 112 | case OpCode::F64x2__trunc: |
3238 | 112 | compileVectorFTrunc(Context.Doublex2Ty); |
3239 | 112 | break; |
3240 | 161 | case OpCode::F64x2__nearest: |
3241 | 161 | compileVectorFNearest(Context.Doublex2Ty); |
3242 | 161 | break; |
3243 | 205 | case OpCode::I32x4__trunc_sat_f32x4_s: |
3244 | 205 | compileVectorTruncSatS32(Context.Floatx4Ty, false); |
3245 | 205 | break; |
3246 | 3.73k | case OpCode::I32x4__trunc_sat_f32x4_u: |
3247 | 3.73k | compileVectorTruncSatU32(Context.Floatx4Ty, false); |
3248 | 3.73k | break; |
3249 | 333 | case OpCode::F32x4__convert_i32x4_s: |
3250 | 333 | compileVectorConvertS(Context.Int32x4Ty, Context.Floatx4Ty, false); |
3251 | 333 | break; |
3252 | 720 | case OpCode::F32x4__convert_i32x4_u: |
3253 | 720 | compileVectorConvertU(Context.Int32x4Ty, Context.Floatx4Ty, false); |
3254 | 720 | break; |
3255 | 755 | case OpCode::I32x4__trunc_sat_f64x2_s_zero: |
3256 | 755 | compileVectorTruncSatS32(Context.Doublex2Ty, true); |
3257 | 755 | break; |
3258 | 2.14k | case OpCode::I32x4__trunc_sat_f64x2_u_zero: |
3259 | 2.14k | compileVectorTruncSatU32(Context.Doublex2Ty, true); |
3260 | 2.14k | break; |
3261 | 333 | case OpCode::F64x2__convert_low_i32x4_s: |
3262 | 333 | compileVectorConvertS(Context.Int32x4Ty, Context.Doublex2Ty, true); |
3263 | 333 | break; |
3264 | 1.25k | case OpCode::F64x2__convert_low_i32x4_u: |
3265 | 1.25k | compileVectorConvertU(Context.Int32x4Ty, Context.Doublex2Ty, true); |
3266 | 1.25k | break; |
3267 | 733 | case OpCode::F32x4__demote_f64x2_zero: |
3268 | 733 | compileVectorDemote(); |
3269 | 733 | break; |
3270 | 723 | case OpCode::F64x2__promote_low_f32x4: |
3271 | 723 | compileVectorPromote(); |
3272 | 723 | break; |
3273 | | |
3274 | | // Relaxed SIMD Instructions |
3275 | 0 | case OpCode::I8x16__relaxed_swizzle: |
3276 | 0 | compileVectorSwizzle(); |
3277 | 0 | break; |
3278 | 0 | case OpCode::I32x4__relaxed_trunc_f32x4_s: |
3279 | 0 | compileVectorTruncSatS32(Context.Floatx4Ty, false); |
3280 | 0 | break; |
3281 | 0 | case OpCode::I32x4__relaxed_trunc_f32x4_u: |
3282 | 0 | compileVectorTruncSatU32(Context.Floatx4Ty, false); |
3283 | 0 | break; |
3284 | 0 | case OpCode::I32x4__relaxed_trunc_f64x2_s_zero: |
3285 | 0 | compileVectorTruncSatS32(Context.Doublex2Ty, true); |
3286 | 0 | break; |
3287 | 0 | case OpCode::I32x4__relaxed_trunc_f64x2_u_zero: |
3288 | 0 | compileVectorTruncSatU32(Context.Doublex2Ty, true); |
3289 | 0 | break; |
3290 | 0 | case OpCode::F32x4__relaxed_madd: |
3291 | 0 | compileVectorVectorMAdd(Context.Floatx4Ty); |
3292 | 0 | break; |
3293 | 0 | case OpCode::F32x4__relaxed_nmadd: |
3294 | 0 | compileVectorVectorNMAdd(Context.Floatx4Ty); |
3295 | 0 | break; |
3296 | 0 | case OpCode::F64x2__relaxed_madd: |
3297 | 0 | compileVectorVectorMAdd(Context.Doublex2Ty); |
3298 | 0 | break; |
3299 | 0 | case OpCode::F64x2__relaxed_nmadd: |
3300 | 0 | compileVectorVectorNMAdd(Context.Doublex2Ty); |
3301 | 0 | break; |
3302 | 0 | case OpCode::I8x16__relaxed_laneselect: |
3303 | 0 | case OpCode::I16x8__relaxed_laneselect: |
3304 | 0 | case OpCode::I32x4__relaxed_laneselect: |
3305 | 0 | case OpCode::I64x2__relaxed_laneselect: { |
3306 | 0 | auto C = stackPop(); |
3307 | 0 | auto V2 = stackPop(); |
3308 | 0 | auto V1 = stackPop(); |
3309 | 0 | stackPush(Builder.createXor( |
3310 | 0 | Builder.createAnd(Builder.createXor(V1, V2), C), V2)); |
3311 | 0 | break; |
3312 | 0 | } |
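The relaxed laneselect opcodes reuse the bitwise-select lowering. This appears permissible because relaxed SIMD leaves the result implementation-defined unless every mask lane is all-zeros or all-ones, and in those cases a per-bit select and a per-lane select agree. A small check of that agreement (plain C++, illustrative):

    #include <array>
    #include <cassert>
    #include <cstdint>

    int main() {
      // Mask lanes are all-ones or all-zeros, the inputs for which relaxed
      // laneselect pins down a result; bitwise and lane-wise select then agree.
      const std::array<uint32_t, 4> V1{1, 2, 3, 4}, V2{5, 6, 7, 8};
      const std::array<uint32_t, 4> C{0xFFFFFFFFu, 0u, 0xFFFFFFFFu, 0u};
      for (int I = 0; I < 4; ++I) {
        const uint32_t Bitwise = ((V1[I] ^ V2[I]) & C[I]) ^ V2[I];
        const uint32_t Lanewise = C[I] ? V1[I] : V2[I];
        assert(Bitwise == Lanewise);
      }
      return 0;
    }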
3313 | 0 | case OpCode::F32x4__relaxed_min: |
3314 | 0 | compileVectorVectorFMin(Context.Floatx4Ty); |
3315 | 0 | break; |
3316 | 0 | case OpCode::F32x4__relaxed_max: |
3317 | 0 | compileVectorVectorFMax(Context.Floatx4Ty); |
3318 | 0 | break; |
3319 | 0 | case OpCode::F64x2__relaxed_min: |
3320 | 0 | compileVectorVectorFMin(Context.Doublex2Ty); |
3321 | 0 | break; |
3322 | 0 | case OpCode::F64x2__relaxed_max: |
3323 | 0 | compileVectorVectorFMax(Context.Doublex2Ty); |
3324 | 0 | break; |
3325 | 0 | case OpCode::I16x8__relaxed_q15mulr_s: |
3326 | 0 | compileVectorVectorQ15MulSat(); |
3327 | 0 | break; |
3328 | 0 | case OpCode::I16x8__relaxed_dot_i8x16_i7x16_s: |
3329 | 0 | compileVectorRelaxedIntegerDotProduct(); |
3330 | 0 | break; |
3331 | 0 | case OpCode::I32x4__relaxed_dot_i8x16_i7x16_add_s: |
3332 | 0 | compileVectorRelaxedIntegerDotProductAdd(); |
3333 | 0 | break; |
3334 | | |
3335 | | // Atomic Instructions |
3336 | 185 | case OpCode::Atomic__fence: |
3337 | 185 | return compileMemoryFence(); |
3338 | 29 | case OpCode::Memory__atomic__notify: |
3339 | 29 | return compileAtomicNotify(Instr.getTargetIndex(), |
3340 | 29 | Instr.getMemoryOffset()); |
3341 | 7 | case OpCode::Memory__atomic__wait32: |
3342 | 7 | return compileAtomicWait(Instr.getTargetIndex(), |
3343 | 7 | Instr.getMemoryOffset(), Context.Int32Ty, 32); |
3344 | 4 | case OpCode::Memory__atomic__wait64: |
3345 | 4 | return compileAtomicWait(Instr.getTargetIndex(), |
3346 | 4 | Instr.getMemoryOffset(), Context.Int64Ty, 64); |
3347 | 0 | case OpCode::I32__atomic__load: |
3348 | 0 | return compileAtomicLoad( |
3349 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3350 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int32Ty, true); |
3351 | 0 | case OpCode::I64__atomic__load: |
3352 | 0 | return compileAtomicLoad( |
3353 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3354 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int64Ty, true); |
3355 | 0 | case OpCode::I32__atomic__load8_u: |
3356 | 0 | return compileAtomicLoad( |
3357 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3358 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int8Ty); |
3359 | 0 | case OpCode::I32__atomic__load16_u: |
3360 | 0 | return compileAtomicLoad( |
3361 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3362 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int16Ty); |
3363 | 0 | case OpCode::I64__atomic__load8_u: |
3364 | 0 | return compileAtomicLoad( |
3365 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3366 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int8Ty); |
3367 | 0 | case OpCode::I64__atomic__load16_u: |
3368 | 0 | return compileAtomicLoad( |
3369 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3370 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int16Ty); |
3371 | 0 | case OpCode::I64__atomic__load32_u: |
3372 | 0 | return compileAtomicLoad( |
3373 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3374 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int32Ty); |
3375 | 0 | case OpCode::I32__atomic__store: |
3376 | 0 | return compileAtomicStore( |
3377 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3378 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int32Ty, true); |
3379 | 0 | case OpCode::I64__atomic__store: |
3380 | 0 | return compileAtomicStore( |
3381 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3382 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int64Ty, true); |
3383 | 0 | case OpCode::I32__atomic__store8: |
3384 | 0 | return compileAtomicStore( |
3385 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3386 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int8Ty, true); |
3387 | 0 | case OpCode::I32__atomic__store16: |
3388 | 0 | return compileAtomicStore( |
3389 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3390 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int16Ty, true); |
3391 | 0 | case OpCode::I64__atomic__store8: |
3392 | 0 | return compileAtomicStore( |
3393 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3394 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int8Ty, true); |
3395 | 0 | case OpCode::I64__atomic__store16: |
3396 | 0 | return compileAtomicStore( |
3397 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3398 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int16Ty, true); |
3399 | 0 | case OpCode::I64__atomic__store32: |
3400 | 0 | return compileAtomicStore( |
3401 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3402 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int32Ty, true); |
3403 | 0 | case OpCode::I32__atomic__rmw__add: |
3404 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3405 | 0 | Instr.getMemoryOffset(), |
3406 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3407 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3408 | 0 | case OpCode::I64__atomic__rmw__add: |
3409 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3410 | 0 | Instr.getMemoryOffset(), |
3411 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3412 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3413 | 0 | case OpCode::I32__atomic__rmw8__add_u: |
3414 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3415 | 0 | Instr.getMemoryOffset(), |
3416 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3417 | 0 | Context.Int32Ty, Context.Int8Ty); |
3418 | 0 | case OpCode::I32__atomic__rmw16__add_u: |
3419 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3420 | 0 | Instr.getMemoryOffset(), |
3421 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3422 | 0 | Context.Int32Ty, Context.Int16Ty); |
3423 | 0 | case OpCode::I64__atomic__rmw8__add_u: |
3424 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3425 | 0 | Instr.getMemoryOffset(), |
3426 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3427 | 0 | Context.Int64Ty, Context.Int8Ty); |
3428 | 0 | case OpCode::I64__atomic__rmw16__add_u: |
3429 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3430 | 0 | Instr.getMemoryOffset(), |
3431 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3432 | 0 | Context.Int64Ty, Context.Int16Ty); |
3433 | 0 | case OpCode::I64__atomic__rmw32__add_u: |
3434 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3435 | 0 | Instr.getMemoryOffset(), |
3436 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAdd, |
3437 | 0 | Context.Int64Ty, Context.Int32Ty); |
3438 | 0 | case OpCode::I32__atomic__rmw__sub: |
3439 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3440 | 0 | Instr.getMemoryOffset(), |
3441 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3442 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3443 | 0 | case OpCode::I64__atomic__rmw__sub: |
3444 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3445 | 0 | Instr.getMemoryOffset(), |
3446 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3447 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3448 | 0 | case OpCode::I32__atomic__rmw8__sub_u: |
3449 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3450 | 0 | Instr.getMemoryOffset(), |
3451 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3452 | 0 | Context.Int32Ty, Context.Int8Ty); |
3453 | 0 | case OpCode::I32__atomic__rmw16__sub_u: |
3454 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3455 | 0 | Instr.getMemoryOffset(), |
3456 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3457 | 0 | Context.Int32Ty, Context.Int16Ty); |
3458 | 0 | case OpCode::I64__atomic__rmw8__sub_u: |
3459 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3460 | 0 | Instr.getMemoryOffset(), |
3461 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3462 | 0 | Context.Int64Ty, Context.Int8Ty); |
3463 | 0 | case OpCode::I64__atomic__rmw16__sub_u: |
3464 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3465 | 0 | Instr.getMemoryOffset(), |
3466 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3467 | 0 | Context.Int64Ty, Context.Int16Ty); |
3468 | 0 | case OpCode::I64__atomic__rmw32__sub_u: |
3469 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3470 | 0 | Instr.getMemoryOffset(), |
3471 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpSub, |
3472 | 0 | Context.Int64Ty, Context.Int32Ty); |
3473 | 0 | case OpCode::I32__atomic__rmw__and: |
3474 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3475 | 0 | Instr.getMemoryOffset(), |
3476 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3477 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3478 | 0 | case OpCode::I64__atomic__rmw__and: |
3479 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3480 | 0 | Instr.getMemoryOffset(), |
3481 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3482 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3483 | 0 | case OpCode::I32__atomic__rmw8__and_u: |
3484 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3485 | 0 | Instr.getMemoryOffset(), |
3486 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3487 | 0 | Context.Int32Ty, Context.Int8Ty); |
3488 | 0 | case OpCode::I32__atomic__rmw16__and_u: |
3489 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3490 | 0 | Instr.getMemoryOffset(), |
3491 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3492 | 0 | Context.Int32Ty, Context.Int16Ty); |
3493 | 0 | case OpCode::I64__atomic__rmw8__and_u: |
3494 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3495 | 0 | Instr.getMemoryOffset(), |
3496 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3497 | 0 | Context.Int64Ty, Context.Int8Ty); |
3498 | 0 | case OpCode::I64__atomic__rmw16__and_u: |
3499 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3500 | 0 | Instr.getMemoryOffset(), |
3501 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3502 | 0 | Context.Int64Ty, Context.Int16Ty); |
3503 | 0 | case OpCode::I64__atomic__rmw32__and_u: |
3504 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3505 | 0 | Instr.getMemoryOffset(), |
3506 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpAnd, |
3507 | 0 | Context.Int64Ty, Context.Int32Ty); |
3508 | 0 | case OpCode::I32__atomic__rmw__or: |
3509 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3510 | 0 | Instr.getMemoryOffset(), |
3511 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3512 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3513 | 0 | case OpCode::I64__atomic__rmw__or: |
3514 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3515 | 0 | Instr.getMemoryOffset(), |
3516 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3517 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3518 | 0 | case OpCode::I32__atomic__rmw8__or_u: |
3519 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3520 | 0 | Instr.getMemoryOffset(), |
3521 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3522 | 0 | Context.Int32Ty, Context.Int8Ty); |
3523 | 0 | case OpCode::I32__atomic__rmw16__or_u: |
3524 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3525 | 0 | Instr.getMemoryOffset(), |
3526 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3527 | 0 | Context.Int32Ty, Context.Int16Ty); |
3528 | 0 | case OpCode::I64__atomic__rmw8__or_u: |
3529 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3530 | 0 | Instr.getMemoryOffset(), |
3531 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3532 | 0 | Context.Int64Ty, Context.Int8Ty); |
3533 | 0 | case OpCode::I64__atomic__rmw16__or_u: |
3534 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3535 | 0 | Instr.getMemoryOffset(), |
3536 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3537 | 0 | Context.Int64Ty, Context.Int16Ty); |
3538 | 0 | case OpCode::I64__atomic__rmw32__or_u: |
3539 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3540 | 0 | Instr.getMemoryOffset(), |
3541 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpOr, |
3542 | 0 | Context.Int64Ty, Context.Int32Ty); |
3543 | 0 | case OpCode::I32__atomic__rmw__xor: |
3544 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3545 | 0 | Instr.getMemoryOffset(), |
3546 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3547 | 0 | Context.Int32Ty, Context.Int32Ty, true); |
3548 | 0 | case OpCode::I64__atomic__rmw__xor: |
3549 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3550 | 0 | Instr.getMemoryOffset(), |
3551 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3552 | 0 | Context.Int64Ty, Context.Int64Ty, true); |
3553 | 0 | case OpCode::I32__atomic__rmw8__xor_u: |
3554 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3555 | 0 | Instr.getMemoryOffset(), |
3556 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3557 | 0 | Context.Int32Ty, Context.Int8Ty); |
3558 | 0 | case OpCode::I32__atomic__rmw16__xor_u: |
3559 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3560 | 0 | Instr.getMemoryOffset(), |
3561 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3562 | 0 | Context.Int32Ty, Context.Int16Ty); |
3563 | 0 | case OpCode::I64__atomic__rmw8__xor_u: |
3564 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3565 | 0 | Instr.getMemoryOffset(), |
3566 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3567 | 0 | Context.Int64Ty, Context.Int8Ty); |
3568 | 0 | case OpCode::I64__atomic__rmw16__xor_u: |
3569 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3570 | 0 | Instr.getMemoryOffset(), |
3571 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3572 | 0 | Context.Int64Ty, Context.Int16Ty); |
3573 | 0 | case OpCode::I64__atomic__rmw32__xor_u: |
3574 | 0 | return compileAtomicRMWOp(Instr.getTargetIndex(), |
3575 | 0 | Instr.getMemoryOffset(), |
3576 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXor, |
3577 | 0 | Context.Int64Ty, Context.Int32Ty); |
3578 | 0 | case OpCode::I32__atomic__rmw__xchg: |
3579 | 0 | return compileAtomicRMWOp( |
3580 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3581 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int32Ty, |
3582 | 0 | Context.Int32Ty, true); |
3583 | 0 | case OpCode::I64__atomic__rmw__xchg: |
3584 | 0 | return compileAtomicRMWOp( |
3585 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3586 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int64Ty, |
3587 | 0 | Context.Int64Ty, true); |
3588 | 0 | case OpCode::I32__atomic__rmw8__xchg_u: |
3589 | 0 | return compileAtomicRMWOp( |
3590 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3591 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int32Ty, |
3592 | 0 | Context.Int8Ty); |
3593 | 0 | case OpCode::I32__atomic__rmw16__xchg_u: |
3594 | 0 | return compileAtomicRMWOp( |
3595 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3596 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int32Ty, |
3597 | 0 | Context.Int16Ty); |
3598 | 0 | case OpCode::I64__atomic__rmw8__xchg_u: |
3599 | 0 | return compileAtomicRMWOp( |
3600 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3601 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int64Ty, |
3602 | 0 | Context.Int8Ty); |
3603 | 0 | case OpCode::I64__atomic__rmw16__xchg_u: |
3604 | 0 | return compileAtomicRMWOp( |
3605 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3606 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int64Ty, |
3607 | 0 | Context.Int16Ty); |
3608 | 0 | case OpCode::I64__atomic__rmw32__xchg_u: |
3609 | 0 | return compileAtomicRMWOp( |
3610 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3611 | 0 | Instr.getMemoryAlign(), LLVMAtomicRMWBinOpXchg, Context.Int64Ty, |
3612 | 0 | Context.Int32Ty); |
3613 | 0 | case OpCode::I32__atomic__rmw__cmpxchg: |
3614 | 0 | return compileAtomicCompareExchange( |
3615 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3616 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int32Ty, true); |
3617 | 0 | case OpCode::I64__atomic__rmw__cmpxchg: |
3618 | 0 | return compileAtomicCompareExchange( |
3619 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3620 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int64Ty, true); |
3621 | 0 | case OpCode::I32__atomic__rmw8__cmpxchg_u: |
3622 | 0 | return compileAtomicCompareExchange( |
3623 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3624 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int8Ty); |
3625 | 0 | case OpCode::I32__atomic__rmw16__cmpxchg_u: |
3626 | 0 | return compileAtomicCompareExchange( |
3627 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3628 | 0 | Instr.getMemoryAlign(), Context.Int32Ty, Context.Int16Ty); |
3629 | 0 | case OpCode::I64__atomic__rmw8__cmpxchg_u: |
3630 | 0 | return compileAtomicCompareExchange( |
3631 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3632 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int8Ty); |
3633 | 0 | case OpCode::I64__atomic__rmw16__cmpxchg_u: |
3634 | 0 | return compileAtomicCompareExchange( |
3635 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3636 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int16Ty); |
3637 | 0 | case OpCode::I64__atomic__rmw32__cmpxchg_u: |
3638 | 0 | return compileAtomicCompareExchange( |
3639 | 0 | Instr.getTargetIndex(), Instr.getMemoryOffset(), |
3640 | 0 | Instr.getMemoryAlign(), Context.Int64Ty, Context.Int32Ty); |
3641 | | |
3642 | 0 | default: |
3643 | 0 | assumingUnreachable(); |
3644 | 911k | } |
3645 | 911k | return; |
3646 | 911k | }; |
3647 | 1.35M | for (const auto &Instr : Instrs) { |
3648 | | // Update instruction count |
3649 | 1.35M | if (LocalInstrCount) { |
3650 | 0 | Builder.createStore( |
3651 | 0 | Builder.createAdd( |
3652 | 0 | Builder.createLoad(Context.Int64Ty, LocalInstrCount), |
3653 | 0 | LLContext.getInt64(1)), |
3654 | 0 | LocalInstrCount); |
3655 | 0 | } |
3656 | 1.35M | if (LocalGas) { |
3657 | 0 | auto NewGas = Builder.createAdd( |
3658 | 0 | Builder.createLoad(Context.Int64Ty, LocalGas), |
3659 | 0 | Builder.createLoad( |
3660 | 0 | Context.Int64Ty, |
3661 | 0 | Builder.createConstInBoundsGEP2_64( |
3662 | 0 | LLVM::Type::getArrayType(Context.Int64Ty, UINT16_MAX + 1), |
3663 | 0 | Context.getCostTable(Builder, ExecCtx), 0, |
3664 | 0 | uint16_t(Instr.getOpCode())))); |
3665 | 0 | Builder.createStore(NewGas, LocalGas); |
3666 | 0 | } |
3667 | | |
3668 | | // Make the instruction node according to Code. |
3669 | 1.35M | Dispatch(Instr); |
3670 | 1.35M | } |
3671 | 11.7k | } |
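When statistics are enabled, the loop above instruments every compiled instruction: LocalInstrCount is incremented by one, and LocalGas is increased by the cost-table entry indexed by the 16-bit opcode value. A hypothetical accounting sketch of what that generated code computes (the real cost table lives behind Context.getCostTable and is not laid out like this):

    #include <array>
    #include <cstdint>
    #include <vector>

    // Hypothetical per-function accounting mirroring the IR emitted in the
    // loop above: each instruction bumps the instruction counter and adds the
    // cost-table entry for its opcode to the gas accumulator (a plain wrapping
    // i64 add, as in the generated code).
    struct Meter {
      uint64_t InstrCount = 0;
      uint64_t Gas = 0;
    };

    Meter meterFunction(const std::array<uint64_t, UINT16_MAX + 1> &CostTable,
                        const std::vector<uint16_t> &OpCodes) {
      Meter M;
      for (const uint16_t Op : OpCodes) {
        M.InstrCount += 1;      // LocalInstrCount = LocalInstrCount + 1
        M.Gas += CostTable[Op]; // LocalGas = LocalGas + CostTable[opcode]
      }
      return M;
    }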
3672 | 1.71k | void compileSignedTrunc(LLVM::Type IntType) noexcept { |
3673 | 1.71k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "strunc.norm"); |
3674 | 1.71k | auto NotMinBB = LLVM::BasicBlock::create(LLContext, F.Fn, "strunc.notmin"); |
3675 | 1.71k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "strunc.notmax"); |
3676 | 1.71k | auto Value = stackPop(); |
3677 | 1.71k | const auto [Precise, MinFp, MaxFp] = |
3678 | 1.71k | [IntType, Value]() -> std::tuple<bool, LLVM::Value, LLVM::Value> { |
3679 | 1.71k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3680 | 1.71k | const auto [Min, Max] = [BitWidth]() -> std::tuple<int64_t, int64_t> { |
3681 | 1.71k | switch (BitWidth) { |
3682 | 1.33k | case 32: |
3683 | 1.33k | return {std::numeric_limits<int32_t>::min(), |
3684 | 1.33k | std::numeric_limits<int32_t>::max()}; |
3685 | 378 | case 64: |
3686 | 378 | return {std::numeric_limits<int64_t>::min(), |
3687 | 378 | std::numeric_limits<int64_t>::max()}; |
3688 | 0 | default: |
3689 | 0 | assumingUnreachable(); |
3690 | 1.71k | } |
3691 | 1.71k | }(); |
3692 | 1.71k | auto FPType = Value.getType(); |
3693 | 1.71k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3694 | 1.71k | const auto FPWidth = FPType.getFPMantissaWidth(); |
3695 | 1.71k | return {BitWidth <= FPWidth, LLVM::Value::getConstReal(FPType, Min), |
3696 | 1.71k | LLVM::Value::getConstReal(FPType, Max)}; |
3697 | 1.71k | }(); |
3698 | | |
3699 | 1.71k | auto IsNotNan = Builder.createLikely(Builder.createFCmpORD(Value, Value)); |
3700 | 1.71k | Builder.createCondBr(IsNotNan, NormBB, |
3701 | 1.71k | getTrapBB(ErrCode::Value::InvalidConvToInt)); |
3702 | | |
3703 | 1.71k | Builder.positionAtEnd(NormBB); |
3704 | 1.71k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3705 | 1.71k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3706 | 1.71k | auto IsNotUnderflow = |
3707 | 1.71k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3708 | 1.71k | Builder.createCondBr(IsNotUnderflow, NotMinBB, |
3709 | 1.71k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3710 | | |
3711 | 1.71k | Builder.positionAtEnd(NotMinBB); |
3712 | 1.71k | auto IsNotOverflow = Builder.createLikely( |
3713 | 1.71k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3714 | 1.71k | Builder.createCondBr(IsNotOverflow, NotMaxBB, |
3715 | 1.71k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3716 | | |
3717 | 1.71k | Builder.positionAtEnd(NotMaxBB); |
3718 | 1.71k | stackPush(Builder.createFPToSI(Trunc, IntType)); |
3719 | 1.71k | } |
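compileSignedTrunc guards the fptosi with three explicit checks: NaN traps with InvalidConvToInt, and a truncated value below the minimum or above the maximum bound traps with IntegerOverflow. The Precise flag records whether the integer width fits in the float type's mantissa; if it does, the upper bound converts exactly and a <= comparison is safe, otherwise the constant rounds up to the next power of two (f32 to i32/i64, f64 to i64) and a strict < is required. A scalar sketch of the exact f64-to-i32 case (plain C++; std::nullopt stands in for branching to a trap block):

    #include <cmath>
    #include <cstdint>
    #include <limits>
    #include <optional>

    // f64 -> i32 with the same guard order as the IR above. Both i32 bounds
    // are exact in double (53-bit mantissa), so the upper comparison may be <=.
    std::optional<int32_t> truncSignedF64ToI32(double V) {
      if (std::isnan(V)) {
        return std::nullopt; // ErrCode::Value::InvalidConvToInt
      }
      const double T = std::trunc(V);
      if (T < static_cast<double>(std::numeric_limits<int32_t>::min()) ||
          T > static_cast<double>(std::numeric_limits<int32_t>::max())) {
        return std::nullopt; // ErrCode::Value::IntegerOverflow
      }
      return static_cast<int32_t>(T);
    }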
3720 | 1.13k | void compileSignedTruncSat(LLVM::Type IntType) noexcept { |
3721 | 1.13k | auto CurrBB = Builder.getInsertBlock(); |
3722 | 1.13k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.norm"); |
3723 | 1.13k | auto NotMinBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.notmin"); |
3724 | 1.13k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.notmax"); |
3725 | 1.13k | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "ssat.end"); |
3726 | 1.13k | auto Value = stackPop(); |
3727 | 1.13k | const auto [Precise, MinInt, MaxInt, MinFp, MaxFp] = [IntType, Value]() |
3728 | 1.13k | -> std::tuple<bool, uint64_t, uint64_t, LLVM::Value, LLVM::Value> { |
3729 | 1.13k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3730 | 1.13k | const auto [Min, Max] = [BitWidth]() -> std::tuple<int64_t, int64_t> { |
3731 | 1.13k | switch (BitWidth) { |
3732 | 499 | case 32: |
3733 | 499 | return {std::numeric_limits<int32_t>::min(), |
3734 | 499 | std::numeric_limits<int32_t>::max()}; |
3735 | 640 | case 64: |
3736 | 640 | return {std::numeric_limits<int64_t>::min(), |
3737 | 640 | std::numeric_limits<int64_t>::max()}; |
3738 | 0 | default: |
3739 | 0 | assumingUnreachable(); |
3740 | 1.13k | } |
3741 | 1.13k | }(); |
3742 | 1.13k | auto FPType = Value.getType(); |
3743 | 1.13k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3744 | 1.13k | const auto FPWidth = FPType.getFPMantissaWidth(); |
3745 | 1.13k | return {BitWidth <= FPWidth, static_cast<uint64_t>(Min), |
3746 | 1.13k | static_cast<uint64_t>(Max), |
3747 | 1.13k | LLVM::Value::getConstReal(FPType, Min), |
3748 | 1.13k | LLVM::Value::getConstReal(FPType, Max)}; |
3749 | 1.13k | }(); |
3750 | | |
3751 | 1.13k | auto IsNotNan = Builder.createLikely(Builder.createFCmpORD(Value, Value)); |
3752 | 1.13k | Builder.createCondBr(IsNotNan, NormBB, EndBB); |
3753 | | |
3754 | 1.13k | Builder.positionAtEnd(NormBB); |
3755 | 1.13k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3756 | 1.13k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3757 | 1.13k | auto IsNotUnderflow = |
3758 | 1.13k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3759 | 1.13k | Builder.createCondBr(IsNotUnderflow, NotMinBB, EndBB); |
3760 | | |
3761 | 1.13k | Builder.positionAtEnd(NotMinBB); |
3762 | 1.13k | auto IsNotOverflow = Builder.createLikely( |
3763 | 1.13k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3764 | 1.13k | Builder.createCondBr(IsNotOverflow, NotMaxBB, EndBB); |
3765 | | |
3766 | 1.13k | Builder.positionAtEnd(NotMaxBB); |
3767 | 1.13k | auto IntValue = Builder.createFPToSI(Trunc, IntType); |
3768 | 1.13k | Builder.createBr(EndBB); |
3769 | | |
3770 | 1.13k | Builder.positionAtEnd(EndBB); |
3771 | 1.13k | auto PHIRet = Builder.createPHI(IntType); |
3772 | 1.13k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, 0, true), CurrBB); |
3773 | 1.13k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MinInt, true), NormBB); |
3774 | 1.13k | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MaxInt, true), |
3775 | 1.13k | NotMinBB); |
3776 | 1.13k | PHIRet.addIncoming(IntValue, NotMaxBB); |
3777 | | |
3778 | 1.13k | stackPush(PHIRet); |
3779 | 1.13k | } |
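The saturating variant keeps the same three checks but never traps: the end block's PHI merges 0 from the NaN path, the minimum from the underflow path, the maximum from the overflow path, and the converted value from the normal path. A scalar equivalent for f64 to i32 (a sketch, not the builder code):

    #include <cmath>
    #include <cstdint>
    #include <limits>

    // Saturating f64 -> i32 truncation, mirroring the four PHI incomings
    // above: NaN -> 0, below range -> INT32_MIN, above range -> INT32_MAX,
    // otherwise convert.
    int32_t truncSatSignedF64ToI32(double V) {
      if (std::isnan(V)) {
        return 0;
      }
      const double T = std::trunc(V);
      if (T < static_cast<double>(std::numeric_limits<int32_t>::min())) {
        return std::numeric_limits<int32_t>::min();
      }
      if (T > static_cast<double>(std::numeric_limits<int32_t>::max())) {
        return std::numeric_limits<int32_t>::max();
      }
      return static_cast<int32_t>(T);
    }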
3780 | 3.02k | void compileUnsignedTrunc(LLVM::Type IntType) noexcept { |
3781 | 3.02k | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "utrunc.norm"); |
3782 | 3.02k | auto NotMinBB = LLVM::BasicBlock::create(LLContext, F.Fn, "utrunc.notmin"); |
3783 | 3.02k | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "utrunc.notmax"); |
3784 | 3.02k | auto Value = stackPop(); |
3785 | 3.02k | const auto [Precise, MinFp, MaxFp] = |
3786 | 3.02k | [IntType, Value]() -> std::tuple<bool, LLVM::Value, LLVM::Value> { |
3787 | 3.02k | const auto BitWidth = IntType.getIntegerBitWidth(); |
3788 | 3.02k | const auto [Min, Max] = [BitWidth]() -> std::tuple<uint64_t, uint64_t> { |
3789 | 3.02k | switch (BitWidth) { |
3790 | 1.17k | case 32: |
3791 | 1.17k | return {std::numeric_limits<uint32_t>::min(), |
3792 | 1.17k | std::numeric_limits<uint32_t>::max()}; |
3793 | 1.85k | case 64: |
3794 | 1.85k | return {std::numeric_limits<uint64_t>::min(), |
3795 | 1.85k | std::numeric_limits<uint64_t>::max()}; |
3796 | 0 | default: |
3797 | 0 | assumingUnreachable(); |
3798 | 3.02k | } |
3799 | 3.02k | }(); |
3800 | 3.02k | auto FPType = Value.getType(); |
3801 | 3.02k | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3802 | 3.02k | const auto FPWidth = FPType.getFPMantissaWidth(); |
3803 | 3.02k | return {BitWidth <= FPWidth, LLVM::Value::getConstReal(FPType, Min), |
3804 | 3.02k | LLVM::Value::getConstReal(FPType, Max)}; |
3805 | 3.02k | }(); |
3806 | | |
3807 | 3.02k | auto IsNotNan = Builder.createLikely(Builder.createFCmpORD(Value, Value)); |
3808 | 3.02k | Builder.createCondBr(IsNotNan, NormBB, |
3809 | 3.02k | getTrapBB(ErrCode::Value::InvalidConvToInt)); |
3810 | | |
3811 | 3.02k | Builder.positionAtEnd(NormBB); |
3812 | 3.02k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3813 | 3.02k | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3814 | 3.02k | auto IsNotUnderflow = |
3815 | 3.02k | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3816 | 3.02k | Builder.createCondBr(IsNotUnderflow, NotMinBB, |
3817 | 3.02k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3818 | | |
3819 | 3.02k | Builder.positionAtEnd(NotMinBB); |
3820 | 3.02k | auto IsNotOverflow = Builder.createLikely( |
3821 | 3.02k | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3822 | 3.02k | Builder.createCondBr(IsNotOverflow, NotMaxBB, |
3823 | 3.02k | getTrapBB(ErrCode::Value::IntegerOverflow)); |
3824 | | |
3825 | 3.02k | Builder.positionAtEnd(NotMaxBB); |
3826 | 3.02k | stackPush(Builder.createFPToUI(Trunc, IntType)); |
3827 | 3.02k | } |
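The trapping variant above differs only in where out-of-range inputs go: instead of clamping, NaN branches to an InvalidConvToInt trap block and out-of-range values to an IntegerOverflow trap block. A hedged host-side sketch for the f64-to-u32 case, with exceptions standing in for the trap blocks (the helper name and messages are illustrative, not WasmEdge API):

#include <cmath>
#include <cstdint>
#include <limits>
#include <stdexcept>

// Sketch of the trapping i32.trunc_f64_u shape checked above.
static uint32_t trunc_u32_or_trap(double X) {
  if (std::isnan(X)) {
    throw std::domain_error("invalid conversion to integer");  // InvalidConvToInt
  }
  const double T = std::trunc(X);
  if (T < 0.0 ||
      T > static_cast<double>(std::numeric_limits<uint32_t>::max())) {
    throw std::overflow_error("integer overflow");              // IntegerOverflow
  }
  return static_cast<uint32_t>(T);                               // in-range: fptoui
}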
3828 | 975 | void compileUnsignedTruncSat(LLVM::Type IntType) noexcept { |
3829 | 975 | auto CurrBB = Builder.getInsertBlock(); |
3830 | 975 | auto NormBB = LLVM::BasicBlock::create(LLContext, F.Fn, "usat.norm"); |
3831 | 975 | auto NotMaxBB = LLVM::BasicBlock::create(LLContext, F.Fn, "usat.notmax"); |
3832 | 975 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "usat.end"); |
3833 | 975 | auto Value = stackPop(); |
3834 | 975 | const auto [Precise, MinInt, MaxInt, MinFp, MaxFp] = [IntType, Value]() |
3835 | 975 | -> std::tuple<bool, uint64_t, uint64_t, LLVM::Value, LLVM::Value> { |
3836 | 975 | const auto BitWidth = IntType.getIntegerBitWidth(); |
3837 | 975 | const auto [Min, Max] = [BitWidth]() -> std::tuple<uint64_t, uint64_t> { |
3838 | 975 | switch (BitWidth) { |
3839 | 289 | case 32: |
3840 | 289 | return {std::numeric_limits<uint32_t>::min(), |
3841 | 289 | std::numeric_limits<uint32_t>::max()}; |
3842 | 686 | case 64: |
3843 | 686 | return {std::numeric_limits<uint64_t>::min(), |
3844 | 686 | std::numeric_limits<uint64_t>::max()}; |
3845 | 0 | default: |
3846 | 0 | assumingUnreachable(); |
3847 | 975 | } |
3848 | 975 | }(); |
3849 | 975 | auto FPType = Value.getType(); |
3850 | 975 | assuming(FPType.isFloatTy() || FPType.isDoubleTy()); |
3851 | 975 | const auto FPWidth = FPType.getFPMantissaWidth(); |
3852 | 975 | return {BitWidth <= FPWidth, Min, Max, |
3853 | 975 | LLVM::Value::getConstReal(FPType, Min), |
3854 | 975 | LLVM::Value::getConstReal(FPType, Max)}; |
3855 | 975 | }(); |
3856 | | |
3857 | 975 | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
3858 | 975 | auto Trunc = Builder.createUnaryIntrinsic(LLVM::Core::Trunc, Value); |
3859 | 975 | auto IsNotUnderflow = |
3860 | 975 | Builder.createLikely(Builder.createFCmpOGE(Trunc, MinFp)); |
3861 | 975 | Builder.createCondBr(IsNotUnderflow, NormBB, EndBB); |
3862 | | |
3863 | 975 | Builder.positionAtEnd(NormBB); |
3864 | 975 | auto IsNotOverflow = Builder.createLikely( |
3865 | 975 | Builder.createFCmp(Precise ? LLVMRealOLE : LLVMRealOLT, Trunc, MaxFp)); |
3866 | 975 | Builder.createCondBr(IsNotOverflow, NotMaxBB, EndBB); |
3867 | | |
3868 | 975 | Builder.positionAtEnd(NotMaxBB); |
3869 | 975 | auto IntValue = Builder.createFPToUI(Trunc, IntType); |
3870 | 975 | Builder.createBr(EndBB); |
3871 | | |
3872 | 975 | Builder.positionAtEnd(EndBB); |
3873 | 975 | auto PHIRet = Builder.createPHI(IntType); |
3874 | 975 | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MinInt), CurrBB); |
3875 | 975 | PHIRet.addIncoming(LLVM::Value::getConstInt(IntType, MaxInt), NormBB); |
3876 | 975 | PHIRet.addIncoming(IntValue, NotMaxBB); |
3877 | | |
3878 | 975 | stackPush(PHIRet); |
3879 | 975 | } |
3880 | | |
3881 | | void compileAtomicCheckOffsetAlignment(LLVM::Value Offset, |
3882 | 40 | LLVM::Type IntType) noexcept { |
3883 | 40 | const auto BitWidth = IntType.getIntegerBitWidth(); |
3884 | 40 | auto BWMask = LLContext.getInt64((BitWidth >> 3) - 1); |
3885 | 40 | auto Value = Builder.createAnd(Offset, BWMask); |
3886 | 40 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "address_align_ok"); |
3887 | 40 | auto IsAddressAligned = Builder.createLikely( |
3888 | 40 | Builder.createICmpEQ(Value, LLContext.getInt64(0))); |
3889 | 40 | Builder.createCondBr(IsAddressAligned, OkBB, |
3890 | 40 | getTrapBB(ErrCode::Value::UnalignedAtomicAccess)); |
3891 | | |
3892 | 40 | Builder.positionAtEnd(OkBB); |
3893 | 40 | } |
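The helper above implements the natural-alignment rule for atomic accesses: mask the effective address with (access width in bytes - 1) and trap on a non-zero result. A small sketch of the same test (names are illustrative):

#include <cstdint>

// An access of BitWidth bits at Addr is naturally aligned iff the low
// log2(BitWidth / 8) bits of the address are zero.
static bool isNaturallyAligned(uint64_t Addr, uint32_t BitWidth) {
  const uint64_t Mask = (BitWidth >> 3) - 1;  // e.g. 32-bit access -> 0x3
  return (Addr & Mask) == 0;                  // false -> UnalignedAtomicAccess trap
}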
3894 | | |
3895 | 185 | void compileMemoryFence() noexcept { |
3896 | 185 | Builder.createFence(LLVMAtomicOrderingSequentiallyConsistent); |
3897 | 185 | } |
3898 | | void compileAtomicNotify(unsigned MemoryIndex, |
3899 | 29 | unsigned MemoryOffset) noexcept { |
3900 | 29 | auto Count = stackPop(); |
3901 | 29 | auto Addr = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3902 | 29 | if (MemoryOffset != 0) { |
3903 | 21 | Addr = Builder.createAdd(Addr, LLContext.getInt64(MemoryOffset)); |
3904 | 21 | } |
3905 | 29 | compileAtomicCheckOffsetAlignment(Addr, Context.Int32Ty); |
3906 | 29 | auto Offset = stackPop(); |
3907 | | |
3908 | 29 | stackPush(Builder.createCall( |
3909 | 29 | Context.getIntrinsic( |
3910 | 29 | Builder, Executable::Intrinsics::kMemAtomicNotify, |
3911 | 29 | LLVM::Type::getFunctionType( |
3912 | 29 | Context.Int32Ty, |
3913 | 29 | {Context.Int32Ty, Context.Int32Ty, Context.Int32Ty}, false)), |
3914 | 29 | {LLContext.getInt32(MemoryIndex), Offset, Count})); |
3915 | 29 | } |
3916 | | void compileAtomicWait(unsigned MemoryIndex, unsigned MemoryOffset, |
3917 | 11 | LLVM::Type TargetType, uint32_t BitWidth) noexcept { |
3918 | 11 | auto Timeout = stackPop(); |
3919 | 11 | auto ExpectedValue = Builder.createZExtOrTrunc(stackPop(), Context.Int64Ty); |
3920 | 11 | auto Addr = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3921 | 11 | if (MemoryOffset != 0) { |
3922 | 6 | Addr = Builder.createAdd(Addr, LLContext.getInt64(MemoryOffset)); |
3923 | 6 | } |
3924 | 11 | compileAtomicCheckOffsetAlignment(Addr, TargetType); |
3925 | 11 | auto Offset = stackPop(); |
3926 | | |
3927 | 11 | stackPush(Builder.createCall( |
3928 | 11 | Context.getIntrinsic( |
3929 | 11 | Builder, Executable::Intrinsics::kMemAtomicWait, |
3930 | 11 | LLVM::Type::getFunctionType(Context.Int32Ty, |
3931 | 11 | {Context.Int32Ty, Context.Int32Ty, |
3932 | 11 | Context.Int64Ty, Context.Int64Ty, |
3933 | 11 | Context.Int32Ty}, |
3934 | 11 | false)), |
3935 | 11 | {LLContext.getInt32(MemoryIndex), Offset, ExpectedValue, Timeout, |
3936 | 11 | LLContext.getInt32(BitWidth)})); |
3937 | 11 | } |
3938 | | void compileAtomicLoad(unsigned MemoryIndex, unsigned MemoryOffset, |
3939 | | unsigned Alignment, LLVM::Type IntType, |
3940 | 0 | LLVM::Type TargetType, bool Signed = false) noexcept { |
3941 | |
|
3942 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3943 | 0 | if (MemoryOffset != 0) { |
3944 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
3945 | 0 | } |
3946 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
3947 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
3948 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
3949 | 0 | Offset); |
3950 | |
|
3951 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
3952 | 0 | auto Load = Builder.createLoad(TargetType, Ptr, true); |
3953 | 0 | Load.setAlignment(1 << Alignment); |
3954 | 0 | Load.setOrdering(LLVMAtomicOrderingSequentiallyConsistent); |
3955 | |
|
3956 | 0 | if (Signed) { |
3957 | 0 | Stack.back() = Builder.createSExt(Load, IntType); |
3958 | 0 | } else { |
3959 | 0 | Stack.back() = Builder.createZExt(Load, IntType); |
3960 | 0 | } |
3961 | 0 | } |
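The generated code performs a sequentially consistent load of the narrow TargetType at the checked linear-memory address and then sign- or zero-extends it to the stack type. A rough host-side sketch of the zero-extending case using the GCC/Clang atomic builtins (Mem and the helper name are assumptions; the alignment check above guarantees the pointer is suitably aligned):

#include <cstdint>

// Roughly i64.atomic.load32_u: seq_cst 32-bit load, then zext to 64 bits.
static uint64_t atomicLoad32ZExt(const uint8_t *Mem, uint64_t EffectiveOffset) {
  const auto *Ptr = reinterpret_cast<const uint32_t *>(Mem + EffectiveOffset);
  const uint32_t Narrow = __atomic_load_n(Ptr, __ATOMIC_SEQ_CST);
  return static_cast<uint64_t>(Narrow);
}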
3962 | | void compileAtomicStore(unsigned MemoryIndex, unsigned MemoryOffset, |
3963 | | unsigned Alignment, LLVM::Type, LLVM::Type TargetType, |
3964 | 0 | bool Signed = false) noexcept { |
3965 | 0 | auto V = stackPop(); |
3966 | |
|
3967 | 0 | if (Signed) { |
3968 | 0 | V = Builder.createSExtOrTrunc(V, TargetType); |
3969 | 0 | } else { |
3970 | 0 | V = Builder.createZExtOrTrunc(V, TargetType); |
3971 | 0 | } |
3972 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3973 | 0 | if (MemoryOffset != 0) { |
3974 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
3975 | 0 | } |
3976 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
3977 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
3978 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
3979 | 0 | Offset); |
3980 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
3981 | 0 | auto Store = Builder.createStore(V, Ptr, true); |
3982 | 0 | Store.setAlignment(1 << Alignment); |
3983 | 0 | Store.setOrdering(LLVMAtomicOrderingSequentiallyConsistent); |
3984 | 0 | } |
3985 | | |
3986 | | void compileAtomicRMWOp(unsigned MemoryIndex, unsigned MemoryOffset, |
3987 | | [[maybe_unused]] unsigned Alignment, |
3988 | | LLVMAtomicRMWBinOp BinOp, LLVM::Type IntType, |
3989 | 0 | LLVM::Type TargetType, bool Signed = false) noexcept { |
3990 | 0 | auto Value = Builder.createSExtOrTrunc(stackPop(), TargetType); |
3991 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
3992 | 0 | if (MemoryOffset != 0) { |
3993 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
3994 | 0 | } |
3995 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
3996 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
3997 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
3998 | 0 | Offset); |
3999 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
4000 | |
|
4001 | 0 | auto Ret = Builder.createAtomicRMW( |
4002 | 0 | BinOp, Ptr, Value, LLVMAtomicOrderingSequentiallyConsistent); |
4003 | | #if LLVM_VERSION_MAJOR >= 13 |
4004 | | Ret.setAlignment(1 << Alignment); |
4005 | | #endif |
4006 | 0 | if (Signed) { |
4007 | 0 | Stack.back() = Builder.createSExt(Ret, IntType); |
4008 | 0 | } else { |
4009 | 0 | Stack.back() = Builder.createZExt(Ret, IntType); |
4010 | 0 | } |
4011 | 0 | } |
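The RMW path truncates the operand to the target width, performs a sequentially consistent atomic read-modify-write at the checked address, and pushes the extended old value. A sketch of the add case with C++20 std::atomic_ref (names are illustrative; the real code picks sign or zero extension per instruction):

#include <atomic>
#include <cstdint>

// Roughly i64.atomic.rmw32.add_u: fetch-add on the 32-bit cell, then
// zero-extend the returned *old* value back to the 64-bit stack type.
static uint64_t atomicRmw32AddZExt(uint8_t *Mem, uint64_t EffectiveOffset,
                                   uint64_t Operand) {
  auto *Cell = reinterpret_cast<uint32_t *>(Mem + EffectiveOffset);
  std::atomic_ref<uint32_t> Ref(*Cell);
  const uint32_t Old = Ref.fetch_add(static_cast<uint32_t>(Operand),
                                     std::memory_order_seq_cst);
  return static_cast<uint64_t>(Old);
}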
4012 | | void compileAtomicCompareExchange(unsigned MemoryIndex, unsigned MemoryOffset, |
4013 | | [[maybe_unused]] unsigned Alignment, |
4014 | | LLVM::Type IntType, LLVM::Type TargetType, |
4015 | 0 | bool Signed = false) noexcept { |
4016 | |
|
4017 | 0 | auto Replacement = Builder.createSExtOrTrunc(stackPop(), TargetType); |
4018 | 0 | auto Expected = Builder.createSExtOrTrunc(stackPop(), TargetType); |
4019 | 0 | auto Offset = Builder.createZExt(Stack.back(), Context.Int64Ty); |
4020 | 0 | if (MemoryOffset != 0) { |
4021 | 0 | Offset = Builder.createAdd(Offset, LLContext.getInt64(MemoryOffset)); |
4022 | 0 | } |
4023 | 0 | compileAtomicCheckOffsetAlignment(Offset, TargetType); |
4024 | 0 | auto VPtr = Builder.createInBoundsGEP1( |
4025 | 0 | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), |
4026 | 0 | Offset); |
4027 | 0 | auto Ptr = Builder.createBitCast(VPtr, TargetType.getPointerTo()); |
4028 | |
|
4029 | 0 | auto Ret = Builder.createAtomicCmpXchg( |
4030 | 0 | Ptr, Expected, Replacement, LLVMAtomicOrderingSequentiallyConsistent, |
4031 | 0 | LLVMAtomicOrderingSequentiallyConsistent); |
4032 | | #if LLVM_VERSION_MAJOR >= 13 |
4033 | | Ret.setAlignment(1 << Alignment); |
4034 | | #endif |
4035 | 0 | auto OldVal = Builder.createExtractValue(Ret, 0); |
4036 | 0 | if (Signed) { |
4037 | 0 | Stack.back() = Builder.createSExt(OldVal, IntType); |
4038 | 0 | } else { |
4039 | 0 | Stack.back() = Builder.createZExt(OldVal, IntType); |
4040 | 0 | } |
4041 | 0 | } |
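Note that the cmpxchg lowering keeps only element 0 of the {old value, success} pair: the Wasm instruction returns the observed old value whether or not the exchange happened. A sketch of that behavior (std::atomic_ref here is an assumed host-side equivalent, not the generated code itself):

#include <atomic>
#include <cstdint>

// i32.atomic.rmw.cmpxchg shape: the caller gets the observed old value.
static uint32_t atomicCmpXchg32(uint8_t *Mem, uint64_t EffectiveOffset,
                                uint32_t Expected, uint32_t Replacement) {
  auto *Cell = reinterpret_cast<uint32_t *>(Mem + EffectiveOffset);
  std::atomic_ref<uint32_t> Ref(*Cell);
  Ref.compare_exchange_strong(Expected, Replacement,
                              std::memory_order_seq_cst,
                              std::memory_order_seq_cst);
  return Expected;  // on failure Expected was overwritten with the old value;
                    // on success it already equals the old value
}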
4042 | | |
4043 | 12.4k | void compileReturn() noexcept { |
4044 | 12.4k | updateInstrCount(); |
4045 | 12.4k | updateGas(); |
4046 | 12.4k | auto Ty = F.Ty.getReturnType(); |
4047 | 12.4k | if (Ty.isVoidTy()) { |
4048 | 2.44k | Builder.createRetVoid(); |
4049 | 10.0k | } else if (Ty.isStructTy()) { |
4050 | 489 | const auto Count = Ty.getStructNumElements(); |
4051 | 489 | std::vector<LLVM::Value> Ret(Count); |
4052 | 1.86k | for (unsigned I = 0; I < Count; ++I) { |
4053 | 1.37k | const unsigned J = Count - 1 - I; |
4054 | 1.37k | Ret[J] = stackPop(); |
4055 | 1.37k | } |
4056 | 489 | Builder.createAggregateRet(Ret); |
4057 | 9.52k | } else { |
4058 | 9.52k | Builder.createRet(stackPop()); |
4059 | 9.52k | } |
4060 | 12.4k | } |
4061 | | |
4062 | 19.0k | void updateInstrCount() noexcept { |
4063 | 19.0k | if (LocalInstrCount) { |
4064 | 0 | auto Store [[maybe_unused]] = Builder.createAtomicRMW( |
4065 | 0 | LLVMAtomicRMWBinOpAdd, Context.getInstrCount(Builder, ExecCtx), |
4066 | 0 | Builder.createLoad(Context.Int64Ty, LocalInstrCount), |
4067 | 0 | LLVMAtomicOrderingMonotonic); |
4068 | | #if LLVM_VERSION_MAJOR >= 13 |
4069 | | Store.setAlignment(8); |
4070 | | #endif |
4071 | 0 | Builder.createStore(LLContext.getInt64(0), LocalInstrCount); |
4072 | 0 | } |
4073 | 19.0k | } |
4074 | | |
4075 | 21.5k | void updateGas() noexcept { |
4076 | 21.5k | if (LocalGas) { |
4077 | 0 | auto CurrBB = Builder.getInsertBlock(); |
4078 | 0 | auto CheckBB = LLVM::BasicBlock::create(LLContext, F.Fn, "gas_check"); |
4079 | 0 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "gas_ok"); |
4080 | 0 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "gas_end"); |
4081 | |
|
4082 | 0 | auto Cost = Builder.createLoad(Context.Int64Ty, LocalGas); |
4083 | 0 | Cost.setAlignment(64); |
4084 | 0 | auto GasPtr = Context.getGas(Builder, ExecCtx); |
4085 | 0 | auto GasLimit = Context.getGasLimit(Builder, ExecCtx); |
4086 | 0 | auto Gas = Builder.createLoad(Context.Int64Ty, GasPtr); |
4087 | 0 | Gas.setAlignment(64); |
4088 | 0 | Gas.setOrdering(LLVMAtomicOrderingMonotonic); |
4089 | 0 | Builder.createBr(CheckBB); |
4090 | 0 | Builder.positionAtEnd(CheckBB); |
4091 | |
|
4092 | 0 | auto PHIOldGas = Builder.createPHI(Context.Int64Ty); |
4093 | 0 | auto NewGas = Builder.createAdd(PHIOldGas, Cost); |
4094 | 0 | auto IsGasRemain = |
4095 | 0 | Builder.createLikely(Builder.createICmpULE(NewGas, GasLimit)); |
4096 | 0 | Builder.createCondBr(IsGasRemain, OkBB, |
4097 | 0 | getTrapBB(ErrCode::Value::CostLimitExceeded)); |
4098 | 0 | Builder.positionAtEnd(OkBB); |
4099 | |
|
4100 | 0 | auto RGasAndSucceed = Builder.createAtomicCmpXchg( |
4101 | 0 | GasPtr, PHIOldGas, NewGas, LLVMAtomicOrderingMonotonic, |
4102 | 0 | LLVMAtomicOrderingMonotonic); |
4103 | | #if LLVM_VERSION_MAJOR >= 13 |
4104 | | RGasAndSucceed.setAlignment(8); |
4105 | | #endif |
4106 | 0 | RGasAndSucceed.setWeak(true); |
4107 | 0 | auto RGas = Builder.createExtractValue(RGasAndSucceed, 0); |
4108 | 0 | auto Succeed = Builder.createExtractValue(RGasAndSucceed, 1); |
4109 | 0 | Builder.createCondBr(Builder.createLikely(Succeed), EndBB, CheckBB); |
4110 | 0 | Builder.positionAtEnd(EndBB); |
4111 | |
|
4112 | 0 | Builder.createStore(LLContext.getInt64(0), LocalGas); |
4113 | |
|
4114 | 0 | PHIOldGas.addIncoming(Gas, CurrBB); |
4115 | 0 | PHIOldGas.addIncoming(RGas, OkBB); |
4116 | 0 | } |
4117 | 21.5k | } |
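updateGas is a classic compare-and-swap loop: read the global gas counter, add the locally accumulated cost, trap if the new total exceeds the limit, and retry with the freshly observed value whenever the weak CAS fails. A sketch with std::atomic, using an exception in place of the CostLimitExceeded trap block (all names are illustrative):

#include <atomic>
#include <cstdint>
#include <stdexcept>

static void chargeGas(std::atomic<uint64_t> &Gas, uint64_t GasLimit,
                      uint64_t Cost) {
  uint64_t Old = Gas.load(std::memory_order_relaxed);
  for (;;) {
    const uint64_t New = Old + Cost;
    if (New > GasLimit) {
      throw std::runtime_error("cost limit exceeded");  // CostLimitExceeded trap
    }
    // Weak CAS, like the setWeak(true) cmpxchg above: on failure Old is
    // refreshed with the current counter and the loop retries.
    if (Gas.compare_exchange_weak(Old, New, std::memory_order_relaxed)) {
      return;
    }
  }
}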
4118 | | |
4119 | 2.93k | void updateGasAtTrap() noexcept { |
4120 | 2.93k | if (LocalGas) { |
4121 | 0 | auto Update [[maybe_unused]] = Builder.createAtomicRMW( |
4122 | 0 | LLVMAtomicRMWBinOpAdd, Context.getGas(Builder, ExecCtx), |
4123 | 0 | Builder.createLoad(Context.Int64Ty, LocalGas), |
4124 | 0 | LLVMAtomicOrderingMonotonic); |
4125 | | #if LLVM_VERSION_MAJOR >= 13 |
4126 | | Update.setAlignment(8); |
4127 | | #endif |
4128 | 0 | } |
4129 | 2.93k | } |
4130 | | |
4131 | | private: |
4132 | 3.00k | void compileCallOp(const unsigned int FuncIndex) noexcept { |
4133 | 3.00k | const auto &FuncType = |
4134 | 3.00k | Context.CompositeTypes[std::get<0>(Context.Functions[FuncIndex])] |
4135 | 3.00k | ->getFuncType(); |
4136 | 3.00k | const auto &Function = std::get<1>(Context.Functions[FuncIndex]); |
4137 | 3.00k | const auto &ParamTypes = FuncType.getParamTypes(); |
4138 | | |
4139 | 3.00k | std::vector<LLVM::Value> Args(ParamTypes.size() + 1); |
4140 | 3.00k | Args[0] = F.Fn.getFirstParam(); |
4141 | 3.79k | for (size_t I = 0; I < ParamTypes.size(); ++I) { |
4142 | 790 | const size_t J = ParamTypes.size() - 1 - I; |
4143 | 790 | Args[J + 1] = stackPop(); |
4144 | 790 | } |
4145 | | |
4146 | 3.00k | auto Ret = Builder.createCall(Function, Args); |
4147 | 3.00k | auto Ty = Ret.getType(); |
4148 | 3.00k | if (Ty.isVoidTy()) { |
4149 | | // nothing to do |
4150 | 1.71k | } else if (Ty.isStructTy()) { |
4151 | 180 | for (auto Val : unpackStruct(Builder, Ret)) { |
4152 | 180 | stackPush(Val); |
4153 | 180 | } |
4154 | 1.20k | } else { |
4155 | 1.20k | stackPush(Ret); |
4156 | 1.20k | } |
4157 | 3.00k | } |
4158 | | |
4159 | | void compileIndirectCallOp(const uint32_t TableIndex, |
4160 | 661 | const uint32_t FuncTypeIndex) noexcept { |
4161 | 661 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.not_null"); |
4162 | 661 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.is_null"); |
4163 | 661 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.end"); |
4164 | | |
4165 | 661 | LLVM::Value FuncIndex = stackPop(); |
4166 | 661 | const auto &FuncType = Context.CompositeTypes[FuncTypeIndex]->getFuncType(); |
4167 | 661 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4168 | 661 | auto RTy = FTy.getReturnType(); |
4169 | | |
4170 | 661 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4171 | 661 | const size_t RetSize = |
4172 | 661 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4173 | 661 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4174 | 661 | ArgsVec[0] = F.Fn.getFirstParam(); |
4175 | 1.30k | for (size_t I = 0; I < ArgSize; ++I) { |
4176 | 643 | const size_t J = ArgSize - I; |
4177 | 643 | ArgsVec[J] = stackPop(); |
4178 | 643 | } |
4179 | | |
4180 | 661 | std::vector<LLVM::Value> FPtrRetsVec; |
4181 | 661 | FPtrRetsVec.reserve(RetSize); |
4182 | 661 | { |
4183 | 661 | auto FPtr = Builder.createCall( |
4184 | 661 | Context.getIntrinsic( |
4185 | 661 | Builder, Executable::Intrinsics::kTableGetFuncSymbol, |
4186 | 661 | LLVM::Type::getFunctionType( |
4187 | 661 | FTy.getPointerTo(), |
4188 | 661 | {Context.Int32Ty, Context.Int32Ty, Context.Int32Ty}, false)), |
4189 | 661 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4190 | 661 | FuncIndex}); |
4191 | 661 | Builder.createCondBr( |
4192 | 661 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4193 | 661 | NotNullBB, IsNullBB); |
4194 | 661 | Builder.positionAtEnd(NotNullBB); |
4195 | | |
4196 | 661 | auto FPtrRet = |
4197 | 661 | Builder.createCall(LLVM::FunctionCallee{FTy, FPtr}, ArgsVec); |
4198 | 661 | if (RetSize == 0) { |
4199 | | // nothing to do |
4200 | 515 | } else if (RetSize == 1) { |
4201 | 496 | FPtrRetsVec.push_back(FPtrRet); |
4202 | 496 | } else { |
4203 | 38 | for (auto Val : unpackStruct(Builder, FPtrRet)) { |
4204 | 38 | FPtrRetsVec.push_back(Val); |
4205 | 38 | } |
4206 | 19 | } |
4207 | 661 | } |
4208 | | |
4209 | 661 | Builder.createBr(EndBB); |
4210 | 661 | Builder.positionAtEnd(IsNullBB); |
4211 | | |
4212 | 661 | std::vector<LLVM::Value> RetsVec; |
4213 | 661 | { |
4214 | 661 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4215 | 661 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4216 | 661 | Builder.createArrayPtrStore( |
4217 | 661 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4218 | 661 | kValSize); |
4219 | | |
4220 | 661 | Builder.createCall( |
4221 | 661 | Context.getIntrinsic( |
4222 | 661 | Builder, Executable::Intrinsics::kCallIndirect, |
4223 | 661 | LLVM::Type::getFunctionType(Context.VoidTy, |
4224 | 661 | {Context.Int32Ty, Context.Int32Ty, |
4225 | 661 | Context.Int32Ty, Context.Int8PtrTy, |
4226 | 661 | Context.Int8PtrTy}, |
4227 | 661 | false)), |
4228 | 661 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4229 | 661 | FuncIndex, Args, Rets}); |
4230 | | |
4231 | 661 | if (RetSize == 0) { |
4232 | | // nothing to do |
4233 | 515 | } else if (RetSize == 1) { |
4234 | 496 | RetsVec.push_back( |
4235 | 496 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4236 | 496 | } else { |
4237 | 19 | RetsVec = Builder.createArrayPtrLoad(RetSize, RTy, Rets, Context.Int8Ty, |
4238 | 19 | kValSize); |
4239 | 19 | } |
4240 | 661 | Builder.createBr(EndBB); |
4241 | 661 | Builder.positionAtEnd(EndBB); |
4242 | 661 | } |
4243 | | |
4244 | 1.19k | for (unsigned I = 0; I < RetSize; ++I) { |
4245 | 534 | auto PHIRet = Builder.createPHI(FPtrRetsVec[I].getType()); |
4246 | 534 | PHIRet.addIncoming(FPtrRetsVec[I], NotNullBB); |
4247 | 534 | PHIRet.addIncoming(RetsVec[I], IsNullBB); |
4248 | 534 | stackPush(PHIRet); |
4249 | 534 | } |
4250 | 661 | } |
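call_indirect is compiled as a two-way dispatch: if the table entry resolves to a compiled symbol, it is called directly with the native signature; otherwise the arguments are packed into a ValVariant array, the kCallIndirect intrinsic (interpreter path) fills a result array, and PHI nodes merge the two sets of results. A deliberately abstract sketch of that shape, flattening both paths to arrays for brevity (the callback types and names are assumptions, not WasmEdge types):

#include <cstdint>
#include <functional>
#include <vector>

using ValVariant = uint64_t;  // stand-in for the packed value representation

static std::vector<ValVariant> callIndirectSketch(
    const std::function<void(const ValVariant *, ValVariant *)> &CompiledSym,
    const std::function<void(const ValVariant *, ValVariant *)> &Interpreter,
    const std::vector<ValVariant> &Args, std::size_t RetSize) {
  std::vector<ValVariant> Rets(RetSize);
  if (CompiledSym) {
    CompiledSym(Args.data(), Rets.data());  // c_i.not_null: direct symbol call
  } else {
    Interpreter(Args.data(), Rets.data());  // c_i.is_null: runtime intrinsic
  }
  return Rets;                              // corresponds to the PHI merge
}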
4251 | | |
4252 | 0 | void compileReturnCallOp(const unsigned int FuncIndex) noexcept { |
4253 | 0 | const auto &FuncType = |
4254 | 0 | Context.CompositeTypes[std::get<0>(Context.Functions[FuncIndex])] |
4255 | 0 | ->getFuncType(); |
4256 | 0 | const auto &Function = std::get<1>(Context.Functions[FuncIndex]); |
4257 | 0 | const auto &ParamTypes = FuncType.getParamTypes(); |
4258 | |
|
4259 | 0 | std::vector<LLVM::Value> Args(ParamTypes.size() + 1); |
4260 | 0 | Args[0] = F.Fn.getFirstParam(); |
4261 | 0 | for (size_t I = 0; I < ParamTypes.size(); ++I) { |
4262 | 0 | const size_t J = ParamTypes.size() - 1 - I; |
4263 | 0 | Args[J + 1] = stackPop(); |
4264 | 0 | } |
4265 | |
|
4266 | 0 | auto Ret = Builder.createCall(Function, Args); |
4267 | 0 | auto Ty = Ret.getType(); |
4268 | 0 | if (Ty.isVoidTy()) { |
4269 | 0 | Builder.createRetVoid(); |
4270 | 0 | } else { |
4271 | 0 | Builder.createRet(Ret); |
4272 | 0 | } |
4273 | 0 | } |
4274 | | |
4275 | | void compileReturnIndirectCallOp(const uint32_t TableIndex, |
4276 | 0 | const uint32_t FuncTypeIndex) noexcept { |
4277 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.not_null"); |
4278 | 0 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.is_null"); |
4279 | |
|
4280 | 0 | LLVM::Value FuncIndex = stackPop(); |
4281 | 0 | const auto &FuncType = Context.CompositeTypes[FuncTypeIndex]->getFuncType(); |
4282 | 0 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4283 | 0 | auto RTy = FTy.getReturnType(); |
4284 | |
|
4285 | 0 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4286 | 0 | const size_t RetSize = |
4287 | 0 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4288 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4289 | 0 | ArgsVec[0] = F.Fn.getFirstParam(); |
4290 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
4291 | 0 | const size_t J = ArgSize - I; |
4292 | 0 | ArgsVec[J] = stackPop(); |
4293 | 0 | } |
4294 | |
|
4295 | 0 | { |
4296 | 0 | auto FPtr = Builder.createCall( |
4297 | 0 | Context.getIntrinsic( |
4298 | 0 | Builder, Executable::Intrinsics::kTableGetFuncSymbol, |
4299 | 0 | LLVM::Type::getFunctionType( |
4300 | 0 | FTy.getPointerTo(), |
4301 | 0 | {Context.Int32Ty, Context.Int32Ty, Context.Int32Ty}, false)), |
4302 | 0 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4303 | 0 | FuncIndex}); |
4304 | 0 | Builder.createCondBr( |
4305 | 0 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4306 | 0 | NotNullBB, IsNullBB); |
4307 | 0 | Builder.positionAtEnd(NotNullBB); |
4308 | |
|
4309 | 0 | auto FPtrRet = |
4310 | 0 | Builder.createCall(LLVM::FunctionCallee(FTy, FPtr), ArgsVec); |
4311 | 0 | if (RetSize == 0) { |
4312 | 0 | Builder.createRetVoid(); |
4313 | 0 | } else { |
4314 | 0 | Builder.createRet(FPtrRet); |
4315 | 0 | } |
4316 | 0 | } |
4317 | |
|
4318 | 0 | Builder.positionAtEnd(IsNullBB); |
4319 | |
|
4320 | 0 | { |
4321 | 0 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4322 | 0 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4323 | 0 | Builder.createArrayPtrStore( |
4324 | 0 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4325 | 0 | kValSize); |
4326 | |
|
4327 | 0 | Builder.createCall( |
4328 | 0 | Context.getIntrinsic( |
4329 | 0 | Builder, Executable::Intrinsics::kCallIndirect, |
4330 | 0 | LLVM::Type::getFunctionType(Context.VoidTy, |
4331 | 0 | {Context.Int32Ty, Context.Int32Ty, |
4332 | 0 | Context.Int32Ty, Context.Int8PtrTy, |
4333 | 0 | Context.Int8PtrTy}, |
4334 | 0 | false)), |
4335 | 0 | {LLContext.getInt32(TableIndex), LLContext.getInt32(FuncTypeIndex), |
4336 | 0 | FuncIndex, Args, Rets}); |
4337 | |
|
4338 | 0 | if (RetSize == 0) { |
4339 | 0 | Builder.createRetVoid(); |
4340 | 0 | } else if (RetSize == 1) { |
4341 | 0 | Builder.createRet( |
4342 | 0 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4343 | 0 | } else { |
4344 | 0 | Builder.createAggregateRet(Builder.createArrayPtrLoad( |
4345 | 0 | RetSize, RTy, Rets, Context.Int8Ty, kValSize)); |
4346 | 0 | } |
4347 | 0 | } |
4348 | 0 | } |
4349 | | |
4350 | 0 | void compileCallRefOp(const unsigned int TypeIndex) noexcept { |
4351 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.not_null"); |
4352 | 0 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.is_null"); |
4353 | 0 | auto EndBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_i.end"); |
4354 | |
|
4355 | 0 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
4356 | 0 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.ref_not_null"); |
4357 | 0 | auto IsRefNotNull = Builder.createLikely(Builder.createICmpNE( |
4358 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
4359 | 0 | LLContext.getInt64(0))); |
4360 | 0 | Builder.createCondBr(IsRefNotNull, OkBB, |
4361 | 0 | getTrapBB(ErrCode::Value::AccessNullFunc)); |
4362 | 0 | Builder.positionAtEnd(OkBB); |
4363 | |
|
4364 | 0 | const auto &FuncType = Context.CompositeTypes[TypeIndex]->getFuncType(); |
4365 | 0 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4366 | 0 | auto RTy = FTy.getReturnType(); |
4367 | |
|
4368 | 0 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4369 | 0 | const size_t RetSize = |
4370 | 0 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4371 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4372 | 0 | ArgsVec[0] = F.Fn.getFirstParam(); |
4373 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
4374 | 0 | const size_t J = ArgSize - I; |
4375 | 0 | ArgsVec[J] = stackPop(); |
4376 | 0 | } |
4377 | |
|
4378 | 0 | std::vector<LLVM::Value> FPtrRetsVec; |
4379 | 0 | FPtrRetsVec.reserve(RetSize); |
4380 | 0 | { |
4381 | 0 | auto FPtr = Builder.createCall( |
4382 | 0 | Context.getIntrinsic( |
4383 | 0 | Builder, Executable::Intrinsics::kRefGetFuncSymbol, |
4384 | 0 | LLVM::Type::getFunctionType(FTy.getPointerTo(), |
4385 | 0 | {Context.Int64x2Ty}, false)), |
4386 | 0 | {Ref}); |
4387 | 0 | Builder.createCondBr( |
4388 | 0 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4389 | 0 | NotNullBB, IsNullBB); |
4390 | 0 | Builder.positionAtEnd(NotNullBB); |
4391 | |
|
4392 | 0 | auto FPtrRet = |
4393 | 0 | Builder.createCall(LLVM::FunctionCallee{FTy, FPtr}, ArgsVec); |
4394 | 0 | if (RetSize == 0) { |
4395 | | // nothing to do |
4396 | 0 | } else if (RetSize == 1) { |
4397 | 0 | FPtrRetsVec.push_back(FPtrRet); |
4398 | 0 | } else { |
4399 | 0 | for (auto Val : unpackStruct(Builder, FPtrRet)) { |
4400 | 0 | FPtrRetsVec.push_back(Val); |
4401 | 0 | } |
4402 | 0 | } |
4403 | 0 | } |
4404 | |
|
4405 | 0 | Builder.createBr(EndBB); |
4406 | 0 | Builder.positionAtEnd(IsNullBB); |
4407 | |
|
4408 | 0 | std::vector<LLVM::Value> RetsVec; |
4409 | 0 | { |
4410 | 0 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4411 | 0 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4412 | 0 | Builder.createArrayPtrStore( |
4413 | 0 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4414 | 0 | kValSize); |
4415 | |
|
4416 | 0 | Builder.createCall( |
4417 | 0 | Context.getIntrinsic( |
4418 | 0 | Builder, Executable::Intrinsics::kCallRef, |
4419 | 0 | LLVM::Type::getFunctionType( |
4420 | 0 | Context.VoidTy, |
4421 | 0 | {Context.Int64x2Ty, Context.Int8PtrTy, Context.Int8PtrTy}, |
4422 | 0 | false)), |
4423 | 0 | {Ref, Args, Rets}); |
4424 | |
|
4425 | 0 | if (RetSize == 0) { |
4426 | | // nothing to do |
4427 | 0 | } else if (RetSize == 1) { |
4428 | 0 | RetsVec.push_back( |
4429 | 0 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4430 | 0 | } else { |
4431 | 0 | RetsVec = Builder.createArrayPtrLoad(RetSize, RTy, Rets, Context.Int8Ty, |
4432 | 0 | kValSize); |
4433 | 0 | } |
4434 | 0 | Builder.createBr(EndBB); |
4435 | 0 | Builder.positionAtEnd(EndBB); |
4436 | 0 | } |
4437 | |
|
4438 | 0 | for (unsigned I = 0; I < RetSize; ++I) { |
4439 | 0 | auto PHIRet = Builder.createPHI(FPtrRetsVec[I].getType()); |
4440 | 0 | PHIRet.addIncoming(FPtrRetsVec[I], NotNullBB); |
4441 | 0 | PHIRet.addIncoming(RetsVec[I], IsNullBB); |
4442 | 0 | stackPush(PHIRet); |
4443 | 0 | } |
4444 | 0 | } |
4445 | | |
4446 | 0 | void compileReturnCallRefOp(const unsigned int TypeIndex) noexcept { |
4447 | 0 | auto NotNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.not_null"); |
4448 | 0 | auto IsNullBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.is_null"); |
4449 | |
|
4450 | 0 | auto Ref = Builder.createBitCast(stackPop(), Context.Int64x2Ty); |
4451 | 0 | auto OkBB = LLVM::BasicBlock::create(LLContext, F.Fn, "c_r.ref_not_null"); |
4452 | 0 | auto IsRefNotNull = Builder.createLikely(Builder.createICmpNE( |
4453 | 0 | Builder.createExtractElement(Ref, LLContext.getInt64(1)), |
4454 | 0 | LLContext.getInt64(0))); |
4455 | 0 | Builder.createCondBr(IsRefNotNull, OkBB, |
4456 | 0 | getTrapBB(ErrCode::Value::AccessNullFunc)); |
4457 | 0 | Builder.positionAtEnd(OkBB); |
4458 | |
|
4459 | 0 | const auto &FuncType = Context.CompositeTypes[TypeIndex]->getFuncType(); |
4460 | 0 | auto FTy = toLLVMType(Context.LLContext, Context.ExecCtxPtrTy, FuncType); |
4461 | 0 | auto RTy = FTy.getReturnType(); |
4462 | |
|
4463 | 0 | const size_t ArgSize = FuncType.getParamTypes().size(); |
4464 | 0 | const size_t RetSize = |
4465 | 0 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
4466 | 0 | std::vector<LLVM::Value> ArgsVec(ArgSize + 1, nullptr); |
4467 | 0 | ArgsVec[0] = F.Fn.getFirstParam(); |
4468 | 0 | for (size_t I = 0; I < ArgSize; ++I) { |
4469 | 0 | const size_t J = ArgSize - I; |
4470 | 0 | ArgsVec[J] = stackPop(); |
4471 | 0 | } |
4472 | |
|
4473 | 0 | { |
4474 | 0 | auto FPtr = Builder.createCall( |
4475 | 0 | Context.getIntrinsic( |
4476 | 0 | Builder, Executable::Intrinsics::kRefGetFuncSymbol, |
4477 | 0 | LLVM::Type::getFunctionType(FTy.getPointerTo(), |
4478 | 0 | {Context.Int64x2Ty}, false)), |
4479 | 0 | {Ref}); |
4480 | 0 | Builder.createCondBr( |
4481 | 0 | Builder.createLikely(Builder.createNot(Builder.createIsNull(FPtr))), |
4482 | 0 | NotNullBB, IsNullBB); |
4483 | 0 | Builder.positionAtEnd(NotNullBB); |
4484 | |
|
4485 | 0 | auto FPtrRet = |
4486 | 0 | Builder.createCall(LLVM::FunctionCallee(FTy, FPtr), ArgsVec); |
4487 | 0 | if (RetSize == 0) { |
4488 | 0 | Builder.createRetVoid(); |
4489 | 0 | } else { |
4490 | 0 | Builder.createRet(FPtrRet); |
4491 | 0 | } |
4492 | 0 | } |
4493 | |
|
4494 | 0 | Builder.positionAtEnd(IsNullBB); |
4495 | |
|
4496 | 0 | { |
4497 | 0 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
4498 | 0 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
4499 | 0 | Builder.createArrayPtrStore( |
4500 | 0 | Span<LLVM::Value>(ArgsVec.begin() + 1, ArgSize), Args, Context.Int8Ty, |
4501 | 0 | kValSize); |
4502 | |
|
4503 | 0 | Builder.createCall( |
4504 | 0 | Context.getIntrinsic( |
4505 | 0 | Builder, Executable::Intrinsics::kCallRef, |
4506 | 0 | LLVM::Type::getFunctionType( |
4507 | 0 | Context.VoidTy, |
4508 | 0 | {Context.Int64x2Ty, Context.Int8PtrTy, Context.Int8PtrTy}, |
4509 | 0 | false)), |
4510 | 0 | {Ref, Args, Rets}); |
4511 | |
|
4512 | 0 | if (RetSize == 0) { |
4513 | 0 | Builder.createRetVoid(); |
4514 | 0 | } else if (RetSize == 1) { |
4515 | 0 | Builder.createRet( |
4516 | 0 | Builder.createValuePtrLoad(RTy, Rets, Context.Int8Ty)); |
4517 | 0 | } else { |
4518 | 0 | Builder.createAggregateRet(Builder.createArrayPtrLoad( |
4519 | 0 | RetSize, RTy, Rets, Context.Int8Ty, kValSize)); |
4520 | 0 | } |
4521 | 0 | } |
4522 | 0 | } |
4523 | | |
4524 | | void compileLoadOp(unsigned MemoryIndex, unsigned Offset, unsigned Alignment, |
4525 | 18.7k | LLVM::Type LoadTy) noexcept { |
4526 | 18.7k | if constexpr (kForceUnalignment) { |
4527 | 18.7k | Alignment = 0; |
4528 | 18.7k | } |
4529 | 18.7k | auto Off = Builder.createZExt(stackPop(), Context.Int64Ty); |
4530 | 18.7k | if (Offset != 0) { |
4531 | 11.5k | Off = Builder.createAdd(Off, LLContext.getInt64(Offset)); |
4532 | 11.5k | } |
4533 | | |
4534 | 18.7k | auto VPtr = Builder.createInBoundsGEP1( |
4535 | 18.7k | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), Off); |
4536 | 18.7k | auto Ptr = Builder.createBitCast(VPtr, LoadTy.getPointerTo()); |
4537 | 18.7k | auto LoadInst = Builder.createLoad(LoadTy, Ptr, true); |
4538 | 18.7k | LoadInst.setAlignment(1 << Alignment); |
4539 | 18.7k | stackPush(LoadInst); |
4540 | 18.7k | } |
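Because kForceUnalignment pins Alignment to 0, every plain load becomes a 1-byte-aligned volatile load at memory base + zero-extended dynamic offset + static offset. On the host side that is equivalent to a memcpy out of linear memory, sketched below for a 32-bit load (names are illustrative; on a little-endian host this matches Wasm's byte order):

#include <cstdint>
#include <cstring>

static uint32_t loadU32Unaligned(const uint8_t *Mem, uint64_t Addr,
                                 uint64_t StaticOffset) {
  uint32_t Out;
  std::memcpy(&Out, Mem + Addr + StaticOffset, sizeof(Out));  // align-1 load
  return Out;
}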
4541 | | void compileLoadOp(unsigned MemoryIndex, unsigned Offset, unsigned Alignment, |
4542 | | LLVM::Type LoadTy, LLVM::Type ExtendTy, |
4543 | 7.01k | bool Signed) noexcept { |
4544 | 7.01k | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4545 | 7.01k | if (Signed) { |
4546 | 3.08k | Stack.back() = Builder.createSExt(Stack.back(), ExtendTy); |
4547 | 3.93k | } else { |
4548 | 3.93k | Stack.back() = Builder.createZExt(Stack.back(), ExtendTy); |
4549 | 3.93k | } |
4550 | 7.01k | } |
4551 | | void compileVectorLoadOp(unsigned MemoryIndex, unsigned Offset, |
4552 | 5.48k | unsigned Alignment, LLVM::Type LoadTy) noexcept { |
4553 | 5.48k | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4554 | 5.48k | Stack.back() = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
4555 | 5.48k | } |
4556 | | void compileVectorLoadOp(unsigned MemoryIndex, unsigned Offset, |
4557 | | unsigned Alignment, LLVM::Type LoadTy, |
4558 | 1.55k | LLVM::Type ExtendTy, bool Signed) noexcept { |
4559 | 1.55k | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy, ExtendTy, Signed); |
4560 | 1.55k | Stack.back() = Builder.createBitCast(Stack.back(), Context.Int64x2Ty); |
4561 | 1.55k | } |
4562 | | void compileSplatLoadOp(unsigned MemoryIndex, unsigned Offset, |
4563 | | unsigned Alignment, LLVM::Type LoadTy, |
4564 | 578 | LLVM::Type VectorTy) noexcept { |
4565 | 578 | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4566 | 578 | compileSplatOp(VectorTy); |
4567 | 578 | } |
4568 | | void compileLoadLaneOp(unsigned MemoryIndex, unsigned Offset, |
4569 | | unsigned Alignment, unsigned Index, LLVM::Type LoadTy, |
4570 | 517 | LLVM::Type VectorTy) noexcept { |
4571 | 517 | auto Vector = stackPop(); |
4572 | 517 | compileLoadOp(MemoryIndex, Offset, Alignment, LoadTy); |
4573 | 517 | auto Value = Stack.back(); |
4574 | 517 | Stack.back() = Builder.createBitCast( |
4575 | 517 | Builder.createInsertElement(Builder.createBitCast(Vector, VectorTy), |
4576 | 517 | Value, LLContext.getInt64(Index)), |
4577 | 517 | Context.Int64x2Ty); |
4578 | 517 | } |
4579 | | void compileStoreOp(unsigned MemoryIndex, unsigned Offset, unsigned Alignment, |
4580 | | LLVM::Type LoadTy, bool Trunc = false, |
4581 | 3.28k | bool BitCast = false) noexcept { |
4582 | 3.28k | if constexpr (kForceUnalignment) { |
4583 | 3.28k | Alignment = 0; |
4584 | 3.28k | } |
4585 | 3.28k | auto V = stackPop(); |
4586 | 3.28k | auto Off = Builder.createZExt(stackPop(), Context.Int64Ty); |
4587 | 3.28k | if (Offset != 0) { |
4588 | 2.25k | Off = Builder.createAdd(Off, LLContext.getInt64(Offset)); |
4589 | 2.25k | } |
4590 | | |
4591 | 3.28k | if (Trunc) { |
4592 | 720 | V = Builder.createTrunc(V, LoadTy); |
4593 | 720 | } |
4594 | 3.28k | if (BitCast) { |
4595 | 264 | V = Builder.createBitCast(V, LoadTy); |
4596 | 264 | } |
4597 | 3.28k | auto VPtr = Builder.createInBoundsGEP1( |
4598 | 3.28k | Context.Int8Ty, Context.getMemory(Builder, ExecCtx, MemoryIndex), Off); |
4599 | 3.28k | auto Ptr = Builder.createBitCast(VPtr, LoadTy.getPointerTo()); |
4600 | 3.28k | auto StoreInst = Builder.createStore(V, Ptr, true); |
4601 | 3.28k | StoreInst.setAlignment(1 << Alignment); |
4602 | 3.28k | } |
4603 | | void compileStoreLaneOp(unsigned MemoryIndex, unsigned Offset, |
4604 | | unsigned Alignment, unsigned Index, LLVM::Type LoadTy, |
4605 | 347 | LLVM::Type VectorTy) noexcept { |
4606 | 347 | auto Vector = Stack.back(); |
4607 | 347 | Stack.back() = Builder.createExtractElement( |
4608 | 347 | Builder.createBitCast(Vector, VectorTy), LLContext.getInt64(Index)); |
4609 | 347 | compileStoreOp(MemoryIndex, Offset, Alignment, LoadTy); |
4610 | 347 | } |
4611 | 42.9k | void compileSplatOp(LLVM::Type VectorTy) noexcept { |
4612 | 42.9k | auto Undef = LLVM::Value::getUndef(VectorTy); |
4613 | 42.9k | auto Zeros = LLVM::Value::getConstNull( |
4614 | 42.9k | LLVM::Type::getVectorType(Context.Int32Ty, VectorTy.getVectorSize())); |
4615 | 42.9k | auto Value = Builder.createTrunc(Stack.back(), VectorTy.getElementType()); |
4616 | 42.9k | auto Vector = |
4617 | 42.9k | Builder.createInsertElement(Undef, Value, LLContext.getInt64(0)); |
4618 | 42.9k | Vector = Builder.createShuffleVector(Vector, Undef, Zeros); |
4619 | | |
4620 | 42.9k | Stack.back() = Builder.createBitCast(Vector, Context.Int64x2Ty); |
4621 | 42.9k | } |
4622 | 1.24k | void compileExtractLaneOp(LLVM::Type VectorTy, unsigned Index) noexcept { |
4623 | 1.24k | auto Vector = Builder.createBitCast(Stack.back(), VectorTy); |
4624 | 1.24k | Stack.back() = |
4625 | 1.24k | Builder.createExtractElement(Vector, LLContext.getInt64(Index)); |
4626 | 1.24k | } |
4627 | | void compileExtractLaneOp(LLVM::Type VectorTy, unsigned Index, |
4628 | 903 | LLVM::Type ExtendTy, bool Signed) noexcept { |
4629 | 903 | compileExtractLaneOp(VectorTy, Index); |
4630 | 903 | if (Signed) { |
4631 | 407 | Stack.back() = Builder.createSExt(Stack.back(), ExtendTy); |
4632 | 496 | } else { |
4633 | 496 | Stack.back() = Builder.createZExt(Stack.back(), ExtendTy); |
4634 | 496 | } |
4635 | 903 | } |
4636 | 696 | void compileReplaceLaneOp(LLVM::Type VectorTy, unsigned Index) noexcept { |
4637 | 696 | auto Value = Builder.createTrunc(stackPop(), VectorTy.getElementType()); |
4638 | 696 | auto Vector = Stack.back(); |
4639 | 696 | Stack.back() = Builder.createBitCast( |
4640 | 696 | Builder.createInsertElement(Builder.createBitCast(Vector, VectorTy), |
4641 | 696 | Value, LLContext.getInt64(Index)), |
4642 | 696 | Context.Int64x2Ty); |
4643 | 696 | } |
4644 | | void compileVectorCompareOp(LLVM::Type VectorTy, |
4645 | 5.09k | LLVMIntPredicate Predicate) noexcept { |
4646 | 5.09k | auto RHS = stackPop(); |
4647 | 5.09k | auto LHS = stackPop(); |
4648 | 5.09k | auto Result = Builder.createSExt( |
4649 | 5.09k | Builder.createICmp(Predicate, Builder.createBitCast(LHS, VectorTy), |
4650 | 5.09k | Builder.createBitCast(RHS, VectorTy)), |
4651 | 5.09k | VectorTy); |
4652 | 5.09k | stackPush(Builder.createBitCast(Result, Context.Int64x2Ty)); |
4653 | 5.09k | } |
4654 | | void compileVectorCompareOp(LLVM::Type VectorTy, LLVMRealPredicate Predicate, |
4655 | 3.47k | LLVM::Type ResultTy) noexcept { |
4656 | 3.47k | auto RHS = stackPop(); |
4657 | 3.47k | auto LHS = stackPop(); |
4658 | 3.47k | auto Result = Builder.createSExt( |
4659 | 3.47k | Builder.createFCmp(Predicate, Builder.createBitCast(LHS, VectorTy), |
4660 | 3.47k | Builder.createBitCast(RHS, VectorTy)), |
4661 | 3.47k | ResultTy); |
4662 | 3.47k | stackPush(Builder.createBitCast(Result, Context.Int64x2Ty)); |
4663 | 3.47k | } |
4664 | | template <typename Func> |
4665 | 26.4k | void compileVectorOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4666 | 26.4k | auto V = Builder.createBitCast(Stack.back(), VectorTy); |
4667 | 26.4k | Stack.back() = Builder.createBitCast(Op(V), Context.Int64x2Ty); |
4668 | 26.4k | } |
Per-instantiation counts for compileVectorOp (compiler.cpp), source lines 4665-4668:
  compileVectorAbs            2.01k
  compileVectorNeg            2.78k
  compileVectorPopcnt           109
  compileVectorExtAddPairwise 2.03k
  compileVectorFAbs             497
  compileVectorFNeg             944
  compileVectorFSqrt            324
  compileVectorFCeil          1.66k
  compileVectorFFloor         2.69k
  compileVectorFTrunc         2.03k
  compileVectorFNearest         433
  compileVectorTruncSatS32      960
  compileVectorTruncSatU32    5.87k
  compileVectorConvertS         666
  compileVectorConvertU       1.97k
  compileVectorDemote           733
  compileVectorPromote          723
|
4669 | 2.01k | void compileVectorAbs(LLVM::Type VectorTy) noexcept { |
4670 | 2.01k | compileVectorOp(VectorTy, [this, VectorTy](auto V) noexcept { |
4671 | 2.01k | auto Zero = LLVM::Value::getConstNull(VectorTy); |
4672 | 2.01k | auto C = Builder.createICmpSLT(V, Zero); |
4673 | 2.01k | return Builder.createSelect(C, Builder.createNeg(V), V); |
4674 | 2.01k | }); |
4675 | 2.01k | } |
4676 | 2.78k | void compileVectorNeg(LLVM::Type VectorTy) noexcept { |
4677 | 2.78k | compileVectorOp(VectorTy, |
4678 | 2.78k | [this](auto V) noexcept { return Builder.createNeg(V); }); |
4679 | 2.78k | } |
4680 | 109 | void compileVectorPopcnt() noexcept { |
4681 | 109 | compileVectorOp(Context.Int8x16Ty, [this](auto V) noexcept { |
4682 | 109 | assuming(LLVM::Core::Ctpop != LLVM::Core::NotIntrinsic); |
4683 | 109 | return Builder.createUnaryIntrinsic(LLVM::Core::Ctpop, V); |
4684 | 109 | }); |
4685 | 109 | } |
4686 | | template <typename Func> |
4687 | 2.04k | void compileVectorReduceIOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4688 | 2.04k | auto V = Builder.createBitCast(Stack.back(), VectorTy); |
4689 | 2.04k | Stack.back() = Builder.createZExt(Op(V), Context.Int32Ty); |
4690 | 2.04k | } |
Per-instantiation counts for compileVectorReduceIOp (compiler.cpp), source lines 4687-4690:
  compileVectorAnyTrue    112
  compileVectorAllTrue    909
  compileVectorBitMask  1.01k
|
4691 | 112 | void compileVectorAnyTrue() noexcept { |
4692 | 112 | compileVectorReduceIOp(Context.Int128x1Ty, [this](auto V) noexcept { |
4693 | 112 | auto Zero = LLVM::Value::getConstNull(Context.Int128x1Ty); |
4694 | 112 | return Builder.createBitCast(Builder.createICmpNE(V, Zero), |
4695 | 112 | LLContext.getInt1Ty()); |
4696 | 112 | }); |
4697 | 112 | } |
4698 | 909 | void compileVectorAllTrue(LLVM::Type VectorTy) noexcept { |
4699 | 909 | compileVectorReduceIOp(VectorTy, [this, VectorTy](auto V) noexcept { |
4700 | 909 | const auto Size = VectorTy.getVectorSize(); |
4701 | 909 | auto IntType = LLContext.getIntNTy(Size); |
4702 | 909 | auto Zero = LLVM::Value::getConstNull(VectorTy); |
4703 | 909 | auto Cmp = Builder.createBitCast(Builder.createICmpEQ(V, Zero), IntType); |
4704 | 909 | auto CmpZero = LLVM::Value::getConstInt(IntType, 0); |
4705 | 909 | return Builder.createICmpEQ(Cmp, CmpZero); |
4706 | 909 | }); |
4707 | 909 | } |
4708 | 1.01k | void compileVectorBitMask(LLVM::Type VectorTy) noexcept { |
4709 | 1.01k | compileVectorReduceIOp(VectorTy, [this, VectorTy](auto V) noexcept { |
4710 | 1.01k | const auto Size = VectorTy.getVectorSize(); |
4711 | 1.01k | auto IntType = LLContext.getIntNTy(Size); |
4712 | 1.01k | auto Zero = LLVM::Value::getConstNull(VectorTy); |
4713 | 1.01k | return Builder.createBitCast(Builder.createICmpSLT(V, Zero), IntType); |
4714 | 1.01k | }); |
4715 | 1.01k | } |
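The bitmask reduction above relies on the icmp slt 0 producing one bit per lane and the bitcast to an N-bit integer packing those bits together. A scalar sketch of i8x16.bitmask (illustrative helper, not from the source):

#include <cstdint>

// Collect the sign bit of each of the 16 lanes into the low 16 result bits.
static uint32_t i8x16Bitmask(const int8_t Lanes[16]) {
  uint32_t Mask = 0;
  for (int I = 0; I < 16; ++I) {
    if (Lanes[I] < 0) {
      Mask |= 1u << I;
    }
  }
  return Mask;
}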
4716 | | template <typename Func> |
4717 | 3.28k | void compileVectorShiftOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4718 | 3.28k | const bool Trunc = VectorTy.getElementType().getIntegerBitWidth() < 32; |
4719 | 3.28k | const uint32_t Mask = VectorTy.getElementType().getIntegerBitWidth() - 1; |
4720 | 3.28k | auto N = Builder.createAnd(stackPop(), LLContext.getInt32(Mask)); |
4721 | 3.28k | auto RHS = Builder.createVectorSplat( |
4722 | 3.28k | VectorTy.getVectorSize(), |
4723 | 3.28k | Trunc ? Builder.createTrunc(N, VectorTy.getElementType()) |
4724 | 3.28k | : Builder.createZExtOrTrunc(N, VectorTy.getElementType())); |
4725 | 3.28k | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
4726 | 3.28k | stackPush(Builder.createBitCast(Op(LHS, RHS), Context.Int64x2Ty)); |
4727 | 3.28k | } |
Per-instantiation counts for compileVectorShiftOp (compiler.cpp), source lines 4717-4727:
  compileVectorShl   1.28k
  compileVectorAShr  1.59k
  compileVectorLShr    398
|
4728 | 1.28k | void compileVectorShl(LLVM::Type VectorTy) noexcept { |
4729 | 1.28k | compileVectorShiftOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4730 | 1.28k | return Builder.createShl(LHS, RHS); |
4731 | 1.28k | }); |
4732 | 1.28k | } |
4733 | 398 | void compileVectorLShr(LLVM::Type VectorTy) noexcept { |
4734 | 398 | compileVectorShiftOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4735 | 398 | return Builder.createLShr(LHS, RHS); |
4736 | 398 | }); |
4737 | 398 | } |
4738 | 1.59k | void compileVectorAShr(LLVM::Type VectorTy) noexcept { |
4739 | 1.59k | compileVectorShiftOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4740 | 1.59k | return Builder.createAShr(LHS, RHS); |
4741 | 1.59k | }); |
4742 | 1.59k | } |
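
The three shift helpers above all mask the scalar shift count before splatting it, because WebAssembly defines the count modulo the lane bit width. A minimal standalone sketch of that lane rule follows; the helper name is illustrative and not part of compiler.cpp.

#include <cassert>
#include <cstdint>

// Scalar model of one i8x16 lane: the shift count is taken modulo the lane
// bit width, mirroring the createAnd with the lane-width mask above.
static uint8_t shl_lane_i8(uint8_t Lane, uint32_t Count) {
  return static_cast<uint8_t>(Lane << (Count & 7u));
}

int main() {
  assert(shl_lane_i8(0x01u, 9) == 0x02u); // 9 & 7 == 1, so a one-bit shift
  assert(shl_lane_i8(0x80u, 1) == 0x00u); // bits shifted out are dropped
  return 0;
}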
4743 | | template <typename Func> |
4744 | 7.23k | void compileVectorVectorOp(LLVM::Type VectorTy, Func &&Op) noexcept { |
4745 | 7.23k | auto RHS = Builder.createBitCast(stackPop(), VectorTy); |
4746 | 7.23k | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
4747 | 7.23k | stackPush(Builder.createBitCast(Op(LHS, RHS), Context.Int64x2Ty)); |
4748 | 7.23k | } |
[Per-instantiation coverage for compileVectorVectorOp: VectorAdd 354, VectorAddSat 982, VectorSub 781, VectorSubSat 337, VectorSMin 339, VectorUMin 269, VectorSMax 504, VectorUMax 716, VectorUAvgr 227, VectorMul 412, Q15MulSat 132, FAdd 178, FSub 470, FMul 225, FDiv 231, FMin 301, FMax 211, FPMin 289, FPMax 276.]
4749 | 354 | void compileVectorVectorAdd(LLVM::Type VectorTy) noexcept { |
4750 | 354 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4751 | 354 | return Builder.createAdd(LHS, RHS); |
4752 | 354 | }); |
4753 | 354 | } |
4754 | 982 | void compileVectorVectorAddSat(LLVM::Type VectorTy, bool Signed) noexcept { |
4755 | 982 | auto ID = Signed ? LLVM::Core::SAddSat : LLVM::Core::UAddSat; |
4756 | 982 | assuming(ID != LLVM::Core::NotIntrinsic); |
4757 | 982 | compileVectorVectorOp( |
4758 | 982 | VectorTy, [this, VectorTy, ID](auto LHS, auto RHS) noexcept { |
4759 | 982 | return Builder.createIntrinsic(ID, {VectorTy}, {LHS, RHS}); |
4760 | 982 | }); |
4761 | 982 | } |
4762 | 781 | void compileVectorVectorSub(LLVM::Type VectorTy) noexcept { |
4763 | 781 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4764 | 781 | return Builder.createSub(LHS, RHS); |
4765 | 781 | }); |
4766 | 781 | } |
4767 | 337 | void compileVectorVectorSubSat(LLVM::Type VectorTy, bool Signed) noexcept { |
4768 | 337 | auto ID = Signed ? LLVM::Core::SSubSat : LLVM::Core::USubSat; |
4769 | 337 | assuming(ID != LLVM::Core::NotIntrinsic); |
4770 | 337 | compileVectorVectorOp( |
4771 | 337 | VectorTy, [this, VectorTy, ID](auto LHS, auto RHS) noexcept { |
4772 | 337 | return Builder.createIntrinsic(ID, {VectorTy}, {LHS, RHS}); |
4773 | 337 | }); |
4774 | 337 | } |
4775 | 412 | void compileVectorVectorMul(LLVM::Type VectorTy) noexcept { |
4776 | 412 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4777 | 412 | return Builder.createMul(LHS, RHS); |
4778 | 412 | }); |
4779 | 412 | } |
4780 | 66 | void compileVectorSwizzle() noexcept { |
4781 | 66 | auto Index = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
4782 | 66 | auto Vector = Builder.createBitCast(stackPop(), Context.Int8x16Ty); |
4783 | | |
4784 | 66 | #if defined(__x86_64__) |
4785 | 66 | if (Context.SupportSSSE3) { |
4786 | 66 | auto Magic = Builder.createVectorSplat(16, LLContext.getInt8(112)); |
4787 | 66 | auto Added = Builder.createAdd(Index, Magic); |
4788 | 66 | auto NewIndex = Builder.createSelect( |
4789 | 66 | Builder.createICmpUGT(Index, Added), |
4790 | 66 | LLVM::Value::getConstAllOnes(Context.Int8x16Ty), Added); |
4791 | 66 | assuming(LLVM::Core::X86SSSE3PShufB128 != LLVM::Core::NotIntrinsic); |
4792 | 66 | stackPush(Builder.createBitCast( |
4793 | 66 | Builder.createIntrinsic(LLVM::Core::X86SSSE3PShufB128, {}, |
4794 | 66 | {Vector, NewIndex}), |
4795 | 66 | Context.Int64x2Ty)); |
4796 | 66 | return; |
4797 | 66 | } |
4798 | 0 | #endif |
4799 | | |
4800 | | #if defined(__aarch64__) |
4801 | | if (Context.SupportNEON) { |
4802 | | assuming(LLVM::Core::AArch64NeonTbl1 != LLVM::Core::NotIntrinsic); |
4803 | | stackPush(Builder.createBitCast( |
4804 | | Builder.createIntrinsic(LLVM::Core::AArch64NeonTbl1, |
4805 | | {Context.Int8x16Ty}, {Vector, Index}), |
4806 | | Context.Int64x2Ty)); |
4807 | | return; |
4808 | | } |
4809 | | #endif |
4810 | | |
4811 | | // Fallback case. |
4812 | | // If SSSE3 is not supported on the x86_64 platform or |
4813 | | // NEON is not supported on the aarch64 platform, |
4814 | | // then fall back to this element-wise table lookup. |
4815 | 0 | auto Mask = Builder.createVectorSplat(16, LLContext.getInt8(15)); |
4816 | 0 | auto Zero = Builder.createVectorSplat(16, LLContext.getInt8(0)); |
4817 | 0 | auto IsOver = Builder.createICmpUGT(Index, Mask); |
4818 | 0 | auto InboundIndex = Builder.createAnd(Index, Mask); |
4819 | 0 | auto Array = Builder.createArray(16, 1); |
4820 | 0 | for (size_t I = 0; I < 16; ++I) { |
4821 | 0 | Builder.createStore( |
4822 | 0 | Builder.createExtractElement(Vector, LLContext.getInt64(I)), |
4823 | 0 | Builder.createInBoundsGEP1(Context.Int8Ty, Array, |
4824 | 0 | LLContext.getInt64(I))); |
4825 | 0 | } |
4826 | 0 | LLVM::Value Ret = LLVM::Value::getUndef(Context.Int8x16Ty); |
4827 | 0 | for (size_t I = 0; I < 16; ++I) { |
4828 | 0 | auto Idx = |
4829 | 0 | Builder.createExtractElement(InboundIndex, LLContext.getInt64(I)); |
4830 | 0 | auto Value = Builder.createLoad( |
4831 | 0 | Context.Int8Ty, |
4832 | 0 | Builder.createInBoundsGEP1(Context.Int8Ty, Array, Idx)); |
4833 | 0 | Ret = Builder.createInsertElement(Ret, Value, LLContext.getInt64(I)); |
4834 | 0 | } |
4835 | 0 | Ret = Builder.createSelect(IsOver, Zero, Ret); |
4836 | 0 | stackPush(Builder.createBitCast(Ret, Context.Int64x2Ty)); |
4837 | 0 | } |
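
For readers unfamiliar with i8x16.swizzle, the fallback loop above reproduces the following per-lane rule: each output lane reads the source lane selected by the index when the index is below 16, and becomes zero otherwise. The sketch below is illustrative only; its names are not taken from compiler.cpp.

#include <array>
#include <cassert>
#include <cstdint>

// Scalar model of i8x16.swizzle: out[i] = Idx[i] < 16 ? Vec[Idx[i]] : 0.
static std::array<uint8_t, 16> swizzle(const std::array<uint8_t, 16> &Vec,
                                       const std::array<uint8_t, 16> &Idx) {
  std::array<uint8_t, 16> Out{};
  for (size_t I = 0; I < 16; ++I) {
    Out[I] = Idx[I] < 16 ? Vec[Idx[I]] : 0; // out-of-range lanes become zero
  }
  return Out;
}

int main() {
  std::array<uint8_t, 16> Vec{};
  for (uint8_t I = 0; I < 16; ++I) { Vec[I] = static_cast<uint8_t>(I + 1); }
  std::array<uint8_t, 16> Idx{15, 0, 200, 3}; // remaining lanes default to 0
  const auto Out = swizzle(Vec, Idx);
  assert(Out[0] == 16 && Out[1] == 1 && Out[2] == 0 && Out[3] == 4);
  return 0;
}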
4838 | | |
4839 | 132 | void compileVectorVectorQ15MulSat() noexcept { |
4840 | 132 | compileVectorVectorOp( |
4841 | 132 | Context.Int16x8Ty, [this](auto LHS, auto RHS) noexcept -> LLVM::Value { |
4842 | 132 | #if defined(__x86_64__) |
4843 | 132 | if (Context.SupportSSSE3) { |
4844 | 132 | assuming(LLVM::Core::X86SSSE3PMulHrSw128 != |
4845 | 132 | LLVM::Core::NotIntrinsic); |
4846 | 132 | auto Result = Builder.createIntrinsic( |
4847 | 132 | LLVM::Core::X86SSSE3PMulHrSw128, {}, {LHS, RHS}); |
4848 | 132 | auto IntMaxV = Builder.createVectorSplat( |
4849 | 132 | 8, LLContext.getInt16(UINT16_C(0x8000))); |
4850 | 132 | auto NotOver = Builder.createSExt( |
4851 | 132 | Builder.createICmpEQ(Result, IntMaxV), Context.Int16x8Ty); |
4852 | 132 | return Builder.createXor(Result, NotOver); |
4853 | 132 | } |
4854 | 0 | #endif |
4855 | | |
4856 | | #if defined(__aarch64__) |
4857 | | if (Context.SupportNEON) { |
4858 | | assuming(LLVM::Core::AArch64NeonSQRDMulH != |
4859 | | LLVM::Core::NotIntrinsic); |
4860 | | return Builder.createBinaryIntrinsic( |
4861 | | LLVM::Core::AArch64NeonSQRDMulH, LHS, RHS); |
4862 | | } |
4863 | | #endif |
4864 | | |
4865 | | // Fallback case. |
4866 | | // If SSSE3 is not supported on the x86_64 platform or |
4867 | | // NEON is not supported on the aarch64 platform, |
4868 | | // then fall back to this widened-arithmetic lowering. |
4869 | 0 | auto ExtTy = Context.Int16x8Ty.getExtendedElementVectorType(); |
4870 | 0 | auto Offset = Builder.createVectorSplat( |
4871 | 0 | 8, LLContext.getInt32(UINT32_C(0x4000))); |
4872 | 0 | auto Shift = |
4873 | 0 | Builder.createVectorSplat(8, LLContext.getInt32(UINT32_C(15))); |
4874 | 0 | auto ExtLHS = Builder.createSExt(LHS, ExtTy); |
4875 | 0 | auto ExtRHS = Builder.createSExt(RHS, ExtTy); |
4876 | 0 | auto Result = Builder.createTrunc( |
4877 | 0 | Builder.createAShr( |
4878 | 0 | Builder.createAdd(Builder.createMul(ExtLHS, ExtRHS), Offset), |
4879 | 0 | Shift), |
4880 | 0 | Context.Int16x8Ty); |
4881 | 0 | auto IntMaxV = Builder.createVectorSplat( |
4882 | 0 | 8, LLContext.getInt16(UINT16_C(0x8000))); |
4883 | 0 | auto NotOver = Builder.createSExt( |
4884 | 0 | Builder.createICmpEQ(Result, IntMaxV), Context.Int16x8Ty); |
4885 | 0 | return Builder.createXor(Result, NotOver); |
4886 | 132 | }); |
4887 | 132 | } |
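
The fallback arithmetic above is the usual Q15 rounding multiply: widen to 32 bits, add the 0x4000 rounding term, shift right by 15, and saturate the single overflow case (INT16_MIN * INT16_MIN) to INT16_MAX. A hedged scalar restatement, with an illustrative helper that is not part of the source:

#include <cassert>
#include <cstdint>

// Scalar q15mulr_sat_s for one lane, matching the widen/round/shift/saturate
// sequence emitted by the fallback above.
static int16_t q15mulr_sat(int16_t A, int16_t B) {
  const int32_t Product = (static_cast<int32_t>(A) * B + 0x4000) >> 15;
  return Product > INT16_MAX ? INT16_MAX : static_cast<int16_t>(Product);
}

int main() {
  assert(q15mulr_sat(INT16_MIN, INT16_MIN) == INT16_MAX); // the only overflow
  assert(q15mulr_sat(16384, 16384) == 8192);              // 0.5 * 0.5 = 0.25
  return 0;
}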
4888 | 339 | void compileVectorVectorSMin(LLVM::Type VectorTy) noexcept { |
4889 | 339 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4890 | 339 | auto C = Builder.createICmpSLE(LHS, RHS); |
4891 | 339 | return Builder.createSelect(C, LHS, RHS); |
4892 | 339 | }); |
4893 | 339 | } |
4894 | 269 | void compileVectorVectorUMin(LLVM::Type VectorTy) noexcept { |
4895 | 269 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4896 | 269 | auto C = Builder.createICmpULE(LHS, RHS); |
4897 | 269 | return Builder.createSelect(C, LHS, RHS); |
4898 | 269 | }); |
4899 | 269 | } |
4900 | 504 | void compileVectorVectorSMax(LLVM::Type VectorTy) noexcept { |
4901 | 504 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4902 | 504 | auto C = Builder.createICmpSGE(LHS, RHS); |
4903 | 504 | return Builder.createSelect(C, LHS, RHS); |
4904 | 504 | }); |
4905 | 504 | } |
4906 | 716 | void compileVectorVectorUMax(LLVM::Type VectorTy) noexcept { |
4907 | 716 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
4908 | 716 | auto C = Builder.createICmpUGE(LHS, RHS); |
4909 | 716 | return Builder.createSelect(C, LHS, RHS); |
4910 | 716 | }); |
4911 | 716 | } |
4912 | 227 | void compileVectorVectorUAvgr(LLVM::Type VectorTy) noexcept { |
4913 | 227 | auto ExtendTy = VectorTy.getExtendedElementVectorType(); |
4914 | 227 | compileVectorVectorOp( |
4915 | 227 | VectorTy, |
4916 | 227 | [this, VectorTy, ExtendTy](auto LHS, auto RHS) noexcept -> LLVM::Value { |
4917 | 227 | #if defined(__x86_64__) |
4918 | 227 | if (Context.SupportSSE2) { |
4919 | 227 | const auto ID = [VectorTy]() noexcept { |
4920 | 227 | switch (VectorTy.getElementType().getIntegerBitWidth()) { |
4921 | 124 | case 8: |
4922 | 124 | return LLVM::Core::X86SSE2PAvgB; |
4923 | 103 | case 16: |
4924 | 103 | return LLVM::Core::X86SSE2PAvgW; |
4925 | 0 | default: |
4926 | 0 | assumingUnreachable(); |
4927 | 227 | } |
4928 | 227 | }(); |
4929 | 227 | assuming(ID != LLVM::Core::NotIntrinsic); |
4930 | 227 | return Builder.createIntrinsic(ID, {}, {LHS, RHS}); |
4931 | 227 | } |
4932 | 0 | #endif |
4933 | | |
4934 | | #if defined(__aarch64__) |
4935 | | if (Context.SupportNEON) { |
4936 | | assuming(LLVM::Core::AArch64NeonURHAdd != LLVM::Core::NotIntrinsic); |
4937 | | return Builder.createBinaryIntrinsic(LLVM::Core::AArch64NeonURHAdd, |
4938 | | LHS, RHS); |
4939 | | } |
4940 | | #endif |
4941 | | |
4942 | | // Fallback case. |
4943 | | // If SSE2 is not supported on the x86_64 platform or |
4944 | | // NEON is not supported on the aarch64 platform, |
4945 | | // then fall back to this widened rounding-average lowering. |
4946 | 0 | auto EL = Builder.createZExt(LHS, ExtendTy); |
4947 | 0 | auto ER = Builder.createZExt(RHS, ExtendTy); |
4948 | 0 | auto One = Builder.createZExt( |
4949 | 0 | Builder.createVectorSplat(ExtendTy.getVectorSize(), |
4950 | 0 | LLContext.getTrue()), |
4951 | 0 | ExtendTy); |
4952 | 0 | return Builder.createTrunc( |
4953 | 0 | Builder.createLShr( |
4954 | 0 | Builder.createAdd(Builder.createAdd(EL, ER), One), One), |
4955 | 0 | VectorTy); |
4956 | 227 | }); |
4957 | 227 | } |
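
The widened fallback above computes the standard unsigned rounding average, (a + b + 1) >> 1, in a vector type wide enough to keep the carry bit. A scalar sketch of the same formula; the helper name is illustrative:

#include <cassert>
#include <cstdint>

// Scalar avgr_u for one u8 lane: widen to 16 bits so the sum plus the
// rounding bit cannot overflow, then halve.
static uint8_t avgr_u8(uint8_t A, uint8_t B) {
  return static_cast<uint8_t>((static_cast<uint16_t>(A) + B + 1u) >> 1);
}

int main() {
  assert(avgr_u8(255, 255) == 255); // (255 + 255 + 1) >> 1 = 255
  assert(avgr_u8(1, 2) == 2);       // halves round up
  return 0;
}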
4958 | 661 | void compileVectorNarrow(LLVM::Type FromTy, bool Signed) noexcept { |
4959 | 661 | auto [MinInt, |
4960 | 661 | MaxInt] = [&]() noexcept -> std::tuple<LLVM::Value, LLVM::Value> { |
4961 | 661 | switch (FromTy.getElementType().getIntegerBitWidth()) { |
4962 | 261 | case 16: { |
4963 | 261 | const auto Min = |
4964 | 261 | static_cast<int16_t>(Signed ? std::numeric_limits<int8_t>::min() |
4965 | 261 | : std::numeric_limits<uint8_t>::min()); |
4966 | 261 | const auto Max = |
4967 | 261 | static_cast<int16_t>(Signed ? std::numeric_limits<int8_t>::max() |
4968 | 261 | : std::numeric_limits<uint8_t>::max()); |
4969 | 261 | return {LLContext.getInt16(static_cast<uint16_t>(Min)), |
4970 | 261 | LLContext.getInt16(static_cast<uint16_t>(Max))}; |
4971 | 0 | } |
4972 | 400 | case 32: { |
4973 | 400 | const auto Min = |
4974 | 400 | static_cast<int32_t>(Signed ? std::numeric_limits<int16_t>::min() |
4975 | 400 | : std::numeric_limits<uint16_t>::min()); |
4976 | 400 | const auto Max = |
4977 | 400 | static_cast<int32_t>(Signed ? std::numeric_limits<int16_t>::max() |
4978 | 400 | : std::numeric_limits<uint16_t>::max()); |
4979 | 400 | return {LLContext.getInt32(static_cast<uint32_t>(Min)), |
4980 | 400 | LLContext.getInt32(static_cast<uint32_t>(Max))}; |
4981 | 0 | } |
4982 | 0 | default: |
4983 | 0 | assumingUnreachable(); |
4984 | 661 | } |
4985 | 661 | }(); |
4986 | 661 | const auto Count = FromTy.getVectorSize(); |
4987 | 661 | auto VMin = Builder.createVectorSplat(Count, MinInt); |
4988 | 661 | auto VMax = Builder.createVectorSplat(Count, MaxInt); |
4989 | | |
4990 | 661 | auto TruncTy = FromTy.getTruncatedElementVectorType(); |
4991 | | |
4992 | 661 | auto F2 = Builder.createBitCast(stackPop(), FromTy); |
4993 | 661 | F2 = Builder.createSelect(Builder.createICmpSLT(F2, VMin), VMin, F2); |
4994 | 661 | F2 = Builder.createSelect(Builder.createICmpSGT(F2, VMax), VMax, F2); |
4995 | 661 | F2 = Builder.createTrunc(F2, TruncTy); |
4996 | | |
4997 | 661 | auto F1 = Builder.createBitCast(stackPop(), FromTy); |
4998 | 661 | F1 = Builder.createSelect(Builder.createICmpSLT(F1, VMin), VMin, F1); |
4999 | 661 | F1 = Builder.createSelect(Builder.createICmpSGT(F1, VMax), VMax, F1); |
5000 | 661 | F1 = Builder.createTrunc(F1, TruncTy); |
5001 | | |
5002 | 661 | std::vector<uint32_t> Mask(Count * 2); |
5003 | 661 | std::iota(Mask.begin(), Mask.end(), 0); |
5004 | 661 | stackPush(Builder.createBitCast( |
5005 | 661 | Builder.createShuffleVector( |
5006 | 661 | F1, F2, LLVM::Value::getConstVector32(LLContext, Mask)), |
5007 | 661 | Context.Int64x2Ty)); |
5008 | 661 | } |
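
compileVectorNarrow clamps every input lane into the destination range before truncating, then interleaves the two truncated halves with the shuffle. A scalar sketch of the clamp for the signed i16 -> i8 case; the helper below is illustrative only:

#include <algorithm>
#include <cassert>
#include <cstdint>

// Scalar narrowing with signed saturation: clamp into [INT8_MIN, INT8_MAX],
// then truncate, mirroring the ICmpSLT/ICmpSGT selects above.
static int8_t narrow_sat_s(int16_t V) {
  const int16_t Clamped =
      std::min<int16_t>(std::max<int16_t>(V, INT8_MIN), INT8_MAX);
  return static_cast<int8_t>(Clamped);
}

int main() {
  assert(narrow_sat_s(300) == 127);   // above INT8_MAX saturates
  assert(narrow_sat_s(-300) == -128); // below INT8_MIN saturates
  assert(narrow_sat_s(5) == 5);       // in-range values pass through
  return 0;
}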
5009 | 5.18k | void compileVectorExtend(LLVM::Type FromTy, bool Signed, bool Low) noexcept { |
5010 | 5.18k | auto ExtTy = FromTy.getExtendedElementVectorType(); |
5011 | 5.18k | const auto Count = FromTy.getVectorSize(); |
5012 | 5.18k | std::vector<uint32_t> Mask(Count / 2); |
5013 | 5.18k | std::iota(Mask.begin(), Mask.end(), Low ? 0 : Count / 2); |
5014 | 5.18k | auto R = Builder.createBitCast(Stack.back(), FromTy); |
5015 | 5.18k | if (Signed) { |
5016 | 2.04k | R = Builder.createSExt(R, ExtTy); |
5017 | 3.13k | } else { |
5018 | 3.13k | R = Builder.createZExt(R, ExtTy); |
5019 | 3.13k | } |
5020 | 5.18k | R = Builder.createShuffleVector( |
5021 | 5.18k | R, LLVM::Value::getUndef(ExtTy), |
5022 | 5.18k | LLVM::Value::getConstVector32(LLContext, Mask)); |
5023 | 5.18k | Stack.back() = Builder.createBitCast(R, Context.Int64x2Ty); |
5024 | 5.18k | } |
5025 | 1.70k | void compileVectorExtMul(LLVM::Type FromTy, bool Signed, bool Low) noexcept { |
5026 | 1.70k | auto ExtTy = FromTy.getExtendedElementVectorType(); |
5027 | 1.70k | const auto Count = FromTy.getVectorSize(); |
5028 | 1.70k | std::vector<uint32_t> Mask(Count / 2); |
5029 | 1.70k | std::iota(Mask.begin(), Mask.end(), Low ? 0 : Count / 2); |
5030 | 3.41k | auto Extend = [this, FromTy, Signed, ExtTy, &Mask](LLVM::Value R) noexcept { |
5031 | 3.41k | R = Builder.createBitCast(R, FromTy); |
5032 | 3.41k | if (Signed) { |
5033 | 1.52k | R = Builder.createSExt(R, ExtTy); |
5034 | 1.89k | } else { |
5035 | 1.89k | R = Builder.createZExt(R, ExtTy); |
5036 | 1.89k | } |
5037 | 3.41k | return Builder.createShuffleVector( |
5038 | 3.41k | R, LLVM::Value::getUndef(ExtTy), |
5039 | 3.41k | LLVM::Value::getConstVector32(LLContext, Mask)); |
5040 | 3.41k | }; |
5041 | 1.70k | auto RHS = Extend(stackPop()); |
5042 | 1.70k | auto LHS = Extend(stackPop()); |
5043 | 1.70k | stackPush( |
5044 | 1.70k | Builder.createBitCast(Builder.createMul(RHS, LHS), Context.Int64x2Ty)); |
5045 | 1.70k | } |
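
Extended multiplication widens both operands before multiplying, so the product always fits the wider result lane. A scalar sketch of one signed i8 -> i16 lane (illustrative helper name, not from the source):

#include <cassert>
#include <cstdint>

// Scalar extmul lane: widen first, then multiply, matching the
// SExt/ZExt-then-Mul sequence above.
static int16_t extmul_i8_s(int8_t A, int8_t B) {
  return static_cast<int16_t>(static_cast<int16_t>(A) *
                              static_cast<int16_t>(B));
}

int main() {
  assert(extmul_i8_s(-128, -128) == 16384); // would overflow in 8 bits
  return 0;
}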
5046 | 2.03k | void compileVectorExtAddPairwise(LLVM::Type VectorTy, bool Signed) noexcept { |
5047 | 2.03k | compileVectorOp( |
5048 | 2.03k | VectorTy, [this, VectorTy, Signed](auto V) noexcept -> LLVM::Value { |
5049 | 2.03k | auto ExtTy = VectorTy.getExtendedElementVectorType() |
5050 | 2.03k | .getHalfElementsVectorType(); |
5051 | 2.03k | #if defined(__x86_64__) |
5052 | 2.03k | const auto Count = VectorTy.getVectorSize(); |
5053 | 2.03k | if (Context.SupportXOP) { |
5054 | 0 | const auto ID = [Count, Signed]() noexcept { |
5055 | 0 | switch (Count) { |
5056 | 0 | case 8: |
5057 | 0 | return Signed ? LLVM::Core::X86XOpVPHAddWD |
5058 | 0 | : LLVM::Core::X86XOpVPHAddUWD; |
5059 | 0 | case 16: |
5060 | 0 | return Signed ? LLVM::Core::X86XOpVPHAddBW |
5061 | 0 | : LLVM::Core::X86XOpVPHAddUBW; |
5062 | 0 | default: |
5063 | 0 | assumingUnreachable(); |
5064 | 0 | } |
5065 | 0 | }(); |
5066 | 0 | assuming(ID != LLVM::Core::NotIntrinsic); |
5067 | 0 | return Builder.createUnaryIntrinsic(ID, V); |
5068 | 0 | } |
5069 | 2.03k | if (Context.SupportSSSE3 && Count == 16) { |
5070 | 704 | assuming(LLVM::Core::X86SSSE3PMAddUbSw128 != |
5071 | 704 | LLVM::Core::NotIntrinsic); |
5072 | 704 | if (Signed) { |
5073 | 370 | return Builder.createIntrinsic( |
5074 | 370 | LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5075 | 370 | {Builder.createVectorSplat(16, LLContext.getInt8(1)), V}); |
5076 | 370 | } else { |
5077 | 334 | return Builder.createIntrinsic( |
5078 | 334 | LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5079 | 334 | {V, Builder.createVectorSplat(16, LLContext.getInt8(1))}); |
5080 | 334 | } |
5081 | 704 | } |
5082 | 1.32k | if (Context.SupportSSE2 && Count == 8) { |
5083 | 1.32k | assuming(LLVM::Core::X86SSE2PMAddWd != LLVM::Core::NotIntrinsic); |
5084 | 1.32k | if (Signed) { |
5085 | 814 | return Builder.createIntrinsic( |
5086 | 814 | LLVM::Core::X86SSE2PMAddWd, {}, |
5087 | 814 | {V, Builder.createVectorSplat(8, LLContext.getInt16(1))}); |
5088 | 814 | } else { |
5089 | 515 | V = Builder.createXor( |
5090 | 515 | V, Builder.createVectorSplat(8, LLContext.getInt16(0x8000))); |
5091 | 515 | V = Builder.createIntrinsic( |
5092 | 515 | LLVM::Core::X86SSE2PMAddWd, {}, |
5093 | 515 | {V, Builder.createVectorSplat(8, LLContext.getInt16(1))}); |
5094 | 515 | return Builder.createAdd( |
5095 | 515 | V, Builder.createVectorSplat(4, LLContext.getInt32(0x10000))); |
5096 | 515 | } |
5097 | 1.32k | } |
5098 | 0 | #endif |
5099 | | |
5100 | | #if defined(__aarch64__) |
5101 | | if (Context.SupportNEON) { |
5102 | | const auto ID = Signed ? LLVM::Core::AArch64NeonSAddLP |
5103 | | : LLVM::Core::AArch64NeonUAddLP; |
5104 | | assuming(ID != LLVM::Core::NotIntrinsic); |
5105 | | return Builder.createIntrinsic(ID, {ExtTy, VectorTy}, {V}); |
5106 | | } |
5107 | | #endif |
5108 | | |
5109 | | // Fallback case. |
5110 | | // If none of XOP, SSSE3, or SSE2 applies on the x86_64 platform, or |
5111 | | // NEON is not supported on the aarch64 platform, |
5112 | | // then fall back to this shift-and-add lowering. |
5113 | 0 | auto Width = LLVM::Value::getConstInt( |
5114 | 0 | ExtTy.getElementType(), |
5115 | 0 | VectorTy.getElementType().getIntegerBitWidth()); |
5116 | 0 | Width = Builder.createVectorSplat(ExtTy.getVectorSize(), Width); |
5117 | 0 | auto EV = Builder.createBitCast(V, ExtTy); |
5118 | 0 | LLVM::Value L, R; |
5119 | 0 | if (Signed) { |
5120 | 0 | L = Builder.createAShr(EV, Width); |
5121 | 0 | R = Builder.createAShr(Builder.createShl(EV, Width), Width); |
5122 | 0 | } else { |
5123 | 0 | L = Builder.createLShr(EV, Width); |
5124 | 0 | R = Builder.createLShr(Builder.createShl(EV, Width), Width); |
5125 | 0 | } |
5126 | 0 | return Builder.createAdd(L, R); |
5127 | 1.32k | }); |
5128 | 2.03k | } |
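
The shift-based fallback above emulates a pairwise widening add: adjacent lanes are extended and summed into a lane of twice the width. A scalar sketch for the signed i8x16 -> i16x8 case; it is illustrative and not part of the source:

#include <array>
#include <cassert>
#include <cstdint>

// Scalar extadd_pairwise: each output lane is the widened sum of two
// adjacent input lanes, so the sum cannot overflow.
static std::array<int16_t, 8>
extadd_pairwise_i8x16_s(const std::array<int8_t, 16> &V) {
  std::array<int16_t, 8> Out{};
  for (size_t I = 0; I < 8; ++I) {
    Out[I] = static_cast<int16_t>(V[2 * I]) + static_cast<int16_t>(V[2 * I + 1]);
  }
  return Out;
}

int main() {
  std::array<int8_t, 16> V{};
  V[0] = 127; V[1] = 127; V[2] = -128; V[3] = -1;
  const auto Out = extadd_pairwise_i8x16_s(V);
  assert(Out[0] == 254 && Out[1] == -129); // widened sums cannot overflow
  return 0;
}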
5129 | 497 | void compileVectorFAbs(LLVM::Type VectorTy) noexcept { |
5130 | 497 | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5131 | 497 | assuming(LLVM::Core::Fabs != LLVM::Core::NotIntrinsic); |
5132 | 497 | return Builder.createUnaryIntrinsic(LLVM::Core::Fabs, V); |
5133 | 497 | }); |
5134 | 497 | } |
5135 | 944 | void compileVectorFNeg(LLVM::Type VectorTy) noexcept { |
5136 | 944 | compileVectorOp(VectorTy, |
5137 | 944 | [this](auto V) noexcept { return Builder.createFNeg(V); }); |
5138 | 944 | } |
5139 | 324 | void compileVectorFSqrt(LLVM::Type VectorTy) noexcept { |
5140 | 324 | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5141 | 324 | assuming(LLVM::Core::Sqrt != LLVM::Core::NotIntrinsic); |
5142 | 324 | return Builder.createUnaryIntrinsic(LLVM::Core::Sqrt, V); |
5143 | 324 | }); |
5144 | 324 | } |
5145 | 1.66k | void compileVectorFCeil(LLVM::Type VectorTy) noexcept { |
5146 | 1.66k | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5147 | 1.66k | assuming(LLVM::Core::Ceil != LLVM::Core::NotIntrinsic); |
5148 | 1.66k | return Builder.createUnaryIntrinsic(LLVM::Core::Ceil, V); |
5149 | 1.66k | }); |
5150 | 1.66k | } |
5151 | 2.69k | void compileVectorFFloor(LLVM::Type VectorTy) noexcept { |
5152 | 2.69k | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5153 | 2.69k | assuming(LLVM::Core::Floor != LLVM::Core::NotIntrinsic); |
5154 | 2.69k | return Builder.createUnaryIntrinsic(LLVM::Core::Floor, V); |
5155 | 2.69k | }); |
5156 | 2.69k | } |
5157 | 2.03k | void compileVectorFTrunc(LLVM::Type VectorTy) noexcept { |
5158 | 2.03k | compileVectorOp(VectorTy, [this](auto V) noexcept { |
5159 | 2.03k | assuming(LLVM::Core::Trunc != LLVM::Core::NotIntrinsic); |
5160 | 2.03k | return Builder.createUnaryIntrinsic(LLVM::Core::Trunc, V); |
5161 | 2.03k | }); |
5162 | 2.03k | } |
5163 | 433 | void compileVectorFNearest(LLVM::Type VectorTy) noexcept { |
5164 | 433 | compileVectorOp(VectorTy, [&](auto V) noexcept { |
5165 | 433 | #if LLVM_VERSION_MAJOR >= 12 |
5166 | 433 | assuming(LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic); |
5167 | 433 | if (LLVM::Core::Roundeven != LLVM::Core::NotIntrinsic) { |
5168 | 433 | return Builder.createUnaryIntrinsic(LLVM::Core::Roundeven, V); |
5169 | 433 | } |
5170 | 0 | #endif |
5171 | | |
5172 | 0 | #if defined(__x86_64__) |
5173 | 0 | if (Context.SupportSSE4_1) { |
5174 | 0 | const bool IsFloat = VectorTy.getElementType().isFloatTy(); |
5175 | 0 | auto ID = |
5176 | 0 | IsFloat ? LLVM::Core::X86SSE41RoundPs : LLVM::Core::X86SSE41RoundPd; |
5177 | 0 | assuming(ID != LLVM::Core::NotIntrinsic); |
5178 | 0 | return Builder.createIntrinsic(ID, {}, {V, LLContext.getInt32(8)}); |
5179 | 0 | } |
5180 | 0 | #endif |
5181 | | |
5182 | | #if defined(__aarch64__) |
5183 | | if (Context.SupportNEON && |
5184 | | LLVM::Core::AArch64NeonFRIntN != LLVM::Core::NotIntrinsic) { |
5185 | | return Builder.createUnaryIntrinsic(LLVM::Core::AArch64NeonFRIntN, V); |
5186 | | } |
5187 | | #endif |
5188 | | |
5189 | | // Fallback case. |
5190 | | // If SSE4.1 is not supported on the x86_64 platform or |
5191 | | // NEON is not supported on the aarch64 platform, |
5192 | | // then fall back to the nearbyint intrinsic. |
5193 | 0 | assuming(LLVM::Core::Nearbyint != LLVM::Core::NotIntrinsic); |
5194 | 0 | return Builder.createUnaryIntrinsic(LLVM::Core::Nearbyint, V); |
5195 | 0 | }); |
5196 | 433 | } |
5197 | 178 | void compileVectorVectorFAdd(LLVM::Type VectorTy) noexcept { |
5198 | 178 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5199 | 178 | return Builder.createFAdd(LHS, RHS); |
5200 | 178 | }); |
5201 | 178 | } |
5202 | 470 | void compileVectorVectorFSub(LLVM::Type VectorTy) noexcept { |
5203 | 470 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5204 | 470 | return Builder.createFSub(LHS, RHS); |
5205 | 470 | }); |
5206 | 470 | } |
5207 | 225 | void compileVectorVectorFMul(LLVM::Type VectorTy) noexcept { |
5208 | 225 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5209 | 225 | return Builder.createFMul(LHS, RHS); |
5210 | 225 | }); |
5211 | 225 | } |
5212 | 231 | void compileVectorVectorFDiv(LLVM::Type VectorTy) noexcept { |
5213 | 231 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5214 | 231 | return Builder.createFDiv(LHS, RHS); |
5215 | 231 | }); |
5216 | 231 | } |
5217 | 301 | void compileVectorVectorFMin(LLVM::Type VectorTy) noexcept { |
5218 | 301 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5219 | 301 | auto LNaN = Builder.createFCmpUNO(LHS, LHS); |
5220 | 301 | auto RNaN = Builder.createFCmpUNO(RHS, RHS); |
5221 | 301 | auto OLT = Builder.createFCmpOLT(LHS, RHS); |
5222 | 301 | auto OGT = Builder.createFCmpOGT(LHS, RHS); |
5223 | 301 | auto Ret = Builder.createBitCast( |
5224 | 301 | Builder.createOr(Builder.createBitCast(LHS, Context.Int64x2Ty), |
5225 | 301 | Builder.createBitCast(RHS, Context.Int64x2Ty)), |
5226 | 301 | LHS.getType()); |
5227 | 301 | Ret = Builder.createSelect(OGT, RHS, Ret); |
5228 | 301 | Ret = Builder.createSelect(OLT, LHS, Ret); |
5229 | 301 | Ret = Builder.createSelect(RNaN, RHS, Ret); |
5230 | 301 | Ret = Builder.createSelect(LNaN, LHS, Ret); |
5231 | 301 | return Ret; |
5232 | 301 | }); |
5233 | 301 | } |
5234 | 211 | void compileVectorVectorFMax(LLVM::Type VectorTy) noexcept { |
5235 | 211 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5236 | 211 | auto LNaN = Builder.createFCmpUNO(LHS, LHS); |
5237 | 211 | auto RNaN = Builder.createFCmpUNO(RHS, RHS); |
5238 | 211 | auto OLT = Builder.createFCmpOLT(LHS, RHS); |
5239 | 211 | auto OGT = Builder.createFCmpOGT(LHS, RHS); |
5240 | 211 | auto Ret = Builder.createBitCast( |
5241 | 211 | Builder.createAnd(Builder.createBitCast(LHS, Context.Int64x2Ty), |
5242 | 211 | Builder.createBitCast(RHS, Context.Int64x2Ty)), |
5243 | 211 | LHS.getType()); |
5244 | 211 | Ret = Builder.createSelect(OLT, RHS, Ret); |
5245 | 211 | Ret = Builder.createSelect(OGT, LHS, Ret); |
5246 | 211 | Ret = Builder.createSelect(RNaN, RHS, Ret); |
5247 | 211 | Ret = Builder.createSelect(LNaN, LHS, Ret); |
5248 | 211 | return Ret; |
5249 | 211 | }); |
5250 | 211 | } |
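
The select chains in compileVectorVectorFMin/FMax implement the WebAssembly min/max that propagates NaN and distinguishes signed zeros via the bitwise OR (min) or AND (max) of the operands. A scalar restatement of the min case, assuming IEEE-754 binary32; the helper is illustrative only:

#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>

// Scalar model of the f32 min lane: NaN operands propagate, and for equal
// magnitudes the bit patterns are ORed so min(-0.0, +0.0) is -0.0.
static float wasm_fmin(float L, float R) {
  if (std::isnan(L)) return L;
  if (std::isnan(R)) return R;
  if (L < R) return L;
  if (R < L) return R;
  uint32_t LB, RB;
  std::memcpy(&LB, &L, sizeof(LB));
  std::memcpy(&RB, &R, sizeof(RB));
  const uint32_t OB = LB | RB; // equal values: keep the negative zero
  float Out;
  std::memcpy(&Out, &OB, sizeof(Out));
  return Out;
}

int main() {
  assert(std::signbit(wasm_fmin(-0.0f, +0.0f))); // -0.0 is the minimum
  assert(wasm_fmin(1.0f, 2.0f) == 1.0f);
  assert(std::isnan(wasm_fmin(std::nanf(""), 1.0f)));
  return 0;
}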
5251 | 289 | void compileVectorVectorFPMin(LLVM::Type VectorTy) noexcept { |
5252 | 289 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5253 | 289 | auto Cmp = Builder.createFCmpOLT(RHS, LHS); |
5254 | 289 | return Builder.createSelect(Cmp, RHS, LHS); |
5255 | 289 | }); |
5256 | 289 | } |
5257 | 276 | void compileVectorVectorFPMax(LLVM::Type VectorTy) noexcept { |
5258 | 276 | compileVectorVectorOp(VectorTy, [this](auto LHS, auto RHS) noexcept { |
5259 | 276 | auto Cmp = Builder.createFCmpOGT(RHS, LHS); |
5260 | 276 | return Builder.createSelect(Cmp, RHS, LHS); |
5261 | 276 | }); |
5262 | 276 | } |
5263 | 960 | void compileVectorTruncSatS32(LLVM::Type VectorTy, bool PadZero) noexcept { |
5264 | 960 | compileVectorOp(VectorTy, [this, VectorTy, PadZero](auto V) noexcept { |
5265 | 960 | const auto Size = VectorTy.getVectorSize(); |
5266 | 960 | auto FPTy = VectorTy.getElementType(); |
5267 | 960 | auto IntMin = LLContext.getInt32( |
5268 | 960 | static_cast<uint32_t>(std::numeric_limits<int32_t>::min())); |
5269 | 960 | auto IntMax = LLContext.getInt32( |
5270 | 960 | static_cast<uint32_t>(std::numeric_limits<int32_t>::max())); |
5271 | 960 | auto IntMinV = Builder.createVectorSplat(Size, IntMin); |
5272 | 960 | auto IntMaxV = Builder.createVectorSplat(Size, IntMax); |
5273 | 960 | auto IntZeroV = LLVM::Value::getConstNull(IntMinV.getType()); |
5274 | 960 | auto FPMin = Builder.createSIToFP(IntMin, FPTy); |
5275 | 960 | auto FPMax = Builder.createSIToFP(IntMax, FPTy); |
5276 | 960 | auto FPMinV = Builder.createVectorSplat(Size, FPMin); |
5277 | 960 | auto FPMaxV = Builder.createVectorSplat(Size, FPMax); |
5278 | | |
5279 | 960 | auto Normal = Builder.createFCmpORD(V, V); |
5280 | 960 | auto NotUnder = Builder.createFCmpUGE(V, FPMinV); |
5281 | 960 | auto NotOver = Builder.createFCmpULT(V, FPMaxV); |
5282 | 960 | V = Builder.createFPToSI( |
5283 | 960 | V, LLVM::Type::getVectorType(LLContext.getInt32Ty(), Size)); |
5284 | 960 | V = Builder.createSelect(Normal, V, IntZeroV); |
5285 | 960 | V = Builder.createSelect(NotUnder, V, IntMinV); |
5286 | 960 | V = Builder.createSelect(NotOver, V, IntMaxV); |
5287 | 960 | if (PadZero) { |
5288 | 755 | std::vector<uint32_t> Mask(Size * 2); |
5289 | 755 | std::iota(Mask.begin(), Mask.end(), 0); |
5290 | 755 | V = Builder.createShuffleVector( |
5291 | 755 | V, IntZeroV, LLVM::Value::getConstVector32(LLContext, Mask)); |
5292 | 755 | } |
5293 | 960 | return V; |
5294 | 960 | }); |
5295 | 960 | } |
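
compileVectorTruncSatS32 lowers the saturating float-to-int conversion: NaN lanes become zero and out-of-range lanes clamp instead of trapping. A scalar sketch of one f32 lane, with an illustrative helper name:

#include <cassert>
#include <cmath>
#include <cstdint>

// Scalar i32.trunc_sat_f32_s: NaN maps to 0, values below INT32_MIN clamp
// down, values at or above 2^31 clamp up, everything else truncates.
static int32_t trunc_sat_s(float V) {
  if (std::isnan(V)) {
    return 0; // handled by the "Normal" select above
  }
  if (V < -2147483648.0f) {
    return INT32_MIN;
  }
  if (V >= 2147483648.0f) {
    return INT32_MAX;
  }
  return static_cast<int32_t>(V);
}

int main() {
  assert(trunc_sat_s(std::nanf("")) == 0);
  assert(trunc_sat_s(3.9f) == 3); // truncates toward zero
  assert(trunc_sat_s(1e20f) == INT32_MAX);
  return 0;
}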
5296 | 5.87k | void compileVectorTruncSatU32(LLVM::Type VectorTy, bool PadZero) noexcept { |
5297 | 5.87k | compileVectorOp(VectorTy, [this, VectorTy, PadZero](auto V) noexcept { |
5298 | 5.87k | const auto Size = VectorTy.getVectorSize(); |
5299 | 5.87k | auto FPTy = VectorTy.getElementType(); |
5300 | 5.87k | auto IntMin = LLContext.getInt32(std::numeric_limits<uint32_t>::min()); |
5301 | 5.87k | auto IntMax = LLContext.getInt32(std::numeric_limits<uint32_t>::max()); |
5302 | 5.87k | auto IntMinV = Builder.createVectorSplat(Size, IntMin); |
5303 | 5.87k | auto IntMaxV = Builder.createVectorSplat(Size, IntMax); |
5304 | 5.87k | auto FPMin = Builder.createUIToFP(IntMin, FPTy); |
5305 | 5.87k | auto FPMax = Builder.createUIToFP(IntMax, FPTy); |
5306 | 5.87k | auto FPMinV = Builder.createVectorSplat(Size, FPMin); |
5307 | 5.87k | auto FPMaxV = Builder.createVectorSplat(Size, FPMax); |
5308 | | |
5309 | 5.87k | auto NotUnder = Builder.createFCmpOGE(V, FPMinV); |
5310 | 5.87k | auto NotOver = Builder.createFCmpULT(V, FPMaxV); |
5311 | 5.87k | V = Builder.createFPToUI( |
5312 | 5.87k | V, LLVM::Type::getVectorType(LLContext.getInt32Ty(), Size)); |
5313 | 5.87k | V = Builder.createSelect(NotUnder, V, IntMinV); |
5314 | 5.87k | V = Builder.createSelect(NotOver, V, IntMaxV); |
5315 | 5.87k | if (PadZero) { |
5316 | 2.14k | auto IntZeroV = LLVM::Value::getConstNull(IntMinV.getType()); |
5317 | 2.14k | std::vector<uint32_t> Mask(Size * 2); |
5318 | 2.14k | std::iota(Mask.begin(), Mask.end(), 0); |
5319 | 2.14k | V = Builder.createShuffleVector( |
5320 | 2.14k | V, IntZeroV, LLVM::Value::getConstVector32(LLContext, Mask)); |
5321 | 2.14k | } |
5322 | 5.87k | return V; |
5323 | 5.87k | }); |
5324 | 5.87k | } |
5325 | | void compileVectorConvertS(LLVM::Type VectorTy, LLVM::Type FPVectorTy, |
5326 | 666 | bool Low) noexcept { |
5327 | 666 | compileVectorOp(VectorTy, |
5328 | 666 | [this, VectorTy, FPVectorTy, Low](auto V) noexcept { |
5329 | 666 | if (Low) { |
5330 | 333 | const auto Size = VectorTy.getVectorSize() / 2; |
5331 | 333 | std::vector<uint32_t> Mask(Size); |
5332 | 333 | std::iota(Mask.begin(), Mask.end(), 0); |
5333 | 333 | V = Builder.createShuffleVector( |
5334 | 333 | V, LLVM::Value::getUndef(VectorTy), |
5335 | 333 | LLVM::Value::getConstVector32(LLContext, Mask)); |
5336 | 333 | } |
5337 | 666 | return Builder.createSIToFP(V, FPVectorTy); |
5338 | 666 | }); |
5339 | 666 | } |
5340 | | void compileVectorConvertU(LLVM::Type VectorTy, LLVM::Type FPVectorTy, |
5341 | 1.97k | bool Low) noexcept { |
5342 | 1.97k | compileVectorOp(VectorTy, |
5343 | 1.97k | [this, VectorTy, FPVectorTy, Low](auto V) noexcept { |
5344 | 1.97k | if (Low) { |
5345 | 1.25k | const auto Size = VectorTy.getVectorSize() / 2; |
5346 | 1.25k | std::vector<uint32_t> Mask(Size); |
5347 | 1.25k | std::iota(Mask.begin(), Mask.end(), 0); |
5348 | 1.25k | V = Builder.createShuffleVector( |
5349 | 1.25k | V, LLVM::Value::getUndef(VectorTy), |
5350 | 1.25k | LLVM::Value::getConstVector32(LLContext, Mask)); |
5351 | 1.25k | } |
5352 | 1.97k | return Builder.createUIToFP(V, FPVectorTy); |
5353 | 1.97k | }); |
5354 | 1.97k | } |
5355 | 733 | void compileVectorDemote() noexcept { |
5356 | 733 | compileVectorOp(Context.Doublex2Ty, [this](auto V) noexcept { |
5357 | 733 | auto Demoted = Builder.createFPTrunc( |
5358 | 733 | V, LLVM::Type::getVectorType(Context.FloatTy, 2)); |
5359 | 733 | auto ZeroV = LLVM::Value::getConstNull(Demoted.getType()); |
5360 | 733 | return Builder.createShuffleVector( |
5361 | 733 | Demoted, ZeroV, |
5362 | 733 | LLVM::Value::getConstVector32(LLContext, {0u, 1u, 2u, 3u})); |
5363 | 733 | }); |
5364 | 733 | } |
5365 | 723 | void compileVectorPromote() noexcept { |
5366 | 723 | compileVectorOp(Context.Floatx4Ty, [this](auto V) noexcept { |
5367 | 723 | auto UndefV = LLVM::Value::getUndef(V.getType()); |
5368 | 723 | auto Low = Builder.createShuffleVector( |
5369 | 723 | V, UndefV, LLVM::Value::getConstVector32(LLContext, {0u, 1u})); |
5370 | 723 | return Builder.createFPExt( |
5371 | 723 | Low, LLVM::Type::getVectorType(Context.DoubleTy, 2)); |
5372 | 723 | }); |
5373 | 723 | } |
5374 | | |
5375 | 0 | void compileVectorVectorMAdd(LLVM::Type VectorTy) noexcept { |
5376 | 0 | auto C = Builder.createBitCast(stackPop(), VectorTy); |
5377 | 0 | auto RHS = Builder.createBitCast(stackPop(), VectorTy); |
5378 | 0 | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
5379 | 0 | stackPush(Builder.createBitCast( |
5380 | 0 | Builder.createFAdd(Builder.createFMul(LHS, RHS), C), |
5381 | 0 | Context.Int64x2Ty)); |
5382 | 0 | } |
5383 | | |
5384 | 0 | void compileVectorVectorNMAdd(LLVM::Type VectorTy) noexcept { |
5385 | 0 | auto C = Builder.createBitCast(stackPop(), VectorTy); |
5386 | 0 | auto RHS = Builder.createBitCast(stackPop(), VectorTy); |
5387 | 0 | auto LHS = Builder.createBitCast(stackPop(), VectorTy); |
5388 | 0 | stackPush(Builder.createBitCast( |
5389 | 0 | Builder.createFAdd(Builder.createFMul(Builder.createFNeg(LHS), RHS), C), |
5390 | 0 | Context.Int64x2Ty)); |
5391 | 0 | } |
5392 | | |
5393 | 0 | void compileVectorRelaxedIntegerDotProduct() noexcept { |
5394 | 0 | auto OriTy = Context.Int8x16Ty; |
5395 | 0 | auto ExtTy = Context.Int16x8Ty; |
5396 | 0 | auto RHS = Builder.createBitCast(stackPop(), OriTy); |
5397 | 0 | auto LHS = Builder.createBitCast(stackPop(), OriTy); |
5398 | 0 | #if defined(__x86_64__) |
5399 | 0 | if (Context.SupportSSSE3) { |
5400 | 0 | assuming(LLVM::Core::X86SSSE3PMAddUbSw128 != LLVM::Core::NotIntrinsic); |
5401 | | // WebAssembly Relaxed SIMD spec: signed(LHS) * unsigned/signed(RHS). |
5402 | | // But PMAddUbSw128 computes unsigned(LHS) * signed(RHS), so swap both |
5403 | | // sides to match the WebAssembly spec. |
5404 | 0 | return stackPush(Builder.createBitCast( |
5405 | 0 | Builder.createIntrinsic(LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5406 | 0 | {RHS, LHS}), |
5407 | 0 | Context.Int64x2Ty)); |
5408 | 0 | } |
5409 | 0 | #endif |
5410 | 0 | auto Width = LLVM::Value::getConstInt( |
5411 | 0 | ExtTy.getElementType(), OriTy.getElementType().getIntegerBitWidth()); |
5412 | 0 | Width = Builder.createVectorSplat(ExtTy.getVectorSize(), Width); |
5413 | 0 | auto EA = Builder.createBitCast(LHS, ExtTy); |
5414 | 0 | auto EB = Builder.createBitCast(RHS, ExtTy); |
5415 | |
5416 | 0 | LLVM::Value AL, AR, BL, BR; |
5417 | 0 | AL = Builder.createAShr(EA, Width); |
5418 | 0 | AR = Builder.createAShr(Builder.createShl(EA, Width), Width); |
5419 | 0 | BL = Builder.createAShr(EB, Width); |
5420 | 0 | BR = Builder.createAShr(Builder.createShl(EB, Width), Width); |
5421 | |
5422 | 0 | return stackPush(Builder.createBitCast( |
5423 | 0 | Builder.createAdd(Builder.createMul(AL, BL), Builder.createMul(AR, BR)), |
5424 | 0 | Context.Int64x2Ty)); |
5425 | 0 | } |
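
The fallback above computes the relaxed integer dot product by splitting each 16-bit chunk back into its two byte lanes with shift pairs, multiplying, and adding. A scalar sketch of one output lane, treating both inputs as signed the way the AShr-based path does; the helper name is illustrative:

#include <cassert>
#include <cstdint>

// Scalar model of one lane: two adjacent i8 products accumulated into an
// i16 result, which is what the shift/mul/add sequence above produces.
static int16_t dot_pair_s(int8_t A0, int8_t A1, int8_t B0, int8_t B1) {
  return static_cast<int16_t>(static_cast<int16_t>(A0) * B0 +
                              static_cast<int16_t>(A1) * B1);
}

int main() {
  assert(dot_pair_s(2, 3, 4, 5) == 23); // 2*4 + 3*5
  assert(dot_pair_s(-1, 1, 127, 127) == 0);
  return 0;
}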
5426 | | |
5427 | 0 | void compileVectorRelaxedIntegerDotProductAdd() noexcept { |
5428 | 0 | auto OriTy = Context.Int8x16Ty; |
5429 | 0 | auto ExtTy = Context.Int16x8Ty; |
5430 | 0 | auto FinTy = Context.Int32x4Ty; |
5431 | 0 | auto VC = Builder.createBitCast(stackPop(), FinTy); |
5432 | 0 | auto RHS = Builder.createBitCast(stackPop(), OriTy); |
5433 | 0 | auto LHS = Builder.createBitCast(stackPop(), OriTy); |
5434 | 0 | LLVM::Value IM; |
5435 | 0 | #if defined(__x86_64__) |
5436 | 0 | if (Context.SupportSSSE3) { |
5437 | 0 | assuming(LLVM::Core::X86SSSE3PMAddUbSw128 != LLVM::Core::NotIntrinsic); |
5438 | | // WebAssembly Relaxed SIMD spec: signed(LHS) * unsigned/signed(RHS). |
5439 | | // But PMAddUbSw128 computes unsigned(LHS) * signed(RHS), so swap both |
5440 | | // sides to match the WebAssembly spec. |
5441 | 0 | IM = Builder.createIntrinsic(LLVM::Core::X86SSSE3PMAddUbSw128, {}, |
5442 | 0 | {RHS, LHS}); |
5443 | 0 | } else |
5444 | 0 | #endif |
5445 | 0 | { |
5446 | 0 | auto Width = LLVM::Value::getConstInt( |
5447 | 0 | ExtTy.getElementType(), OriTy.getElementType().getIntegerBitWidth()); |
5448 | 0 | Width = Builder.createVectorSplat(ExtTy.getVectorSize(), Width); |
5449 | 0 | auto EA = Builder.createBitCast(LHS, ExtTy); |
5450 | 0 | auto EB = Builder.createBitCast(RHS, ExtTy); |
5451 | |
5452 | 0 | LLVM::Value AL, AR, BL, BR; |
5453 | 0 | AL = Builder.createAShr(EA, Width); |
5454 | 0 | AR = Builder.createAShr(Builder.createShl(EA, Width), Width); |
5455 | 0 | BL = Builder.createAShr(EB, Width); |
5456 | 0 | BR = Builder.createAShr(Builder.createShl(EB, Width), Width); |
5457 | 0 | IM = Builder.createAdd(Builder.createMul(AL, BL), |
5458 | 0 | Builder.createMul(AR, BR)); |
5459 | 0 | } |
5460 | | |
5461 | 0 | auto Width = LLVM::Value::getConstInt( |
5462 | 0 | FinTy.getElementType(), ExtTy.getElementType().getIntegerBitWidth()); |
5463 | 0 | Width = Builder.createVectorSplat(FinTy.getVectorSize(), Width); |
5464 | 0 | auto IME = Builder.createBitCast(IM, FinTy); |
5465 | 0 | auto L = Builder.createAShr(IME, Width); |
5466 | 0 | auto R = Builder.createAShr(Builder.createShl(IME, Width), Width); |
5467 | |
5468 | 0 | return stackPush(Builder.createBitCast( |
5469 | 0 | Builder.createAdd(Builder.createAdd(L, R), VC), Context.Int64x2Ty)); |
5470 | 0 | } |
5471 | | |
5472 | | void |
5473 | | enterBlock(LLVM::BasicBlock JumpBlock, LLVM::BasicBlock NextBlock, |
5474 | | LLVM::BasicBlock ElseBlock, std::vector<LLVM::Value> Args, |
5475 | | std::pair<std::vector<ValType>, std::vector<ValType>> Type, |
5476 | | std::vector<std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5477 | 22.2k | ReturnPHI = {}) noexcept { |
5478 | 22.2k | assuming(Type.first.size() == Args.size()); |
5479 | 22.2k | for (auto &Value : Args) { |
5480 | 4.09k | stackPush(Value); |
5481 | 4.09k | } |
5482 | 22.2k | const auto Unreachable = isUnreachable(); |
5483 | 22.2k | ControlStack.emplace_back(Stack.size() - Args.size(), Unreachable, |
5484 | 22.2k | JumpBlock, NextBlock, ElseBlock, std::move(Args), |
5485 | 22.2k | std::move(Type), std::move(ReturnPHI)); |
5486 | 22.2k | } |
5487 | | |
5488 | 22.2k | Control leaveBlock() noexcept { |
5489 | 22.2k | Control Entry = std::move(ControlStack.back()); |
5490 | 22.2k | ControlStack.pop_back(); |
5491 | | |
5492 | 22.2k | auto NextBlock = Entry.NextBlock ? Entry.NextBlock : Entry.JumpBlock; |
5493 | 22.2k | if (!Entry.Unreachable) { |
5494 | 14.8k | const auto &ReturnType = Entry.Type.second; |
5495 | 14.8k | if (!ReturnType.empty()) { |
5496 | 10.8k | std::vector<LLVM::Value> Rets(ReturnType.size()); |
5497 | 22.3k | for (size_t I = 0; I < Rets.size(); ++I) { |
5498 | 11.5k | const size_t J = Rets.size() - 1 - I; |
5499 | 11.5k | Rets[J] = stackPop(); |
5500 | 11.5k | } |
5501 | 10.8k | Entry.ReturnPHI.emplace_back(std::move(Rets), Builder.getInsertBlock()); |
5502 | 10.8k | } |
5503 | 14.8k | Builder.createBr(NextBlock); |
5504 | 14.8k | } else { |
5505 | 7.45k | Builder.createUnreachable(); |
5506 | 7.45k | } |
5507 | 22.2k | Builder.positionAtEnd(NextBlock); |
5508 | 22.2k | Stack.erase(Stack.begin() + static_cast<int64_t>(Entry.StackSize), |
5509 | 22.2k | Stack.end()); |
5510 | 22.2k | return Entry; |
5511 | 22.2k | } |
5512 | | |
5513 | 5.46k | void checkStop() noexcept { |
5514 | 5.46k | if (!Interruptible) { |
5515 | 5.46k | return; |
5516 | 5.46k | } |
5517 | 0 | auto NotStopBB = LLVM::BasicBlock::create(LLContext, F.Fn, "NotStop"); |
5518 | 0 | auto StopToken = Builder.createAtomicRMW( |
5519 | 0 | LLVMAtomicRMWBinOpXchg, Context.getStopToken(Builder, ExecCtx), |
5520 | 0 | LLContext.getInt32(0), LLVMAtomicOrderingMonotonic); |
5521 | | #if LLVM_VERSION_MAJOR >= 13 |
5522 | | StopToken.setAlignment(32); |
5523 | | #endif |
5524 | 0 | auto NotStop = Builder.createLikely( |
5525 | 0 | Builder.createICmpEQ(StopToken, LLContext.getInt32(0))); |
5526 | 0 | Builder.createCondBr(NotStop, NotStopBB, |
5527 | 0 | getTrapBB(ErrCode::Value::Interrupted)); |
5528 | |
5529 | 0 | Builder.positionAtEnd(NotStopBB); |
5530 | 0 | } |
5531 | | |
5532 | 5.62k | void setUnreachable() noexcept { |
5533 | 5.62k | if (ControlStack.empty()) { |
5534 | 0 | IsUnreachable = true; |
5535 | 5.62k | } else { |
5536 | 5.62k | ControlStack.back().Unreachable = true; |
5537 | 5.62k | } |
5538 | 5.62k | } |
5539 | | |
5540 | 1.36M | bool isUnreachable() const noexcept { |
5541 | 1.36M | if (ControlStack.empty()) { |
5542 | 11.7k | return IsUnreachable; |
5543 | 1.34M | } else { |
5544 | 1.34M | return ControlStack.back().Unreachable; |
5545 | 1.34M | } |
5546 | 1.36M | } |
5547 | | |
5548 | | void |
5549 | | buildPHI(Span<const ValType> RetType, |
5550 | | Span<const std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5551 | 19.7k | Incomings) noexcept { |
5552 | 19.7k | if (isVoidReturn(RetType)) { |
5553 | 6.08k | return; |
5554 | 6.08k | } |
5555 | 13.6k | std::vector<LLVM::Value> Nodes; |
5556 | 13.6k | if (Incomings.size() == 0) { |
5557 | 2.49k | const auto &Types = toLLVMTypeVector(LLContext, RetType); |
5558 | 2.49k | Nodes.reserve(Types.size()); |
5559 | 2.86k | for (LLVM::Type Type : Types) { |
5560 | 2.86k | Nodes.push_back(LLVM::Value::getUndef(Type)); |
5561 | 2.86k | } |
5562 | 11.1k | } else if (Incomings.size() == 1) { |
5563 | 9.99k | Nodes = std::move(std::get<0>(Incomings.front())); |
5564 | 9.99k | } else { |
5565 | 1.14k | const auto &Types = toLLVMTypeVector(LLContext, RetType); |
5566 | 1.14k | Nodes.reserve(Types.size()); |
5567 | 2.43k | for (size_t I = 0; I < Types.size(); ++I) { |
5568 | 1.28k | auto PHIRet = Builder.createPHI(Types[I]); |
5569 | 3.30k | for (auto &[Value, BB] : Incomings) { |
5570 | 3.30k | assuming(Value.size() == Types.size()); |
5571 | 3.30k | PHIRet.addIncoming(Value[I], BB); |
5572 | 3.30k | } |
5573 | 1.28k | Nodes.push_back(PHIRet); |
5574 | 1.28k | } |
5575 | 1.14k | } |
5576 | 14.7k | for (auto &Val : Nodes) { |
5577 | 14.7k | stackPush(Val); |
5578 | 14.7k | } |
5579 | 13.6k | } |
5580 | | |
5581 | 21.8k | void setLableJumpPHI(unsigned int Index) noexcept { |
5582 | 21.8k | assuming(Index < ControlStack.size()); |
5583 | 21.8k | auto &Entry = *(ControlStack.rbegin() + Index); |
5584 | 21.8k | if (Entry.NextBlock) { // is loop |
5585 | 2.21k | std::vector<LLVM::Value> Args(Entry.Type.first.size()); |
5586 | 4.11k | for (size_t I = 0; I < Args.size(); ++I) { |
5587 | 1.89k | const size_t J = Args.size() - 1 - I; |
5588 | 1.89k | Args[J] = stackPop(); |
5589 | 1.89k | } |
5590 | 4.11k | for (size_t I = 0; I < Args.size(); ++I) { |
5591 | 1.89k | Entry.Args[I].addIncoming(Args[I], Builder.getInsertBlock()); |
5592 | 1.89k | stackPush(Args[I]); |
5593 | 1.89k | } |
5594 | 19.6k | } else if (!Entry.Type.second.empty()) { // has return value |
5595 | 2.15k | std::vector<LLVM::Value> Rets(Entry.Type.second.size()); |
5596 | 4.51k | for (size_t I = 0; I < Rets.size(); ++I) { |
5597 | 2.36k | const size_t J = Rets.size() - 1 - I; |
5598 | 2.36k | Rets[J] = stackPop(); |
5599 | 2.36k | } |
5600 | 4.51k | for (size_t I = 0; I < Rets.size(); ++I) { |
5601 | 2.36k | stackPush(Rets[I]); |
5602 | 2.36k | } |
5603 | 2.15k | Entry.ReturnPHI.emplace_back(std::move(Rets), Builder.getInsertBlock()); |
5604 | 2.15k | } |
5605 | 21.8k | } |
5606 | | |
5607 | 21.8k | LLVM::BasicBlock getLabel(unsigned int Index) const noexcept { |
5608 | 21.8k | return (ControlStack.rbegin() + Index)->JumpBlock; |
5609 | 21.8k | } |
5610 | | |
5611 | 798k | void stackPush(LLVM::Value Value) noexcept { Stack.push_back(Value); } |
5612 | 308k | LLVM::Value stackPop() noexcept { |
5613 | 308k | assuming(!ControlStack.empty() || !Stack.empty()); |
5614 | 308k | assuming(ControlStack.empty() || |
5615 | 308k | Stack.size() > ControlStack.back().StackSize); |
5616 | 308k | auto Value = Stack.back(); |
5617 | 308k | Stack.pop_back(); |
5618 | 308k | return Value; |
5619 | 308k | } |
5620 | | |
5621 | | LLVM::Compiler::CompileContext &Context; |
5622 | | LLVM::Context LLContext; |
5623 | | std::vector<std::pair<LLVM::Type, LLVM::Value>> Local; |
5624 | | std::vector<LLVM::Value> Stack; |
5625 | | LLVM::Value LocalInstrCount = nullptr; |
5626 | | LLVM::Value LocalGas = nullptr; |
5627 | | std::unordered_map<ErrCode::Value, LLVM::BasicBlock> TrapBB; |
5628 | | bool IsUnreachable = false; |
5629 | | bool Interruptible = false; |
5630 | | struct Control { |
5631 | | size_t StackSize; |
5632 | | bool Unreachable; |
5633 | | LLVM::BasicBlock JumpBlock; |
5634 | | LLVM::BasicBlock NextBlock; |
5635 | | LLVM::BasicBlock ElseBlock; |
5636 | | std::vector<LLVM::Value> Args; |
5637 | | std::pair<std::vector<ValType>, std::vector<ValType>> Type; |
5638 | | std::vector<std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5639 | | ReturnPHI; |
5640 | | Control(size_t S, bool U, LLVM::BasicBlock J, LLVM::BasicBlock N, |
5641 | | LLVM::BasicBlock E, std::vector<LLVM::Value> A, |
5642 | | std::pair<std::vector<ValType>, std::vector<ValType>> T, |
5643 | | std::vector<std::tuple<std::vector<LLVM::Value>, LLVM::BasicBlock>> |
5644 | | R) noexcept |
5645 | 22.2k | : StackSize(S), Unreachable(U), JumpBlock(J), NextBlock(N), |
5646 | 22.2k | ElseBlock(E), Args(std::move(A)), Type(std::move(T)), |
5647 | 22.2k | ReturnPHI(std::move(R)) {} |
5648 | | Control(const Control &) = default; |
5649 | 27.9k | Control(Control &&) = default; |
5650 | | Control &operator=(const Control &) = default; |
5651 | 1.09k | Control &operator=(Control &&) = default; |
5652 | | }; |
5653 | | std::vector<Control> ControlStack; |
5654 | | LLVM::FunctionCallee F; |
5655 | | LLVM::Value ExecCtx; |
5656 | | LLVM::Builder Builder; |
5657 | | }; |
5658 | | |
5659 | | std::vector<LLVM::Value> unpackStruct(LLVM::Builder &Builder, |
5660 | 496 | LLVM::Value Struct) noexcept { |
5661 | 496 | const auto N = Struct.getType().getStructNumElements(); |
5662 | 496 | std::vector<LLVM::Value> Ret; |
5663 | 496 | Ret.reserve(N); |
5664 | 1.89k | for (unsigned I = 0; I < N; ++I) { |
5665 | 1.39k | Ret.push_back(Builder.createExtractValue(Struct, I)); |
5666 | 1.39k | } |
5667 | 496 | return Ret; |
5668 | 496 | } |
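
unpackStruct flattens an aggregate return value into its scalar members with one extractvalue per element. The same pattern written directly against the LLVM-C API, as a hedged sketch rather than the wrapper-based code above:

    #include <llvm-c/Core.h>
    #include <vector>

    // Extract every member of a first-class struct value.
    static std::vector<LLVMValueRef> unpackStructC(LLVMBuilderRef Builder,
                                                   LLVMValueRef Struct) {
      const unsigned N = LLVMCountStructElementTypes(LLVMTypeOf(Struct));
      std::vector<LLVMValueRef> Ret;
      Ret.reserve(N);
      for (unsigned I = 0; I < N; ++I) {
        Ret.push_back(LLVMBuildExtractValue(Builder, Struct, I, ""));
      }
      return Ret;
    }
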
5669 | | |
5670 | | } // namespace |
5671 | | |
5672 | | namespace WasmEdge { |
5673 | | namespace LLVM { |
5674 | | |
5675 | 2.14k | Expect<void> Compiler::checkConfigure() noexcept { |
5676 | 2.14k | if (Conf.hasProposal(Proposal::ExceptionHandling)) { |
5677 | 0 | spdlog::error(ErrCode::Value::InvalidConfigure); |
5678 | 0 | spdlog::error( |
5679 | 0 | " Proposal ExceptionHandling is not yet supported in LLVM backend"); |
5680 | 0 | return Unexpect(ErrCode::Value::InvalidConfigure); |
5681 | 0 | } |
5682 | 2.14k | return {}; |
5683 | 2.14k | } |
5684 | | |
5685 | 2.14k | Expect<Data> Compiler::compile(const AST::Module &Module) noexcept { |
 5686 | | // Check that the module has been validated.
5687 | 2.14k | if (unlikely(!Module.getIsValidated())) { |
5688 | 0 | spdlog::error(ErrCode::Value::NotValidated); |
5689 | 0 | return Unexpect(ErrCode::Value::NotValidated); |
5690 | 0 | } |
5691 | | |
5692 | 2.14k | std::unique_lock Lock(Mutex); |
5693 | 2.14k | spdlog::info("compile start"sv); |
5694 | | |
5695 | 2.14k | LLVM::Core::init(); |
5696 | | |
5697 | 2.14k | LLVM::Data D; |
5698 | 2.14k | auto LLContext = D.extract().LLContext(); |
5699 | 2.14k | auto &LLModule = D.extract().LLModule; |
5700 | 2.14k | LLModule.setTarget(LLVM::getDefaultTargetTriple().unwrap()); |
5701 | 2.14k | LLModule.addFlag(LLVMModuleFlagBehaviorError, "PIC Level"sv, 2); |
5702 | | |
5703 | 2.14k | CompileContext NewContext(LLContext, LLModule, |
5704 | 2.14k | Conf.getCompilerConfigure().isGenericBinary()); |
5705 | 2.14k | struct RAIICleanup { |
5706 | 2.14k | RAIICleanup(CompileContext *&Context, CompileContext &NewContext) |
5707 | 2.14k | : Context(Context) { |
5708 | 2.14k | Context = &NewContext; |
5709 | 2.14k | } |
5710 | 2.14k | ~RAIICleanup() { Context = nullptr; } |
5711 | 2.14k | CompileContext *&Context; |
5712 | 2.14k | }; |
5713 | 2.14k | RAIICleanup Cleanup(Context, NewContext); |
5714 | | |
5715 | | // Compile Function Types |
5716 | 2.14k | compile(Module.getTypeSection()); |
5717 | | // Compile ImportSection |
5718 | 2.14k | compile(Module.getImportSection()); |
5719 | | // Compile GlobalSection |
5720 | 2.14k | compile(Module.getGlobalSection()); |
5721 | | // Compile MemorySection (MemorySec, DataSec) |
5722 | 2.14k | compile(Module.getMemorySection(), Module.getDataSection()); |
5723 | | // Compile TableSection (TableSec, ElemSec) |
5724 | 2.14k | compile(Module.getTableSection(), Module.getElementSection()); |
 5725 | | // Compile Functions in module. (FunctionSec, CodeSec)
5726 | 2.14k | compile(Module.getFunctionSection(), Module.getCodeSection()); |
5727 | | // Compile ExportSection |
5728 | 2.14k | compile(Module.getExportSection()); |
 5729 | | // The StartSection does not need to be compiled.
5730 | | |
5731 | 2.14k | spdlog::info("verify start"sv); |
5732 | 2.14k | LLModule.verify(LLVMPrintMessageAction); |
5733 | | |
5734 | 2.14k | spdlog::info("optimize start"sv); |
5735 | 2.14k | auto &TM = D.extract().TM; |
5736 | 2.14k | { |
5737 | 2.14k | auto Triple = LLModule.getTarget(); |
5738 | 2.14k | auto [TheTarget, ErrorMessage] = LLVM::Target::getFromTriple(Triple); |
5739 | 2.14k | if (ErrorMessage) { |
5740 | 0 | spdlog::error("getFromTriple failed:{}"sv, ErrorMessage.string_view()); |
5741 | 0 | return Unexpect(ErrCode::Value::IllegalPath); |
5742 | 2.14k | } else { |
5743 | 2.14k | std::string CPUName; |
5744 | | #if defined(__riscv) && __riscv_xlen == 64 |
5745 | | CPUName = "generic-rv64"s; |
5746 | | #else |
5747 | 2.14k | if (!Conf.getCompilerConfigure().isGenericBinary()) { |
5748 | 2.14k | CPUName = LLVM::getHostCPUName().string_view(); |
5749 | 2.14k | } else { |
5750 | 0 | CPUName = "generic"s; |
5751 | 0 | } |
5752 | 2.14k | #endif |
5753 | | |
5754 | 2.14k | TM = LLVM::TargetMachine::create( |
5755 | 2.14k | TheTarget, Triple, CPUName.c_str(), |
5756 | 2.14k | LLVM::getHostCPUFeatures().unwrap(), |
5757 | 2.14k | toLLVMCodeGenLevel( |
5758 | 2.14k | Conf.getCompilerConfigure().getOptimizationLevel()), |
5759 | 2.14k | LLVMRelocPIC, LLVMCodeModelDefault); |
5760 | 2.14k | } |
5761 | | |
5762 | | #if LLVM_VERSION_MAJOR >= 13 |
5763 | | auto PBO = LLVM::PassBuilderOptions::create(); |
5764 | | if (auto Error = PBO.runPasses( |
5765 | | LLModule, |
5766 | | toLLVMLevel(Conf.getCompilerConfigure().getOptimizationLevel()), |
5767 | | TM)) { |
5768 | | spdlog::error("{}"sv, Error.message().string_view()); |
5769 | | } |
5770 | | #else |
5771 | 2.14k | auto FP = LLVM::PassManager::createForModule(LLModule); |
5772 | 2.14k | auto MP = LLVM::PassManager::create(); |
5773 | | |
5774 | 2.14k | TM.addAnalysisPasses(MP); |
5775 | 2.14k | TM.addAnalysisPasses(FP); |
5776 | 2.14k | { |
5777 | 2.14k | auto PMB = LLVM::PassManagerBuilder::create(); |
5778 | 2.14k | auto [OptLevel, SizeLevel] = |
5779 | 2.14k | toLLVMLevel(Conf.getCompilerConfigure().getOptimizationLevel()); |
5780 | 2.14k | PMB.setOptLevel(OptLevel); |
5781 | 2.14k | PMB.setSizeLevel(SizeLevel); |
5782 | 2.14k | PMB.populateFunctionPassManager(FP); |
5783 | 2.14k | PMB.populateModulePassManager(MP); |
5784 | 2.14k | } |
5785 | 2.14k | switch (Conf.getCompilerConfigure().getOptimizationLevel()) { |
5786 | 0 | case CompilerConfigure::OptimizationLevel::O0: |
5787 | 0 | case CompilerConfigure::OptimizationLevel::O1: |
5788 | 0 | FP.addTailCallEliminationPass(); |
5789 | 0 | break; |
5790 | 2.14k | default: |
5791 | 2.14k | break; |
5792 | 2.14k | } |
5793 | | |
5794 | 2.14k | FP.initializeFunctionPassManager(); |
5795 | 24.3k | for (auto Fn = LLModule.getFirstFunction(); Fn; Fn = Fn.getNextFunction()) { |
5796 | 22.2k | FP.runFunctionPassManager(Fn); |
5797 | 22.2k | } |
5798 | 2.14k | FP.finalizeFunctionPassManager(); |
5799 | 2.14k | MP.runPassManager(LLModule); |
5800 | 2.14k | #endif |
5801 | 2.14k | } |
5802 | | |
 5803 | | // Set the initializer for the intrinsics table global.
5804 | 2.14k | if (auto IntrinsicsTable = LLModule.getNamedGlobal("intrinsics")) { |
5805 | 1.24k | IntrinsicsTable.setInitializer( |
5806 | 1.24k | LLVM::Value::getConstNull(IntrinsicsTable.getType())); |
5807 | 1.24k | IntrinsicsTable.setGlobalConstant(false); |
5808 | 1.24k | } else { |
5809 | 903 | auto IntrinsicsTableTy = LLVM::Type::getArrayType( |
5810 | 903 | LLContext.getInt8Ty().getPointerTo(), |
5811 | 903 | static_cast<uint32_t>(Executable::Intrinsics::kIntrinsicMax)); |
5812 | 903 | LLModule.addGlobal( |
5813 | 903 | IntrinsicsTableTy.getPointerTo(), false, LLVMExternalLinkage, |
5814 | 903 | LLVM::Value::getConstNull(IntrinsicsTableTy), "intrinsics"); |
5815 | 903 | } |
5816 | | |
5817 | 2.14k | spdlog::info("optimize done"sv); |
5818 | 2.14k | return Expect<Data>{std::move(D)}; |
5819 | 2.14k | } |
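
After IR generation, compile() verifies the module and then optimizes it: on LLVM >= 13 the new pass manager runs a textual pipeline such as "default<O2>", while the PassManagerBuilder path above is the pre-13 fallback. A self-contained sketch of the LLVM >= 13 path using the LLVM-C API directly, with simplified error handling (assumes an already-created module and target machine):

    #include <llvm-c/Analysis.h>
    #include <llvm-c/Core.h>
    #include <llvm-c/Error.h>
    #include <llvm-c/TargetMachine.h>
    #include <llvm-c/Transforms/PassBuilder.h>
    #include <cstdio>

    // Verify a module, then run the new pass manager with an O2 pipeline.
    static void optimizeModule(LLVMModuleRef M, LLVMTargetMachineRef TM) {
      LLVMVerifyModule(M, LLVMPrintMessageAction, nullptr);

      LLVMPassBuilderOptionsRef Opts = LLVMCreatePassBuilderOptions();
      if (LLVMErrorRef Err = LLVMRunPasses(M, "default<O2>", TM, Opts)) {
        char *Msg = LLVMGetErrorMessage(Err); // consumes Err
        std::fprintf(stderr, "optimization failed: %s\n", Msg);
        LLVMDisposeErrorMessage(Msg);
      }
      LLVMDisposePassBuilderOptions(Opts);
    }
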
5820 | | |
5821 | 2.14k | void Compiler::compile(const AST::TypeSection &TypeSec) noexcept { |
5822 | 2.14k | auto WrapperTy = |
5823 | 2.14k | LLVM::Type::getFunctionType(Context->VoidTy, |
5824 | 2.14k | {Context->ExecCtxPtrTy, Context->Int8PtrTy, |
5825 | 2.14k | Context->Int8PtrTy, Context->Int8PtrTy}, |
5826 | 2.14k | false); |
5827 | 2.14k | auto SubTypes = TypeSec.getContent(); |
5828 | 2.14k | const auto Size = SubTypes.size(); |
5829 | 2.14k | if (Size == 0) { |
5830 | 114 | return; |
5831 | 114 | } |
5832 | 2.02k | Context->CompositeTypes.reserve(Size); |
5833 | 2.02k | Context->FunctionWrappers.reserve(Size); |
5834 | | |
5835 | | // Iterate and compile types. |
5836 | 6.35k | for (size_t I = 0; I < Size; ++I) { |
5837 | 4.32k | const auto &CompType = SubTypes[I].getCompositeType(); |
5838 | 4.32k | const auto Name = fmt::format("t{}"sv, Context->CompositeTypes.size()); |
5839 | 4.32k | if (CompType.isFunc()) { |
 5840 | | // Check whether the function type is unique.
5841 | 4.32k | { |
5842 | 4.32k | bool Unique = true; |
5843 | 18.1k | for (size_t J = 0; J < I; ++J) { |
5844 | 13.9k | if (Context->CompositeTypes[J] && |
5845 | 13.9k | Context->CompositeTypes[J]->isFunc()) { |
5846 | 13.9k | const auto &OldFuncType = Context->CompositeTypes[J]->getFuncType(); |
5847 | 13.9k | if (OldFuncType == CompType.getFuncType()) { |
5848 | 132 | Unique = false; |
5849 | 132 | Context->CompositeTypes.push_back(Context->CompositeTypes[J]); |
5850 | 132 | auto F = Context->FunctionWrappers[J]; |
5851 | 132 | Context->FunctionWrappers.push_back(F); |
5852 | 132 | auto A = Context->LLModule.addAlias(WrapperTy, F, Name.c_str()); |
5853 | 132 | A.setLinkage(LLVMExternalLinkage); |
5854 | 132 | A.setVisibility(LLVMProtectedVisibility); |
5855 | 132 | A.setDSOLocal(true); |
5856 | 132 | A.setDLLStorageClass(LLVMDLLExportStorageClass); |
5857 | 132 | break; |
5858 | 132 | } |
5859 | 13.9k | } |
5860 | 13.9k | } |
5861 | 4.32k | if (!Unique) { |
5862 | 132 | continue; |
5863 | 132 | } |
5864 | 4.32k | } |
5865 | | |
5866 | | // Create Wrapper |
5867 | 4.19k | auto F = Context->LLModule.addFunction(WrapperTy, LLVMExternalLinkage, |
5868 | 4.19k | Name.c_str()); |
5869 | 4.19k | { |
5870 | 4.19k | F.setVisibility(LLVMProtectedVisibility); |
5871 | 4.19k | F.setDSOLocal(true); |
5872 | 4.19k | F.setDLLStorageClass(LLVMDLLExportStorageClass); |
5873 | 4.19k | F.addFnAttr(Context->NoStackArgProbe); |
5874 | 4.19k | F.addFnAttr(Context->StrictFP); |
5875 | 4.19k | F.addFnAttr(Context->UWTable); |
5876 | 4.19k | F.addParamAttr(0, Context->ReadOnly); |
5877 | 4.19k | F.addParamAttr(0, Context->NoAlias); |
5878 | 4.19k | F.addParamAttr(1, Context->NoAlias); |
5879 | 4.19k | F.addParamAttr(2, Context->NoAlias); |
5880 | 4.19k | F.addParamAttr(3, Context->NoAlias); |
5881 | | |
5882 | 4.19k | LLVM::Builder Builder(Context->LLContext); |
5883 | 4.19k | Builder.positionAtEnd( |
5884 | 4.19k | LLVM::BasicBlock::create(Context->LLContext, F, "entry")); |
5885 | | |
5886 | 4.19k | auto FTy = toLLVMType(Context->LLContext, Context->ExecCtxPtrTy, |
5887 | 4.19k | CompType.getFuncType()); |
5888 | 4.19k | auto RTy = FTy.getReturnType(); |
5889 | 4.19k | std::vector<LLVM::Type> FPTy(FTy.getNumParams()); |
5890 | 4.19k | FTy.getParamTypes(FPTy); |
5891 | | |
5892 | 4.19k | const size_t ArgCount = FPTy.size() - 1; |
5893 | 4.19k | auto ExecCtxPtr = F.getFirstParam(); |
5894 | 4.19k | auto RawFunc = LLVM::FunctionCallee{ |
5895 | 4.19k | FTy, Builder.createBitCast(ExecCtxPtr.getNextParam(), |
5896 | 4.19k | FTy.getPointerTo())}; |
5897 | 4.19k | auto RawArgs = ExecCtxPtr.getNextParam().getNextParam(); |
5898 | 4.19k | auto RawRets = RawArgs.getNextParam(); |
5899 | | |
5900 | 4.19k | std::vector<LLVM::Value> Args; |
5901 | 4.19k | Args.reserve(FTy.getNumParams()); |
5902 | 4.19k | Args.push_back(ExecCtxPtr); |
5903 | 9.00k | for (size_t J = 0; J < ArgCount; ++J) { |
5904 | 4.81k | Args.push_back(Builder.createValuePtrLoad( |
5905 | 4.81k | FPTy[J + 1], RawArgs, Context->Int8Ty, J * kValSize)); |
5906 | 4.81k | } |
5907 | | |
5908 | 4.19k | auto Ret = Builder.createCall(RawFunc, Args); |
5909 | 4.19k | if (RTy.isVoidTy()) { |
5910 | | // nothing to do |
5911 | 2.87k | } else if (RTy.isStructTy()) { |
5912 | 398 | auto Rets = unpackStruct(Builder, Ret); |
5913 | 398 | Builder.createArrayPtrStore(Rets, RawRets, Context->Int8Ty, kValSize); |
5914 | 2.47k | } else { |
5915 | 2.47k | Builder.createValuePtrStore(Ret, RawRets, Context->Int8Ty); |
5916 | 2.47k | } |
5917 | 4.19k | Builder.createRetVoid(); |
5918 | 4.19k | } |
5919 | | // Copy wrapper, param and return lists to module instance. |
5920 | 4.19k | Context->FunctionWrappers.push_back(F); |
5921 | 4.19k | } else { |
 5922 | | // Non-function type case. Create an empty wrapper.
5923 | 0 | auto F = Context->LLModule.addFunction(WrapperTy, LLVMExternalLinkage, |
5924 | 0 | Name.c_str()); |
5925 | 0 | { |
5926 | 0 | F.setVisibility(LLVMProtectedVisibility); |
5927 | 0 | F.setDSOLocal(true); |
5928 | 0 | F.setDLLStorageClass(LLVMDLLExportStorageClass); |
5929 | 0 | F.addFnAttr(Context->NoStackArgProbe); |
5930 | 0 | F.addFnAttr(Context->StrictFP); |
5931 | 0 | F.addFnAttr(Context->UWTable); |
5932 | 0 | F.addParamAttr(0, Context->ReadOnly); |
5933 | 0 | F.addParamAttr(0, Context->NoAlias); |
5934 | 0 | F.addParamAttr(1, Context->NoAlias); |
5935 | 0 | F.addParamAttr(2, Context->NoAlias); |
5936 | 0 | F.addParamAttr(3, Context->NoAlias); |
 5937 | |
5938 | 0 | LLVM::Builder Builder(Context->LLContext); |
5939 | 0 | Builder.positionAtEnd( |
5940 | 0 | LLVM::BasicBlock::create(Context->LLContext, F, "entry")); |
5941 | 0 | Builder.createRetVoid(); |
5942 | 0 | } |
5943 | 0 | Context->FunctionWrappers.push_back(F); |
5944 | 0 | } |
5945 | 4.19k | Context->CompositeTypes.push_back(&CompType); |
5946 | 4.19k | } |
5947 | 2.02k | } |
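
Each wrapper t{N} adapts the packed, interpreter-style signature (ExecCtx*, function*, args*, rets*) to the native compiled signature: it loads every parameter from a buffer of kValSize-spaced slots, calls the real function, and stores the results back. A hedged sketch of how a host-side caller would use such a wrapper; WrapperFn, ValSlot, and the 16-byte slot size are assumptions for illustration, not the real WasmEdge runtime types:

    #include <cstdint>
    #include <cstring>

    // Hypothetical mirror of the packed calling convention the wrappers expect:
    // each argument and each result occupies one kValSize-sized slot.
    using WrapperFn = void (*)(void *ExecCtx, void *Func, uint8_t *Args,
                               uint8_t *Rets);
    static constexpr uint32_t kValSize = 16; // assumed sizeof(WasmEdge::ValVariant)

    struct alignas(16) ValSlot {
      uint8_t Raw[kValSize];
    };

    // Call an (i32, i32) -> i32 function through its type wrapper.
    static int32_t callViaWrapper(WrapperFn Wrapper, void *ExecCtx, void *Func,
                                  int32_t A, int32_t B) {
      ValSlot Args[2] = {};
      ValSlot Rets[1] = {};
      std::memcpy(Args[0].Raw, &A, sizeof(A)); // slot 0 -> offset 0 * kValSize
      std::memcpy(Args[1].Raw, &B, sizeof(B)); // slot 1 -> offset 1 * kValSize
      Wrapper(ExecCtx, Func, Args[0].Raw, Rets[0].Raw);
      int32_t R = 0;
      std::memcpy(&R, Rets[0].Raw, sizeof(R));
      return R;
    }
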
5948 | | |
5949 | 2.14k | void Compiler::compile(const AST::ImportSection &ImportSec) noexcept { |
5950 | | // Iterate and compile import descriptions. |
5951 | 2.14k | for (const auto &ImpDesc : ImportSec.getContent()) { |
5952 | | // Get data from import description. |
5953 | 304 | const auto &ExtType = ImpDesc.getExternalType(); |
5954 | | |
5955 | | // Add the imports into module instance. |
5956 | 304 | switch (ExtType) { |
5957 | 235 | case ExternalType::Function: // Function type index |
5958 | 235 | { |
5959 | 235 | const auto FuncID = static_cast<uint32_t>(Context->Functions.size()); |
5960 | | // Get the function type index in module. |
5961 | 235 | uint32_t TypeIdx = ImpDesc.getExternalFuncTypeIdx(); |
5962 | 235 | assuming(TypeIdx < Context->CompositeTypes.size()); |
5963 | 235 | assuming(Context->CompositeTypes[TypeIdx]->isFunc()); |
5964 | 235 | const auto &FuncType = Context->CompositeTypes[TypeIdx]->getFuncType(); |
5965 | 235 | auto FTy = |
5966 | 235 | toLLVMType(Context->LLContext, Context->ExecCtxPtrTy, FuncType); |
5967 | 235 | auto RTy = FTy.getReturnType(); |
5968 | 235 | auto F = LLVM::FunctionCallee{ |
5969 | 235 | FTy, |
5970 | 235 | Context->LLModule.addFunction(FTy, LLVMInternalLinkage, |
5971 | 235 | fmt::format("f{}"sv, FuncID).c_str())}; |
5972 | 235 | F.Fn.setDSOLocal(true); |
5973 | 235 | F.Fn.addFnAttr(Context->NoStackArgProbe); |
5974 | 235 | F.Fn.addFnAttr(Context->StrictFP); |
5975 | 235 | F.Fn.addFnAttr(Context->UWTable); |
5976 | 235 | F.Fn.addParamAttr(0, Context->ReadOnly); |
5977 | 235 | F.Fn.addParamAttr(0, Context->NoAlias); |
5978 | | |
5979 | 235 | LLVM::Builder Builder(Context->LLContext); |
5980 | 235 | Builder.positionAtEnd( |
5981 | 235 | LLVM::BasicBlock::create(Context->LLContext, F.Fn, "entry")); |
5982 | | |
5983 | 235 | const auto ArgSize = FuncType.getParamTypes().size(); |
5984 | 235 | const auto RetSize = |
5985 | 235 | RTy.isVoidTy() ? 0 : FuncType.getReturnTypes().size(); |
5986 | | |
5987 | 235 | LLVM::Value Args = Builder.createArray(ArgSize, kValSize); |
5988 | 235 | LLVM::Value Rets = Builder.createArray(RetSize, kValSize); |
5989 | | |
5990 | 235 | auto Arg = F.Fn.getFirstParam(); |
5991 | 333 | for (unsigned I = 0; I < ArgSize; ++I) { |
5992 | 98 | Arg = Arg.getNextParam(); |
5993 | 98 | Builder.createValuePtrStore(Arg, Args, Context->Int8Ty, I * kValSize); |
5994 | 98 | } |
5995 | | |
5996 | 235 | Builder.createCall( |
5997 | 235 | Context->getIntrinsic( |
5998 | 235 | Builder, Executable::Intrinsics::kCall, |
5999 | 235 | LLVM::Type::getFunctionType( |
6000 | 235 | Context->VoidTy, |
6001 | 235 | {Context->Int32Ty, Context->Int8PtrTy, Context->Int8PtrTy}, |
6002 | 235 | false)), |
6003 | 235 | {Context->LLContext.getInt32(FuncID), Args, Rets}); |
6004 | | |
6005 | 235 | if (RetSize == 0) { |
6006 | 129 | Builder.createRetVoid(); |
6007 | 129 | } else if (RetSize == 1) { |
6008 | 87 | Builder.createRet( |
6009 | 87 | Builder.createValuePtrLoad(RTy, Rets, Context->Int8Ty)); |
6010 | 87 | } else { |
6011 | 19 | Builder.createAggregateRet(Builder.createArrayPtrLoad( |
6012 | 19 | RetSize, RTy, Rets, Context->Int8Ty, kValSize)); |
6013 | 19 | } |
6014 | | |
6015 | 235 | Context->Functions.emplace_back(TypeIdx, F, nullptr); |
6016 | 235 | break; |
6017 | 235 | } |
6018 | 27 | case ExternalType::Table: // Table type |
6019 | 27 | { |
6020 | | // Nothing to do. |
6021 | 27 | break; |
6022 | 235 | } |
6023 | 7 | case ExternalType::Memory: // Memory type |
6024 | 7 | { |
6025 | | // Nothing to do. |
6026 | 7 | break; |
6027 | 235 | } |
6028 | 35 | case ExternalType::Global: // Global type |
6029 | 35 | { |
6030 | | // Get global type. External type checked in validation. |
6031 | 35 | const auto &GlobType = ImpDesc.getExternalGlobalType(); |
6032 | 35 | const auto &ValType = GlobType.getValType(); |
6033 | 35 | auto Type = toLLVMType(Context->LLContext, ValType); |
6034 | 35 | Context->Globals.push_back(Type); |
6035 | 35 | break; |
6036 | 235 | } |
6037 | 0 | default: |
6038 | 0 | break; |
6039 | 304 | } |
6040 | 304 | } |
6041 | 2.14k | } |
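
Imported functions get a small trampoline: arguments are spilled into a packed array, the kCall intrinsic is invoked with the function index, and results are read back from the return buffer. The intrinsic itself is reached through the "intrinsics" global created at the end of compile(), conceptually a table of function pointers the runtime fills in before execution. A conceptual sketch only; the actual Executable::Intrinsics enum values and runtime types are not shown here, so all names and indices below are illustrative:

    #include <cstdint>

    // Conceptual shape of the "intrinsics" table: one entry per intrinsic,
    // populated by the host runtime. Index values are illustrative.
    enum class Intrinsic : uint32_t { kCall = 0 /* assumed */ };

    // Matches the void(i32, i8*, i8*) type built for kCall above.
    using CallIntrinsic = void (*)(uint32_t FuncIdx, uint8_t *Args, uint8_t *Rets);

    extern void *const *intrinsics; // set up by the host before running AOT code

    // What the generated import trampoline for f{FuncIdx} effectively does.
    inline void callImport(uint32_t FuncIdx, uint8_t *Args, uint8_t *Rets) {
      auto Fn = reinterpret_cast<CallIntrinsic>(
          intrinsics[static_cast<uint32_t>(Intrinsic::kCall)]);
      Fn(FuncIdx, Args, Rets); // host dispatches to the imported function
    }
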
6042 | | |
6043 | 2.14k | void Compiler::compile(const AST::ExportSection &) noexcept {} |
6044 | | |
6045 | 2.14k | void Compiler::compile(const AST::GlobalSection &GlobalSec) noexcept { |
6046 | 2.14k | for (const auto &GlobalSeg : GlobalSec.getContent()) { |
6047 | 113 | const auto &ValType = GlobalSeg.getGlobalType().getValType(); |
6048 | 113 | auto Type = toLLVMType(Context->LLContext, ValType); |
6049 | 113 | Context->Globals.push_back(Type); |
6050 | 113 | } |
6051 | 2.14k | } |
6052 | | |
6053 | | void Compiler::compile(const AST::MemorySection &, |
6054 | 2.14k | const AST::DataSection &) noexcept {} |
6055 | | |
6056 | | void Compiler::compile(const AST::TableSection &, |
6057 | 2.14k | const AST::ElementSection &) noexcept {} |
6058 | | |
6059 | | void Compiler::compile(const AST::FunctionSection &FuncSec, |
6060 | 2.14k | const AST::CodeSection &CodeSec) noexcept { |
6061 | 2.14k | const auto &TypeIdxs = FuncSec.getContent(); |
6062 | 2.14k | const auto &CodeSegs = CodeSec.getContent(); |
6063 | 2.14k | if (TypeIdxs.size() == 0 || CodeSegs.size() == 0) { |
6064 | 188 | return; |
6065 | 188 | } |
6066 | | |
6067 | 13.6k | for (size_t I = 0; I < TypeIdxs.size() && I < CodeSegs.size(); ++I) { |
6068 | 11.7k | const auto &TypeIdx = TypeIdxs[I]; |
6069 | 11.7k | const auto &Code = CodeSegs[I]; |
6070 | 11.7k | assuming(TypeIdx < Context->CompositeTypes.size()); |
6071 | 11.7k | assuming(Context->CompositeTypes[TypeIdx]->isFunc()); |
6072 | 11.7k | const auto &FuncType = Context->CompositeTypes[TypeIdx]->getFuncType(); |
6073 | 11.7k | const auto FuncID = Context->Functions.size(); |
6074 | 11.7k | auto FTy = toLLVMType(Context->LLContext, Context->ExecCtxPtrTy, FuncType); |
6075 | 11.7k | LLVM::FunctionCallee F = {FTy, Context->LLModule.addFunction( |
6076 | 11.7k | FTy, LLVMExternalLinkage, |
6077 | 11.7k | fmt::format("f{}"sv, FuncID).c_str())}; |
6078 | 11.7k | F.Fn.setVisibility(LLVMProtectedVisibility); |
6079 | 11.7k | F.Fn.setDSOLocal(true); |
6080 | 11.7k | F.Fn.setDLLStorageClass(LLVMDLLExportStorageClass); |
6081 | 11.7k | F.Fn.addFnAttr(Context->NoStackArgProbe); |
6082 | 11.7k | F.Fn.addFnAttr(Context->StrictFP); |
6083 | 11.7k | F.Fn.addFnAttr(Context->UWTable); |
6084 | 11.7k | F.Fn.addParamAttr(0, Context->ReadOnly); |
6085 | 11.7k | F.Fn.addParamAttr(0, Context->NoAlias); |
6086 | | |
6087 | 11.7k | Context->Functions.emplace_back(TypeIdx, F, &Code); |
6088 | 11.7k | } |
6089 | | |
6090 | 11.8k | for (auto [T, F, Code] : Context->Functions) { |
6091 | 11.8k | if (!Code) { |
6092 | 103 | continue; |
6093 | 103 | } |
6094 | | |
6095 | 11.7k | std::vector<ValType> Locals; |
6096 | 11.7k | for (const auto &Local : Code->getLocals()) { |
6097 | 2.43M | for (unsigned I = 0; I < Local.first; ++I) { |
6098 | 2.43M | Locals.push_back(Local.second); |
6099 | 2.43M | } |
6100 | 1.83k | } |
6101 | 11.7k | FunctionCompiler FC(*Context, F, Locals, |
6102 | 11.7k | Conf.getCompilerConfigure().isInterruptible(), |
6103 | 11.7k | Conf.getStatisticsConfigure().isInstructionCounting(), |
6104 | 11.7k | Conf.getStatisticsConfigure().isCostMeasuring()); |
6105 | 11.7k | auto Type = Context->resolveBlockType(T); |
6106 | 11.7k | FC.compile(*Code, std::move(Type)); |
6107 | 11.7k | F.Fn.eliminateUnreachableBlocks(); |
6108 | 11.7k | } |
6109 | 1.95k | } |
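
Function bodies declare locals as run-length pairs (count, type); the loop above expands them into one flat vector before handing the body to FunctionCompiler. A tiny self-contained illustration of that expansion, with plain int standing in for WasmEdge::ValType:

    #include <cstdint>
    #include <utility>
    #include <vector>

    // Expand Wasm-style (count, type) local declarations into one entry per
    // local. 'int' stands in for ValType in this sketch.
    static std::vector<int>
    expandLocals(const std::vector<std::pair<uint32_t, int>> &Decls) {
      std::vector<int> Locals;
      for (const auto &Decl : Decls) {
        Locals.insert(Locals.end(), Decl.first, Decl.second);
      }
      return Locals;
    }

    // Example: {(2, i32), (1, f64)} expands to {i32, i32, f64}.
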
6110 | | |
6111 | | } // namespace LLVM |
6112 | | } // namespace WasmEdge |