/src/WasmEdge/lib/executor/helper.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | // SPDX-License-Identifier: Apache-2.0 |
2 | | // SPDX-FileCopyrightText: 2019-2024 Second State INC |
3 | | |
#include "executor/executor.h"

#include "common/spdlog.h"
#include "system/fault.h"
#include "system/stacktrace.h"

#include <array>
#include <cstdint>
#include <utility>
#include <vector>
13 | | |
14 | | namespace WasmEdge { |
15 | | namespace Executor { |
16 | | |
Executor::SavedThreadLocal::SavedThreadLocal(
    Executor &Ex, Runtime::StackManager &StackMgr,
    const Runtime::Instance::FunctionInstance &Func) noexcept {
  // RAII guard: swap the executor's thread-local execution state (This,
  // ExecutionContext, CurrentStack) to point at `Ex`/`StackMgr`/`Func`'s
  // module, saving the previous values so the destructor can restore them.
  // Prepare the execution context.
  // NOTE(review): const_cast is needed because Func.getModule() returns a
  // const pointer but the execution context stores mutable pointer arrays.
  auto *ModInst =
      const_cast<Runtime::Instance::ModuleInstance *>(Func.getModule());
  // Save and redirect the thread-local executor pointer.
  SavedThis = This;
  This = &Ex;

  // Save and rebuild the thread-local execution context for compiled code:
  // stop token, memory/global pointer tables, and (optionally) statistics.
  SavedExecutionContext = ExecutionContext;
  ExecutionContext.StopToken = &Ex.StopToken;
  ExecutionContext.Memories = ModInst->MemoryPtrs.data();
  ExecutionContext.Globals = ModInst->GlobalPtrs.data();
  if (Ex.Stat) {
    // Wire instruction counting and gas metering into the context only when
    // statistics are enabled.
    ExecutionContext.InstrCount = &Ex.Stat->getInstrCountRef();
    ExecutionContext.CostTable = Ex.Stat->getCostTable().data();
    ExecutionContext.Gas = &Ex.Stat->getTotalCostRef();
    ExecutionContext.GasLimit = Ex.Stat->getCostLimit();
  }

  // Save and redirect the thread-local stack manager pointer.
  SavedCurrentStack = CurrentStack;
  CurrentStack = &StackMgr;
}
40 | | |
Executor::SavedThreadLocal::~SavedThreadLocal() noexcept {
  // Restore the thread-local state in reverse order of the constructor's
  // saves so that nested SavedThreadLocal guards unwind correctly (LIFO).
  CurrentStack = SavedCurrentStack;
  ExecutionContext = SavedExecutionContext;
  This = SavedThis;
}
46 | | |
47 | | Expect<AST::InstrView::iterator> |
48 | | Executor::enterFunction(Runtime::StackManager &StackMgr, |
49 | | const Runtime::Instance::FunctionInstance &Func, |
50 | 0 | const AST::InstrView::iterator RetIt, bool IsTailCall) { |
51 | | // RetIt: the return position when the entered function returns. |
52 | | |
53 | | // Check if the interruption occurs. |
54 | 0 | if (unlikely(StopToken.exchange(0, std::memory_order_relaxed))) { |
55 | 0 | spdlog::error(ErrCode::Value::Interrupted); |
56 | 0 | return Unexpect(ErrCode::Value::Interrupted); |
57 | 0 | } |
58 | | |
59 | | // Get function type for the params and returns num. |
60 | 0 | const auto &FuncType = Func.getFuncType(); |
61 | 0 | const uint32_t ArgsN = static_cast<uint32_t>(FuncType.getParamTypes().size()); |
62 | 0 | const uint32_t RetsN = |
63 | 0 | static_cast<uint32_t>(FuncType.getReturnTypes().size()); |
64 | | |
65 | | // For the exception handler, remove the inactive handlers caused by the |
66 | | // branches. |
67 | 0 | StackMgr.removeInactiveHandler(RetIt - 1); |
68 | |
|
69 | 0 | if (Func.isHostFunction()) { |
70 | | // Host function case: Push args and call function. |
71 | 0 | auto &HostFunc = Func.getHostFunc(); |
72 | | |
73 | | // Generate CallingFrame from current frame. |
74 | | // The module instance will be nullptr if current frame is a dummy frame. |
75 | | // For this case, use the module instance of this host function. |
76 | 0 | const auto *ModInst = StackMgr.getModule(); |
77 | 0 | if (ModInst == nullptr) { |
78 | 0 | ModInst = Func.getModule(); |
79 | 0 | } |
80 | 0 | Runtime::CallingFrame CallFrame(this, ModInst); |
81 | | |
82 | | // Push frame. |
83 | 0 | StackMgr.pushFrame(Func.getModule(), // Module instance |
84 | 0 | RetIt, // Return PC |
85 | 0 | ArgsN, // Only args, no locals in stack |
86 | 0 | RetsN, // Returns num |
87 | 0 | IsTailCall // For tail-call |
88 | 0 | ); |
89 | | |
90 | | // Do the statistics if the statistics turned on. |
91 | 0 | if (Stat) { |
92 | | // Check host function cost. |
93 | 0 | if (unlikely(!Stat->addCost(HostFunc.getCost()))) { |
94 | 0 | spdlog::error(ErrCode::Value::CostLimitExceeded); |
95 | 0 | return Unexpect(ErrCode::Value::CostLimitExceeded); |
96 | 0 | } |
97 | | // Start recording time of running host function. |
98 | 0 | Stat->stopRecordWasm(); |
99 | 0 | Stat->startRecordHost(); |
100 | 0 | } |
101 | | |
102 | | // Call pre-host-function |
103 | 0 | HostFuncHelper.invokePreHostFunc(); |
104 | | |
105 | | // Run host function. |
106 | 0 | Span<ValVariant> Args = StackMgr.getTopSpan(ArgsN); |
107 | 0 | for (uint32_t I = 0; I < ArgsN; I++) { |
108 | | // For the number type cases of the arguments, the unused bits should be |
109 | | // erased due to the security issue. |
110 | 0 | cleanNumericVal(Args[I], FuncType.getParamTypes()[I]); |
111 | 0 | } |
112 | 0 | std::vector<ValVariant> Rets(RetsN); |
113 | 0 | auto Ret = HostFunc.run(CallFrame, std::move(Args), Rets); |
114 | | |
115 | | // Call post-host-function |
116 | 0 | HostFuncHelper.invokePostHostFunc(); |
117 | | |
118 | | // Do the statistics if the statistics turned on. |
119 | 0 | if (Stat) { |
120 | | // Stop recording time of running host function. |
121 | 0 | Stat->stopRecordHost(); |
122 | 0 | Stat->startRecordWasm(); |
123 | 0 | } |
124 | | |
125 | | // Check the host function execution status. |
126 | 0 | if (!Ret) { |
127 | 0 | if (Ret.error() == ErrCode::Value::HostFuncError || |
128 | 0 | Ret.error().getCategory() != ErrCategory::WASM) { |
129 | 0 | spdlog::error(Ret.error()); |
130 | 0 | } |
131 | 0 | return Unexpect(Ret); |
132 | 0 | } |
133 | | |
134 | | // Push returns back to stack. |
135 | 0 | for (auto &R : Rets) { |
136 | 0 | StackMgr.push(std::move(R)); |
137 | 0 | } |
138 | | |
139 | | // For host function case, the continuation will be the continuation from |
140 | | // the popped frame. |
141 | 0 | return StackMgr.popFrame(); |
142 | 0 | } else if (Func.isCompiledFunction()) { |
143 | | // Compiled function case: Execute the function and jump to the |
144 | | // continuation. |
145 | | |
146 | | // Push frame. |
147 | 0 | StackMgr.pushFrame(Func.getModule(), // Module instance |
148 | 0 | RetIt, // Return PC |
149 | 0 | ArgsN, // Only args, no locals in stack |
150 | 0 | RetsN, // Returns num |
151 | 0 | IsTailCall // For tail-call |
152 | 0 | ); |
153 | | |
154 | | // Prepare arguments. |
155 | 0 | Span<ValVariant> Args = StackMgr.getTopSpan(ArgsN); |
156 | 0 | std::vector<ValVariant> Rets(RetsN); |
157 | 0 | SavedThreadLocal Saved(*this, StackMgr, Func); |
158 | |
|
159 | 0 | ErrCode Err; |
160 | 0 | try { |
161 | | // Get symbol and execute the function. |
162 | 0 | Fault FaultHandler; |
163 | 0 | uint32_t Code = PREPARE_FAULT(FaultHandler); |
164 | 0 | if (Code != 0) { |
165 | 0 | auto InnerStackTrace = FaultHandler.stacktrace(); |
166 | 0 | { |
167 | 0 | std::array<void *, 256> Buffer; |
168 | 0 | auto OuterStackTrace = stackTrace(Buffer); |
169 | 0 | while (!OuterStackTrace.empty() && !InnerStackTrace.empty() && |
170 | 0 | InnerStackTrace[InnerStackTrace.size() - 1] == |
171 | 0 | OuterStackTrace[OuterStackTrace.size() - 1]) { |
172 | 0 | InnerStackTrace = InnerStackTrace.first(InnerStackTrace.size() - 1); |
173 | 0 | OuterStackTrace = OuterStackTrace.first(OuterStackTrace.size() - 1); |
174 | 0 | } |
175 | 0 | } |
176 | 0 | StackTraceSize = |
177 | 0 | compiledStackTrace(StackMgr, InnerStackTrace, StackTrace).size(); |
178 | 0 | Err = ErrCode(static_cast<ErrCategory>(Code >> 24), Code); |
179 | 0 | } else { |
180 | 0 | auto &Wrapper = FuncType.getSymbol(); |
181 | 0 | Wrapper(&ExecutionContext, Func.getSymbol().get(), Args.data(), |
182 | 0 | Rets.data()); |
183 | 0 | } |
184 | 0 | } catch (const ErrCode &E) { |
185 | 0 | Err = E; |
186 | 0 | } |
187 | 0 | if (unlikely(Err)) { |
188 | 0 | if (Err != ErrCode::Value::Terminated) { |
189 | 0 | spdlog::error(Err); |
190 | 0 | } |
191 | 0 | StackTraceSize += |
192 | 0 | interpreterStackTrace( |
193 | 0 | StackMgr, Span<uint32_t>{StackTrace}.subspan(StackTraceSize)) |
194 | 0 | .size(); |
195 | 0 | return Unexpect(Err); |
196 | 0 | } |
197 | | |
198 | | // Push returns back to stack. |
199 | 0 | for (uint32_t I = 0; I < Rets.size(); ++I) { |
200 | 0 | StackMgr.push(Rets[I]); |
201 | 0 | } |
202 | | |
203 | | // For compiled function case, the continuation will be the continuation |
204 | | // from the popped frame. |
205 | 0 | return StackMgr.popFrame(); |
206 | 0 | } else { |
207 | | // Native function case: Jump to the start of the function body. |
208 | | |
209 | | // Push local variables into the stack. |
210 | 0 | for (auto &Def : Func.getLocals()) { |
211 | 0 | for (uint32_t I = 0; I < Def.first; I++) { |
212 | 0 | StackMgr.push(ValueFromType(Def.second)); |
213 | 0 | } |
214 | 0 | } |
215 | | |
216 | | // Push frame. |
217 | | // The PC must -1 here because in the interpreter mode execution, the PC |
218 | | // will increase after the callee return. |
219 | 0 | StackMgr.pushFrame(Func.getModule(), // Module instance |
220 | 0 | RetIt - 1, // Return PC |
221 | 0 | ArgsN + Func.getLocalNum(), // Arguments num + local num |
222 | 0 | RetsN, // Returns num |
223 | 0 | IsTailCall // For tail-call |
224 | 0 | ); |
225 | | |
226 | | // For native function case, the continuation will be the start of the |
227 | | // function body. |
228 | 0 | return Func.getInstrs().begin(); |
229 | 0 | } |
230 | 0 | } |
231 | | |
232 | | Expect<void> |
233 | | Executor::branchToLabel(Runtime::StackManager &StackMgr, |
234 | | const AST::Instruction::JumpDescriptor &JumpDesc, |
235 | 0 | AST::InstrView::iterator &PC) noexcept { |
236 | | // Check the stop token. |
237 | 0 | if (unlikely(StopToken.exchange(0, std::memory_order_relaxed))) { |
238 | 0 | spdlog::error(ErrCode::Value::Interrupted); |
239 | 0 | return Unexpect(ErrCode::Value::Interrupted); |
240 | 0 | } |
241 | | |
242 | 0 | StackMgr.eraseValueStack(JumpDesc.StackEraseBegin, JumpDesc.StackEraseEnd); |
243 | | // PC need to -1 here because the PC will increase in the next iteration. |
244 | 0 | PC += (JumpDesc.PCOffset - 1); |
245 | 0 | return {}; |
246 | 0 | } |
247 | | |
248 | | Expect<void> Executor::throwException(Runtime::StackManager &StackMgr, |
249 | | Runtime::Instance::TagInstance &TagInst, |
250 | 0 | AST::InstrView::iterator &PC) noexcept { |
251 | 0 | StackMgr.removeInactiveHandler(PC); |
252 | 0 | auto AssocValSize = TagInst.getTagType().getAssocValSize(); |
253 | 0 | while (true) { |
254 | | // Pop the top handler. |
255 | 0 | auto Handler = StackMgr.popTopHandler(AssocValSize); |
256 | 0 | if (!Handler.has_value()) { |
257 | 0 | break; |
258 | 0 | } |
259 | | // Checking through the catch clause. |
260 | 0 | for (const auto &C : Handler->CatchClause) { |
261 | 0 | if (!C.IsAll && getTagInstByIdx(StackMgr, C.TagIndex) != &TagInst) { |
262 | | // For catching a specific tag, should check the equivalence of tag |
263 | | // address. |
264 | 0 | continue; |
265 | 0 | } |
266 | 0 | if (C.IsRef) { |
267 | | // For catching a exception reference, push the reference value onto |
268 | | // stack. |
269 | 0 | StackMgr.push( |
270 | 0 | RefVariant(ValType(TypeCode::Ref, TypeCode::ExnRef), &TagInst)); |
271 | 0 | } |
272 | | // When being here, an exception is caught. Move the PC to the try block |
273 | | // and branch to the label. |
274 | |
|
275 | 0 | PC = Handler->Try; |
276 | 0 | return branchToLabel(StackMgr, C.Jump, PC); |
277 | 0 | } |
278 | 0 | } |
279 | 0 | spdlog::error(ErrCode::Value::UncaughtException); |
280 | 0 | return Unexpect(ErrCode::Value::UncaughtException); |
281 | 0 | } |
282 | | |
283 | | const AST::SubType *Executor::getDefTypeByIdx(Runtime::StackManager &StackMgr, |
284 | 0 | const uint32_t Idx) const { |
285 | 0 | const auto *ModInst = StackMgr.getModule(); |
286 | | // When top frame is dummy frame, cannot find instance. |
287 | 0 | if (unlikely(ModInst == nullptr)) { |
288 | 0 | return nullptr; |
289 | 0 | } |
290 | 0 | return ModInst->unsafeGetType(Idx); |
291 | 0 | } |
292 | | |
293 | | const WasmEdge::AST::CompositeType & |
294 | | Executor::getCompositeTypeByIdx(Runtime::StackManager &StackMgr, |
295 | 0 | const uint32_t Idx) const noexcept { |
296 | 0 | auto *DefType = getDefTypeByIdx(StackMgr, Idx); |
297 | 0 | assuming(DefType); |
298 | 0 | const auto &CompType = DefType->getCompositeType(); |
299 | 0 | assuming(!CompType.isFunc()); |
300 | 0 | return CompType; |
301 | 0 | } |
302 | | |
303 | | const ValType & |
304 | | Executor::getStructStorageTypeByIdx(Runtime::StackManager &StackMgr, |
305 | | const uint32_t Idx, |
306 | 0 | const uint32_t Off) const noexcept { |
307 | 0 | const auto &CompType = getCompositeTypeByIdx(StackMgr, Idx); |
308 | 0 | assuming(static_cast<uint32_t>(CompType.getFieldTypes().size()) > Off); |
309 | 0 | return CompType.getFieldTypes()[Off].getStorageType(); |
310 | 0 | } |
311 | | |
312 | | const ValType & |
313 | | Executor::getArrayStorageTypeByIdx(Runtime::StackManager &StackMgr, |
314 | 0 | const uint32_t Idx) const noexcept { |
315 | 0 | const auto &CompType = getCompositeTypeByIdx(StackMgr, Idx); |
316 | 0 | assuming(static_cast<uint32_t>(CompType.getFieldTypes().size()) == 1); |
317 | 0 | return CompType.getFieldTypes()[0].getStorageType(); |
318 | 0 | } |
319 | | |
320 | | Runtime::Instance::FunctionInstance * |
321 | | Executor::getFuncInstByIdx(Runtime::StackManager &StackMgr, |
322 | 0 | const uint32_t Idx) const { |
323 | 0 | const auto *ModInst = StackMgr.getModule(); |
324 | | // When top frame is dummy frame, cannot find instance. |
325 | 0 | if (unlikely(ModInst == nullptr)) { |
326 | 0 | return nullptr; |
327 | 0 | } |
328 | 0 | return ModInst->unsafeGetFunction(Idx); |
329 | 0 | } |
330 | | |
331 | | Runtime::Instance::TableInstance * |
332 | | Executor::getTabInstByIdx(Runtime::StackManager &StackMgr, |
333 | 0 | const uint32_t Idx) const { |
334 | 0 | const auto *ModInst = StackMgr.getModule(); |
335 | | // When top frame is dummy frame, cannot find instance. |
336 | 0 | if (unlikely(ModInst == nullptr)) { |
337 | 0 | return nullptr; |
338 | 0 | } |
339 | 0 | return ModInst->unsafeGetTable(Idx); |
340 | 0 | } |
341 | | |
342 | | Runtime::Instance::MemoryInstance * |
343 | | Executor::getMemInstByIdx(Runtime::StackManager &StackMgr, |
344 | 0 | const uint32_t Idx) const { |
345 | 0 | const auto *ModInst = StackMgr.getModule(); |
346 | | // When top frame is dummy frame, cannot find instance. |
347 | 0 | if (unlikely(ModInst == nullptr)) { |
348 | 0 | return nullptr; |
349 | 0 | } |
350 | 0 | return ModInst->unsafeGetMemory(Idx); |
351 | 0 | } |
352 | | |
353 | | Runtime::Instance::TagInstance * |
354 | | Executor::getTagInstByIdx(Runtime::StackManager &StackMgr, |
355 | 0 | const uint32_t Idx) const { |
356 | 0 | const auto *ModInst = StackMgr.getModule(); |
357 | | // When top frame is dummy frame, cannot find instance. |
358 | 0 | if (unlikely(ModInst == nullptr)) { |
359 | 0 | return nullptr; |
360 | 0 | } |
361 | 0 | return ModInst->unsafeGetTag(Idx); |
362 | 0 | } |
363 | | |
364 | | Runtime::Instance::GlobalInstance * |
365 | | Executor::getGlobInstByIdx(Runtime::StackManager &StackMgr, |
366 | 0 | const uint32_t Idx) const { |
367 | 0 | const auto *ModInst = StackMgr.getModule(); |
368 | | // When top frame is dummy frame, cannot find instance. |
369 | 0 | if (unlikely(ModInst == nullptr)) { |
370 | 0 | return nullptr; |
371 | 0 | } |
372 | 0 | return ModInst->unsafeGetGlobal(Idx); |
373 | 0 | } |
374 | | |
375 | | Runtime::Instance::ElementInstance * |
376 | | Executor::getElemInstByIdx(Runtime::StackManager &StackMgr, |
377 | 0 | const uint32_t Idx) const { |
378 | 0 | const auto *ModInst = StackMgr.getModule(); |
379 | | // When top frame is dummy frame, cannot find instance. |
380 | 0 | if (unlikely(ModInst == nullptr)) { |
381 | 0 | return nullptr; |
382 | 0 | } |
383 | 0 | return ModInst->unsafeGetElem(Idx); |
384 | 0 | } |
385 | | |
386 | | Runtime::Instance::DataInstance * |
387 | | Executor::getDataInstByIdx(Runtime::StackManager &StackMgr, |
388 | 0 | const uint32_t Idx) const { |
389 | 0 | const auto *ModInst = StackMgr.getModule(); |
390 | | // When top frame is dummy frame, cannot find instance. |
391 | 0 | if (unlikely(ModInst == nullptr)) { |
392 | 0 | return nullptr; |
393 | 0 | } |
394 | 0 | return ModInst->unsafeGetData(Idx); |
395 | 0 | } |
396 | | |
397 | | TypeCode Executor::toBottomType(Runtime::StackManager &StackMgr, |
398 | 0 | const ValType &Type) const { |
399 | 0 | if (Type.isRefType()) { |
400 | 0 | if (Type.isAbsHeapType()) { |
401 | 0 | switch (Type.getHeapTypeCode()) { |
402 | 0 | case TypeCode::NullFuncRef: |
403 | 0 | case TypeCode::FuncRef: |
404 | 0 | return TypeCode::NullFuncRef; |
405 | 0 | case TypeCode::NullExternRef: |
406 | 0 | case TypeCode::ExternRef: |
407 | 0 | return TypeCode::NullExternRef; |
408 | 0 | case TypeCode::NullRef: |
409 | 0 | case TypeCode::AnyRef: |
410 | 0 | case TypeCode::EqRef: |
411 | 0 | case TypeCode::I31Ref: |
412 | 0 | case TypeCode::StructRef: |
413 | 0 | case TypeCode::ArrayRef: |
414 | 0 | return TypeCode::NullRef; |
415 | 0 | case TypeCode::ExnRef: |
416 | 0 | return TypeCode::ExnRef; |
417 | 0 | default: |
418 | 0 | assumingUnreachable(); |
419 | 0 | } |
420 | 0 | } else { |
421 | 0 | const auto &CompType = |
422 | 0 | (*StackMgr.getModule()->getType(Type.getTypeIndex())) |
423 | 0 | ->getCompositeType(); |
424 | 0 | if (CompType.isFunc()) { |
425 | 0 | return TypeCode::NullFuncRef; |
426 | 0 | } else { |
427 | 0 | return TypeCode::NullRef; |
428 | 0 | } |
429 | 0 | } |
430 | 0 | } else { |
431 | 0 | return Type.getCode(); |
432 | 0 | } |
433 | 0 | } |
434 | | |
435 | | void Executor::cleanNumericVal(ValVariant &Val, |
436 | 0 | const ValType &Type) const noexcept { |
437 | 0 | if (Type.isNumType()) { |
438 | 0 | switch (Type.getCode()) { |
439 | 0 | case TypeCode::I32: { |
440 | 0 | uint32_t V = Val.get<uint32_t>(); |
441 | 0 | Val.emplace<uint128_t>(static_cast<uint128_t>(0U)); |
442 | 0 | Val.emplace<uint32_t>(V); |
443 | 0 | break; |
444 | 0 | } |
445 | 0 | case TypeCode::F32: { |
446 | 0 | float V = Val.get<float>(); |
447 | 0 | Val.emplace<uint128_t>(static_cast<uint128_t>(0U)); |
448 | 0 | Val.emplace<float>(V); |
449 | 0 | break; |
450 | 0 | } |
451 | 0 | case TypeCode::I64: { |
452 | 0 | uint64_t V = Val.get<uint64_t>(); |
453 | 0 | Val.emplace<uint128_t>(static_cast<uint128_t>(0U)); |
454 | 0 | Val.emplace<uint64_t>(V); |
455 | 0 | break; |
456 | 0 | } |
457 | 0 | case TypeCode::F64: { |
458 | 0 | double V = Val.get<double>(); |
459 | 0 | Val.emplace<uint128_t>(static_cast<uint128_t>(0U)); |
460 | 0 | Val.emplace<double>(V); |
461 | 0 | break; |
462 | 0 | } |
463 | 0 | default: |
464 | 0 | break; |
465 | 0 | } |
466 | 0 | } |
467 | 0 | } |
468 | | |
469 | | ValVariant Executor::packVal(const ValType &Type, |
470 | 0 | const ValVariant &Val) const noexcept { |
471 | 0 | if (Type.isPackType()) { |
472 | 0 | switch (Type.getCode()) { |
473 | 0 | case TypeCode::I8: |
474 | 0 | return ValVariant(Val.get<uint32_t>() & 0xFFU); |
475 | 0 | case TypeCode::I16: |
476 | 0 | return ValVariant(Val.get<uint32_t>() & 0xFFFFU); |
477 | 0 | default: |
478 | 0 | assumingUnreachable(); |
479 | 0 | } |
480 | 0 | } |
481 | 0 | return Val; |
482 | 0 | } |
483 | | |
484 | | std::vector<ValVariant> |
485 | | Executor::packVals(const ValType &Type, |
486 | 0 | std::vector<ValVariant> &&Vals) const noexcept { |
487 | 0 | for (uint32_t I = 0; I < Vals.size(); I++) { |
488 | 0 | Vals[I] = packVal(Type, Vals[I]); |
489 | 0 | } |
490 | 0 | return std::move(Vals); |
491 | 0 | } |
492 | | |
493 | | ValVariant Executor::unpackVal(const ValType &Type, const ValVariant &Val, |
494 | 0 | bool IsSigned) const noexcept { |
495 | 0 | if (Type.isPackType()) { |
496 | 0 | uint32_t Num = Val.get<uint32_t>(); |
497 | 0 | switch (Type.getCode()) { |
498 | 0 | case TypeCode::I8: |
499 | 0 | if (IsSigned) { |
500 | 0 | return static_cast<uint32_t>(static_cast<int8_t>(Num)); |
501 | 0 | } else { |
502 | 0 | return static_cast<uint32_t>(static_cast<uint8_t>(Num)); |
503 | 0 | } |
504 | 0 | case TypeCode::I16: |
505 | 0 | if (IsSigned) { |
506 | 0 | return static_cast<uint32_t>(static_cast<int16_t>(Num)); |
507 | 0 | } else { |
508 | 0 | return static_cast<uint32_t>(static_cast<uint16_t>(Num)); |
509 | 0 | } |
510 | 0 | default: |
511 | 0 | assumingUnreachable(); |
512 | 0 | } |
513 | 0 | } |
514 | 0 | return Val; |
515 | 0 | } |
516 | | } // namespace Executor |
517 | | } // namespace WasmEdge |