/src/WasmEdge/lib/executor/helper.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | // SPDX-License-Identifier: Apache-2.0 |
2 | | // SPDX-FileCopyrightText: 2019-2024 Second State INC |
3 | | |
4 | | #include "executor/executor.h" |
5 | | |
6 | | #include "common/spdlog.h" |
7 | | #include "system/fault.h" |
8 | | #include "system/stacktrace.h" |
9 | | |
10 | | #include <cstdint> |
11 | | #include <utility> |
12 | | #include <vector> |
13 | | |
14 | | namespace WasmEdge { |
15 | | namespace Executor { |
16 | | |
// RAII guard that swaps the executor's thread-local execution state (current
// executor pointer, execution context, and stack manager) to the given
// function's module, saving the previous values so the destructor can restore
// them. Used around compiled-function invocations to support re-entrancy.
Executor::SavedThreadLocal::SavedThreadLocal(
    Executor &Ex, Runtime::StackManager &StackMgr,
    const Runtime::Instance::FunctionInstance &Func) noexcept {
  // Prepare the execution context.
  // NOTE: const_cast is needed because MemoryPtrs/GlobalPtrs are accessed
  // mutably by the compiled code through the execution context.
  auto *ModInst =
      const_cast<Runtime::Instance::ModuleInstance *>(Func.getModule());
  // Save and replace the thread-local "current executor" pointer.
  SavedThis = This;
  This = &Ex;

  // Save the whole thread-local execution context, then point it at this
  // executor's stop token and the callee module's memory/global tables.
  SavedExecutionContext = ExecutionContext;
  ExecutionContext.StopToken = &Ex.StopToken;
  ExecutionContext.Memories = ModInst->MemoryPtrs.data();
  ExecutionContext.Globals = ModInst->GlobalPtrs.data();
  if (Ex.Stat) {
    // Statistics enabled: wire instruction counting and gas metering into the
    // context so compiled code can update them directly.
    ExecutionContext.InstrCount = &Ex.Stat->getInstrCountRef();
    ExecutionContext.CostTable = Ex.Stat->getCostTable().data();
    ExecutionContext.Gas = &Ex.Stat->getTotalCostRef();
    ExecutionContext.GasLimit = Ex.Stat->getCostLimit();
  }

  // Save and replace the thread-local current stack manager.
  SavedCurrentStack = CurrentStack;
  CurrentStack = &StackMgr;
}
40 | | |
// Restore the saved thread-local state in reverse order of the constructor's
// assignments, returning the thread to its pre-call executor/context/stack.
Executor::SavedThreadLocal::~SavedThreadLocal() noexcept {
  CurrentStack = SavedCurrentStack;
  ExecutionContext = SavedExecutionContext;
  This = SavedThis;
}
46 | | |
// Enter a function call: push the callee frame onto the stack manager and
// return the iterator at which execution should continue.
//
// Dispatches on the function kind:
//  - Host function: runs the host callback synchronously and returns the
//    continuation of the popped frame.
//  - Compiled (AOT/JIT) function: invokes the native symbol under a fault
//    handler and returns the continuation of the popped frame.
//  - Interpreted (native wasm) function: pushes locals and the frame, then
//    returns the start of the function body for the interpreter loop.
//
// StackMgr:   the stack manager of the current execution flow.
// Func:       the callee function instance.
// RetIt:      the return position when the entered function returns.
// IsTailCall: whether this call replaces the current frame (tail call).
// Returns the next instruction iterator, or an error code on failure.
Expect<AST::InstrView::iterator>
Executor::enterFunction(Runtime::StackManager &StackMgr,
                        const Runtime::Instance::FunctionInstance &Func,
                        const AST::InstrView::iterator RetIt, bool IsTailCall) {
  // RetIt: the return position when the entered function returns.

  // Check if the interruption occurs.
  // exchange(0) both reads and clears the token so the interrupt fires once.
  if (unlikely(StopToken.exchange(0, std::memory_order_relaxed))) {
    spdlog::error(ErrCode::Value::Interrupted);
    return Unexpect(ErrCode::Value::Interrupted);
  }

  // Get function type for the params and returns num.
  const auto &FuncType = Func.getFuncType();
  const uint32_t ArgsN = static_cast<uint32_t>(FuncType.getParamTypes().size());
  const uint32_t RetsN =
      static_cast<uint32_t>(FuncType.getReturnTypes().size());

  // For the exception handler, remove the inactive handlers caused by the
  // branches.
  if (likely(RetIt)) {
    StackMgr.removeInactiveHandler(RetIt - 1);
  }

  if (Func.isHostFunction()) {
    // Host function case: Push args and call function.
    auto &HostFunc = Func.getHostFunc();

    // Generate CallingFrame from current frame.
    // The module instance will be nullptr if current frame is a dummy frame.
    // For this case, use the module instance of this host function.
    const auto *ModInst = StackMgr.getModule();
    if (ModInst == nullptr) {
      ModInst = Func.getModule();
    }
    Runtime::CallingFrame CallFrame(this, ModInst);

    // Push frame.
    StackMgr.pushFrame(Func.getModule(), // Module instance
                       RetIt,            // Return PC
                       ArgsN,            // Only args, no locals in stack
                       RetsN,            // Returns num
                       IsTailCall        // For tail-call
    );

    // Do the statistics if the statistics turned on.
    if (Stat) {
      // Check host function cost.
      if (unlikely(!Stat->addCost(HostFunc.getCost()))) {
        spdlog::error(ErrCode::Value::CostLimitExceeded);
        return Unexpect(ErrCode::Value::CostLimitExceeded);
      }
      // Start recording time of running host function.
      Stat->stopRecordWasm();
      Stat->startRecordHost();
    }

    // Call pre-host-function
    HostFuncHelper.invokePreHostFunc();

    // Run host function.
    Span<ValVariant> Args = StackMgr.getTopSpan(ArgsN);
    for (uint32_t I = 0; I < ArgsN; I++) {
      // For the number type cases of the arguments, the unused bits should be
      // erased due to the security issue.
      cleanNumericVal(Args[I], FuncType.getParamTypes()[I]);
    }
    std::vector<ValVariant> Rets(RetsN);
    auto Ret = HostFunc.run(CallFrame, std::move(Args), Rets);

    // Call post-host-function
    HostFuncHelper.invokePostHostFunc();

    // Do the statistics if the statistics turned on.
    if (Stat) {
      // Stop recording time of running host function.
      Stat->stopRecordHost();
      Stat->startRecordWasm();
    }

    // Check the host function execution status.
    // Only log host-func errors and non-WASM-category errors; plain WASM trap
    // codes propagate silently and are reported by the outer invoke path.
    if (!Ret) {
      if (Ret.error() == ErrCode::Value::HostFuncError ||
          Ret.error().getCategory() != ErrCategory::WASM) {
        spdlog::error(Ret.error());
      }
      return Unexpect(Ret);
    }

    // Push returns back to stack.
    for (auto &R : Rets) {
      StackMgr.push(std::move(R));
    }

    // For host function case, the continuation will be the continuation from
    // the popped frame.
    return StackMgr.popFrame();
  } else if (Func.isCompiledFunction()) {
    // Compiled function case: Execute the function and jump to the
    // continuation.

    // Push frame.
    StackMgr.pushFrame(Func.getModule(), // Module instance
                       RetIt,            // Return PC
                       ArgsN,            // Only args, no locals in stack
                       RetsN,            // Returns num
                       IsTailCall        // For tail-call
    );

    // Prepare arguments.
    Span<ValVariant> Args = StackMgr.getTopSpan(ArgsN);
    std::vector<ValVariant> Rets(RetsN);
    // Swap in this executor's thread-local state for the duration of the
    // native call (restored automatically on scope exit).
    SavedThreadLocal Saved(*this, StackMgr, Func);

    ErrCode Err;
    try {
      // Get symbol and execute the function.
      // PREPARE_FAULT returns 0 on the initial setup and a packed error code
      // (category in the top byte) when a fault longjmp-returns here.
      Fault FaultHandler;
      uint32_t Code = PREPARE_FAULT(FaultHandler);
      if (Code != 0) {
        auto InnerStackTrace = FaultHandler.stacktrace();
        {
          // Trim the common suffix shared between the fault-site trace and
          // the current (outer) trace so only the compiled frames remain.
          std::array<void *, 256> Buffer;
          auto OuterStackTrace = stackTrace(Buffer);
          while (!OuterStackTrace.empty() && !InnerStackTrace.empty() &&
                 InnerStackTrace[InnerStackTrace.size() - 1] ==
                     OuterStackTrace[OuterStackTrace.size() - 1]) {
            InnerStackTrace = InnerStackTrace.first(InnerStackTrace.size() - 1);
            OuterStackTrace = OuterStackTrace.first(OuterStackTrace.size() - 1);
          }
        }
        StackTraceSize =
            compiledStackTrace(StackMgr, InnerStackTrace, StackTrace).size();
        Err = ErrCode(static_cast<ErrCategory>(Code >> 24), Code);
      } else {
        // Normal path: call through the function-type wrapper symbol.
        auto &Wrapper = FuncType.getSymbol();
        Wrapper(&ExecutionContext, Func.getSymbol().get(), Args.data(),
                Rets.data());
      }
    } catch (const ErrCode &E) {
      // Compiled code may also signal errors by throwing ErrCode.
      Err = E;
    }
    if (unlikely(Err)) {
      if (Err != ErrCode::Value::Terminated) {
        spdlog::error(Err);
      }
      // Append the interpreter-level frames after the compiled-level ones.
      StackTraceSize +=
          interpreterStackTrace(
              StackMgr, Span<uint32_t>{StackTrace}.subspan(StackTraceSize))
              .size();
      return Unexpect(Err);
    }

    // Push returns back to stack.
    for (uint32_t I = 0; I < Rets.size(); ++I) {
      StackMgr.push(Rets[I]);
    }

    // For compiled function case, the continuation will be the continuation
    // from the popped frame.
    return StackMgr.popFrame();
  } else {
    // Native function case: Jump to the start of the function body.

    // Push local variables into the stack.
    // Each (count, type) pair expands to `count` zero-initialized locals.
    for (auto &Def : Func.getLocals()) {
      for (uint32_t I = 0; I < Def.first; I++) {
        StackMgr.push(ValueFromType(Def.second));
      }
    }

    // Push frame.
    // The PC must -1 here because in the interpreter mode execution, the PC
    // will increase after the callee return.
    StackMgr.pushFrame(Func.getModule(),          // Module instance
                       RetIt - 1,                 // Return PC
                       ArgsN + Func.getLocalNum(), // Arguments num + local num
                       RetsN,                     // Returns num
                       IsTailCall                 // For tail-call
    );

    // For native function case, the continuation will be the start of the
    // function body.
    return Func.getInstrs().begin();
  }
}
233 | | |
// Perform a branch to a label: erase the value-stack slots described by the
// jump descriptor and move the PC by the jump's offset.
//
// StackMgr: the stack manager of the current execution flow.
// JumpDesc: erase range and PC offset computed at validation time.
// PC:       in/out program counter; adjusted so the interpreter loop's
//           increment lands on the branch target.
Expect<void>
Executor::branchToLabel(Runtime::StackManager &StackMgr,
                        const AST::Instruction::JumpDescriptor &JumpDesc,
                        AST::InstrView::iterator &PC) noexcept {
  // Check the stop token.
  if (unlikely(StopToken.exchange(0, std::memory_order_relaxed))) {
    spdlog::error(ErrCode::Value::Interrupted);
    return Unexpect(ErrCode::Value::Interrupted);
  }

  StackMgr.eraseValueStack(JumpDesc.StackEraseBegin, JumpDesc.StackEraseEnd);
  // PC need to -1 here because the PC will increase in the next iteration.
  PC += (JumpDesc.PCOffset - 1);
  return {};
}
249 | | |
// Throw a wasm exception with the given tag: unwind handlers from the stack
// until one with a matching catch clause is found, then branch into it.
// Returns UncaughtException if no handler on the stack catches the tag.
Expect<void> Executor::throwException(Runtime::StackManager &StackMgr,
                                      Runtime::Instance::TagInstance &TagInst,
                                      AST::InstrView::iterator &PC) noexcept {
  StackMgr.removeInactiveHandler(PC);
  // Number of associated values the exception carries on the value stack.
  auto AssocValSize = TagInst.getTagType().getAssocValSize();
  while (true) {
    // Pop the top handler.
    auto Handler = StackMgr.popTopHandler(AssocValSize);
    if (!Handler.has_value()) {
      break;
    }
    // Checking through the catch clause.
    for (const auto &C : Handler->CatchClause) {
      if (!C.IsAll && getTagInstByIdx(StackMgr, C.TagIndex) != &TagInst) {
        // For catching a specific tag, should check the equivalence of tag
        // address.
        continue;
      }
      if (C.IsRef) {
        // For catching a exception reference, push the reference value onto
        // stack.
        StackMgr.push(
            RefVariant(ValType(TypeCode::Ref, TypeCode::ExnRef), &TagInst));
      }
      // When being here, an exception is caught. Move the PC to the try block
      // and branch to the label.

      PC = Handler->Try;
      return branchToLabel(StackMgr, C.Jump, PC);
    }
  }
  spdlog::error(ErrCode::Value::UncaughtException);
  return Unexpect(ErrCode::Value::UncaughtException);
}
284 | | |
285 | | const AST::SubType *Executor::getDefTypeByIdx(Runtime::StackManager &StackMgr, |
286 | 0 | const uint32_t Idx) const { |
287 | 0 | const auto *ModInst = StackMgr.getModule(); |
288 | | // When top frame is dummy frame, cannot find instance. |
289 | 0 | if (unlikely(ModInst == nullptr)) { |
290 | 0 | return nullptr; |
291 | 0 | } |
292 | 0 | return ModInst->unsafeGetType(Idx); |
293 | 0 | } |
294 | | |
295 | | const WasmEdge::AST::CompositeType & |
296 | | Executor::getCompositeTypeByIdx(Runtime::StackManager &StackMgr, |
297 | 0 | const uint32_t Idx) const noexcept { |
298 | 0 | auto *DefType = getDefTypeByIdx(StackMgr, Idx); |
299 | 0 | assuming(DefType); |
300 | 0 | const auto &CompType = DefType->getCompositeType(); |
301 | 0 | assuming(!CompType.isFunc()); |
302 | 0 | return CompType; |
303 | 0 | } |
304 | | |
305 | | const ValType & |
306 | | Executor::getStructStorageTypeByIdx(Runtime::StackManager &StackMgr, |
307 | | const uint32_t Idx, |
308 | 0 | const uint32_t Off) const noexcept { |
309 | 0 | const auto &CompType = getCompositeTypeByIdx(StackMgr, Idx); |
310 | 0 | assuming(static_cast<uint32_t>(CompType.getFieldTypes().size()) > Off); |
311 | 0 | return CompType.getFieldTypes()[Off].getStorageType(); |
312 | 0 | } |
313 | | |
314 | | const ValType & |
315 | | Executor::getArrayStorageTypeByIdx(Runtime::StackManager &StackMgr, |
316 | 0 | const uint32_t Idx) const noexcept { |
317 | 0 | const auto &CompType = getCompositeTypeByIdx(StackMgr, Idx); |
318 | 0 | assuming(static_cast<uint32_t>(CompType.getFieldTypes().size()) == 1); |
319 | 0 | return CompType.getFieldTypes()[0].getStorageType(); |
320 | 0 | } |
321 | | |
322 | | Runtime::Instance::FunctionInstance * |
323 | | Executor::getFuncInstByIdx(Runtime::StackManager &StackMgr, |
324 | 0 | const uint32_t Idx) const { |
325 | 0 | const auto *ModInst = StackMgr.getModule(); |
326 | | // When top frame is dummy frame, cannot find instance. |
327 | 0 | if (unlikely(ModInst == nullptr)) { |
328 | 0 | return nullptr; |
329 | 0 | } |
330 | 0 | return ModInst->unsafeGetFunction(Idx); |
331 | 0 | } |
332 | | |
333 | | Runtime::Instance::TableInstance * |
334 | | Executor::getTabInstByIdx(Runtime::StackManager &StackMgr, |
335 | 0 | const uint32_t Idx) const { |
336 | 0 | const auto *ModInst = StackMgr.getModule(); |
337 | | // When top frame is dummy frame, cannot find instance. |
338 | 0 | if (unlikely(ModInst == nullptr)) { |
339 | 0 | return nullptr; |
340 | 0 | } |
341 | 0 | return ModInst->unsafeGetTable(Idx); |
342 | 0 | } |
343 | | |
344 | | Runtime::Instance::MemoryInstance * |
345 | | Executor::getMemInstByIdx(Runtime::StackManager &StackMgr, |
346 | 0 | const uint32_t Idx) const { |
347 | 0 | const auto *ModInst = StackMgr.getModule(); |
348 | | // When top frame is dummy frame, cannot find instance. |
349 | 0 | if (unlikely(ModInst == nullptr)) { |
350 | 0 | return nullptr; |
351 | 0 | } |
352 | 0 | return ModInst->unsafeGetMemory(Idx); |
353 | 0 | } |
354 | | |
355 | | Runtime::Instance::TagInstance * |
356 | | Executor::getTagInstByIdx(Runtime::StackManager &StackMgr, |
357 | 0 | const uint32_t Idx) const { |
358 | 0 | const auto *ModInst = StackMgr.getModule(); |
359 | | // When top frame is dummy frame, cannot find instance. |
360 | 0 | if (unlikely(ModInst == nullptr)) { |
361 | 0 | return nullptr; |
362 | 0 | } |
363 | 0 | return ModInst->unsafeGetTag(Idx); |
364 | 0 | } |
365 | | |
366 | | Runtime::Instance::GlobalInstance * |
367 | | Executor::getGlobInstByIdx(Runtime::StackManager &StackMgr, |
368 | 0 | const uint32_t Idx) const { |
369 | 0 | const auto *ModInst = StackMgr.getModule(); |
370 | | // When top frame is dummy frame, cannot find instance. |
371 | 0 | if (unlikely(ModInst == nullptr)) { |
372 | 0 | return nullptr; |
373 | 0 | } |
374 | 0 | return ModInst->unsafeGetGlobal(Idx); |
375 | 0 | } |
376 | | |
377 | | Runtime::Instance::ElementInstance * |
378 | | Executor::getElemInstByIdx(Runtime::StackManager &StackMgr, |
379 | 0 | const uint32_t Idx) const { |
380 | 0 | const auto *ModInst = StackMgr.getModule(); |
381 | | // When top frame is dummy frame, cannot find instance. |
382 | 0 | if (unlikely(ModInst == nullptr)) { |
383 | 0 | return nullptr; |
384 | 0 | } |
385 | 0 | return ModInst->unsafeGetElem(Idx); |
386 | 0 | } |
387 | | |
388 | | Runtime::Instance::DataInstance * |
389 | | Executor::getDataInstByIdx(Runtime::StackManager &StackMgr, |
390 | 0 | const uint32_t Idx) const { |
391 | 0 | const auto *ModInst = StackMgr.getModule(); |
392 | | // When top frame is dummy frame, cannot find instance. |
393 | 0 | if (unlikely(ModInst == nullptr)) { |
394 | 0 | return nullptr; |
395 | 0 | } |
396 | 0 | return ModInst->unsafeGetData(Idx); |
397 | 0 | } |
398 | | |
// Map a value type to the bottom type of its subtype hierarchy: reference
// types collapse to the corresponding null reference kind (per the wasm GC
// type lattice), while non-reference types map to themselves.
TypeCode Executor::toBottomType(Runtime::StackManager &StackMgr,
                                const ValType &Type) const {
  if (Type.isRefType()) {
    if (Type.isAbsHeapType()) {
      // Abstract heap type: collapse within its hierarchy (func / extern /
      // any / exn).
      switch (Type.getHeapTypeCode()) {
      case TypeCode::NullFuncRef:
      case TypeCode::FuncRef:
        return TypeCode::NullFuncRef;
      case TypeCode::NullExternRef:
      case TypeCode::ExternRef:
        return TypeCode::NullExternRef;
      case TypeCode::NullRef:
      case TypeCode::AnyRef:
      case TypeCode::EqRef:
      case TypeCode::I31Ref:
      case TypeCode::StructRef:
      case TypeCode::ArrayRef:
        return TypeCode::NullRef;
      case TypeCode::ExnRef:
        return TypeCode::ExnRef;
      default:
        assumingUnreachable();
      }
    } else {
      // Concrete (indexed) heap type: inspect the composite type to decide
      // between the func and the any hierarchy.
      const auto &CompType =
          (*StackMgr.getModule()->getType(Type.getTypeIndex()))
              ->getCompositeType();
      if (CompType.isFunc()) {
        return TypeCode::NullFuncRef;
      } else {
        return TypeCode::NullRef;
      }
    }
  } else {
    // Number/vector types are their own bottom.
    return Type.getCode();
  }
}
436 | | |
437 | | void Executor::cleanNumericVal(ValVariant &Val, |
438 | 0 | const ValType &Type) const noexcept { |
439 | 0 | if (Type.isNumType()) { |
440 | 0 | switch (Type.getCode()) { |
441 | 0 | case TypeCode::I32: { |
442 | 0 | uint32_t V = Val.get<uint32_t>(); |
443 | 0 | Val.emplace<uint128_t>(static_cast<uint128_t>(0U)); |
444 | 0 | Val.emplace<uint32_t>(V); |
445 | 0 | break; |
446 | 0 | } |
447 | 0 | case TypeCode::F32: { |
448 | 0 | float V = Val.get<float>(); |
449 | 0 | Val.emplace<uint128_t>(static_cast<uint128_t>(0U)); |
450 | 0 | Val.emplace<float>(V); |
451 | 0 | break; |
452 | 0 | } |
453 | 0 | case TypeCode::I64: { |
454 | 0 | uint64_t V = Val.get<uint64_t>(); |
455 | 0 | Val.emplace<uint128_t>(static_cast<uint128_t>(0U)); |
456 | 0 | Val.emplace<uint64_t>(V); |
457 | 0 | break; |
458 | 0 | } |
459 | 0 | case TypeCode::F64: { |
460 | 0 | double V = Val.get<double>(); |
461 | 0 | Val.emplace<uint128_t>(static_cast<uint128_t>(0U)); |
462 | 0 | Val.emplace<double>(V); |
463 | 0 | break; |
464 | 0 | } |
465 | 0 | default: |
466 | 0 | break; |
467 | 0 | } |
468 | 0 | } |
469 | 0 | } |
470 | | |
// Pack an i32 value into a packed storage type (i8/i16) representation.
// On little-endian hosts the value is masked to the low bits; on big-endian
// hosts it is shifted into the high bits so the in-memory byte layout of the
// packed field matches. Non-packed types pass through unchanged.
ValVariant Executor::packVal(const ValType &Type,
                             const ValVariant &Val) const noexcept {
  if (Type.isPackType()) {
    switch (Type.getCode()) {
    case TypeCode::I8:
      if constexpr (Endian::native == Endian::little) {
        return ValVariant(Val.get<uint32_t>() & 0xFFU);
      } else {
        return ValVariant(Val.get<uint32_t>() << 24);
      }
    case TypeCode::I16:
      if constexpr (Endian::native == Endian::little) {
        return ValVariant(Val.get<uint32_t>() & 0xFFFFU);
      } else {
        return ValVariant(Val.get<uint32_t>() << 16);
      }
    default:
      assumingUnreachable();
    }
  }
  return Val;
}
493 | | |
494 | | std::vector<ValVariant> |
495 | | Executor::packVals(const ValType &Type, |
496 | 0 | std::vector<ValVariant> &&Vals) const noexcept { |
497 | 0 | for (uint32_t I = 0; I < Vals.size(); I++) { |
498 | 0 | Vals[I] = packVal(Type, Vals[I]); |
499 | 0 | } |
500 | 0 | return std::move(Vals); |
501 | 0 | } |
502 | | |
// Unpack a packed storage value (i8/i16) back to an i32, with optional sign
// extension. On big-endian hosts the value was stored in the high bits (see
// packVal), so it is shifted down first. Non-packed types pass through.
ValVariant Executor::unpackVal(const ValType &Type, const ValVariant &Val,
                               bool IsSigned) const noexcept {
  if (Type.isPackType()) {
    uint32_t Num = Val.get<uint32_t>();
    switch (Type.getCode()) {
    case TypeCode::I8:
      if constexpr (Endian::native == Endian::big) {
        Num >>= 24;
      }
      if (IsSigned) {
        // Sign-extend through int8_t, then widen back to uint32_t.
        return static_cast<uint32_t>(static_cast<int8_t>(Num));
      } else {
        return static_cast<uint32_t>(static_cast<uint8_t>(Num));
      }
    case TypeCode::I16:
      if constexpr (Endian::native == Endian::big) {
        Num >>= 16;
      }
      if (IsSigned) {
        // Sign-extend through int16_t, then widen back to uint32_t.
        return static_cast<uint32_t>(static_cast<int16_t>(Num));
      } else {
        return static_cast<uint32_t>(static_cast<uint16_t>(Num));
      }
    default:
      assumingUnreachable();
    }
  }
  return Val;
}
532 | | } // namespace Executor |
533 | | } // namespace WasmEdge |