/src/WasmEdge/include/executor/engine/atomic.ipp
Line | Count | Source (jump to first uncovered line) |
1 | | // SPDX-License-Identifier: Apache-2.0 |
2 | | // SPDX-FileCopyrightText: 2019-2024 Second State INC |
3 | | |
4 | | #include "executor/executor.h" |
5 | | #include "runtime/instance/memory.h" |
6 | | #include <experimental/scope.hpp> |
7 | | |
8 | | #include <cstdint> |
9 | | |
10 | | namespace WasmEdge { |
11 | | namespace Executor { |
12 | | |
13 | | template <typename T> |
14 | | TypeT<T> Executor::runAtomicWaitOp(Runtime::StackManager &StackMgr, |
15 | | Runtime::Instance::MemoryInstance &MemInst, |
16 | 0 | const AST::Instruction &Instr) { |
17 | 0 | ValVariant RawTimeout = StackMgr.pop(); |
18 | 0 | ValVariant RawValue = StackMgr.pop(); |
19 | 0 | ValVariant &RawAddress = StackMgr.getTop(); |
20 | |
|
21 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
22 | 0 | if (Address > |
23 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
24 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
25 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
26 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(T), |
27 | 0 | MemInst.getBoundIdx())); |
28 | 0 | spdlog::error( |
29 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
30 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
31 | 0 | } |
32 | 0 | Address += Instr.getMemoryOffset(); |
33 | |
|
34 | 0 | if (Address % sizeof(T) != 0) { |
35 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
36 | 0 | spdlog::error( |
37 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
38 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
39 | 0 | } |
40 | | |
41 | 0 | int64_t Timeout = RawTimeout.get<int64_t>(); |
42 | |
|
43 | 0 | return atomicWait<T>(MemInst, Address, RawValue.get<T>(), Timeout) |
44 | 0 | .map_error([&Instr](auto E) { |
45 | 0 | spdlog::error(E); |
46 | 0 | spdlog::error( |
47 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
48 | 0 | return E; |
49 | 0 | }) Unexecuted instantiation: auto WasmEdge::Executor::Executor::runAtomicWaitOp<int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&)::{lambda(auto:1)#1}::operator()<WasmEdge::ErrCode>(WasmEdge::ErrCode) const Unexecuted instantiation: auto WasmEdge::Executor::Executor::runAtomicWaitOp<long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&)::{lambda(auto:1)#1}::operator()<WasmEdge::ErrCode>(WasmEdge::ErrCode) const |
50 | 0 | .map([&](auto V) { RawAddress.emplace<uint32_t>(V); }); Unexecuted instantiation: auto WasmEdge::Executor::Executor::runAtomicWaitOp<int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&)::{lambda(auto:1)#2}::operator()<unsigned int>(unsigned int) const Unexecuted instantiation: auto WasmEdge::Executor::Executor::runAtomicWaitOp<long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&)::{lambda(auto:1)#2}::operator()<unsigned int>(unsigned int) const |
51 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicWaitOp<int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicWaitOp<long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
52 | | |
53 | | template <typename T, typename I> |
54 | | TypeT<T> Executor::runAtomicLoadOp(Runtime::StackManager &StackMgr, |
55 | | Runtime::Instance::MemoryInstance &MemInst, |
56 | 0 | const AST::Instruction &Instr) { |
57 | 0 | ValVariant &RawAddress = StackMgr.getTop(); |
58 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
59 | |
|
60 | 0 | if (Address > |
61 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
62 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
63 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
64 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(I), |
65 | 0 | MemInst.getBoundIdx())); |
66 | 0 | spdlog::error( |
67 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
68 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
69 | 0 | } |
70 | 0 | Address += Instr.getMemoryOffset(); |
71 | |
|
72 | 0 | if (Address % sizeof(I) != 0) { |
73 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
74 | 0 | spdlog::error( |
75 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
76 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
77 | 0 | } |
78 | | |
79 | | // make sure the address no OOB with size I |
80 | 0 | auto *AtomicObj = MemInst.getPointer<std::atomic<I> *>(Address); |
81 | 0 | if (!AtomicObj) { |
82 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
83 | 0 | spdlog::error( |
84 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
85 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
86 | 0 | } |
87 | | |
88 | 0 | I Return = AtomicObj->load(); |
89 | 0 | RawAddress.emplace<T>(static_cast<T>(Return)); |
90 | 0 | return {}; |
91 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicLoadOp<int, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicLoadOp<long, unsigned long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicLoadOp<unsigned int, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicLoadOp<unsigned int, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicLoadOp<unsigned long, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicLoadOp<unsigned long, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type 
WasmEdge::Executor::Executor::runAtomicLoadOp<unsigned long, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
92 | | |
93 | | template <typename T, typename I> |
94 | | TypeT<T> Executor::runAtomicStoreOp(Runtime::StackManager &StackMgr, |
95 | | Runtime::Instance::MemoryInstance &MemInst, |
96 | 0 | const AST::Instruction &Instr) { |
97 | 0 | ValVariant RawValue = StackMgr.pop(); |
98 | 0 | ValVariant RawAddress = StackMgr.pop(); |
99 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
100 | |
|
101 | 0 | if (Address > |
102 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
103 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
104 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
105 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(I), |
106 | 0 | MemInst.getBoundIdx())); |
107 | 0 | spdlog::error( |
108 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
109 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
110 | 0 | } |
111 | 0 | Address += Instr.getMemoryOffset(); |
112 | |
|
113 | 0 | if (Address % sizeof(I) != 0) { |
114 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
115 | 0 | spdlog::error( |
116 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
117 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
118 | 0 | } |
119 | | |
120 | | // make sure the address no OOB with size I |
121 | 0 | auto *AtomicObj = MemInst.getPointer<std::atomic<I> *>(Address); |
122 | 0 | if (!AtomicObj) { |
123 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
124 | 0 | spdlog::error( |
125 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
126 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
127 | 0 | } |
128 | 0 | I Value = static_cast<I>(RawValue.get<T>()); |
129 | |
|
130 | 0 | AtomicObj->store(Value); |
131 | 0 | return {}; |
132 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicStoreOp<int, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicStoreOp<long, unsigned long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicStoreOp<unsigned int, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicStoreOp<unsigned int, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicStoreOp<unsigned long, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicStoreOp<unsigned long, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type 
WasmEdge::Executor::Executor::runAtomicStoreOp<unsigned long, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
133 | | |
134 | | template <typename T, typename I> |
135 | | TypeT<T> Executor::runAtomicAddOp(Runtime::StackManager &StackMgr, |
136 | | Runtime::Instance::MemoryInstance &MemInst, |
137 | 0 | const AST::Instruction &Instr) { |
138 | 0 | ValVariant RawValue = StackMgr.pop(); |
139 | 0 | ValVariant &RawAddress = StackMgr.getTop(); |
140 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
141 | |
|
142 | 0 | if (Address > |
143 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
144 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
145 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
146 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(I), |
147 | 0 | MemInst.getBoundIdx())); |
148 | 0 | spdlog::error( |
149 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
150 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
151 | 0 | } |
152 | 0 | Address += Instr.getMemoryOffset(); |
153 | |
|
154 | 0 | if (Address % sizeof(I) != 0) { |
155 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
156 | 0 | spdlog::error( |
157 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
158 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
159 | 0 | } |
160 | | |
161 | | // make sure the address no OOB with size I |
162 | 0 | auto *AtomicObj = MemInst.getPointer<std::atomic<I> *>(Address); |
163 | 0 | if (!AtomicObj) { |
164 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
165 | 0 | spdlog::error( |
166 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
167 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
168 | 0 | } |
169 | 0 | I Value = static_cast<I>(RawValue.get<T>()); |
170 | |
|
171 | 0 | I Return = AtomicObj->fetch_add(Value); |
172 | 0 | RawAddress.emplace<T>(static_cast<T>(Return)); |
173 | 0 | return {}; |
174 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAddOp<int, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAddOp<long, unsigned long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAddOp<unsigned int, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAddOp<unsigned int, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAddOp<unsigned long, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAddOp<unsigned long, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type 
WasmEdge::Executor::Executor::runAtomicAddOp<unsigned long, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
175 | | |
176 | | template <typename T, typename I> |
177 | | TypeT<T> Executor::runAtomicSubOp(Runtime::StackManager &StackMgr, |
178 | | Runtime::Instance::MemoryInstance &MemInst, |
179 | 0 | const AST::Instruction &Instr) { |
180 | 0 | ValVariant RawValue = StackMgr.pop(); |
181 | 0 | ValVariant &RawAddress = StackMgr.getTop(); |
182 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
183 | |
|
184 | 0 | if (Address > |
185 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
186 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
187 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
188 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(I), |
189 | 0 | MemInst.getBoundIdx())); |
190 | 0 | spdlog::error( |
191 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
192 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
193 | 0 | } |
194 | 0 | Address += Instr.getMemoryOffset(); |
195 | |
|
196 | 0 | if (Address % sizeof(I) != 0) { |
197 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
198 | 0 | spdlog::error( |
199 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
200 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
201 | 0 | } |
202 | | |
203 | | // make sure the address no OOB with size I |
204 | 0 | auto *AtomicObj = MemInst.getPointer<std::atomic<I> *>(Address); |
205 | 0 | if (!AtomicObj) { |
206 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
207 | 0 | spdlog::error( |
208 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
209 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
210 | 0 | } |
211 | 0 | I Value = static_cast<I>(RawValue.get<T>()); |
212 | |
|
213 | 0 | I Return = AtomicObj->fetch_sub(Value); |
214 | 0 | RawAddress.emplace<T>(static_cast<T>(Return)); |
215 | 0 | return {}; |
216 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicSubOp<int, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicSubOp<long, unsigned long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicSubOp<unsigned int, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicSubOp<unsigned int, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicSubOp<unsigned long, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicSubOp<unsigned long, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type 
WasmEdge::Executor::Executor::runAtomicSubOp<unsigned long, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
217 | | |
218 | | template <typename T, typename I> |
219 | | TypeT<T> Executor::runAtomicOrOp(Runtime::StackManager &StackMgr, |
220 | | Runtime::Instance::MemoryInstance &MemInst, |
221 | 0 | const AST::Instruction &Instr) { |
222 | 0 | ValVariant RawValue = StackMgr.pop(); |
223 | 0 | ValVariant &RawAddress = StackMgr.getTop(); |
224 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
225 | |
|
226 | 0 | if (Address > |
227 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
228 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
229 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
230 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(I), |
231 | 0 | MemInst.getBoundIdx())); |
232 | 0 | spdlog::error( |
233 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
234 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
235 | 0 | } |
236 | 0 | Address += Instr.getMemoryOffset(); |
237 | |
|
238 | 0 | if (Address % sizeof(I) != 0) { |
239 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
240 | 0 | spdlog::error( |
241 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
242 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
243 | 0 | } |
244 | | |
245 | | // make sure the address no OOB with size I |
246 | 0 | auto *AtomicObj = MemInst.getPointer<std::atomic<I> *>(Address); |
247 | 0 | if (!AtomicObj) { |
248 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
249 | 0 | spdlog::error( |
250 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
251 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
252 | 0 | } |
253 | 0 | I Value = static_cast<I>(RawValue.get<T>()); |
254 | |
|
255 | 0 | I Return = AtomicObj->fetch_or(Value); |
256 | 0 | RawAddress.emplace<T>(static_cast<T>(Return)); |
257 | 0 | return {}; |
258 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicOrOp<int, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicOrOp<long, unsigned long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicOrOp<unsigned int, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicOrOp<unsigned int, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicOrOp<unsigned long, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicOrOp<unsigned long, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type 
WasmEdge::Executor::Executor::runAtomicOrOp<unsigned long, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
259 | | |
260 | | template <typename T, typename I> |
261 | | TypeT<T> Executor::runAtomicAndOp(Runtime::StackManager &StackMgr, |
262 | | Runtime::Instance::MemoryInstance &MemInst, |
263 | 0 | const AST::Instruction &Instr) { |
264 | 0 | ValVariant RawValue = StackMgr.pop(); |
265 | 0 | ValVariant &RawAddress = StackMgr.getTop(); |
266 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
267 | |
|
268 | 0 | if (Address > |
269 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
270 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
271 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
272 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(I), |
273 | 0 | MemInst.getBoundIdx())); |
274 | 0 | spdlog::error( |
275 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
276 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
277 | 0 | } |
278 | 0 | Address += Instr.getMemoryOffset(); |
279 | |
|
280 | 0 | if (Address % sizeof(I) != 0) { |
281 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
282 | 0 | spdlog::error( |
283 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
284 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
285 | 0 | } |
286 | | |
287 | | // make sure the address no OOB with size I |
288 | 0 | auto *AtomicObj = MemInst.getPointer<std::atomic<I> *>(Address); |
289 | 0 | if (!AtomicObj) { |
290 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
291 | 0 | spdlog::error( |
292 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
293 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
294 | 0 | } |
295 | 0 | I Value = static_cast<I>(RawValue.get<T>()); |
296 | |
|
297 | 0 | I Return = AtomicObj->fetch_and(Value); |
298 | 0 | RawAddress.emplace<T>(static_cast<T>(Return)); |
299 | 0 | return {}; |
300 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAndOp<int, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAndOp<long, unsigned long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAndOp<unsigned int, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAndOp<unsigned int, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAndOp<unsigned long, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicAndOp<unsigned long, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type 
WasmEdge::Executor::Executor::runAtomicAndOp<unsigned long, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
301 | | |
302 | | template <typename T, typename I> |
303 | | TypeT<T> Executor::runAtomicXorOp(Runtime::StackManager &StackMgr, |
304 | | Runtime::Instance::MemoryInstance &MemInst, |
305 | 0 | const AST::Instruction &Instr) { |
306 | 0 | ValVariant RawValue = StackMgr.pop(); |
307 | 0 | ValVariant &RawAddress = StackMgr.getTop(); |
308 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
309 | |
|
310 | 0 | if (Address > |
311 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
312 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
313 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
314 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(I), |
315 | 0 | MemInst.getBoundIdx())); |
316 | 0 | spdlog::error( |
317 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
318 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
319 | 0 | } |
320 | 0 | Address += Instr.getMemoryOffset(); |
321 | |
|
322 | 0 | if (Address % sizeof(I) != 0) { |
323 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
324 | 0 | spdlog::error( |
325 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
326 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
327 | 0 | } |
328 | | |
329 | | // make sure the address no OOB with size I |
330 | 0 | auto *AtomicObj = MemInst.getPointer<std::atomic<I> *>(Address); |
331 | 0 | if (!AtomicObj) { |
332 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
333 | 0 | spdlog::error( |
334 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
335 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
336 | 0 | } |
337 | 0 | I Value = static_cast<I>(RawValue.get<T>()); |
338 | |
|
339 | 0 | I Return = AtomicObj->fetch_xor(Value); |
340 | 0 | RawAddress.emplace<T>(static_cast<T>(Return)); |
341 | 0 | return {}; |
342 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicXorOp<int, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicXorOp<long, unsigned long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicXorOp<unsigned int, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicXorOp<unsigned int, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicXorOp<unsigned long, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicXorOp<unsigned long, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type 
WasmEdge::Executor::Executor::runAtomicXorOp<unsigned long, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
343 | | |
344 | | template <typename T, typename I> |
345 | | TypeT<T> |
346 | | Executor::runAtomicExchangeOp(Runtime::StackManager &StackMgr, |
347 | | Runtime::Instance::MemoryInstance &MemInst, |
348 | 0 | const AST::Instruction &Instr) { |
349 | 0 | ValVariant RawValue = StackMgr.pop(); |
350 | 0 | ValVariant &RawAddress = StackMgr.getTop(); |
351 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
352 | |
|
353 | 0 | if (Address > |
354 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
355 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
356 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
357 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(I), |
358 | 0 | MemInst.getBoundIdx())); |
359 | 0 | spdlog::error( |
360 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
361 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
362 | 0 | } |
363 | 0 | Address += Instr.getMemoryOffset(); |
364 | |
|
365 | 0 | if (Address % sizeof(I) != 0) { |
366 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
367 | 0 | spdlog::error( |
368 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
369 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
370 | 0 | } |
371 | | |
372 | | // make sure the address no OOB with size I |
373 | 0 | auto *AtomicObj = MemInst.getPointer<std::atomic<I> *>(Address); |
374 | 0 | if (!AtomicObj) { |
375 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
376 | 0 | spdlog::error( |
377 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
378 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
379 | 0 | } |
380 | 0 | I Value = static_cast<I>(RawValue.get<T>()); |
381 | |
|
382 | 0 | I Return = AtomicObj->exchange(Value); |
383 | 0 | RawAddress.emplace<T>(static_cast<T>(Return)); |
384 | 0 | return {}; |
385 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicExchangeOp<int, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicExchangeOp<long, unsigned long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicExchangeOp<unsigned int, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicExchangeOp<unsigned int, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicExchangeOp<unsigned long, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicExchangeOp<unsigned long, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type 
WasmEdge::Executor::Executor::runAtomicExchangeOp<unsigned long, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
386 | | |
387 | | template <typename T, typename I> |
388 | | TypeT<T> |
389 | | Executor::runAtomicCompareExchangeOp(Runtime::StackManager &StackMgr, |
390 | | Runtime::Instance::MemoryInstance &MemInst, |
391 | 0 | const AST::Instruction &Instr) { |
392 | 0 | ValVariant RawReplacement = StackMgr.pop(); |
393 | 0 | ValVariant RawExpected = StackMgr.pop(); |
394 | 0 | ValVariant &RawAddress = StackMgr.getTop(); |
395 | 0 | uint32_t Address = RawAddress.get<uint32_t>(); |
396 | |
|
397 | 0 | if (Address > |
398 | 0 | std::numeric_limits<uint32_t>::max() - Instr.getMemoryOffset()) { |
399 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
400 | 0 | spdlog::error(ErrInfo::InfoBoundary( |
401 | 0 | Address + static_cast<uint64_t>(Instr.getMemoryOffset()), sizeof(I), |
402 | 0 | MemInst.getBoundIdx())); |
403 | 0 | spdlog::error( |
404 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
405 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
406 | 0 | } |
407 | 0 | Address += Instr.getMemoryOffset(); |
408 | |
|
409 | 0 | if (Address % sizeof(I) != 0) { |
410 | 0 | spdlog::error(ErrCode::Value::UnalignedAtomicAccess); |
411 | 0 | spdlog::error( |
412 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
413 | 0 | return Unexpect(ErrCode::Value::UnalignedAtomicAccess); |
414 | 0 | } |
415 | | |
416 | | // make sure the address no OOB with size I |
417 | 0 | auto *AtomicObj = MemInst.getPointer<std::atomic<I> *>(Address); |
418 | 0 | if (!AtomicObj) { |
419 | 0 | spdlog::error(ErrCode::Value::MemoryOutOfBounds); |
420 | 0 | spdlog::error( |
421 | 0 | ErrInfo::InfoInstruction(Instr.getOpCode(), Instr.getOffset())); |
422 | 0 | return Unexpect(ErrCode::Value::MemoryOutOfBounds); |
423 | 0 | } |
424 | 0 | I Replacement = static_cast<I>(RawReplacement.get<T>()); |
425 | 0 | I Expected = static_cast<I>(RawExpected.get<T>()); |
426 | |
|
427 | 0 | AtomicObj->compare_exchange_strong(Expected, Replacement); |
428 | 0 | RawAddress.emplace<T>(static_cast<T>(Expected)); |
429 | 0 | return {}; |
430 | 0 | } Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicCompareExchangeOp<int, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicCompareExchangeOp<long, unsigned long>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicCompareExchangeOp<unsigned int, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned int>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicCompareExchangeOp<unsigned int, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicCompareExchangeOp<unsigned long, unsigned char>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicCompareExchangeOp<unsigned long, unsigned short>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) Unexecuted instantiation: std::__1::enable_if<IsWasmNumV<unsigned long>, 
cxx20::expected<void, WasmEdge::ErrCode> >::type WasmEdge::Executor::Executor::runAtomicCompareExchangeOp<unsigned long, unsigned int>(WasmEdge::Runtime::StackManager&, WasmEdge::Runtime::Instance::MemoryInstance&, WasmEdge::AST::Instruction const&) |
431 | | |
/// Block the current thread on a `memory.atomic.wait` operation.
///
/// Returns 0 ("ok", woken or value changed), 1 ("not-equal", memory did not
/// hold Expected on entry), or 2 ("timed-out"); errors for non-shared memory,
/// out-of-bounds address, or interruption via StopToken. A negative Timeout
/// means wait indefinitely; Timeout is in nanoseconds.
template <typename T>
Expect<uint32_t>
Executor::atomicWait(Runtime::Instance::MemoryInstance &MemInst,
                     uint32_t Address, T Expected, int64_t Timeout) noexcept {
  // The error message should be handled by the caller, or the AOT mode will
  // produce the duplicated messages.
  // Waiting is only valid on shared memories.
  if (!MemInst.isShared()) {
    return Unexpect(ErrCode::Value::ExpectSharedMemory);
  }

  // Bounds-check the address before doing anything else; getPointer returns
  // null when a sizeof(T) access at Address is out of bounds.
  if (auto *AtomicObj = MemInst.getPointer<std::atomic<T> *>(Address);
      !AtomicObj) {
    return Unexpect(ErrCode::Value::MemoryOutOfBounds);
  }

  // Compute the absolute deadline up front; empty optional = infinite wait.
  std::optional<std::chrono::time_point<std::chrono::steady_clock>> Until;
  if (Timeout >= 0) {
    Until.emplace(std::chrono::steady_clock::now() +
                  std::chrono::nanoseconds(Timeout));
  }

  // Re-fetch the pointer for use below; the check above guarantees non-null.
  auto *AtomicObj = MemInst.getPointer<std::atomic<T> *>(Address);
  assuming(AtomicObj);

  // Fast path: if memory no longer holds the expected value, return
  // "not-equal" without registering a waiter.
  if (AtomicObj->load() != Expected) {
    return UINT32_C(1); // NotEqual
  }

  // Register this thread as a waiter on Address so atomic.notify can find
  // it. NOTE(review): emplace's return is assigned directly to an iterator,
  // which implies WaiterMap is a multimap-like container — confirm at the
  // member declaration.
  decltype(WaiterMap)::iterator WaiterIterator;
  {
    std::unique_lock<decltype(WaiterMapMutex)> Locker(WaiterMapMutex);
    WaiterIterator = WaiterMap.emplace(Address, &MemInst);
  }

  // Guarantee deregistration on every exit path (return or exception-free
  // unwind) so notify never touches a dangling entry.
  cxx20::scope_exit ScopeExitHolder([&]() noexcept {
    std::unique_lock<decltype(WaiterMapMutex)> Locker(WaiterMapMutex);
    WaiterMap.erase(WaiterIterator);
  });

  // Wait loop: each wakeup (notify, timeout, or spurious) re-checks, in
  // order: interruption, value change, then deadline expiry.
  while (true) {
    std::unique_lock<decltype(WaiterIterator->second.Mutex)> Locker(
        WaiterIterator->second.Mutex);
    std::cv_status WaitResult = std::cv_status::no_timeout;
    if (!Until) {
      WaiterIterator->second.Cond.wait(Locker);
    } else {
      WaitResult = WaiterIterator->second.Cond.wait_until(Locker, *Until);
    }
    // Engine-requested interruption takes precedence over wait results.
    if (unlikely(StopToken.load(std::memory_order_relaxed) != 0)) {
      return Unexpect(ErrCode::Value::Interrupted);
    }
    // Value changed since entry: report a successful wake.
    if (likely(AtomicObj->load() != Expected)) {
      return UINT32_C(0); // ok
    }
    // Deadline reached with the value still equal: report timeout.
    if (WaitResult == std::cv_status::timeout) {
      return UINT32_C(2); // Timed-out
    }
  }
}
491 | | |
492 | | } // namespace Executor |
493 | | } // namespace WasmEdge |